diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 911c5c7de3..7f62772832 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -3,11 +3,14 @@ on:
push:
branches:
- master
- pull_request:
+ pull_request_target:
types: ["opened", "edited", "reopened", "synchronize"]
paths:
- "kafka-ui-api/**"
- "pom.xml"
+permissions:
+ checks: write
+ pull-requests: write
jobs:
build-and-test:
runs-on: ubuntu-latest
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 745be78228..d08e0cade7 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -1,6 +1,6 @@
name: "E2E: PR healthcheck"
on:
- pull_request:
+ pull_request_target:
types: [ "opened", "edited", "reopened", "synchronize" ]
paths:
- "kafka-ui-api/**"
@@ -8,6 +8,8 @@ on:
- "kafka-ui-react-app/**"
- "kafka-ui-e2e-checks/**"
- "pom.xml"
+permissions:
+ statuses: write
jobs:
build-and-test:
runs-on: ubuntu-latest
@@ -18,8 +20,8 @@ jobs:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
- aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
- aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-access-key-id: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Set up environment
id: set_env_values
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index 1b1c79944a..02ccd4e135 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -3,11 +3,14 @@ on:
push:
branches:
- master
- pull_request:
+ pull_request_target:
types: ["opened", "edited", "reopened", "synchronize"]
paths:
- "kafka-ui-contract/**"
- "kafka-ui-react-app/**"
+permissions:
+ checks: write
+ pull-requests: write
jobs:
build-and-test:
env:
@@ -24,7 +27,7 @@ jobs:
with:
version: 7.4.0
- name: Install node
- uses: actions/setup-node@v3.6.0
+ uses: actions/setup-node@v3.7.0
with:
node-version: "16.15.0"
cache: "pnpm"
diff --git a/.github/workflows/pr-checks.yaml b/.github/workflows/pr-checks.yaml
index 84e58b68c1..ce7dd17ae4 100644
--- a/.github/workflows/pr-checks.yaml
+++ b/.github/workflows/pr-checks.yaml
@@ -1,8 +1,9 @@
name: "PR: Checklist linter"
on:
- pull_request:
+ pull_request_target:
types: [opened, edited, synchronize, reopened]
-
+permissions:
+ checks: write
jobs:
task-check:
runs-on: ubuntu-latest
diff --git a/.github/workflows/welcome-first-time-contributors.yml b/.github/workflows/welcome-first-time-contributors.yml
index 5f662b71c6..1ac861055c 100644
--- a/.github/workflows/welcome-first-time-contributors.yml
+++ b/.github/workflows/welcome-first-time-contributors.yml
@@ -1,13 +1,15 @@
name: Welcome first time contributors
on:
- pull_request:
+ pull_request_target:
types:
- opened
issues:
types:
- opened
-
+permissions:
+ issues: write
+ pull-requests: write
jobs:
welcome:
runs-on: ubuntu-latest
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml
index a7f5df9706..6b22d6a662 100644
--- a/kafka-ui-api/pom.xml
+++ b/kafka-ui-api/pom.xml
@@ -91,7 +91,7 @@
<groupId>software.amazon.msk</groupId>
<artifactId>aws-msk-iam-auth</artifactId>
- <version>1.1.6</version>
+ <version>1.1.7</version>
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java
index 04960cda69..5ec0fbc642 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java
@@ -123,9 +123,11 @@ public class TopicsController extends AbstractController implements TopicsApi {
.operationName("deleteTopic")
.build();
- return accessControlService.validateAccess(context).then(
- topicsService.deleteTopic(getCluster(clusterName), topicName).map(ResponseEntity::ok)
- ).doOnEach(sig -> auditService.audit(context, sig));
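+ // deleteTopic(...) completes empty (Mono<Void>), so a map(ResponseEntity::ok)
+ // would be skipped; thenReturn guarantees a 200 response is emitted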
+ return accessControlService.validateAccess(context)
+ .then(
+ topicsService.deleteTopic(getCluster(clusterName), topicName)
+ .thenReturn(ResponseEntity.ok().build())
+ ).doOnEach(sig -> auditService.audit(context, sig));
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java
index ea3435d4aa..9a5f68cbd1 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java
@@ -3,18 +3,21 @@ package com.provectus.kafka.ui.mapper;
import com.provectus.kafka.ui.model.CompatibilityCheckResponseDTO;
import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
+import com.provectus.kafka.ui.model.SchemaReferenceDTO;
import com.provectus.kafka.ui.model.SchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaTypeDTO;
import com.provectus.kafka.ui.service.SchemaRegistryService;
import com.provectus.kafka.ui.sr.model.Compatibility;
import com.provectus.kafka.ui.sr.model.CompatibilityCheckResponse;
import com.provectus.kafka.ui.sr.model.NewSubject;
+import com.provectus.kafka.ui.sr.model.SchemaReference;
import com.provectus.kafka.ui.sr.model.SchemaType;
+import java.util.List;
import java.util.Optional;
import org.mapstruct.Mapper;
-@Mapper(componentModel = "spring")
+@Mapper
public interface KafkaSrMapper {
default SchemaSubjectDTO toDto(SchemaRegistryService.SubjectWithCompatibilityLevel s) {
@@ -24,9 +27,12 @@ public interface KafkaSrMapper {
.subject(s.getSubject())
.schema(s.getSchema())
.schemaType(SchemaTypeDTO.fromValue(Optional.ofNullable(s.getSchemaType()).orElse(SchemaType.AVRO).getValue()))
+ .references(toDto(s.getReferences()))
.compatibilityLevel(s.getCompatibility().toString());
}
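+ // MapStruct derives the element mapping SchemaReference -> SchemaReferenceDTO for this list method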
+ List<SchemaReferenceDTO> toDto(List<SchemaReference> references);
+
CompatibilityCheckResponseDTO toDto(CompatibilityCheckResponse ccr);
CompatibilityLevelDTO.CompatibilityEnum toDto(Compatibility compatibility);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
index c1fb203691..4f6d4afe51 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
@@ -20,6 +20,7 @@ import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.schemaregistry.json.JsonSchema;
import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
@@ -217,7 +218,9 @@ public class SchemaRegistrySerde implements BuiltInSerde {
case AVRO -> new AvroJsonSchemaConverter()
.convert(basePath, ((AvroSchema) parsedSchema).rawSchema())
.toJson();
- case JSON -> schema.getSchema();
+ case JSON ->
+ //need to use confluent JsonSchema since it includes resolved references
+ ((JsonSchema) parsedSchema).rawSchema().toString();
};
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
index f98bb5d376..cae29ba93d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
@@ -14,8 +14,7 @@ import com.provectus.kafka.ui.sr.model.CompatibilityLevelChange;
import com.provectus.kafka.ui.sr.model.NewSubject;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.util.ReactiveFailover;
-import com.provectus.kafka.ui.util.WebClientConfigurator;
-import java.io.IOException;
+import java.nio.charset.Charset;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
@@ -92,7 +91,7 @@ public class SchemaRegistryService {
private Mono<SubjectWithCompatibilityLevel> getSchemaSubject(KafkaCluster cluster, String schemaName,
String version) {
return api(cluster)
- .mono(c -> c.getSubjectVersion(schemaName, version))
+ .mono(c -> c.getSubjectVersion(schemaName, version, false))
.zipWith(getSchemaCompatibilityInfoOrGlobal(cluster, schemaName))
.map(t -> new SubjectWithCompatibilityLevel(t.getT1(), t.getT2()))
.onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.error(new SchemaNotFoundException()));
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolver.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolver.java
new file mode 100644
index 0000000000..4ff1f8695b
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolver.java
@@ -0,0 +1,55 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.sr.model.SchemaReference;
+import java.util.List;
+import java.util.Optional;
+import javax.annotation.Nullable;
+import reactor.core.publisher.Mono;
+
+// logic copied from AbstractSchemaProvider:resolveReferences
+// https://github.com/confluentinc/schema-registry/blob/fd59613e2c5adf62e36705307f420712e4c8c1ea/client/src/main/java/io/confluent/kafka/schemaregistry/AbstractSchemaProvider.java#L54
+class SchemaReferencesResolver {
+
+ private final KafkaSrClientApi client;
+
+ SchemaReferencesResolver(KafkaSrClientApi client) {
+ this.client = client;
+ }
+
+ Mono<ImmutableMap<String, String>> resolve(List<SchemaReference> refs) {
+ return resolveReferences(refs, new Resolving(ImmutableMap.of(), ImmutableSet.of()))
+ .map(Resolving::resolved);
+ }
+
+ private record Resolving(ImmutableMap<String, String> resolved, ImmutableSet<String> visited) {
+
+ Resolving visit(String name) {
+ return new Resolving(resolved, ImmutableSet.<String>builder().addAll(visited).add(name).build());
+ }
+
+ Resolving resolve(String ref, String schema) {
+ return new Resolving(ImmutableMap.<String, String>builder().putAll(resolved).put(ref, schema).build(), visited);
+ }
+ }
+
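+ // depth-first, post-order walk: a reference's own references are resolved (and
+ // put into the map) before the reference itself, so dependencies come first;
+ // "visited" prevents cycles and duplicate subject fetches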
+ private Mono<Resolving> resolveReferences(@Nullable List<SchemaReference> refs, Resolving initState) {
+ Mono<Resolving> result = Mono.just(initState);
+ for (SchemaReference reference : Optional.ofNullable(refs).orElse(List.of())) {
+ result = result.flatMap(state -> {
+ if (state.visited().contains(reference.getName())) {
+ return Mono.just(state);
+ } else {
+ final var newState = state.visit(reference.getName());
+ return client.getSubjectVersion(reference.getSubject(), String.valueOf(reference.getVersion()), true)
+ .flatMap(subj ->
+ resolveReferences(subj.getReferences(), newState)
+ .map(withNewRefs -> withNewRefs.resolve(reference.getName(), subj.getSchema())));
+ }
+ });
+ }
+ return result;
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java
index 4ae036d1e4..29947c6acc 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java
@@ -6,6 +6,7 @@ import com.google.common.collect.ImmutableMap;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.StatisticsCache;
import com.provectus.kafka.ui.service.integration.odd.schema.DataSetFieldsExtractors;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
import java.net.URI;
import java.util.List;
import java.util.Map;
@@ -25,6 +26,8 @@ import org.opendatadiscovery.oddrn.model.KafkaPath;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
@Slf4j
@RequiredArgsConstructor
@@ -101,12 +104,20 @@ class TopicsExporter {
return Mono.just(List.of());
}
String subject = topic + (isKey ? "-key" : "-value");
- return cluster.getSchemaRegistryClient()
- .mono(client -> client.getSubjectVersion(subject, "latest"))
- .map(subj -> DataSetFieldsExtractors.extract(subj, topicOddrn, isKey))
+ return getSubjWithResolvedRefs(cluster, subject)
+ .map(t -> DataSetFieldsExtractors.extract(t.getT1(), t.getT2(), topicOddrn, isKey))
.onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.just(List.of()))
.onErrorMap(WebClientResponseException.class, err ->
new IllegalStateException("Error retrieving subject %s".formatted(subject), err));
}
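+ // fetches the latest subject version, then resolves its references into an ordered name -> schema map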
+ private Mono<Tuple2<SchemaSubject, Map<String, String>>> getSubjWithResolvedRefs(KafkaCluster cluster,
+ String subjectName) {
+ return cluster.getSchemaRegistryClient()
+ .mono(client ->
+ client.getSubjectVersion(subjectName, "latest", false)
+ .flatMap(subj -> new SchemaReferencesResolver(client).resolve(subj.getReferences())
+ .map(resolvedRefs -> Tuples.of(subj, resolvedRefs))));
+ }
+
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
index cc799a9e10..f942396293 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
@@ -1,7 +1,7 @@
package com.provectus.kafka.ui.service.integration.odd.schema;
import com.google.common.collect.ImmutableSet;
-import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import java.util.ArrayList;
import java.util.List;
import org.apache.avro.Schema;
@@ -14,8 +14,8 @@ final class AvroExtractor {
private AvroExtractor() {
}
- static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
- var schema = new Schema.Parser().parse(subject.getSchema());
+ static List<DataSetField> extract(AvroSchema avroSchema, KafkaPath topicOddrn, boolean isKey) {
+ var schema = avroSchema.rawSchema();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
extract(
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
index e357db3079..b9093262bd 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
@@ -2,7 +2,11 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.sr.model.SchemaType;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.json.JsonSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import org.opendatadiscovery.client.model.DataSetField;
import org.opendatadiscovery.client.model.DataSetFieldType;
@@ -10,12 +14,18 @@ import org.opendatadiscovery.oddrn.model.KafkaPath;
public final class DataSetFieldsExtractors {
- public static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ public static List<DataSetField> extract(SchemaSubject subject,
+ Map<String, String> resolvedRefs,
+ KafkaPath topicOddrn,
+ boolean isKey) {
SchemaType schemaType = Optional.ofNullable(subject.getSchemaType()).orElse(SchemaType.AVRO);
return switch (schemaType) {
- case AVRO -> AvroExtractor.extract(subject, topicOddrn, isKey);
- case JSON -> JsonSchemaExtractor.extract(subject, topicOddrn, isKey);
- case PROTOBUF -> ProtoExtractor.extract(subject, topicOddrn, isKey);
+ case AVRO -> AvroExtractor.extract(
+ new AvroSchema(subject.getSchema(), List.of(), resolvedRefs, null), topicOddrn, isKey);
+ case JSON -> JsonSchemaExtractor.extract(
+ new JsonSchema(subject.getSchema(), List.of(), resolvedRefs, null), topicOddrn, isKey);
+ case PROTOBUF -> ProtoExtractor.extract(
+ new ProtobufSchema(subject.getSchema(), List.of(), resolvedRefs, null, null), topicOddrn, isKey);
};
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
index 06201b1ce7..93adbdbe0c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
@@ -30,8 +30,8 @@ final class JsonSchemaExtractor {
private JsonSchemaExtractor() {
}
- static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
- Schema schema = new JsonSchema(subject.getSchema()).rawSchema();
+ static List<DataSetField> extract(JsonSchema jsonSchema, KafkaPath topicOddrn, boolean isKey) {
+ Schema schema = jsonSchema.rawSchema();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
extract(
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
index c1316172f3..01b25ff48d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
@@ -15,7 +15,6 @@ import com.google.protobuf.Timestamp;
import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
-import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import java.util.ArrayList;
import java.util.List;
@@ -42,8 +41,8 @@ final class ProtoExtractor {
private ProtoExtractor() {
}
- static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
- Descriptor schema = new ProtobufSchema(subject.getSchema()).toDescriptor();
+ static List<DataSetField> extract(ProtobufSchema protobufSchema, KafkaPath topicOddrn, boolean isKey) {
+ Descriptor schema = protobufSchema.toDescriptor();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
var rootOddrn = topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value");
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java
index e0815828e2..c5aca5ad71 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java
@@ -28,6 +28,9 @@ import reactor.netty.http.client.HttpClient;
public class WebClientConfigurator {
private final WebClient.Builder builder = WebClient.builder();
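+ // the client accumulates settings (system-property proxy, optional TLS) and is
+ // only attached to the builder in build(), so no configured option is lost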
+ private HttpClient httpClient = HttpClient
+ .create()
+ .proxyWithSystemProperties();
public WebClientConfigurator() {
configureObjectMapper(defaultOM());
@@ -90,12 +93,7 @@ public class WebClientConfigurator {
// Create webclient
SslContext context = contextBuilder.build();
- var httpClient = HttpClient
- .create()
- .secure(t -> t.sslContext(context))
- .proxyWithSystemProperties();
-
- builder.clientConnector(new ReactorClientHttpConnector(httpClient));
+ httpClient = httpClient.secure(t -> t.sslContext(context));
return this;
}
@@ -131,6 +129,6 @@ public class WebClientConfigurator {
}
public WebClient build() {
- return builder.build();
+ return builder.clientConnector(new ReactorClientHttpConnector(httpClient)).build();
}
}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
index 392200a15f..314abf914b 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui;
import com.provectus.kafka.ui.container.KafkaConnectContainer;
+import com.provectus.kafka.ui.container.KsqlDbContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
import java.nio.file.Path;
import java.util.List;
@@ -32,7 +33,7 @@ public abstract class AbstractIntegrationTest {
public static final String LOCAL = "local";
public static final String SECOND_LOCAL = "secondLocal";
- private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1";
+ private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1"; // Append ".arm64" for a local run
public static final KafkaContainer kafka = new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))
@@ -49,6 +50,11 @@ public abstract class AbstractIntegrationTest {
.dependsOn(kafka)
.dependsOn(schemaRegistry);
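+ // shared ksqlDB container; ksql test classes start it on demand in @BeforeAll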
+ protected static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
+ DockerImageName.parse("confluentinc/cp-ksqldb-server")
+ .withTag(CONFLUENT_PLATFORM_VERSION))
+ .withKafka(kafka);
+
@TempDir
public static Path tmpDir;
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
index 60959be049..5fa9aee766 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui;
import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
+import com.provectus.kafka.ui.model.SchemaReferenceDTO;
import com.provectus.kafka.ui.model.SchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaSubjectsResponseDTO;
import com.provectus.kafka.ui.model.SchemaTypeDTO;
@@ -190,6 +191,58 @@ class SchemaRegistryServiceTests extends AbstractIntegrationTest {
Assertions.assertEquals(schema, actual.getSchema());
}
+
+ @Test
+ void shouldCreateNewProtobufSchemaWithRefs() {
+ NewSchemaSubjectDTO requestBody = new NewSchemaSubjectDTO()
+ .schemaType(SchemaTypeDTO.PROTOBUF)
+ .subject(subject + "-ref")
+ .schema("""
+ syntax = "proto3";
+ message MyRecord {
+ int32 id = 1;
+ string name = 2;
+ }
+ """);
+
+ webTestClient
+ .post()
+ .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+ .contentType(MediaType.APPLICATION_JSON)
+ .body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubjectDTO.class))
+ .exchange()
+ .expectStatus()
+ .isOk();
+
+ requestBody = new NewSchemaSubjectDTO()
+ .schemaType(SchemaTypeDTO.PROTOBUF)
+ .subject(subject)
+ .schema("""
+ syntax = "proto3";
+ import "MyRecord.proto";
+ message MyRecordWithRef {
+ int32 id = 1;
+ MyRecord my_ref = 2;
+ }
+ """)
+ .references(List.of(new SchemaReferenceDTO().name("MyRecord.proto").subject(subject + "-ref").version(1)));
+
+ SchemaSubjectDTO actual = webTestClient
+ .post()
+ .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+ .contentType(MediaType.APPLICATION_JSON)
+ .body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubjectDTO.class))
+ .exchange()
+ .expectStatus()
+ .isOk()
+ .expectBody(SchemaSubjectDTO.class)
+ .returnResult()
+ .getResponseBody();
+
+ Assertions.assertNotNull(actual);
+ Assertions.assertEquals(requestBody.getReferences(), actual.getReferences());
+ }
+
@Test
public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
webTestClient
@@ -278,7 +331,7 @@ class SchemaRegistryServiceTests extends AbstractIntegrationTest {
void shouldCreateNewSchemaWhenSubjectIncludesNonAsciiCharacters() {
String schema =
"{\"subject\":\"test/test\",\"schemaType\":\"JSON\",\"schema\":"
- + "\"{\\\"type\\\": \\\"string\\\"}\"}";
+ + "\"{\\\"type\\\": \\\"string\\\"}\"}";
webTestClient
.post()
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolverTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolverTest.java
new file mode 100644
index 0000000000..d24524473a
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/SchemaReferencesResolverTest.java
@@ -0,0 +1,86 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.google.common.collect.ImmutableMap;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.sr.model.SchemaReference;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import java.util.List;
+import org.junit.jupiter.api.Test;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+class SchemaReferencesResolverTest {
+
+ private final KafkaSrClientApi srClientMock = mock(KafkaSrClientApi.class);
+
+ private final SchemaReferencesResolver schemaReferencesResolver = new SchemaReferencesResolver(srClientMock);
+
+ @Test
+ void resolvesRefsUsingSrClient() {
+ mockSrCall("sub1", 1,
+ new SchemaSubject()
+ .schema("schema1"));
+
+ mockSrCall("sub2", 1,
+ new SchemaSubject()
+ .schema("schema2")
+ .references(
+ List.of(
+ new SchemaReference().name("ref2_1").subject("sub2_1").version(2),
+ new SchemaReference().name("ref2_2").subject("sub1").version(1))));
+
+ mockSrCall("sub2_1", 2,
+ new SchemaSubject()
+ .schema("schema2_1")
+ .references(
+ List.of(
+ new SchemaReference().name("ref2_1_1").subject("sub2_1_1").version(3),
+ new SchemaReference().name("ref1").subject("should_not_be_called").version(1)
+ ))
+ );
+
+ mockSrCall("sub2_1_1", 3,
+ new SchemaSubject()
+ .schema("schema2_1_1"));
+
+ var resolvedRefsMono = schemaReferencesResolver.resolve(
+ List.of(
+ new SchemaReference().name("ref1").subject("sub1").version(1),
+ new SchemaReference().name("ref2").subject("sub2").version(1)));
+
+ StepVerifier.create(resolvedRefsMono)
+ .assertNext(refs ->
+ assertThat(refs)
+ .containsExactlyEntriesOf(
+ // checking map should be ordered
+ ImmutableMap.<String, String>builder()
+ .put("ref1", "schema1")
+ .put("ref2_1_1", "schema2_1_1")
+ .put("ref2_1", "schema2_1")
+ .put("ref2_2", "schema1")
+ .put("ref2", "schema2")
+ .build()))
+ .verifyComplete();
+ }
+
+ @Test
+ void returnsEmptyMapOnEmptyInputs() {
+ StepVerifier.create(schemaReferencesResolver.resolve(null))
+ .assertNext(map -> assertThat(map).isEmpty())
+ .verifyComplete();
+
+ StepVerifier.create(schemaReferencesResolver.resolve(List.of()))
+ .assertNext(map -> assertThat(map).isEmpty())
+ .verifyComplete();
+ }
+
+ private void mockSrCall(String subject, int version, SchemaSubject subjectToReturn) {
+ when(srClientMock.getSubjectVersion(subject, version + "", true))
+ .thenReturn(Mono.just(subjectToReturn));
+ }
+
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
index e593ae29ee..26d98865cf 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service.integration.odd;
import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.TopicState;
import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.empty;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -57,9 +58,8 @@ class TopicsExporterTest {
@Test
void doesNotExportTopicsWhichDontFitFiltrationRule() {
- when(schemaRegistryClientMock.getSubjectVersion(anyString(), anyString()))
+ when(schemaRegistryClientMock.getSubjectVersion(anyString(), anyString(), anyBoolean()))
.thenReturn(Mono.error(WebClientResponseException.create(404, "NF", new HttpHeaders(), null, null, null)));
-
stats = Statistics.empty()
.toBuilder()
.clusterState(
@@ -95,14 +95,14 @@ class TopicsExporterTest {
@Test
void doesExportTopicData() {
- when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest"))
+ when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest", false))
.thenReturn(Mono.just(
new SchemaSubject()
.schema("\"string\"")
.schemaType(SchemaType.AVRO)
));
- when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest"))
+ when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest", false))
.thenReturn(Mono.just(
new SchemaSubject()
.schema("\"int\"")
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java
index d523d7cd41..cd1baf7798 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java
@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
-import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.opendatadiscovery.client.model.DataSetField;
@@ -15,8 +15,7 @@ class AvroExtractorTest {
@ValueSource(booleans = {true, false})
void test(boolean isKey) {
var list = AvroExtractor.extract(
- new SchemaSubject()
- .schema("""
+ new AvroSchema("""
{
"type": "record",
"name": "Message",
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java
index 7968e52e6d..30a1e6229c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java
@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
-import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.json.JsonSchema;
import java.net.URI;
import java.util.List;
import java.util.Map;
@@ -40,7 +40,7 @@ class JsonSchemaExtractorTest {
}
""";
var fields = JsonSchemaExtractor.extract(
- new SchemaSubject().schema(jsonSchema),
+ new JsonSchema(jsonSchema),
KafkaPath.builder()
.cluster("localhost:9092")
.topic("someTopic")
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java
index cbb97a859c..8d6344d7cc 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java
@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
-import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.opendatadiscovery.client.model.DataSetField;
@@ -54,8 +54,7 @@ class ProtoExtractorTest {
}""";
var list = ProtoExtractor.extract(
- new SchemaSubject()
- .schema(protoSchema),
+ new ProtobufSchema(protoSchema),
KafkaPath.builder()
.cluster("localhost:9092")
.topic("someTopic")
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java
index f74eccea53..2bca09a1c7 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java
@@ -3,30 +3,24 @@ package com.provectus.kafka.ui.service.ksql;
import static org.assertj.core.api.Assertions.assertThat;
import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.DoubleNode;
+import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.TextNode;
import com.provectus.kafka.ui.AbstractIntegrationTest;
-import com.provectus.kafka.ui.container.KsqlDbContainer;
+import java.math.BigDecimal;
import java.time.Duration;
-import java.util.List;
import java.util.Map;
import org.junit.Ignore;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.testcontainers.shaded.org.awaitility.Awaitility;
-import org.testcontainers.utility.DockerImageName;
import reactor.test.StepVerifier;
@Ignore
class KsqlApiClientTest extends AbstractIntegrationTest {
- private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
- DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0"))
- .withKafka(kafka);
-
@BeforeAll
static void startContainer() {
KSQL_DB.start();
@@ -74,7 +68,7 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
private void assertLastKsqTutorialQueryResult(KsqlApiClient client) {
// expected results:
//{"header":"Schema","columnNames":[...],"values":null}
- //{"header":"Row","columnNames":null,"values":[[0.0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]}
+ //{"header":"Row","columnNames":null,"values":[[0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]}
//{"header":"Row","columnNames":null,"values":[[10.0,["18f4ea86"],1]]}
StepVerifier.create(
client.execute(
@@ -88,34 +82,26 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
assertThat(header.getValues()).isNull();
})
.assertNext(row -> {
- assertThat(row).isEqualTo(
- KsqlApiClient.KsqlResponseTable.builder()
- .header("Row")
- .columnNames(null)
- .values(List.of(List.of(
- new DoubleNode(0.0),
- new ArrayNode(JsonNodeFactory.instance)
- .add(new TextNode("4ab5cbad"))
- .add(new TextNode("8b6eae59"))
- .add(new TextNode("4a7c7b41")),
- new IntNode(3)
- )))
- .build()
- );
+ var distance = (DecimalNode) row.getValues().get(0).get(0);
+ var riders = (ArrayNode) row.getValues().get(0).get(1);
+ var count = (IntNode) row.getValues().get(0).get(2);
+
+ assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(0)));
+ assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance)
+ .add(new TextNode("4ab5cbad"))
+ .add(new TextNode("8b6eae59"))
+ .add(new TextNode("4a7c7b41")));
+ assertThat(count).isEqualTo(new IntNode(3));
})
.assertNext(row -> {
- assertThat(row).isEqualTo(
- KsqlApiClient.KsqlResponseTable.builder()
- .header("Row")
- .columnNames(null)
- .values(List.of(List.of(
- new DoubleNode(10.0),
- new ArrayNode(JsonNodeFactory.instance)
- .add(new TextNode("18f4ea86")),
- new IntNode(1)
- )))
- .build()
- );
+ var distance = (DecimalNode) row.getValues().get(0).get(0);
+ var riders = (ArrayNode) row.getValues().get(0).get(1);
+ var count = (IntNode) row.getValues().get(0).get(2);
+
+ assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(10)));
+ assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance)
+ .add(new TextNode("18f4ea86")));
+ assertThat(count).isEqualTo(new IntNode(1));
})
.verifyComplete();
}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java
index afa3700c0f..0e1717430c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java
@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.service.ksql;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
-import com.provectus.kafka.ui.container.KsqlDbContainer;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.KsqlStreamDescriptionDTO;
import com.provectus.kafka.ui.model.KsqlTableDescriptionDTO;
@@ -15,14 +14,9 @@ import java.util.concurrent.CopyOnWriteArraySet;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
-import org.testcontainers.utility.DockerImageName;
class KsqlServiceV2Test extends AbstractIntegrationTest {
- private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
- DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0"))
- .withKafka(kafka);
-
private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>();
private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml
index 84ee36b48d..0320e891ec 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml
@@ -77,6 +77,10 @@ paths:
required: true
schema:
type: string
+ - name: deleted
+ in: query
+ schema:
+ type: boolean
responses:
200:
description: OK
@@ -317,6 +321,10 @@ components:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
+ references:
+ type: array
+ items:
+ $ref: '#/components/schemas/SchemaReference'
required:
- id
- subject
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 5bff16b8aa..39835623a1 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -2976,6 +2976,10 @@ components:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
+ references:
+ type: array
+ items:
+ $ref: '#/components/schemas/SchemaReference'
required:
- id
- subject
@@ -2993,13 +2997,30 @@ components:
schema:
type: string
schemaType:
- $ref: '#/components/schemas/SchemaType'
- # upon updating a schema, the type of existing schema can't be changed
+ $ref: '#/components/schemas/SchemaType' # upon updating a schema, the type of existing schema can't be changed
+ references:
+ type: array
+ items:
+ $ref: '#/components/schemas/SchemaReference'
required:
- subject
- schema
- schemaType
+ SchemaReference:
+ type: object
+ properties:
+ name:
+ type: string
+ subject:
+ type: string
+ version:
+ type: integer
+ required:
+ - name
+ - subject
+ - version
+
CompatibilityLevel:
type: object
properties:
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index 30aed7b94b..71b3b96cbc 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -20,7 +20,7 @@
6.12.3
7.7.1
2.22.2
- 3.0.4
+ 3.0.5
1.9.9.1
3.24.2
2.2
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts
index 27fc112c81..a5235e9ac5 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts
@@ -1,8 +1,8 @@
import { SerdeDescription } from 'generated-sources';
-import { getPrefferedDescription } from 'components/Topics/Topic/SendMessage/utils';
+import { getPreferredDescription } from 'components/Topics/Topic/SendMessage/utils';
export const getDefaultSerdeName = (serdes: SerdeDescription[]) => {
- const preffered = getPrefferedDescription(serdes);
+ const preffered = getPreferredDescription(serdes);
if (preffered) {
return preffered.name || '';
}
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
index b7f31a230b..bef7a4dddb 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
@@ -118,8 +118,8 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
valueSerde,
});
if (!keepContents) {
- setValue('key', '');
- setValue('content', '');
+ setValue('key', defaultValues.key || '');
+ setValue('content', defaultValues.content || '');
closeSidebar();
}
} catch (e) {
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
index 6f98c5916d..c8161b0c82 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
@@ -13,21 +13,21 @@ jsf.option('fillProperties', false);
jsf.option('alwaysFakeOptionals', true);
jsf.option('failOnInvalidFormat', false);
-const generateValueFromSchema = (preffered?: SerdeDescription) => {
- if (!preffered?.schema) {
+const generateValueFromSchema = (preferred?: SerdeDescription) => {
+ if (!preferred?.schema) {
return undefined;
}
- const parsedSchema = JSON.parse(preffered.schema);
+ const parsedSchema = JSON.parse(preferred.schema);
const value = jsf.generate(parsedSchema);
return JSON.stringify(value);
};
-export const getPrefferedDescription = (serdes: SerdeDescription[]) =>
+export const getPreferredDescription = (serdes: SerdeDescription[]) =>
serdes.find((s) => s.preferred);
export const getDefaultValues = (serdes: TopicSerdeSuggestion) => {
- const keySerde = getPrefferedDescription(serdes.key || []);
- const valueSerde = getPrefferedDescription(serdes.value || []);
+ const keySerde = getPreferredDescription(serdes.key || []);
+ const valueSerde = getPreferredDescription(serdes.value || []);
return {
key: generateValueFromSchema(keySerde),
@@ -65,15 +65,15 @@ export const validateBySchema = (
return errors;
}
- let parcedSchema;
+ let parsedSchema;
let parsedValue;
try {
- parcedSchema = JSON.parse(schema);
+ parsedSchema = JSON.parse(schema);
} catch (e) {
return [`Error in parsing the "${type}" field schema`];
}
- if (parcedSchema.type === 'string') {
+ if (parsedSchema.type === 'string') {
return [];
}
try {
@@ -84,7 +84,7 @@ export const validateBySchema = (
try {
const ajv = new Ajv();
addFormats(ajv);
- const validate = ajv.compile(parcedSchema);
+ const validate = ajv.compile(parsedSchema);
validate(parsedValue);
if (validate.errors) {
errors = validate.errors.map(
diff --git a/pom.xml b/pom.xml
index 41a51da19f..a07f15f1a9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,17 +26,17 @@
3.19.0
1.11.1
1.12.19
- 7.3.3
+ 7.4.0
3.1.0
3.0.13
2.14.0
- 3.3.1
+ 3.5.0
1.5.5.Final
1.18.24
3.23.3
2.13.9
2.0
- 3.0.6
+ 3.1.1
1.0.0
0.1.17
0.1.23