Merge branch 'master' of github.com:provectus/kafka-ui into metrics_ph2

 Conflicts:
	kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
iliax 2023-07-24 14:36:49 +04:00
commit 79ba8313f7
33 changed files with 362 additions and 116 deletions

View file

@@ -3,11 +3,14 @@ on:
push:
branches:
- master
pull_request:
pull_request_target:
types: ["opened", "edited", "reopened", "synchronize"]
paths:
- "kafka-ui-api/**"
- "pom.xml"
permissions:
checks: write
pull-requests: write
jobs:
build-and-test:
runs-on: ubuntu-latest

View file

@@ -1,6 +1,6 @@
name: "E2E: PR healthcheck"
on:
pull_request:
pull_request_target:
types: [ "opened", "edited", "reopened", "synchronize" ]
paths:
- "kafka-ui-api/**"
@@ -8,6 +8,8 @@ on:
- "kafka-ui-react-app/**"
- "kafka-ui-e2e-checks/**"
- "pom.xml"
permissions:
statuses: write
jobs:
build-and-test:
runs-on: ubuntu-latest
@@ -18,8 +20,8 @@ jobs:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-access-key-id: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Set up environment
id: set_env_values

View file

@@ -3,11 +3,14 @@ on:
push:
branches:
- master
pull_request:
pull_request_target:
types: ["opened", "edited", "reopened", "synchronize"]
paths:
- "kafka-ui-contract/**"
- "kafka-ui-react-app/**"
permissions:
checks: write
pull-requests: write
jobs:
build-and-test:
env:
@@ -24,7 +27,7 @@ jobs:
with:
version: 7.4.0
- name: Install node
uses: actions/setup-node@v3.6.0
uses: actions/setup-node@v3.7.0
with:
node-version: "16.15.0"
cache: "pnpm"

View file

@@ -1,8 +1,9 @@
name: "PR: Checklist linter"
on:
pull_request:
pull_request_target:
types: [opened, edited, synchronize, reopened]
permissions:
checks: write
jobs:
task-check:
runs-on: ubuntu-latest

View file

@@ -1,13 +1,15 @@
name: Welcome first time contributors
on:
pull_request:
pull_request_target:
types:
- opened
issues:
types:
- opened
permissions:
issues: write
pull-requests: write
jobs:
welcome:
runs-on: ubuntu-latest

View file

@@ -91,7 +91,7 @@
<dependency>
<groupId>software.amazon.msk</groupId>
<artifactId>aws-msk-iam-auth</artifactId>
<version>1.1.6</version>
<version>1.1.7</version>
</dependency>
<dependency>

View file

@@ -123,8 +123,10 @@ public class TopicsController extends AbstractController implements TopicsApi {
.operationName("deleteTopic")
.build();
return accessControlService.validateAccess(context).then(
topicsService.deleteTopic(getCluster(clusterName), topicName).map(ResponseEntity::ok)
return accessControlService.validateAccess(context)
.then(
topicsService.deleteTopic(getCluster(clusterName), topicName)
.thenReturn(ResponseEntity.ok().<Void>build())
).doOnEach(sig -> auditService.audit(context, sig));
}
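Why the delete endpoint switched from map to thenReturn: the new thenReturn call implies that topicsService.deleteTopic completes as an empty Mono (Mono<Void>), so a map callback on it is never invoked, whereas thenReturn emits its value once the deletion completes. A minimal standalone Reactor sketch of the difference (illustrative only, not part of this change):

import reactor.core.publisher.Mono;

public class ThenReturnVsMap {
  public static void main(String[] args) {
    Mono<Void> deletion = Mono.empty(); // stand-in for topicsService.deleteTopic(...)

    // map is never invoked: an empty Mono has no element to transform
    System.out.println(deletion.map(v -> "mapped").blockOptional());      // Optional.empty

    // thenReturn waits for completion, then emits the supplied value
    System.out.println(deletion.thenReturn("returned").blockOptional());  // Optional[returned]
  }
}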

View file

@@ -3,18 +3,21 @@ package com.provectus.kafka.ui.mapper;
import com.provectus.kafka.ui.model.CompatibilityCheckResponseDTO;
import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaReferenceDTO;
import com.provectus.kafka.ui.model.SchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaTypeDTO;
import com.provectus.kafka.ui.service.SchemaRegistryService;
import com.provectus.kafka.ui.sr.model.Compatibility;
import com.provectus.kafka.ui.sr.model.CompatibilityCheckResponse;
import com.provectus.kafka.ui.sr.model.NewSubject;
import com.provectus.kafka.ui.sr.model.SchemaReference;
import com.provectus.kafka.ui.sr.model.SchemaType;
import java.util.List;
import java.util.Optional;
import org.mapstruct.Mapper;
@Mapper(componentModel = "spring")
@Mapper
public interface KafkaSrMapper {
default SchemaSubjectDTO toDto(SchemaRegistryService.SubjectWithCompatibilityLevel s) {
@@ -24,9 +27,12 @@ public interface KafkaSrMapper {
.subject(s.getSubject())
.schema(s.getSchema())
.schemaType(SchemaTypeDTO.fromValue(Optional.ofNullable(s.getSchemaType()).orElse(SchemaType.AVRO).getValue()))
.references(toDto(s.getReferences()))
.compatibilityLevel(s.getCompatibility().toString());
}
List<SchemaReferenceDTO> toDto(List<SchemaReference> references);
CompatibilityCheckResponseDTO toDto(CompatibilityCheckResponse ccr);
CompatibilityLevelDTO.CompatibilityEnum toDto(Compatibility compatibility);
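The new toDto(List<SchemaReference>) declaration has no body because MapStruct generates the list mapping from the element types. Roughly what the generated implementation does, as a hand-written sketch under that assumption (not the actual generated class):

import com.provectus.kafka.ui.model.SchemaReferenceDTO;
import com.provectus.kafka.ui.sr.model.SchemaReference;
import java.util.ArrayList;
import java.util.List;

class KafkaSrMapperListMappingSketch {
  // Hypothetical equivalent of the MapStruct-generated toDto(List<SchemaReference>).
  static List<SchemaReferenceDTO> toDto(List<SchemaReference> references) {
    if (references == null) {
      return null;
    }
    List<SchemaReferenceDTO> result = new ArrayList<>(references.size());
    for (SchemaReference ref : references) {
      result.add(new SchemaReferenceDTO()
          .name(ref.getName())
          .subject(ref.getSubject())
          .version(ref.getVersion()));
    }
    return result;
  }
}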

View file

@@ -20,6 +20,7 @@ import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
@@ -217,7 +218,9 @@ public class SchemaRegistrySerde implements BuiltInSerde {
case AVRO -> new AvroJsonSchemaConverter()
.convert(basePath, ((AvroSchema) parsedSchema).rawSchema())
.toJson();
case JSON -> schema.getSchema();
case JSON ->
//need to use confluent JsonSchema since it includes resolved references
((JsonSchema) parsedSchema).rawSchema().toString();
};
}

View file

@@ -14,8 +14,7 @@ import com.provectus.kafka.ui.sr.model.CompatibilityLevelChange;
import com.provectus.kafka.ui.sr.model.NewSubject;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.util.ReactiveFailover;
import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
@@ -92,7 +91,7 @@ public class SchemaRegistryService {
private Mono<SubjectWithCompatibilityLevel> getSchemaSubject(KafkaCluster cluster, String schemaName,
String version) {
return api(cluster)
.mono(c -> c.getSubjectVersion(schemaName, version))
.mono(c -> c.getSubjectVersion(schemaName, version, false))
.zipWith(getSchemaCompatibilityInfoOrGlobal(cluster, schemaName))
.map(t -> new SubjectWithCompatibilityLevel(t.getT1(), t.getT2()))
.onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.error(new SchemaNotFoundException()));

View file

@@ -0,0 +1,55 @@
package com.provectus.kafka.ui.service.integration.odd;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import com.provectus.kafka.ui.sr.model.SchemaReference;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import reactor.core.publisher.Mono;
// logic copied from AbstractSchemaProvider:resolveReferences
// https://github.com/confluentinc/schema-registry/blob/fd59613e2c5adf62e36705307f420712e4c8c1ea/client/src/main/java/io/confluent/kafka/schemaregistry/AbstractSchemaProvider.java#L54
class SchemaReferencesResolver {
private final KafkaSrClientApi client;
SchemaReferencesResolver(KafkaSrClientApi client) {
this.client = client;
}
Mono<ImmutableMap<String, String>> resolve(List<SchemaReference> refs) {
return resolveReferences(refs, new Resolving(ImmutableMap.of(), ImmutableSet.of()))
.map(Resolving::resolved);
}
private record Resolving(ImmutableMap<String, String> resolved, ImmutableSet<String> visited) {
Resolving visit(String name) {
return new Resolving(resolved, ImmutableSet.<String>builder().addAll(visited).add(name).build());
}
Resolving resolve(String ref, String schema) {
return new Resolving(ImmutableMap.<String, String>builder().putAll(resolved).put(ref, schema).build(), visited);
}
}
private Mono<Resolving> resolveReferences(@Nullable List<SchemaReference> refs, Resolving initState) {
Mono<Resolving> result = Mono.just(initState);
for (SchemaReference reference : Optional.ofNullable(refs).orElse(List.of())) {
result = result.flatMap(state -> {
if (state.visited().contains(reference.getName())) {
return Mono.just(state);
} else {
final var newState = state.visit(reference.getName());
return client.getSubjectVersion(reference.getSubject(), String.valueOf(reference.getVersion()), true)
.flatMap(subj ->
resolveReferences(subj.getReferences(), newState)
.map(withNewRefs -> withNewRefs.resolve(reference.getName(), subj.getSchema())));
}
});
}
return result;
}
}
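A hedged usage sketch (same package; the method name and wiring are assumptions) that ties the resolver to the Confluent schema constructors accepting a resolvedReferences map, the same pattern TopicsExporter uses below:

package com.provectus.kafka.ui.service.integration.odd;

import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import java.util.List;
import reactor.core.publisher.Mono;

class SchemaReferencesResolverUsageSketch {
  // Fetch the latest version of a subject and parse it with its references resolved.
  static Mono<AvroSchema> latestAvroWithRefs(KafkaSrClientApi client, String subject) {
    return client.getSubjectVersion(subject, "latest", false)
        .flatMap(subj -> new SchemaReferencesResolver(client).resolve(subj.getReferences())
            .map(resolvedRefs -> new AvroSchema(subj.getSchema(), List.of(), resolvedRefs, null)));
  }
}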

View file

@@ -6,6 +6,7 @@ import com.google.common.collect.ImmutableMap;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.StatisticsCache;
import com.provectus.kafka.ui.service.integration.odd.schema.DataSetFieldsExtractors;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import java.net.URI;
import java.util.List;
import java.util.Map;
@@ -25,6 +26,8 @@ import org.opendatadiscovery.oddrn.model.KafkaPath;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
@Slf4j
@RequiredArgsConstructor
@@ -101,12 +104,20 @@ class TopicsExporter {
return Mono.just(List.of());
}
String subject = topic + (isKey ? "-key" : "-value");
return cluster.getSchemaRegistryClient()
.mono(client -> client.getSubjectVersion(subject, "latest"))
.map(subj -> DataSetFieldsExtractors.extract(subj, topicOddrn, isKey))
return getSubjWithResolvedRefs(cluster, subject)
.map(t -> DataSetFieldsExtractors.extract(t.getT1(), t.getT2(), topicOddrn, isKey))
.onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.just(List.of()))
.onErrorMap(WebClientResponseException.class, err ->
new IllegalStateException("Error retrieving subject %s".formatted(subject), err));
}
private Mono<Tuple2<SchemaSubject, Map<String, String>>> getSubjWithResolvedRefs(KafkaCluster cluster,
String subjectName) {
return cluster.getSchemaRegistryClient()
.mono(client ->
client.getSubjectVersion(subjectName, "latest", false)
.flatMap(subj -> new SchemaReferencesResolver(client).resolve(subj.getReferences())
.map(resolvedRefs -> Tuples.of(subj, resolvedRefs))));
}
}

View file

@@ -1,7 +1,7 @@
package com.provectus.kafka.ui.service.integration.odd.schema;
import com.google.common.collect.ImmutableSet;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import java.util.ArrayList;
import java.util.List;
import org.apache.avro.Schema;
@@ -14,8 +14,8 @@ final class AvroExtractor {
private AvroExtractor() {
}
static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
var schema = new Schema.Parser().parse(subject.getSchema());
static List<DataSetField> extract(AvroSchema avroSchema, KafkaPath topicOddrn, boolean isKey) {
var schema = avroSchema.rawSchema();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
extract(

View file

@@ -2,7 +2,11 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.sr.model.SchemaType;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.opendatadiscovery.client.model.DataSetField;
import org.opendatadiscovery.client.model.DataSetFieldType;
@@ -10,12 +14,18 @@ import org.opendatadiscovery.oddrn.model.KafkaPath;
public final class DataSetFieldsExtractors {
public static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
public static List<DataSetField> extract(SchemaSubject subject,
Map<String, String> resolvedRefs,
KafkaPath topicOddrn,
boolean isKey) {
SchemaType schemaType = Optional.ofNullable(subject.getSchemaType()).orElse(SchemaType.AVRO);
return switch (schemaType) {
case AVRO -> AvroExtractor.extract(subject, topicOddrn, isKey);
case JSON -> JsonSchemaExtractor.extract(subject, topicOddrn, isKey);
case PROTOBUF -> ProtoExtractor.extract(subject, topicOddrn, isKey);
case AVRO -> AvroExtractor.extract(
new AvroSchema(subject.getSchema(), List.of(), resolvedRefs, null), topicOddrn, isKey);
case JSON -> JsonSchemaExtractor.extract(
new JsonSchema(subject.getSchema(), List.of(), resolvedRefs, null), topicOddrn, isKey);
case PROTOBUF -> ProtoExtractor.extract(
new ProtobufSchema(subject.getSchema(), List.of(), resolvedRefs, null, null), topicOddrn, isKey);
};
}

View file

@@ -30,8 +30,8 @@ final class JsonSchemaExtractor {
private JsonSchemaExtractor() {
}
static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
Schema schema = new JsonSchema(subject.getSchema()).rawSchema();
static List<DataSetField> extract(JsonSchema jsonSchema, KafkaPath topicOddrn, boolean isKey) {
Schema schema = jsonSchema.rawSchema();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
extract(

View file

@@ -15,7 +15,6 @@ import com.google.protobuf.Timestamp;
import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import java.util.ArrayList;
import java.util.List;
@@ -42,8 +41,8 @@ final class ProtoExtractor {
private ProtoExtractor() {
}
static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
Descriptor schema = new ProtobufSchema(subject.getSchema()).toDescriptor();
static List<DataSetField> extract(ProtobufSchema protobufSchema, KafkaPath topicOddrn, boolean isKey) {
Descriptor schema = protobufSchema.toDescriptor();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
var rootOddrn = topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value");

View file

@@ -28,6 +28,9 @@ import reactor.netty.http.client.HttpClient;
public class WebClientConfigurator {
private final WebClient.Builder builder = WebClient.builder();
private HttpClient httpClient = HttpClient
.create()
.proxyWithSystemProperties();
public WebClientConfigurator() {
configureObjectMapper(defaultOM());
@@ -90,12 +93,7 @@ public class WebClientConfigurator {
// Create webclient
SslContext context = contextBuilder.build();
var httpClient = HttpClient
.create()
.secure(t -> t.sslContext(context))
.proxyWithSystemProperties();
builder.clientConnector(new ReactorClientHttpConnector(httpClient));
httpClient = httpClient.secure(t -> t.sslContext(context));
return this;
}
@@ -131,6 +129,6 @@
}
public WebClient build() {
return builder.build();
return builder.clientConnector(new ReactorClientHttpConnector(httpClient)).build();
}
}
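Net effect of this change: proxyWithSystemProperties() now sits on the base HttpClient and configureSsl only layers secure() on top of it, so JVM proxy settings are honored whether or not a custom truststore is configured. A minimal sketch of the properties involved; the property names follow the standard JDK proxy conventions (an assumption about the runtime environment, not something this diff shows), and the values are placeholders:

import reactor.netty.http.client.HttpClient;

public class ProxyFromSystemProperties {
  public static void main(String[] args) {
    System.setProperty("https.proxyHost", "proxy.example.internal");
    System.setProperty("https.proxyPort", "3128");
    System.setProperty("http.nonProxyHosts", "localhost|127.0.0.1");

    // Every WebClient built by WebClientConfigurator now starts from a client like this,
    // so the proxy applies to plain and TLS-configured clients alike.
    HttpClient client = HttpClient.create().proxyWithSystemProperties();
    System.out.println(client);
  }
}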

View file

@@ -1,6 +1,7 @@
package com.provectus.kafka.ui;
import com.provectus.kafka.ui.container.KafkaConnectContainer;
import com.provectus.kafka.ui.container.KsqlDbContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
import java.nio.file.Path;
import java.util.List;
@@ -32,7 +33,7 @@ public abstract class AbstractIntegrationTest {
public static final String LOCAL = "local";
public static final String SECOND_LOCAL = "secondLocal";
private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1";
private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1"; // Append ".arm64" for a local run
public static final KafkaContainer kafka = new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))
@@ -49,6 +50,11 @@ public abstract class AbstractIntegrationTest {
.dependsOn(kafka)
.dependsOn(schemaRegistry);
protected static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
DockerImageName.parse("confluentinc/cp-ksqldb-server")
.withTag(CONFLUENT_PLATFORM_VERSION))
.withKafka(kafka);
@TempDir
public static Path tmpDir;

View file

@@ -2,6 +2,7 @@ package com.provectus.kafka.ui;
import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaReferenceDTO;
import com.provectus.kafka.ui.model.SchemaSubjectDTO;
import com.provectus.kafka.ui.model.SchemaSubjectsResponseDTO;
import com.provectus.kafka.ui.model.SchemaTypeDTO;
@@ -190,6 +191,58 @@ class SchemaRegistryServiceTests extends AbstractIntegrationTest {
Assertions.assertEquals(schema, actual.getSchema());
}
@Test
void shouldCreateNewProtobufSchemaWithRefs() {
NewSchemaSubjectDTO requestBody = new NewSchemaSubjectDTO()
.schemaType(SchemaTypeDTO.PROTOBUF)
.subject(subject + "-ref")
.schema("""
syntax = "proto3";
message MyRecord {
int32 id = 1;
string name = 2;
}
""");
webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubjectDTO.class))
.exchange()
.expectStatus()
.isOk();
requestBody = new NewSchemaSubjectDTO()
.schemaType(SchemaTypeDTO.PROTOBUF)
.subject(subject)
.schema("""
syntax = "proto3";
import "MyRecord.proto";
message MyRecordWithRef {
int32 id = 1;
MyRecord my_ref = 2;
}
""")
.references(List.of(new SchemaReferenceDTO().name("MyRecord.proto").subject(subject + "-ref").version(1)));
SchemaSubjectDTO actual = webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubjectDTO.class))
.exchange()
.expectStatus()
.isOk()
.expectBody(SchemaSubjectDTO.class)
.returnResult()
.getResponseBody();
Assertions.assertNotNull(actual);
Assertions.assertEquals(requestBody.getReferences(), actual.getReferences());
}
@Test
public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
webTestClient

View file

@@ -0,0 +1,86 @@
package com.provectus.kafka.ui.service.integration.odd;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableMap;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import com.provectus.kafka.ui.sr.model.SchemaReference;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import java.util.List;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
class SchemaReferencesResolverTest {
private final KafkaSrClientApi srClientMock = mock(KafkaSrClientApi.class);
private final SchemaReferencesResolver schemaReferencesResolver = new SchemaReferencesResolver(srClientMock);
@Test
void resolvesRefsUsingSrClient() {
mockSrCall("sub1", 1,
new SchemaSubject()
.schema("schema1"));
mockSrCall("sub2", 1,
new SchemaSubject()
.schema("schema2")
.references(
List.of(
new SchemaReference().name("ref2_1").subject("sub2_1").version(2),
new SchemaReference().name("ref2_2").subject("sub1").version(1))));
mockSrCall("sub2_1", 2,
new SchemaSubject()
.schema("schema2_1")
.references(
List.of(
new SchemaReference().name("ref2_1_1").subject("sub2_1_1").version(3),
new SchemaReference().name("ref1").subject("should_not_be_called").version(1)
))
);
mockSrCall("sub2_1_1", 3,
new SchemaSubject()
.schema("schema2_1_1"));
var resolvedRefsMono = schemaReferencesResolver.resolve(
List.of(
new SchemaReference().name("ref1").subject("sub1").version(1),
new SchemaReference().name("ref2").subject("sub2").version(1)));
StepVerifier.create(resolvedRefsMono)
.assertNext(refs ->
assertThat(refs)
.containsExactlyEntriesOf(
// checking map should be ordered
ImmutableMap.<String, String>builder()
.put("ref1", "schema1")
.put("ref2_1_1", "schema2_1_1")
.put("ref2_1", "schema2_1")
.put("ref2_2", "schema1")
.put("ref2", "schema2")
.build()))
.verifyComplete();
}
@Test
void returnsEmptyMapOnEmptyInputs() {
StepVerifier.create(schemaReferencesResolver.resolve(null))
.assertNext(map -> assertThat(map).isEmpty())
.verifyComplete();
StepVerifier.create(schemaReferencesResolver.resolve(List.of()))
.assertNext(map -> assertThat(map).isEmpty())
.verifyComplete();
}
private void mockSrCall(String subject, int version, SchemaSubject subjectToReturn) {
when(srClientMock.getSubjectVersion(subject, version + "", true))
.thenReturn(Mono.just(subjectToReturn));
}
}

View file

@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service.integration.odd;
import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.TopicState;
import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.empty;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -57,9 +58,8 @@ class TopicsExporterTest {
@Test
void doesNotExportTopicsWhichDontFitFiltrationRule() {
when(schemaRegistryClientMock.getSubjectVersion(anyString(), anyString()))
when(schemaRegistryClientMock.getSubjectVersion(anyString(), anyString(), anyBoolean()))
.thenReturn(Mono.error(WebClientResponseException.create(404, "NF", new HttpHeaders(), null, null, null)));
stats = Statistics.empty()
.toBuilder()
.clusterState(
@@ -95,14 +95,14 @@
@Test
void doesExportTopicData() {
when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest"))
when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest", false))
.thenReturn(Mono.just(
new SchemaSubject()
.schema("\"string\"")
.schemaType(SchemaType.AVRO)
));
when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest"))
when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest", false))
.thenReturn(Mono.just(
new SchemaSubject()
.schema("\"int\"")

View file

@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.opendatadiscovery.client.model.DataSetField;
@@ -15,8 +15,7 @@ class AvroExtractorTest {
@ValueSource(booleans = {true, false})
void test(boolean isKey) {
var list = AvroExtractor.extract(
new SchemaSubject()
.schema("""
new AvroSchema("""
{
"type": "record",
"name": "Message",

View file

@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
import java.net.URI;
import java.util.List;
import java.util.Map;
@@ -40,7 +40,7 @@ class JsonSchemaExtractorTest {
}
""";
var fields = JsonSchemaExtractor.extract(
new SchemaSubject().schema(jsonSchema),
new JsonSchema(jsonSchema),
KafkaPath.builder()
.cluster("localhost:9092")
.topic("someTopic")

View file

@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.integration.odd.schema;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.opendatadiscovery.client.model.DataSetField;
@@ -54,8 +54,7 @@ class ProtoExtractorTest {
}""";
var list = ProtoExtractor.extract(
new SchemaSubject()
.schema(protoSchema),
new ProtobufSchema(protoSchema),
KafkaPath.builder()
.cluster("localhost:9092")
.topic("someTopic")

View file

@@ -3,30 +3,24 @@ package com.provectus.kafka.ui.service.ksql;
import static org.assertj.core.api.Assertions.assertThat;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.TextNode;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.container.KsqlDbContainer;
import java.math.BigDecimal;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import org.junit.Ignore;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.testcontainers.shaded.org.awaitility.Awaitility;
import org.testcontainers.utility.DockerImageName;
import reactor.test.StepVerifier;
@Ignore
class KsqlApiClientTest extends AbstractIntegrationTest {
private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0"))
.withKafka(kafka);
@BeforeAll
static void startContainer() {
KSQL_DB.start();
@@ -74,7 +68,7 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
private void assertLastKsqTutorialQueryResult(KsqlApiClient client) {
// expected results:
//{"header":"Schema","columnNames":[...],"values":null}
//{"header":"Row","columnNames":null,"values":[[0.0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]}
//{"header":"Row","columnNames":null,"values":[[0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]}
//{"header":"Row","columnNames":null,"values":[[10.0,["18f4ea86"],1]]}
StepVerifier.create(
client.execute(
@@ -88,34 +82,26 @@
assertThat(header.getValues()).isNull();
})
.assertNext(row -> {
assertThat(row).isEqualTo(
KsqlApiClient.KsqlResponseTable.builder()
.header("Row")
.columnNames(null)
.values(List.of(List.of(
new DoubleNode(0.0),
new ArrayNode(JsonNodeFactory.instance)
var distance = (DecimalNode) row.getValues().get(0).get(0);
var riders = (ArrayNode) row.getValues().get(0).get(1);
var count = (IntNode) row.getValues().get(0).get(2);
assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(0)));
assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance)
.add(new TextNode("4ab5cbad"))
.add(new TextNode("8b6eae59"))
.add(new TextNode("4a7c7b41")),
new IntNode(3)
)))
.build()
);
.add(new TextNode("4a7c7b41")));
assertThat(count).isEqualTo(new IntNode(3));
})
.assertNext(row -> {
assertThat(row).isEqualTo(
KsqlApiClient.KsqlResponseTable.builder()
.header("Row")
.columnNames(null)
.values(List.of(List.of(
new DoubleNode(10.0),
new ArrayNode(JsonNodeFactory.instance)
.add(new TextNode("18f4ea86")),
new IntNode(1)
)))
.build()
);
var distance = (DecimalNode) row.getValues().get(0).get(0);
var riders = (ArrayNode) row.getValues().get(0).get(1);
var count = (IntNode) row.getValues().get(0).get(2);
assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(10)));
assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance)
.add(new TextNode("18f4ea86")));
assertThat(count).isEqualTo(new IntNode(1));
})
.verifyComplete();
}
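The assertions were loosened from whole-row equality to per-node checks because the numeric column now arrives as a Jackson DecimalNode rather than a DoubleNode, and JsonNode equality is type-sensitive (why the client now yields decimals, e.g. a Jackson setting or a dependency bump, is not visible in this diff). A small demo of that sensitivity, with illustrative values:

import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import java.math.BigDecimal;

public class NumberNodeEquality {
  public static void main(String[] args) {
    // Different node types are never equal, even for the same numeric value...
    System.out.println(new DoubleNode(0.0).equals(DecimalNode.valueOf(new BigDecimal(0))));            // false
    // ...while two DecimalNodes built from equal BigDecimals are.
    System.out.println(DecimalNode.valueOf(new BigDecimal(10)).equals(new DecimalNode(new BigDecimal(10)))); // true
  }
}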

View file

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.service.ksql;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.container.KsqlDbContainer;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.KsqlStreamDescriptionDTO;
import com.provectus.kafka.ui.model.KsqlTableDescriptionDTO;
@@ -15,14 +14,9 @@ import java.util.concurrent.CopyOnWriteArraySet;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.testcontainers.utility.DockerImageName;
class KsqlServiceV2Test extends AbstractIntegrationTest {
private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(
DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0"))
.withKafka(kafka);
private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>();
private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();

View file

@@ -77,6 +77,10 @@ paths:
required: true
schema:
type: string
- name: deleted
in: query
schema:
type: boolean
responses:
200:
description: OK
@@ -317,6 +321,10 @@ components:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
references:
type: array
items:
$ref: '#/components/schemas/SchemaReference'
required:
- id
- subject

View file

@@ -2976,6 +2976,10 @@ components:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
references:
type: array
items:
$ref: '#/components/schemas/SchemaReference'
required:
- id
- subject
@@ -2993,13 +2997,30 @@
schema:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
# upon updating a schema, the type of existing schema can't be changed
$ref: '#/components/schemas/SchemaType' # upon updating a schema, the type of existing schema can't be changed
references:
type: array
items:
$ref: '#/components/schemas/SchemaReference'
required:
- subject
- schema
- schemaType
SchemaReference:
type: object
properties:
name:
type: string
subject:
type: string
version:
type: integer
required:
- name
- subject
- version
CompatibilityLevel:
type: object
properties:

View file

@@ -20,7 +20,7 @@
<selenide.version>6.12.3</selenide.version>
<testng.version>7.7.1</testng.version>
<allure.version>2.22.2</allure.version>
<qase.io.version>3.0.4</qase.io.version>
<qase.io.version>3.0.5</qase.io.version>
<aspectj.version>1.9.9.1</aspectj.version>
<assertj.version>3.24.2</assertj.version>
<hamcrest.version>2.2</hamcrest.version>

View file

@@ -1,8 +1,8 @@
import { SerdeDescription } from 'generated-sources';
import { getPrefferedDescription } from 'components/Topics/Topic/SendMessage/utils';
import { getPreferredDescription } from 'components/Topics/Topic/SendMessage/utils';
export const getDefaultSerdeName = (serdes: SerdeDescription[]) => {
const preffered = getPrefferedDescription(serdes);
const preffered = getPreferredDescription(serdes);
if (preffered) {
return preffered.name || '';
}

View file

@@ -118,8 +118,8 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
valueSerde,
});
if (!keepContents) {
setValue('key', '');
setValue('content', '');
setValue('key', defaultValues.key || '');
setValue('content', defaultValues.content || '');
closeSidebar();
}
} catch (e) {

View file

@@ -13,21 +13,21 @@ jsf.option('fillProperties', false);
jsf.option('alwaysFakeOptionals', true);
jsf.option('failOnInvalidFormat', false);
const generateValueFromSchema = (preffered?: SerdeDescription) => {
if (!preffered?.schema) {
const generateValueFromSchema = (preferred?: SerdeDescription) => {
if (!preferred?.schema) {
return undefined;
}
const parsedSchema = JSON.parse(preffered.schema);
const parsedSchema = JSON.parse(preferred.schema);
const value = jsf.generate(parsedSchema);
return JSON.stringify(value);
};
export const getPrefferedDescription = (serdes: SerdeDescription[]) =>
export const getPreferredDescription = (serdes: SerdeDescription[]) =>
serdes.find((s) => s.preferred);
export const getDefaultValues = (serdes: TopicSerdeSuggestion) => {
const keySerde = getPrefferedDescription(serdes.key || []);
const valueSerde = getPrefferedDescription(serdes.value || []);
const keySerde = getPreferredDescription(serdes.key || []);
const valueSerde = getPreferredDescription(serdes.value || []);
return {
key: generateValueFromSchema(keySerde),
@@ -65,15 +65,15 @@ export const validateBySchema = (
return errors;
}
let parcedSchema;
let parsedSchema;
let parsedValue;
try {
parcedSchema = JSON.parse(schema);
parsedSchema = JSON.parse(schema);
} catch (e) {
return [`Error in parsing the "${type}" field schema`];
}
if (parcedSchema.type === 'string') {
if (parsedSchema.type === 'string') {
return [];
}
try {
@@ -84,7 +84,7 @@
try {
const ajv = new Ajv();
addFormats(ajv);
const validate = ajv.compile(parcedSchema);
const validate = ajv.compile(parsedSchema);
validate(parsedValue);
if (validate.errors) {
errors = validate.errors.map(

View file

@@ -26,17 +26,17 @@
<assertj.version>3.19.0</assertj.version>
<avro.version>1.11.1</avro.version>
<byte-buddy.version>1.12.19</byte-buddy.version>
<confluent.version>7.3.3</confluent.version>
<confluent.version>7.4.0</confluent.version>
<datasketches-java.version>3.1.0</datasketches-java.version>
<groovy.version>3.0.13</groovy.version>
<jackson.version>2.14.0</jackson.version>
<kafka-clients.version>3.3.1</kafka-clients.version>
<kafka-clients.version>3.5.0</kafka-clients.version>
<org.mapstruct.version>1.5.5.Final</org.mapstruct.version>
<org.projectlombok.version>1.18.24</org.projectlombok.version>
<protobuf-java.version>3.23.3</protobuf-java.version>
<scala-lang.library.version>2.13.9</scala-lang.library.version>
<snakeyaml.version>2.0</snakeyaml.version>
<spring-boot.version>3.0.6</spring-boot.version>
<spring-boot.version>3.1.1</spring-boot.version>
<kafka-ui-serde-api.version>1.0.0</kafka-ui-serde-api.version>
<odd-oddrn-generator.version>0.1.17</odd-oddrn-generator.version>
<odd-oddrn-client.version>0.1.23</odd-oddrn-client.version>