ISSUE-166 Pass ProtobufSchemaProvider to CachedSchemaRegistryClient to deserialize protobuf records (#178)
parent: 40d85643bb
commit: ba4e1748ee
1 changed file with 13 additions and 8 deletions
@@ -4,12 +4,14 @@ import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.protobuf.Message;
 import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import io.confluent.kafka.schemaregistry.SchemaProvider;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
 import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
 import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
@@ -41,14 +43,17 @@ public class SchemaRegistryRecordDeserializer implements RecordDeserializer {
         this.cluster = cluster;
         this.objectMapper = objectMapper;
 
-        this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry()).map(e ->
-                new CachedSchemaRegistryClient(
-                        Collections.singletonList(e),
-                        CLIENT_IDENTITY_MAP_CAPACITY,
-                        Collections.singletonList(new AvroSchemaProvider()),
-                        Collections.emptyMap()
-                )
-        ).orElse(null);
+        this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry())
+                .map(schemaRegistryUrl -> {
+                    List<SchemaProvider> schemaProviders = List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
+                    return new CachedSchemaRegistryClient(
+                            Collections.singletonList(schemaRegistryUrl),
+                            CLIENT_IDENTITY_MAP_CAPACITY,
+                            schemaProviders,
+                            Collections.emptyMap()
+                    );
+                }
+        ).orElse(null);
 
         this.avroDeserializer = Optional.ofNullable(this.schemaRegistryClient)
                 .map(KafkaAvroDeserializer::new)
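For context, a minimal, self-contained sketch (not taken from the repository) of the pattern this commit introduces: one CachedSchemaRegistryClient constructed with both an AvroSchemaProvider and a ProtobufSchemaProvider, shared by an Avro and a Protobuf deserializer. The registry URL and identity-map capacity below are placeholder assumptions, and the class name is hypothetical.

import java.util.Collections;
import java.util.List;

import com.google.protobuf.Message;
import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;

public class ProtobufAwareClientSketch {
    public static void main(String[] args) {
        // Placeholder values; the real code takes the URL from KafkaCluster
        // and the capacity from CLIENT_IDENTITY_MAP_CAPACITY.
        String schemaRegistryUrl = "http://localhost:8081";
        int identityMapCapacity = 100;

        // Register a provider per schema type; the client can only parse
        // schemas it has a provider for, so without ProtobufSchemaProvider
        // protobuf subjects fetched from the registry fail to resolve.
        List<SchemaProvider> schemaProviders =
                List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
        SchemaRegistryClient client = new CachedSchemaRegistryClient(
                Collections.singletonList(schemaRegistryUrl),
                identityMapCapacity,
                schemaProviders,
                Collections.emptyMap()
        );

        // Both deserializers can share the same cached client.
        KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer(client);
        KafkaProtobufDeserializer<Message> protobufDeserializer =
                new KafkaProtobufDeserializer<>(client);
    }
}

Registering the extra provider is what enables protobuf records to be deserialized (ISSUE-166); the Avro path is unchanged, since AvroSchemaProvider is still in the list.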