@@ -6,6 +6,7 @@ import com.google.protobuf.util.JsonFormat;
 import com.provectus.kafka.ui.model.MessageSchemaDTO;
 import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
 import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
+import com.provectus.kafka.ui.serde.schemaregistry.StringMessageFormatter;
 import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
 import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
@@ -22,20 +23,26 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import javax.annotation.Nullable;
 import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.utils.Bytes;
 
-//TODO: currently we assume that keys for this serde are always string - need to discuss if it is ok
+@Slf4j
 public class ProtobufFileRecordSerDe implements RecordSerDe {
+  private static final StringMessageFormatter FALLBACK_FORMATTER = new StringMessageFormatter();
+
   private final ProtobufSchema protobufSchema;
   private final Path protobufSchemaPath;
   private final ProtobufSchemaConverter schemaConverter = new ProtobufSchemaConverter();
   private final Map<String, Descriptor> messageDescriptorMap;
+  private final Map<String, Descriptor> keyMessageDescriptorMap;
   private final Descriptor defaultMessageDescriptor;
+  private final Descriptor defaultKeyMessageDescriptor;
 
   public ProtobufFileRecordSerDe(Path protobufSchemaPath, Map<String, String> messageNameMap,
-                                 String defaultMessageName)
+                                 Map<String, String> keyMessageNameMap, String defaultMessageName,
+                                 @Nullable String defaultKeyMessageName)
       throws IOException {
     this.protobufSchemaPath = protobufSchemaPath;
     try (final Stream<String> lines = Files.lines(protobufSchemaPath)) {
@@ -49,35 +56,70 @@ public class ProtobufFileRecordSerDe implements RecordSerDe {
       }
       this.messageDescriptorMap = new HashMap<>();
       if (messageNameMap != null) {
-        for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
-          var descriptor = Objects.requireNonNull(protobufSchema.toDescriptor(entry.getValue()),
-              "The given message type is not found in protobuf definition: "
-                  + entry.getValue());
-          messageDescriptorMap.put(entry.getKey(), descriptor);
-        }
+        populateDescriptors(messageNameMap, messageDescriptorMap);
+      }
+      this.keyMessageDescriptorMap = new HashMap<>();
+      if (keyMessageNameMap != null) {
+        populateDescriptors(keyMessageNameMap, keyMessageDescriptorMap);
       }
       defaultMessageDescriptor = Objects.requireNonNull(protobufSchema.toDescriptor(),
           "The given message type is not found in protobuf definition: "
              + defaultMessageName);
+      if (defaultKeyMessageName != null) {
+        defaultKeyMessageDescriptor = schema.copy(defaultKeyMessageName).toDescriptor();
+      } else {
+        defaultKeyMessageDescriptor = null;
+      }
+    }
+  }
+
+  private void populateDescriptors(Map<String, String> messageNameMap, Map<String, Descriptor> messageDescriptorMap) {
+    for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
+      var descriptor = Objects.requireNonNull(protobufSchema.toDescriptor(entry.getValue()),
+          "The given message type is not found in protobuf definition: "
+              + entry.getValue());
+      messageDescriptorMap.put(entry.getKey(), descriptor);
     }
   }
 
   @Override
   public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    try {
-      var builder = DeserializedKeyValue.builder();
-      if (msg.key() != null) {
-        builder.key(new String(msg.key().get()));
-        builder.keyFormat(MessageFormat.UNKNOWN);
+    var builder = DeserializedKeyValue.builder();
+
+    if (msg.key() != null) {
+      Descriptor descriptor = getKeyDescriptor(msg.topic());
+      if (descriptor == null) {
+        builder.key(FALLBACK_FORMATTER.format(msg.topic(), msg.key().get()));
+        builder.keyFormat(FALLBACK_FORMATTER.getFormat());
+      } else {
+        try {
+          builder.key(parse(msg.key().get(), descriptor));
+          builder.keyFormat(MessageFormat.PROTOBUF);
+        } catch (Throwable e) {
+          log.debug("Failed to deserialize key as protobuf, falling back to string formatter", e);
+          builder.key(FALLBACK_FORMATTER.format(msg.topic(), msg.key().get()));
+          builder.keyFormat(FALLBACK_FORMATTER.getFormat());
+        }
       }
-      if (msg.value() != null) {
+    }
+
+    if (msg.value() != null) {
+      try {
         builder.value(parse(msg.value().get(), getDescriptor(msg.topic())));
         builder.valueFormat(MessageFormat.PROTOBUF);
+      } catch (Throwable e) {
+        log.debug("Failed to deserialize value as protobuf, falling back to string formatter", e);
+        builder.value(FALLBACK_FORMATTER.format(msg.topic(), msg.value().get()));
+        builder.valueFormat(FALLBACK_FORMATTER.getFormat());
       }
-      return builder.build();
-    } catch (Throwable e) {
-      throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
     }
+
+    return builder.build();
+  }
+
+  @Nullable
+  private Descriptor getKeyDescriptor(String topic) {
+    return keyMessageDescriptorMap.getOrDefault(topic, defaultKeyMessageDescriptor);
   }
 
   private Descriptor getDescriptor(String topic) {
@@ -99,40 +141,67 @@ public class ProtobufFileRecordSerDe implements RecordSerDe {
                                                   @Nullable String key,
                                                   @Nullable String data,
                                                   @Nullable Integer partition) {
-    if (data == null) {
-      return new ProducerRecord<>(topic, partition, Objects.requireNonNull(key).getBytes(), null);
+    byte[] keyPayload = null;
+    byte[] valuePayload = null;
+
+    if (key != null) {
+      Descriptor keyDescriptor = getKeyDescriptor(topic);
+      if (keyDescriptor == null) {
+        keyPayload = key.getBytes();
+      } else {
+        DynamicMessage.Builder builder = DynamicMessage.newBuilder(keyDescriptor);
+        try {
+          JsonFormat.parser().merge(key, builder);
+          keyPayload = builder.build().toByteArray();
+        } catch (Throwable e) {
+          throw new RuntimeException("Failed to merge record key for topic " + topic, e);
+        }
+      }
     }
-    DynamicMessage.Builder builder = DynamicMessage.newBuilder(getDescriptor(topic));
-    try {
-      JsonFormat.parser().merge(data, builder);
-      final DynamicMessage message = builder.build();
-      return new ProducerRecord<>(
-          topic,
-          partition,
-          Optional.ofNullable(key).map(String::getBytes).orElse(null),
-          message.toByteArray()
-      );
-    } catch (Throwable e) {
-      throw new RuntimeException("Failed to merge record for topic " + topic, e);
+
+    if (data != null) {
+      DynamicMessage.Builder builder = DynamicMessage.newBuilder(getDescriptor(topic));
+      try {
+        JsonFormat.parser().merge(data, builder);
+        valuePayload = builder.build().toByteArray();
+      } catch (Throwable e) {
+        throw new RuntimeException("Failed to merge record value for topic " + topic, e);
+      }
     }
+
+    return new ProducerRecord<>(
+        topic,
+        partition,
+        keyPayload,
+        valuePayload);
   }
 
   @Override
   public TopicMessageSchemaDTO getTopicSchema(String topic) {
+    JsonSchema keyJsonSchema;
+
+    Descriptor keyDescriptor = getKeyDescriptor(topic);
+    if (keyDescriptor == null) {
+      keyJsonSchema = JsonSchema.stringSchema();
+    } else {
+      keyJsonSchema = schemaConverter.convert(
+          protobufSchemaPath.toUri(),
+          keyDescriptor);
+    }
-    final JsonSchema jsonSchema = schemaConverter.convert(
-        protobufSchemaPath.toUri(),
-        getDescriptor(topic)
-    );
     final MessageSchemaDTO keySchema = new MessageSchemaDTO()
         .name(protobufSchema.fullName())
         .source(MessageSchemaDTO.SourceEnum.PROTO_FILE)
-        .schema(JsonSchema.stringSchema().toJson());
+        .schema(keyJsonSchema.toJson());
+
+    final JsonSchema valueJsonSchema = schemaConverter.convert(
+        protobufSchemaPath.toUri(),
+        getDescriptor(topic));
 
     final MessageSchemaDTO valueSchema = new MessageSchemaDTO()
         .name(protobufSchema.fullName())
         .source(MessageSchemaDTO.SourceEnum.PROTO_FILE)
-        .schema(jsonSchema.toJson());
+        .schema(valueJsonSchema.toJson());
 
     return new TopicMessageSchemaDTO()
         .key(keySchema)