Allow producing messages with an empty key/value
* [ISSUE 1046] UI allows submitting a message with an empty key & value (#1264)
* [ISSUE 1046] UI allows submitting a message with an empty key & value
* Update contract (cherry picked from commit 4b730eb288)
* Backend fix
* Refactoring
* Fix nullable & checkstyle
* Fix JsonNullable get
* Remove unnecessary check and add a test

Co-authored-by: Oleg Shur <workshur@gmail.com>

This commit is contained in:
  parent 5a487e437d
  commit 32a2e753b0

21 changed files with 348 additions and 495 deletions
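Note: the contract and backend changes below hinge on telling an omitted field apart from an explicit null in the request body. A minimal TypeScript sketch of that distinction (illustrative only, not part of the commit):

// JSON.stringify drops `undefined` properties but keeps `null` ones,
// so "field not sent" and "field sent as null" are different payloads.
const omitted = JSON.stringify({ partition: 0 });                  // '{"partition":0}'
const explicitNull = JSON.stringify({ partition: 0, key: null });  // '{"partition":0,"key":null}'
console.log(omitted, explicitNull);
// The backend mirrors this tri-state with JsonNullable: "undefined" means the
// client never sent the field, while JsonNullable.of(null) is a deliberate null.

The full diff follows.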
@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.config;
 
+import com.fasterxml.jackson.databind.Module;
 import com.provectus.kafka.ui.model.JmxConnectionInfo;
 import com.provectus.kafka.ui.util.JmxPoolFactory;
 import java.util.Collections;
@@ -9,6 +10,7 @@ import lombok.AllArgsConstructor;
 import org.apache.commons.pool2.KeyedObjectPool;
 import org.apache.commons.pool2.impl.GenericKeyedObjectPool;
 import org.apache.commons.pool2.impl.GenericKeyedObjectPoolConfig;
+import org.openapitools.jackson.nullable.JsonNullableModule;
 import org.springframework.beans.factory.ObjectProvider;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.autoconfigure.web.ServerProperties;
@@ -78,4 +80,9 @@ public class Config {
         .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
         .build();
   }
+
+  @Bean
+  public JsonNullableModule jsonNullableModule() {
+    return new JsonNullableModule();
+  }
 }
@@ -39,7 +39,7 @@ public class DeserializationService {
             objectMapper);
       } else {
         log.info("Using SchemaRegistryAwareRecordSerDe for cluster '{}'", cluster.getName());
-        return new SchemaRegistryAwareRecordSerDe(cluster);
+        return new SchemaRegistryAwareRecordSerDe(cluster, objectMapper);
       }
     } catch (Throwable e) {
       throw new RuntimeException("Can't init deserializer", e);
@@ -13,6 +13,8 @@ import org.apache.kafka.common.utils.Bytes;
 
 public class SimpleRecordSerDe implements RecordSerDe {
 
+  private static final ObjectMapper objectMapper = new ObjectMapper();
+
   @Override
   public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
     var builder = DeserializedKeyValue.builder();
@@ -45,7 +47,7 @@ public class SimpleRecordSerDe implements RecordSerDe {
     final MessageSchemaDTO schema = new MessageSchemaDTO()
         .name("unknown")
         .source(MessageSchemaDTO.SourceEnum.UNKNOWN)
-        .schema(JsonSchema.stringSchema().toJson(new ObjectMapper()));
+        .schema(JsonSchema.stringSchema().toJson(objectMapper));
     return new TopicMessageSchemaDTO()
         .key(schema)
         .value(schema);
@@ -10,7 +10,6 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.MessageSchemaDTO;
 import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
 import com.provectus.kafka.ui.serde.RecordSerDe;
-import com.provectus.kafka.ui.util.ConsumerRecordUtil;
 import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
 import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
 import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
@@ -27,7 +26,6 @@ import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
 import java.net.URI;
 import java.nio.ByteBuffer;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -47,31 +45,32 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
 
   private static final int CLIENT_IDENTITY_MAP_CAPACITY = 100;
 
+  private static final StringMessageFormatter stringFormatter = new StringMessageFormatter();
+  private static final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
+  private static final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
+
   private final KafkaCluster cluster;
   private final Map<String, MessageFormatter> valueFormatMap = new ConcurrentHashMap<>();
   private final Map<String, MessageFormatter> keyFormatMap = new ConcurrentHashMap<>();
 
   @Nullable
   private final SchemaRegistryClient schemaRegistryClient;
 
   @Nullable
   private final AvroMessageFormatter avroFormatter;
 
   @Nullable
   private final ProtobufMessageFormatter protobufFormatter;
 
   @Nullable
   private final JsonSchemaMessageFormatter jsonSchemaMessageFormatter;
 
-  private final StringMessageFormatter stringFormatter = new StringMessageFormatter();
-  private final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
-  private final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
-  private final ObjectMapper objectMapper = new ObjectMapper();
+  private ObjectMapper objectMapper;
 
-  private static SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster) {
+  private SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster,
+                                                          ObjectMapper objectMapper) {
     if (cluster.getSchemaRegistry() == null) {
       throw new ValidationException("schemaRegistry is not specified");
     }
+    this.objectMapper = objectMapper;
 
     List<SchemaProvider> schemaProviders =
         List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider());
 
@@ -97,10 +96,10 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
     );
   }
 
-  public SchemaRegistryAwareRecordSerDe(KafkaCluster cluster) {
+  public SchemaRegistryAwareRecordSerDe(KafkaCluster cluster, ObjectMapper objectMapper) {
     this.cluster = cluster;
     this.schemaRegistryClient = cluster.getSchemaRegistry() != null
-        ? createSchemaRegistryClient(cluster)
+        ? createSchemaRegistryClient(cluster, objectMapper)
         : null;
     if (schemaRegistryClient != null) {
       this.avroFormatter = new AvroMessageFormatter(schemaRegistryClient);
@@ -147,41 +146,45 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
       @Nullable String key,
       @Nullable String data,
       @Nullable Integer partition) {
-    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
     final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
+    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
 
-    final byte[] serializedValue = data != null
-        ? serialize(maybeValueSchema, topic, data, false)
-        : null;
-    final byte[] serializedKey = key != null
-        ? serialize(maybeKeySchema, topic, key, true)
-        : null;
+    final byte[] serializedKey = maybeKeySchema.isPresent()
+        ? serialize(maybeKeySchema.get(), topic, key, true)
+        : serialize(key);
+
+    final byte[] serializedValue = maybeValueSchema.isPresent()
+        ? serialize(maybeValueSchema.get(), topic, data, false)
+        : serialize(data);
 
     return new ProducerRecord<>(topic, partition, serializedKey, serializedValue);
   }
 
   @SneakyThrows
-  private byte[] serialize(
-      Optional<SchemaMetadata> maybeSchema, String topic, String value, boolean isKey) {
-    if (maybeSchema.isPresent()) {
-      final SchemaMetadata schema = maybeSchema.get();
-
-      MessageReader<?> reader;
-      if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
-        reader = new ProtobufMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
-        reader = new AvroMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
-        reader = new JsonSchemaMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else {
-        throw new IllegalStateException("Unsupported schema type: " + schema.getSchemaType());
-      }
-
-      return reader.read(value);
-    } else {
-      // if no schema provided serialize input as raw string
-      return value.getBytes();
-    }
+  private byte[] serialize(SchemaMetadata schema, String topic, String value, boolean isKey) {
+    if (value == null) {
+      return null;
+    }
+    MessageReader<?> reader;
+    if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
+      reader = new ProtobufMessageReader(topic, isKey, schemaRegistryClient, schema);
+    } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
+      reader = new AvroMessageReader(topic, isKey, schemaRegistryClient, schema);
+    } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
+      reader = new JsonSchemaMessageReader(topic, isKey, schemaRegistryClient, schema);
+    } else {
+      throw new IllegalStateException("Unsupported schema type: " + schema.getSchemaType());
    }
 
+    return reader.read(value);
+  }
+
+  private byte[] serialize(String value) {
+    if (value == null) {
+      return null;
+    }
+    // if no schema provided serialize input as raw string
+    return value.getBytes();
+  }
 
   @Override
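The reworked serializer above now splits into two overloads: a schema-aware path that picks a reader by schema type, and a no-schema fallback that emits raw string bytes; both pass a null input straight through. A rough TypeScript analogue of the fallback's null-passthrough behavior (hypothetical helper, for illustration only):

// Analogue of the new no-schema serialize(String): null in, null out; else raw UTF-8 bytes.
const serializeRaw = (value: string | null): Uint8Array | null =>
  value === null ? null : new TextEncoder().encode(value);

console.log(serializeRaw(null));     // null — an empty key/value stays empty
console.log(serializeRaw('hello'));  // Uint8Array of UTF-8 bytes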
@@ -81,9 +81,6 @@ public class MessagesService {
 
   public Mono<RecordMetadata> sendMessage(KafkaCluster cluster, String topic,
                                           CreateTopicMessageDTO msg) {
-    if (msg.getKey() == null && msg.getContent() == null) {
-      throw new ValidationException("Invalid message: both key and value can't be null");
-    }
     if (msg.getPartition() != null
         && msg.getPartition() > metricsCache.get(cluster).getTopicDescriptions()
             .get(topic).partitions().size() - 1) {
@@ -100,8 +97,8 @@ public class MessagesService {
     try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
       ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(
           topic,
-          msg.getKey(),
-          msg.getContent(),
+          msg.getKey().orElse(null),
+          msg.getContent().orElse(null),
           msg.getPartition()
       );
       producerRecord = new ProducerRecord<>(
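With the both-null guard removed, a message whose key and value are both explicitly null is accepted end to end (the new noKeyAndNoContentPresentTest below exercises exactly this). A hedged sketch of such a request from a client; the endpoint path is taken from the test mocks later in this commit, and the fetch usage is illustrative:

// Produce a message with an empty key and value to partition 0.
const sendEmptyMessage = async (clusterName: string, topicName: string) =>
  fetch(`/api/clusters/${clusterName}/topics/${topicName}/messages`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // key/content are explicit nulls, not omitted fields
    body: JSON.stringify({ key: null, content: null, partition: 0 }),
  });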
@@ -17,7 +17,7 @@ class SchemaRegistryRecordDeserializerTest {
       new SchemaRegistryAwareRecordSerDe(
           KafkaCluster.builder()
               .schemaNameTemplate("%s-value")
-              .build()
+              .build(), new ObjectMapper()
       );
 
   @Test
@@ -465,6 +465,20 @@ public class SendAndReadTests extends AbstractBaseTest {
         });
   }
 
+  @Test
+  void noKeyAndNoContentPresentTest() {
+    new SendAndReadSpec()
+        .withMsgToSend(
+            new CreateTopicMessageDTO()
+                .key(null)
+                .content(null)
+        )
+        .doAssert(polled -> {
+          assertThat(polled.getKey()).isNull();
+          assertThat(polled.getContent()).isNull();
+        });
+  }
+
   @SneakyThrows
   private void assertJsonEqual(String actual, String expected) {
     var mapper = new ObjectMapper();
@@ -1870,12 +1870,16 @@ components:
         type: integer
       key:
         type: string
+        nullable: true
       headers:
         type: object
         additionalProperties:
           type: string
       content:
         type: string
+        nullable: true
+      required:
+        - partition
 
     TopicMessageSchema:
       type: object
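After this contract change, the regenerated TypeScript client should allow explicit nulls for both fields while making partition mandatory. A sketch of the expected model shape (an assumption about the OpenAPI generator's output, not copied from the commit):

// Assumed shape of the regenerated CreateTopicMessage model:
interface CreateTopicMessage {
  partition: number; // now listed under `required`
  key: string | null; // nullable: true
  headers?: { [key: string]: string };
  content: string | null; // nullable: true
}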
@@ -3,7 +3,7 @@ import styled from 'styled-components';
 
 export const Alert = styled.div<{ $type: AlertType }>`
   background-color: ${({ $type, theme }) => theme.alert.color[$type]};
-  width: 400px;
+  min-width: 400px;
   min-height: 64px;
   border-radius: 8px;
   padding: 12px;
@@ -20,8 +20,14 @@ export const Title = styled.div`
   font-size: 14px;
 `;
 
-export const Message = styled.p`
+export const Message = styled.div`
   font-weight: normal;
   font-size: 14px;
   margin: 3px 0;
+
+  ol,
+  ul {
+    padding-left: 25px;
+    list-style: auto;
+  }
 `;

@@ -168,7 +168,7 @@ export const AlertsContainer = styled.div`
   width: 500px;
   position: fixed;
   bottom: 15px;
-  left: 15px;
+  right: 15px;
   z-index: 1000;
 
   @media screen and (max-width: 1023px) {
@@ -1,47 +1,32 @@
 import JSONEditor from 'components/common/JSONEditor/JSONEditor';
 import PageLoader from 'components/common/PageLoader/PageLoader';
-import {
-  CreateTopicMessage,
-  Partition,
-  TopicMessageSchema,
-} from 'generated-sources';
 import React from 'react';
 import { useForm, Controller } from 'react-hook-form';
-import { useHistory } from 'react-router';
+import { useHistory, useParams } from 'react-router';
 import { clusterTopicMessagesPath } from 'lib/paths';
 import jsf from 'json-schema-faker';
+import { fetchTopicMessageSchema, messagesApiClient } from 'redux/actions';
+import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
+import { alertAdded } from 'redux/reducers/alerts/alertsSlice';
+import { now } from 'lodash';
+import { Button } from 'components/common/Button/Button';
+import { ClusterName, TopicName } from 'redux/interfaces';
+import {
+  getMessageSchemaByTopicName,
+  getPartitionsByTopicName,
+  getTopicMessageSchemaFetched,
+} from 'redux/reducers/topics/selectors';
 
 import validateMessage from './validateMessage';
 
-export interface Props {
-  clusterName: string;
-  topicName: string;
-  fetchTopicMessageSchema: (clusterName: string, topicName: string) => void;
-  sendTopicMessage: (
-    clusterName: string,
-    topicName: string,
-    payload: CreateTopicMessage
-  ) => void;
-  messageSchema: TopicMessageSchema | undefined;
-  schemaIsFetched: boolean;
-  messageIsSending: boolean;
-  partitions: Partition[];
+interface RouterParams {
+  clusterName: ClusterName;
+  topicName: TopicName;
 }
 
-const SendMessage: React.FC<Props> = ({
-  clusterName,
-  topicName,
-  fetchTopicMessageSchema,
-  sendTopicMessage,
-  messageSchema,
-  schemaIsFetched,
-  messageIsSending,
-  partitions,
-}) => {
-  const [keyExampleValue, setKeyExampleValue] = React.useState('');
-  const [contentExampleValue, setContentExampleValue] = React.useState('');
-  const [schemaIsReady, setSchemaIsReady] = React.useState(false);
-  const [schemaErrors, setSchemaErrors] = React.useState<string[]>([]);
+const SendMessage: React.FC = () => {
+  const dispatch = useAppDispatch();
+  const { clusterName, topicName } = useParams<RouterParams>();
   const {
     register,
     handleSubmit,
@@ -54,27 +39,38 @@ const SendMessage: React.FC<Props> = ({
   jsf.option('alwaysFakeOptionals', true);
 
   React.useEffect(() => {
-    fetchTopicMessageSchema(clusterName, topicName);
+    dispatch(fetchTopicMessageSchema(clusterName, topicName));
   }, []);
-  React.useEffect(() => {
-    if (schemaIsFetched && messageSchema) {
-      setKeyExampleValue(
-        JSON.stringify(
-          jsf.generate(JSON.parse(messageSchema.key.schema)),
-          null,
-          '\t'
-        )
-      );
-      setContentExampleValue(
-        JSON.stringify(
-          jsf.generate(JSON.parse(messageSchema.value.schema)),
-          null,
-          '\t'
-        )
-      );
-      setSchemaIsReady(true);
-    }
-  }, [schemaIsFetched]);
+
+  const messageSchema = useAppSelector((state) =>
+    getMessageSchemaByTopicName(state, topicName)
+  );
+  const partitions = useAppSelector((state) =>
+    getPartitionsByTopicName(state, topicName)
+  );
+  const schemaIsFetched = useAppSelector(getTopicMessageSchemaFetched);
+
+  const keyDefaultValue = React.useMemo(() => {
+    if (!schemaIsFetched || !messageSchema) {
+      return undefined;
+    }
+    return JSON.stringify(
+      jsf.generate(JSON.parse(messageSchema.key.schema)),
+      null,
+      '\t'
+    );
+  }, [messageSchema, schemaIsFetched]);
+
+  const contentDefaultValue = React.useMemo(() => {
+    if (!schemaIsFetched || !messageSchema) {
+      return undefined;
+    }
+    return JSON.stringify(
+      jsf.generate(JSON.parse(messageSchema.value.schema)),
+      null,
+      '\t'
    );
+  }, [messageSchema, schemaIsFetched]);
 
   const onSubmit = async (data: {
     key: string;
@@ -83,30 +79,55 @@
     partition: number;
   }) => {
     if (messageSchema) {
-      const key = data.key || keyExampleValue;
-      const content = data.content || contentExampleValue;
-      const { partition } = data;
+      const { partition, key, content } = data;
       const headers = data.headers ? JSON.parse(data.headers) : undefined;
-      const messageIsValid = await validateMessage(
-        key,
-        content,
-        messageSchema,
-        setSchemaErrors
-      );
-
-      if (messageIsValid) {
-        sendTopicMessage(clusterName, topicName, {
-          key,
-          content,
-          headers,
-          partition,
-        });
-        history.push(clusterTopicMessagesPath(clusterName, topicName));
+      const errors = validateMessage(key, content, messageSchema);
+      if (errors.length > 0) {
+        dispatch(
+          alertAdded({
+            id: `${clusterName}-${topicName}-createTopicMessageError`,
+            type: 'error',
+            title: 'Validation Error',
+            message: (
+              <ul>
+                {errors.map((e) => (
+                  <li>{e}</li>
+                ))}
+              </ul>
+            ),
+            createdAt: now(),
+          })
+        );
+        return;
       }
+
+      try {
+        await messagesApiClient.sendTopicMessages({
+          clusterName,
+          topicName,
+          createTopicMessage: {
+            key: !key ? null : key,
+            content: !content ? null : content,
+            headers,
+            partition,
+          },
+        });
+      } catch (e) {
+        dispatch(
+          alertAdded({
+            id: `${clusterName}-${topicName}-sendTopicMessagesError`,
+            type: 'error',
+            title: `Error in sending a message to ${topicName}`,
+            message: e?.message,
+            createdAt: now(),
+          })
+        );
+      }
+      history.push(clusterTopicMessagesPath(clusterName, topicName));
     }
   };
 
-  if (!schemaIsReady) {
+  if (!schemaIsFetched) {
     return <PageLoader />;
   }
   return (
@@ -121,7 +142,7 @@ const SendMessage: React.FC<Props> = ({
           <select
             id="select"
             defaultValue={partitions[0].partition}
-            disabled={isSubmitting || messageIsSending}
+            disabled={isSubmitting}
            {...register('partition')}
           >
             {partitions.map((partition) => (
@@ -142,8 +163,8 @@ const SendMessage: React.FC<Props> = ({
             name="key"
             render={({ field: { name, onChange } }) => (
               <JSONEditor
-                readOnly={isSubmitting || messageIsSending}
-                defaultValue={keyExampleValue}
+                readOnly={isSubmitting}
+                defaultValue={keyDefaultValue}
                 name={name}
                 onChange={onChange}
               />
@@ -157,8 +178,8 @@ const SendMessage: React.FC<Props> = ({
             name="content"
             render={({ field: { name, onChange } }) => (
               <JSONEditor
-                readOnly={isSubmitting || messageIsSending}
-                defaultValue={contentExampleValue}
+                readOnly={isSubmitting}
+                defaultValue={contentDefaultValue}
                 name={name}
                 onChange={onChange}
               />
@@ -174,7 +195,7 @@ const SendMessage: React.FC<Props> = ({
             name="headers"
             render={({ field: { name, onChange } }) => (
               <JSONEditor
-                readOnly={isSubmitting || messageIsSending}
+                readOnly={isSubmitting}
                 defaultValue="{}"
                 name={name}
                 onChange={onChange}
@@ -184,22 +205,14 @@
               />
             </div>
           </div>
-          {schemaErrors && (
-            <div className="mb-4">
-              {schemaErrors.map((err) => (
-                <p className="help is-danger" key={err}>
-                  {err}
-                </p>
-              ))}
-            </div>
-          )}
-          <button
+          <Button
+            buttonSize="M"
+            buttonType="primary"
             type="submit"
-            className="button is-primary"
-            disabled={!isDirty || isSubmitting || messageIsSending}
+            disabled={!isDirty || isSubmitting}
           >
             Send
-          </button>
+          </Button>
         </form>
       </div>
     );
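The rewritten onSubmit no longer substitutes the generated example when an editor is empty: a cleared field is normalized to an explicit null on the wire. A condensed sketch of that `!key ? null : key` normalization (names mirror the component above):

// '' (cleared editor) and undefined both become an explicit null in the payload.
const normalize = (value?: string): string | null => (!value ? null : value);

console.log(normalize(''));        // null
console.log(normalize(undefined)); // null
console.log(normalize('{"a":1}')); // '{"a":1}'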
@@ -1,44 +0,0 @@
-import { connect } from 'react-redux';
-import { RootState, ClusterName, TopicName } from 'redux/interfaces';
-import { withRouter, RouteComponentProps } from 'react-router-dom';
-import { fetchTopicMessageSchema, sendTopicMessage } from 'redux/actions';
-import {
-  getMessageSchemaByTopicName,
-  getPartitionsByTopicName,
-  getTopicMessageSchemaFetched,
-  getTopicMessageSending,
-} from 'redux/reducers/topics/selectors';
-
-import SendMessage from './SendMessage';
-
-interface RouteProps {
-  clusterName: ClusterName;
-  topicName: TopicName;
-}
-
-type OwnProps = RouteComponentProps<RouteProps>;
-
-const mapStateToProps = (
-  state: RootState,
-  {
-    match: {
-      params: { topicName, clusterName },
-    },
-  }: OwnProps
-) => ({
-  clusterName,
-  topicName,
-  messageSchema: getMessageSchemaByTopicName(state, topicName),
-  schemaIsFetched: getTopicMessageSchemaFetched(state),
-  messageIsSending: getTopicMessageSending(state),
-  partitions: getPartitionsByTopicName(state, topicName),
-});
-
-const mapDispatchToProps = {
-  fetchTopicMessageSchema,
-  sendTopicMessage,
-};
-
-export default withRouter(
-  connect(mapStateToProps, mapDispatchToProps)(SendMessage)
-);
@@ -1,11 +1,25 @@
 import React from 'react';
-import SendMessage, {
-  Props,
-} from 'components/Topics/Topic/SendMessage/SendMessage';
-import { MessageSchemaSourceEnum } from 'generated-sources';
-import { screen, waitFor } from '@testing-library/react';
+import SendMessage from 'components/Topics/Topic/SendMessage/SendMessage';
+import {
+  screen,
+  waitFor,
+  waitForElementToBeRemoved,
+} from '@testing-library/react';
 import userEvent from '@testing-library/user-event';
+import fetchMock from 'fetch-mock';
+import { createMemoryHistory } from 'history';
 import { render } from 'lib/testHelpers';
+import { Route, Router } from 'react-router';
+import {
+  clusterTopicMessagesPath,
+  clusterTopicSendMessagePath,
+} from 'lib/paths';
+import { store } from 'redux/store';
+import { fetchTopicDetailsAction } from 'redux/actions';
+import { initialState } from 'redux/reducers/topics/reducer';
+import { externalTopicPayload } from 'redux/reducers/topics/__test__/fixtures';
+
+import { testSchema } from './fixtures';
 
 jest.mock('json-schema-faker', () => ({
   generate: () => ({
@@ -16,118 +30,68 @@ jest.mock('json-schema-faker', () => ({
   option: jest.fn(),
 }));
 
-const setupWrapper = (props?: Partial<Props>) => (
-  <SendMessage
-    clusterName="testCluster"
-    topicName="testTopic"
-    fetchTopicMessageSchema={jest.fn()}
-    sendTopicMessage={jest.fn()}
-    messageSchema={{
-      key: {
-        name: 'key',
-        source: MessageSchemaSourceEnum.SCHEMA_REGISTRY,
-        schema: `{
-          "$schema": "https://json-schema.org/draft/2020-12/schema",
-          "$id": "http://example.com/myURI.schema.json",
-          "title": "TestRecord",
-          "type": "object",
-          "additionalProperties": false,
-          "properties": {
-            "f1": {
-              "type": "integer"
-            },
-            "f2": {
-              "type": "string"
-            },
-            "schema": {
-              "type": "string"
-            }
-          }
-        }
-        `,
-      },
-      value: {
-        name: 'value',
-        source: MessageSchemaSourceEnum.SCHEMA_REGISTRY,
-        schema: `{
-          "$schema": "https://json-schema.org/draft/2020-12/schema",
-          "$id": "http://example.com/myURI1.schema.json",
-          "title": "TestRecord",
-          "type": "object",
-          "additionalProperties": false,
-          "properties": {
-            "f1": {
-              "type": "integer"
-            },
-            "f2": {
-              "type": "string"
-            },
-            "schema": {
-              "type": "string"
-            }
-          }
-        }
-        `,
-      },
-    }}
-    schemaIsFetched={false}
-    messageIsSending={false}
-    partitions={[
-      {
-        partition: 0,
-        leader: 2,
-        replicas: [
-          {
-            broker: 2,
-            leader: false,
-            inSync: true,
-          },
-        ],
-        offsetMax: 0,
-        offsetMin: 0,
-      },
-      {
-        partition: 1,
-        leader: 1,
-        replicas: [
-          {
-            broker: 1,
-            leader: false,
-            inSync: true,
-          },
-        ],
-        offsetMax: 0,
-        offsetMin: 0,
-      },
-    ]}
-    {...props}
-  />
-);
+const clusterName = 'testCluster';
+const topicName = externalTopicPayload.name;
+const history = createMemoryHistory();
+
+const renderComponent = () => {
+  history.push(clusterTopicSendMessagePath(clusterName, topicName));
+  render(
+    <Router history={history}>
+      <Route path={clusterTopicSendMessagePath(':clusterName', ':topicName')}>
+        <SendMessage />
+      </Route>
+    </Router>
+  );
+};
 
 describe('SendMessage', () => {
-  it('calls fetchTopicMessageSchema on first render', () => {
-    const fetchTopicMessageSchemaMock = jest.fn();
-    render(
-      setupWrapper({ fetchTopicMessageSchema: fetchTopicMessageSchemaMock })
-    );
-    expect(fetchTopicMessageSchemaMock).toHaveBeenCalledTimes(1);
+  beforeAll(() => {
+    store.dispatch(
+      fetchTopicDetailsAction.success({
+        ...initialState,
+        byName: {
+          [externalTopicPayload.name]: externalTopicPayload,
+        },
+      })
+    );
+  });
+  afterEach(() => {
+    fetchMock.reset();
+  });
+
+  it('fetches schema on first render', () => {
+    const fetchTopicMessageSchemaMock = fetchMock.getOnce(
+      `/api/clusters/${clusterName}/topics/${topicName}/messages/schema`,
+      testSchema
+    );
+    renderComponent();
+    expect(fetchTopicMessageSchemaMock.called()).toBeTruthy();
   });
 
   describe('when schema is fetched', () => {
-    it('calls sendTopicMessage on submit', async () => {
-      jest.mock('../validateMessage', () => jest.fn().mockReturnValue(true));
-      const mockSendTopicMessage = jest.fn();
-      render(
-        setupWrapper({
-          schemaIsFetched: true,
-          sendTopicMessage: mockSendTopicMessage,
-        })
-      );
-      userEvent.selectOptions(screen.getByLabelText('Partition'), '1');
-      await waitFor(async () => {
-        userEvent.click(await screen.findByText('Send'));
-        expect(mockSendTopicMessage).toHaveBeenCalledTimes(1);
-      });
+    beforeEach(() => {
+      fetchMock.getOnce(
+        `/api/clusters/${clusterName}/topics/${topicName}/messages/schema`,
+        testSchema
+      );
+    });
+
+    it('calls sendTopicMessage on submit', async () => {
+      const sendTopicMessageMock = fetchMock.postOnce(
+        `/api/clusters/${clusterName}/topics/${topicName}/messages`,
+        200
+      );
+      renderComponent();
+      await waitForElementToBeRemoved(() => screen.getByRole('progressbar'));
+
+      userEvent.selectOptions(screen.getByLabelText('Partition'), '0');
+      await screen.findByText('Send');
+      userEvent.click(screen.getByText('Send'));
+      await waitFor(() => expect(sendTopicMessageMock.called()).toBeTruthy());
+      expect(history.location.pathname).toEqual(
+        clusterTopicMessagesPath(clusterName, topicName)
      );
    });
  });
});
@@ -3,45 +3,34 @@ import validateMessage from 'components/Topics/Topic/SendMessage/validateMessage
 import { testSchema } from './fixtures';
 
 describe('validateMessage', () => {
-  it('returns true on correct input data', async () => {
-    const mockSetError = jest.fn();
-    expect(
-      await validateMessage(
-        `{
-          "f1": 32,
-          "f2": "multi-state",
-          "schema": "Bedfordshire violet SAS"
-        }`,
-        `{
-          "f1": 21128,
-          "f2": "Health Berkshire Re-engineered",
-          "schema": "Dynamic Greenland Beauty"
-        }`,
-        testSchema,
-        mockSetError
-      )
-    ).toBe(true);
-    expect(mockSetError).toHaveBeenCalledTimes(1);
+  it('returns no errors on correct input data', () => {
+    const key = `{"f1": 32, "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
+    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"}`;
+    expect(validateMessage(key, content, testSchema)).toEqual([]);
   });
 
-  it('returns false on incorrect input data', async () => {
-    const mockSetError = jest.fn();
-    expect(
-      await validateMessage(
-        `{
-          "f1": "32",
-          "f2": "multi-state",
-          "schema": "Bedfordshire violet SAS"
-        }`,
-        `{
-          "f1": "21128",
-          "f2": "Health Berkshire Re-engineered",
-          "schema": "Dynamic Greenland Beauty"
-        }`,
-        testSchema,
-        mockSetError
-      )
-    ).toBe(false);
-    expect(mockSetError).toHaveBeenCalledTimes(3);
+  it('returns errors on invalid input data', () => {
+    const key = `{"f1": "32", "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
+    const content = `{"f1": "21128", "f2": "Health Berkshire", "schema": "Dynamic"}`;
+    expect(validateMessage(key, content, testSchema)).toEqual([
+      'Key/properties/f1/type - must be integer',
+      'Content/properties/f1/type - must be integer',
+    ]);
+  });
+
+  it('returns error on broken key value', () => {
+    const key = `{"f1": "32", "f2": "multi-state", "schema": "Bedfordshire violet SAS"`;
+    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"}`;
+    expect(validateMessage(key, content, testSchema)).toEqual([
+      'Error in parsing the "key" field value',
+    ]);
+  });
+
+  it('returns error on broken content value', () => {
+    const key = `{"f1": 32, "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
+    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"`;
+    expect(validateMessage(key, content, testSchema)).toEqual([
+      'Error in parsing the "content" field value',
+    ]);
   });
 });
@@ -1,71 +1,57 @@
 import { TopicMessageSchema } from 'generated-sources';
 import Ajv from 'ajv/dist/2020';
+import { upperFirst } from 'lodash';
 
-const validateMessage = async (
-  key: string,
-  content: string,
-  messageSchema: TopicMessageSchema | undefined,
-  setSchemaErrors: React.Dispatch<React.SetStateAction<string[]>>
-): Promise<boolean> => {
-  setSchemaErrors([]);
-  const keyAjv = new Ajv();
-  const contentAjv = new Ajv();
-  try {
-    if (messageSchema) {
-      let keyIsValid = false;
-      let contentIsValid = false;
-
-      try {
-        const keySchema = JSON.parse(messageSchema.key.schema);
-        const validateKey = keyAjv.compile(keySchema);
-        if (keySchema.type === 'string') {
-          keyIsValid = true;
-        } else {
-          keyIsValid = validateKey(JSON.parse(key));
-        }
-        if (!keyIsValid) {
-          const errorString: string[] = [];
-          if (validateKey.errors) {
-            validateKey.errors.forEach((e) => {
-              errorString.push(
-                `${e.schemaPath.replace('#', 'Key')} ${e.message}`
-              );
-            });
-            setSchemaErrors((e) => [...e, ...errorString]);
-          }
-        }
-      } catch (err) {
-        setSchemaErrors((e) => [...e, `Key ${err.message}`]);
-      }
-      try {
-        const contentSchema = JSON.parse(messageSchema.value.schema);
-        const validateContent = contentAjv.compile(contentSchema);
-        if (contentSchema.type === 'string') {
-          contentIsValid = true;
-        } else {
-          contentIsValid = validateContent(JSON.parse(content));
-        }
-        if (!contentIsValid) {
-          const errorString: string[] = [];
-          if (validateContent.errors) {
-            validateContent.errors.forEach((e) => {
-              errorString.push(
-                `${e.schemaPath.replace('#', 'Content')} ${e.message}`
-              );
-            });
-            setSchemaErrors((e) => [...e, ...errorString]);
-          }
-        }
-      } catch (err) {
-        setSchemaErrors((e) => [...e, `Content ${err.message}`]);
-      }
-
-      return keyIsValid && contentIsValid;
-    }
-  } catch (err) {
-    setSchemaErrors((e) => [...e, err.message]);
+const validateBySchema = (
+  value: string,
+  schema: string | undefined,
+  type: 'key' | 'content'
+) => {
+  let errors: string[] = [];
+
+  if (!value || !schema) {
+    return errors;
   }
-  return false;
+
+  let parcedSchema;
+  let parsedValue;
+
+  try {
+    parcedSchema = JSON.parse(schema);
+  } catch (e) {
+    return [`Error in parsing the "${type}" field schema`];
+  }
+  if (parcedSchema.type === 'string') {
+    return [];
+  }
+  try {
+    parsedValue = JSON.parse(value);
+  } catch (e) {
+    return [`Error in parsing the "${type}" field value`];
+  }
+  try {
+    const validate = new Ajv().compile(parcedSchema);
+    validate(parsedValue);
+    if (validate.errors) {
+      errors = validate.errors.map(
+        ({ schemaPath, message }) =>
+          `${schemaPath.replace('#', upperFirst(type))} - ${message}`
+      );
+    }
+  } catch (e) {
+    return [`${upperFirst(type)} ${e.message}`];
+  }
+
+  return errors;
 };
+
+const validateMessage = (
+  key: string,
+  content: string,
+  messageSchema: TopicMessageSchema | undefined
+): string[] => [
+  ...validateBySchema(key, messageSchema?.key?.schema, 'key'),
+  ...validateBySchema(content, messageSchema?.value?.schema, 'content'),
+];
 
 export default validateMessage;
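The validator is now synchronous and returns a plain list of error strings instead of pushing into component state, which is what makes the new spec above so compact. Usage sketch (testSchema is the fixture imported in the spec; the input values are illustrative):

import validateMessage from 'components/Topics/Topic/SendMessage/validateMessage';
import { testSchema } from './fixtures';

// An unparsable content value short-circuits into a single parse error.
const errors = validateMessage('{"f1": 32}', 'not valid json', testSchema);
// errors -> ['Error in parsing the "content" field value']
errors.forEach((e) => console.error(e));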
@@ -5,7 +5,7 @@ import EditContainer from 'components/Topics/Topic/Edit/EditContainer';
 import DetailsContainer from 'components/Topics/Topic/Details/DetailsContainer';
 import PageLoader from 'components/common/PageLoader/PageLoader';
 
-import SendMessageContainer from './SendMessage/SendMessageContainer';
+import SendMessage from './SendMessage/SendMessage';
 
 interface RouterParams {
   clusterName: ClusterName;
@@ -41,7 +41,7 @@ const Topic: React.FC<TopicProps> = ({
       <Route
         exact
         path="/ui/clusters/:clusterName/topics/:topicName/message"
-        component={SendMessageContainer}
+        component={SendMessage}
       />
       <Route
         path="/ui/clusters/:clusterName/topics/:topicName"
@@ -211,57 +211,6 @@ describe('Thunks', () => {
       ]);
     });
   });
-
-  describe('sendTopicMessage', () => {
-    it('creates SEND_TOPIC_MESSAGE__FAILURE', async () => {
-      fetchMock.postOnce(
-        `/api/clusters/${clusterName}/topics/${topicName}/messages`,
-        404
-      );
-      try {
-        await store.dispatch(
-          thunks.sendTopicMessage(clusterName, topicName, {
-            key: '{}',
-            content: '{}',
-            headers: undefined,
-            partition: 0,
-          })
-        );
-      } catch (error) {
-        const err = error as Response;
-        expect(err.status).toEqual(404);
-        expect(store.getActions()).toEqual([
-          actions.sendTopicMessageAction.request(),
-          actions.sendTopicMessageAction.failure({
-            alert: {
-              subject: ['topic', topicName].join('-'),
-              title: `Topic Message ${topicName}`,
-              response: err,
-            },
-          }),
-        ]);
-      }
-    });
-
-    it('creates SEND_TOPIC_MESSAGE__SUCCESS', async () => {
-      fetchMock.postOnce(
-        `/api/clusters/${clusterName}/topics/${topicName}/messages`,
-        200
-      );
-      await store.dispatch(
-        thunks.sendTopicMessage(clusterName, topicName, {
-          key: '{}',
-          content: '{}',
-          headers: undefined,
-          partition: 0,
-        })
-      );
-      expect(store.getActions()).toEqual([
-        actions.sendTopicMessageAction.request(),
-        actions.sendTopicMessageAction.success(),
-      ]);
-    });
-  });
 
   describe('increasing partitions count', () => {
     it('calls updateTopicPartitionsCountAction.success on success', async () => {
       fetchMock.patchOnce(
@@ -219,12 +219,6 @@ export const fetchTopicMessageSchemaAction = createAsyncAction(
   { alert?: FailurePayload }
 >();
 
-export const sendTopicMessageAction = createAsyncAction(
-  'SEND_TOPIC_MESSAGE__REQUEST',
-  'SEND_TOPIC_MESSAGE__SUCCESS',
-  'SEND_TOPIC_MESSAGE__FAILURE'
-)<undefined, undefined, { alert?: FailurePayload }>();
-
 export const updateTopicPartitionsCountAction = createAsyncAction(
   'UPDATE_PARTITIONS__REQUEST',
   'UPDATE_PARTITIONS__SUCCESS',
@@ -8,7 +8,6 @@ import {
   TopicUpdate,
   TopicConfig,
   ConsumerGroupsApi,
-  CreateTopicMessage,
   GetTopicsRequest,
 } from 'generated-sources';
 import {
@@ -318,36 +317,6 @@ export const fetchTopicMessageSchema =
     }
   };
 
-export const sendTopicMessage =
-  (
-    clusterName: ClusterName,
-    topicName: TopicName,
-    payload: CreateTopicMessage
-  ): PromiseThunkResult =>
-  async (dispatch) => {
-    dispatch(actions.sendTopicMessageAction.request());
-    try {
-      await messagesApiClient.sendTopicMessages({
-        clusterName,
-        topicName,
-        createTopicMessage: {
-          key: payload.key,
-          content: payload.content,
-          headers: payload.headers,
-          partition: payload.partition,
-        },
-      });
-      dispatch(actions.sendTopicMessageAction.success());
-    } catch (e) {
-      const response = await getResponse(e);
-      const alert: FailurePayload = {
-        subject: ['topic', topicName].join('-'),
-        title: `Topic Message ${topicName}`,
-        response,
-      };
-      dispatch(actions.sendTopicMessageAction.failure({ alert }));
-    }
-  };
 export const updateTopicPartitionsCount =
   (
     clusterName: ClusterName,
@@ -1,4 +1,8 @@
-import { createEntityAdapter, createSlice } from '@reduxjs/toolkit';
+import {
+  createEntityAdapter,
+  createSlice,
+  PayloadAction,
+} from '@reduxjs/toolkit';
 import { UnknownAsyncThunkRejectedWithValueAction } from '@reduxjs/toolkit/dist/matchers';
 import { now } from 'lodash';
 import { Alert, RootState, ServerResponse } from 'redux/interfaces';
@@ -19,6 +23,9 @@ const alertsSlice = createSlice({
   initialState: alertsAdapter.getInitialState(),
   reducers: {
     alertDissmissed: alertsAdapter.removeOne,
+    alertAdded(state, action: PayloadAction<Alert>) {
+      alertsAdapter.upsertOne(state, action.payload);
+    },
   },
   extraReducers: (builder) => {
     builder.addMatcher(
@@ -47,6 +54,6 @@ export const { selectAll } = alertsAdapter.getSelectors<RootState>(
   (state) => state.alerts
 );
 
-export const { alertDissmissed } = alertsSlice.actions;
+export const { alertDissmissed, alertAdded } = alertsSlice.actions;
 
 export default alertsSlice.reducer;
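alertAdded complements the existing matcher-driven alerts: components can now surface purely client-side failures (like the validation errors in SendMessage) through the same alerts list. A usage sketch consistent with the component above (dispatch is assumed to come from useAppDispatch):

import { now } from 'lodash';
import { alertAdded } from 'redux/reducers/alerts/alertsSlice';

dispatch(
  alertAdded({
    id: 'some-scope-createTopicMessageError', // stable id: upsertOne replaces an existing alert
    type: 'error',
    title: 'Validation Error',
    message: 'Key/properties/f1/type - must be integer',
    createdAt: now(),
  })
);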
@@ -26,8 +26,6 @@ const getTopicCreationStatus = createLeagcyFetchingSelector('POST_TOPIC');
 const getTopicUpdateStatus = createLeagcyFetchingSelector('PATCH_TOPIC');
 const getTopicMessageSchemaFetchingStatus =
   createLeagcyFetchingSelector('GET_TOPIC_SCHEMA');
-const getTopicMessageSendingStatus =
-  createLeagcyFetchingSelector('SEND_TOPIC_MESSAGE');
 const getPartitionsCountIncreaseStatus =
   createLeagcyFetchingSelector('UPDATE_PARTITIONS');
 const getReplicationFactorUpdateStatus = createLeagcyFetchingSelector(
@@ -80,11 +78,6 @@ export const getTopicMessageSchemaFetched = createSelector(
   (status) => status === 'fetched'
 );
 
-export const getTopicMessageSending = createSelector(
-  getTopicMessageSendingStatus,
-  (status) => status === 'fetching'
-);
-
 export const getTopicPartitionsCountIncreased = createSelector(
   getPartitionsCountIncreaseStatus,
   (status) => status === 'fetched'