From 7eaae31345f7efc5df2f82c50d27f4abfd869877 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Wed, 26 Jul 2023 12:17:55 +0400
Subject: [PATCH 01/31] Error message from SR propagation (#4058)
Co-authored-by: iliax
---
.../com/provectus/kafka/ui/service/SchemaRegistryService.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
index cae29ba93d..fd7efff606 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java
@@ -125,7 +125,7 @@ public class SchemaRegistryService {
.onErrorMap(WebClientResponseException.Conflict.class,
th -> new SchemaCompatibilityException())
.onErrorMap(WebClientResponseException.UnprocessableEntity.class,
- th -> new ValidationException("Invalid schema"))
+ th -> new ValidationException("Invalid schema. Error from registry: " + th.getResponseBodyAsString()))
.then(getLatestSchemaVersionBySubject(cluster, subject));
}
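For context, a minimal standalone sketch (not part of the patch; the DTO and exception names mirror the project's but are assumptions here) of the WebClient error-mapping pattern this change relies on: a 422 from the Schema Registry is converted into a ValidationException that carries the registry's own response body.

import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Mono;

class RegistryErrorMappingSketch {

  // Hypothetical domain exception standing in for the project's ValidationException.
  static class ValidationException extends RuntimeException {
    ValidationException(String message) {
      super(message);
    }
  }

  Mono<String> registerSchema(WebClient registryClient, String subject, String schemaJson) {
    return registryClient.post()
        .uri("/subjects/{subject}/versions", subject)
        .bodyValue(schemaJson)
        .retrieve()
        .bodyToMono(String.class)
        // 422 Unprocessable Entity -> keep the registry's explanation in the error message
        .onErrorMap(WebClientResponseException.UnprocessableEntity.class,
            th -> new ValidationException("Invalid schema. Error from registry: " + th.getResponseBodyAsString()));
  }
}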
From 0b99f745b01abd734738fc187afcec4589168d8f Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Mon, 31 Jul 2023 16:01:36 +0700
Subject: [PATCH 02/31] BE: Migrate deprecated spring components (#4056)
Co-authored-by: Ilya Kuramshin
---
.../config/auth/BasicAuthSecurityConfig.java | 22 +++++++-------
.../auth/DisabledAuthSecurityConfig.java | 10 ++++---
.../ui/config/auth/LdapSecurityConfig.java | 25 +++++++---------
.../ui/config/auth/OAuthSecurityConfig.java | 29 ++++++++-----------
4 files changed, 39 insertions(+), 47 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/BasicAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/BasicAuthSecurityConfig.java
index ae98dfdd7a..36ccf7212a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/BasicAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/BasicAuthSecurityConfig.java
@@ -7,12 +7,10 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
-import org.springframework.security.config.web.server.SecurityWebFiltersOrder;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;
import org.springframework.security.web.server.authentication.RedirectServerAuthenticationSuccessHandler;
import org.springframework.security.web.server.authentication.logout.RedirectServerLogoutSuccessHandler;
-import org.springframework.security.web.server.ui.LogoutPageGeneratingWebFilter;
@Configuration
@EnableWebFluxSecurity
@@ -33,15 +31,17 @@ public class BasicAuthSecurityConfig extends AbstractAuthSecurityConfig {
final var logoutSuccessHandler = new RedirectServerLogoutSuccessHandler();
logoutSuccessHandler.setLogoutSuccessUrl(URI.create(LOGOUT_URL));
- return http
- .addFilterAfter(new LogoutPageGeneratingWebFilter(), SecurityWebFiltersOrder.REACTOR_CONTEXT)
- .csrf().disable()
- .authorizeExchange()
- .pathMatchers(AUTH_WHITELIST).permitAll()
- .anyExchange().authenticated()
- .and().formLogin().loginPage(LOGIN_URL).authenticationSuccessHandler(authHandler)
- .and().logout().logoutSuccessHandler(logoutSuccessHandler)
- .and().build();
+
+ return http.authorizeExchange(spec -> spec
+ .pathMatchers(AUTH_WHITELIST)
+ .permitAll()
+ .anyExchange()
+ .authenticated()
+ )
+ .formLogin(spec -> spec.loginPage(LOGIN_URL).authenticationSuccessHandler(authHandler))
+ .logout(spec -> spec.logoutSuccessHandler(logoutSuccessHandler))
+ .csrf(ServerHttpSecurity.CsrfSpec::disable)
+ .build();
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/DisabledAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/DisabledAuthSecurityConfig.java
index 4b1cc9a933..39d56a05bf 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/DisabledAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/DisabledAuthSecurityConfig.java
@@ -27,10 +27,12 @@ public class DisabledAuthSecurityConfig extends AbstractAuthSecurityConfig {
System.exit(1);
}
log.warn("Authentication is disabled. Access will be unrestricted.");
- return http.authorizeExchange()
- .anyExchange().permitAll()
- .and()
- .csrf().disable()
+
+ return http.authorizeExchange(spec -> spec
+ .anyExchange()
+ .permitAll()
+ )
+ .csrf(ServerHttpSecurity.CsrfSpec::disable)
.build();
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
index b7750d528b..20ce2aaa58 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
@@ -24,6 +24,7 @@ import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.ProviderManager;
import org.springframework.security.authentication.ReactiveAuthenticationManager;
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
+import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.GrantedAuthority;
@@ -126,21 +127,15 @@ public class LdapSecurityConfig {
log.info("Active Directory support for LDAP has been enabled.");
}
- return http
- .authorizeExchange()
- .pathMatchers(AUTH_WHITELIST)
- .permitAll()
- .anyExchange()
- .authenticated()
-
- .and()
- .formLogin()
-
- .and()
- .logout()
-
- .and()
- .csrf().disable()
+ return http.authorizeExchange(spec -> spec
+ .pathMatchers(AUTH_WHITELIST)
+ .permitAll()
+ .anyExchange()
+ .authenticated()
+ )
+ .formLogin(Customizer.withDefaults())
+ .logout(Customizer.withDefaults())
+ .csrf(ServerHttpSecurity.CsrfSpec::disable)
.build();
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
index d170a7338c..0e7a228e48 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
@@ -12,10 +12,11 @@ import lombok.extern.log4j.Log4j2;
import org.jetbrains.annotations.Nullable;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties;
-import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientPropertiesRegistrationAdapter;
+import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientPropertiesMapper;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.method.configuration.EnableReactiveMethodSecurity;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
@@ -49,21 +50,15 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
public SecurityWebFilterChain configure(ServerHttpSecurity http, OAuthLogoutSuccessHandler logoutHandler) {
log.info("Configuring OAUTH2 authentication.");
- return http.authorizeExchange()
- .pathMatchers(AUTH_WHITELIST)
- .permitAll()
- .anyExchange()
- .authenticated()
-
- .and()
- .oauth2Login()
-
- .and()
- .logout()
- .logoutSuccessHandler(logoutHandler)
-
- .and()
- .csrf().disable()
+ return http.authorizeExchange(spec -> spec
+ .pathMatchers(AUTH_WHITELIST)
+ .permitAll()
+ .anyExchange()
+ .authenticated()
+ )
+ .oauth2Login(Customizer.withDefaults())
+ .logout(spec -> spec.logoutSuccessHandler(logoutHandler))
+ .csrf(ServerHttpSecurity.CsrfSpec::disable)
.build();
}
@@ -103,7 +98,7 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
public InMemoryReactiveClientRegistrationRepository clientRegistrationRepository() {
final OAuth2ClientProperties props = OAuthPropertiesConverter.convertProperties(properties);
final List<ClientRegistration> registrations =
- new ArrayList<>(OAuth2ClientPropertiesRegistrationAdapter.getClientRegistrations(props).values());
+ new ArrayList<>(new OAuth2ClientPropertiesMapper(props).asClientRegistrations().values());
return new InMemoryReactiveClientRegistrationRepository(registrations);
}
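As a reference for the migration, a self-contained sketch of the lambda-based DSL that replaces the deprecated and()-chained style (the class name and whitelisted path below are illustrative, not part of the patch):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.web.server.SecurityWebFilterChain;

@Configuration
@EnableWebFluxSecurity
class ExampleSecurityConfig {

  @Bean
  SecurityWebFilterChain filterChain(ServerHttpSecurity http) {
    return http
        .authorizeExchange(spec -> spec
            .pathMatchers("/actuator/health").permitAll()   // whitelist path is illustrative
            .anyExchange().authenticated())
        .formLogin(Customizer.withDefaults())               // replaces .formLogin().and()
        .logout(Customizer.withDefaults())                   // replaces .logout().and()
        .csrf(ServerHttpSecurity.CsrfSpec::disable)          // replaces .csrf().disable()
        .build();
  }
}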
From 2db89593a78cb6478889cd455c9e20b23e2e3a92 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 12:30:05 +0400
Subject: [PATCH 03/31] BE: Implement Hex serde (#4074)
Co-authored-by: iliax
---
.../kafka/ui/serdes/SerdesInitializer.java | 2 +
.../kafka/ui/serdes/builtin/HexSerde.java | 80 ++++++++++++++++++
.../kafka/ui/serdes/builtin/HexSerdeTest.java | 84 +++++++++++++++++++
3 files changed, 166 insertions(+)
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
create mode 100644 kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
index ac3c2241cf..c833d9fc72 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
@@ -12,6 +12,7 @@ import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.builtin.AvroEmbeddedSerde;
import com.provectus.kafka.ui.serdes.builtin.Base64Serde;
import com.provectus.kafka.ui.serdes.builtin.ConsumerOffsetsSerde;
+import com.provectus.kafka.ui.serdes.builtin.HexSerde;
import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde;
@@ -47,6 +48,7 @@ public class SerdesInitializer {
.put(UInt64Serde.name(), UInt64Serde.class)
.put(AvroEmbeddedSerde.name(), AvroEmbeddedSerde.class)
.put(Base64Serde.name(), Base64Serde.class)
+ .put(HexSerde.name(), HexSerde.class)
.put(UuidBinarySerde.name(), UuidBinarySerde.class)
.build(),
new CustomSerdeLoader()
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
new file mode 100644
index 0000000000..cf1a6b793f
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
@@ -0,0 +1,80 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.HexFormat;
+import java.util.Map;
+import java.util.Optional;
+
+public class HexSerde implements BuiltInSerde {
+
+ private HexFormat deserializeHexFormat;
+
+ public static String name() {
+ return "Hex";
+ }
+
+ @Override
+ public void configure(PropertyResolver serdeProperties,
+ PropertyResolver kafkaClusterProperties,
+ PropertyResolver globalProperties) {
+ String delim = serdeProperties.getProperty("delimiter", String.class).orElse(" ");
+ boolean uppercase = serdeProperties.getProperty("uppercase", Boolean.class).orElse(true);
+ deserializeHexFormat = HexFormat.ofDelimiter(delim);
+ if (uppercase) {
+ deserializeHexFormat = deserializeHexFormat.withUpperCase();
+ }
+ }
+
+ @Override
+ public Optional<String> getDescription() {
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<SchemaDescription> getSchema(String topic, Target type) {
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean canDeserialize(String topic, Target type) {
+ return true;
+ }
+
+ @Override
+ public boolean canSerialize(String topic, Target type) {
+ return true;
+ }
+
+ @Override
+ public Serializer serializer(String topic, Target type) {
+ return input -> {
+ input = input.trim();
+ // it is a hack to provide the ability to send an empty array as a key/value
+ if (input.length() == 0) {
+ return new byte[] {};
+ }
+ return HexFormat.of().parseHex(prepareInputForParse(input));
+ };
+ }
+
+ // removing most-common delimiters and prefixes
+ private static String prepareInputForParse(String input) {
+ return input
+ .replaceAll(" ", "")
+ .replaceAll("#", "")
+ .replaceAll(":", "");
+ }
+
+ @Override
+ public Deserializer deserializer(String topic, Target type) {
+ return (headers, data) ->
+ new DeserializeResult(
+ deserializeHexFormat.formatHex(data),
+ DeserializeResult.Type.STRING,
+ Map.of()
+ );
+ }
+}
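To make the serde's behaviour concrete, a small standalone sketch of java.util.HexFormat with the same defaults HexSerde configures (space delimiter, upper-case output); this is not part of the patch:

import java.util.HexFormat;

class HexFormatDemo {
  public static void main(String[] args) {
    // Same defaults the serde uses: space delimiter, upper-case output.
    HexFormat fmt = HexFormat.ofDelimiter(" ").withUpperCase();
    byte[] data = "hello world".getBytes();
    System.out.println(fmt.formatHex(data));           // 68 65 6C 6C 6F 20 77 6F 72 6C 64

    // Parsing accepts undelimited input once delimiters/prefixes are stripped,
    // mirroring prepareInputForParse() above.
    byte[] parsed = HexFormat.of().parseHex("68656c6c6f20776f726c64".replaceAll("[ #:]", ""));
    System.out.println(new String(parsed));            // hello world
  }
}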
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
new file mode 100644
index 0000000000..a318279f56
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
@@ -0,0 +1,84 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.EnumSource;
+
+public class HexSerdeTest {
+
+ private static final byte[] TEST_BYTES = "hello world".getBytes();
+ private static final String TEST_BYTES_HEX_ENCODED = "68 65 6C 6C 6F 20 77 6F 72 6C 64";
+
+ private Serde hexSerde;
+
+ @BeforeEach
+ void init() {
+ hexSerde = new HexSerde();
+ hexSerde.configure(
+ PropertyResolverImpl.empty(),
+ PropertyResolverImpl.empty(),
+ PropertyResolverImpl.empty()
+ );
+ }
+
+
+ @ParameterizedTest
+ @CsvSource({
+ "68656C6C6F20776F726C64", // uppercase
+ "68656c6c6f20776f726c64", // lowercase
+ "68:65:6c:6c:6f:20:77:6f:72:6c:64", // ':' delim
+ "68 65 6C 6C 6F 20 77 6F 72 6C 64", // space delim, UC
+ "68 65 6c 6c 6f 20 77 6f 72 6c 64", // space delim, LC
+ "#68 #65 #6C #6C #6F #20 #77 #6F #72 #6C #64" // '#' prefix, space delim
+ })
+ void serializesInputAsHexString(String hexString) {
+ for (Serde.Target type : Serde.Target.values()) {
+ var serializer = hexSerde.serializer("anyTopic", type);
+ byte[] bytes = serializer.serialize(hexString);
+ assertThat(bytes).isEqualTo(TEST_BYTES);
+ }
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void serializesEmptyStringAsEmptyBytesArray(Serde.Target type) {
+ var serializer = hexSerde.serializer("anyTopic", type);
+ byte[] bytes = serializer.serialize("");
+ assertThat(bytes).isEqualTo(new byte[] {});
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void deserializesDataAsHexBytes(Serde.Target type) {
+ var deserializer = hexSerde.deserializer("anyTopic", type);
+ var result = deserializer.deserialize(new RecordHeadersImpl(), TEST_BYTES);
+ assertThat(result.getResult()).isEqualTo(TEST_BYTES_HEX_ENCODED);
+ assertThat(result.getType()).isEqualTo(DeserializeResult.Type.STRING);
+ assertThat(result.getAdditionalProperties()).isEmpty();
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void getSchemaReturnsEmpty(Serde.Target type) {
+ assertThat(hexSerde.getSchema("anyTopic", type)).isEmpty();
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void canDeserializeReturnsTrueForAllInputs(Serde.Target type) {
+ assertThat(hexSerde.canDeserialize("anyTopic", type)).isTrue();
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void canSerializeReturnsTrueForAllInput(Serde.Target type) {
+ assertThat(hexSerde.canSerialize("anyTopic", type)).isTrue();
+ }
+}
From 476cbfb691a0b19f9227ecb12b2e5ce4b5a7156c Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 15:47:03 +0400
Subject: [PATCH 04/31] BE: Serde tiny improvements and fixes (#4063)
Co-authored-by: iliax
---
.../ui/serdes/builtin/AvroEmbeddedSerde.java | 6 ---
.../kafka/ui/serdes/builtin/Base64Serde.java | 32 ++++--------
.../kafka/ui/serdes/builtin/Int64Serde.java | 8 +--
.../kafka/ui/serdes/builtin/UInt64Serde.java | 12 ++---
.../ui/serdes/builtin/UuidBinarySerde.java | 50 ++++++++-----------
5 files changed, 36 insertions(+), 72 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
index 73a1ed5484..68ea03cfa6 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
@@ -19,12 +19,6 @@ public class AvroEmbeddedSerde implements BuiltInSerde {
return "Avro (Embedded)";
}
- @Override
- public void configure(PropertyResolver serdeProperties,
- PropertyResolver kafkaClusterProperties,
- PropertyResolver globalProperties) {
- }
-
@Override
public Optional<String> getDescription() {
return Optional.empty();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java
index 14861ade6a..e27215f688 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java
@@ -1,8 +1,6 @@
package com.provectus.kafka.ui.serdes.builtin;
import com.provectus.kafka.ui.serde.api.DeserializeResult;
-import com.provectus.kafka.ui.serde.api.PropertyResolver;
-import com.provectus.kafka.ui.serde.api.RecordHeaders;
import com.provectus.kafka.ui.serde.api.SchemaDescription;
import com.provectus.kafka.ui.serdes.BuiltInSerde;
import java.util.Base64;
@@ -16,12 +14,6 @@ public class Base64Serde implements BuiltInSerde {
return "Base64";
}
- @Override
- public void configure(PropertyResolver serdeProperties,
- PropertyResolver kafkaClusterProperties,
- PropertyResolver globalProperties) {
- }
-
@Override
public Optional<String> getDescription() {
return Optional.empty();
@@ -44,31 +36,25 @@ public class Base64Serde implements BuiltInSerde {
@Override
public Serializer serializer(String topic, Target type) {
- return new Serializer() {
- @Override
- public byte[] serialize(String input) {
- input = input.trim();
- // it is actually a hack to provide the ability to send an empty array as a key/value
- if (input.length() == 0) {
- return new byte[]{};
- }
- return Base64.getDecoder().decode(input);
+ var decoder = Base64.getDecoder();
+ return inputString -> {
+ inputString = inputString.trim();
+ // it is actually a hack to provide the ability to send an empty array as a key/value
+ if (inputString.length() == 0) {
+ return new byte[] {};
}
+ return decoder.decode(inputString);
};
}
@Override
public Deserializer deserializer(String topic, Target type) {
var encoder = Base64.getEncoder();
- return new Deserializer() {
- @Override
- public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
- return new DeserializeResult(
+ return (headers, data) ->
+ new DeserializeResult(
encoder.encodeToString(data),
DeserializeResult.Type.STRING,
Map.of()
);
- }
- };
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java
index 54741291b2..a897f941e0 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java
@@ -55,15 +55,11 @@ public class Int64Serde implements BuiltInSerde {
@Override
public Deserializer deserializer(String topic, Target type) {
- return new Deserializer() {
- @Override
- public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
- return new DeserializeResult(
+ return (headers, data) ->
+ new DeserializeResult(
String.valueOf(Longs.fromByteArray(data)),
DeserializeResult.Type.JSON,
Map.of()
);
- }
- };
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java
index 9f6d1421dd..de7c8a6a63 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java
@@ -1,10 +1,8 @@
package com.provectus.kafka.ui.serdes.builtin;
import com.google.common.primitives.Longs;
-import com.google.common.primitives.UnsignedInteger;
import com.google.common.primitives.UnsignedLong;
import com.provectus.kafka.ui.serde.api.DeserializeResult;
-import com.provectus.kafka.ui.serde.api.RecordHeaders;
import com.provectus.kafka.ui.serde.api.SchemaDescription;
import com.provectus.kafka.ui.serdes.BuiltInSerde;
import java.util.Map;
@@ -32,7 +30,7 @@ public class UInt64Serde implements BuiltInSerde {
+ " \"minimum\" : 0, "
+ " \"maximum\" : %s "
+ "}",
- UnsignedInteger.MAX_VALUE
+ UnsignedLong.MAX_VALUE
),
Map.of()
)
@@ -56,15 +54,11 @@ public class UInt64Serde implements BuiltInSerde {
@Override
public Deserializer deserializer(String topic, Target type) {
- return new Deserializer() {
- @Override
- public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
- return new DeserializeResult(
+ return (headers, data) ->
+ new DeserializeResult(
UnsignedLong.fromLongBits(Longs.fromByteArray(data)).toString(),
DeserializeResult.Type.JSON,
Map.of()
);
- }
- };
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java
index c5ae606b93..0be17e757f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java
@@ -50,41 +50,35 @@ public class UuidBinarySerde implements BuiltInSerde {
@Override
public Serializer serializer(String topic, Target type) {
- return new Serializer() {
- @Override
- public byte[] serialize(String input) {
- UUID uuid = UUID.fromString(input);
- ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
- if (mostSignificantBitsFirst) {
- bb.putLong(uuid.getMostSignificantBits());
- bb.putLong(uuid.getLeastSignificantBits());
- } else {
- bb.putLong(uuid.getLeastSignificantBits());
- bb.putLong(uuid.getMostSignificantBits());
- }
- return bb.array();
+ return input -> {
+ UUID uuid = UUID.fromString(input);
+ ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
+ if (mostSignificantBitsFirst) {
+ bb.putLong(uuid.getMostSignificantBits());
+ bb.putLong(uuid.getLeastSignificantBits());
+ } else {
+ bb.putLong(uuid.getLeastSignificantBits());
+ bb.putLong(uuid.getMostSignificantBits());
}
+ return bb.array();
};
}
@Override
public Deserializer deserializer(String topic, Target type) {
- return new Deserializer() {
- @Override
- public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
- if (data.length != 16) {
- throw new ValidationException("UUID data should be 16 bytes, but it is " + data.length);
- }
- ByteBuffer bb = ByteBuffer.wrap(data);
- long msb = bb.getLong();
- long lsb = bb.getLong();
- UUID uuid = mostSignificantBitsFirst ? new UUID(msb, lsb) : new UUID(lsb, msb);
- return new DeserializeResult(
- uuid.toString(),
- DeserializeResult.Type.STRING,
- Map.of()
- );
+ return (headers, data) -> {
+ if (data.length != 16) {
+ throw new ValidationException("UUID data should be 16 bytes, but it is " + data.length);
}
+ ByteBuffer bb = ByteBuffer.wrap(data);
+ long msb = bb.getLong();
+ long lsb = bb.getLong();
+ UUID uuid = mostSignificantBitsFirst ? new UUID(msb, lsb) : new UUID(lsb, msb);
+ return new DeserializeResult(
+ uuid.toString(),
+ DeserializeResult.Type.STRING,
+ Map.of()
+ );
};
}
}
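One functional fix in this patch is the UInt64Serde schema bound: the advertised maximum was UnsignedInteger.MAX_VALUE (2^32 - 1) instead of UnsignedLong.MAX_VALUE (2^64 - 1). A quick standalone sketch of the two Guava constants, for illustration only:

import com.google.common.primitives.UnsignedInteger;
import com.google.common.primitives.UnsignedLong;

class UnsignedBoundsDemo {
  public static void main(String[] args) {
    System.out.println(UnsignedInteger.MAX_VALUE);  // 4294967295 (2^32 - 1, the old, wrong bound)
    System.out.println(UnsignedLong.MAX_VALUE);     // 18446744073709551615 (2^64 - 1, correct for UInt64)
  }
}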
From 895d27a306f0a69e0f6c665be121edc62c61c730 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 16:07:46 +0400
Subject: [PATCH 05/31] Unavailable connects suppressing (#4061)
1. Suppress connect errors when returning the list of all connectors
2. Minor refactoring of KafkaConnectService.getAllConnectors
---
.../kafka/ui/mapper/KafkaConnectMapper.java | 2 +-
.../kafka/ui/service/KafkaConnectService.java | 56 +++++++------------
.../integration/odd/ConnectorsExporter.java | 2 +-
.../kafka/ui/AbstractIntegrationTest.java | 2 +
.../odd/ConnectorsExporterTest.java | 2 +-
5 files changed, 26 insertions(+), 38 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java
index 468c86ecbe..a41054de6c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java
@@ -34,7 +34,7 @@ public interface KafkaConnectMapper {
com.provectus.kafka.ui.connect.model.ConnectorPluginConfigValidationResponse
connectorPluginConfigValidationResponse);
- default FullConnectorInfoDTO fullConnectorInfoFromTuple(InternalConnectInfo connectInfo) {
+ default FullConnectorInfoDTO fullConnectorInfo(InternalConnectInfo connectInfo) {
ConnectorDTO connector = connectInfo.getConnector();
List<TaskDTO> tasks = connectInfo.getTasks();
int failedTasksCount = (int) tasks.stream()
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
index 390348707d..605d5cab20 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
@@ -28,7 +28,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;
-import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
@@ -39,7 +38,6 @@ import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.util.function.Tuples;
@Service
@Slf4j
@@ -61,39 +59,22 @@ public class KafkaConnectService {
public Flux<FullConnectorInfoDTO> getAllConnectors(final KafkaCluster cluster,
@Nullable final String search) {
return getConnects(cluster)
- .flatMap(connect -> getConnectorNames(cluster, connect.getName()).map(cn -> Tuples.of(connect.getName(), cn)))
- .flatMap(pair -> getConnector(cluster, pair.getT1(), pair.getT2()))
- .flatMap(connector ->
- getConnectorConfig(cluster, connector.getConnect(), connector.getName())
- .map(config -> InternalConnectInfo.builder()
- .connector(connector)
- .config(config)
- .build()
- )
- )
- .flatMap(connectInfo -> {
- ConnectorDTO connector = connectInfo.getConnector();
- return getConnectorTasks(cluster, connector.getConnect(), connector.getName())
- .collectList()
- .map(tasks -> InternalConnectInfo.builder()
- .connector(connector)
- .config(connectInfo.getConfig())
- .tasks(tasks)
- .build()
- );
- })
- .flatMap(connectInfo -> {
- ConnectorDTO connector = connectInfo.getConnector();
- return getConnectorTopics(cluster, connector.getConnect(), connector.getName())
- .map(ct -> InternalConnectInfo.builder()
- .connector(connector)
- .config(connectInfo.getConfig())
- .tasks(connectInfo.getTasks())
- .topics(ct.getTopics())
- .build()
- );
- })
- .map(kafkaConnectMapper::fullConnectorInfoFromTuple)
+ .flatMap(connect ->
+ getConnectorNamesWithErrorsSuppress(cluster, connect.getName())
+ .flatMap(connectorName ->
+ Mono.zip(
+ getConnector(cluster, connect.getName(), connectorName),
+ getConnectorConfig(cluster, connect.getName(), connectorName),
+ getConnectorTasks(cluster, connect.getName(), connectorName).collectList(),
+ getConnectorTopics(cluster, connect.getName(), connectorName)
+ ).map(tuple ->
+ InternalConnectInfo.builder()
+ .connector(tuple.getT1())
+ .config(tuple.getT2())
+ .tasks(tuple.getT3())
+ .topics(tuple.getT4().getTopics())
+ .build())))
+ .map(kafkaConnectMapper::fullConnectorInfo)
.filter(matchesSearchTerm(search));
}
@@ -132,6 +113,11 @@ public class KafkaConnectService {
.flatMapMany(Flux::fromIterable);
}
+ // returns empty flux if there was an error communicating with Connect
+ public Flux<String> getConnectorNamesWithErrorsSuppress(KafkaCluster cluster, String connectName) {
+ return getConnectorNames(cluster, connectName).onErrorComplete();
+ }
+
@SneakyThrows
private List<String> parseConnectorsNamesStringToList(String json) {
return objectMapper.readValue(json, new TypeReference<>() {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java
index 2fad00bbfa..2259d5ebb1 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java
@@ -25,7 +25,7 @@ class ConnectorsExporter {
Flux<DataEntityList> export(KafkaCluster cluster) {
return kafkaConnectService.getConnects(cluster)
- .flatMap(connect -> kafkaConnectService.getConnectorNames(cluster, connect.getName())
+ .flatMap(connect -> kafkaConnectService.getConnectorNamesWithErrorsSuppress(cluster, connect.getName())
.flatMap(connectorName -> kafkaConnectService.getConnector(cluster, connect.getName(), connectorName))
.flatMap(connectorDTO ->
kafkaConnectService.getConnectorTopics(cluster, connect.getName(), connectorDTO.getName())
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
index 314abf914b..d185e64671 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
@@ -77,6 +77,8 @@ public abstract class AbstractIntegrationTest {
System.setProperty("kafka.clusters.0.kafkaConnect.0.userName", "kafka-connect");
System.setProperty("kafka.clusters.0.kafkaConnect.0.password", "kafka-connect");
System.setProperty("kafka.clusters.0.kafkaConnect.0.address", kafkaConnect.getTarget());
+ System.setProperty("kafka.clusters.0.kafkaConnect.1.name", "notavailable");
+ System.setProperty("kafka.clusters.0.kafkaConnect.1.address", "http://notavailable:6666");
System.setProperty("kafka.clusters.0.masking.0.type", "REPLACE");
System.setProperty("kafka.clusters.0.masking.0.replacement", "***");
System.setProperty("kafka.clusters.0.masking.0.topicValuesPattern", "masking-test-.*");
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java
index 20c0d96ad1..e06a16388e 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java
@@ -61,7 +61,7 @@ class ConnectorsExporterTest {
when(kafkaConnectService.getConnects(CLUSTER))
.thenReturn(Flux.just(connect));
- when(kafkaConnectService.getConnectorNames(CLUSTER, connect.getName()))
+ when(kafkaConnectService.getConnectorNamesWithErrorsSuppress(CLUSTER, connect.getName()))
.thenReturn(Flux.just(sinkConnector.getName(), sourceConnector.getName()));
when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sinkConnector.getName()))
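The suppression above hinges on Reactor's onErrorComplete(), which turns an error signal into normal completion, so a single unreachable Connect cluster contributes nothing instead of failing the merged listing. A minimal standalone sketch (connector names are illustrative):

import reactor.core.publisher.Flux;

class OnErrorCompleteDemo {
  public static void main(String[] args) {
    Flux<String> healthy = Flux.just("connector-a", "connector-b");
    Flux<String> broken = Flux.<String>error(new RuntimeException("connect unavailable"));

    // Without suppression the whole merged stream fails; with onErrorComplete the
    // failing source simply completes empty (mirrors getConnectorNamesWithErrorsSuppress).
    Flux.merge(healthy, broken.onErrorComplete())
        .collectList()
        .subscribe(System.out::println);  // [connector-a, connector-b]
  }
}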
From 1cd303a90b497546db1eccc664e26a277257923c Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 16:23:19 +0400
Subject: [PATCH 06/31] BE: Impl ACL endpoints for consumer, producer, stream
apps (#3783)
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
---
.../kafka/ui/controller/AclsController.java | 54 +++++
.../kafka/ui/service/acl/AclsService.java | 185 ++++++++++++++-
.../kafka/ui/service/acl/AclsServiceTest.java | 220 +++++++++++++++++-
.../main/resources/swagger/kafka-ui-api.yaml | 128 +++++++++-
4 files changed, 576 insertions(+), 11 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
index 1de8f4d71b..71700e3f7b 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
@@ -2,6 +2,9 @@ package com.provectus.kafka.ui.controller;
import com.provectus.kafka.ui.api.AclsApi;
import com.provectus.kafka.ui.mapper.ClusterMapper;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
@@ -123,4 +126,55 @@ public class AclsController extends AbstractController implements AclsApi {
.doOnEach(sig -> auditService.audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
+
+ @Override
+ public Mono<ResponseEntity<Void>> createConsumerAcl(String clusterName,
+ Mono<CreateConsumerAclDTO> createConsumerAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createConsumerAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createConsumerAclDto)
+ .flatMap(req -> aclsService.createConsumerAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> createProducerAcl(String clusterName,
+ Mono<CreateProducerAclDTO> createProducerAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createProducerAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createProducerAclDto)
+ .flatMap(req -> aclsService.createProducerAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> createStreamAppAcl(String clusterName,
+ Mono<CreateStreamAppAclDTO> createStreamAppAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createStreamAppAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createStreamAppAclDto)
+ .flatMap(req -> aclsService.createStreamAppAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
index c2ab1b5eb4..a621ce99cc 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
@@ -1,16 +1,44 @@
package com.provectus.kafka.ui.service.acl;
+import static org.apache.kafka.common.acl.AclOperation.ALL;
+import static org.apache.kafka.common.acl.AclOperation.CREATE;
+import static org.apache.kafka.common.acl.AclOperation.DESCRIBE;
+import static org.apache.kafka.common.acl.AclOperation.IDEMPOTENT_WRITE;
+import static org.apache.kafka.common.acl.AclOperation.READ;
+import static org.apache.kafka.common.acl.AclOperation.WRITE;
+import static org.apache.kafka.common.acl.AclPermissionType.ALLOW;
+import static org.apache.kafka.common.resource.PatternType.LITERAL;
+import static org.apache.kafka.common.resource.PatternType.PREFIXED;
+import static org.apache.kafka.common.resource.ResourceType.CLUSTER;
+import static org.apache.kafka.common.resource.ResourceType.GROUP;
+import static org.apache.kafka.common.resource.ResourceType.TOPIC;
+import static org.apache.kafka.common.resource.ResourceType.TRANSACTIONAL_ID;
+
import com.google.common.collect.Sets;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import java.util.ArrayList;
+import java.util.Collection;
import java.util.Comparator;
import java.util.List;
+import java.util.Optional;
import java.util.Set;
+import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.resource.Resource;
+import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.apache.kafka.common.resource.ResourceType;
import org.springframework.stereotype.Service;
+import org.springframework.util.CollectionUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -22,11 +50,14 @@ public class AclsService {
private final AdminClientService adminClientService;
public Mono<Void> createAcl(KafkaCluster cluster, AclBinding aclBinding) {
- var aclString = AclCsv.createAclString(aclBinding);
- log.info("CREATING ACL: [{}]", aclString);
return adminClientService.get(cluster)
- .flatMap(ac -> ac.createAcls(List.of(aclBinding)))
- .doOnSuccess(v -> log.info("ACL CREATED: [{}]", aclString));
+ .flatMap(ac -> createAclsWithLogging(ac, List.of(aclBinding)));
+ }
+
+ private Mono<Void> createAclsWithLogging(ReactiveAdminClient ac, Collection<AclBinding> bindings) {
+ bindings.forEach(b -> log.info("CREATING ACL: [{}]", AclCsv.createAclString(b)));
+ return ac.createAcls(bindings)
+ .doOnSuccess(v -> bindings.forEach(b -> log.info("ACL CREATED: [{}]", AclCsv.createAclString(b))));
}
public Mono<Void> deleteAcl(KafkaCluster cluster, AclBinding aclBinding) {
@@ -92,4 +123,150 @@ public class AclsService {
}
}
+ // creates allow bindings for resources, either by prefix or by an explicit list of names
+ private List<AclBinding> createAllowBindings(ResourceType resourceType,
+ List<AclOperation> opsToAllow,
+ String principal,
+ String host,
+ @Nullable String resourcePrefix,
+ @Nullable Collection<String> resourceNames) {
+ List<AclBinding> bindings = new ArrayList<>();
+ if (resourcePrefix != null) {
+ for (var op : opsToAllow) {
+ bindings.add(
+ new AclBinding(
+ new ResourcePattern(resourceType, resourcePrefix, PREFIXED),
+ new AccessControlEntry(principal, host, op, ALLOW)));
+ }
+ }
+ if (!CollectionUtils.isEmpty(resourceNames)) {
+ resourceNames.stream()
+ .distinct()
+ .forEach(resource ->
+ opsToAllow.forEach(op ->
+ bindings.add(
+ new AclBinding(
+ new ResourcePattern(resourceType, resource, LITERAL),
+ new AccessControlEntry(principal, host, op, ALLOW)))));
+ }
+ return bindings;
+ }
+
+ public Mono<Void> createConsumerAcl(KafkaCluster cluster, CreateConsumerAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createConsumerBindings(request)))
+ .then();
+ }
+
+ //Read, Describe on topics, Read on consumerGroups
+ private List<AclBinding> createConsumerBindings(CreateConsumerAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(TOPIC,
+ List.of(READ, DESCRIBE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTopicsPrefix(),
+ request.getTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ GROUP,
+ List.of(READ),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getConsumerGroupsPrefix(),
+ request.getConsumerGroups()));
+ return bindings;
+ }
+
+ public Mono<Void> createProducerAcl(KafkaCluster cluster, CreateProducerAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createProducerBindings(request)))
+ .then();
+ }
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled
+ private List<AclBinding> createProducerBindings(CreateProducerAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(WRITE, DESCRIBE, CREATE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTopicsPrefix(),
+ request.getTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ TRANSACTIONAL_ID,
+ List.of(WRITE, DESCRIBE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTransactionsIdPrefix(),
+ Optional.ofNullable(request.getTransactionalId()).map(List::of).orElse(null)));
+
+ if (Boolean.TRUE.equals(request.getIdempotent())) {
+ bindings.addAll(
+ createAllowBindings(
+ CLUSTER,
+ List.of(IDEMPOTENT_WRITE),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ List.of(Resource.CLUSTER_NAME))); // cluster name is a const string in ACL api
+ }
+ return bindings;
+ }
+
+ public Mono<Void> createStreamAppAcl(KafkaCluster cluster, CreateStreamAppAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createStreamAppBindings(request)))
+ .then();
+ }
+
+ // Read on input topics, Write on output topics
+ // ALL on applicationId-prefixed Groups and Topics
+ private List<AclBinding> createStreamAppBindings(CreateStreamAppAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(READ),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ request.getInputTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(WRITE),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ request.getOutputTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ GROUP,
+ List.of(ALL),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getApplicationId(),
+ null));
+
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(ALL),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getApplicationId(),
+ null));
+ return bindings;
+ }
+
}
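For reference, roughly the bindings that createConsumerBindings expands to for one literal topic and one consumer group, written directly against Kafka's ACL types (topic, group, principal and host values are illustrative, not from the patch):

import java.util.List;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;

class ConsumerAclSketch {
  // READ and DESCRIBE on the topic, READ on the consumer group.
  static List<AclBinding> consumerBindings(String principal, String host) {
    return List.of(
        new AclBinding(new ResourcePattern(ResourceType.TOPIC, "orders", PatternType.LITERAL),
            new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)),
        new AclBinding(new ResourcePattern(ResourceType.TOPIC, "orders", PatternType.LITERAL),
            new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)),
        new AclBinding(new ResourcePattern(ResourceType.GROUP, "orders-consumer", PatternType.LITERAL),
            new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)));
  }
}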
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
index 5791bb2041..340aad7091 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
@@ -4,16 +4,21 @@ import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.util.Collection;
import java.util.List;
+import java.util.UUID;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.Resource;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
@@ -53,12 +58,12 @@ class AclsServiceTest {
when(adminClientMock.listAcls(ResourcePatternFilter.ANY))
.thenReturn(Mono.just(List.of(existingBinding1, existingBinding2)));
- ArgumentCaptor> createdCaptor = ArgumentCaptor.forClass(Collection.class);
- when(adminClientMock.createAcls((Collection) createdCaptor.capture()))
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
.thenReturn(Mono.empty());
- ArgumentCaptor> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
- when(adminClientMock.deleteAcls((Collection) deletedCaptor.capture()))
+ ArgumentCaptor<Collection<AclBinding>> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.deleteAcls(deletedCaptor.capture()))
.thenReturn(Mono.empty());
aclsService.syncAclWithAclCsv(
@@ -68,15 +73,218 @@ class AclsServiceTest {
+ "User:test3,GROUP,PREFIXED,groupNew,DESCRIBE,DENY,localhost"
).block();
- Collection createdBindings = (Collection) createdCaptor.getValue();
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
assertThat(createdBindings)
.hasSize(1)
.contains(newBindingToBeAdded);
- Collection deletedBindings = (Collection) deletedCaptor.getValue();
+ Collection<AclBinding> deletedBindings = deletedCaptor.getValue();
assertThat(deletedBindings)
.hasSize(1)
.contains(existingBinding2);
}
+
+ @Test
+ void createsConsumerDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createConsumerAcl(
+ CLUSTER,
+ new CreateConsumerAclDTO()
+ .principal(principal)
+ .host(host)
+ .consumerGroups(List.of("cg1", "cg2"))
+ .topics(List.of("t1", "t2"))
+ ).block();
+
+ //Read, Describe on topics, Read on consumerGroups
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(6)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cg1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cg2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)));
+ }
+
+ @Test
+ void createsConsumerDependantAclsWhenTopicsAndGroupsSpecifiedByPrefix() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createConsumerAcl(
+ CLUSTER,
+ new CreateConsumerAclDTO()
+ .principal(principal)
+ .host(host)
+ .consumerGroupsPrefix("cgPref")
+ .topicsPrefix("topicPref")
+ ).block();
+
+ //Read, Describe on topics, Read on consumerGroups
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(3)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cgPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)));
+ }
+
+ @Test
+ void createsProducerDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createProducerAcl(
+ CLUSTER,
+ new CreateProducerAclDTO()
+ .principal(principal)
+ .host(host)
+ .topics(List.of("t1"))
+ .idempotent(true)
+ .transactionalId("txId1")
+ ).block();
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled (true)
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(6)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txId1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txId1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.CLUSTER, Resource.CLUSTER_NAME, PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.IDEMPOTENT_WRITE, AclPermissionType.ALLOW)));
+ }
+
+
+ @Test
+ void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createProducerAcl(
+ CLUSTER,
+ new CreateProducerAclDTO()
+ .principal(principal)
+ .host(host)
+ .topicsPrefix("topicPref")
+ .transactionsIdPrefix("txIdPref")
+ .idempotent(false)
+ ).block();
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled (false)
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(5)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)));
+ }
+
+
+ @Test
+ void createsStreamAppDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createStreamAppAcl(
+ CLUSTER,
+ new CreateStreamAppAclDTO()
+ .principal(principal)
+ .host(host)
+ .inputTopics(List.of("t1"))
+ .outputTopics(List.of("t2", "t3"))
+ .applicationId("appId1")
+ ).block();
+
+ // Read on input topics, Write on output topics
+ // ALL on applicationId-prefixed Groups and Topics
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(5)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t3", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "appId1", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.ALL, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "appId1", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.ALL, AclPermissionType.ALLOW)));
+ }
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index f9ed233bc1..9484948b67 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -1868,6 +1868,69 @@ paths:
404:
description: Acl not found
+ /api/clusters/{clusterName}/acl/consumer:
+ post:
+ tags:
+ - Acls
+ summary: createConsumerAcl
+ operationId: createConsumerAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateConsumerAcl'
+ responses:
+ 200:
+ description: OK
+
+ /api/clusters/{clusterName}/acl/producer:
+ post:
+ tags:
+ - Acls
+ summary: createProducerAcl
+ operationId: createProducerAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateProducerAcl'
+ responses:
+ 200:
+ description: OK
+
+ /api/clusters/{clusterName}/acl/streamApp:
+ post:
+ tags:
+ - Acls
+ summary: createStreamAppAcl
+ operationId: createStreamAppAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateStreamAppAcl'
+ responses:
+ 200:
+ description: OK
+
/api/authorization:
get:
tags:
@@ -3551,7 +3614,7 @@ components:
principal:
type: string
host:
- type: string # "*" if acl can be applied to any resource of given type
+ type: string
operation:
type: string
enum:
@@ -3575,6 +3638,69 @@ components:
- ALLOW
- DENY
+ CreateConsumerAcl:
+ type: object
+ required: [principal, host]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ topics:
+ type: array
+ items:
+ type: string
+ topicsPrefix:
+ type: string
+ consumerGroups:
+ type: array
+ items:
+ type: string
+ consumerGroupsPrefix:
+ type: string
+
+ CreateProducerAcl:
+ type: object
+ required: [principal, host]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ topics:
+ type: array
+ items:
+ type: string
+ topicsPrefix:
+ type: string
+ transactionalId:
+ type: string
+ transactionsIdPrefix:
+ type: string
+ idempotent:
+ type: boolean
+ default: false
+
+ CreateStreamAppAcl:
+ type: object
+ required: [principal, host, applicationId, inputTopics, outputTopics]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ inputTopics:
+ type: array
+ items:
+ type: string
+ outputTopics:
+ type: array
+ items:
+ type: string
+ applicationId:
+ nullable: false
+ type: string
+
KafkaAclResourceType:
type: string
enum:
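
For illustration, a minimal client-side sketch of calling the new streamApp endpoint with Spring's reactive WebClient. This is a sketch, not part of the patch: the base URL, cluster name and principal are hypothetical; the JSON fields mirror the CreateStreamAppAcl schema above, and the expected outcome is the five bindings asserted in the test earlier in this patch.

    import org.springframework.http.MediaType;
    import org.springframework.web.reactive.function.client.WebClient;

    class StreamAppAclRequestSketch {
      public static void main(String[] args) {
        // hypothetical kafka-ui instance and cluster name
        WebClient client = WebClient.create("http://localhost:8080");
        String body = """
            {
              "principal": "User:stream-app",
              "host": "*",
              "applicationId": "appId1",
              "inputTopics": ["t1"],
              "outputTopics": ["t2", "t3"]
            }""";
        client.post()
            .uri("/api/clusters/{clusterName}/acl/streamApp", "local")
            .contentType(MediaType.APPLICATION_JSON)
            .bodyValue(body)
            .retrieve()
            .toBodilessEntity()
            .block(); // 200 OK per the contract above
      }
    }
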
From b2c3fcc321c84a5c85f4ca4a96a3c7b647f0d087 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 16:42:00 +0400
Subject: [PATCH 07/31] BE: ACL enablement check fixes (#4034)
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
---
.../kafka/ui/service/ReactiveAdminClient.java | 67 ++++++++++---------
1 file changed, 37 insertions(+), 30 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index 0b6f16a223..9de908efa7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -15,6 +15,8 @@ import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.util.KafkaVersion;
import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
import java.io.Closeable;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -129,38 +131,41 @@ public class ReactiveAdminClient implements Closeable {
Set features,
boolean topicDeletionIsAllowed) {
- private static Mono extract(AdminClient ac, int controllerId) {
- return loadBrokersConfig(ac, List.of(controllerId))
- .map(map -> map.isEmpty() ? List.of() : map.get(controllerId))
- .flatMap(configs -> {
- String version = "1.0-UNKNOWN";
- boolean topicDeletionEnabled = true;
- for (ConfigEntry entry : configs) {
- if (entry.name().contains("inter.broker.protocol.version")) {
- version = entry.value();
- }
- if (entry.name().equals("delete.topic.enable")) {
- topicDeletionEnabled = Boolean.parseBoolean(entry.value());
- }
- }
- var builder = ConfigRelatedInfo.builder()
- .version(version)
- .topicDeletionIsAllowed(topicDeletionEnabled);
- return SupportedFeature.forVersion(ac, version)
- .map(features -> builder.features(features).build());
- });
+ static final Duration UPDATE_DURATION = Duration.of(1, ChronoUnit.HOURS);
+
+ private static Mono extract(AdminClient ac) {
+ return ReactiveAdminClient.describeClusterImpl(ac, Set.of())
+ .flatMap(desc -> {
+ // choosing node from which we will get configs (starting with controller)
+ var targetNodeId = Optional.ofNullable(desc.controller)
+ .map(Node::id)
+ .orElse(desc.getNodes().iterator().next().id());
+ return loadBrokersConfig(ac, List.of(targetNodeId))
+ .map(map -> map.isEmpty() ? List.of() : map.get(targetNodeId))
+ .flatMap(configs -> {
+ String version = "1.0-UNKNOWN";
+ boolean topicDeletionEnabled = true;
+ for (ConfigEntry entry : configs) {
+ if (entry.name().contains("inter.broker.protocol.version")) {
+ version = entry.value();
+ }
+ if (entry.name().equals("delete.topic.enable")) {
+ topicDeletionEnabled = Boolean.parseBoolean(entry.value());
+ }
+ }
+ final String finalVersion = version;
+ final boolean finalTopicDeletionEnabled = topicDeletionEnabled;
+ return SupportedFeature.forVersion(ac, version)
+ .map(features -> new ConfigRelatedInfo(finalVersion, features, finalTopicDeletionEnabled));
+ });
+ })
+ .cache(UPDATE_DURATION);
}
}
public static Mono create(AdminClient adminClient) {
- return describeClusterImpl(adminClient, Set.of())
- // choosing node from which we will get configs (starting with controller)
- .flatMap(descr -> descr.controller != null
- ? Mono.just(descr.controller)
- : Mono.justOrEmpty(descr.nodes.stream().findFirst())
- )
- .flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
- .map(info -> new ReactiveAdminClient(adminClient, info));
+ Mono configRelatedInfoMono = ConfigRelatedInfo.extract(adminClient);
+ return configRelatedInfoMono.map(info -> new ReactiveAdminClient(adminClient, configRelatedInfoMono, info));
}
@@ -170,7 +175,7 @@ public class ReactiveAdminClient implements Closeable {
.doOnError(th -> !(th instanceof SecurityDisabledException)
&& !(th instanceof InvalidRequestException)
&& !(th instanceof UnsupportedVersionException),
- th -> log.warn("Error checking if security enabled", th))
+ th -> log.debug("Error checking if security enabled", th))
.onErrorReturn(false);
}
@@ -202,6 +207,8 @@ public class ReactiveAdminClient implements Closeable {
@Getter(AccessLevel.PACKAGE) // visible for testing
private final AdminClient client;
+ private final Mono configRelatedInfoMono;
+
private volatile ConfigRelatedInfo configRelatedInfo;
public Set getClusterFeatures() {
@@ -228,7 +235,7 @@ public class ReactiveAdminClient implements Closeable {
if (controller == null) {
return Mono.empty();
}
- return ConfigRelatedInfo.extract(client, controller.id())
+ return configRelatedInfoMono
.doOnNext(info -> this.configRelatedInfo = info)
.then();
}
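
The rewritten extract() leans on Reactor's Mono.cache(ttl). A small standalone sketch (not project code) of that operator's behavior, under the same one-hour TTL:

    import java.time.Duration;
    import java.util.concurrent.atomic.AtomicInteger;
    import reactor.core.publisher.Mono;

    class MonoCacheTtlSketch {
      public static void main(String[] args) {
        AtomicInteger calls = new AtomicInteger();
        // analogous to ConfigRelatedInfo.extract(ac).cache(UPDATE_DURATION)
        Mono<Integer> cached = Mono.fromSupplier(calls::incrementAndGet)
            .cache(Duration.ofHours(1));
        System.out.println(cached.block()); // 1 -> supplier executed on first subscription
        System.out.println(cached.block()); // 1 -> re-subscription within the TTL hits the cache
      }
    }

This is why updateInternalStats() can simply re-subscribe to configRelatedInfoMono: within the TTL window it receives the cached value, and after expiry the describe/config calls run again.
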
From c96a0c6be517e9e03cc6f0ab0e33c5ce7791e56c Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 18:19:03 +0400
Subject: [PATCH 08/31] ISSUE-4052: Polling metrics (#4069)
Co-authored-by: iliax
---
kafka-ui-api/pom.xml | 5 ++
.../auth/AbstractAuthSecurityConfig.java | 1 +
.../kafka/ui/emitter/AbstractEmitter.java | 25 ++----
.../ui/emitter/BackwardRecordEmitter.java | 16 ++--
.../kafka/ui/emitter/ConsumingStats.java | 17 +---
.../kafka/ui/emitter/EmptyPollsCounter.java | 4 +-
.../kafka/ui/emitter/EnhancedConsumer.java | 82 +++++++++++++++++++
.../ui/emitter/ForwardRecordEmitter.java | 12 ++-
.../kafka/ui/emitter/MessagesProcessing.java | 8 +-
.../kafka/ui/emitter/PolledRecords.java | 48 +++++++++++
.../kafka/ui/emitter/PollingThrottler.java | 12 +--
.../kafka/ui/emitter/TailingEmitter.java | 10 +--
.../ui/service/ConsumerGroupService.java | 19 +++--
.../service/analyze/TopicAnalysisService.java | 13 +--
.../kafka/ui/util/ApplicationMetrics.java | 82 +++++++++++++++++++
.../kafka/ui/util/ConsumerRecordsUtil.java | 29 -------
.../src/main/resources/application.yml | 2 +-
.../kafka/ui/service/RecordEmitterTest.java | 14 ++--
18 files changed, 275 insertions(+), 124 deletions(-)
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EnhancedConsumer.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PolledRecords.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ApplicationMetrics.java
delete mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordsUtil.java
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml
index 829f7dc5c5..558a446e94 100644
--- a/kafka-ui-api/pom.xml
+++ b/kafka-ui-api/pom.xml
@@ -114,6 +114,11 @@
json
${org.json.version}
+
+ io.micrometer
+ micrometer-registry-prometheus
+ runtime
+
org.springframework.boot
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java
index 7fc9f82780..0c70b79716 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java
@@ -13,6 +13,7 @@ abstract class AbstractAuthSecurityConfig {
"/resources/**",
"/actuator/health/**",
"/actuator/info",
+ "/actuator/prometheus",
"/auth",
"/login",
"/logout",
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java
index 9ea0526bac..66bb24f0e1 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java
@@ -2,37 +2,28 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import java.time.Duration;
-import java.time.Instant;
-import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
public abstract class AbstractEmitter {
private final MessagesProcessing messagesProcessing;
- private final PollingThrottler throttler;
protected final PollingSettings pollingSettings;
protected AbstractEmitter(MessagesProcessing messagesProcessing, PollingSettings pollingSettings) {
this.messagesProcessing = messagesProcessing;
this.pollingSettings = pollingSettings;
- this.throttler = pollingSettings.getPollingThrottler();
}
- protected ConsumerRecords poll(
- FluxSink sink, Consumer consumer) {
+ protected PolledRecords poll(
+ FluxSink sink, EnhancedConsumer consumer) {
return poll(sink, consumer, pollingSettings.getPollTimeout());
}
- protected ConsumerRecords poll(
- FluxSink sink, Consumer consumer, Duration timeout) {
- Instant start = Instant.now();
- ConsumerRecords records = consumer.poll(timeout);
- Instant finish = Instant.now();
- int polledBytes = sendConsuming(sink, records, Duration.between(start, finish).toMillis());
- throttler.throttleAfterPoll(polledBytes);
+ protected PolledRecords poll(FluxSink sink, EnhancedConsumer consumer, Duration timeout) {
+ var records = consumer.pollEnhanced(timeout);
+ sendConsuming(sink, records);
return records;
}
@@ -49,10 +40,8 @@ public abstract class AbstractEmitter {
messagesProcessing.sendPhase(sink, name);
}
- protected int sendConsuming(FluxSink sink,
- ConsumerRecords records,
- long elapsed) {
- return messagesProcessing.sentConsumingInfo(sink, records, elapsed);
+ protected void sendConsuming(FluxSink sink, PolledRecords records) {
+ messagesProcessing.sentConsumingInfo(sink, records);
}
protected void sendFinishStatsAndCompleteSink(FluxSink sink) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java
index ccd24e8568..368b7cee5b 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java
@@ -9,9 +9,7 @@ import java.util.List;
import java.util.TreeMap;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.utils.Bytes;
@@ -22,12 +20,12 @@ public class BackwardRecordEmitter
extends AbstractEmitter
implements java.util.function.Consumer> {
- private final Supplier> consumerSupplier;
+ private final Supplier consumerSupplier;
private final ConsumerPosition consumerPosition;
private final int messagesPerPage;
public BackwardRecordEmitter(
- Supplier> consumerSupplier,
+ Supplier consumerSupplier,
ConsumerPosition consumerPosition,
int messagesPerPage,
MessagesProcessing messagesProcessing,
@@ -41,7 +39,7 @@ public class BackwardRecordEmitter
@Override
public void accept(FluxSink sink) {
log.debug("Starting backward polling for {}", consumerPosition);
- try (KafkaConsumer consumer = consumerSupplier.get()) {
+ try (EnhancedConsumer consumer = consumerSupplier.get()) {
sendPhase(sink, "Created consumer");
var seekOperations = SeekOperations.create(consumer, consumerPosition);
@@ -91,7 +89,7 @@ public class BackwardRecordEmitter
TopicPartition tp,
long fromOffset,
long toOffset,
- Consumer consumer,
+ EnhancedConsumer consumer,
FluxSink sink
) {
consumer.assign(Collections.singleton(tp));
@@ -101,13 +99,13 @@ public class BackwardRecordEmitter
var recordsToSend = new ArrayList>();
- EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
+ EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
while (!sink.isCancelled()
&& !sendLimitReached()
&& recordsToSend.size() < desiredMsgsToPoll
&& !emptyPolls.noDataEmptyPollsReached()) {
var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());
- emptyPolls.count(polledRecords);
+ emptyPolls.count(polledRecords.count());
log.debug("{} records polled from {}", polledRecords.count(), tp);
@@ -115,7 +113,7 @@ public class BackwardRecordEmitter
.filter(r -> r.offset() < toOffset)
.toList();
- if (!polledRecords.isEmpty() && filteredRecords.isEmpty()) {
+ if (polledRecords.count() > 0 && filteredRecords.isEmpty()) {
// we already read all messages in target offsets interval
break;
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java
index 0e002f36a4..0440f7d37f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java
@@ -2,9 +2,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageConsumingDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.util.ConsumerRecordsUtil;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
class ConsumingStats {
@@ -13,23 +10,17 @@ class ConsumingStats {
private int records = 0;
private long elapsed = 0;
- /**
- * returns bytes polled.
- */
- int sendConsumingEvt(FluxSink sink,
- ConsumerRecords polledRecords,
- long elapsed,
+ void sendConsumingEvt(FluxSink sink,
+ PolledRecords polledRecords,
int filterApplyErrors) {
- int polledBytes = ConsumerRecordsUtil.calculatePolledSize(polledRecords);
- bytes += polledBytes;
+ bytes += polledRecords.bytes();
this.records += polledRecords.count();
- this.elapsed += elapsed;
+ this.elapsed += polledRecords.elapsed().toMillis();
sink.next(
new TopicMessageEventDTO()
.type(TopicMessageEventDTO.TypeEnum.CONSUMING)
.consuming(createConsumingStats(sink, filterApplyErrors))
);
- return polledBytes;
}
void sendFinishEvent(FluxSink sink, int filterApplyErrors) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java
index 3bc2ca38c1..cd035f9c65 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java
@@ -17,8 +17,8 @@ public class EmptyPollsCounter {
this.maxEmptyPolls = maxEmptyPolls;
}
- public void count(ConsumerRecords, ?> polled) {
- emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
+ public void count(int polledCount) {
+ emptyPolls = polledCount == 0 ? emptyPolls + 1 : 0;
}
public boolean noDataEmptyPollsReached() {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EnhancedConsumer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EnhancedConsumer.java
new file mode 100644
index 0000000000..be849c7888
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EnhancedConsumer.java
@@ -0,0 +1,82 @@
+package com.provectus.kafka.ui.emitter;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Stopwatch;
+import com.provectus.kafka.ui.util.ApplicationMetrics;
+import java.time.Duration;
+import java.util.Collection;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
+import lombok.experimental.Delegate;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.serialization.BytesDeserializer;
+import org.apache.kafka.common.utils.Bytes;
+
+
+public class EnhancedConsumer extends KafkaConsumer {
+
+ private final PollingThrottler throttler;
+ private final ApplicationMetrics metrics;
+ private String pollingTopic;
+
+ public EnhancedConsumer(Properties properties,
+ PollingThrottler throttler,
+ ApplicationMetrics metrics) {
+ super(properties, new BytesDeserializer(), new BytesDeserializer());
+ this.throttler = throttler;
+ this.metrics = metrics;
+ metrics.activeConsumers().incrementAndGet();
+ }
+
+ public PolledRecords pollEnhanced(Duration dur) {
+ var stopwatch = Stopwatch.createStarted();
+ ConsumerRecords polled = poll(dur);
+ PolledRecords polledEnhanced = PolledRecords.create(polled, stopwatch.elapsed());
+ var throttled = throttler.throttleAfterPoll(polledEnhanced.bytes());
+ metrics.meterPolledRecords(pollingTopic, polledEnhanced, throttled);
+ return polledEnhanced;
+ }
+
+ @Override
+ public void assign(Collection partitions) {
+ super.assign(partitions);
+ Set assignedTopics = partitions.stream().map(TopicPartition::topic).collect(Collectors.toSet());
+ Preconditions.checkState(assignedTopics.size() == 1);
+ this.pollingTopic = assignedTopics.iterator().next();
+ }
+
+ @Override
+ public void subscribe(Pattern pattern) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void subscribe(Collection topics) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void subscribe(Pattern pattern, ConsumerRebalanceListener listener) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void subscribe(Collection topics, ConsumerRebalanceListener listener) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void close(Duration timeout) {
+ metrics.activeConsumers().decrementAndGet();
+ super.close(timeout);
+ }
+
+}
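
A hypothetical usage sketch for the new consumer (topic name and bootstrap address are made up): it must be assign()-ed to exactly one topic before polling, and pollEnhanced() returns PolledRecords carrying count, bytes and elapsed time that feed the polling metrics added below.

    import com.provectus.kafka.ui.emitter.EnhancedConsumer;
    import com.provectus.kafka.ui.emitter.PolledRecords;
    import com.provectus.kafka.ui.emitter.PollingThrottler;
    import com.provectus.kafka.ui.util.ApplicationMetrics;
    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.common.TopicPartition;

    class EnhancedConsumerSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "kui-sketch");
        try (var consumer = new EnhancedConsumer(props, PollingThrottler.noop(), ApplicationMetrics.noop())) {
          consumer.assign(List.of(new TopicPartition("some-topic", 0))); // a single topic is enforced
          PolledRecords polled = consumer.pollEnhanced(Duration.ofSeconds(3));
          System.out.printf("%d records, %d bytes in %d ms%n",
              polled.count(), polled.bytes(), polled.elapsed().toMillis());
        }
      }
    }
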
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java
index 8f85e0a8ba..44c6b63038 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java
@@ -5,8 +5,6 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
@@ -16,11 +14,11 @@ public class ForwardRecordEmitter
extends AbstractEmitter
implements java.util.function.Consumer> {
- private final Supplier> consumerSupplier;
+ private final Supplier consumerSupplier;
private final ConsumerPosition position;
public ForwardRecordEmitter(
- Supplier> consumerSupplier,
+ Supplier consumerSupplier,
ConsumerPosition position,
MessagesProcessing messagesProcessing,
PollingSettings pollingSettings) {
@@ -32,7 +30,7 @@ public class ForwardRecordEmitter
@Override
public void accept(FluxSink sink) {
log.debug("Starting forward polling for {}", position);
- try (KafkaConsumer consumer = consumerSupplier.get()) {
+ try (EnhancedConsumer consumer = consumerSupplier.get()) {
sendPhase(sink, "Assigning partitions");
var seekOperations = SeekOperations.create(consumer, position);
seekOperations.assignAndSeekNonEmptyPartitions();
@@ -44,8 +42,8 @@ public class ForwardRecordEmitter
&& !emptyPolls.noDataEmptyPollsReached()) {
sendPhase(sink, "Polling");
- ConsumerRecords records = poll(sink, consumer);
- emptyPolls.count(records);
+ var records = poll(sink, consumer);
+ emptyPolls.count(records.count());
log.debug("{} records polled", records.count());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java
index b6d23bc90d..d2d938cda6 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java
@@ -8,7 +8,6 @@ import java.util.function.Predicate;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
@@ -54,13 +53,10 @@ public class MessagesProcessing {
}
}
- int sentConsumingInfo(FluxSink sink,
- ConsumerRecords polledRecords,
- long elapsed) {
+ void sentConsumingInfo(FluxSink sink, PolledRecords polledRecords) {
if (!sink.isCancelled()) {
- return consumingStats.sendConsumingEvt(sink, polledRecords, elapsed, filterApplyErrors);
+ consumingStats.sendConsumingEvt(sink, polledRecords, filterApplyErrors);
}
- return 0;
}
void sendFinishEvent(FluxSink sink) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PolledRecords.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PolledRecords.java
new file mode 100644
index 0000000000..bc6bd95d5f
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PolledRecords.java
@@ -0,0 +1,48 @@
+package com.provectus.kafka.ui.emitter;
+
+import java.time.Duration;
+import java.util.Iterator;
+import java.util.List;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.header.Header;
+import org.apache.kafka.common.utils.Bytes;
+
+public record PolledRecords(int count,
+ int bytes,
+ Duration elapsed,
+ ConsumerRecords records) implements Iterable> {
+
+ static PolledRecords create(ConsumerRecords polled, Duration pollDuration) {
+ return new PolledRecords(
+ polled.count(),
+ calculatePolledRecSize(polled),
+ pollDuration,
+ polled
+ );
+ }
+
+ public List> records(TopicPartition tp) {
+ return records.records(tp);
+ }
+
+ @Override
+ public Iterator> iterator() {
+ return records.iterator();
+ }
+
+ private static int calculatePolledRecSize(Iterable> recs) {
+ int polledBytes = 0;
+ for (ConsumerRecord rec : recs) {
+ for (Header header : rec.headers()) {
+ polledBytes +=
+ (header.key() != null ? header.key().getBytes().length : 0)
+ + (header.value() != null ? header.value().length : 0);
+ }
+ polledBytes += rec.key() == null ? 0 : rec.serializedKeySize();
+ polledBytes += rec.value() == null ? 0 : rec.serializedValueSize();
+ }
+ return polledBytes;
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java
index 15dfcd91c9..4cde50cdef 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java
@@ -3,11 +3,8 @@ package com.provectus.kafka.ui.emitter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.RateLimiter;
import com.provectus.kafka.ui.config.ClustersProperties;
-import com.provectus.kafka.ui.util.ConsumerRecordsUtil;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.utils.Bytes;
@Slf4j
public class PollingThrottler {
@@ -36,18 +33,17 @@ public class PollingThrottler {
return new PollingThrottler("noop", RateLimiter.create(Long.MAX_VALUE));
}
- public void throttleAfterPoll(int polledBytes) {
+ //returns true if polling was throttled
+ public boolean throttleAfterPoll(int polledBytes) {
if (polledBytes > 0) {
double sleptSeconds = rateLimiter.acquire(polledBytes);
if (!throttled && sleptSeconds > 0.0) {
throttled = true;
log.debug("Polling throttling enabled for cluster {} at rate {} bytes/sec", clusterName, rateLimiter.getRate());
+ return true;
}
}
- }
-
- public void throttleAfterPoll(ConsumerRecords polled) {
- throttleAfterPoll(ConsumerRecordsUtil.calculatePolledSize(polled));
+ return false;
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java
index b17d69a09b..c3e6dcc8ff 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java
@@ -5,19 +5,17 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import java.util.HashMap;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.InterruptException;
-import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
@Slf4j
public class TailingEmitter extends AbstractEmitter
implements java.util.function.Consumer> {
- private final Supplier> consumerSupplier;
+ private final Supplier consumerSupplier;
private final ConsumerPosition consumerPosition;
- public TailingEmitter(Supplier> consumerSupplier,
+ public TailingEmitter(Supplier consumerSupplier,
ConsumerPosition consumerPosition,
MessagesProcessing messagesProcessing,
PollingSettings pollingSettings) {
@@ -29,7 +27,7 @@ public class TailingEmitter extends AbstractEmitter
@Override
public void accept(FluxSink sink) {
log.debug("Starting tailing polling for {}", consumerPosition);
- try (KafkaConsumer consumer = consumerSupplier.get()) {
+ try (EnhancedConsumer consumer = consumerSupplier.get()) {
assignAndSeek(consumer);
while (!sink.isCancelled()) {
sendPhase(sink, "Polling");
@@ -47,7 +45,7 @@ public class TailingEmitter extends AbstractEmitter
}
}
- private void assignAndSeek(KafkaConsumer consumer) {
+ private void assignAndSeek(EnhancedConsumer consumer) {
var seekOperations = SeekOperations.create(consumer, consumerPosition);
var seekOffsets = new HashMap<>(seekOperations.getEndOffsets()); // defaulting offsets to topic end
seekOffsets.putAll(seekOperations.getOffsetsForSeek()); // this will only set non-empty partitions
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index 17291ffe60..9764664d6a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -2,12 +2,14 @@ package com.provectus.kafka.ui.service;
import com.google.common.collect.Streams;
import com.google.common.collect.Table;
+import com.provectus.kafka.ui.emitter.EnhancedConsumer;
import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
import com.provectus.kafka.ui.model.InternalConsumerGroup;
import com.provectus.kafka.ui.model.InternalTopicConsumerGroup;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.SortOrderDTO;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.util.ApplicationMetrics;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.util.ArrayList;
import java.util.Collection;
@@ -26,11 +28,8 @@ import org.apache.kafka.clients.admin.ConsumerGroupDescription;
import org.apache.kafka.clients.admin.ConsumerGroupListing;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.ConsumerGroupState;
import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.serialization.BytesDeserializer;
-import org.apache.kafka.common.utils.Bytes;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
@@ -248,25 +247,27 @@ public class ConsumerGroupService {
.flatMap(adminClient -> adminClient.deleteConsumerGroups(List.of(groupId)));
}
- public KafkaConsumer createConsumer(KafkaCluster cluster) {
+ public EnhancedConsumer createConsumer(KafkaCluster cluster) {
return createConsumer(cluster, Map.of());
}
- public KafkaConsumer createConsumer(KafkaCluster cluster,
- Map properties) {
+ public EnhancedConsumer createConsumer(KafkaCluster cluster,
+ Map properties) {
Properties props = new Properties();
SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props);
props.putAll(cluster.getProperties());
props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-ui-consumer-" + System.currentTimeMillis());
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
- props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
- props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, "false");
props.putAll(properties);
- return new KafkaConsumer<>(props);
+ return new EnhancedConsumer(
+ props,
+ cluster.getPollingSettings().getPollingThrottler(),
+ ApplicationMetrics.forCluster(cluster)
+ );
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java
index 7ea7a16598..e17d2cf84a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java
@@ -1,9 +1,9 @@
package com.provectus.kafka.ui.service.analyze;
import com.provectus.kafka.ui.emitter.EmptyPollsCounter;
+import com.provectus.kafka.ui.emitter.EnhancedConsumer;
import com.provectus.kafka.ui.emitter.OffsetsInfo;
import com.provectus.kafka.ui.emitter.PollingSettings;
-import com.provectus.kafka.ui.emitter.PollingThrottler;
import com.provectus.kafka.ui.exception.TopicAnalysisException;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.TopicAnalysisDTO;
@@ -20,11 +20,9 @@ import java.util.stream.IntStream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.utils.Bytes;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
@@ -84,12 +82,11 @@ public class TopicAnalysisService {
private final int partitionsCnt;
private final long approxNumberOfMsgs;
private final EmptyPollsCounter emptyPollsCounter;
- private final PollingThrottler throttler;
private final TopicAnalysisStats totalStats = new TopicAnalysisStats();
private final Map partitionStats = new HashMap<>();
- private final KafkaConsumer consumer;
+ private final EnhancedConsumer consumer;
AnalysisTask(KafkaCluster cluster, TopicIdentity topicId, int partitionsCnt,
long approxNumberOfMsgs, PollingSettings pollingSettings) {
@@ -104,7 +101,6 @@ public class TopicAnalysisService {
ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "100000"
)
);
- this.throttler = pollingSettings.getPollingThrottler();
this.emptyPollsCounter = pollingSettings.createEmptyPollsCounter();
}
@@ -127,9 +123,8 @@ public class TopicAnalysisService {
var offsetsInfo = new OffsetsInfo(consumer, topicId.topicName);
while (!offsetsInfo.assignedPartitionsFullyPolled() && !emptyPollsCounter.noDataEmptyPollsReached()) {
- var polled = consumer.poll(Duration.ofSeconds(3));
- throttler.throttleAfterPoll(polled);
- emptyPollsCounter.count(polled);
+ var polled = consumer.pollEnhanced(Duration.ofSeconds(3));
+ emptyPollsCounter.count(polled.count());
polled.forEach(r -> {
totalStats.apply(r);
partitionStats.get(r.partition()).apply(r);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ApplicationMetrics.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ApplicationMetrics.java
new file mode 100644
index 0000000000..841d5e87a5
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ApplicationMetrics.java
@@ -0,0 +1,82 @@
+package com.provectus.kafka.ui.util;
+
+import static lombok.AccessLevel.PRIVATE;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.provectus.kafka.ui.emitter.PolledRecords;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import io.micrometer.core.instrument.Counter;
+import io.micrometer.core.instrument.DistributionSummary;
+import io.micrometer.core.instrument.Gauge;
+import io.micrometer.core.instrument.MeterRegistry;
+import io.micrometer.core.instrument.Metrics;
+import io.micrometer.core.instrument.Timer;
+import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
+import java.util.concurrent.atomic.AtomicInteger;
+import lombok.RequiredArgsConstructor;
+
+@RequiredArgsConstructor(access = PRIVATE)
+public class ApplicationMetrics {
+
+ private final String clusterName;
+ private final MeterRegistry registry;
+
+ public static ApplicationMetrics forCluster(KafkaCluster cluster) {
+ return new ApplicationMetrics(cluster.getName(), Metrics.globalRegistry);
+ }
+
+ @VisibleForTesting
+ public static ApplicationMetrics noop() {
+ return new ApplicationMetrics("noop", new SimpleMeterRegistry());
+ }
+
+ public void meterPolledRecords(String topic, PolledRecords polled, boolean throttled) {
+ pollTimer(topic).record(polled.elapsed());
+ polledRecords(topic).increment(polled.count());
+ polledBytes(topic).record(polled.bytes());
+ if (throttled) {
+ pollThrottlingActivations().increment();
+ }
+ }
+
+ private Counter polledRecords(String topic) {
+ return Counter.builder("topic_records_polled")
+ .description("Number of records polled from topic")
+ .tag("cluster", clusterName)
+ .tag("topic", topic)
+ .register(registry);
+ }
+
+ private DistributionSummary polledBytes(String topic) {
+ return DistributionSummary.builder("topic_polled_bytes")
+ .description("Bytes polled from kafka topic")
+ .tag("cluster", clusterName)
+ .tag("topic", topic)
+ .register(registry);
+ }
+
+ private Timer pollTimer(String topic) {
+ return Timer.builder("topic_poll_time")
+ .description("Time spend in polling for topic")
+ .tag("cluster", clusterName)
+ .tag("topic", topic)
+ .register(registry);
+ }
+
+ private Counter pollThrottlingActivations() {
+ return Counter.builder("poll_throttling_activations")
+ .description("Number of poll throttling activations")
+ .tag("cluster", clusterName)
+ .register(registry);
+ }
+
+ public AtomicInteger activeConsumers() {
+ var count = new AtomicInteger();
+ Gauge.builder("active_consumers", () -> count)
+ .description("Number of active consumers")
+ .tag("cluster", clusterName)
+ .register(registry);
+ return count;
+ }
+
+}
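
A side note on the design, with a small sketch (not project code): Micrometer's builder/register calls are idempotent per (name, tags) pair, so ApplicationMetrics can rebuild the Counter, Timer and DistributionSummary on every poll without duplicating meters in the registry.

    import io.micrometer.core.instrument.Counter;
    import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

    class MeterIdempotencySketch {
      public static void main(String[] args) {
        var registry = new SimpleMeterRegistry();
        Counter first = Counter.builder("topic_records_polled")
            .tag("cluster", "local").tag("topic", "t1").register(registry);
        Counter second = Counter.builder("topic_records_polled")
            .tag("cluster", "local").tag("topic", "t1").register(registry);
        first.increment(10);
        System.out.println(second.count()); // 10.0 -> same underlying meter
      }
    }
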
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordsUtil.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordsUtil.java
deleted file mode 100644
index e4efcb0f75..0000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordsUtil.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.header.Header;
-import org.apache.kafka.common.utils.Bytes;
-
-public class ConsumerRecordsUtil {
-
- public static int calculatePolledRecSize(ConsumerRecord rec) {
- int polledBytes = 0;
- for (Header header : rec.headers()) {
- polledBytes +=
- (header.key() != null ? header.key().getBytes().length : 0)
- + (header.value() != null ? header.value().length : 0);
- }
- polledBytes += rec.key() == null ? 0 : rec.serializedKeySize();
- polledBytes += rec.value() == null ? 0 : rec.serializedValueSize();
- return polledBytes;
- }
-
- public static int calculatePolledSize(Iterable> recs) {
- int polledBytes = 0;
- for (ConsumerRecord rec : recs) {
- polledBytes += calculatePolledRecSize(rec);
- }
- return polledBytes;
- }
-
-}
diff --git a/kafka-ui-api/src/main/resources/application.yml b/kafka-ui-api/src/main/resources/application.yml
index 71af4e8d92..e879920613 100644
--- a/kafka-ui-api/src/main/resources/application.yml
+++ b/kafka-ui-api/src/main/resources/application.yml
@@ -10,7 +10,7 @@ management:
endpoints:
web:
exposure:
- include: "info,health"
+ include: "info,health,prometheus"
logging:
level:
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java
index 239f3cf994..d093d3144c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java
@@ -8,9 +8,11 @@ import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
+import com.provectus.kafka.ui.emitter.EnhancedConsumer;
import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.emitter.MessagesProcessing;
import com.provectus.kafka.ui.emitter.PollingSettings;
+import com.provectus.kafka.ui.emitter.PollingThrottler;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
@@ -18,6 +20,7 @@ import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
import com.provectus.kafka.ui.serdes.builtin.StringSerde;
+import com.provectus.kafka.ui.util.ApplicationMetrics;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
@@ -38,7 +41,6 @@ import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.serialization.BytesDeserializer;
-import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -325,22 +327,20 @@ class RecordEmitterTest extends AbstractIntegrationTest {
assertionsConsumer.accept(step.expectComplete().verifyThenAssertThat());
}
- private KafkaConsumer createConsumer() {
+ private EnhancedConsumer createConsumer() {
return createConsumer(Map.of());
}
- private KafkaConsumer createConsumer(Map properties) {
+ private EnhancedConsumer createConsumer(Map properties) {
final Map map = Map.of(
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers(),
ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString(),
- ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 19, // to check multiple polls
- ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class,
- ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class
+ ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 19 // to check multiple polls
);
Properties props = new Properties();
props.putAll(map);
props.putAll(properties);
- return new KafkaConsumer<>(props);
+ return new EnhancedConsumer(props, PollingThrottler.noop(), ApplicationMetrics.noop());
}
@Value
From 15f4543402694e877cd1de4e85ff850ffb505d7f Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Tue, 1 Aug 2023 21:27:31 +0700
Subject: [PATCH 09/31] BE: RBAC: Fix unknown resource exception (#4033)
---
.../java/com/provectus/kafka/ui/model/rbac/Permission.java | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
index dd456400da..56b0a09802 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
@@ -16,6 +16,7 @@ import com.provectus.kafka.ui.model.rbac.permission.KsqlAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
@@ -73,6 +74,10 @@ public class Permission {
}
private List getAllActionValues() {
+ if (resource == null) {
+ return Collections.emptyList();
+ }
+
return switch (this.resource) {
case APPLICATIONCONFIG -> Arrays.stream(ApplicationConfigAction.values()).map(Enum::toString).toList();
case CLUSTERCONFIG -> Arrays.stream(ClusterConfigAction.values()).map(Enum::toString).toList();
From 3cde6c21ecbc89ae902f89960afe2894014d1b89 Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Tue, 1 Aug 2023 21:27:57 +0700
Subject: [PATCH 10/31] FE: Logout button link is bound to a wrong div (#4045)
---
.../src/components/NavBar/UserInfo/UserInfo.tsx | 4 ++--
.../components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx | 2 --
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
index 2b432b10e3..dae43364c4 100644
--- a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
+++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
@@ -19,8 +19,8 @@ const UserInfo = () => {
}
>
-
- Log out
+
+ Log out
) : null;
diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
index b51f00da02..2231b09a25 100644
--- a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
+++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
@@ -34,7 +34,6 @@ describe('UserInfo', () => {
const logout = screen.getByText('Log out');
expect(logout).toBeInTheDocument();
- expect(logout).toHaveAttribute('href', '/logout');
});
it('should render correct url during basePath initialization', async () => {
@@ -50,7 +49,6 @@ describe('UserInfo', () => {
const logout = screen.getByText('Log out');
expect(logout).toBeInTheDocument();
- expect(logout).toHaveAttribute('href', `${baseUrl}/logout`);
});
it('should not render anything if the username does not exists', () => {
From 77f1ec949028d3ae7de2f79835096fedc2b22334 Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Tue, 1 Aug 2023 22:06:17 +0700
Subject: [PATCH 11/31] BE: RBAC: Skip rbac checks in case of app config
(#4078)
---
.../ui/service/rbac/AccessControlService.java | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
index 6cc455624b..59ea02fea8 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
@@ -51,6 +51,8 @@ import reactor.core.publisher.Mono;
@Slf4j
public class AccessControlService {
+ private static final String ACCESS_DENIED = "Access denied";
+
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
private final RoleBasedAccessControlProperties properties;
@@ -97,6 +99,17 @@ public class AccessControlService {
return Mono.empty();
}
+ if (CollectionUtils.isNotEmpty(context.getApplicationConfigActions())) {
+ return getUser()
+ .doOnNext(user -> {
+ boolean accessGranted = isApplicationConfigAccessible(context, user);
+
+ if (!accessGranted) {
+ throw new AccessDeniedException(ACCESS_DENIED);
+ }
+ }).then();
+ }
+
return getUser()
.doOnNext(user -> {
boolean accessGranted =
@@ -113,7 +126,7 @@ public class AccessControlService {
&& isAuditAccessible(context, user);
if (!accessGranted) {
- throw new AccessDeniedException("Access denied");
+ throw new AccessDeniedException(ACCESS_DENIED);
}
})
.then();
From 8126607b914d7aa53e37e1585b03df7bebdc6a4b Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 1 Aug 2023 19:25:09 +0400
Subject: [PATCH 12/31] BE: Audit: Minor refactoring (#4064)
---
.../kafka/ui/service/audit/AuditService.java | 56 ++++++++++++-------
.../kafka/ui/service/audit/AuditWriter.java | 4 +-
.../ui/service/audit/AuditServiceTest.java | 8 +--
3 files changed, 43 insertions(+), 25 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditService.java
index 8d57307891..ad5f11e1f3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditService.java
@@ -13,6 +13,7 @@ import com.provectus.kafka.ui.service.ClustersStorage;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.io.Closeable;
import java.io.IOException;
+import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -37,6 +38,7 @@ import reactor.core.publisher.Signal;
public class AuditService implements Closeable {
private static final Mono NO_AUTH_USER = Mono.just(new AuthenticatedUser("Unknown", Set.of()));
+ private static final Duration BLOCK_TIMEOUT = Duration.ofSeconds(5);
private static final String DEFAULT_AUDIT_TOPIC_NAME = "__kui-audit-log";
private static final int DEFAULT_AUDIT_TOPIC_PARTITIONS = 1;
@@ -56,14 +58,8 @@ public class AuditService implements Closeable {
public AuditService(AdminClientService adminClientService, ClustersStorage clustersStorage) {
Map auditWriters = new HashMap<>();
for (var cluster : clustersStorage.getKafkaClusters()) {
- ReactiveAdminClient adminClient;
- try {
- adminClient = adminClientService.get(cluster).block();
- } catch (Exception e) {
- printAuditInitError(cluster, "Error connect to cluster", e);
- continue;
- }
- createAuditWriter(cluster, adminClient, () -> createProducer(cluster, AUDIT_PRODUCER_CONFIG))
+ Supplier adminClientSupplier = () -> adminClientService.get(cluster).block(BLOCK_TIMEOUT);
+ createAuditWriter(cluster, adminClientSupplier, () -> createProducer(cluster, AUDIT_PRODUCER_CONFIG))
.ifPresent(writer -> auditWriters.put(cluster.getName(), writer));
}
this.auditWriters = auditWriters;
@@ -76,7 +72,7 @@ public class AuditService implements Closeable {
@VisibleForTesting
static Optional createAuditWriter(KafkaCluster cluster,
- ReactiveAdminClient ac,
+ Supplier acSupplier,
Supplier> producerFactory) {
var auditProps = cluster.getOriginalProperties().getAudit();
if (auditProps == null) {
@@ -87,32 +83,54 @@ public class AuditService implements Closeable {
if (!topicAudit && !consoleAudit) {
return Optional.empty();
}
- String auditTopicName = Optional.ofNullable(auditProps.getTopic()).orElse(DEFAULT_AUDIT_TOPIC_NAME);
- @Nullable KafkaProducer producer = null;
- if (topicAudit && createTopicIfNeeded(cluster, ac, auditTopicName, auditProps)) {
- producer = producerFactory.get();
+ if (!topicAudit) {
+ log.info("Audit initialization finished for cluster '{}' (console only)", cluster.getName());
+ return Optional.of(consoleOnlyWriter(cluster));
}
- log.info("Audit service initialized for cluster '{}'", cluster.getName());
+ String auditTopicName = Optional.ofNullable(auditProps.getTopic()).orElse(DEFAULT_AUDIT_TOPIC_NAME);
+ boolean topicAuditCanBeDone = createTopicIfNeeded(cluster, acSupplier, auditTopicName, auditProps);
+ if (!topicAuditCanBeDone) {
+ if (consoleAudit) {
+ log.info(
+ "Audit initialization finished for cluster '{}' (console only, topic audit init failed)",
+ cluster.getName()
+ );
+ return Optional.of(consoleOnlyWriter(cluster));
+ }
+ return Optional.empty();
+ }
+ log.info("Audit initialization finished for cluster '{}'", cluster.getName());
return Optional.of(
new AuditWriter(
cluster.getName(),
auditTopicName,
- producer,
+ producerFactory.get(),
consoleAudit ? AUDIT_LOGGER : null
)
);
}
+ private static AuditWriter consoleOnlyWriter(KafkaCluster cluster) {
+ return new AuditWriter(cluster.getName(), null, null, AUDIT_LOGGER);
+ }
+
/**
* return true if topic created/existing and producing can be enabled.
*/
private static boolean createTopicIfNeeded(KafkaCluster cluster,
- ReactiveAdminClient ac,
+ Supplier acSupplier,
String auditTopicName,
ClustersProperties.AuditProperties auditProps) {
+ ReactiveAdminClient ac;
+ try {
+ ac = acSupplier.get();
+ } catch (Exception e) {
+ printAuditInitError(cluster, "Error while connecting to the cluster", e);
+ return false;
+ }
boolean topicExists;
try {
- topicExists = ac.listTopics(true).block().contains(auditTopicName);
+ topicExists = ac.listTopics(true).block(BLOCK_TIMEOUT).contains(auditTopicName);
} catch (Exception e) {
printAuditInitError(cluster, "Error checking audit topic existence", e);
return false;
@@ -130,7 +148,7 @@ public class AuditService implements Closeable {
.ifPresent(topicConfig::putAll);
log.info("Creating audit topic '{}' for cluster '{}'", auditTopicName, cluster.getName());
- ac.createTopic(auditTopicName, topicPartitions, null, topicConfig).block();
+ ac.createTopic(auditTopicName, topicPartitions, null, topicConfig).block(BLOCK_TIMEOUT);
log.info("Audit topic created for cluster '{}'", cluster.getName());
return true;
} catch (Exception e) {
@@ -142,7 +160,7 @@ public class AuditService implements Closeable {
private static void printAuditInitError(KafkaCluster cluster, String errorMsg, Exception cause) {
log.error("-----------------------------------------------------------------");
log.error(
- "Error initializing Audit Service for cluster '{}'. Audit will be disabled. See error below: ",
+ "Error initializing Audit for cluster '{}'. Audit will be disabled. See error below: ",
cluster.getName()
);
log.error("{}", errorMsg, cause);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditWriter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditWriter.java
index c66bd8a1f5..8fa8239278 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditWriter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/audit/AuditWriter.java
@@ -18,7 +18,7 @@ import org.slf4j.Logger;
@Slf4j
record AuditWriter(String clusterName,
- String targetTopic,
+ @Nullable String targetTopic,
@Nullable KafkaProducer producer,
@Nullable Logger consoleLogger) implements Closeable {
@@ -43,7 +43,7 @@ record AuditWriter(String clusterName,
if (consoleLogger != null) {
consoleLogger.info(json);
}
- if (producer != null) {
+ if (targetTopic != null && producer != null) {
producer.send(
new ProducerRecord<>(targetTopic, null, json.getBytes(UTF_8)),
(metadata, ex) -> {
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/audit/AuditServiceTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/audit/AuditServiceTest.java
index f123329ccf..fd90dbacd6 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/audit/AuditServiceTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/audit/AuditServiceTest.java
@@ -81,7 +81,7 @@ class AuditServiceTest {
@Test
void noWriterIfNoAuditPropsSet() {
- var maybeWriter = createAuditWriter(cluster, adminClientMock, producerSupplierMock);
+ var maybeWriter = createAuditWriter(cluster, () -> adminClientMock, producerSupplierMock);
assertThat(maybeWriter).isEmpty();
}
@@ -91,7 +91,7 @@ class AuditServiceTest {
auditProps.setConsoleAuditEnabled(true);
clustersProperties.setAudit(auditProps);
- var maybeWriter = createAuditWriter(cluster, adminClientMock, producerSupplierMock);
+ var maybeWriter = createAuditWriter(cluster, () -> adminClientMock, producerSupplierMock);
assertThat(maybeWriter).isPresent();
var writer = maybeWriter.get();
@@ -116,7 +116,7 @@ class AuditServiceTest {
when(adminClientMock.listTopics(true))
.thenReturn(Mono.just(Set.of("test_audit_topic")));
- var maybeWriter = createAuditWriter(cluster, adminClientMock, producerSupplierMock);
+ var maybeWriter = createAuditWriter(cluster, () -> adminClientMock, producerSupplierMock);
assertThat(maybeWriter).isPresent();
//checking there was no topic creation request
@@ -136,7 +136,7 @@ class AuditServiceTest {
when(adminClientMock.createTopic(eq("test_audit_topic"), eq(3), eq(null), anyMap()))
.thenReturn(Mono.empty());
- var maybeWriter = createAuditWriter(cluster, adminClientMock, producerSupplierMock);
+ var maybeWriter = createAuditWriter(cluster, () -> adminClientMock, producerSupplierMock);
assertThat(maybeWriter).isPresent();
//verifying topic created
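
For context on the new BLOCK_TIMEOUT usage, a small sketch (not project code) of Reactor's bounded block: block(Duration) throws IllegalStateException when the publisher does not complete in time, which the surrounding try/catch in AuditService turns into the "audit disabled" log instead of hanging startup.

    import java.time.Duration;
    import reactor.core.publisher.Mono;

    class BlockTimeoutSketch {
      public static void main(String[] args) {
        try {
          Mono.never().block(Duration.ofSeconds(5)); // never completes
        } catch (IllegalStateException e) {
          System.out.println("timed out: " + e.getMessage());
        }
      }
    }
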
From 92157bdd3966a9462ce694da7da107a6034a86aa Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Wed, 2 Aug 2023 13:58:58 +0400
Subject: [PATCH 13/31] BE: Hex serde initialization fix (#4081)
Co-authored-by: iliax
---
.../com/provectus/kafka/ui/serdes/builtin/HexSerde.java | 9 +++++++++
.../provectus/kafka/ui/serdes/builtin/HexSerdeTest.java | 8 ++------
2 files changed, 11 insertions(+), 6 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
index cf1a6b793f..343bb4e705 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java
@@ -16,12 +16,21 @@ public class HexSerde implements BuiltInSerde {
return "Hex";
}
+ @Override
+ public void autoConfigure(PropertyResolver kafkaClusterProperties, PropertyResolver globalProperties) {
+ configure(" ", true);
+ }
+
@Override
public void configure(PropertyResolver serdeProperties,
PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
String delim = serdeProperties.getProperty("delimiter", String.class).orElse(" ");
boolean uppercase = serdeProperties.getProperty("uppercase", Boolean.class).orElse(true);
+ configure(delim, uppercase);
+ }
+
+ private void configure(String delim, boolean uppercase) {
deserializeHexFormat = HexFormat.ofDelimiter(delim);
if (uppercase) {
deserializeHexFormat = deserializeHexFormat.withUpperCase();
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
index a318279f56..4ec28c1509 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java
@@ -16,16 +16,12 @@ public class HexSerdeTest {
private static final byte[] TEST_BYTES = "hello world".getBytes();
private static final String TEST_BYTES_HEX_ENCODED = "68 65 6C 6C 6F 20 77 6F 72 6C 64";
- private Serde hexSerde;
+ private HexSerde hexSerde;
@BeforeEach
void init() {
hexSerde = new HexSerde();
- hexSerde.configure(
- PropertyResolverImpl.empty(),
- PropertyResolverImpl.empty(),
- PropertyResolverImpl.empty()
- );
+ hexSerde.autoConfigure(PropertyResolverImpl.empty(), PropertyResolverImpl.empty());
}
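
A sketch of what the new autoConfigure defaults (space delimiter, uppercase) produce, using java.util.HexFormat directly; the expected output matches the TEST_BYTES_HEX_ENCODED constant above.

    import java.util.HexFormat;

    class HexDefaultsSketch {
      public static void main(String[] args) {
        HexFormat fmt = HexFormat.ofDelimiter(" ").withUpperCase();
        System.out.println(fmt.formatHex("hello world".getBytes()));
        // -> 68 65 6C 6C 6F 20 77 6F 72 6C 64
      }
    }
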
From 4515ecaf41c3da1e7c15fcdd2e6420e6788a772b Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Wed, 2 Aug 2023 15:30:37 +0400
Subject: [PATCH 14/31] Update README
Add stats banner
---
README.md | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/README.md b/README.md
index b276e2756f..e3fa2ab9d2 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,10 @@
ProductHunt
+
+
+
+
#### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
From 6a40146fb1fdd6b4c9edb22d7e8c4e94944a506d Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Wed, 2 Aug 2023 20:06:31 +0700
Subject: [PATCH 15/31] BE: Throw obvious exception in case no oauth2 providers
have been defined (#4002)
---
.../provectus/kafka/ui/config/auth/OAuthSecurityConfig.java | 3 +++
.../kafka/ui/config/auth/condition/CognitoCondition.java | 5 +++--
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
index 0e7a228e48..797b41c6df 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
@@ -99,6 +99,9 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
final OAuth2ClientProperties props = OAuthPropertiesConverter.convertProperties(properties);
final List<ClientRegistration> registrations =
new ArrayList<>(new OAuth2ClientPropertiesMapper(props).asClientRegistrations().values());
+ if (registrations.isEmpty()) {
+ throw new IllegalArgumentException("OAuth2 authentication is enabled but no providers specified.");
+ }
return new InMemoryReactiveClientRegistrationRepository(registrations);
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/CognitoCondition.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/CognitoCondition.java
index dc56195ecc..c369985872 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/CognitoCondition.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/CognitoCondition.java
@@ -1,13 +1,14 @@
package com.provectus.kafka.ui.config.auth.condition;
import com.provectus.kafka.ui.service.rbac.AbstractProviderCondition;
+import org.jetbrains.annotations.NotNull;
import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.type.AnnotatedTypeMetadata;
public class CognitoCondition extends AbstractProviderCondition implements Condition {
@Override
- public boolean matches(final ConditionContext context, final AnnotatedTypeMetadata metadata) {
+ public boolean matches(final ConditionContext context, final @NotNull AnnotatedTypeMetadata metadata) {
return getRegisteredProvidersTypes(context.getEnvironment()).stream().anyMatch(a -> a.equalsIgnoreCase("cognito"));
}
-}
\ No newline at end of file
+}
From 69ebd3d52b2065ef970063d92d36aa2684ecf289 Mon Sep 17 00:00:00 2001
From: Sungyun Hur
Date: Thu, 3 Aug 2023 20:05:09 +0900
Subject: [PATCH 16/31] FE: display consumerGroupID in Reset Offsets page
(#3866)
---
.../components/ConsumerGroups/Details/ResetOffsets/Form.tsx | 2 +-
.../ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx | 3 ++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx
index 8ee7995bf2..0e4e05ba14 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx
@@ -186,7 +186,7 @@ const Form: React.FC = ({ defaultValues, partitions, topics }) => {
type="submit"
disabled={partitionsValue.length === 0}
>
- Submit
+ Reset Offsets
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx
index 548ed47f54..0b58647946 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx
@@ -15,6 +15,7 @@ import Form from './Form';
const ResetOffsets: React.FC = () => {
const routerParams = useAppParams();
+ const { consumerGroupID } = routerParams;
const consumerGroup = useConsumerGroupDetails(routerParams);
if (consumerGroup.isLoading || !consumerGroup.isSuccess)
@@ -37,7 +38,7 @@ const ResetOffsets: React.FC = () => {
return (
<>
From 333eae24759aaa7b3fc14e5e7dea232200c13dcd Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Fri, 4 Aug 2023 18:43:40 +0700
Subject: [PATCH 17/31] Make the container timezone configurable (#4084)
Co-authored-by: Ilya Kuramshin
---
kafka-ui-api/Dockerfile | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index d969ec7631..98dcdb46ac 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -1,7 +1,11 @@
#FROM azul/zulu-openjdk-alpine:17-jre-headless
FROM azul/zulu-openjdk-alpine@sha256:a36679ac0d28cb835e2a8c00e1e0d95509c6c51c5081c7782b85edb1f37a771a
-RUN apk add --no-cache gcompat # need to make snappy codec work
+RUN apk add --no-cache \
+ # snappy codec
+ gcompat \
+ # configuring timezones
+ tzdata
RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
# creating folder for dynamic config usage (certificates uploads, etc)
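With tzdata installed, the container timezone can be driven by the TZ environment variable (for example, running the image with TZ=Europe/Berlin; the exact value is an assumption for illustration). A minimal sketch of how the JVM default zone would then reflect it:

    import java.time.ZoneId;
    import java.time.ZonedDateTime;

    public class TimezoneSketch {
      public static void main(String[] args) {
        // With tzdata present and TZ set on the container, systemDefault()
        // reports the configured zone instead of UTC.
        System.out.println("Default zone: " + ZoneId.systemDefault());
        System.out.println("Local time:   " + ZonedDateTime.now());
      }
    }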
From ac09efcd3486fda6cee41986a01820710863eb81 Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Mon, 7 Aug 2023 21:04:10 +0700
Subject: [PATCH 18/31] FE: Update topic deletion disabled message (#4092)
---
kafka-ui-react-app/src/components/Topics/List/ActionsCell.tsx | 4 ++--
kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/kafka-ui-react-app/src/components/Topics/List/ActionsCell.tsx b/kafka-ui-react-app/src/components/Topics/List/ActionsCell.tsx
index cdd669567f..05c61044f0 100644
--- a/kafka-ui-react-app/src/components/Topics/List/ActionsCell.tsx
+++ b/kafka-ui-react-app/src/components/Topics/List/ActionsCell.tsx
@@ -86,9 +86,9 @@ const ActionsCell: React.FC> = ({ row }) => {
Remove Topic
{!isTopicDeletionAllowed && (
- The topic deletion is restricted at the application
+ The topic deletion is restricted at the broker
- configuration level
+ configuration level (delete.topic.enable = false)
)}
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
index ca4801c6da..5a639f0c48 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
@@ -162,9 +162,9 @@ const Topic: React.FC = () => {
Remove Topic
{!isTopicDeletionAllowed && (
- The topic deletion is restricted at the application
+ The topic deletion is restricted at the broker
- configuration level
+ configuration level (delete.topic.enable = false)
)}
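The updated message points at the broker-side delete.topic.enable setting rather than the application configuration. A hedged sketch of how that setting could be inspected with the Kafka AdminClient (the bootstrap address and broker id below are assumptions, not project code):

    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.admin.Admin;
    import org.apache.kafka.clients.admin.AdminClientConfig;
    import org.apache.kafka.clients.admin.Config;
    import org.apache.kafka.common.config.ConfigResource;

    public class DeleteTopicEnableCheck {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (Admin admin = Admin.create(props)) {
          ConfigResource broker = new ConfigResource(ConfigResource.Type.BROKER, "1");
          Config config = admin.describeConfigs(List.of(broker)).all().get().get(broker);
          // "false" here is what makes the UI show the message above.
          System.out.println(config.get("delete.topic.enable").value());
        }
      }
    }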
From ba18f3b0421afa10072c7ba603e4cdb105663827 Mon Sep 17 00:00:00 2001
From: Narekmat <47845266+Narekmat@users.noreply.github.com>
Date: Mon, 7 Aug 2023 18:18:24 +0400
Subject: [PATCH 19/31] Infra: Fix frontend sonar jdk warning (#4098)
---
.github/workflows/frontend.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index 02ccd4e135..b4abbd5ae9 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -49,7 +49,7 @@ jobs:
cd kafka-ui-react-app/
pnpm test:CI
- name: SonarCloud Scan
- uses: workshur/sonarcloud-github-action@improved_basedir
+ uses: sonarsource/sonarcloud-github-action@master
with:
projectBaseDir: ./kafka-ui-react-app
args: -Dsonar.pullrequest.key=${{ github.event.pull_request.number }} -Dsonar.pullrequest.branch=${{ github.head_ref }} -Dsonar.pullrequest.base=${{ github.base_ref }}
From 150fc21fb84770520e669f8681b5c9a2bf3ba344 Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Mon, 7 Aug 2023 22:01:39 +0700
Subject: [PATCH 20/31] RBAC: Implement roles by github teams (#4093)
Co-authored-by: Ilya Kuramshin
---
.../extractor/GithubAuthorityExtractor.java | 116 ++++++++++++++----
1 file changed, 92 insertions(+), 24 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
index 654654a05d..90c4ceebc6 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
@@ -5,6 +5,8 @@ import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GITHUB;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -26,6 +28,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
private static final String ORGANIZATION_ATTRIBUTE_NAME = "organizations_url";
private static final String USERNAME_ATTRIBUTE_NAME = "login";
private static final String ORGANIZATION_NAME = "login";
+ private static final String ORGANIZATION = "organization";
+ private static final String TEAM_NAME = "slug";
private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
private static final String DUMMY = "dummy";
// The number of results (max 100) per page of list organizations for authenticated user.
@@ -46,7 +50,7 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
throw new RuntimeException();
}
- Set<String> groupsByUsername = new HashSet<>();
+ Set<String> rolesByUsername = new HashSet<>();
String username = principal.getAttribute(USERNAME_ATTRIBUTE_NAME);
if (username == null) {
log.debug("Github username param is not present");
@@ -59,13 +63,7 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
.filter(s -> s.getType().equals("user"))
.anyMatch(s -> s.getValue().equals(username)))
.map(Role::getName)
- .forEach(groupsByUsername::add);
- }
-
- String organization = principal.getAttribute(ORGANIZATION_ATTRIBUTE_NAME);
- if (organization == null) {
- log.debug("Github organization param is not present");
- return Mono.just(groupsByUsername);
+ .forEach(rolesByUsername::add);
}
OAuth2UserRequest req = (OAuth2UserRequest) additionalParams.get("request");
@@ -80,8 +78,24 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
.getUserInfoEndpoint()
.getUri();
}
+ var webClient = WebClient.create(infoEndpoint);
- WebClient webClient = WebClient.create(infoEndpoint);
+ Mono<Set<String>> rolesByOrganization = getOrganizationRoles(principal, additionalParams, acs, webClient);
+ Mono<Set<String>> rolesByTeams = getTeamRoles(webClient, additionalParams, acs);
+
+ return Mono.zip(rolesByOrganization, rolesByTeams)
+ .map((t) -> Stream.of(t.getT1(), t.getT2(), rolesByUsername)
+ .flatMap(Collection::stream)
+ .collect(Collectors.toSet()));
+ }
+
+ private Mono<Set<String>> getOrganizationRoles(DefaultOAuth2User principal, Map<String, Object> additionalParams,
+ AccessControlService acs, WebClient webClient) {
+ String organization = principal.getAttribute(ORGANIZATION_ATTRIBUTE_NAME);
+ if (organization == null) {
+ log.debug("Github organization param is not present");
+ return Mono.just(Collections.emptySet());
+ }
final Mono<List<Map<String, Object>>> userOrganizations = webClient
.get()
@@ -99,22 +113,76 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
//@formatter:on
return userOrganizations
- .map(orgsMap -> {
- var groupsByOrg = acs.getRoles()
- .stream()
- .filter(role -> role.getSubjects()
- .stream()
- .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB))
- .filter(s -> s.getType().equals("organization"))
- .anyMatch(subject -> orgsMap.stream()
- .map(org -> org.get(ORGANIZATION_NAME).toString())
- .distinct()
- .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue()))
- ))
- .map(Role::getName);
+ .map(orgsMap -> acs.getRoles()
+ .stream()
+ .filter(role -> role.getSubjects()
+ .stream()
+ .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB))
+ .filter(s -> s.getType().equals(ORGANIZATION))
+ .anyMatch(subject -> orgsMap.stream()
+ .map(org -> org.get(ORGANIZATION_NAME).toString())
+ .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue()))
+ ))
+ .map(Role::getName)
+ .collect(Collectors.toSet()));
+ }
- return Stream.concat(groupsByOrg, groupsByUsername.stream()).collect(Collectors.toSet());
- });
+ @SuppressWarnings("unchecked")
+ private Mono<Set<String>> getTeamRoles(WebClient webClient, Map<String, Object> additionalParams,
+ AccessControlService acs) {
+
+ var requestedTeams = acs.getRoles()
+ .stream()
+ .filter(r -> r.getSubjects()
+ .stream()
+ .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB))
+ .anyMatch(s -> s.getType().equals("team")))
+ .collect(Collectors.toSet());
+
+ if (requestedTeams.isEmpty()) {
+ log.debug("No roles with github teams found, skipping");
+ return Mono.just(Collections.emptySet());
+ }
+
+ final Mono<List<Map<String, Object>>> rawTeams = webClient
+ .get()
+ .uri(uriBuilder -> uriBuilder.path("/teams")
+ .queryParam("per_page", ORGANIZATIONS_PER_PAGE)
+ .build())
+ .headers(headers -> {
+ headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
+ OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");
+ headers.setBearerAuth(request.getAccessToken().getTokenValue());
+ })
+ .retrieve()
+ //@formatter:off
+ .bodyToMono(new ParameterizedTypeReference<>() {});
+ //@formatter:on
+
+ final Mono<List<String>> mappedTeams = rawTeams
+ .map(teams -> teams.stream()
+ .map(teamInfo -> {
+ var name = teamInfo.get(TEAM_NAME);
+ var orgInfo = (Map<String, Object>) teamInfo.get(ORGANIZATION);
+ var orgName = orgInfo.get(ORGANIZATION_NAME);
+ return orgName + "/" + name;
+ })
+ .map(Object::toString)
+ .collect(Collectors.toList())
+ );
+
+ return mappedTeams
+ .map(teams -> acs.getRoles()
+ .stream()
+ .filter(role -> role.getSubjects()
+ .stream()
+ .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB))
+ .filter(s -> s.getType().equals("team"))
+ .anyMatch(subject -> teams.stream()
+ .anyMatch(teamName -> teamName.equalsIgnoreCase(subject.getValue()))
+ ))
+ .map(Role::getName)
+ .collect(Collectors.toSet()));
}
}
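Team subjects are matched against the "<organization login>/<team slug>" string built above, case-insensitively. A tiny illustrative sketch (the organization and team names are made up):

    // Role subjects of type "team" are expected to carry "<org login>/<team slug>".
    String orgLogin = "my-org";   // illustrative only
    String teamSlug = "backend";  // illustrative only
    String teamId = orgLogin + "/" + teamSlug;
    boolean matches = teamId.equalsIgnoreCase("My-Org/Backend"); // true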
From d915de4fd811265c8303cd61b10ac2cf9caea0ce Mon Sep 17 00:00:00 2001
From: Kostas Dizas <254960+kostasdizas@users.noreply.github.com>
Date: Fri, 11 Aug 2023 09:47:28 +0100
Subject: [PATCH 21/31] Add protobuf raw message deserializer (#4041)
Implemented a Protobuf Raw deserializer that works like protoc --decode_raw. This is a no-config alternative to the existing ProtobufFileSerde.
Co-authored-by: Ilya Kuramshin
---
.../kafka/ui/serdes/SerdesInitializer.java | 2 +
.../ui/serdes/builtin/ProtobufRawSerde.java | 59 ++++++++++
.../serdes/builtin/ProtobufRawSerdeTest.java | 108 ++++++++++++++++++
3 files changed, 169 insertions(+)
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerde.java
create mode 100644 kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerdeTest.java
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
index c833d9fc72..6e28c2fdcf 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
@@ -16,6 +16,7 @@ import com.provectus.kafka.ui.serdes.builtin.HexSerde;
import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde;
+import com.provectus.kafka.ui.serdes.builtin.ProtobufRawSerde;
import com.provectus.kafka.ui.serdes.builtin.StringSerde;
import com.provectus.kafka.ui.serdes.builtin.UInt32Serde;
import com.provectus.kafka.ui.serdes.builtin.UInt64Serde;
@@ -50,6 +51,7 @@ public class SerdesInitializer {
.put(Base64Serde.name(), Base64Serde.class)
.put(HexSerde.name(), HexSerde.class)
.put(UuidBinarySerde.name(), UuidBinarySerde.class)
+ .put(ProtobufRawSerde.name(), ProtobufRawSerde.class)
.build(),
new CustomSerdeLoader()
);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerde.java
new file mode 100644
index 0000000000..221b8b5ea5
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerde.java
@@ -0,0 +1,59 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.protobuf.UnknownFieldSet;
+import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Map;
+import java.util.Optional;
+import lombok.SneakyThrows;
+
+public class ProtobufRawSerde implements BuiltInSerde {
+
+ public static String name() {
+ return "ProtobufDecodeRaw";
+ }
+
+ @Override
+ public Optional<String> getDescription() {
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<SchemaDescription> getSchema(String topic, Target type) {
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean canSerialize(String topic, Target type) {
+ return false;
+ }
+
+ @Override
+ public boolean canDeserialize(String topic, Target type) {
+ return true;
+ }
+
+ @Override
+ public Serializer serializer(String topic, Target type) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Deserializer deserializer(String topic, Target type) {
+ return new Deserializer() {
+ @SneakyThrows
+ @Override
+ public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+ try {
+ UnknownFieldSet unknownFields = UnknownFieldSet.parseFrom(data);
+ return new DeserializeResult(unknownFields.toString(), DeserializeResult.Type.STRING, Map.of());
+ } catch (Exception e) {
+ throw new ValidationException(e.getMessage());
+ }
+ }
+ };
+ }
+}
\ No newline at end of file
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerdeTest.java
new file mode 100644
index 0000000000..a71e9969a8
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufRawSerdeTest.java
@@ -0,0 +1,108 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.google.protobuf.DescriptorProtos;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.DynamicMessage;
+import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.serde.api.Serde;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import lombok.SneakyThrows;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+class ProtobufRawSerdeTest {
+
+ private static final String DUMMY_TOPIC = "dummy-topic";
+
+ private ProtobufRawSerde serde;
+
+ @BeforeEach
+ void init() {
+ serde = new ProtobufRawSerde();
+ }
+
+ @SneakyThrows
+ ProtobufSchema getSampleSchema() {
+ return new ProtobufSchema(
+ """
+ syntax = "proto3";
+ message Message1 {
+ int32 my_field = 1;
+ }
+ """
+ );
+ }
+
+ @SneakyThrows
+ private byte[] getProtobufMessage() {
+ DynamicMessage.Builder builder = DynamicMessage.newBuilder(getSampleSchema().toDescriptor("Message1"));
+ builder.setField(builder.getDescriptorForType().findFieldByName("my_field"), 5);
+ return builder.build().toByteArray();
+ }
+
+ @Test
+ void deserializeSimpleMessage() {
+ var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE)
+ .deserialize(null, getProtobufMessage());
+ assertThat(deserialized.getResult()).isEqualTo("1: 5\n");
+ }
+
+ @Test
+ void deserializeEmptyMessage() {
+ var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE)
+ .deserialize(null, new byte[0]);
+ assertThat(deserialized.getResult()).isEqualTo("");
+ }
+
+ @Test
+ void deserializeInvalidMessage() {
+ var deserializer = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE);
+ assertThatThrownBy(() -> deserializer.deserialize(null, new byte[] { 1, 2, 3 }))
+ .isInstanceOf(ValidationException.class)
+ .hasMessageContaining("Protocol message contained an invalid tag");
+ }
+
+ @Test
+ void deserializeNullMessage() {
+ var deserializer = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE);
+ assertThatThrownBy(() -> deserializer.deserialize(null, null))
+ .isInstanceOf(ValidationException.class)
+ .hasMessageContaining("Cannot read the array length");
+ }
+
+ ProtobufSchema getSampleNestedSchema() {
+ return new ProtobufSchema(
+ """
+ syntax = "proto3";
+ message Message2 {
+ int32 my_nested_field = 1;
+ }
+ message Message1 {
+ int32 my_field = 1;
+ Message2 my_nested_message = 2;
+ }
+ """
+ );
+ }
+
+ @SneakyThrows
+ private byte[] getComplexProtobufMessage() {
+ DynamicMessage.Builder builder = DynamicMessage.newBuilder(getSampleNestedSchema().toDescriptor("Message1"));
+ builder.setField(builder.getDescriptorForType().findFieldByName("my_field"), 5);
+ DynamicMessage.Builder nestedBuilder = DynamicMessage.newBuilder(getSampleNestedSchema().toDescriptor("Message2"));
+ nestedBuilder.setField(nestedBuilder.getDescriptorForType().findFieldByName("my_nested_field"), 10);
+ builder.setField(builder.getDescriptorForType().findFieldByName("my_nested_message"), nestedBuilder.build());
+
+ return builder.build().toByteArray();
+ }
+
+ @Test
+ void deserializeNestedMessage() {
+ var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE)
+ .deserialize(null, getComplexProtobufMessage());
+ assertThat(deserialized.getResult()).isEqualTo("1: 5\n2: {\n 1: 10\n}\n");
+ }
+}
\ No newline at end of file
From fa9547b95a4f40ea28be8868f0ad169c53ac7208 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Fri, 11 Aug 2023 16:41:07 +0400
Subject: [PATCH 22/31] BE: Controllers structure minor refactor (#4110)
Audit & access control services moved to AbstractController
---
.../ui/controller/AbstractController.java | 27 +++++++++-
.../kafka/ui/controller/AclsController.java | 36 ++++++-------
.../ApplicationConfigController.java | 22 ++++----
.../ui/controller/BrokersController.java | 29 +++++-----
.../ui/controller/ClustersController.java | 16 +++---
.../controller/ConsumerGroupsController.java | 24 ++++-----
.../ui/controller/KafkaConnectController.java | 46 ++++++++--------
.../kafka/ui/controller/KsqlController.java | 18 +++----
.../ui/controller/MessagesController.java | 18 +++----
.../ui/controller/SchemasController.java | 46 ++++++++--------
.../kafka/ui/controller/TopicsController.java | 54 +++++++++----------
.../service/SchemaRegistryPaginationTest.java | 5 +-
.../service/TopicsServicePaginationTest.java | 6 ++-
13 files changed, 167 insertions(+), 180 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AbstractController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AbstractController.java
index fd323d55a1..e4dbb3cfcf 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AbstractController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AbstractController.java
@@ -2,12 +2,19 @@ package com.provectus.kafka.ui.controller;
import com.provectus.kafka.ui.exception.ClusterNotFoundException;
import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.service.ClustersStorage;
+import com.provectus.kafka.ui.service.audit.AuditService;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
import org.springframework.beans.factory.annotation.Autowired;
+import reactor.core.publisher.Mono;
+import reactor.core.publisher.Signal;
public abstract class AbstractController {
- private ClustersStorage clustersStorage;
+ protected ClustersStorage clustersStorage;
+ protected AccessControlService accessControlService;
+ protected AuditService auditService;
protected KafkaCluster getCluster(String name) {
return clustersStorage.getClusterByName(name)
@@ -15,8 +22,26 @@ public abstract class AbstractController {
String.format("Cluster with name '%s' not found", name)));
}
+ protected Mono<Void> validateAccess(AccessContext context) {
+ return accessControlService.validateAccess(context);
+ }
+
+ protected void audit(AccessContext acxt, Signal<?> sig) {
+ auditService.audit(acxt, sig);
+ }
+
@Autowired
public void setClustersStorage(ClustersStorage clustersStorage) {
this.clustersStorage = clustersStorage;
}
+
+ @Autowired
+ public void setAccessControlService(AccessControlService accessControlService) {
+ this.accessControlService = accessControlService;
+ }
+
+ @Autowired
+ public void setAuditService(AuditService auditService) {
+ this.auditService = auditService;
+ }
}
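A minimal sketch of the pattern the controllers below switch to after this change; the builder call, service and method names are placeholders rather than actual project code:

    public Mono<ResponseEntity<Void>> someOperation(String clusterName, ServerWebExchange exchange) {
      var context = AccessContext.builder()
          .cluster(clusterName)                  // placeholder builder call
          .operationName("someOperation")
          .build();
      return validateAccess(context)             // inherited from AbstractController
          .then(someService.doWork(getCluster(clusterName)))   // someService is a placeholder
          .doOnEach(sig -> audit(context, sig))  // inherited from AbstractController
          .thenReturn(ResponseEntity.ok().build());
    }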
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
index 71700e3f7b..2ba0add5be 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
@@ -11,8 +11,6 @@ import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.service.acl.AclsService;
-import com.provectus.kafka.ui.service.audit.AuditService;
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.common.resource.PatternType;
@@ -29,8 +27,6 @@ import reactor.core.publisher.Mono;
public class AclsController extends AbstractController implements AclsApi {
private final AclsService aclsService;
- private final AccessControlService accessControlService;
- private final AuditService auditService;
@Override
public Mono<ResponseEntity<Void>> createAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
@@ -41,11 +37,11 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("createAcl")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(kafkaAclDto)
.map(ClusterMapper::toAclBinding)
.flatMap(binding -> aclsService.createAcl(getCluster(clusterName), binding))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
@@ -58,11 +54,11 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("deleteAcl")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(kafkaAclDto)
.map(ClusterMapper::toAclBinding)
.flatMap(binding -> aclsService.deleteAcl(getCluster(clusterName), binding))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
@@ -88,12 +84,12 @@ public class AclsController extends AbstractController implements AclsApi {
var filter = new ResourcePatternFilter(resourceType, resourceName, namePatternType);
- return accessControlService.validateAccess(context).then(
+ return validateAccess(context).then(
Mono.just(
ResponseEntity.ok(
aclsService.listAcls(getCluster(clusterName), filter)
.map(ClusterMapper::toKafkaAclDto)))
- ).doOnEach(sig -> auditService.audit(context, sig));
+ ).doOnEach(sig -> audit(context, sig));
}
@Override
@@ -104,11 +100,11 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("getAclAsCsv")
.build();
- return accessControlService.validateAccess(context).then(
+ return validateAccess(context).then(
aclsService.getAclAsCsvString(getCluster(clusterName))
.map(ResponseEntity::ok)
.flatMap(Mono::just)
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
);
}
@@ -120,10 +116,10 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("syncAclsCsv")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(csvMono)
.flatMap(csv -> aclsService.syncAclWithAclCsv(getCluster(clusterName), csv))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
@@ -137,10 +133,10 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("createConsumerAcl")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(createConsumerAclDto)
.flatMap(req -> aclsService.createConsumerAcl(getCluster(clusterName), req))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
@@ -154,10 +150,10 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("createProducerAcl")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(createProducerAclDto)
.flatMap(req -> aclsService.createProducerAcl(getCluster(clusterName), req))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
@@ -171,10 +167,10 @@ public class AclsController extends AbstractController implements AclsApi {
.operationName("createStreamAppAcl")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(createStreamAppAclDto)
.flatMap(req -> aclsService.createStreamAppAcl(getCluster(clusterName), req))
- .doOnEach(sig -> auditService.audit(context, sig))
+ .doOnEach(sig -> audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
index 5f03c9ab5c..480d62b178 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
@@ -15,8 +15,6 @@ import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.service.ApplicationInfoService;
import com.provectus.kafka.ui.service.KafkaClusterFactory;
-import com.provectus.kafka.ui.service.audit.AuditService;
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.util.ApplicationRestarter;
import com.provectus.kafka.ui.util.DynamicConfigOperations;
import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
@@ -39,7 +37,7 @@ import reactor.util.function.Tuples;
@Slf4j
@RestController
@RequiredArgsConstructor
-public class ApplicationConfigController implements ApplicationConfigApi {
+public class ApplicationConfigController extends AbstractController implements ApplicationConfigApi {
private static final PropertiesMapper MAPPER = Mappers.getMapper(PropertiesMapper.class);
@@ -51,12 +49,10 @@ public class ApplicationConfigController implements ApplicationConfigApi {
ApplicationConfigPropertiesDTO toDto(PropertiesStructure propertiesStructure);
}
- private final AccessControlService accessControlService;
private final DynamicConfigOperations dynamicConfigOperations;
private final ApplicationRestarter restarter;
private final KafkaClusterFactory kafkaClusterFactory;
private final ApplicationInfoService applicationInfoService;
- private final AuditService auditService;
@Override
public Mono> getApplicationInfo(ServerWebExchange exchange) {
@@ -69,12 +65,12 @@ public class ApplicationConfigController implements ApplicationConfigApi {
.applicationConfigActions(VIEW)
.operationName("getCurrentConfig")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(Mono.fromSupplier(() -> ResponseEntity.ok(
new ApplicationConfigDTO()
.properties(MAPPER.toDto(dynamicConfigOperations.getCurrentProperties()))
)))
- .doOnEach(sig -> auditService.audit(context, sig));
+ .doOnEach(sig -> audit(context, sig));
}
@Override
@@ -84,14 +80,14 @@ public class ApplicationConfigController implements ApplicationConfigApi {
.applicationConfigActions(EDIT)
.operationName("restartWithConfig")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(restartRequestDto)
.<ResponseEntity<Void>>map(dto -> {
dynamicConfigOperations.persist(MAPPER.fromDto(dto.getConfig().getProperties()));
restarter.requestRestart();
return ResponseEntity.ok().build();
})
- .doOnEach(sig -> auditService.audit(context, sig));
+ .doOnEach(sig -> audit(context, sig));
}
@Override
@@ -101,13 +97,13 @@ public class ApplicationConfigController implements ApplicationConfigApi {
.applicationConfigActions(EDIT)
.operationName("uploadConfigRelatedFile")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(fileFlux.single())
.flatMap(file ->
dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
.map(path -> new UploadedFileInfoDTO().location(path.toString()))
.map(ResponseEntity::ok))
- .doOnEach(sig -> auditService.audit(context, sig));
+ .doOnEach(sig -> audit(context, sig));
}
@Override
@@ -117,7 +113,7 @@ public class ApplicationConfigController implements ApplicationConfigApi {
.applicationConfigActions(EDIT)
.operationName("validateConfig")
.build();
- return accessControlService.validateAccess(context)
+ return validateAccess(context)
.then(configDto)
.flatMap(config -> {
PropertiesStructure propertiesStructure = MAPPER.fromDto(config.getProperties());
@@ -126,7 +122,7 @@ public class ApplicationConfigController implements ApplicationConfigApi {
.map(validations -> new ApplicationConfigValidationDTO().clusters(validations));
})
.map(ResponseEntity::ok)
- .doOnEach(sig -> auditService.audit(context, sig));
+ .doOnEach(sig -> audit(context, sig));
}
private Mono