SchemaRegistryService.java

package com.provectus.kafka.ui.service;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.provectus.kafka.ui.exception.SchemaCompatibilityException;
import com.provectus.kafka.ui.exception.SchemaNotFoundException;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import com.provectus.kafka.ui.sr.model.Compatibility;
import com.provectus.kafka.ui.sr.model.CompatibilityCheckResponse;
import com.provectus.kafka.ui.sr.model.CompatibilityConfig;
import com.provectus.kafka.ui.sr.model.CompatibilityLevelChange;
import com.provectus.kafka.ui.sr.model.NewSubject;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.util.ReactiveFailover;
import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@Service
@Slf4j
@RequiredArgsConstructor
public class SchemaRegistryService {

  private static final String LATEST = "latest";
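
  /**
   * A {@link SchemaSubject} enriched with the compatibility level that applies to it.
   * All {@link SchemaSubject} accessors are exposed through Lombok's {@code @Delegate}.
   */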
  @AllArgsConstructor
  public static class SubjectWithCompatibilityLevel {
    @Delegate
    SchemaSubject subject;
    @Getter
    Compatibility compatibility;
  }

  private ReactiveFailover<KafkaSrClientApi> api(KafkaCluster cluster) {
    return cluster.getSchemaRegistryClient();
  }
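
  /**
   * Resolves the latest version (with compatibility level) for each of the given subjects,
   * preserving the order of the input list.
   */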
  public Mono<List<SubjectWithCompatibilityLevel>> getAllLatestVersionSchemas(KafkaCluster cluster,
                                                                              List<String> subjects) {
    return Flux.fromIterable(subjects)
        .concatMap(subject -> getLatestSchemaVersionBySubject(cluster, subject))
        .collect(Collectors.toList());
  }
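
  /**
   * Lists all subject names registered in the cluster's Schema Registry.
   */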
  public Mono<List<String>> getAllSubjectNames(KafkaCluster cluster) {
    return api(cluster)
        .mono(c -> c.getAllSubjectNames(null, false))
        .flatMapIterable(this::parseSubjectListString)
        .collectList();
  }

  @SneakyThrows
  private List<String> parseSubjectListString(String subjectNamesStr) {
    //workaround for https://github.com/spring-projects/spring-framework/issues/24734
    return new JsonMapper().readValue(subjectNamesStr, new TypeReference<List<String>>() {
    });
  }
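
  /**
   * Emits every registered version of the given subject, each paired with its compatibility level.
   */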
  public Flux<SubjectWithCompatibilityLevel> getAllVersionsBySubject(KafkaCluster cluster, String subject) {
    Flux<Integer> versions = getSubjectVersions(cluster, subject);
    return versions.flatMap(version -> getSchemaSubjectByVersion(cluster, subject, version));
  }

  private Flux<Integer> getSubjectVersions(KafkaCluster cluster, String schemaName) {
    return api(cluster).flux(c -> c.getSubjectVersions(schemaName));
  }

  public Mono<SubjectWithCompatibilityLevel> getSchemaSubjectByVersion(KafkaCluster cluster,
                                                                       String schemaName,
                                                                       Integer version) {
    return getSchemaSubject(cluster, schemaName, String.valueOf(version));
  }

  public Mono<SubjectWithCompatibilityLevel> getLatestSchemaVersionBySubject(KafkaCluster cluster,
                                                                             String schemaName) {
    return getSchemaSubject(cluster, schemaName, LATEST);
  }
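
  /**
   * Fetches the requested subject version and zips it with the effective compatibility level;
   * a 404 from the registry is translated into {@link SchemaNotFoundException}.
   */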
  private Mono<SubjectWithCompatibilityLevel> getSchemaSubject(KafkaCluster cluster, String schemaName,
                                                               String version) {
    return api(cluster)
        .mono(c -> c.getSubjectVersion(schemaName, version))
        .zipWith(getSchemaCompatibilityInfoOrGlobal(cluster, schemaName))
        .map(t -> new SubjectWithCompatibilityLevel(t.getT1(), t.getT2()))
        .onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.error(new SchemaNotFoundException()));
  }
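
  /**
   * Deletes a single version of the given subject.
   */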
  public Mono<Void> deleteSchemaSubjectByVersion(KafkaCluster cluster, String schemaName, Integer version) {
    return deleteSchemaSubject(cluster, schemaName, String.valueOf(version));
  }

  public Mono<Void> deleteLatestSchemaSubject(KafkaCluster cluster, String schemaName) {
    return deleteSchemaSubject(cluster, schemaName, LATEST);
  }

  private Mono<Void> deleteSchemaSubject(KafkaCluster cluster, String schemaName, String version) {
    return api(cluster).mono(c -> c.deleteSubjectVersion(schemaName, version, false));
  }
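
  /**
   * Deletes the subject together with all of its versions.
   */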
  public Mono<Void> deleteSchemaSubjectEntirely(KafkaCluster cluster, String schemaName) {
    return api(cluster).mono(c -> c.deleteAllSubjectVersions(schemaName, false));
  }

  /**
   * Registers the provided schema as a new version of the subject and then returns the whole
   * subject by requesting its latest version. A conflicting (incompatible) schema is reported
   * as {@link SchemaCompatibilityException}, an invalid one as {@link ValidationException}.
   */
  public Mono<SubjectWithCompatibilityLevel> registerNewSchema(KafkaCluster cluster,
                                                               String subject,
                                                               NewSubject newSchemaSubject) {
    return api(cluster)
        .mono(c -> c.registerNewSchema(subject, newSchemaSubject))
        .onErrorMap(WebClientResponseException.Conflict.class,
            th -> new SchemaCompatibilityException())
        .onErrorMap(WebClientResponseException.UnprocessableEntity.class,
            th -> new ValidationException("Invalid schema"))
        .then(getLatestSchemaVersionBySubject(cluster, subject));
  }
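
  /**
   * Sets the compatibility level for a single subject.
   */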
  public Mono<Void> updateSchemaCompatibility(KafkaCluster cluster,
                                              String schemaName,
                                              Compatibility compatibility) {
    return api(cluster)
        .mono(c -> c.updateSubjectCompatibilityLevel(
            schemaName, new CompatibilityLevelChange().compatibility(compatibility)))
        .then();
  }
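
  /**
   * Sets the cluster-wide (global) compatibility level.
   */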
  public Mono<Void> updateGlobalSchemaCompatibility(KafkaCluster cluster,
                                                    Compatibility compatibility) {
    return api(cluster)
        .mono(c -> c.updateGlobalCompatibilityLevel(new CompatibilityLevelChange().compatibility(compatibility)))
        .then();
  }
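
  /**
   * Returns the subject-level compatibility setting, or an empty Mono if none is set
   * (or the lookup fails).
   */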
  public Mono<Compatibility> getSchemaCompatibilityLevel(KafkaCluster cluster,
                                                         String schemaName) {
    return api(cluster)
        .mono(c -> c.getSubjectCompatibilityLevel(schemaName, true))
        .map(CompatibilityConfig::getCompatibilityLevel)
        .onErrorResume(error -> Mono.empty());
  }
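
  /**
   * Returns the global compatibility level configured for the Schema Registry.
   */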
  public Mono<Compatibility> getGlobalSchemaCompatibilityLevel(KafkaCluster cluster) {
    return api(cluster)
        .mono(KafkaSrClientApi::getGlobalCompatibilityLevel)
        .map(CompatibilityConfig::getCompatibilityLevel);
  }
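
  /**
   * Resolves the compatibility level effective for the subject: the subject-level setting
   * if present, otherwise the global one.
   */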
  private Mono<Compatibility> getSchemaCompatibilityInfoOrGlobal(KafkaCluster cluster,
                                                                 String schemaName) {
    return getSchemaCompatibilityLevel(cluster, schemaName)
        .switchIfEmpty(this.getGlobalSchemaCompatibilityLevel(cluster));
  }
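
  /**
   * Checks whether the provided schema is compatible with the latest registered version
   * of the subject.
   */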
  public Mono<CompatibilityCheckResponse> checksSchemaCompatibility(KafkaCluster cluster,
                                                                    String schemaName,
                                                                    NewSubject newSchemaSubject) {
    return api(cluster).mono(c -> c.checkSchemaCompatibility(schemaName, LATEST, true, newSchemaSubject));
  }
}