This commit is contained in:
iliax 2023-07-20 18:19:59 +04:00
parent d6c16e2905
commit 066157577d
6 changed files with 49 additions and 52 deletions

View file

@ -7,9 +7,9 @@ import com.provectus.kafka.ui.model.GraphDescriptionsDTO;
import com.provectus.kafka.ui.model.GraphParameterDTO; import com.provectus.kafka.ui.model.GraphParameterDTO;
import com.provectus.kafka.ui.model.PrometheusApiQueryResponseDTO; import com.provectus.kafka.ui.model.PrometheusApiQueryResponseDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext; import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.service.graphs.GraphDescription;
import com.provectus.kafka.ui.service.graphs.GraphsService; import com.provectus.kafka.ui.service.graphs.GraphsService;
import com.provectus.kafka.ui.service.audit.AuditService; import com.provectus.kafka.ui.service.audit.AuditService;
import com.provectus.kafka.ui.service.graphs.GraphsStorage;
import com.provectus.kafka.ui.service.rbac.AccessControlService; import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.time.Duration; import java.time.Duration;
import java.time.OffsetDateTime; import java.time.OffsetDateTime;
@ -65,11 +65,7 @@ public class GraphsController extends AbstractController implements GraphsApi {
@Override @Override
public Mono<ResponseEntity<GraphDescriptionsDTO>> getGraphsList(String clusterName, public Mono<ResponseEntity<GraphDescriptionsDTO>> getGraphsList(String clusterName,
ServerWebExchange exchange) { ServerWebExchange exchange) {
var graphs = graphsService.getAllGraphs(); var graphs = graphsService.getGraphs(getCluster(clusterName));
var cluster = getCluster(clusterName);
if (cluster.getPrometheusStorageClient() == null) {
graphs = Stream.empty();
}
return Mono.just( return Mono.just(
ResponseEntity.ok( ResponseEntity.ok(
new GraphDescriptionsDTO().graphs(graphs.map(this::map).toList()) new GraphDescriptionsDTO().graphs(graphs.map(this::map).toList())
@ -77,7 +73,7 @@ public class GraphsController extends AbstractController implements GraphsApi {
); );
} }
private GraphDescriptionDTO map(GraphsStorage.GraphDescription graph) { private GraphDescriptionDTO map(GraphDescription graph) {
return new GraphDescriptionDTO(graph.id()) return new GraphDescriptionDTO(graph.id())
.defaultPeriod(Optional.ofNullable(graph.defaultInterval()).map(Duration::toString).orElse(null)) .defaultPeriod(Optional.ofNullable(graph.defaultInterval()).map(Duration::toString).orElse(null))
.type(graph.isRange() ? GraphDescriptionDTO.TypeEnum.RANGE : GraphDescriptionDTO.TypeEnum.INSTANT) .type(graph.isRange() ? GraphDescriptionDTO.TypeEnum.RANGE : GraphDescriptionDTO.TypeEnum.INSTANT)

View file

@ -0,0 +1,17 @@
package com.provectus.kafka.ui.service.graphs;
import java.time.Duration;
import java.util.Set;
import javax.annotation.Nullable;
import lombok.Builder;
/**
 * Immutable description of a predefined Prometheus graph: the PromQL query
 * template, the parameter names it expects, and whether it is executed as a
 * range query or an instant query.
 *
 * <p>Instances are looked up by {@code id} (used as a unique map key) and fed
 * to {@code PromQueryTemplate} for parameter substitution.
 */
@Builder
public record GraphDescription(String id,
                               // null for instant queries, set for range queries;
                               // when set, it is the default sampling interval passed to the range query
                               @Nullable Duration defaultInterval,
                               // PromQL query template; placeholders are filled in from 'params'
                               String prometheusQuery,
                               // names of the template parameters callers must supply
                               Set<String> params) {

  /** A graph is a range query iff a default interval is configured. */
  public boolean isRange() {
    return defaultInterval != null;
  }
}

View file

@ -10,38 +10,26 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.Builder;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@Component @Component
public class GraphsStorage { class GraphDescriptions {
private static final Duration DEFAULT_RANGE_DURATION = Duration.ofDays(7); private static final Duration DEFAULT_RANGE_DURATION = Duration.ofDays(7);
@Builder
public record GraphDescription(String id,
@Nullable Duration defaultInterval,
String prometheusQuery,
Set<String> params) {
public boolean isRange() {
return defaultInterval != null;
}
}
private final Map<String, GraphDescription> graphsById; private final Map<String, GraphDescription> graphsById;
GraphsStorage() { GraphDescriptions() {
validateGraphDescr(PREDEFINED_GRAPHS); validateGraphDescr(PREDEFINED_GRAPHS);
this.graphsById = PREDEFINED_GRAPHS.stream() this.graphsById = PREDEFINED_GRAPHS.stream()
.collect(Collectors.toMap(GraphDescription::id, d -> d)); .collect(Collectors.toMap(GraphDescription::id, d -> d));
} }
Optional<GraphDescription> getDescription(String id) { Optional<GraphDescription> getById(String id) {
return Optional.ofNullable(graphsById.get(id)); return Optional.ofNullable(graphsById.get(id));
} }
Stream<GraphDescription> getAll() { Stream<GraphDescription> all() {
return graphsById.values().stream(); return graphsById.values().stream();
} }

View file

@ -4,7 +4,6 @@ import com.google.common.base.Preconditions;
import com.provectus.kafka.ui.exception.NotFoundException; import com.provectus.kafka.ui.exception.NotFoundException;
import com.provectus.kafka.ui.exception.ValidationException; import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.model.KafkaCluster; import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.graphs.GraphsStorage.GraphDescription;
import com.provectus.kafka.ui.service.metrics.prometheus.PromQueryTemplate; import com.provectus.kafka.ui.service.metrics.prometheus.PromQueryTemplate;
import java.time.Duration; import java.time.Duration;
import java.time.Instant; import java.time.Instant;
@ -24,7 +23,7 @@ public class GraphsService {
private static final int TARGET_MATRIX_DATA_POINTS = 200; private static final int TARGET_MATRIX_DATA_POINTS = 200;
private final GraphsStorage graphsStorage; private final GraphDescriptions graphDescriptions;
public Mono<QueryResponse> getGraphData(KafkaCluster cluster, public Mono<QueryResponse> getGraphData(KafkaCluster cluster,
String id, String id,
@ -32,16 +31,16 @@ public class GraphsService {
@Nullable Instant to, @Nullable Instant to,
@Nullable Map<String, String> params) { @Nullable Map<String, String> params) {
var graph = graphsStorage.getDescription(id) var graph = graphDescriptions.getById(id)
.orElseThrow(() -> new NotFoundException("No graph found with id = " + id)); .orElseThrow(() -> new NotFoundException("No graph found with id = " + id));
var promClient = cluster.getPrometheusStorageClient(); var promClient = cluster.getPrometheusStorageClient();
if (promClient == null) { if (promClient == null) {
throw new ValidationException("Prometheus not configured for cluster"); throw new ValidationException("Prometheus not configured for cluster");
} }
String preparedQuery = prepareQuery(graph, cluster.getName(), params);
return cluster.getPrometheusStorageClient() return cluster.getPrometheusStorageClient()
.mono(client -> { .mono(client -> {
String preparedQuery = prepareQuery(graph, cluster.getName(), params);
if (graph.isRange()) { if (graph.isRange()) {
return queryRange(client, preparedQuery, graph.defaultInterval(), from, to); return queryRange(client, preparedQuery, graph.defaultInterval(), from, to);
} }
@ -75,8 +74,7 @@ public class GraphsService {
if (intervalInSecs <= TARGET_MATRIX_DATA_POINTS) { if (intervalInSecs <= TARGET_MATRIX_DATA_POINTS) {
return intervalInSecs + "s"; return intervalInSecs + "s";
} }
int step = ((int) (((double) intervalInSecs) / 200)); int step = ((int) (((double) intervalInSecs) / TARGET_MATRIX_DATA_POINTS));
System.out.println("Chosen step size " + step); //TODo
return step + "s"; return step + "s";
} }
@ -84,12 +82,15 @@ public class GraphsService {
return c.query(preparedQuery, null, null); return c.query(preparedQuery, null, null);
} }
public static String prepareQuery(GraphDescription d, String clusterName, @Nullable Map<String, String> params) { private String prepareQuery(GraphDescription d, String clusterName, @Nullable Map<String, String> params) {
return new PromQueryTemplate(d).getQuery(clusterName, Optional.ofNullable(params).orElse(Map.of())); return new PromQueryTemplate(d).getQuery(clusterName, Optional.ofNullable(params).orElse(Map.of()));
} }
public Stream<GraphDescription> getAllGraphs() { public Stream<GraphDescription> getGraphs(KafkaCluster cluster) {
return graphsStorage.getAll(); if (cluster.getPrometheusStorageClient() == null) {
return Stream.empty();
}
return graphDescriptions.all();
} }
} }

View file

@ -1,12 +1,10 @@
package com.provectus.kafka.ui.service.metrics.prometheus; package com.provectus.kafka.ui.service.metrics.prometheus;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.Optional; import java.util.Optional;
import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException; import org.antlr.v4.runtime.misc.ParseCancellationException;
import org.antlr.v4.runtime.Recognizer;
import promql.PromQLLexer; import promql.PromQLLexer;
import promql.PromQLParser; import promql.PromQLParser;
@ -17,27 +15,23 @@ class PromQueryLangGrammar {
try { try {
parseExpression(query); parseExpression(query);
return Optional.empty(); return Optional.empty();
} catch (ValidationException v) { } catch (ParseCancellationException e) {
return Optional.of(v.getMessage()); //TODO: add more descriptive msg
return Optional.of("Syntax error");
} }
} }
static PromQLParser.ExpressionContext parseExpression(String query) { static PromQLParser.ExpressionContext parseExpression(String query) {
return parse(query).expression(); return createParser(query).expression();
} }
private static PromQLParser parse(String str) throws ValidationException { private static PromQLParser createParser(String str) {
PromQLLexer lexer = new PromQLLexer(CharStreams.fromString(str)); PromQLLexer lexer = new PromQLLexer(CharStreams.fromString(str));
lexer.addErrorListener(new BaseErrorListener() {
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
int line, int charPositionInLine,
String msg, RecognitionException e) {
throw new ValidationException("Invalid syntax: " + msg);
}
});
CommonTokenStream tokenStream = new CommonTokenStream(lexer); CommonTokenStream tokenStream = new CommonTokenStream(lexer);
return new PromQLParser(tokenStream); var parser = new PromQLParser(tokenStream);
parser.removeErrorListeners();
parser.setErrorHandler(new BailErrorStrategy());
return parser;
} }
} }

View file

@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.metrics.prometheus;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.provectus.kafka.ui.exception.ValidationException; import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.service.graphs.GraphsStorage; import com.provectus.kafka.ui.service.graphs.GraphDescription;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
@ -14,7 +14,7 @@ public class PromQueryTemplate {
private final String queryTemplate; private final String queryTemplate;
private final Set<String> paramsNames; private final Set<String> paramsNames;
public PromQueryTemplate(GraphsStorage.GraphDescription d) { public PromQueryTemplate(GraphDescription d) {
this(d.prometheusQuery(), d.params()); this(d.prometheusQuery(), d.params());
} }
@ -35,8 +35,9 @@ public class PromQueryTemplate {
public Optional<String> validateSyntax() { public Optional<String> validateSyntax() {
Map<String, String> fakeReplacements = new HashMap<>(); Map<String, String> fakeReplacements = new HashMap<>();
paramsNames.forEach(paramName -> fakeReplacements.put(paramName, "1"));
fakeReplacements.put("cluster", "1"); fakeReplacements.put("cluster", "1");
paramsNames.forEach(paramName -> fakeReplacements.put(paramName, "1"));
String prepared = replaceParams(fakeReplacements); String prepared = replaceParams(fakeReplacements);
return PromQueryLangGrammar.validateExpression(prepared); return PromQueryLangGrammar.validateExpression(prepared);
} }