iliax 1 year ago
parent
commit
3789693777
20 changed files with 1038 additions and 160 deletions
  1. 36 27
      documentation/compose/kafka-ui-arm64.yaml
  2. 14 0
      documentation/compose/scripts/prometheus.yaml
  3. 2 2
      kafka-ui-api/Dockerfile
  4. 1 1
      kafka-ui-api/pom.xml
  5. 176 0
      kafka-ui-api/src/main/antlr4/promql/PromQLLexer.g4
  6. 114 0
      kafka-ui-api/src/main/antlr4/promql/PromQLParser.g4
  7. 18 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
  8. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java
  9. 4 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/PrometheusExposeController.java
  10. 18 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsScrapeProperties.java
  11. 43 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/prometheus/PromQlGrammar.java
  12. 20 18
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/prometheus/PrometheusExpose.java
  13. 20 34
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/MetricsScrapping.java
  14. 23 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/MetricsSink.java
  15. 17 20
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/PrometheusPushGatewaySink.java
  16. 24 45
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/PrometheusRemoteWriteSink.java
  17. 2 2
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusEndpointParserTest.java
  18. 25 1
      kafka-ui-contract/pom.xml
  19. 1 1
      kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  20. 479 0
      kafka-ui-contract/src/main/resources/swagger/prometheus-query-api.yaml

+ 36 - 27
documentation/compose/kafka-ui-arm64.yaml

@@ -11,18 +11,27 @@ services:
     depends_on:
       - kafka0
       - schema-registry0
-      - kafka-connect0
     environment:
       KAFKA_CLUSTERS_0_NAME: local
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
       KAFKA_CLUSTERS_0_METRICS_PORT: 9997
+      KAFKA_CLUSTERS_0_METRICS_STORE_PROMETHEUS_URL: "http://prometheus:9090"
+      KAFKA_CLUSTERS_0_METRICS_STORE_PROMETHEUS_REMOTEWRITE: 'true'
       KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schema-registry0:8085
-      KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
-      KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
       DYNAMIC_CONFIG_ENABLED: 'true'  # not necessary, added for tests
       KAFKA_CLUSTERS_0_AUDIT_TOPICAUDITENABLED: 'true'
       KAFKA_CLUSTERS_0_AUDIT_CONSOLEAUDITENABLED: 'true'
 
+  prometheus:
+    image: prom/prometheus:latest
+    hostname: prometheus
+    container_name: prometheus
+    ports:
+      - 9090:9090
+    volumes:
+      - ./scripts:/etc/prometheus
+    command: --web.enable-remote-write-receiver --config.file=/etc/prometheus/prometheus.yaml
+
   kafka0:
     image: confluentinc/cp-kafka:7.2.1.arm64
     hostname: kafka0
@@ -67,30 +76,30 @@ services:
       SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
       SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
 
-  kafka-connect0:
-    image: confluentinc/cp-kafka-connect:7.2.1.arm64
-    ports:
-      - 8083:8083
-    depends_on:
-      - kafka0
-      - schema-registry0
-    environment:
-      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
-      CONNECT_GROUP_ID: compose-connect-group
-      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
-      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
-      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
-      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
-      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
-      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
-      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
+#  kafka-connect0:
+#    image: confluentinc/cp-kafka-connect:7.2.1.arm64
+#    ports:
+#      - 8083:8083
+#    depends_on:
+#      - kafka0
+#      - schema-registry0
+#    environment:
+#      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
+#      CONNECT_GROUP_ID: compose-connect-group
+#      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
+#      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
+#      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
+#      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
+#      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
+#      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
+#      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+#      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+#      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+#      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+#      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+#      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+#      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+#      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
 
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1.arm64

+ 14 - 0
documentation/compose/scripts/prometheus.yaml

@@ -0,0 +1,14 @@
+global:
+  scrape_interval: 30s
+  scrape_timeout: 10s
+
+rule_files:
+  - alert.yml
+
+scrape_configs:
+  - job_name: services
+    metrics_path: /metrics
+    static_configs:
+      - targets:
+          - 'prometheus:9090'
+#          - 'kafka-ui:8080'

+ 2 - 2
kafka-ui-api/Dockerfile

@@ -1,5 +1,5 @@
 #FROM azul/zulu-openjdk-alpine:17-jre-headless
-FROM azul/zulu-openjdk-alpine@sha256:a36679ac0d28cb835e2a8c00e1e0d95509c6c51c5081c7782b85edb1f37a771a
+FROM azul/zulu-openjdk-alpine:17-jre-headless
 
 RUN apk add --no-cache gcompat # need to make snappy codec work
 RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
@@ -18,4 +18,4 @@ ENV JAVA_OPTS=
 EXPOSE 8080
 
 # see JmxSslSocketFactory docs to understand why add-opens is needed
-CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED  $JAVA_OPTS -jar kafka-ui-api.jar
+CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar

+ 1 - 1
kafka-ui-api/pom.xml

@@ -249,7 +249,7 @@
         <dependency>
             <groupId>org.xerial.snappy</groupId>
             <artifactId>snappy-java</artifactId>
-            <version>1.1.9.1</version>
+            <version>1.1.8.4</version>
         </dependency>
 
         <dependency>

+ 176 - 0
kafka-ui-api/src/main/antlr4/promql/PromQLLexer.g4

@@ -0,0 +1,176 @@
+lexer grammar PromQLLexer;
+
+channels { WHITESPACE, COMMENTS }
+
+// All keywords in PromQL are case-insensitive; only function,
+// label and metric names are case-sensitive.
+options { caseInsensitive=true; }
+
+fragment NUMERAL: [0-9]+ ('.' [0-9]+)?;
+
+fragment SCIENTIFIC_NUMBER
+   : NUMERAL ('e' [-+]? NUMERAL)?
+   ;
+
+NUMBER
+    : NUMERAL
+    | SCIENTIFIC_NUMBER;
+
+STRING
+    : '\'' (~('\'' | '\\') | '\\' .)* '\''
+    | '"' (~('"' | '\\') | '\\' .)* '"'
+    ;
+
+// Binary operators
+
+ADD:  '+';
+SUB:  '-';
+MULT: '*';
+DIV:  '/';
+MOD:  '%';
+POW:  '^';
+
+AND:    'and';
+OR:     'or';
+UNLESS: 'unless';
+
+// Comparison operators
+
+EQ:  '=';
+DEQ: '==';
+NE:  '!=';
+GT:  '>';
+LT:  '<';
+GE:  '>=';
+LE:  '<=';
+RE:  '=~';
+NRE: '!~';
+
+// Aggregation modifiers
+
+BY:      'by';
+WITHOUT: 'without';
+
+// Join modifiers
+
+ON:          'on';
+IGNORING:    'ignoring';
+GROUP_LEFT:  'group_left';
+GROUP_RIGHT: 'group_right';
+
+OFFSET: 'offset';
+
+BOOL: 'bool';
+
+AGGREGATION_OPERATOR
+    : 'sum'
+    | 'min'
+    | 'max'
+    | 'avg'
+    | 'group'
+    | 'stddev'
+    | 'stdvar'
+    | 'count'
+    | 'count_values'
+    | 'bottomk'
+    | 'topk'
+    | 'quantile'
+    ;
+
+FUNCTION options { caseInsensitive=false; }
+    : 'abs'
+    | 'absent'
+    | 'absent_over_time'
+    | 'ceil'
+    | 'changes'
+    | 'clamp_max'
+    | 'clamp_min'
+    | 'day_of_month'
+    | 'day_of_week'
+    | 'days_in_month'
+    | 'delta'
+    | 'deriv'
+    | 'exp'
+    | 'floor'
+    | 'histogram_quantile'
+    | 'holt_winters'
+    | 'hour'
+    | 'idelta'
+    | 'increase'
+    | 'irate'
+    | 'label_join'
+    | 'label_replace'
+    | 'ln'
+    | 'log2'
+    | 'log10'
+    | 'minute'
+    | 'month'
+    | 'predict_linear'
+    | 'rate'
+    | 'resets'
+    | 'round'
+    | 'scalar'
+    | 'sort'
+    | 'sort_desc'
+    | 'sqrt'
+    | 'time'
+    | 'timestamp'
+    | 'vector'
+    | 'year'
+    | 'avg_over_time'
+    | 'min_over_time'
+    | 'max_over_time'
+    | 'sum_over_time'
+    | 'count_over_time'
+    | 'quantile_over_time'
+    | 'stddev_over_time'
+    | 'stdvar_over_time'
+    | 'last_over_time'
+    | 'acos'
+    | 'acosh'
+    | 'asin'
+    | 'asinh'
+    | 'atan'
+    | 'atanh'
+    | 'cos'
+    | 'cosh'
+    | 'sin'
+    | 'sinh'
+    | 'tan'
+    | 'tanh'
+    | 'deg'
+    | 'pi'
+    | 'rad'
+    ;
+
+LEFT_BRACE:  '{';
+RIGHT_BRACE: '}';
+
+LEFT_PAREN:  '(';
+RIGHT_PAREN: ')';
+
+LEFT_BRACKET:  '[';
+RIGHT_BRACKET: ']';
+
+COMMA: ',';
+
+AT: '@';
+
+SUBQUERY_RANGE
+     : LEFT_BRACKET DURATION ':' DURATION? RIGHT_BRACKET;
+
+TIME_RANGE
+    : LEFT_BRACKET DURATION RIGHT_BRACKET;
+
+// Duration unit ordering (longest to shortest) must be validated after parsing
+DURATION: ([0-9]+ ('ms' | [smhdwy]))+;
+
+METRIC_NAME: [a-z_:] [a-z0-9_:]*;
+LABEL_NAME:  [a-z_] [a-z0-9_]*;
+
+
+
+WS: [\r\t\n ]+ -> channel(WHITESPACE);
+SL_COMMENT
+    : '#' .*? '\n' -> channel(COMMENTS)
+    ;

+ 114 - 0
kafka-ui-api/src/main/antlr4/promql/PromQLParser.g4

@@ -0,0 +1,114 @@
+parser grammar PromQLParser;
+
+options { tokenVocab = PromQLLexer; }
+
+expression: vectorOperation EOF;
+
+// Binary operations are ordered by precedence
+
+// Unary operations have the same precedence as multiplications
+
+vectorOperation
+    : <assoc=right> vectorOperation powOp vectorOperation
+    | <assoc=right> vectorOperation subqueryOp
+    | unaryOp vectorOperation
+    | vectorOperation multOp vectorOperation
+    | vectorOperation addOp vectorOperation
+    | vectorOperation compareOp vectorOperation
+    | vectorOperation andUnlessOp vectorOperation
+    | vectorOperation orOp vectorOperation
+    | vectorOperation vectorMatchOp vectorOperation
+    | vectorOperation AT vectorOperation
+    | vector
+    ;
+
+// Operators
+
+unaryOp:        (ADD | SUB);
+powOp:          POW grouping?;
+multOp:         (MULT | DIV | MOD) grouping?;
+addOp:          (ADD | SUB) grouping?;
+compareOp:      (DEQ | NE | GT | LT | GE | LE) BOOL? grouping?;
+andUnlessOp:    (AND | UNLESS) grouping?;
+orOp:           OR grouping?;
+vectorMatchOp:  (ON | UNLESS) grouping?;
+subqueryOp:     SUBQUERY_RANGE offsetOp?;
+offsetOp:       OFFSET DURATION;
+
+vector
+    : function_
+    | aggregation
+    | instantSelector
+    | matrixSelector
+    | offset
+    | literal
+    | parens
+    ;
+
+parens: LEFT_PAREN vectorOperation RIGHT_PAREN;
+
+// Selectors
+
+instantSelector
+    : METRIC_NAME (LEFT_BRACE labelMatcherList? RIGHT_BRACE)?
+    | LEFT_BRACE labelMatcherList RIGHT_BRACE
+    ;
+
+labelMatcher:         labelName labelMatcherOperator STRING;
+labelMatcherOperator: EQ | NE | RE | NRE;
+labelMatcherList:     labelMatcher (COMMA labelMatcher)* COMMA?;
+
+matrixSelector: instantSelector TIME_RANGE;
+
+offset
+    : instantSelector OFFSET DURATION
+    | matrixSelector OFFSET DURATION
+    ;
+
+// Functions
+
+function_: FUNCTION LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN;
+
+parameter:     literal | vectorOperation;
+parameterList: LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN;
+
+// Aggregations
+
+aggregation
+    : AGGREGATION_OPERATOR parameterList
+    | AGGREGATION_OPERATOR (by | without) parameterList
+    | AGGREGATION_OPERATOR parameterList ( by | without)
+    ;
+by:      BY labelNameList;
+without: WITHOUT labelNameList;
+
+// Vector one-to-one/one-to-many joins
+
+grouping:   (on_ | ignoring) (groupLeft | groupRight)?;
+on_:         ON labelNameList;
+ignoring:   IGNORING labelNameList;
+groupLeft:  GROUP_LEFT labelNameList?;
+groupRight: GROUP_RIGHT labelNameList?;
+
+// Label names
+
+labelName:     keyword | METRIC_NAME | LABEL_NAME;
+labelNameList: LEFT_PAREN (labelName (COMMA labelName)*)? RIGHT_PAREN;
+
+keyword
+    : AND
+    | OR
+    | UNLESS
+    | BY
+    | WITHOUT
+    | ON
+    | IGNORING
+    | GROUP_LEFT
+    | GROUP_RIGHT
+    | OFFSET
+    | BOOL
+    | AGGREGATION_OPERATOR
+    | FUNCTION
+    ;
+
+literal: NUMBER | STRING;

+ 18 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -74,6 +74,24 @@ public class ClustersProperties {
     String password;
     String keystoreLocation;
     String keystorePassword;
+
+    MetricsStorage store;
+  }
+
+  @Data
+  public static class MetricsStorage {
+    PrometheusStorage prometheus;
+  }
+
+  @Data
+  @ToString(exclude = {"pushGatewayPassword"})
+  public static class PrometheusStorage {
+    String url;
+    String pushGatewayUrl;
+    String pushGatewayUsername;
+    String pushGatewayPassword;
+    String pushGatewayJobName;
+    Boolean remoteWrite;
   }
 
   @Data

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/AbstractAuthSecurityConfig.java

@@ -19,7 +19,7 @@ abstract class AbstractAuthSecurityConfig {
       "/oauth2/**",
       "/static/**",
       "/api/clusters/**/prometheus/expose/**",
-      "/api/prometheus/expose/all"
+      "/metrics"
   };
 
 }

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/PrometheusExposeController.java

@@ -3,7 +3,7 @@ package com.provectus.kafka.ui.controller;
 import com.provectus.kafka.ui.api.PrometheusExposeApi;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.service.StatisticsCache;
-import com.provectus.kafka.ui.service.metrics.PrometheusEndpointExpose;
+import com.provectus.kafka.ui.service.metrics.prometheus.PrometheusExpose;
 import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
 import org.springframework.http.ResponseEntity;
@@ -20,7 +20,7 @@ public class PrometheusExposeController extends AbstractController implements Pr
   @Override
   public Mono<ResponseEntity<String>> getAllMetrics(ServerWebExchange exchange) {
     return Mono.just(
-        PrometheusEndpointExpose.exposeAllMetrics(
+        PrometheusExpose.exposeAllMetrics(
             clustersStorage.getKafkaClusters()
                 .stream()
                 .collect(Collectors.toMap(KafkaCluster::getName, c -> statisticsCache.get(c).getMetrics()))
@@ -31,7 +31,7 @@ public class PrometheusExposeController extends AbstractController implements Pr
   @Override
   public Mono<ResponseEntity<String>> getAllClusterMetrics(String clusterName, ServerWebExchange exchange) {
     return Mono.just(
-        PrometheusEndpointExpose.exposeClusterMetrics(
+        PrometheusExpose.exposeClusterMetrics(
             statisticsCache.get(getCluster(clusterName)).getMetrics()
         )
     );
@@ -42,7 +42,7 @@ public class PrometheusExposeController extends AbstractController implements Pr
                                                        Long brokerId,
                                                        ServerWebExchange exchange) {
     return Mono.just(
-        PrometheusEndpointExpose.exposeBrokerMetrics(
+        PrometheusExpose.exposeBrokerMetrics(
             statisticsCache.get(getCluster(clusterName)).getMetrics(), brokerId.intValue()
         )
     );

+ 18 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsScrapeProperties.java

@@ -3,7 +3,10 @@ package com.provectus.kafka.ui.model;
 import static com.provectus.kafka.ui.config.ClustersProperties.KeystoreConfig;
 import static com.provectus.kafka.ui.config.ClustersProperties.TruststoreConfig;
 
+import com.provectus.kafka.ui.config.ClustersProperties;
 import jakarta.annotation.Nullable;
+import java.util.Objects;
+import java.util.Optional;
 import lombok.Builder;
 import lombok.Value;
 
@@ -24,5 +27,20 @@ public class MetricsScrapeProperties {
   @Nullable
   TruststoreConfig truststoreConfig;
 
+  public static MetricsScrapeProperties create(ClustersProperties.Cluster cluster){
+    var metrics = Objects.requireNonNull(cluster.getMetrics());
+    return MetricsScrapeProperties.builder()
+        .port(metrics.getPort())
+        .ssl(Optional.ofNullable(metrics.getSsl()).orElse(false))
+        .username(metrics.getUsername())
+        .password(metrics.getPassword())
+        .truststoreConfig(cluster.getSsl())
+        .keystoreConfig(
+            metrics.getKeystoreLocation() != null
+                ? new KeystoreConfig(metrics.getKeystoreLocation(), metrics.getKeystorePassword())
+                : null
+        )
+        .build();
+  }
 
 }

+ 43 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/prometheus/PromQlGrammar.java

@@ -0,0 +1,43 @@
+package com.provectus.kafka.ui.service.metrics.prometheus;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import org.antlr.v4.runtime.BaseErrorListener;
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.Recognizer;
+import promql.PromQLLexer;
+import promql.PromQLParser;
+
+public class PromQlGrammar {
+
+  public static void main(String[] args) {
+    String promql = "sum( " +
+        "        kafka_controller_kafkacontroller_activecontrollercount{cluster_name=\"3299fef4\",metrics=\"kafka\"}) OR " +
+        "        kafka_controller_kafkacontroller_activecontrollercount{cluster_name=\"3299fef4\",job=\"topic-scanner\"}";
+    System.out.println(parseMetricSelector(promql));
+  }
+
+  public static PromQLParser.InstantSelectorContext parseMetricSelector(String selector) {
+    return parse(selector).instantSelector();
+  }
+
+  public static PromQLParser.ExpressionContext parseExpression(String query) {
+    return parse(query).expression();
+  }
+
+  private static PromQLParser parse(String str) {
+    PromQLLexer lexer = new PromQLLexer(CharStreams.fromString(str));
+    lexer.addErrorListener(new BaseErrorListener() {
+      @Override
+      public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
+                              int line, int charPositionInLine,
+                              String msg, RecognitionException e) {
+        throw new ValidationException("Invalid syntax: " + msg);
+      }
+    });
+    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
+    return new PromQLParser(tokenStream);
+  }
+
+}

+ 20 - 18
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusEndpointExpose.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/prometheus/PrometheusExpose.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.prometheus;
 
 import static io.prometheus.client.Collector.MetricFamilySamples;
 
@@ -17,22 +17,24 @@ import lombok.SneakyThrows;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.ResponseEntity;
 
-public final class PrometheusEndpointExpose {
+public final class PrometheusExpose {
 
-  private PrometheusEndpointExpose() {
+  private static final String CLUSTER_SELECTION_EXPOSE_LBL_NAME = "cluster";
+
+  private PrometheusExpose() {
   }
 
   public static ResponseEntity<String> exposeAllMetrics(Map<String, Metrics> clustersMetrics) {
-    return constructResponse(getSummarizedMetricsWithClusterLbl(clustersMetrics));
+    return constructHttpsResponse(getMetricsForGlobalExpose(clustersMetrics));
   }
 
   public static ResponseEntity<String> exposeClusterMetrics(Metrics clusterMetrics) {
-    return constructResponse(clusterMetrics.getSummarizedMetrics());
+    return constructHttpsResponse(clusterMetrics.getSummarizedMetrics());
   }
 
   public static ResponseEntity<String> exposeBrokerMetrics(Metrics clusterMetrics, int brokerId) {
     //TODO: discuss - do we need to append broker_id lbl ?
-    return constructResponse(
+    return constructHttpsResponse(
         clusterMetrics
             .getPerBrokerScrapedMetrics()
             .getOrDefault(brokerId, List.of())
@@ -40,26 +42,27 @@ public final class PrometheusEndpointExpose {
     );
   }
 
-  private static Stream<MetricFamilySamples> getSummarizedMetricsWithClusterLbl(Map<String, Metrics> clustersMetrics) {
+  private static Stream<MetricFamilySamples> getMetricsForGlobalExpose(Map<String, Metrics> clustersMetrics) {
     return clustersMetrics.entrySet()
         .stream()
-        .flatMap(e -> e.getValue()
-            .getSummarizedMetrics()
-            .map(mfs -> addLbl(mfs, "cluster", e.getKey())))
-        // merging MFS with same name, keeping order
+        .flatMap(e -> prepareMetricsForGlobalExpose(e.getKey(), e.getValue()))
+        // merge MFS entries with the same name, using LinkedHashMap to keep insertion order
         .collect(Collectors.toMap(mfs -> mfs.name, mfs -> mfs,
-            PrometheusEndpointExpose::concatSamples, LinkedHashMap::new))
+            PrometheusExpose::concatSamples, LinkedHashMap::new))
         .values()
         .stream();
   }
 
+  public static Stream<MetricFamilySamples> prepareMetricsForGlobalExpose(String clusterName, Metrics metrics) {
+    return metrics
+        .getSummarizedMetrics()
+        .map(mfs -> addLbl(mfs, CLUSTER_SELECTION_EXPOSE_LBL_NAME, clusterName));
+  }
+
   private static MetricFamilySamples concatSamples(MetricFamilySamples mfs1,
                                                    MetricFamilySamples mfs2) {
     return new MetricFamilySamples(
-        mfs1.name,
-        mfs1.unit,
-        mfs1.type,
-        mfs1.help,
+        mfs1.name, mfs1.unit, mfs1.type, mfs1.help,
         Stream.concat(mfs1.samples.stream(), mfs2.samples.stream()).toList()
     );
   }
@@ -87,13 +90,12 @@ public final class PrometheusEndpointExpose {
 
   @VisibleForTesting
   @SneakyThrows
-  public static ResponseEntity<String> constructResponse(Stream<MetricFamilySamples> metrics) {
+  public static ResponseEntity<String> constructHttpsResponse(Stream<MetricFamilySamples> metrics) {
     StringWriter writer = new StringWriter();
     TextFormat.writeOpenMetrics100(writer, Iterators.asEnumeration(metrics.iterator()));
 
     HttpHeaders responseHeaders = new HttpHeaders();
     responseHeaders.set(HttpHeaders.CONTENT_TYPE, TextFormat.CONTENT_TYPE_OPENMETRICS_100);
-
     return ResponseEntity
         .ok()
         .headers(responseHeaders)

+ 20 - 34
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/MetricsScrapping.java

@@ -4,9 +4,11 @@ import static com.provectus.kafka.ui.config.ClustersProperties.Cluster;
 import static com.provectus.kafka.ui.config.ClustersProperties.KeystoreConfig;
 import static com.provectus.kafka.ui.model.MetricsScrapeProperties.JMX_METRICS_TYPE;
 import static com.provectus.kafka.ui.model.MetricsScrapeProperties.PROMETHEUS_METRICS_TYPE;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 import com.provectus.kafka.ui.model.Metrics;
 import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.prometheus.PrometheusExpose;
 import com.provectus.kafka.ui.service.metrics.scrape.inferred.InferredMetrics;
 import com.provectus.kafka.ui.service.metrics.scrape.inferred.InferredMetricsScraper;
 import com.provectus.kafka.ui.service.metrics.scrape.jmx.JmxMetricsRetriever;
@@ -16,6 +18,7 @@ import com.provectus.kafka.ui.service.metrics.sink.MetricsSink;
 import jakarta.annotation.Nullable;
 import java.util.Collection;
 import java.util.Optional;
+import java.util.stream.Stream;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.common.Node;
@@ -25,56 +28,36 @@ import reactor.core.publisher.Mono;
 @RequiredArgsConstructor
 public class MetricsScrapping {
 
+  private final String clusterName;
+  private final MetricsSink sink;
   private final InferredMetricsScraper inferredMetricsScraper;
-
   @Nullable
   private final JmxMetricsScraper jmxMetricsScraper;
-
   @Nullable
   private final PrometheusScraper prometheusScraper;
 
-  private final MetricsSink sink;
-
   public static MetricsScrapping create(Cluster cluster,
                                         JmxMetricsRetriever jmxMetricsRetriever) {
     JmxMetricsScraper jmxMetricsScraper = null;
     PrometheusScraper prometheusScraper = null;
-    MetricsSink sink = MetricsSink.noop();
-
     var metrics = cluster.getMetrics();
     if (cluster.getMetrics() != null) {
-      var scrapeProperties = createScrapeProps(cluster);
+      var scrapeProperties = MetricsScrapeProperties.create(cluster);
       if (metrics.getType().equalsIgnoreCase(JMX_METRICS_TYPE) && metrics.getPort() != null) {
         jmxMetricsScraper = new JmxMetricsScraper(scrapeProperties, jmxMetricsRetriever);
       } else if (metrics.getType().equalsIgnoreCase(PROMETHEUS_METRICS_TYPE)) {
         prometheusScraper = new PrometheusScraper(scrapeProperties);
       }
-      sink = MetricsSink.create(cluster.getMetrics());
     }
     return new MetricsScrapping(
+        cluster.getName(),
+        MetricsSink.create(cluster),
         new InferredMetricsScraper(),
         jmxMetricsScraper,
-        prometheusScraper,
-        sink
+        prometheusScraper
     );
   }
 
-  private static MetricsScrapeProperties createScrapeProps(Cluster cluster) {
-    var metrics = cluster.getMetrics();
-    return MetricsScrapeProperties.builder()
-        .port(metrics.getPort())
-        .ssl(Optional.ofNullable(metrics.getSsl()).orElse(false))
-        .username(metrics.getUsername())
-        .password(metrics.getPassword())
-        .truststoreConfig(cluster.getSsl())
-        .keystoreConfig(
-            metrics.getKeystoreLocation() != null
-                ? new KeystoreConfig(metrics.getKeystoreLocation(), metrics.getKeystorePassword())
-                : null
-        )
-        .build();
-  }
-
   public Mono<Metrics> scrape(ScrapedClusterState clusterState, Collection<Node> nodes) {
     Mono<InferredMetrics> inferred = inferredMetricsScraper.scrape(clusterState);
     Mono<PerBrokerScrapedMetrics> external = scrapeExternal(nodes);
@@ -85,14 +68,17 @@ public class MetricsScrapping {
             .ioRates(ext.ioRates())
             .perBrokerScrapedMetrics(ext.perBrokerMetrics())
             .build()
-    ).flatMap(metrics ->
-        sink.send(metrics.getSummarizedMetrics())
-            .onErrorResume(th -> {
-                  log.warn("Error sending metrics to metrics sink", th);
-                  return Mono.empty();
-                }
-            ).thenReturn(metrics)
-    );
+    ).doOnNext(this::sendMetricsToSink);
+  }
+
+  private void sendMetricsToSink(Metrics metrics) {
+    sink.send(prepareMetricsForSending(metrics))
+        .doOnError(th -> log.warn("Error sending metrics to metrics sink", th))
+        .subscribe();
+  }
+
+  private Stream<MetricFamilySamples> prepareMetricsForSending(Metrics metrics) {
+    return PrometheusExpose.prepareMetricsForGlobalExpose(clusterName, metrics);
   }
 
   private Mono<PerBrokerScrapedMetrics> scrapeExternal(Collection<Node> nodes) {

+ 23 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/MetricsSink.java

@@ -1,19 +1,38 @@
 package com.provectus.kafka.ui.service.metrics.sink;
 
 import static io.prometheus.client.Collector.MetricFamilySamples;
+import static org.springframework.util.StringUtils.hasText;
 
 import com.provectus.kafka.ui.config.ClustersProperties;
+import java.util.Optional;
 import java.util.stream.Stream;
 import reactor.core.publisher.Mono;
 
 public interface MetricsSink {
 
-  static MetricsSink noop() {
-    return m -> Mono.empty();
+  static MetricsSink create(ClustersProperties.Cluster cluster) {
+    return Optional.ofNullable(cluster.getMetrics())
+        .flatMap(metrics -> Optional.ofNullable(metrics.getStore()))
+        .flatMap(store -> Optional.ofNullable(store.getPrometheus()))
+        .map(prometheusConf -> {
+              if (hasText(prometheusConf.getUrl()) && Boolean.TRUE.equals(prometheusConf.getRemoteWrite())) {
+                return new PrometheusRemoteWriteSink(prometheusConf.getUrl());
+              }
+              if (hasText(prometheusConf.getPushGatewayUrl())) {
+                return PrometheusPushGatewaySink.create(
+                    prometheusConf.getPushGatewayUrl(),
+                    prometheusConf.getPushGatewayJobName(),
+                    prometheusConf.getPushGatewayUsername(),
+                    prometheusConf.getPushGatewayPassword()
+                );
+              }
+              return noop();
+            }
+        ).orElse(noop());
   }
 
-  static MetricsSink create(ClustersProperties.MetricsConfig metricsConfig) {
-
+  static MetricsSink noop() {
+    return m -> Mono.empty();
   }
 
   Mono<Void> send(Stream<MetricFamilySamples> metrics);

+ 17 - 20
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/PrometheusPushGatewaySink.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.service.metrics.sink;
 
 import static io.prometheus.client.Collector.MetricFamilySamples;
+import static org.springframework.util.StringUtils.hasText;
 
 import io.prometheus.client.Collector;
 import io.prometheus.client.exporter.BasicAuthHttpConnectionFactory;
@@ -12,54 +13,50 @@ import java.util.Optional;
 import java.util.stream.Stream;
 import lombok.RequiredArgsConstructor;
 import lombok.SneakyThrows;
-import org.springframework.util.StringUtils;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
 import reactor.core.scheduler.Schedulers;
 
 @RequiredArgsConstructor
 class PrometheusPushGatewaySink implements MetricsSink {
 
-  private final static String DEFAULT_PGW_JOBNAME = "kafkaui";
+  private static final String DEFAULT_PGW_JOB_NAME = "kafkaui";
 
   private final PushGateway pushGateway;
   private final String job;
-  //TODO: read about grouping rules
 
   @SneakyThrows
   static PrometheusPushGatewaySink create(String url,
-                                          @Nullable String job,
+                                          @Nullable String jobName,
                                           @Nullable String username,
                                           @Nullable String passw) {
     var pushGateway = new PushGateway(new URL(url));
-    if (StringUtils.hasText(username) && StringUtils.hasText(passw)) {
+    if (hasText(username) && hasText(passw)) {
       pushGateway.setConnectionFactory(new BasicAuthHttpConnectionFactory(username, passw));
     }
     return new PrometheusPushGatewaySink(
         pushGateway,
-        Optional.ofNullable(job).orElse(DEFAULT_PGW_JOBNAME)
+        Optional.ofNullable(jobName).orElse(DEFAULT_PGW_JOB_NAME)
     );
   }
 
   @Override
   public Mono<Void> send(Stream<MetricFamilySamples> metrics) {
-    return Mono.<Void>fromRunnable(() -> pushSync(metrics.toList()))
+    List<MetricFamilySamples> metricsToPush = metrics.toList();
+    if (metricsToPush.isEmpty()) {
+      return Mono.empty();
+    }
+    return Mono.<Void>fromRunnable(() -> pushSync(metricsToPush))
         .subscribeOn(Schedulers.boundedElastic());
   }
 
   @SneakyThrows
   private void pushSync(List<MetricFamilySamples> metricsToPush) {
-    if (metricsToPush.isEmpty()) {
-      return;
-    }
-    pushGateway.push(
-        new Collector() {
-          @Override
-          public List<MetricFamilySamples> collect() {
-            return metricsToPush;
-          }
-        },
-        job
-    );
+    Collector allMetrics = new Collector() {
+      @Override
+      public List<MetricFamilySamples> collect() {
+        return metricsToPush;
+      }
+    };
+    pushGateway.push(allMetrics, job);
   }
 }

+ 24 - 45
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/sink/PrometheusRemoteWriteSink.java

@@ -1,51 +1,41 @@
 package com.provectus.kafka.ui.service.metrics.sink;
 
-import static io.prometheus.client.Collector.*;
-import static prometheus.Types.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
+import static prometheus.Types.Label;
+import static prometheus.Types.Sample;
+import static prometheus.Types.TimeSeries;
 
-import com.google.common.base.Enums;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
-import groovy.lang.Tuple;
-import io.prometheus.client.Collector;
 import java.io.IOException;
 import java.io.StringWriter;
-import java.io.Writer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
+import java.net.URI;
 import java.util.stream.Stream;
 import lombok.RequiredArgsConstructor;
 import lombok.SneakyThrows;
-import org.springframework.http.MediaType;
 import org.springframework.util.unit.DataSize;
 import org.springframework.web.reactive.function.client.WebClient;
-import org.xerial.snappy.Snappy;
+import org.xerial.snappy.pure.PureJavaSnappy;
 import prometheus.Remote;
-import prometheus.Types;
-import prometheus.Types.MetricMetadata.MetricType;
 import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
 
 @RequiredArgsConstructor
 class PrometheusRemoteWriteSink implements MetricsSink {
 
   private final WebClient webClient;
-  private final String writeEndpoint;
+  private final URI writeEndpoint;
 
   PrometheusRemoteWriteSink(String prometheusUrl) {
-    this.writeEndpoint = prometheusUrl + "/api/v1/write";
+    this.writeEndpoint = URI.create(prometheusUrl).resolve("/api/v1/write");
     this.webClient = new WebClientConfigurator().configureBufferSize(DataSize.ofMegabytes(20)).build();
   }
 
   @SneakyThrows
   @Override
   public Mono<Void> send(Stream<MetricFamilySamples> metrics) {
-    byte[] bytesToWrite = Snappy.compress(createWriteRequest(metrics).toByteArray());
+    byte[] bytesToWrite = compressSnappy(createWriteRequest(metrics).toByteArray());
     return webClient.post()
         .uri(writeEndpoint)
-        //.contentType(MediaType.APPLICATION_FORM_URLENCODED)
-        .contentType(MediaType.parseMediaType("application/x-protobuf")) //???
+        .header("Content-Type", "application/x-protobuf")
         .header("User-Agent", "promremote-kui/0.1.0")
         .header("Content-Encoding", "snappy")
         .header("X-Prometheus-Remote-Write-Version", "0.1.0")
@@ -55,56 +45,45 @@ class PrometheusRemoteWriteSink implements MetricsSink {
         .then();
   }
 
-  private Remote.WriteRequest createWriteRequest(Stream<MetricFamilySamples> metrics) {
-    var tsAndMeta = createTimeSeries(metrics);
-    return Remote.WriteRequest.newBuilder()
-        .addAllTimeseries(tsAndMeta.getT1())
-        .addAllMetadata(tsAndMeta.getT2())
-        .build();
+  //TODO: rm this
+  private static byte[] compressSnappy(byte[] data) throws IOException {
+    PureJavaSnappy impl = new PureJavaSnappy();
+    byte[] buf = new byte[impl.maxCompressedLength(data.length)];
+    int compressedByteSize = impl.rawCompress(data, 0, data.length, buf, 0);
+    byte[] result = new byte[compressedByteSize];
+    System.arraycopy(buf, 0, result, 0, compressedByteSize);
+    return result;
   }
 
-  public Tuple2<List<TimeSeries>, List<MetricMetadata>> createTimeSeries(Stream<MetricFamilySamples> metrics) {
+  private static Remote.WriteRequest createWriteRequest(Stream<MetricFamilySamples> metrics) {
     long currentTs = System.currentTimeMillis();
-    List<TimeSeries> timeSeriesList = new ArrayList<>();
-    List<MetricMetadata> metadatasList = new ArrayList<>();
+    Remote.WriteRequest.Builder request = Remote.WriteRequest.newBuilder();
     metrics.forEach(mfs -> {
       for (MetricFamilySamples.Sample sample : mfs.samples) {
         TimeSeries.Builder timeSeriesBuilder = TimeSeries.newBuilder();
         timeSeriesBuilder.addLabels(
-            Label.newBuilder()
-                .setName("__name__")
-                .setValue(escapedLabelValue(sample.name))
-                .build()
+            Label.newBuilder().setName("__name__").setValue(sample.name)
         );
         for (int i = 0; i < sample.labelNames.size(); i++) {
           timeSeriesBuilder.addLabels(
               Label.newBuilder()
                   .setName(sample.labelNames.get(i))
                   .setValue(escapedLabelValue(sample.labelValues.get(i)))
-                  .build()
           );
         }
         timeSeriesBuilder.addSamples(
             Sample.newBuilder()
                 .setValue(sample.value)
                 .setTimestamp(currentTs)
-                .build()
-        );
-        timeSeriesList.add(timeSeriesBuilder.build());
-        metadatasList.add(
-            MetricMetadata.newBuilder()
-                .setType(Enums.getIfPresent(MetricType.class, mfs.type.toString()).or(MetricType.UNKNOWN))
-                .setHelp(mfs.help)
-                .setUnit(mfs.unit)
-                .build()
         );
+        request.addTimeseries(timeSeriesBuilder);
       }
     });
-    return Tuples.of(timeSeriesList, metadatasList);
+    //TODO: how to pass Metadata????
+    return request.build();
   }
 
   private static String escapedLabelValue(String s) {
-    //TODO: refactor
     StringWriter writer = new StringWriter(s.length());
     for (int i = 0; i < s.length(); i++) {
       char c = s.charAt(i);

+ 2 - 2
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusEndpointParserTest.java

@@ -9,7 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
-import com.provectus.kafka.ui.service.metrics.PrometheusEndpointExpose;
+import com.provectus.kafka.ui.service.metrics.prometheus.PrometheusExpose;
 import io.prometheus.client.Collector;
 import io.prometheus.client.CollectorRegistry;
 import io.prometheus.client.Counter;
@@ -26,7 +26,7 @@ class PrometheusEndpointParserTest {
   @Test
   void parsesAllGeneratedMetricTypes() {
     List<MetricFamilySamples> original = generateMfs();
-    String exposed = PrometheusEndpointExpose.constructResponse(original.stream()).getBody();
+    String exposed = PrometheusExpose.constructHttpsResponse(original.stream()).getBody();
     List<MetricFamilySamples> parsed = parse(exposed.lines());
     assertThat(parsed).containsExactlyElementsOf(original);
   }

+ 25 - 1
kafka-ui-contract/pom.xml

@@ -156,6 +156,30 @@
                                     </configOptions>
                                 </configuration>
                             </execution>
+                            <execution>
+                                <id>generate-prometheus-query-api</id>
+                                <goals>
+                                    <goal>generate</goal>
+                                </goals>
+                                <configuration>
+                                    <inputSpec>${project.basedir}/src/main/resources/swagger/prometheus-query-api.yaml
+                                    </inputSpec>
+                                    <output>${project.build.directory}/generated-sources/prometheus-query-api</output>
+                                    <generatorName>java</generatorName>
+                                    <generateApiTests>false</generateApiTests>
+                                    <generateModelTests>false</generateModelTests>
+                                    <configOptions>
+                                        <modelPackage>prometheus.query.model</modelPackage>
+                                        <apiPackage>prometheus.query.api</apiPackage>
+                                        <sourceFolder>prometheus-query</sourceFolder>
+                                        <asyncNative>true</asyncNative>
+                                        <library>webclient</library>
+                                        <useJakartaEe>true</useJakartaEe>
+                                        <useBeanValidation>true</useBeanValidation>
+                                        <dateLibrary>java8</dateLibrary>
+                                    </configOptions>
+                                </configuration>
+                            </execution>
                         </executions>
                     </plugin>
                     <plugin>
@@ -262,7 +286,7 @@
                         </executions>
                         <configuration>
                             <attachProtoSources>false</attachProtoSources>
-                            <protoSourceRoot>${project.basedir}/src/main/resources/proto</protoSourceRoot>
+                            <protoSourceRoot>${project.basedir}/src/main/resources/proto/prometheus-remote-write-api</protoSourceRoot>
                             <includes>
                                 <include>**/*.proto</include>
                             </includes>

+ 1 - 1
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -157,7 +157,7 @@ paths:
               schema:
                 $ref: '#/components/schemas/ClusterMetrics'
 
-  /api/prometheus/expose/all:
+  /metrics:
     get:
       tags:
         - PrometheusExpose

+ 479 - 0
kafka-ui-contract/src/main/resources/swagger/prometheus-query-api.yaml

@@ -0,0 +1,479 @@
+openapi: 3.0.1
+info:
+  title: |
+    Prometheus query HTTP API
+    Copied from https://raw.githubusercontent.com/HelloKunal/OpenAPI-Specification-of-Go-API/main/swagger.yaml
+  description: |
+    The current stable HTTP API is reachable under /api/v1 on a Prometheus server. Any non-breaking additions will be added under that endpoint.
+  termsOfService: urn:tos
+  version: 0.1.0
+  contact: { }
+
+tags:
+  - name: /promclient
+servers:
+  - url: /localhost
+
+
+paths:
+  /api/v1/label/{label_name}/values:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Returns label values
+      description: "returns a list of label values for a provided label name"
+      operationId: labelValuesGET
+      parameters:
+        - name: label_name
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: start
+          in: query
+          description: Start timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: end
+          in: query
+          description: End timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: match[]
+          in: query
+          description: Repeated series selector argument that selects the series from which to read the label values. Optional.
+          schema:
+            type: string
+            format: series_selector
+      responses:
+        200:
+          description: |
+            Success
+
+            This example queries for all label values for the job label
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/responseLabelValues'
+
+  /api/v1/labels:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Returns label names
+      description: returns a list of label names
+      operationId: labelNamesGET
+      parameters:
+        - name: start
+          in: query
+          description: |
+            Start timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: end
+          in: query
+          description: |
+            End timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: match[]
+          in: query
+          description: Repeated series selector argument that selects the series from which to read the label names. Optional.
+          schema:
+            type: string
+            format: series_selector
+      responses:
+        200:
+          description: Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/responseLabelNames'
+
+  /api/v1/metadata:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Returns metric metadata
+      description: returns metadata about metrics currently scraped from targets
+      operationId: metricMetadataGET
+      parameters:
+        - name: limit
+          in: query
+          description: |
+            Maximum number of metrics to return.
+          required: true
+          schema:
+            type: integer
+        - name: metric
+          in: query
+          description: |
+            A metric name to filter metadata for. All metric metadata is retrieved if left empty.
+          schema:
+            type: string
+      responses:
+        200:
+          description: |
+            Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/responseMetadata'
+        201:
+          description: |
+            Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/responseMetadata'
+
+  /api/v1/query:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Evaluates instant query
+      description: |
+        Evaluates an instant query at a single point in time
+      operationId: queryGET
+      parameters:
+        - name: query
+          in: query
+          description: |
+            Prometheus expression query string.
+          required: true
+          schema:
+            type: string
+        - name: time
+          in: query
+          description: |
+            Evaluation timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: timeout
+          in: query
+          description: |
+            Evaluation timeout. Optional.
+          schema:
+            type: string
+            format: duration
+      responses:
+        200:
+          description: |
+            Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/queryData'
+
+
+  /api/v1/query_range:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Evaluates query over range of time.
+      description: |
+        The following endpoint evaluates an expression query over a range of time
+
+        You can URL-encode these parameters directly in the request body by using the ```POST``` method and ```Content-Type: application/x-www-form-urlencoded``` header. This is useful when specifying a large query that may breach server-side URL character limits.
+
+        The data section of the query result has the following format
+        ```
+        {
+          "resultType": "matrix",
+          "result": <value>
+        }
+        ```
+        For the format of the ```<value>``` placeholder, see the [range-vector result format](https://prometheus.io/docs/prometheus/latest/querying/api/#range-vectors).
+      operationId: queryRangeGET
+      parameters:
+        - name: query
+          in: query
+          description: |
+            Prometheus expression query string.
+
+            Example: ```?query=up```
+          required: true
+          schema:
+            type: string
+        - name: start
+          in: query
+          description: |
+            Start timestamp.
+
+            Example: ```&start=2015-07-01T20:10:30.781Z```
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: end
+          in: query
+          description: |
+            End timestamp.
+
+            Example: ```&end=2015-07-01T20:11:00.781Z```
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: step
+          in: query
+          description: |
+            Query resolution step width in ```duration``` format or float number of seconds.
+          schema:
+            type: string
+            format: duration | float
+        - name: timeout
+          in: query
+          description: |
+            Evaluation timeout. Optional. Defaults to and is capped by the value of the ```-query.timeout``` flag.
+          schema:
+            type: string
+            format: duration
+      responses:
+        200:
+          description: |
+            Success
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/responseQuery_range"
+              example:
+                status: success
+                data:
+                  resultType: matrix
+                  result:
+                    - metric:
+                        __name__: up
+                        job: prometheus
+                        instance: localhost:9090
+                      values:
+                        - - 1.435781430781E9
+                          - "1"
+                        - - 1.435781445781E9
+                          - "1"
+                        - - 1.435781460781E9
+                          - "1"
+                    - metric:
+                        __name__: up
+                        job: node
+                        instance: localhost:9091
+                      values:
+                        - - 1.435781430781E9
+                          - "0"
+                        - - 1.435781445781E9
+                          - "0"
+                        - - 1.435781460781E9
+                          - "1"
+
+
+  /api/v1/series:
+    get:
+      tags:
+        - PrometheusClient
+      summary: Returns time series
+      operationId: seriesGET
+      parameters:
+        - name: start
+          in: query
+          description: |
+            Start timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: end
+          in: query
+          description: |
+            End timestamp. Optional.
+          schema:
+            type: string
+            format: rfc3339 | unix_timestamp
+        - name: match[]
+          in: query
+          description: |
+            Repeated series selector argument that selects the series to return. At least one ```match[]``` argument must be provided.
+          required: true
+          schema:
+            type: string
+            format: series_selector
+      responses:
+        200:
+          description: |
+            Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/responseSeries'
+              example:
+                status: success
+                data:
+                  - __name__: up
+                    job: prometheus
+                    instance: localhost:9090
+                  - __name__: up
+                    job: node
+                    instance: localhost:9091
+                  - __name__: process_start_time_seconds
+                    job: prometheus
+                    instance: localhost:9090
+
+components:
+  schemas:
+    Label:
+      type: object
+      properties:
+        Name:
+          type: string
+        Value:
+          type: string
+      description: Label is a key/value pair of strings.
+    Labels:
+      type: array
+      description: |-
+        Labels is a sorted set of labels. Order has to be guaranteed upon
+        instantiation.
+      items:
+        $ref: '#/components/schemas/Label'
+    MetricType:
+      type: string
+      description: MetricType represents metric type values.
+
+    metadata:
+      type: object
+      properties:
+        Help:
+          type: string
+        Type:
+          $ref: '#/components/schemas/MetricType'
+        Unit:
+          type: string
+
+    queryData:
+      type: object
+      properties:
+        Result:
+          type: object
+          properties:
+            metric:
+              type: object
+              properties:
+                __name__:
+                  type: string
+                job:
+                  type: string
+                instance:
+                  type: string
+            value:
+              type: array
+              items:
+                oneOf:
+                  - type: string
+                    format: "unix_timestamp"
+                  - type: string
+                    format: "sample_value"
+        ResultType:
+          type: string
+          enum:
+            - matrix
+            - vector
+            - scalar
+            - string
+    responseSeries:
+      type: array
+      description: a list of objects that contain the label name/value pairs which
+        identify each series
+      items:
+        type: object
+        properties:
+          __name__:
+            type: string
+          job:
+            type: string
+          instance:
+            type: string
+
+    responseSnapshot:
+      type: object
+      properties:
+        name:
+          type: string
+
+    responseQuery_exemplars:
+      type: object
+      properties:
+        seriesLabels:
+          type: object
+          properties:
+            __name__:
+              type: string
+            job:
+              type: string
+            instance:
+              type: string
+            service:
+              type: string
+        exemplars:
+          type: object
+          properties:
+            labels:
+              type: object
+              properties:
+                traceID:
+                  type: string
+            values:
+              type: string
+            timestamp:
+              type: string
+              format: "unix_timestamp"
+
+    responseQuery_range:
+      type: object
+      properties:
+        resultType:
+          type: string
+        result:
+          type: object
+          properties:
+            metric:
+              type: object
+              properties:
+                __name__:
+                  type: string
+                job:
+                  type: string
+                instance:
+                  type: string
+            values:
+              type: array
+              items:
+                oneOf:
+                  - type: string
+                    format: "unix_timestamp"
+                  - type: string
+                    format: "sample_value"
+
+    responseMetadata:
+      type: object
+      properties:
+        metric name:
+          type: string
+      additionalProperties:
+        $ref: '#/components/schemas/metadata'
+      description: a (key, object) map. `metric name` is an example key
+
+    responseLabelValues:
+      type: array
+      description: a list of string label values
+      items:
+        type: string
+
+    responseLabelNames:
+      type: array
+      description: a list of string label names
+      items:
+        type: string
+
+