wip

parent 7e3831b715
commit a9a22b4025

7 changed files with 207 additions and 197 deletions
@@ -75,6 +75,7 @@ public class ClustersProperties {
     String keystoreLocation;
     String keystorePassword;

+    Boolean prometheusExpose;
     MetricsStorage store;
   }

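For orientation, below is a minimal sketch of how the new per-cluster metrics properties might appear in an application config file. The `prometheusExpose` and `store` keys mirror the fields added to ClustersProperties in this diff, and the nested `prometheus.url` key mirrors the `getStore().getPrometheus().getUrl()` call in KafkaClusterFactory further down; the surrounding `kafka.clusters` layout and the sample values are assumptions, not part of this commit.

# Hypothetical application.yaml fragment; key layout inferred from the fields
# touched in this commit, values are placeholders only.
kafka:
  clusters:
    - name: local
      bootstrapServers: localhost:9092
      metrics:
        prometheusExpose: true      # new "Boolean prometheusExpose" field; the factory defaults it to true when absent
        store:                      # new "MetricsStorage store" field
          prometheus:
            url: http://localhost:9090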
@@ -23,6 +23,7 @@ public class PrometheusExposeController extends AbstractController implements Pr
         PrometheusExpose.exposeAllMetrics(
             clustersStorage.getKafkaClusters()
                 .stream()
+                .filter(KafkaCluster::isExposeMetricsViaPrometheusEndpoint)
                 .collect(Collectors.toMap(KafkaCluster::getName, c -> statisticsCache.get(c).getMetrics()))
         )
     );
@@ -30,9 +31,13 @@ public class PrometheusExposeController extends AbstractController implements Pr

   @Override
   public Mono<ResponseEntity<String>> getAllClusterMetrics(String clusterName, ServerWebExchange exchange) {
+    var cluster = getCluster(clusterName);
+    if (!cluster.isExposeMetricsViaPrometheusEndpoint()) {
+      return Mono.empty();
+    }
     return Mono.just(
         PrometheusExpose.exposeClusterMetrics(
-            statisticsCache.get(getCluster(clusterName)).getMetrics()
+            statisticsCache.get(cluster).getMetrics()
         )
     );
   }
@@ -41,9 +46,13 @@ public class PrometheusExposeController extends AbstractController implements Pr
   public Mono<ResponseEntity<String>> getBrokerMetrics(String clusterName,
                                                        Long brokerId,
                                                        ServerWebExchange exchange) {
+    var cluster = getCluster(clusterName);
+    if (!cluster.isExposeMetricsViaPrometheusEndpoint()) {
+      return Mono.empty();
+    }
     return Mono.just(
         PrometheusExpose.exposeBrokerMetrics(
-            statisticsCache.get(getCluster(clusterName)).getMetrics(), brokerId.intValue()
+            statisticsCache.get(cluster).getMetrics(), brokerId.intValue()
         )
     );
   }
@@ -14,6 +14,7 @@ import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
+import prometheus.query.api.PrometheusClientApi;

 @Data
 @Builder(toBuilder = true)
@@ -26,10 +27,12 @@ public class KafkaCluster {
   private final String bootstrapServers;
   private final Properties properties;
   private final boolean readOnly;
+  private final boolean exposeMetricsViaPrometheusEndpoint;
   private final DataMasking masking;
   private final PollingSettings pollingSettings;
   private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
   private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
   private final ReactiveFailover<KsqlApiClient> ksqlClient;
   private final MetricsScrapping metricsScrapping;
+  private final ReactiveFailover<PrometheusClientApi> prometheusStorageClient;
 }
@@ -1,5 +1,12 @@
 package com.provectus.kafka.ui.service;

+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validateClusterConnection;
+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validateConnect;
+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validateKsql;
+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validatePrometheusStore;
+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validateSchemaRegistry;
+import static com.provectus.kafka.ui.util.KafkaServicesValidation.validateTruststore;
+
 import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.config.WebclientProperties;
@@ -25,8 +32,10 @@ import java.util.Properties;
 import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;
+import org.springframework.util.StringUtils;
 import org.springframework.util.unit.DataSize;
 import org.springframework.web.reactive.function.client.WebClient;
+import prometheus.query.api.PrometheusClientApi;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.util.function.Tuple2;
@@ -56,6 +65,7 @@ public class KafkaClusterFactory {
     builder.bootstrapServers(clusterProperties.getBootstrapServers());
     builder.properties(convertProperties(clusterProperties.getProperties()));
     builder.readOnly(clusterProperties.isReadOnly());
+    builder.exposeMetricsViaPrometheusEndpoint(exposeMetricsViaPrometheusEndpoint(clusterProperties));
     builder.masking(DataMasking.create(clusterProperties.getMasking()));
     builder.pollingSettings(PollingSettings.create(clusterProperties, properties));
     builder.metricsScrapping(MetricsScrapping.create(clusterProperties, jmxMetricsRetriever));
@@ -69,13 +79,16 @@ public class KafkaClusterFactory {
     if (ksqlConfigured(clusterProperties)) {
       builder.ksqlClient(ksqlClient(clusterProperties));
     }
+    if (prometheusStorageConfigured(clusterProperties)) {
+      builder.prometheusStorageClient(prometheusStorageClient(clusterProperties));
+    }
     builder.originalProperties(clusterProperties);
     return builder.build();
   }

   public Mono<ClusterConfigValidationDTO> validate(ClustersProperties.Cluster clusterProperties) {
     if (clusterProperties.getSsl() != null) {
-      Optional<String> errMsg = KafkaServicesValidation.validateTruststore(clusterProperties.getSsl());
+      Optional<String> errMsg = validateTruststore(clusterProperties.getSsl());
       if (errMsg.isPresent()) {
         return Mono.just(new ClusterConfigValidationDTO()
             .kafka(new ApplicationPropertyValidationDTO()
@@ -85,40 +98,51 @@ public class KafkaClusterFactory {
     }

     return Mono.zip(
-        KafkaServicesValidation.validateClusterConnection(
+        validateClusterConnection(
             clusterProperties.getBootstrapServers(),
             convertProperties(clusterProperties.getProperties()),
             clusterProperties.getSsl()
         ),
         schemaRegistryConfigured(clusterProperties)
-            ? KafkaServicesValidation.validateSchemaRegistry(
-                () -> schemaRegistryClient(clusterProperties)).map(Optional::of)
+            ? validateSchemaRegistry(() -> schemaRegistryClient(clusterProperties)).map(Optional::of)
             : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty()),

         ksqlConfigured(clusterProperties)
-            ? KafkaServicesValidation.validateKsql(() -> ksqlClient(clusterProperties)).map(Optional::of)
+            ? validateKsql(() -> ksqlClient(clusterProperties)).map(Optional::of)
             : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty()),

         connectClientsConfigured(clusterProperties)
             ?
             Flux.fromIterable(clusterProperties.getKafkaConnect())
                 .flatMap(c ->
-                    KafkaServicesValidation.validateConnect(() -> connectClient(clusterProperties, c))
+                    validateConnect(() -> connectClient(clusterProperties, c))
                         .map(r -> Tuples.of(c.getName(), r)))
                 .collectMap(Tuple2::getT1, Tuple2::getT2)
                 .map(Optional::of)
             :
-            Mono.<Optional<Map<String, ApplicationPropertyValidationDTO>>>just(Optional.empty())
+            Mono.<Optional<Map<String, ApplicationPropertyValidationDTO>>>just(Optional.empty()),
+
+        prometheusStorageConfigured(clusterProperties)
+            ? validatePrometheusStore(() -> prometheusStorageClient(clusterProperties)).map(Optional::of)
+            : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty())
+
     ).map(tuple -> {
       var validation = new ClusterConfigValidationDTO();
       validation.kafka(tuple.getT1());
       tuple.getT2().ifPresent(validation::schemaRegistry);
       tuple.getT3().ifPresent(validation::ksqldb);
       tuple.getT4().ifPresent(validation::kafkaConnects);
+      tuple.getT5().ifPresent(validation::prometheusStorage);
       return validation;
     });
   }

+  private boolean exposeMetricsViaPrometheusEndpoint(ClustersProperties.Cluster clusterProperties) {
+    return Optional.ofNullable(clusterProperties.getMetrics())
+        .map(m -> Boolean.TRUE.equals(m.getPrometheusExpose()))
+        .orElse(true);
+  }
+
   private Properties convertProperties(Map<String, Object> propertiesMap) {
     Properties properties = new Properties();
     if (propertiesMap != null) {
@@ -153,6 +177,28 @@ public class KafkaClusterFactory {
     );
   }

+  private ReactiveFailover<PrometheusClientApi> prometheusStorageClient(ClustersProperties.Cluster cluster) {
+    WebClient webClient = new WebClientConfigurator()
+        .configureSsl(cluster.getSsl(), cluster.getSchemaRegistrySsl())
+        .configureBufferSize(webClientMaxBuffSize)
+        .build();
+    return ReactiveFailover.create(
+        parseUrlList(cluster.getMetrics().getStore().getPrometheus().getUrl()),
+        url -> new PrometheusClientApi(new prometheus.query.ApiClient(webClient, null, null).setBasePath(url)),
+        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+        "No live schemaRegistry instances available",
+        ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+    );
+  }
+
+  private boolean prometheusStorageConfigured(ClustersProperties.Cluster cluster) {
+    return Optional.ofNullable(cluster.getMetrics())
+        .flatMap(m -> Optional.ofNullable(m.getStore()))
+        .flatMap(s -> Optional.of(s.getPrometheus()))
+        .map(p -> StringUtils.hasText(p.getUrl()))
+        .orElse(false);
+  }
+
   private boolean schemaRegistryConfigured(ClustersProperties.Cluster clusterProperties) {
     return clusterProperties.getSchemaRegistry() != null;
   }
@@ -19,6 +19,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.admin.AdminClient;
 import org.apache.kafka.clients.admin.AdminClientConfig;
 import org.springframework.util.ResourceUtils;
+import prometheus.query.api.PrometheusClientApi;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

@@ -141,5 +142,18 @@ public final class KafkaServicesValidation {
         .onErrorResume(KafkaServicesValidation::invalid);
   }

+  public static Mono<ApplicationPropertyValidationDTO> validatePrometheusStore(
+      Supplier<ReactiveFailover<PrometheusClientApi>> clientSupplier) {
+    ReactiveFailover<PrometheusClientApi> client;
+    try {
+      client = clientSupplier.get();
+    } catch (Exception e) {
+      log.error("Error creating Prometheus client", e);
+      return invalid("Error creating Prometheus client: " + e.getMessage());
+    }
+    return client.mono(c -> c.query("1", null, null)) //TODO: check params
+        .then(valid())
+        .onErrorResume(KafkaServicesValidation::invalid);
+  }

 }
@@ -3677,6 +3677,8 @@ components:
           $ref: '#/components/schemas/ApplicationPropertyValidation'
         ksqldb:
           $ref: '#/components/schemas/ApplicationPropertyValidation'
+        prometheusStorage:
+          $ref: '#/components/schemas/ApplicationPropertyValidation'

     ApplicationConfig:
       type: object
@@ -2,10 +2,6 @@ openapi: 3.0.1
 info:
   title: |
     Prometheus query HTTP API
-    Copied from https://raw.githubusercontent.com/HelloKunal/OpenAPI-Specification-of-Go-API/main/swagger.yaml
-  description: |
-    The current stable HTTP API is reachable under /api/v1 on a Prometheus server. Any non-breaking additions will be added under that endpoint.
-  termsOfService: urn:tos
   version: 0.1.0
   contact: { }

|
||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/responseLabelValues'
|
$ref: '#/components/schemas/LabelValuesResponse'
|
||||||
|
|
||||||
/api/v1/labels:
|
/api/v1/labels:
|
||||||
get:
|
get:
|
||||||
|
@@ -89,7 +85,7 @@ paths:
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/responseLabelNames'
+                $ref: '#/components/schemas/LabelNamesResponse'

   /api/v1/metadata:
     get:
@@ -104,7 +100,7 @@ paths:
           description: Maximum number of metrics to return.
           required: true
           schema:
-            type: number
+            type: integer
        - name: metric
          in: query
          description: A metric name to filter metadata for. All metric metadata is retrieved if left empty.
@@ -116,14 +112,14 @@ paths:
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/responseMetadata'
+                $ref: '#/components/schemas/MetadataResponse'
         201:
           description: |
             Success
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/responseMetadata'
+                $ref: '#/components/schemas/MetadataResponse'

   /api/v1/query:
     get:
@@ -162,7 +158,7 @@ paths:
           content:
             application/json:
               schema:
-                $ref: "#/components/schemas/queryData"
+                $ref: "#/components/schemas/QueryResponse"


   /api/v1/query_range:
@@ -212,34 +208,7 @@ paths:
           content:
             application/json:
               schema:
-                $ref: "#/components/schemas/responseQuery_range"
+                $ref: "#/components/schemas/QueryResponse"
-                example:
-                  status: success
-                  data:
-                    resultType: matrix
-                    result:
-                      - metric:
-                          __name__: up
-                          job: prometheus
-                          instance: localhost:9090
-                        values:
-                          - - 1.435781430781E9
-                            - "1"
-                          - - 1.435781445781E9
-                            - "1"
-                          - - 1.435781460781E9
-                            - "1"
-                      - metric:
-                          __name__: up
-                          job: node
-                          instance: localhost:9091
-                        values:
-                          - - 1.435781430781E9
-                            - "0"
-                          - - 1.435781445781E9
-                            - "0"
-                          - - 1.435781460781E9
-                            - "1"


   /api/v1/series:
@@ -278,83 +247,95 @@ paths:
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/responseSeries'
+                $ref: '#/components/schemas/SeriesResponse'
-                example:
-                  status: success
-                  data:
-                    - __name__: up
-                      job: prometheus
-                      instance: localhost:9090
-                    - __name__: up
-                      job: node
-                      instance: localhost:9091
-                    - __name__: process_start_time_seconds
-                      job: prometheus
-                      instance: localhost:9090

 components:
   schemas:
-    Label:
+    BaseResponse:
       type: object
+      required: [ status ]
       properties:
-        Name:
+        status:
           type: string
-        Value:
+          enum: [ "success", "error" ]
+        error:
           type: string
-      description: Label is a key/value pair of strings.
-    Labels:
-      type: array
-      description: |-
-        Labels is a sorted set of labels. Order has to be guaranteed upon
-        instantiation.
-      items:
-        $ref: '#/components/schemas/Label'
-    MetricType:
+        errorType:
          type: string
-      description: MetricType represents metric type values.
+        warnings:

-    metadata:
-      type: object
-      properties:
-        Help:
-          type: string
-        Type:
-          $ref: '#/components/schemas/MetricType'
-        Unit:
-          type: string
-
-    queryData:
-      type: object
-      properties:
-        Result:
-          type: object
-          properties:
-            metric:
-              type: object
-              properties:
-                __name__:
-                  type: string
-                job:
-                  type: string
-                instance:
-                  type: string
-            value:
          type: array
          items:
-                oneOf:
-                  - type: string
-                    format: "unix_timestamp"
-                  - type: string
-                    format: "sample_value"
-    ResultType:
            type: string
-      enum:
-        - matrix
-        - vector
-        - scalar
-        - string
-        -
-    responseSeries:
+    QueryResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/BaseResponse"
+      properties:
+        data:
+          $ref: '#/components/schemas/QueryResponseData'
+
+    QueryResponseData:
+      type: object
+      required: [ "resultType" ]
+      properties:
+        resultType:
+          type: string
+      discriminator:
+        propertyName: resultType
+        mapping:
+          matrix: '#/components/schemas/MatrixQueryResponse'
+          vector: '#/components/schemas/InstantVectorQueryResponse'
+          scalar: '#/components/schemas/ScalarQueryResponse'
+          string: '#/components/schemas/StringQueryResponse'
+      anyOf:
+        - $ref: '#/components/schemas/MatrixQueryResponse'
+        - $ref: '#/components/schemas/InstantVectorQueryResponse'
+        - $ref: '#/components/schemas/ScalarQueryResponse'
+        - $ref: '#/components/schemas/StringQueryResponse'
+
+    MatrixQueryResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/QueryResponseData"
+      properties:
+        result:
+          type: array
+          items: { }
+
+    InstantVectorQueryResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/QueryResponseData"
+      properties:
+        result:
+          type: array
+          items: { }
+
+    ScalarQueryResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/QueryResponseData"
+      properties:
+        result:
+          type: array
+          items: { }
+
+    StringQueryResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/QueryResponseData"
+      properties:
+        result:
+          type: array
+          items: { }
+
+    SeriesResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/BaseResponse"
+      properties:
+        data:
          type: array
          description: a list of objects that contain the label name/value pairs which
            identify each series
@@ -368,82 +349,36 @@ components:
             instance:
               type: string

-    responseSnapshot:
+    MetadataResponse:
       type: object
+      allOf:
+        - $ref: "#/components/schemas/BaseResponse"
       properties:
-        name:
-          type: string
-
-    responseQuery_exemplars:
+        data:
          type: object
-      properties:
-        seriesLabels:
-          type: object
-          properties:
-            __name__:
-              type: string
-            job:
-              type: string
-            instance:
-              type: string
-            service:
-              type: string
-        exemplars:
-          type: object
-          properties:
-            labels:
-              type: object
-              properties:
-                traceID:
-                  type: string
-            values:
-              type: string
-            timestamp:
-              type: string
-              format: "unix_timestamp"
-
-    responseQuery_range:
-      type: object
-      properties:
-        resultType:
-          type: string
-        result:
-          type: object
-          properties:
-            metric:
-              type: object
-              properties:
-                __name__:
-                  type: string
-                job:
-                  type: string
-                instance:
-                  type: string
-            values:
+          additionalProperties:
            type: array
            items:
-                oneOf:
-                  - type: string
-                    format: "unix_timestamp"
-                  - type: string
-                    format: "sample_value"
-
-    responseMetadata:
              type: object
-      properties:
-        metric name:
-          type: string
-          additionalProperties:
-            $ref: '#/components/schemas/metadata'
-      description: a (key, object) map. `metric name`is an example key
+              additionalProperties: true

-    responseLabelValues:
+    LabelValuesResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/BaseResponse"
+      properties:
+        data:
          type: array
          description: a list of string label values
          items:
            type: string

-    responseLabelNames:
+    LabelNamesResponse:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/BaseResponse"
+      properties:
+        data:
          type: array
          description: a list of string label names
          items:
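To make the reworked schemas above concrete, this is roughly the payload shape an instant query response would match under the new QueryResponse/BaseResponse split: status and error fields come from BaseResponse, and the discriminated resultType/result pair comes from QueryResponseData. The sample values follow the well-known Prometheus HTTP API format and are illustrative only; they are not taken from this commit.

# Illustrative GET /api/v1/query response body, shown as YAML, assuming the
# QueryResponse and QueryResponseData schemas defined above.
status: success          # BaseResponse.status, enum [ success, error ]
data:                    # QueryResponse.data -> QueryResponseData
  resultType: vector     # discriminator value -> InstantVectorQueryResponse
  result:
    - metric:
        __name__: up
        job: prometheus
        instance: localhost:9090
      value:
        - 1690000000.781
        - "1"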