diff --git a/documentation/compose/kafka-ui-arm64.yaml b/documentation/compose/kafka-ui-arm64.yaml
index 8b7ac667f2..91f8dc252c 100644
--- a/documentation/compose/kafka-ui-arm64.yaml
+++ b/documentation/compose/kafka-ui-arm64.yaml
@@ -10,17 +10,21 @@ services:
       - 8080:8080
     depends_on:
       - kafka0
+      - schema-registry0
+      - kafka-connect0
     environment:
-      KAFKA_CLUSTERS_0_NAME: cluster
+      DYNAMIC_CONFIG_ENABLED: 'true' # not necessary, added for tests
+      KAFKA_CLUSTERS_0_NAME: local
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
       KAFKA_CLUSTERS_0_METRICS_PORT: 9997
-      KAFKA_CLUSTERS_1_NAME: cluster2
-      KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka0:29092
       KAFKA_CLUSTERS_0_METRICS_STORE_PROMETHEUS_URL: "http://prometheus:9090"
       KAFKA_CLUSTERS_0_METRICS_STORE_PROMETHEUS_REMOTEWRITE: 'true'
+      KAFKA_CLUSTERS_0_METRICS_STORE_KAFKA_TOPIC: "kafka_metrics"
+      KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schema-registry0:8085
+      KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
+      KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
       KAFKA_CLUSTERS_0_AUDIT_TOPICAUDITENABLED: 'true'
       KAFKA_CLUSTERS_0_AUDIT_CONSOLEAUDITENABLED: 'true'
-      KAFKA_CLUSTERS_0_METRICS_STORE_KAFKA_TOPIC: "kafka_metrics"
 
   prometheus:
     image: prom/prometheus:latest
@@ -60,46 +64,46 @@ services:
       - ./scripts/update_run.sh:/tmp/update_run.sh
     command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
 
-#  schema-registry0:
-#    image: confluentinc/cp-schema-registry:7.2.1.arm64
-#    ports:
-#      - 8085:8085
-#    depends_on:
-#      - kafka0
-#    environment:
-#      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
-#      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
-#      SCHEMA_REGISTRY_HOST_NAME: schema-registry0
-#      SCHEMA_REGISTRY_LISTENERS: http://schema-registry0:8085
-#
-#      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
-#      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
-#      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+  schema-registry0:
+    image: confluentinc/cp-schema-registry:7.2.1.arm64
+    ports:
+      - 8085:8085
+    depends_on:
+      - kafka0
+    environment:
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
+      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry0
+      SCHEMA_REGISTRY_LISTENERS: http://schema-registry0:8085
 
-#  kafka-connect0:
-#    image: confluentinc/cp-kafka-connect:7.2.1.arm64
-#    ports:
-#      - 8083:8083
-#    depends_on:
-#      - kafka0
-#      - schema-registry0
-#    environment:
-#      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
-#      CONNECT_GROUP_ID: compose-connect-group
-#      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
-#      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
-#      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
-#      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
-#      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
-#      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
-#      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-#      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
-#      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-#      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
-#      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-#      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-#      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
-#      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
+      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+
+  kafka-connect0:
+    image: confluentinc/cp-kafka-connect:7.2.1.arm64
+    ports:
+      - 8083:8083
+    depends_on:
+      - kafka0
+      - schema-registry0
+    environment:
+      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
+      CONNECT_GROUP_ID: compose-connect-group
+      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
+      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
+      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
+      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
+      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
+      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
+      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
+      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry0:8085
+      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
+      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
+      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
 
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1.arm64
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index d364aaf297..d969ec7631 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -18,4 +18,4 @@ ENV JAVA_OPTS=
 EXPOSE 8080
 
 # see JmxSslSocketFactory docs to understand why add-opens is needed
-CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar
+CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/graphs/PromQueryTemplate.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/graphs/PromQueryTemplate.java
index 06325760e2..3f5b8ed058 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/graphs/PromQueryTemplate.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/graphs/PromQueryTemplate.java
@@ -10,6 +10,8 @@ import org.apache.commons.lang3.text.StrSubstitutor;
 
 class PromQueryTemplate {
 
+  private static final String CLUSTER_LABEL_NAME = "cluster";
+
   private final String queryTemplate;
   private final Set<String> paramsNames;
 
@@ -28,13 +30,14 @@ class PromQueryTemplate {
       throw new ValidationException("Not all params set for query, missing: " + missingParams);
     }
     Map<String, String> replacements = new HashMap<>(paramValues);
-    replacements.put("cluster", clusterName);
+    replacements.put(CLUSTER_LABEL_NAME, clusterName);
     return replaceParams(replacements);
   }
 
+  // returns error msg or empty if no errors found
   Optional<String> validateSyntax() {
     Map<String, String> fakeReplacements = new HashMap<>();
-    fakeReplacements.put("cluster", "1");
+    fakeReplacements.put(CLUSTER_LABEL_NAME, "1");
     paramsNames.forEach(paramName -> fakeReplacements.put(paramName, "1"));
 
     String prepared = replaceParams(fakeReplacements);