From 3083a2d4302c3ff98519eba24003c671ce726748 Mon Sep 17 00:00:00 2001
From: Kefu Chai
Date: Sun, 19 May 2024 18:46:49 +0800
Subject: [PATCH] tests: test protobuf support in prometheus_test.py

In this change, we

* add a test for the protobuf support in prometheus_test.py
* drop test_metrics.py, as all the tests in it have been moved into
  prometheus_test.py

Signed-off-by: Kefu Chai
---
 tests/unit/prometheus_test.py | 60 ++++++++++++++++++++++++
 tests/unit/test_metrics.py    | 88 -----------------------------------
 2 files changed, 60 insertions(+), 88 deletions(-)
 delete mode 100755 tests/unit/test_metrics.py

diff --git a/tests/unit/prometheus_test.py b/tests/unit/prometheus_test.py
index 3fc442d96eb..d9f523826c7 100755
--- a/tests/unit/prometheus_test.py
+++ b/tests/unit/prometheus_test.py
@@ -22,9 +22,11 @@
 import argparse
 import math
+import json
 import re
 import subprocess
 import sys
+import time
 import unittest
 import urllib.request
 import urllib.parse
@@ -231,6 +233,8 @@ class TestPrometheus(unittest.TestCase):
     exporter_process = None
     exporter_config = None
     port = 10001
+    prometheus = None
+    prometheus_scrape_interval = 15
 
     @classmethod
     def setUpClass(cls) -> None:
@@ -335,6 +339,54 @@ def test_help(self) -> None:
             else:
                 self.assertIsNone(msg)
 
+    @staticmethod
+    def _from_native_histogram(values) -> dict[float, float]:
+        results = {}
+        for v in values:
+            bucket = Exposition.value_to_bucket(float(v[2]) - 1)
+            results[bucket] = float(v[3])
+        return results
+
+    @staticmethod
+    def _query_prometheus(host: str, query: str, type_: str) -> float | dict[float, float]:
+        url = f'http://{host}/api/v1/query?query={query}'
+        headers = {"Accept": "application/json"}
+        req = urllib.request.Request(url, headers=headers)
+        with urllib.request.urlopen(req) as f:
+            results = json.load(f)["data"]["result"][0]
+        if type_ == 'histogram':
+            buckets = results["histogram"][1]["buckets"]
+            return TestPrometheus._from_native_histogram(buckets)
+        return float(results["value"][1])
+
+    def test_protobuf(self) -> None:
+        if self.prometheus is None:
+            self.skipTest("prometheus is not configured")
+
+        # Prometheus does not allow us to push metrics to it, nor
+        # can we force it to scrape an exporter, so we have to wait
+        # until prometheus scrapes the server
+        time.sleep(self.prometheus_scrape_interval + 1)
+        with open(self.exporter_config, encoding='utf-8') as f:
+            config = yaml.safe_load(f)
+
+        labels = {'private': '1'}
+        for metric in config['metrics']:
+            name = metric['name']
+            metric_name = f'{Metrics.prefix}_{Metrics.group}_{name}'
+            metric_labels = metric['labels']
+            if metric_labels != labels:
+                continue
+            metric_type = metric['type']
+            metric_value = metric['values']
+            e = Exposition.from_conf(metric_name,
+                                     metric_type,
+                                     metric_value,
+                                     metric_labels)
+            res = self._query_prometheus(self.prometheus,
+                                         metric_name,
+                                         metric_type)
+            self.assertEqual(res, e.value)
 
 
 if __name__ == '__main__':
@@ -345,8 +397,16 @@ def test_help(self) -> None:
     parser.add_argument('--config',
                         required=True,
                         help='Path to the metrics definition file')
+    parser.add_argument('--prometheus',
+                        help='A Prometheus to connect to')
+    parser.add_argument('--prometheus-scrape-interval',
+                        type=int,
+                        help='Prometheus scrape interval (in seconds)',
+                        default=15)
     opts, remaining = parser.parse_known_args()
     remaining.insert(0, sys.argv[0])
     TestPrometheus.exporter_path = opts.exporter
     TestPrometheus.exporter_config = opts.config
+    TestPrometheus.prometheus = opts.prometheus
+    TestPrometheus.prometheus_scrape_interval = opts.prometheus_scrape_interval
     unittest.main(argv=remaining)
diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py
deleted file mode 100755
index b5a5773e42f..00000000000
--- a/tests/unit/test_metrics.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import requests
-import yaml
-import math
-import re
-
-MATCH_TYPE = re.compile("# TYPE (.*) (.*)")
-MATCH_VALUE = re.compile(r".*\{.*\} ([\d]+)")
-MATCH_HISTOGRAM = re.compile(r'.*\{.*le="([\d]+\.[\d]+)".*\} ([\d]+)')
-
-
-def from_native_histogram(values):
-    results = {}
-    for v in values:
-        results[val_to_bucket(float(v[2]) - 1)] = float(v[3])
-    return results
-
-
-def query_prometheus(host, query, type):
-    url = "http://" + host + "/api/v1/query?query=" + query
-    r = requests.get(url, headers={"Accept": "application/json"})
-    results = r.json()["data"]["result"][0]
-    return (
-        from_native_histogram(results["histogram"][1]["buckets"])
-        if type == "histogram"
-        else float(results["value"][1])
-    )
-
-
-def val_to_bucket(val):
-    low = 2 ** math.floor(math.log(val, 2))
-    high = 2 * low
-    dif = (high - low) / 4
-    return low + dif * math.floor((val - low) / dif)
-
-
-def mk_histogram(values):
-    hist = {}
-    for val in values:
-        bucket = val_to_bucket(val)
-        if bucket not in hist:
-            hist[bucket] = 1
-        else:
-            hist[bucket] = hist[bucket] + 1
-    return hist
-
-
-def conf_to_metrics(conf):
-    res = {}
-    for c in conf["metrics"]:
-        name = "seastar_test_group_" + c["name"]
-        res[name] = c
-        res[name]["value"] = (
-            mk_histogram(c["values"]) if c["type"] == "histogram" else c["values"][0]
-        )
-    return res
-
-
-parser = argparse.ArgumentParser(
-    description="Validate that the text and protobuf metrics representative work as expected. You will need to run metrics_tester and a Prometheus server that reads from the metrics_tester",
-    conflict_handler="resolve",
-)
-parser.add_argument(
-    "-h",
-    "--host",
-    default="localhost:9180/metrics",
-    help="A host to connect to (the metrics_tester)",
-)
-parser.add_argument(
-    "-p", "--prometheus", default="localhost:9090", help="A Prometheus to connect to"
-)
-parser.add_argument(
-    "-c", "--config", default="conf.yaml", help="The metrics definition file"
-)
-args = parser.parse_args()
-
-with open(args.config, "r") as file:
-    metrics = yaml.safe_load(file)
-    conf_metrics = conf_to_metrics(metrics)
-
-# Validate protobuf
-for v in conf_metrics:
-    res = query_prometheus(args.prometheus, v, conf_metrics[v]["type"])
-    if res != conf_metrics[v]["value"]:
-        print("Protobuf format: Metrics", v, "type", conf_metrics[v]["type"], "Mismatch, expected",
-              res, "!=", conf_metrics[v]["value"])
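
Editor's note on the bucket mapping used above: the new _from_native_histogram() calls
Exposition.value_to_bucket(), which appears to play the same role as the val_to_bucket()
helper removed with test_metrics.py: snap a sample to the lower bound of a sub-bucket
obtained by splitting each power-of-two range into four equal parts. The sketch below is
illustrative only, reproduces the deleted helper's arithmetic, and assumes (not stated in
the patch) that Exposition.value_to_bucket() behaves the same way.

import math


def value_to_bucket(value: float) -> float:
    # Same computation as the val_to_bucket() helper deleted above:
    # split each power-of-two interval [2**n, 2**(n+1)) into four equal
    # sub-buckets and snap the sample to its sub-bucket's lower bound.
    low = 2 ** math.floor(math.log(value, 2))
    high = 2 * low
    step = (high - low) / 4
    return low + step * math.floor((value - low) / step)


# 23 falls in [16, 32); the sub-bucket width there is 4, so 23 maps to
# the bucket starting at 20.
assert value_to_bucket(23) == 20.0
# _from_native_histogram() passes float(v[2]) - 1, presumably because the
# native-histogram bucket entry carries an upper boundary and the test
# wants the bucket that boundary belongs to, e.g. 32 - 1 maps to 28.
assert value_to_bucket(31) == 28.0

Splitting each power-of-two range into four sub-buckets keeps the relative width of a
bucket bounded regardless of magnitude, which is why the same mapping works for both the
small and large sample values in the test configuration.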