OpenMetrics test: Add parser to check against spec

Florian Sesser 2021-09-10 13:00:15 +00:00
parent fca1482b35
commit d04157d18a
2 changed files with 13 additions and 7 deletions

View File

@@ -1,4 +1,5 @@
 import mock
+from prometheus_client.openmetrics import parser
 from twisted.trial import unittest
 from allmydata.web.status import Statistics
@@ -8,18 +9,21 @@ class FakeStatsProvider(object):
         return stats
 class OpenMetrics(unittest.TestCase):
-    def test_header(self):
+    def test_spec_compliance(self):
+        """
+        Does our output adhere to the OpenMetrics spec?
+        https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md
+        """
         req = mock.Mock()
         stats = mock.Mock()
         stats._provider = FakeStatsProvider()
         metrics = Statistics.render_OPENMETRICS(stats, req)
+        # "The content type MUST be..."
         req.setHeader.assert_called_with("content-type", "application/openmetrics-text; version=1.0.0; charset=utf-8")
-    def test_spec_compliance(self):
-        req = mock.Mock()
-        stats = mock.Mock()
-        stats._provider = FakeStatsProvider()
-        metrics = Statistics.render_OPENMETRICS(stats, req)
-        # TODO test that output adheres to spec
+        # The parser throws if it can't parse.
+        # Wrap in a list() to drain the generator.
+        families = list(parser.text_string_to_metric_families(metrics))
+        # TODO add more realistic stats, incl. missing (None) values
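
For reference, parser.text_string_to_metric_families() is a generator, so it only reports problems as it is consumed, which is why the test drains it with list(). A minimal standalone sketch of the same kind of check against a hand-written exposition (the metric name and value below are made up for illustration; a valid exposition must end with a "# EOF" line):

from prometheus_client.openmetrics import parser

# Made-up OpenMetrics exposition, for illustration only.
exposition = (
    "# TYPE tahoe_example_counter counter\n"
    "tahoe_example_counter_total 7.0\n"
    "# EOF\n"
)

# Draining the generator forces the whole document to be parsed;
# any spec violation surfaces as an exception at this point.
families = list(parser.text_string_to_metric_families(exposition))
for family in families:
    for sample in family.samples:
        print(sample.name, sample.value)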

View File

@@ -52,6 +52,8 @@ deps =
     certifi
     # VCS hooks support
     py36,!coverage: pre-commit
+    # Does our OpenMetrics endpoint adhere to the spec:
+    prometheus-client==0.11.0
 # We add usedevelop=False because testing against a true installation gives
 # more useful results.
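
For reference, the pinned prometheus-client parser reports spec violations by raising, so non-conforming output fails the test instead of passing silently. A small sketch of that failure mode (the exposition below is made up; in the prometheus-client versions I am aware of, parse errors such as a missing "# EOF" terminator surface as ValueError):

from prometheus_client.openmetrics import parser

# Made-up exposition that violates the spec: no terminating "# EOF" line.
bad = "# TYPE tahoe_example_counter counter\ntahoe_example_counter_total 7.0\n"

try:
    list(parser.text_string_to_metric_families(bad))
except ValueError as err:
    # prometheus-client signals parse/spec errors as ValueError.
    print("rejected:", err)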