OpenMetrics: rework test suite with exarkun

This commit is contained in:
Florian Sesser 2021-09-16 15:58:04 +00:00
parent b0e1cf924d
commit 5825b8bd42

View File

@ -1,7 +1,25 @@
import mock

from prometheus_client.openmetrics import parser

from twisted.trial import unittest
from twisted.web.client import readBody
from twisted.web.http import OK
from twisted.web.resource import Resource

from treq.testing import RequestTraversalAgent

from testtools.content import text_content
from testtools.matchers import (
    Always,
    AfterPreprocessing,
    Equals,
    MatchesAll,
    MatchesStructure,
    MatchesPredicate,
)
from testtools.twistedsupport import succeeded

from allmydata.web.status import Statistics
from allmydata.test.common import SyncTestCase
class FakeStatsProvider(object): class FakeStatsProvider(object):
def get_stats(self): def get_stats(self):
@ -97,31 +115,67 @@ class FakeStatsProvider(object):
} }
return stats return stats
class HackItResource(Resource):
    """
    A ``Resource`` wrapper which sets ``request.fields`` to ``None`` on every
    request routed through it before delegating to the normal child lookup.

    NOTE(review): presumably the rendering code downstream expects a
    ``fields`` attribute that treq's in-memory test requests lack — confirm
    against the ``Statistics`` renderer.
    """

    def getChildWithDefault(self, path, request):
        # Patch the request object, then fall back to the standard
        # twisted.web child resolution.
        request.fields = None
        return Resource.getChildWithDefault(self, path, request)
class OpenMetrics(SyncTestCase):
    """
    Tests for the OpenMetrics rendering of the statistics resource.
    """

    def test_spec_compliance(self):
        """
        Does our output adhere to the `OpenMetrics <https://openmetrics.io/>` spec?
        https://github.com/OpenObservability/OpenMetrics/
        https://prometheus.io/docs/instrumenting/exposition_formats/
        """
        # Serve a Statistics resource (fed by fake stats) at the site root,
        # wrapped so requests get the ``fields`` attribute they need.
        root = HackItResource()
        root.putChild(b"", Statistics(FakeStatsProvider()))
        # Issue the request in-memory, without binding any real socket.
        rta = RequestTraversalAgent(root)
        d = rta.request(b"GET", b"http://localhost/?t=openmetrics")
        # The response must succeed and satisfy the OpenMetrics matcher.
        self.assertThat(d, succeeded(matches_stats(self)))
def matches_stats(testcase):
    """
    Build a matcher asserting that an HTTP response is a spec-compliant
    OpenMetrics rendering: 200 OK, the mandated content-type header, and a
    body that the strict OpenMetrics parser accepts.

    :param testcase: The test case to attach the response body to as a
        detail, so it appears in failure output.
    :return: A testtools matcher for a twisted.web response.
    """
    def add_detail(testcase):
        # A predicate that always "matches" — its only job is the side
        # effect of recording the body as a test detail.
        def predicate(body):
            testcase.addDetail("body", text_content(body))
            return True
        return predicate

    return MatchesAll(
        MatchesStructure(
            code=Equals(OK),
            # "The content type MUST be..."
            headers=has_header(
                "content-type",
                "application/openmetrics-text; version=1.0.0; charset=utf-8",
            ),
        ),
        AfterPreprocessing(
            readBodyText,
            succeeded(MatchesAll(
                MatchesPredicate(add_detail(testcase), "%s dummy"),
                parses_as_openmetrics(),
            )),
        ),
    )
def readBodyText(response):
    """
    Read the whole body of an HTTP response and decode it as UTF-8.

    :param response: A twisted.web client response.
    :return: A ``Deferred`` firing with the body as a unicode string.
    """
    d = readBody(response)
    d.addCallback(lambda body: body.decode("utf-8"))
    return d
def has_header(name, value):
    """
    Build a matcher asserting that a twisted.web ``Headers`` object carries
    exactly one value, ``value``, for the header named ``name``.

    :param name: The header name to look up.
    :param value: The single expected raw value.
    :return: A testtools matcher for a ``Headers`` object.
    """
    return AfterPreprocessing(
        # getRawHeaders returns a list of all values for the header.
        lambda headers: headers.getRawHeaders(name),
        Equals([value]),
    )
def parses_as_openmetrics():
    """
    Build a matcher asserting that a text body parses with the strict
    OpenMetrics parser and that the final metric family is the expected
    Tahoe storage-server bucket count.

    :return: A testtools matcher for a unicode response body.
    """
    # The parser throws if it does not like its input.
    # Wrapped in a list() to drain the generator.
    return AfterPreprocessing(
        lambda body: list(parser.text_string_to_metric_families(body)),
        # Has the parser parsed our data? Just check the last item.
        AfterPreprocessing(
            lambda families: families[-1].name,
            Equals(u"tahoe_stats_storage_server_total_bucket_count"),
        ),
    )