# Copyright 2015-2019 Prometheus Python Client Developers
# Copyright 2019 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
|
|
|
|
This code is based off `prometheus_client/exposition.py` from version 0.7.1.
|
|
|
|
|
|
|
|
Due to the renaming of metrics in prometheus_client 0.4.0, this customised
|
|
|
|
vendoring of the code will emit both the old versions that Synapse dashboards
|
|
|
|
expect, and the newer "best practice" version of the up-to-date official client.
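
For example, the cache metrics are exposed both under their legacy,
colon-style names (such as `synapse_util_caches_cache:hits`) and under their
renamed modern forms (such as `synapse_util_caches_cache_hits`); see
`LEGACY_METRIC_NAMES` below for the full mapping.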
"""
import logging
import math
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
from typing import Any, Dict, List, Type, Union
from urllib.parse import parse_qs, urlparse

from prometheus_client import REGISTRY, CollectorRegistry
from prometheus_client.core import Sample

from twisted.web.resource import Resource
from twisted.web.server import Request

logger = logging.getLogger(__name__)

CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8"


def floatToGoString(d: Union[int, float]) -> str:
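    """Render a float the way the Go Prometheus client does.

    Illustrative examples: ``math.inf`` -> ``"+Inf"``, ``float("nan")`` ->
    ``"NaN"``, and ``1e7`` -> ``"1e+07"`` (Go switches to exponent notation
    sooner than Python's ``repr`` does).
    """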
    d = float(d)
    if d == math.inf:
        return "+Inf"
    elif d == -math.inf:
        return "-Inf"
    elif math.isnan(d):
        return "NaN"
    else:
        s = repr(d)
        dot = s.find(".")
        # Go switches to exponents sooner than Python.
        # We only need to care about positive values for le/quantile.
        if d > 0 and dot > 6:
            mantissa = f"{s[0]}.{s[1:dot]}{s[dot + 1 :]}".rstrip("0.")
            return f"{mantissa}e+0{dot - 1}"
        return s


def sample_line(line: Sample, name: str) -> str:
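    """Format one sample in the Prometheus text exposition format.

    Produces ``<name>{<label>="<value>",...} <value>[ <timestamp_ms>]`` plus a
    trailing newline; labels are sorted and escaped as in the official client.
    """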
    if line.labels:
        labelstr = "{{{0}}}".format(
            ",".join(
                [
                    '{}="{}"'.format(
                        k,
                        v.replace("\\", r"\\").replace("\n", r"\n").replace('"', r"\""),
                    )
                    for k, v in sorted(line.labels.items())
                ]
            )
        )
    else:
        labelstr = ""
    timestamp = ""
    if line.timestamp is not None:
        # Convert to milliseconds.
        timestamp = f" {int(float(line.timestamp) * 1000):d}"
    return "{}{} {}{}\n".format(name, labelstr, floatToGoString(line.value), timestamp)


# Mapping from new metric names to legacy metric names.
# We translate these back to their old names when exposing them through our
# legacy vendored exporter.
# Only this legacy exposition module applies these name changes.
LEGACY_METRIC_NAMES = {
    "synapse_util_caches_cache_hits": "synapse_util_caches_cache:hits",
    "synapse_util_caches_cache_size": "synapse_util_caches_cache:size",
    "synapse_util_caches_cache_evicted_size": "synapse_util_caches_cache:evicted_size",
    "synapse_util_caches_cache": "synapse_util_caches_cache:total",
    "synapse_util_caches_response_cache_size": "synapse_util_caches_response_cache:size",
    "synapse_util_caches_response_cache_hits": "synapse_util_caches_response_cache:hits",
    "synapse_util_caches_response_cache_evicted_size": "synapse_util_caches_response_cache:evicted_size",
    "synapse_util_caches_response_cache": "synapse_util_caches_response_cache:total",
    "synapse_federation_client_sent_pdu_destinations": "synapse_federation_client_sent_pdu_destinations:total",
    "synapse_federation_client_sent_pdu_destinations_count": "synapse_federation_client_sent_pdu_destinations:count",
    "synapse_admin_mau_current": "synapse_admin_mau:current",
    "synapse_admin_mau_max": "synapse_admin_mau:max",
    "synapse_admin_mau_registered_reserved_users": "synapse_admin_mau:registered_reserved_users",
}


def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> bytes:
    """
    Generate metrics in legacy format. Modern metrics are generated directly
    by prometheus-client.
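
    For example (illustrative): a metric registered under the modern name
    ``synapse_admin_mau_current`` is emitted here under its legacy name
    ``synapse_admin_mau:current`` (see ``LEGACY_METRIC_NAMES``).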
    """

    output = []

    for metric in registry.collect():
        if not metric.samples:
            # No samples, don't bother.
            continue

        # Translate to legacy metric name if it has one.
        mname = LEGACY_METRIC_NAMES.get(metric.name, metric.name)
        mnewname = metric.name
        mtype = metric.type

        # OpenMetrics -> Prometheus
        if mtype == "counter":
            mnewname = mnewname + "_total"
        elif mtype == "info":
            mtype = "gauge"
            mnewname = mnewname + "_info"
        elif mtype == "stateset":
            mtype = "gauge"
        elif mtype == "gaugehistogram":
            mtype = "histogram"
        elif mtype == "unknown":
            mtype = "untyped"

        # Output in the old format for compatibility.
        if emit_help:
            output.append(
                "# HELP {} {}\n".format(
                    mname,
                    metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                )
            )
        output.append(f"# TYPE {mname} {mtype}\n")

        om_samples: Dict[str, List[str]] = {}
        for s in metric.samples:
            for suffix in ["_created", "_gsum", "_gcount"]:
                if s.name == mname + suffix:
                    # OpenMetrics specific sample, put in a gauge at the end.
                    # (these come from gaugehistograms which don't get renamed,
                    # so no need to faff with mnewname)
                    om_samples.setdefault(suffix, []).append(sample_line(s, s.name))
                    break
            else:
                newname = s.name.replace(mnewname, mname)
                if ":" in newname and newname.endswith("_total"):
                    newname = newname[: -len("_total")]
                output.append(sample_line(s, newname))

        for suffix, lines in sorted(om_samples.items()):
            if emit_help:
                output.append(
                    "# HELP {}{} {}\n".format(
                        mname,
                        suffix,
                        metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                    )
                )
            output.append(f"# TYPE {mname}{suffix} gauge\n")
            output.extend(lines)

        # Get rid of the weird colon things while we're at it
        if mtype == "counter":
            mnewname = mnewname.replace(":total", "")
        mnewname = mnewname.replace(":", "_")

        if mname == mnewname:
            continue

        # Also output in the new format, if it's different.
        if emit_help:
            output.append(
                "# HELP {} {}\n".format(
                    mnewname,
                    metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                )
            )
        output.append(f"# TYPE {mnewname} {mtype}\n")

        for s in metric.samples:
            # Get rid of the OpenMetrics specific samples (we should already have
            # dealt with them above anyway.)
            for suffix in ["_created", "_gsum", "_gcount"]:
                if s.name == mname + suffix:
                    break
            else:
                sample_name = LEGACY_METRIC_NAMES.get(s.name, s.name)
                output.append(
                    sample_line(s, sample_name.replace(":total", "").replace(":", "_"))
                )

    return "".join(output).encode("utf-8")


class MetricsHandler(BaseHTTPRequestHandler):
    """HTTP handler that gives metrics from ``REGISTRY``."""

    registry = REGISTRY

    def do_GET(self) -> None:
        registry = self.registry
        params = parse_qs(urlparse(self.path).query)

        if "help" in params:
            emit_help = True
        else:
            emit_help = False
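
        # Generate the output first: if that fails we can still report a 500,
        # whereas a failure while writing the response below cannot.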
        try:
            output = generate_latest(registry, emit_help=emit_help)
        except Exception:
            self.send_error(500, "error generating metric output")
            raise
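        # The scraper may have gone away mid-request; a broken pipe while
        # writing the response is only worth a warning, not a traceback.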
        try:
            self.send_response(200)
            self.send_header("Content-Type", CONTENT_TYPE_LATEST)
            self.send_header("Content-Length", str(len(output)))
            self.end_headers()
            self.wfile.write(output)
        except BrokenPipeError as e:
            logger.warning(
                "BrokenPipeError when serving metrics (%s). Did Prometheus restart?", e
            )

    def log_message(self, format: str, *args: Any) -> None:
        """Log nothing."""

    @classmethod
    def factory(cls, registry: CollectorRegistry) -> Type:
        """Returns a dynamic MetricsHandler class tied
        to the passed registry.
        """
        # This implementation relies on MetricsHandler.registry
        # (defined above and defaulted to REGISTRY).

        # Note: the str() call below is a holdover from the vendored Python 2
        # code (which used unicode_literals); on Python 3 it is a no-op.
        cls_name = str(cls.__name__)
        MyMetricsHandler = type(cls_name, (cls, object), {"registry": registry})
        return MyMetricsHandler


class _ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    """Thread per request HTTP server."""

    # Make worker threads "fire and forget". Beginning with Python 3.7 this
    # prevents a memory leak because ``ThreadingMixIn`` starts to gather all
    # non-daemon threads in a list in order to join on them at server close.
    # Enabling daemon threads virtually makes ``_ThreadingSimpleServer`` the
    # same as Python 3.7's ``ThreadingHTTPServer``.
    daemon_threads = True


def start_http_server(
    port: int, addr: str = "", registry: CollectorRegistry = REGISTRY
) -> None:
    """Starts an HTTP server for prometheus metrics as a daemon thread"""
    CustomMetricsHandler = MetricsHandler.factory(registry)
    httpd = _ThreadingSimpleServer((addr, port), CustomMetricsHandler)
    t = threading.Thread(target=httpd.serve_forever)
    t.daemon = True
    t.start()


class MetricsResource(Resource):
    """
    Twisted ``Resource`` that serves prometheus metrics.
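
    Illustrative usage (hypothetical resource tree): attach an instance with
    ``root.putChild(b"metrics", MetricsResource(REGISTRY))`` and the metrics
    are rendered on GET requests to ``/metrics``.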
    """

    isLeaf = True

    def __init__(self, registry: CollectorRegistry = REGISTRY):
        self.registry = registry

    def render_GET(self, request: Request) -> bytes:
        request.setHeader(b"Content-Type", CONTENT_TYPE_LATEST.encode("ascii"))
        response = generate_latest(self.registry)
        request.setHeader(b"Content-Length", str(len(response)))
        return response