first commit
This commit is contained in:
305
searx/metrics/__init__.py
Normal file
305
searx/metrics/__init__.py
Normal file
@@ -0,0 +1,305 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
import typing
|
||||
import math
|
||||
import contextlib
|
||||
from timeit import default_timer
|
||||
from operator import itemgetter
|
||||
|
||||
from searx.engines import engines
|
||||
from searx.openmetrics import OpenMetricsFamily
|
||||
from .models import HistogramStorage, CounterStorage, VoidHistogram, VoidCounterStorage
|
||||
from .error_recorder import count_error, count_exception, errors_per_engines
|
||||
|
||||
__all__ = [
|
||||
"initialize",
|
||||
"get_engines_stats",
|
||||
"get_engine_errors",
|
||||
"histogram",
|
||||
"histogram_observe",
|
||||
"histogram_observe_time",
|
||||
"counter",
|
||||
"counter_inc",
|
||||
"counter_add",
|
||||
"count_error",
|
||||
"count_exception",
|
||||
]
|
||||
|
||||
|
||||
ENDPOINTS = {'search'}
|
||||
|
||||
|
||||
histogram_storage: typing.Optional[HistogramStorage] = None
|
||||
counter_storage: typing.Optional[CounterStorage] = None
|
||||
|
||||
|
||||
@contextlib.contextmanager
def histogram_observe_time(*args):
    """Context manager measuring the wall-clock duration of its body and
    recording it in the histogram identified by *args*.

    Yields the start time (from ``timeit.default_timer``).

    :raises ValueError: after the body ran, when no histogram exists for
        *args* (the histogram is looked up before timing starts).
    """
    h = histogram_storage.get(*args)
    before = default_timer()
    yield before
    duration = default_timer() - before
    if h:
        h.observe(duration)
    else:
        # fixed message: was "doesn't not exist" (double negative)
        raise ValueError("histogram " + repr((*args,)) + " doesn't exist")
|
||||
|
||||
|
||||
def histogram_observe(duration, *args):
    """Record *duration* in the histogram identified by *args*."""
    measure = histogram_storage.get(*args)
    measure.observe(duration)
|
||||
|
||||
|
||||
def histogram(*args, raise_on_not_found=True):
    """Return the histogram identified by *args*.

    :param raise_on_not_found: when true (the default), raise instead of
        returning ``None`` for an unknown histogram.
    :raises ValueError: when no histogram exists for *args* and
        *raise_on_not_found* is true.
    """
    h = histogram_storage.get(*args)
    if raise_on_not_found and h is None:
        # fixed message: was "doesn't not exist" (double negative)
        raise ValueError("histogram " + repr((*args,)) + " doesn't exist")
    return h
|
||||
|
||||
|
||||
def counter_inc(*args):
    """Increment the counter identified by *args* by one."""
    counter_storage.add(1, *args)
|
||||
|
||||
|
||||
def counter_add(value, *args):
    """Add *value* to the counter identified by *args*."""
    counter_storage.add(value, *args)
|
||||
|
||||
|
||||
def counter(*args):
    """Return the current value of the counter identified by *args*."""
    return counter_storage.get(*args)
|
||||
|
||||
|
||||
def initialize(engine_names=None, enabled=True):
    """Initialize (or reset) the global counter and histogram storages.

    :param engine_names: iterable of engine names to configure; when falsy,
        all engines in ``searx.engines.engines`` are configured.
    :param enabled: when False, void storages are installed so that
        recording becomes a no-op.
    """
    global counter_storage, histogram_storage  # pylint: disable=global-statement

    if enabled:
        counter_storage = CounterStorage()
        histogram_storage = HistogramStorage()
    else:
        # metrics disabled: storages accept writes but record nothing
        counter_storage = VoidCounterStorage()
        histogram_storage = HistogramStorage(histogram_class=VoidHistogram)

    # max_timeout = max of all the engine.timeout
    max_timeout = 2
    for engine_name in engine_names or engines:
        if engine_name in engines:
            max_timeout = max(max_timeout, engines[engine_name].timeout)

    # histogram configuration: bucket width 0.1s, sized to cover 1.5x the
    # largest engine timeout
    histogram_width = 0.1
    histogram_size = int(1.5 * max_timeout / histogram_width)

    # engines
    for engine_name in engine_names or engines:
        # search count
        counter_storage.configure('engine', engine_name, 'search', 'count', 'sent')
        counter_storage.configure('engine', engine_name, 'search', 'count', 'successful')
        # global counter of errors
        counter_storage.configure('engine', engine_name, 'search', 'count', 'error')
        # score of the engine
        counter_storage.configure('engine', engine_name, 'score')
        # result count per requests
        histogram_storage.configure(1, 100, 'engine', engine_name, 'result', 'count')
        # time doing HTTP requests
        histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'http')
        # total time
        # .time.request and ...response times may overlap .time.http time.
        histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'total')
|
||||
|
||||
|
||||
def get_engine_errors(engline_name_list):
    """Return recorded error statistics per engine.

    :param engline_name_list: iterable of engine names to report on.
    :return: dict mapping engine name to a list of error dicts, sorted by
        descending ``percentage`` (share of sent requests that produced the
        error, rounded to a multiple of 5).
    """
    result = {}
    engine_names = list(errors_per_engines.keys())
    engine_names.sort()
    for engine_name in engine_names:
        if engine_name not in engline_name_list:
            continue

        error_stats = errors_per_engines[engine_name]
        # clamp to at least 1 to avoid division by zero below
        sent_search_count = max(counter('engine', engine_name, 'search', 'count', 'sent'), 1)
        sorted_context_count_list = sorted(error_stats.items(), key=lambda context_count: context_count[1])
        r = []
        for context, count in sorted_context_count_list:
            # percentage of sent requests, rounded to the nearest multiple of 5
            percentage = round(20 * count / sent_search_count) * 5
            r.append(
                {
                    'filename': context.filename,
                    'function': context.function,
                    'line_no': context.line_no,
                    'code': context.code,
                    'exception_classname': context.exception_classname,
                    'log_message': context.log_message,
                    'log_parameters': context.log_parameters,
                    'secondary': context.secondary,
                    'percentage': percentage,
                }
            )
        result[engine_name] = sorted(r, reverse=True, key=lambda d: d['percentage'])
    return result
|
||||
|
||||
|
||||
def get_reliabilities(engline_name_list, checker_results):
    """Combine error statistics and checker results into reliability scores.

    :param engline_name_list: iterable of engine names.
    :param checker_results: dict of per-engine checker result dicts.
    :return: dict mapping engine name to a dict with keys ``reliability``
        (0-100, or None when no request was sent), ``sent_count``,
        ``errors`` and ``checker``.
    """
    reliabilities = {}

    engine_errors = get_engine_errors(engline_name_list)

    for engine_name in engline_name_list:
        checker_result = checker_results.get(engine_name, {})
        # a missing checker result counts as success
        checker_success = checker_result.get('success', True)
        errors = engine_errors.get(engine_name) or []
        sent_count = counter('engine', engine_name, 'search', 'count', 'sent')

        if sent_count == 0:
            # no request
            reliability = None
        elif checker_success and not errors:
            reliability = 100
        elif 'simple' in checker_result.get('errors', {}):
            # the basic (simple) test doesn't work: the engine is broken according to the checker
            # even if there is no exception
            reliability = 0
        else:
            # subtract the share of primary (non-secondary) errors
            # pylint: disable=consider-using-generator
            reliability = 100 - sum([error['percentage'] for error in errors if not error.get('secondary')])

        reliabilities[engine_name] = {
            'reliability': reliability,
            'sent_count': sent_count,
            'errors': errors,
            'checker': checker_result.get('errors', {}),
        }
    return reliabilities
|
||||
|
||||
|
||||
def get_engines_stats(engine_name_list):
    """Aggregate timing and result statistics for the given engines.

    Engines with no sent request are skipped. Times are medians (p50) with
    p80/p95 variants; ``processing`` is total minus HTTP time.

    :return: dict with ``time`` (list of per-engine stat dicts), ``max_time``
        and ``max_result_count`` (ceilings over all listed engines).
    """
    assert counter_storage is not None
    assert histogram_storage is not None

    list_time = []
    max_time_total = max_result_count = None

    for engine_name in engine_name_list:

        sent_count = counter('engine', engine_name, 'search', 'count', 'sent')
        if sent_count == 0:
            # engine was never used: nothing to report
            continue

        # median result count per request and total number of results
        result_count = histogram('engine', engine_name, 'result', 'count').percentage(50)
        result_count_sum = histogram('engine', engine_name, 'result', 'count').sum
        successful_count = counter('engine', engine_name, 'search', 'count', 'successful')

        # median total response time
        time_total = histogram('engine', engine_name, 'time', 'total').percentage(50)
        max_time_total = max(time_total or 0, max_time_total or 0)
        max_result_count = max(result_count or 0, max_result_count or 0)

        # default stats; time entries stay None when no timing data exists
        stats = {
            'name': engine_name,
            'total': None,
            'total_p80': None,
            'total_p95': None,
            'http': None,
            'http_p80': None,
            'http_p95': None,
            'processing': None,
            'processing_p80': None,
            'processing_p95': None,
            'score': 0,
            'score_per_result': 0,
            'result_count': result_count,
        }

        if successful_count and result_count_sum:
            score = counter('engine', engine_name, 'score')

            stats['score'] = score
            stats['score_per_result'] = score / float(result_count_sum)

        time_http = histogram('engine', engine_name, 'time', 'http').percentage(50)
        time_http_p80 = time_http_p95 = 0

        if time_http is not None:

            time_http_p80 = histogram('engine', engine_name, 'time', 'http').percentage(80)
            time_http_p95 = histogram('engine', engine_name, 'time', 'http').percentage(95)

            stats['http'] = round(time_http, 1)
            stats['http_p80'] = round(time_http_p80, 1)
            stats['http_p95'] = round(time_http_p95, 1)

        if time_total is not None:

            time_total_p80 = histogram('engine', engine_name, 'time', 'total').percentage(80)
            time_total_p95 = histogram('engine', engine_name, 'time', 'total').percentage(95)

            stats['total'] = round(time_total, 1)
            stats['total_p80'] = round(time_total_p80, 1)
            stats['total_p95'] = round(time_total_p95, 1)

            # processing = total - HTTP; falls back to 0 when no HTTP time
            stats['processing'] = round(time_total - (time_http or 0), 1)
            stats['processing_p80'] = round(time_total_p80 - time_http_p80, 1)
            stats['processing_p95'] = round(time_total_p95 - time_http_p95, 1)

        list_time.append(stats)

    return {
        'time': list_time,
        'max_time': math.ceil(max_time_total or 0),
        'max_result_count': math.ceil(max_result_count or 0),
    }
|
||||
|
||||
|
||||
def openmetrics(engine_stats, engine_reliabilities):
    """Render engine statistics in OpenMetrics text format.

    :param engine_stats: output of :py:func:`get_engines_stats`.
    :param engine_reliabilities: output of :py:func:`get_reliabilities`.
    :return: concatenation of the serialized metric families as one string.
    """
    metrics = [
        OpenMetricsFamily(
            key="searxng_engines_response_time_total_seconds",
            type_hint="gauge",
            help_hint="The average total response time of the engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[engine['total'] or 0 for engine in engine_stats['time']],
        ),
        OpenMetricsFamily(
            key="searxng_engines_response_time_processing_seconds",
            type_hint="gauge",
            help_hint="The average processing response time of the engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[engine['processing'] or 0 for engine in engine_stats['time']],
        ),
        OpenMetricsFamily(
            key="searxng_engines_response_time_http_seconds",
            type_hint="gauge",
            help_hint="The average HTTP response time of the engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[engine['http'] or 0 for engine in engine_stats['time']],
        ),
        OpenMetricsFamily(
            key="searxng_engines_result_count_total",
            type_hint="counter",
            help_hint="The total amount of results returned by the engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[engine['result_count'] or 0 for engine in engine_stats['time']],
        ),
        OpenMetricsFamily(
            key="searxng_engines_request_count_total",
            type_hint="counter",
            help_hint="The total amount of user requests made to this engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[
                engine_reliabilities.get(engine['name'], {}).get('sent_count', 0) or 0
                for engine in engine_stats['time']
            ],
        ),
        OpenMetricsFamily(
            key="searxng_engines_reliability_total",
            type_hint="counter",
            help_hint="The overall reliability of the engine",
            data_info=[{'engine_name': engine['name']} for engine in engine_stats['time']],
            data=[
                engine_reliabilities.get(engine['name'], {}).get('reliability', 0) or 0
                for engine in engine_stats['time']
            ],
        ),
    ]
    return "".join([str(metric) for metric in metrics])
|
||||
195
searx/metrics/error_recorder.py
Normal file
195
searx/metrics/error_recorder.py
Normal file
@@ -0,0 +1,195 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring, invalid-name
|
||||
|
||||
import typing
|
||||
import inspect
|
||||
from json import JSONDecodeError
|
||||
from urllib.parse import urlparse
|
||||
from httpx import HTTPError, HTTPStatusError
|
||||
from searx.exceptions import (
|
||||
SearxXPathSyntaxException,
|
||||
SearxEngineXPathException,
|
||||
SearxEngineAPIException,
|
||||
SearxEngineAccessDeniedException,
|
||||
)
|
||||
from searx import searx_parent_dir, settings
|
||||
from searx.engines import engines
|
||||
|
||||
|
||||
errors_per_engines = {}
|
||||
|
||||
|
||||
class ErrorContext:
    """Immutable record of where and why an engine error happened.

    Instances are used as dict keys to count identical errors, so
    ``__eq__`` and ``__hash__`` cover every slot.
    """

    __slots__ = (
        'filename',
        'function',
        'line_no',
        'code',
        'exception_classname',
        'log_message',
        'log_parameters',
        'secondary',
    )

    def __init__(  # pylint: disable=too-many-arguments
        self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary
    ):
        self.filename = filename
        self.function = function
        self.line_no = line_no
        self.code = code
        self.exception_classname = exception_classname
        self.log_message = log_message
        self.log_parameters = log_parameters
        self.secondary = secondary

    def __eq__(self, o) -> bool:  # pylint: disable=invalid-name
        if not isinstance(o, ErrorContext):
            return False
        return (
            self.filename == o.filename
            and self.function == o.function
            and self.line_no == o.line_no
            and self.code == o.code
            and self.exception_classname == o.exception_classname
            and self.log_message == o.log_message
            and self.log_parameters == o.log_parameters
            and self.secondary == o.secondary
        )

    def __hash__(self):
        return hash(
            (
                self.filename,
                self.function,
                self.line_no,
                self.code,
                self.exception_classname,
                self.log_message,
                self.log_parameters,
                self.secondary,
            )
        )

    def __repr__(self):
        # fixed: the original format skipped 'function' (7 placeholders for
        # 8 fields); now every slot is shown
        return "ErrorContext({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}) {!r}".format(
            self.filename,
            self.function,
            self.line_no,
            self.code,
            self.exception_classname,
            self.log_message,
            self.log_parameters,
            self.secondary,
        )
|
||||
|
||||
|
||||
def add_error_context(engine_name: str, error_context: ErrorContext) -> None:
    """Count one occurrence of *error_context* for *engine_name* and log it."""
    per_engine = errors_per_engines.setdefault(engine_name, {})
    per_engine[error_context] = per_engine.get(error_context, 0) + 1
    engines[engine_name].logger.warning('%s', str(error_context))
|
||||
|
||||
|
||||
def get_trace(traces):
    """Pick the most relevant frame from *traces*.

    Walks the frames from innermost to outermost and returns the first one
    located under ``searx/engines`` or ``searx/search/processors``; falls
    back to the last (innermost) frame.
    """
    for frame in reversed(traces):
        path_parts = frame.filename.split('/')
        parent_two = '/'.join(path_parts[-3:-1])
        parent_three = '/'.join(path_parts[-4:-1])
        if parent_two == 'searx/engines' or parent_three == 'searx/search/processors':
            return frame
    return traces[-1]
|
||||
|
||||
|
||||
def get_hostname(exc: HTTPError) -> typing.Optional[str]:
    """Return the network location of the URL attached to *exc*.

    Falls back to the response URL when the request URL is missing.
    (fixed return annotation: was the meaningless ``Optional[None]``)
    """
    url = exc.request.url
    if url is None and exc.response is not None:
        url = exc.response.url
    return urlparse(url).netloc
|
||||
|
||||
|
||||
def get_request_exception_messages(
    exc: HTTPError,
) -> typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]]:
    """Extract ``(status_code, reason, hostname)`` from an httpx error.

    Each element is None when the corresponding information is unavailable.
    """
    url = None
    status_code = None
    reason = None
    hostname = None
    if hasattr(exc, '_request') and exc._request is not None:  # pylint: disable=protected-access
        # exc.request is a property that raises a RuntimeError
        # if exc._request is not defined.
        url = exc.request.url
    if url is None and hasattr(exc, 'response') and exc.response is not None:
        # fall back to the URL of the response
        url = exc.response.url
    if url is not None:
        hostname = url.host
    if isinstance(exc, HTTPStatusError):
        status_code = str(exc.response.status_code)
        reason = exc.response.reason_phrase
    return (status_code, reason, hostname)
|
||||
|
||||
|
||||
def get_messages(exc, filename) -> typing.Tuple:  # pylint: disable=too-many-return-statements
    """Derive the tuple of log parameters for a recorded exception.

    Returns an empty tuple for exception types with no useful message.
    """
    if isinstance(exc, JSONDecodeError):
        return (exc.msg,)
    if isinstance(exc, TypeError):
        return (str(exc),)
    if isinstance(exc, ValueError) and 'lxml' in filename:
        return (str(exc),)
    if isinstance(exc, HTTPError):
        return get_request_exception_messages(exc)
    # both XPath exception types expose the same attributes
    if isinstance(exc, (SearxXPathSyntaxException, SearxEngineXPathException)):
        return (exc.xpath_str, exc.message)
    if isinstance(exc, SearxEngineAPIException):
        return (str(exc.args[0]),)
    if isinstance(exc, SearxEngineAccessDeniedException):
        return (exc.message,)
    return ()
|
||||
|
||||
|
||||
def get_exception_classname(exc: Exception) -> str:
    """Return the qualified class name of *exc*; builtins stay unqualified."""
    cls = exc.__class__
    module = cls.__module__
    if module is None or module == str.__class__.__module__:
        # builtin (or module-less) exception: plain name is unambiguous
        return cls.__qualname__
    return f"{module}.{cls.__qualname__}"
|
||||
|
||||
|
||||
def get_error_context(framerecords, exception_classname, log_message, log_parameters, secondary) -> ErrorContext:
    """Build an :py:class:`ErrorContext` from the most relevant stack frame."""
    frame = get_trace(framerecords)
    filename = frame.filename
    if filename.startswith(searx_parent_dir):
        # store the path relative to the searx checkout, not an absolute one
        filename = filename[len(searx_parent_dir) + 1 :]
    code_line = frame.code_context[0].strip()
    del framerecords
    return ErrorContext(
        filename, frame.function, frame.lineno, code_line, exception_classname, log_message, log_parameters, secondary
    )
|
||||
|
||||
|
||||
def count_exception(engine_name: str, exc: Exception, secondary: bool = False) -> None:
    """Record exception *exc* for *engine_name* in the error statistics.

    No-op when metrics are disabled in the settings.

    :param secondary: mark the error as secondary (it then does not reduce
        the reliability score).
    """
    if not settings['general']['enable_metrics']:
        return
    framerecords = inspect.trace()
    try:
        exception_classname = get_exception_classname(exc)
        log_parameters = get_messages(exc, framerecords[-1][1])
        error_context = get_error_context(framerecords, exception_classname, None, log_parameters, secondary)
        add_error_context(engine_name, error_context)
    finally:
        # break the reference cycle created by holding traceback frames
        del framerecords
|
||||
|
||||
|
||||
def count_error(
    engine_name: str, log_message: str, log_parameters: typing.Optional[typing.Tuple] = None, secondary: bool = False
) -> None:
    """Record a non-exception error message for *engine_name*.

    No-op when metrics are disabled in the settings.

    :param secondary: mark the error as secondary (it then does not reduce
        the reliability score).
    """
    if not settings['general']['enable_metrics']:
        return
    # skip this function's own frame; keep caller frames in call order
    framerecords = list(reversed(inspect.stack()[1:]))
    try:
        error_context = get_error_context(framerecords, None, log_message, log_parameters or (), secondary)
        add_error_context(engine_name, error_context)
    finally:
        # break the reference cycle created by holding stack frames
        del framerecords
|
||||
166
searx/metrics/models.py
Normal file
166
searx/metrics/models.py
Normal file
@@ -0,0 +1,166 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
import decimal
|
||||
import threading
|
||||
|
||||
from searx import logger
|
||||
|
||||
|
||||
__all__ = ["Histogram", "HistogramStorage", "CounterStorage"]
|
||||
|
||||
logger = logger.getChild('searx.metrics')
|
||||
|
||||
|
||||
class Histogram:
    """Thread-safe fixed-bucket histogram.

    Values are counted in ``size`` buckets of ``width`` each; values below
    zero go into the first bucket, values beyond the range into the last.
    (The code calls buckets "quartiles" for historical reasons.)
    """

    # fixed: was '_slots__' (typo), so __slots__ never took effect and every
    # instance carried a __dict__
    __slots__ = '_lock', '_size', '_sum', '_quartiles', '_count', '_width'

    def __init__(self, width=10, size=200):
        self._lock = threading.Lock()
        self._width = width
        self._size = size
        self._quartiles = [0] * size
        self._count = 0
        self._sum = 0

    def observe(self, value):
        """Record *value* in the matching bucket and update count/sum."""
        q = int(value / self._width)
        if q < 0:  # pylint: disable=consider-using-max-builtin
            # Value below zero is ignored
            q = 0
        if q >= self._size:
            # Value above the maximum is replaced by the maximum
            q = self._size - 1
        with self._lock:
            self._quartiles[q] += 1
            self._count += 1
            self._sum += value

    @property
    def quartiles(self):
        """A copy of the raw bucket counts."""
        return list(self._quartiles)

    @property
    def count(self):
        """Number of observed values."""
        return self._count

    @property
    def sum(self):
        """Sum of all observed values."""
        return self._sum

    @property
    def average(self):
        """Arithmetic mean of observed values; 0 when empty."""
        with self._lock:
            if self._count != 0:
                return self._sum / self._count
            return 0

    @property
    def quartile_percentage(self):
        '''Quartile in percentage'''
        with self._lock:
            if self._count > 0:
                return [int(q * 100 / self._count) for q in self._quartiles]
            return self._quartiles

    @property
    def quartile_percentage_map(self):
        """Map of bucket lower bound -> percentage, skipping empty buckets."""
        result = {}
        # use Decimal to avoid rounding errors
        x = decimal.Decimal(0)
        width = decimal.Decimal(self._width)
        width_exponent = -width.as_tuple().exponent
        with self._lock:
            if self._count > 0:
                for y in self._quartiles:
                    yp = int(y * 100 / self._count)  # pylint: disable=invalid-name
                    if yp != 0:
                        result[round(float(x), width_exponent)] = yp
                    x += width
        return result

    def percentage(self, percentage):
        """Return the bucket lower bound below which *percentage* percent of
        the observed values fall; None when the histogram is empty."""
        # use Decimal to avoid rounding errors
        x = decimal.Decimal(0)
        width = decimal.Decimal(self._width)
        stop_at_value = decimal.Decimal(self._count) / 100 * percentage
        sum_value = 0
        with self._lock:
            if self._count > 0:
                for y in self._quartiles:
                    sum_value += y
                    if sum_value >= stop_at_value:
                        return x
                    x += width
        return None

    def __repr__(self):
        return "Histogram<avg: " + str(self.average) + ", count: " + str(self._count) + ">"
|
||||
|
||||
|
||||
class HistogramStorage:
    """Registry of histograms keyed by tuples of names."""

    # measures: maps a key tuple (the *args of configure/get) to a histogram
    __slots__ = 'measures', 'histogram_class'

    def __init__(self, histogram_class=Histogram):
        self.clear()
        self.histogram_class = histogram_class

    def clear(self):
        # drop all histograms (also used as initializer)
        self.measures = {}

    def configure(self, width, size, *args):
        # create and register a histogram under the key tuple *args*
        measure = self.histogram_class(width, size)
        self.measures[args] = measure
        return measure

    def get(self, *args):
        # returns None for unknown keys
        return self.measures.get(args, None)

    def dump(self):
        # write all histograms to the debug log
        logger.debug("Histograms:")
        ks = sorted(self.measures.keys(), key='/'.join)  # pylint: disable=invalid-name
        for k in ks:
            logger.debug("- %-60s %s", '|'.join(k), self.measures[k])
|
||||
|
||||
|
||||
class CounterStorage:
    """Thread-safe storage for integer counters keyed by tuples of names."""

    __slots__ = 'counters', 'lock'

    def __init__(self):
        self.lock = threading.Lock()
        self.clear()

    def clear(self):
        """Forget every counter."""
        with self.lock:
            self.counters = {}

    def configure(self, *args):
        """Declare the counter identified by *args* and reset it to zero."""
        with self.lock:
            self.counters[args] = 0

    def get(self, *args):
        """Return the current value of the counter identified by *args*."""
        return self.counters[args]

    def add(self, value, *args):
        """Add *value* to the counter identified by *args*."""
        with self.lock:
            self.counters[args] += value

    def dump(self):
        """Write every counter to the debug log."""
        with self.lock:
            sorted_keys = sorted(self.counters.keys(), key='/'.join)  # pylint: disable=invalid-name
            logger.debug("Counters:")
            for key in sorted_keys:
                logger.debug("- %-60s %s", '|'.join(key), self.counters[key])
|
||||
|
||||
|
||||
class VoidHistogram(Histogram):
    """No-op histogram installed when metrics are disabled."""

    def observe(self, value):
        # deliberately record nothing
        pass
|
||||
|
||||
|
||||
class VoidCounterStorage(CounterStorage):
    """No-op counter storage installed when metrics are disabled."""

    def add(self, value, *args):
        # deliberately record nothing
        pass
|
||||
Reference in New Issue
Block a user