Commit 537e7b363f on branch main
All checks were successful: continuous-integration/drone/push build is passing
2025-10-16 16:30:25 +09:00
parent 91c7e04474
1146 changed files with 45926 additions and 77196 deletions

View File

@@ -3,16 +3,16 @@ from __future__ import absolute_import
 import abc
 from kafka.metrics.stat import AbstractStat
-from kafka.vendor.six import add_metaclass
-@add_metaclass(abc.ABCMeta)
 class AbstractCompoundStat(AbstractStat):
     """
     A compound stat is a stat where a single measurement and associated
     data structure feeds many metrics. This is the example for a
     histogram which has many associated percentiles.
     """
+    __metaclass__ = abc.ABCMeta
     def stats(self):
         """
         Return list of NamedMeasurable
@@ -21,8 +21,6 @@ class AbstractCompoundStat(AbstractStat):
 class NamedMeasurable(object):
-    __slots__ = ('_name', '_stat')
     def __init__(self, metric_name, measurable_stat):
         self._name = metric_name
         self._stat = measurable_stat
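Throughout this commit, the vendored six helper (`@add_metaclass(abc.ABCMeta)`) gives way to a class-level `__metaclass__` attribute. `__metaclass__` is only honoured by Python 2; on Python 3 it is an ordinary class attribute, so abstract-method enforcement is silently lost. A minimal standalone sketch of that difference, using the PyPI `six` package as a stand-in for `kafka.vendor.six` (illustration only, not code from this diff):

import abc
import six  # stand-in for kafka.vendor.six in this sketch


@six.add_metaclass(abc.ABCMeta)
class WithDecorator(object):
    @abc.abstractmethod
    def record(self, value):
        pass


class WithAttribute(object):
    __metaclass__ = abc.ABCMeta  # ignored on Python 3

    @abc.abstractmethod
    def record(self, value):
        pass


try:
    WithDecorator()   # TypeError on Python 2 and 3: abstract method not implemented
except TypeError as exc:
    print('decorator enforced:', exc)

WithAttribute()       # on Python 3 this silently succeeds; no enforcement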

View File

@@ -4,8 +4,6 @@ import time
 class KafkaMetric(object):
-    __slots__ = ('_metric_name', '_measurable', '_config')
     # NOTE java constructor takes a lock instance
     def __init__(self, metric_name, measurable, config):
         if not metric_name:
@@ -35,4 +33,4 @@ class KafkaMetric(object):
     def value(self, time_ms=None):
         if time_ms is None:
             time_ms = time.time() * 1000
-        return self._measurable.measure(self._config, time_ms)
+        return self.measurable.measure(self.config, time_ms)
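The replacement `value()` reads `self.measurable` and `self.config`, which assumes the class exposes those as read-only properties over the underscore attributes (as older kafka-python releases of `KafkaMetric` did). A rough sketch of that property pattern; the class below is illustrative, not the vendored one:

import time


class KafkaMetricSketch(object):
    # illustrative stand-in for the pattern, not the vendored class
    def __init__(self, metric_name, measurable, config):
        self._metric_name = metric_name
        self._measurable = measurable
        self._config = config

    @property
    def measurable(self):
        return self._measurable

    @property
    def config(self):
        return self._config

    def value(self, time_ms=None):
        # default to "now" in milliseconds, matching the hunk above
        if time_ms is None:
            time_ms = time.time() * 1000
        return self.measurable.measure(self.config, time_ms)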

View File

@@ -4,10 +4,8 @@ import abc
 from kafka.metrics.measurable import AbstractMeasurable
 from kafka.metrics.stat import AbstractStat
-from kafka.vendor.six import add_metaclass
-@add_metaclass(abc.ABCMeta)
 class AbstractMeasurableStat(AbstractStat, AbstractMeasurable):
     """
     An AbstractMeasurableStat is an AbstractStat that is also
@@ -15,3 +13,4 @@ class AbstractMeasurableStat(AbstractStat, AbstractMeasurable):
     This is the interface used for most of the simple statistics such
     as Avg, Max, Count, etc.
     """
+    __metaclass__ = abc.ABCMeta

View File

@@ -5,8 +5,6 @@ import sys
 class MetricConfig(object):
     """Configuration values for metrics"""
-    __slots__ = ('quota', '_samples', 'event_window', 'time_window_ms', 'tags')
     def __init__(self, quota=None, samples=2, event_window=sys.maxsize,
                  time_window_ms=30 * 1000, tags=None):
         """

View File

@@ -38,7 +38,6 @@ class MetricName(object):
         # as messages are sent we record the sizes
         sensor.record(message_size)
     """
-    __slots__ = ('_name', '_group', '_description', '_tags', '_hash')
     def __init__(self, name, group, description=None, tags=None):
         """
@@ -94,7 +93,7 @@ class MetricName(object):
             return True
         if other is None:
             return False
-        return (isinstance(self, type(other)) and
+        return (type(self) == type(other) and
                 self.group == other.group and
                 self.name == other.name and
                 self.tags == other.tags)
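The `__eq__` change from `isinstance(self, type(other))` to `type(self) == type(other)` is subtle: the isinstance form accepts the comparison when `self` is an instance of `other`'s class (so it is asymmetric across subclasses), while strict type equality rejects any cross-class comparison. A quick standalone illustration with made-up classes:

class Name(object):
    def __init__(self, name):
        self.name = name

    def eq_isinstance(self, other):
        return isinstance(self, type(other)) and self.name == other.name

    def eq_strict(self, other):
        return type(self) == type(other) and self.name == other.name


class SubName(Name):
    pass


a, b = Name('x'), SubName('x')
print(b.eq_isinstance(a))  # True: a SubName instance is an instance of Name
print(a.eq_isinstance(b))  # False: a Name instance is not an instance of SubName
print(b.eq_strict(a))      # False: strict type equality rejects the mix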

View File

@@ -55,11 +55,10 @@ class Metrics(object):
         self._reporters = reporters or []
         for reporter in self._reporters:
             reporter.init([])
-        self._closed = False
         if enable_expiration:
             def expire_loop():
-                while not self._closed:
+                while True:
                     # delay 30 seconds
                     time.sleep(30)
                     self.ExpireSensorTask.run(self)
@@ -260,4 +259,3 @@ class Metrics(object):
             reporter.close()
         self._metrics.clear()
-        self._closed = True
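With the `_closed` flag removed, the expiration loop spins on `while True` and ends only when the interpreter tears the (presumably daemon) thread down; the flag-based version lets `close()` stop it cooperatively. A rough standalone sketch of that cooperative-shutdown pattern, not the vendored implementation:

import threading
import time


class ExpiringRegistry(object):
    def __init__(self, interval=0.1):
        self._closed = False
        self._thread = threading.Thread(target=self._expire_loop, args=(interval,))
        self._thread.daemon = True
        self._thread.start()

    def _expire_loop(self, interval):
        while not self._closed:   # checked each pass so close() can stop the loop
            time.sleep(interval)
            # ... expire inactive sensors here ...

    def close(self):
        self._closed = True
        self._thread.join()       # loop exits within one interval


registry = ExpiringRegistry()
time.sleep(0.3)
registry.close()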

View File

@@ -2,15 +2,14 @@ from __future__ import absolute_import
 import abc
-from kafka.vendor.six import add_metaclass
-@add_metaclass(abc.ABCMeta)
 class AbstractMetricsReporter(object):
     """
     An abstract class to allow things to listen as new metrics
     are created so they can be reported.
     """
+    __metaclass__ = abc.ABCMeta
     @abc.abstractmethod
     def init(self, metrics):
         """

View File

@@ -3,8 +3,6 @@ from __future__ import absolute_import
 class Quota(object):
     """An upper or lower bound for metrics"""
-    __slots__ = ('_bound', '_upper')
     def __init__(self, bound, is_upper):
         self._bound = bound
         self._upper = is_upper
@@ -36,7 +34,7 @@ class Quota(object):
     def __eq__(self, other):
         if self is other:
             return True
-        return (isinstance(self, type(other)) and
+        return (type(self) == type(other) and
                 self.bound == other.bound and
                 self.is_upper_bound() == other.is_upper_bound())

View File

@@ -2,15 +2,14 @@ from __future__ import absolute_import
 import abc
-from kafka.vendor.six import add_metaclass
-@add_metaclass(abc.ABCMeta)
 class AbstractStat(object):
     """
     An AbstractStat is a quantity such as average, max, etc that is computed
     off the stream of updates to a sensor
     """
+    __metaclass__ = abc.ABCMeta
     @abc.abstractmethod
     def record(self, config, value, time_ms):
         """

View File

@@ -7,8 +7,6 @@ class Avg(AbstractSampledStat):
     """
     An AbstractSampledStat that maintains a simple average over its samples.
     """
-    __slots__ = ('_initial_value', '_samples', '_current')
     def __init__(self):
         super(Avg, self).__init__(0.0)

View File

@@ -7,8 +7,6 @@ class Count(AbstractSampledStat):
     """
     An AbstractSampledStat that maintains a simple count of what it has seen.
     """
-    __slots__ = ('_initial_value', '_samples', '_current')
     def __init__(self):
         super(Count, self).__init__(0.0)

View File

@@ -4,8 +4,6 @@ import math
 class Histogram(object):
-    __slots__ = ('_hist', '_count', '_bin_scheme')
     def __init__(self, bin_scheme):
         self._hist = [0.0] * bin_scheme.bins
         self._count = 0.0
@@ -42,8 +40,6 @@ class Histogram(object):
         return '{%s}' % ','.join(values)
     class ConstantBinScheme(object):
-        __slots__ = ('_min', '_max', '_bins', '_bucket_width')
         def __init__(self, bins, min_val, max_val):
             if bins < 2:
                 raise ValueError('Must have at least 2 bins.')
@@ -73,8 +69,6 @@ class Histogram(object):
             return int(((x - self._min) / self._bucket_width) + 1)
     class LinearBinScheme(object):
-        __slots__ = ('_bins', '_max', '_scale')
         def __init__(self, num_bins, max_val):
             self._bins = num_bins
             self._max = max_val
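`ConstantBinScheme.to_bin` maps a value to `int(((x - min) / bucket_width) + 1)`, with bucket 0 catching underflow and the last bucket catching overflow; the bucket width is presumably `(max - min) / (bins - 2)` so those two out-of-range buckets are left free. A toy re-implementation of that arithmetic, independent of the vendored class, to make the mapping concrete:

def to_bin(x, bins, min_val, max_val):
    # mirrors the arithmetic in the hunk above; bucket 0 and bucket bins-1
    # catch values outside [min_val, max_val]
    bucket_width = (max_val - min_val) / (bins - 2.0)
    if x < min_val:
        return 0
    elif x > max_val:
        return bins - 1
    return int(((x - min_val) / bucket_width) + 1)


print(to_bin(-5.0, bins=12, min_val=0.0, max_val=100.0))   # 0  (underflow)
print(to_bin(25.0, bins=12, min_val=0.0, max_val=100.0))   # 3
print(to_bin(250.0, bins=12, min_val=0.0, max_val=100.0))  # 11 (overflow)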

View File

@@ -5,8 +5,6 @@ from kafka.metrics.stats.sampled_stat import AbstractSampledStat
class Max(AbstractSampledStat):
"""An AbstractSampledStat that gives the max over its samples."""
__slots__ = ('_initial_value', '_samples', '_current')
def __init__(self):
super(Max, self).__init__(float('-inf'))

View File

@@ -7,8 +7,6 @@ from kafka.metrics.stats.sampled_stat import AbstractSampledStat
 class Min(AbstractSampledStat):
     """An AbstractSampledStat that gives the min over its samples."""
-    __slots__ = ('_initial_value', '_samples', '_current')
     def __init__(self):
         super(Min, self).__init__(float(sys.maxsize))

View File

@@ -2,8 +2,6 @@ from __future__ import absolute_import
 class Percentile(object):
-    __slots__ = ('_metric_name', '_percentile')
     def __init__(self, metric_name, percentile):
         self._metric_name = metric_name
         self._percentile = float(percentile)

View File

@@ -13,9 +13,6 @@ class BucketSizing(object):
 class Percentiles(AbstractSampledStat, AbstractCompoundStat):
     """A compound stat that reports one or more percentiles"""
-    __slots__ = ('_initial_value', '_samples', '_current',
-                 '_percentiles', '_buckets', '_bin_scheme')
     def __init__(self, size_in_bytes, bucketing, max_val, min_val=0.0,
                  percentiles=None):
         super(Percentiles, self).__init__(0.0)
@@ -30,7 +27,7 @@ class Percentiles(AbstractSampledStat, AbstractCompoundStat):
                                  ' to be 0.0.')
             self.bin_scheme = Histogram.LinearBinScheme(self._buckets, max_val)
         else:
-            raise ValueError('Unknown bucket type: %s' % (bucketing,))
+            ValueError('Unknown bucket type: %s' % (bucketing,))
     def stats(self):
         measurables = []
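Of the two forms shown, only `raise ValueError(...)` actually signals the error; the bare `ValueError(...)` call merely constructs an exception object and discards it, so an unknown bucketing value falls through with `bin_scheme` never assigned. A tiny illustration of why the bare call is a silent no-op:

def check_bucketing_without_raise(bucketing):
    if bucketing not in ('CONSTANT', 'LINEAR'):
        ValueError('Unknown bucket type: %s' % (bucketing,))  # built, never raised
    return 'ok'


def check_bucketing_with_raise(bucketing):
    if bucketing not in ('CONSTANT', 'LINEAR'):
        raise ValueError('Unknown bucket type: %s' % (bucketing,))
    return 'ok'


print(check_bucketing_without_raise('BOGUS'))  # prints 'ok': the error is swallowed
print(check_bucketing_with_raise('BOGUS'))     # raises ValueError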

View File

@@ -37,8 +37,6 @@ class Rate(AbstractMeasurableStat):
     occurrences (e.g. the count of values measured over the time interval)
     or other such values.
     """
-    __slots__ = ('_stat', '_unit')
     def __init__(self, time_unit=TimeUnit.SECONDS, sampled_stat=None):
         self._stat = sampled_stat or SampledTotal()
         self._unit = time_unit
@@ -107,7 +105,6 @@ class Rate(AbstractMeasurableStat):
 class SampledTotal(AbstractSampledStat):
-    __slots__ = ('_initial_value', '_samples', '_current')
     def __init__(self, initial_value=None):
         if initial_value is not None:
             raise ValueError('initial_value cannot be set on SampledTotal')
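Rate pairs a sampled stat (SampledTotal by default) with a time unit: the measurement is, roughly, the windowed total divided by the elapsed window, scaled to the requested unit. A back-of-the-envelope sketch of that computation, independent of the vendored classes:

MS_PER_UNIT = {'seconds': 1000.0, 'minutes': 60 * 1000.0}


def rate(windowed_total, elapsed_ms, unit='seconds'):
    # windowed total divided by the elapsed window, scaled to the unit
    return windowed_total * MS_PER_UNIT[unit] / elapsed_ms


print(rate(1500, 30 * 1000))             # 50.0 events per second
print(rate(1500, 30 * 1000, 'minutes'))  # 3000.0 events per minute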

View File

@@ -3,10 +3,8 @@ from __future__ import absolute_import
 import abc
 from kafka.metrics.measurable_stat import AbstractMeasurableStat
-from kafka.vendor.six import add_metaclass
-@add_metaclass(abc.ABCMeta)
 class AbstractSampledStat(AbstractMeasurableStat):
     """
     An AbstractSampledStat records a single scalar value measured over
@@ -22,7 +20,7 @@ class AbstractSampledStat(AbstractMeasurableStat):
     Subclasses of this class define different statistics measured
     using this basic pattern.
     """
-    __slots__ = ('_initial_value', '_samples', '_current')
+    __metaclass__ = abc.ABCMeta
     def __init__(self, initial_value):
         self._initial_value = initial_value
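The docstring above describes the sampled-stat pattern: a handful of time-windowed samples that are recorded into, combined at measurement time, and recycled once they expire. A much-simplified standalone sketch of that idea (not the vendored AbstractSampledStat):

import time


class WindowedAverage(object):
    """Keep per-window sums and counts, and average over the live windows."""

    def __init__(self, num_samples=2, window_ms=30000):
        self.num_samples = num_samples
        self.window_ms = window_ms
        self.samples = []  # each sample is [window_start_ms, total, count]

    def record(self, value, now_ms=None):
        now_ms = now_ms or time.time() * 1000
        # start a new window when the current one has aged out
        if not self.samples or now_ms - self.samples[-1][0] >= self.window_ms:
            self.samples.append([now_ms, 0.0, 0])
            self.samples = self.samples[-self.num_samples:]  # recycle the oldest
        self.samples[-1][1] += value
        self.samples[-1][2] += 1

    def measure(self):
        total = sum(s[1] for s in self.samples)
        count = sum(s[2] for s in self.samples)
        return total / count if count else 0.0


stat = WindowedAverage()
for size in (10, 20, 30):
    stat.record(size)
print(stat.measure())  # 20.0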

View File

@@ -15,10 +15,6 @@ class Sensor(object):
     the `record(double)` api and would maintain a set
     of metrics about request sizes such as the average or max.
     """
-    __slots__ = ('_lock', '_registry', '_name', '_parents', '_metrics',
-                 '_stats', '_config', '_inactive_sensor_expiration_time_ms',
-                 '_last_record_time')
     def __init__(self, registry, name, parents, config,
                  inactive_sensor_expiration_time_seconds):
         if not name:
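Taken together with the usage fragment in the MetricName docstring earlier in this diff (`sensor.record(message_size)`), the intended wiring is: create a sensor on the registry, attach one or more named stats, then record raw values as they occur. Pieced together from those docstrings, usage looks roughly like this; the registry helper names follow upstream kafka-python and may differ in this vendored copy:

from kafka.metrics import Metrics
from kafka.metrics.stats import Avg, Max

metrics = Metrics()

# one sensor feeding several metrics about request sizes
sensor = metrics.sensor('message-sizes')
sensor.add(metrics.metric_name('message-size-avg', 'producer-metrics'), Avg())
sensor.add(metrics.metric_name('message-size-max', 'producer-metrics'), Max())

# as messages are sent we record the sizes
sensor.record(1024)
sensor.record(4096)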

View File

@@ -5,8 +5,6 @@ from kafka.metrics.measurable_stat import AbstractMeasurableStat
class Total(AbstractMeasurableStat):
"""An un-windowed cumulative total maintained over all time."""
__slots__ = ('_total')
def __init__(self, value=0.0):
self._total = value