Add FakeStatsdClient to unit tests
Currently we simply mock out statsd calls in the FakeLogger, which also carries some helper methods for counting and collating the metrics that were called. This FakeLogger is overloaded and doesn't simulate the real world: in real life we use a StatsdClient that is attached to the logger. We've been in the situation where unit tests pass but the statsd client stacktraces, because we never fake the StatsdClient based off the real one and let it use its internal logic.

This patch creates a new FakeStatsdClient that is based off the real one; it can be used like the real statsd client and attached to the FakeLogger. There is quite a bit of churn in the tests to make this work, because we now have to look into the fake statsd client to check the faked calls made.

The FakeStatsdClient does everything the real one does, except it overrides the _send method and socket creation, so no actual statsd metrics are emitted.

Change-Id: I9cdf395e85ab559c2b67b0617f898ad2d6a870d4
parent 1f9937b245
commit 00bfc425ce
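For orientation, a minimal sketch of how a test interacts with the new fake after this change. This is a hedged illustration: the import path is an assumption and the metric names are made up for the example; FakeLogger now constructs its own FakeStatsdClient, as the diff below shows.

    from test.debug_logger import FakeLogger  # assumed import path

    logger = FakeLogger()

    # Code under test drives the statsd client attached to the logger; the
    # fake runs the real StatsdClient logic but never touches a socket.
    logger.statsd_client.increment('failures')
    logger.statsd_client.update_stats('failures', 2)

    # Assertions now look into the fake statsd client, not the logger:
    assert logger.statsd_client.get_increment_counts() == {'failures': 1}
    # increment() routes through update_stats(), so get_stats_counts()
    # aggregates both calls: 1 (increment) + 2 (update_stats).
    assert logger.statsd_client.get_stats_counts() == {'failures': 3}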
@@ -1234,21 +1234,7 @@ class StatsdClient(object):
        self.logger = logger

        # Determine if host is IPv4 or IPv6
        addr_info = None
        try:
            addr_info = socket.getaddrinfo(host, port, socket.AF_INET)
            self._sock_family = socket.AF_INET
        except socket.gaierror:
            try:
                addr_info = socket.getaddrinfo(host, port, socket.AF_INET6)
                self._sock_family = socket.AF_INET6
            except socket.gaierror:
                # Don't keep the server from starting from what could be a
                # transient DNS failure. Any hostname will get re-resolved as
                # necessary in the .sendto() calls.
                # However, we don't know if we're IPv4 or IPv6 in this case, so
                # we assume legacy IPv4.
                self._sock_family = socket.AF_INET
        addr_info, self._sock_family = self._determine_sock_family(host, port)

        # NOTE: we use the original host value, not the DNS-resolved one
        # because if host is a hostname, we don't want to cache the DNS
@@ -1268,6 +1254,24 @@ class StatsdClient(object):
        else:
            self._target = (host, port)

    def _determine_sock_family(self, host, port):
        addr_info = sock_family = None
        try:
            addr_info = socket.getaddrinfo(host, port, socket.AF_INET)
            sock_family = socket.AF_INET
        except socket.gaierror:
            try:
                addr_info = socket.getaddrinfo(host, port, socket.AF_INET6)
                sock_family = socket.AF_INET6
            except socket.gaierror:
                # Don't keep the server from starting from what could be a
                # transient DNS failure. Any hostname will get re-resolved as
                # necessary in the .sendto() calls.
                # However, we don't know if we're IPv4 or IPv6 in this case, so
                # we assume legacy IPv4.
                sock_family = socket.AF_INET
        return addr_info, sock_family

    def _set_prefix(self, tail_prefix):
        """
        Modifies the prefix that is added to metric names. The resulting prefix
@@ -1342,12 +1346,16 @@ class StatsdClient(object):
    def decrement(self, metric, sample_rate=None):
        return self.update_stats(metric, -1, sample_rate)

    def timing(self, metric, timing_ms, sample_rate=None):
    def _timing(self, metric, timing_ms, sample_rate):
        # This method was added to disaggregate timing metrics when testing
        return self._send(metric, timing_ms, 'ms', sample_rate)

    def timing(self, metric, timing_ms, sample_rate=None):
        return self._timing(metric, timing_ms, sample_rate)

    def timing_since(self, metric, orig_time, sample_rate=None):
        return self.timing(metric, (time.time() - orig_time) * 1000,
                           sample_rate)
        return self._timing(metric, (time.time() - orig_time) * 1000,
                            sample_rate)

    def transfer_rate(self, metric, elapsed_time, byte_xfer, sample_rate=None):
        if byte_xfer:
@@ -29,6 +29,74 @@ class WARN_DEPRECATED(Exception):
        print(self.msg)


class FakeStatsdClient(utils.StatsdClient):
    def __init__(self, host, port, base_prefix='', tail_prefix='',
                 default_sample_rate=1, sample_rate_factor=1, logger=None):
        super(FakeStatsdClient, self).__init__(
            host, port, base_prefix, tail_prefix, default_sample_rate,
            sample_rate_factor, logger)
        self.clear()

        # Capture then call parent public stat functions
        self.update_stats = self._capture("update_stats")
        self.increment = self._capture("increment")
        self.decrement = self._capture("decrement")
        self.timing = self._capture("timing")
        self.timing_since = self._capture("timing_since")
        self.transfer_rate = self._capture("transfer_rate")

    def _capture(self, func_name):
        func = getattr(super(FakeStatsdClient, self), func_name)

        def wrapper(*args, **kwargs):
            self.calls[func_name].append((args, kwargs))
            return func(*args, **kwargs)
        return wrapper

    def _determine_sock_family(self, host, port):
        return None, None

    def _open_socket(self):
        return self

    # sendto and close are mimicking the socket calls.
    def sendto(self, msg, target):
        self.sendto_calls.append((msg, target))

    def close(self):
        pass

    def _send(self, *args, **kwargs):
        self.send_calls.append((args, kwargs))
        super(FakeStatsdClient, self)._send(*args, **kwargs)

    def clear(self):
        self.send_calls = []
        self.calls = defaultdict(list)
        self.sendto_calls = []

    def get_increments(self):
        return [call[0][0] for call in self.calls['increment']]

    def get_increment_counts(self):
        # note: this method reports the sum of stats sent via the increment
        # method only; consider using get_stats_counts instead to get the sum
        # of stats sent via both the increment and update_stats methods
        counts = defaultdict(int)
        for metric in self.get_increments():
            counts[metric] += 1
        return counts

    def get_update_stats(self):
        return [call[0][:2] for call in self.calls['update_stats']]

    def get_stats_counts(self):
        counts = defaultdict(int)
        for metric, step in self.get_update_stats():
            counts[metric] += step
        return counts


class CaptureLog(object):
    """
    Captures log records passed to the ``handle`` method and provides accessor
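The _capture wrapper above records every public stat call as an (args, kwargs) tuple in a per-method list, while _send and sendto record what would have gone over the wire. A hedged sketch of what the recorded structures look like, assuming the defaults shown in this patch (sample rate 1, so nothing is dropped; the metric name is made up):

    client = FakeStatsdClient('host', 8125)
    client.timing_since('op.timing', 123.0)

    # Each public call is captured as an (args, kwargs) tuple:
    args, kwargs = client.calls['timing_since'][-1]
    assert args == ('op.timing', 123.0) and kwargs == {}

    # _send still runs the real formatting logic, so the fake also records
    # the serialized statsd message that sendto() would have emitted:
    msg, target = client.sendto_calls[-1]
    assert msg.startswith(b'op.timing:') and target == ('host', 8125)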
@@ -79,7 +147,7 @@ class FakeLogger(logging.Logger, CaptureLog):
        self.level = logging.NOTSET
        if 'facility' in kwargs:
            self.facility = kwargs['facility']
        self.statsd_client = None
        self.statsd_client = FakeStatsdClient("host", 8125)
        self.thread_locals = None
        self.parent = None
@@ -92,6 +160,13 @@ class FakeLogger(logging.Logger, CaptureLog):
        NOTICE: 'notice',
    }

    def clear(self):
        self._clear()
        self.statsd_client.clear()

    def close(self):
        self.clear()

    def warn(self, *args, **kwargs):
        raise WARN_DEPRECATED("Deprecated Method warn use warning instead")
@@ -116,53 +191,9 @@ class FakeLogger(logging.Logger, CaptureLog):
            self.log_dict[store_name].append((tuple(cargs), captured))
        super(FakeLogger, self)._log(level, msg, *args, **kwargs)

    def _store_in(store_name):
        def stub_fn(self, *args, **kwargs):
            self.log_dict[store_name].append((args, kwargs))
        return stub_fn

    # mock out the StatsD logging methods:
    update_stats = _store_in('update_stats')
    increment = _store_in('increment')
    decrement = _store_in('decrement')
    timing = _store_in('timing')
    timing_since = _store_in('timing_since')
    transfer_rate = _store_in('transfer_rate')
    set_statsd_prefix = _store_in('set_statsd_prefix')

    def get_increments(self):
        return [call[0][0] for call in self.log_dict['increment']]

    def get_increment_counts(self):
        # note: this method reports the sum of stats sent via the increment
        # method only; consider using get_stats_counts instead to get the sum
        # of stats sent via both the increment and update_stats methods
        counts = {}
        for metric in self.get_increments():
            if metric not in counts:
                counts[metric] = 0
            counts[metric] += 1
        return counts

    def get_update_stats(self):
        return [call[0] for call in self.log_dict['update_stats']]

    def get_stats_counts(self):
        # return dict key->count for stats, aggregating calls to both the
        # increment and update methods
        counts = self.get_increment_counts()
        for metric, step in self.get_update_stats():
            if metric not in counts:
                counts[metric] = 0
            counts[metric] += step
        return counts

    def setFormatter(self, obj):
        self.formatter = obj

    def close(self):
        self._clear()

    def set_name(self, name):
        # don't touch _handlers
        self._name = name
@@ -214,20 +245,6 @@ class DebugLogger(FakeLogger):

class DebugLogAdapter(utils.LogAdapter):

    def _send_to_logger(name):
        def stub_fn(self, *args, **kwargs):
            return getattr(self.logger, name)(*args, **kwargs)
        return stub_fn

    # delegate to FakeLogger's mocks
    update_stats = _send_to_logger('update_stats')
    increment = _send_to_logger('increment')
    decrement = _send_to_logger('decrement')
    timing = _send_to_logger('timing')
    timing_since = _send_to_logger('timing_since')
    transfer_rate = _send_to_logger('transfer_rate')
    set_statsd_prefix = _send_to_logger('set_statsd_prefix')

    def __getattribute__(self, name):
        try:
            return object.__getattribute__(self, name)
@@ -130,8 +130,9 @@ class TestAuditorRealBroker(unittest.TestCase):
            'The total container_count for the account a (%d) does not match '
            'the sum of container_count across policies (%d)'
            % (num_containers, num_containers - 1), error_message)
        self.assertEqual(test_auditor.logger.get_increment_counts(),
                         {'failures': 1})
        self.assertEqual(
            test_auditor.logger.statsd_client.get_increment_counts(),
            {'failures': 1})


if __name__ == '__main__':
@@ -469,7 +469,8 @@ class TestReaper(unittest.TestCase):
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 1)
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 1)
        self.assertEqual(r.stats_containers_deleted, 1)

    def test_reap_container_partial_fail(self):
@@ -488,7 +489,8 @@ class TestReaper(unittest.TestCase):
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 4)
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 4)
        self.assertEqual(r.stats_containers_possibly_remaining, 1)

    def test_reap_container_full_fail(self):
@@ -507,7 +509,8 @@ class TestReaper(unittest.TestCase):
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 5)
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 5)
        self.assertEqual(r.stats_containers_remaining, 1)

    def test_reap_container_get_object_timeout(self):
@@ -2649,7 +2649,7 @@ class TestAccountController(unittest.TestCase):
        with mock.patch(
                'time.time',
                mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
                                            10002.0])):
                                            10002.0, 10002.0])):
            with mock.patch(
                    'os.getpid', mock.MagicMock(return_value=1234)):
                req.get_response(self.controller)
@@ -243,8 +243,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_header_auth': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_header_auth': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_bad_method(self):
        req = Request.blank('/',
@@ -253,8 +254,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
        self.assertEqual({'405.MethodNotAllowed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'405.MethodNotAllowed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_bad_method_but_method_exists_in_controller(self):
        req = Request.blank(
@@ -264,8 +266,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
        self.assertEqual({'405.MethodNotAllowed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'405.MethodNotAllowed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_path_info_encode(self):
        bucket_name = 'b%75cket'
@@ -397,8 +400,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.expired': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.expired': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls(self):
        # Set expire to last 32b timestamp value
@@ -445,8 +449,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_expires': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_no_sign(self):
        expire = '2147483647'  # 19 Jan 2038 03:14:07
@@ -458,8 +463,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_query_auth': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_no_access(self):
        expire = '2147483647'  # 19 Jan 2038 03:14:07
@@ -470,8 +476,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_query_auth': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_v4(self):
        req = Request.blank(
@@ -518,8 +525,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             'AuthorizationQueryParametersError')
            self.assertEqual(self._get_error_message(body), message)
            self.assertIn(extra, body)
            self.assertEqual({'400.AuthorizationQueryParametersError': 1},
                             self.s3api.logger.logger.get_increment_counts())
            self.assertEqual(
                {'400.AuthorizationQueryParametersError': 1},
                self.s3api.logger.logger.statsd_client.get_increment_counts())

        dt = self.get_v4_amz_date_header().split('T', 1)[0]
        test('test:tester/not-a-date/us-east-1/s3/aws4_request',
@@ -548,8 +556,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_date': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_date': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_v4_invalid_algorithm(self):
        req = Request.blank(
@@ -565,8 +574,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        self.assertEqual({'400.InvalidArgument': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidArgument': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_v4_missing_signed_headers(self):
        req = Request.blank(
@@ -582,8 +592,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body),
                         'AuthorizationHeaderMalformed')
        self.assertEqual({'400.AuthorizationHeaderMalformed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.AuthorizationHeaderMalformed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_v4_invalid_credentials(self):
        req = Request.blank('/bucket/object'
@@ -598,8 +609,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_credential': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_credential': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signed_urls_v4_missing_signature(self):
        req = Request.blank(
@@ -614,8 +626,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_query_auth': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_bucket_virtual_hosted_style(self):
        req = Request.blank('/',
@@ -712,8 +725,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidURI')
        self.assertEqual({'400.InvalidURI': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidURI': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_object_create_bad_md5_unreadable(self):
        req = Request.blank('/bucket/object',
@@ -723,8 +737,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidDigest')
        self.assertEqual({'400.InvalidDigest': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidDigest': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_object_create_bad_md5_too_short(self):
        too_short_digest = md5(b'hey', usedforsecurity=False).digest()[:-1]
@@ -739,8 +754,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidDigest')
        self.assertEqual({'400.InvalidDigest': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidDigest': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_object_create_bad_md5_bad_padding(self):
        too_short_digest = md5(b'hey', usedforsecurity=False).digest()
@@ -755,8 +771,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidDigest')
        self.assertEqual({'400.InvalidDigest': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidDigest': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_object_create_bad_md5_too_long(self):
        too_long_digest = md5(
@@ -772,8 +789,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidDigest')
        self.assertEqual({'400.InvalidDigest': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidDigest': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_invalid_metadata_directive(self):
        req = Request.blank('/',
@@ -784,8 +802,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        self.assertEqual({'400.InvalidArgument': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidArgument': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_invalid_storage_class(self):
        req = Request.blank('/',
@@ -795,8 +814,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidStorageClass')
        self.assertEqual({'400.InvalidStorageClass': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidStorageClass': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_invalid_ssc(self):
        req = Request.blank('/',
@@ -806,8 +826,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        self.assertEqual({'400.InvalidArgument': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidArgument': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def _test_unsupported_header(self, header, value=None):
        if value is None:
@@ -820,8 +841,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual({'501.NotImplemented': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_mfa(self):
        self._test_unsupported_header('x-amz-mfa')
@@ -881,8 +903,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual({'501.NotImplemented': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_notification(self):
        self._test_unsupported_resource('notification')
@@ -909,8 +932,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual({},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        req = Request.blank('/bucket?tagging',
                            environ={'REQUEST_METHOD': 'PUT'},
@@ -919,8 +943,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual({'501.NotImplemented': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        req = Request.blank('/bucket?tagging',
                            environ={'REQUEST_METHOD': 'DELETE'},
@@ -929,8 +954,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual({'501.NotImplemented': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_restore(self):
        self._test_unsupported_resource('restore')
@@ -945,8 +971,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.assertEqual(elem.find('./Code').text, 'MethodNotAllowed')
        self.assertEqual(elem.find('./Method').text, 'POST')
        self.assertEqual(elem.find('./ResourceType').text, 'ACL')
        self.assertEqual({'405.MethodNotAllowed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'405.MethodNotAllowed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    @mock.patch.object(registry, '_sensitive_headers', set())
    @mock.patch.object(registry, '_sensitive_params', set())
@@ -1079,8 +1106,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '403')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        self.assertEqual({'403.AccessDenied.invalid_date': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_date': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signature_v4_no_payload(self):
        environ = {
@@ -1100,8 +1128,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.assertEqual(
            self._get_error_message(body),
            'Missing required header for this request: x-amz-content-sha256')
        self.assertEqual({'400.InvalidRequest': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.InvalidRequest': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_signature_v4_bad_authorization_string(self):
        def test(auth_str, error, msg, metric, extra=b''):
@@ -1119,8 +1148,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
            self.assertEqual(self._get_error_code(body), error)
            self.assertEqual(self._get_error_message(body), msg)
            self.assertIn(extra, body)
            self.assertEqual({metric: 1},
                             self.s3api.logger.logger.get_increment_counts())
            self.assertEqual(
                {metric: 1},
                self.s3api.logger.logger.statsd_client.get_increment_counts())

        auth_str = ('AWS4-HMAC-SHA256 '
                    'SignedHeaders=host;x-amz-date,'
@@ -1382,8 +1412,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        # FIXME: should this fail as 400 or pass via query auth?
        # for now, 403 forbidden for safety
        self.assertEqual(status.split()[0], '403', body)
        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_expires': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        # But if we are missing Signature in query param
        req = Request.blank(
@@ -1397,8 +1428,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '403', body)
        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_expires': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_s3api_with_only_s3_token(self):
        self.swift = FakeSwift()
@@ -1510,9 +1542,10 @@ class TestS3ApiMiddleware(S3ApiTestCase):
        self.assertEqual(1, mock_req.call_count)
        # it never even tries to contact keystone
        self.assertEqual(0, mock_fetch.call_count)
        statsd_client = self.s3api.logger.logger.statsd_client
        self.assertEqual(
            {'403.SignatureDoesNotMatch': 1},
            self.s3api.logger.logger.get_increment_counts())
            statsd_client.get_increment_counts())

    def test_s3api_with_only_s3_token_in_s3acl(self):
        self.swift = FakeSwift()
@@ -1564,30 +1597,35 @@ class TestS3ApiMiddleware(S3ApiTestCase):

        status, _, body = do_test(800)
        self.assertEqual('200 OK', status)
        self.assertFalse(self.s3api.logger.logger.get_increment_counts())
        self.assertFalse(
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        status, _, body = do_test(-800)
        self.assertEqual('200 OK', status)
        self.assertFalse(self.s3api.logger.logger.get_increment_counts())
        self.assertFalse(
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        status, _, body = do_test(1000)
        self.assertEqual('403 Forbidden', status)
        self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
        self.assertEqual({'403.RequestTimeTooSkewed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.RequestTimeTooSkewed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        status, _, body = do_test(-1000)
        self.assertEqual('403 Forbidden', status)
        self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
        self.assertEqual({'403.RequestTimeTooSkewed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.RequestTimeTooSkewed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        self.s3api.conf.allowable_clock_skew = 100
        status, _, body = do_test(800)
        self.assertEqual('403 Forbidden', status)
        self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
        self.assertEqual({'403.RequestTimeTooSkewed': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.RequestTimeTooSkewed': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    def test_s3api_error_metric(self):
        class KaboomResponse(ErrorResponse):
@@ -1607,17 +1645,20 @@ class TestS3ApiMiddleware(S3ApiTestCase):
            self.call_s3api(req)

        do_test(ErrorResponse(status=403, msg='not good', reason='bad'))
        self.assertEqual({'403.ErrorResponse.bad': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.ErrorResponse.bad': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        do_test(AccessDenied(msg='no entry', reason='invalid_date'))
        self.assertEqual({'403.AccessDenied.invalid_date': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'403.AccessDenied.invalid_date': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

        # check whitespace replaced with underscore
        do_test(KaboomResponse(status=400, msg='boom', reason='boom boom'))
        self.assertEqual({'400.ka_boom.boom_boom': 1},
                         self.s3api.logger.logger.get_increment_counts())
        self.assertEqual(
            {'400.ka_boom.boom_boom': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())


if __name__ == '__main__':
@@ -107,7 +107,8 @@ class TestBackendRatelimitMiddleware(unittest.TestCase):
                fake_time[0] += 0.01
            self.assertEqual(
                ratelimited,
                logger.get_increment_counts().get('backend.ratelimit', 0))
                logger.statsd_client.get_increment_counts().get(
                    'backend.ratelimit', 0))
            return success

    def test_ratelimited(self):
@@ -163,7 +164,8 @@ class TestBackendRatelimitMiddleware(unittest.TestCase):
            resp = req.get_response(rl)
            self.assertEqual(200, resp.status_int)
            self.assertEqual(
                0, logger.get_increment_counts().get('backend.ratelimit', 0))
                0, logger.statsd_client.get_increment_counts().get(
                    'backend.ratelimit', 0))

        do_test('/recon/version')
        do_test('/healthcheck')
@@ -1553,7 +1553,7 @@ class TestFormPost(unittest.TestCase):
            do_test(digest, False)

        # NB: one increment per *upload*, not client request
        self.assertEqual(self.logger.get_increment_counts(), {
        self.assertEqual(self.logger.statsd_client.get_increment_counts(), {
            'formpost.digests.sha1': 4,
            'formpost.digests.sha256': 4,
            'formpost.digests.sha512': 4,
@@ -123,7 +123,7 @@ class TestProxyLogging(unittest.TestCase):
        return info_calls[0][0][0].split(' ')

    def assertTiming(self, exp_metric, app, exp_timing=None):
        timing_calls = app.access_logger.log_dict['timing']
        timing_calls = app.access_logger.statsd_client.calls['timing']
        found = False
        for timing_call in timing_calls:
            self.assertEqual({}, timing_call[1])
@@ -138,12 +138,13 @@ class TestProxyLogging(unittest.TestCase):
                exp_metric, timing_calls))

    def assertNotTiming(self, not_exp_metric, app):
        timing_calls = app.access_logger.log_dict['timing']
        timing_calls = app.access_logger.statsd_client.calls['timing']
        for timing_call in timing_calls:
            self.assertNotEqual(not_exp_metric, timing_call[0][0])

    def assertUpdateStats(self, exp_metrics_and_values, app):
        update_stats_calls = sorted(app.access_logger.log_dict['update_stats'])
        update_stats_calls = sorted(
            app.access_logger.statsd_client.calls['update_stats'])
        got_metrics_values_and_kwargs = [(usc[0][0], usc[0][1], usc[1])
                                         for usc in update_stats_calls]
        exp_metrics_values_and_kwargs = [(emv[0], emv[1], {})
@@ -239,7 +240,7 @@ class TestProxyLogging(unittest.TestCase):
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                             4 + 7),
                                            ('object.policy.0.GET.321.xfer',
                                                4 + 7)],
                                             4 + 7)],
                                           app)
                else:
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
@@ -335,8 +336,8 @@ class TestProxyLogging(unittest.TestCase):
                self.assertNotTiming(
                    '%s.PUT.314.first-byte.timing' % exp_type, app)
                # No results returned for the non-existent policy
                self.assertUpdateStats([('object.PUT.314.xfer', 6 + 8)],
                                       app)
                self.assertUpdateStats(
                    [('object.PUT.314.xfer', 6 + 8)], app)

    def test_log_request_stat_method_filtering_default(self):
        method_map = {
@@ -506,7 +507,8 @@ class TestProxyLogging(unittest.TestCase):
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'somechunksof data')
        self.assertEqual(log_parts[11], str(len(resp_body)))
        self.assertUpdateStats([('SOS.GET.200.xfer', len(resp_body))], app)
        self.assertUpdateStats([('SOS.GET.200.xfer', len(resp_body))],
                               app)

    def test_log_headers(self):
        for conf_key in ['access_log_headers', 'log_headers']:
@@ -173,7 +173,7 @@ class TestTempURL(unittest.TestCase):
            key, hmac_body, hashlib.sha512).digest())
        self.assert_valid_sig(expires, path, [key], b'sha512:' + sig)

        self.assertEqual(self.logger.get_increment_counts(), {
        self.assertEqual(self.logger.statsd_client.get_increment_counts(), {
            'tempurl.digests.sha1': 1,
            'tempurl.digests.sha256': 2,
            'tempurl.digests.sha512': 1
@@ -1155,8 +1155,9 @@ class TestDBReplicator(unittest.TestCase):
            self.assertFalse(os.path.exists(temp_file.name))
            self.assertTrue(os.path.exists(temp_hash_dir2))
            self.assertTrue(os.path.exists(temp_file2.name))
            self.assertEqual([(('removes.some_device',), {})],
                             replicator.logger.log_dict['increment'])
            self.assertEqual(
                [(('removes.some_device',), {})],
                replicator.logger.statsd_client.calls['increment'])
            self.assertEqual(1, replicator.stats['remove'])

            temp_file2.db_file = temp_file2.name
@@ -1169,8 +1170,9 @@ class TestDBReplicator(unittest.TestCase):
            self.assertFalse(os.path.exists(temp_file.name))
            self.assertFalse(os.path.exists(temp_hash_dir2))
            self.assertFalse(os.path.exists(temp_file2.name))
            self.assertEqual([(('removes.some_device',), {})] * 2,
                             replicator.logger.log_dict['increment'])
            self.assertEqual(
                [(('removes.some_device',), {})] * 2,
                replicator.logger.statsd_client.calls['increment'])
            self.assertEqual(2, replicator.stats['remove'])
        finally:
            rmtree(temp_dir)
@@ -1134,28 +1134,28 @@ class TestMemcached(unittest.TestCase):
        with patch('time.time',) as mock_time:
            mock_time.return_value = 1000.99
            memcache_client.set('some_key', [1, 2, 3])
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.set.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 1000.99)
            mock_time.return_value = 2000.99
            self.assertEqual(memcache_client.get('some_key'), [1, 2, 3])
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.get.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 2000.99)
            mock_time.return_value = 3000.99
            self.assertEqual(memcache_client.decr('decr_key', delta=5), 0)
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.decr.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 3000.99)
            mock_time.return_value = 4000.99
            self.assertEqual(memcache_client.incr('decr_key', delta=5), 5)
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.incr.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4000.99)
            mock_time.return_value = 5000.99
            memcache_client.set_multi(
                {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key')
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.set_multi.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 5000.99)
            mock_time.return_value = 6000.99
@@ -1165,12 +1165,12 @@ class TestMemcached(unittest.TestCase):
                    'multi_key'),
                [[4, 5, 6],
                 [1, 2, 3]])
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.get_multi.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 6000.99)
            mock_time.return_value = 7000.99
            memcache_client.delete('some_key')
            last_stats = self.logger.log_dict['timing_since'][-1]
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.delete.timing', last_stats[0][0])
            self.assertEqual(last_stats[0][1], 7000.99)
@@ -1190,8 +1190,10 @@ class TestMemcached(unittest.TestCase):
            mock_time.return_value = 4000.99
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.incr('incr_key', delta=5)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.incr.errors.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4000.99)
@@ -1218,8 +1220,10 @@ class TestMemcached(unittest.TestCase):
                memcache_client.set(
                    'set_key', [1, 2, 3],
                    raise_on_error=True)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.set.errors.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4000.99)
@@ -1244,8 +1248,10 @@ class TestMemcached(unittest.TestCase):
            mock_time.return_value = 4000.99
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.get('get_key', raise_on_error=True)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.get.errors.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4000.99)
@@ -1270,8 +1276,10 @@ class TestMemcached(unittest.TestCase):
            mock_time.return_value = 4000.99
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.get('get_key', raise_on_error=True)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.get.conn_err.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4000.99)
@@ -1297,8 +1305,10 @@ class TestMemcached(unittest.TestCase):
            mock_time.side_effect = itertools.count(4000.99, 1.0)
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.incr('nvratelimit/v2/wf/124593', delta=5)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.incr.timeout.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4002.99)
@@ -1330,8 +1340,10 @@ class TestMemcached(unittest.TestCase):
                memcache_client.set(
                    'shard-updating-v2/acc/container', [1, 2, 3],
                    raise_on_error=True)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.set.timeout.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4002.99)
@@ -1362,8 +1374,10 @@ class TestMemcached(unittest.TestCase):
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.get(
                    'shard-updating-v2/acc/container', raise_on_error=True)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(
                self.logger.statsd_client.calls['timing_since'])
            last_stats = \
                self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.get.timeout.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4002.99)
@@ -1407,8 +1421,8 @@ class TestMemcached(unittest.TestCase):
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.incr(
                    'shard-updating-v2/acc/container', time=1.23)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(self.logger.statsd_client.calls['timing_since'])
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.incr.conn_err.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4002.99)
@@ -1440,8 +1454,8 @@ class TestMemcached(unittest.TestCase):
            with self.assertRaises(MemcacheConnectionError):
                memcache_client.incr(
                    'shard-updating-v2/acc/container', time=1.23)
            self.assertTrue(self.logger.log_dict['timing_since'])
            last_stats = self.logger.log_dict['timing_since'][-1]
            self.assertTrue(self.logger.statsd_client.calls['timing_since'])
            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
            self.assertEqual('memcached.incr.errors.timing',
                             last_stats[0][0])
            self.assertEqual(last_stats[0][1], 4002.99)
@@ -1451,7 +1465,7 @@ class TestMemcached(unittest.TestCase):
        self.assertIn("with key_prefix shard-updating-v2/acc, method incr, "
                      "time_spent 1.0" % resp.split(), error_logs[0])
        self.assertIn('1.2.3.4:11211', memcache_client._errors)
        self.assertEqual([4004.99], memcache_client._errors['1.2.3.4:11211'])
        self.assertEqual([4005.99], memcache_client._errors['1.2.3.4:11211'])


class ExcConfigParser(object):
@@ -884,9 +884,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        # push to remote, and third node was missing (also maybe reconciler)
        self.assertTrue(2 < daemon.stats['rsync'] <= 3, daemon.stats['rsync'])
        self.assertEqual(
            1, self.logger.get_stats_counts().get('reconciler_db_created'))
            1, self.logger.statsd_client.get_stats_counts().get(
                'reconciler_db_created'))
        self.assertFalse(
            self.logger.get_stats_counts().get('reconciler_db_exists'))
            self.logger.statsd_client.get_stats_counts().get(
                'reconciler_db_exists'))

        # grab the rsynced instance of remote_broker
        remote_broker = self._get_broker('a', 'c', node_index=1)
@@ -909,9 +911,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.logger.clear()
        reconciler = daemon.get_reconciler_broker(misplaced[0]['created_at'])
        self.assertFalse(
            self.logger.get_stats_counts().get('reconciler_db_created'))
            self.logger.statsd_client.get_stats_counts().get(
                'reconciler_db_created'))
        self.assertEqual(
            1, self.logger.get_stats_counts().get('reconciler_db_exists'))
            1, self.logger.statsd_client.get_stats_counts().get(
                'reconciler_db_exists'))
        # but it may not be on the same node as us anymore though...
        reconciler = self._get_broker(reconciler.account,
                                      reconciler.container, node_index=0)
@@ -1482,7 +1486,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.assertEqual(0, daemon.stats['rsync'])
        self.assertEqual(1, daemon.stats['diff'])
        self.assertEqual({'diffs': 1},
                         daemon.logger.get_increment_counts())
                         daemon.logger.statsd_client.get_increment_counts())

        # update one shard range
        shard_ranges[1].update_meta(50, 50)
@@ -1494,7 +1498,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.assertEqual(0, daemon.stats['rsync'])
        self.assertEqual(0, daemon.stats['diff'])
        self.assertEqual({'no_changes': 1},
                         daemon.logger.get_increment_counts())
                         daemon.logger.statsd_client.get_increment_counts())

        # now enable local broker for sharding
        own_sr = broker.enable_sharding(Timestamp.now())
@@ -1509,7 +1513,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.assertEqual(0, daemon.stats['no_change'])
        self.assertEqual(0, daemon.stats['rsync'])
        self.assertEqual(0, daemon.stats['diff'])
        self.assertFalse(daemon.logger.get_increments())
        self.assertFalse(daemon.logger.statsd_client.get_increments())

        daemon = check_replicate(broker_ranges, broker, remote_broker)
        check_stats(daemon)
@@ -1614,7 +1618,8 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.assertIn('Bad response 500', error_lines[0])
        self.assertFalse(error_lines[1:])
        self.assertEqual(1, daemon.stats['diff'])
        self.assertEqual(1, daemon.logger.get_increment_counts()['diffs'])
        self.assertEqual(
            1, daemon.logger.statsd_client.get_increment_counts()['diffs'])

    def test_sync_shard_ranges_timeout_in_fetch(self):
        # verify that replication is not considered successful if
@@ -1652,9 +1657,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
        self.assertIn('ERROR syncing /', error_lines[0])
        self.assertFalse(error_lines[1:])
        self.assertEqual(0, daemon.stats['diff'])
        self.assertNotIn('diffs', daemon.logger.get_increment_counts())
        self.assertNotIn(
            'diffs', daemon.logger.statsd_client.get_increment_counts())
        self.assertEqual(1, daemon.stats['failure'])
        self.assertEqual(1, daemon.logger.get_increment_counts()['failures'])
        self.assertEqual(
            1, daemon.logger.statsd_client.get_increment_counts()['failures'])

    def test_sync_shard_ranges_none_to_sync(self):
        # verify that merge_shard_ranges is not sent if there are no shard
@@ -5530,7 +5530,7 @@ class TestContainerController(unittest.TestCase):
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        with mock.patch('time.time',
                        mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
                                                    10002.0])), \
                                                    10002.0, 10002.0])), \
                mock.patch('os.getpid', mock.MagicMock(return_value=1234)):
            req.get_response(self.controller)
        info_lines = self.controller.logger.get_lines_for_level('info')
@@ -557,7 +557,7 @@ class TestSharder(BaseTestSharder):
                    'failure': 2,
                    'completed': 1}
        self._assert_stats(expected, sharder, 'visited')
        counts = sharder.logger.get_stats_counts()
        counts = sharder.logger.statsd_client.get_stats_counts()
        self.assertEqual(2, counts.get('visited_success'))
        self.assertEqual(1, counts.get('visited_failure'))
        self.assertIsNone(counts.get('visited_completed'))
@@ -572,7 +572,7 @@ class TestSharder(BaseTestSharder):
        self._assert_stats({'found': 4}, sharder, 'scanned')
        self._assert_stats({'placed': 456}, sharder, 'misplaced')
        self.assertEqual({'misplaced_placed': 456},
                         sharder.logger.get_stats_counts())
                         sharder.logger.statsd_client.get_stats_counts())

    def test_run_forever(self):
        conf = {'recon_cache_path': self.tempdir,
@@ -1569,9 +1569,11 @@ class TestSharder(BaseTestSharder):
        self.assertIsInstance(stats['max_time'], float)
        self.assertLessEqual(stats['min_time'], stats['max_time'])
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_created'))
        self.assertFalse(
            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
            sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_exists'))
        self.assertEqual(SHARDING, broker.get_db_state())
        sharder._replicate_object.assert_called_once_with(
            0, expected_shard_dbs[0], 0)
@@ -1634,9 +1636,11 @@ class TestSharder(BaseTestSharder):
                    'db_created': 1, 'db_exists': 0}
        self._assert_stats(expected, sharder, 'cleaved')
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_created'))
        self.assertFalse(
            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
            sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_exists'))

        # cleaving state is unchanged
        updated_shard_ranges = broker.get_shard_ranges()
@@ -1673,9 +1677,11 @@ class TestSharder(BaseTestSharder):
        self.assertIsInstance(stats['max_time'], float)
        self.assertLessEqual(stats['min_time'], stats['max_time'])
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_created'))
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_exists'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_exists'))

        self.assertEqual(SHARDING, broker.get_db_state())
        sharder._replicate_object.assert_has_calls(
@@ -1739,9 +1745,11 @@ class TestSharder(BaseTestSharder):
        self.assertIsInstance(stats['max_time'], float)
        self.assertLessEqual(stats['min_time'], stats['max_time'])
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_created'))
        self.assertFalse(
            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
            sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_exists'))

        self.assertEqual(SHARDING, broker.get_db_state())
        sharder._replicate_object.assert_called_once_with(
@@ -1812,9 +1820,11 @@ class TestSharder(BaseTestSharder):
        self.assertIsInstance(stats['max_time'], float)
        self.assertLessEqual(stats['min_time'], stats['max_time'])
        self.assertEqual(
            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
            1, sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_created'))
        self.assertFalse(
            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
            sharder.logger.statsd_client.get_stats_counts().get(
                'cleaved_db_exists'))

        sharder._replicate_object.assert_called_once_with(
            0, expected_shard_dbs[4], 0)
@ -2466,19 +2476,22 @@ class TestSharder(BaseTestSharder):
|
||||
self.assertEqual('', context.cursor)
|
||||
self.assertEqual(10, context.cleave_to_row)
|
||||
self.assertEqual(12, context.max_row) # note that max row increased
|
||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
||||
self.assertEqual('sharder.sharding.move_misplaced',
|
||||
self.logger.log_dict['timing_since'][-3][0][0])
|
||||
self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
|
||||
self.assertEqual('sharder.sharding.set_state',
|
||||
self.logger.log_dict['timing_since'][-2][0][0])
|
||||
self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
|
||||
self.assertEqual('sharder.sharding.cleave',
|
||||
self.logger.log_dict['timing_since'][-1][0][0])
|
||||
self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
|
||||
for call in self.logger.log_dict['timing_since']:
|
||||
self.assertNotIsInstance(call[0][1], Timestamp)
|
||||
self.assertIsInstance(call[0][1], float)
|
||||
self.assertTrue(self.logger.statsd_client.calls['timing_since'])
self.assertEqual(
'sharder.sharding.move_misplaced',
self.logger.statsd_client.calls['timing_since'][-3][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
self.assertEqual(
'sharder.sharding.set_state',
self.logger.statsd_client.calls['timing_since'][-2][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
self.assertEqual(
'sharder.sharding.cleave',
self.logger.statsd_client.calls['timing_since'][-1][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)
lines = sharder.logger.get_lines_for_level('info')
self.assertEqual(
["Kick off container cleaving, own shard range in state "
@ -2524,22 +2537,27 @@ class TestSharder(BaseTestSharder):
'Completed cleaving, DB set to sharded state, path: a/c, db: %s'
% broker.db_file, lines[1:])
self.assertFalse(sharder.logger.get_lines_for_level('warning'))
self.assertTrue(self.logger.log_dict['timing_since'])
self.assertEqual('sharder.sharding.move_misplaced',
self.logger.log_dict['timing_since'][-4][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-4][0][1], 0)
self.assertEqual('sharder.sharding.cleave',
self.logger.log_dict['timing_since'][-3][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
self.assertEqual('sharder.sharding.completed',
self.logger.log_dict['timing_since'][-2][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
self.assertEqual('sharder.sharding.send_sr',
self.logger.log_dict['timing_since'][-1][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
for call in self.logger.log_dict['timing_since']:
self.assertNotIsInstance(call[0][1], Timestamp)
self.assertIsInstance(call[0][1], float)
self.assertTrue(self.logger.statsd_client.calls['timing_since'])
self.assertEqual(
'sharder.sharding.move_misplaced',
self.logger.statsd_client.calls['timing_since'][-4][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-4][0][1], 0)
self.assertEqual(
'sharder.sharding.cleave',
self.logger.statsd_client.calls['timing_since'][-3][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
self.assertEqual(
'sharder.sharding.completed',
self.logger.statsd_client.calls['timing_since'][-2][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
self.assertEqual(
'sharder.sharding.send_sr',
self.logger.statsd_client.calls['timing_since'][-1][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)

def test_cleave_timing_metrics(self):
broker = self._make_broker()
@ -2578,19 +2596,27 @@ class TestSharder(BaseTestSharder):
'Completed cleaving, DB set to sharded state, path: a/c, db: %s'
% broker.db_file, lines[1:])

self.assertTrue(self.logger.log_dict['timing_since'])
self.assertEqual('sharder.sharding.move_misplaced',
self.logger.log_dict['timing_since'][-4][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-4][0][1], 0)
self.assertEqual('sharder.sharding.cleave',
self.logger.log_dict['timing_since'][-3][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
self.assertEqual('sharder.sharding.completed',
self.logger.log_dict['timing_since'][-2][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
self.assertEqual('sharder.sharding.send_sr',
self.logger.log_dict['timing_since'][-1][0][0])
self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
self.assertTrue(self.logger.statsd_client.calls['timing_since'])
self.assertEqual(
'sharder.sharding.move_misplaced',
self.logger.statsd_client.calls['timing_since'][-4][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-4][0][1], 0)
self.assertEqual(
'sharder.sharding.cleave',
self.logger.statsd_client.calls['timing_since'][-3][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
self.assertEqual(
'sharder.sharding.completed',
self.logger.statsd_client.calls['timing_since'][-2][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
self.assertEqual(
'sharder.sharding.send_sr',
self.logger.statsd_client.calls['timing_since'][-1][0][0])
self.assertGreater(
self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)

# check shard ranges were updated to ACTIVE
self.assertEqual([ShardRange.ACTIVE] * 2,
@ -3822,15 +3848,20 @@ class TestSharder(BaseTestSharder):
'db_created': 0, 'db_exists': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_created'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_created'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# sharding - no misplaced objects
self.assertTrue(broker.set_sharding_state())
@ -3839,15 +3870,20 @@ class TestSharder(BaseTestSharder):
sharder._replicate_object.assert_not_called()
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_created'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_created'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# pretend we cleaved up to end of second shard range
context = CleavingContext.load(broker)
@ -3858,15 +3894,20 @@ class TestSharder(BaseTestSharder):
sharder._replicate_object.assert_not_called()
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_created'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_created'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# sharding - misplaced objects
for obj in objects:
@ -3879,15 +3920,20 @@ class TestSharder(BaseTestSharder):
sharder._replicate_object.assert_not_called()
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_created'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_created'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))
self.assertFalse(os.path.exists(expected_shard_dbs[0]))
self.assertFalse(os.path.exists(expected_shard_dbs[1]))
self.assertFalse(os.path.exists(expected_shard_dbs[2]))
@ -3907,13 +3953,17 @@ class TestSharder(BaseTestSharder):
'db_created': 1, 'db_exists': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertEqual(
2, sharder.logger.get_stats_counts()['misplaced_placed'])
2, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_placed'])
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_db_created'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_db_created'])
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# check misplaced objects were moved
self._check_objects(objects[:2], expected_shard_dbs[1])
@ -3950,13 +4000,17 @@ class TestSharder(BaseTestSharder):
)
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertEqual(
4, sharder.logger.get_stats_counts()['misplaced_placed'])
4, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_placed'])
self.assertEqual(
3, sharder.logger.get_stats_counts()['misplaced_db_created'])
3, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_db_created'])
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# check misplaced objects were moved
self._check_objects(new_objects, expected_shard_dbs[0])
@ -3977,13 +4031,17 @@ class TestSharder(BaseTestSharder):
'db_created': 0, 'db_exists': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_created'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_created'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_db_exists'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_db_exists'))

# and then more misplaced updates arrive
newer_objects = [
@ -4012,13 +4070,17 @@ class TestSharder(BaseTestSharder):
'db_created': 1, 'db_exists': 1}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertEqual(
3, sharder.logger.get_stats_counts()['misplaced_placed'])
3, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_placed'])
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_db_created'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_db_created'])
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_db_exists'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_db_exists'])

# check new misplaced objects were moved
self._check_objects(newer_objects[:1] + new_objects,
@ -4212,7 +4274,8 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 4, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
# check misplaced objects were moved
self._check_objects(objects[:2], expected_dbs[1])
self._check_objects(objects[2:3], expected_dbs[2])
@ -4245,7 +4308,8 @@ class TestSharder(BaseTestSharder):
'placed': 4, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
# check misplaced objects were moved
|
||||
self._check_objects(objects[:2], expected_dbs[1])
|
||||
self._check_objects(objects[2:3], expected_dbs[2])
|
||||
@ -4245,7 +4308,8 @@ class TestSharder(BaseTestSharder):
|
||||
'placed': 4, 'unplaced': 0}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts()['misplaced_found'])
|
||||
1, sharder.logger.statsd_client.get_stats_counts()[
|
||||
'misplaced_found'])
|
||||
# check misplaced objects were moved to shard dbs
|
||||
self._check_objects(objects[:2], expected_dbs[1])
|
||||
self._check_objects(objects[2:3], expected_dbs[2])
|
||||
@ -4278,7 +4342,8 @@ class TestSharder(BaseTestSharder):
|
||||
'placed': 4, 'unplaced': 0}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts()['misplaced_found'])
|
||||
1, sharder.logger.statsd_client.get_stats_counts()[
|
||||
'misplaced_found'])
|
||||
# check misplaced objects were moved to shard dbs
|
||||
self._check_objects(objects[:2], expected_dbs[1])
|
||||
self._check_objects(objects[2:3], expected_dbs[2])
|
||||
@ -4311,7 +4376,8 @@ class TestSharder(BaseTestSharder):
|
||||
'placed': 4, 'unplaced': 0}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts()['misplaced_found'])
|
||||
1, sharder.logger.statsd_client.get_stats_counts()[
|
||||
'misplaced_found'])
|
||||
# check misplaced objects were moved to shard dbs
|
||||
self._check_objects(objects[:2], expected_dbs[1])
|
||||
self._check_objects(objects[2:3], expected_dbs[2])
|
||||
@ -4365,15 +4431,20 @@ class TestSharder(BaseTestSharder):
|
||||
'found': 0, 'placed': 0, 'unplaced': 0}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts().get('misplaced_success'))
|
||||
1, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_success'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_failure'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_failure'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_found'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_found'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_placed'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_placed'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_unplaced'))
|
||||
self.assertFalse(sharder.logger.get_lines_for_level('warning'))
|
||||
|
||||
# now put objects
|
||||
@ -4401,15 +4472,20 @@ class TestSharder(BaseTestSharder):
|
||||
'found': 1, 'placed': 2, 'unplaced': 2}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_success'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_success'))
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts().get('misplaced_failure'))
|
||||
1, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_failure'))
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts()['misplaced_found'])
|
||||
1, sharder.logger.statsd_client.get_stats_counts()[
|
||||
'misplaced_found'])
|
||||
self.assertEqual(
|
||||
2, sharder.logger.get_stats_counts().get('misplaced_placed'))
|
||||
2, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_placed'))
|
||||
self.assertEqual(
|
||||
2, sharder.logger.get_stats_counts().get('misplaced_unplaced'))
|
||||
2, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_unplaced'))
|
||||
# some misplaced objects could not be moved...
|
||||
warning_lines = sharder.logger.get_lines_for_level('warning')
|
||||
self.assertIn(
|
||||
@ -4445,15 +4521,20 @@ class TestSharder(BaseTestSharder):
|
||||
'found': 1, 'placed': 2, 'unplaced': 0}
|
||||
self._assert_stats(expected_stats, sharder, 'misplaced')
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts().get('misplaced_success'))
|
||||
1, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_success'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_failure'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_failure'))
|
||||
self.assertEqual(
|
||||
1, sharder.logger.get_stats_counts()['misplaced_found'])
|
||||
1, sharder.logger.statsd_client.get_stats_counts()[
|
||||
'misplaced_found'])
|
||||
self.assertEqual(
|
||||
2, sharder.logger.get_stats_counts().get('misplaced_placed'))
|
||||
2, sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_placed'))
|
||||
self.assertFalse(
|
||||
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
|
||||
sharder.logger.statsd_client.get_stats_counts().get(
|
||||
'misplaced_unplaced'))
|
||||
self.assertFalse(sharder.logger.get_lines_for_level('warning'))
|
||||
|
||||
# check misplaced objects were moved
|
||||
@ -4477,16 +4558,21 @@ class TestSharder(BaseTestSharder):
'found': 0, 'placed': 0, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts().get('misplaced_success'))
1, sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_success'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_failure'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_failure'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_found'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_found'))
self.assertFalse(sharder.logger.get_lines_for_level('warning'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_placed'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))

# and then more misplaced updates arrive
new_objects = [
@ -4520,15 +4606,20 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 2, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts().get('misplaced_success'))
1, sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_success'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_failure'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_failure'))
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertEqual(
2, sharder.logger.get_stats_counts().get('misplaced_placed'))
2, sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_placed'))
self.assertFalse(
sharder.logger.get_stats_counts().get('misplaced_unplaced'))
sharder.logger.statsd_client.get_stats_counts().get(
'misplaced_unplaced'))
self.assertFalse(sharder.logger.get_lines_for_level('warning'))

# check new misplaced objects were moved
@ -4615,7 +4706,8 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 1, 'unplaced': 2}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
warning_lines = sharder.logger.get_lines_for_level('warning')
self.assertIn(
'Failed to find destination for at least 2 misplaced objects',
@ -4650,7 +4742,8 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 2, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertFalse(sharder.logger.get_lines_for_level('warning'))

# check misplaced objects were moved
@ -4701,7 +4794,8 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 5, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])
self.assertFalse(sharder.logger.get_lines_for_level('warning'))

# check *all* the misplaced objects were moved
@ -4757,7 +4851,8 @@ class TestSharder(BaseTestSharder):
'found': 1, 'placed': 2, 'unplaced': 0}
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])

# check new misplaced objects were moved
self._check_objects(objects[:1], expected_shard_dbs[0])
@ -4784,7 +4879,8 @@ class TestSharder(BaseTestSharder):
)
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])

# check older misplaced objects were not merged to shard brokers
self._check_objects(objects[:1], expected_shard_dbs[0])
@ -4823,7 +4919,8 @@ class TestSharder(BaseTestSharder):
)
self._assert_stats(expected_stats, sharder, 'misplaced')
self.assertEqual(
1, sharder.logger.get_stats_counts()['misplaced_found'])
1, sharder.logger.statsd_client.get_stats_counts()[
'misplaced_found'])

# check only the newer misplaced object was moved
self._check_objects([newer_object], expected_shard_dbs[0])

@ -140,9 +140,9 @@ def _create_test_rings(path, next_part_power=None):

def count_stats(logger, key, metric):
count = 0
for record in logger.log_dict[key]:
log_args, log_kwargs = record
m = log_args[0]
for record in logger.statsd_client.calls[key]:
stat_args, stat_kwargs = record
m = stat_args[0]
if re.match(metric, m):
count += 1
return count
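The rewritten count_stats() above iterates the faked calls recorded by the statsd client, where each record is an (args, kwargs) tuple stored under the method name. A small runnable sketch of that record shape; RecordingStatsd and StubLogger are hypothetical stand-ins, not the real FakeStatsdClient:

import re
from collections import defaultdict


class RecordingStatsd(object):
    # store each faked call as an (args, kwargs) tuple under the
    # method name, the shape that count_stats() expects
    def __init__(self):
        self.calls = defaultdict(list)

    def increment(self, metric, sample_rate=None):
        self.calls['increment'].append(((metric,), {}))


class StubLogger(object):
    def __init__(self):
        self.statsd_client = RecordingStatsd()


def count_stats(logger, key, metric):
    # same logic as the rewritten helper above
    count = 0
    for record in logger.statsd_client.calls[key]:
        stat_args, stat_kwargs = record
        if re.match(metric, stat_args[0]):
            count += 1
    return count


logger = StubLogger()
logger.statsd_client.increment('suffix.hashes')
logger.statsd_client.increment('suffix.syncs')
assert count_stats(logger, 'increment', r'suffix\.') == 2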
@ -1704,7 +1704,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.logger.clear()
node_count = len(job['sync_to'])
rehash_count = node_count * rehash_per_job_type[
job['job_type']]
@ -1745,7 +1745,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.logger.clear()
self.reconstructor.process_job(job)
for line in self.logger.get_lines_for_level('error'):
self.assertIn('responded as unmounted', line)
@ -1778,7 +1778,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.logger.clear()
self.reconstructor.process_job(job)
for line in self.logger.get_lines_for_level('error'):
self.assertIn('Invalid response 400', line)
@ -1810,7 +1810,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.logger.clear()
self.reconstructor.process_job(job)
for line in self.logger.get_lines_for_level('error'):
self.assertIn('Timeout (Nones)', line)

@ -1881,9 +1881,9 @@ class TestObjectReplicator(unittest.TestCase):

|
suffix_hashes = sum(
count for (metric, count), _junk in
replicator.logger.logger.log_dict['update_stats']
if metric == 'suffix.hashes')
call[0][1] for call in
replicator.logger.logger.statsd_client.calls['update_stats']
if call[0][0] == 'suffix.hashes')
self.assertEqual(15, suffix_hashes)

def test_run(self):

@ -8155,7 +8155,7 @@ class TestObjectController(BaseTestCase):
self.object_controller.logger = self.logger
with mock.patch('time.time',
side_effect=[10000.0, 10000.0, 10001.0, 10002.0,
10002.0]), \
10002.0, 10002.0]), \
mock.patch('os.getpid', return_value=1234):
req.get_response(self.object_controller)
self.assertEqual(

@ -461,7 +461,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual([
mock.call(self.devices_dir, 'sda1', True),
], mock_check_drive.mock_calls)
self.assertEqual(ou.logger.get_increment_counts(), {})
self.assertEqual(ou.logger.statsd_client.get_increment_counts(), {})

@mock.patch('swift.obj.updater.dump_recon_cache')
@mock.patch.object(object_updater, 'check_drive')
@ -525,16 +525,16 @@ class TestObjectUpdater(unittest.TestCase):
ou.run_once()
self.assertTrue(not os.path.exists(older_op_path))
self.assertTrue(os.path.exists(op_path))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'failures': 1, 'unlinks': 1})
self.assertIsNone(pickle.load(open(op_path, 'rb')).get('successes'))
self.assertEqual(
['ERROR with remote server 127.0.0.1:67890/sda1: '
'Connection refused'] * 3,
ou.logger.get_lines_for_level('error'))
self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
('updater.timing.status.500', mock.ANY),
] * 3)
self.assertEqual(
sorted(ou.logger.statsd_client.calls['timing']),
sorted([(('updater.timing.status.500', mock.ANY), {}), ] * 3))
ou.logger.clear()

bindsock = listen_zero()
@ -590,17 +590,17 @@ class TestObjectUpdater(unittest.TestCase):
if err:
raise err
self.assertTrue(os.path.exists(op_path))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'failures': 1})
self.assertEqual([0],
pickle.load(open(op_path, 'rb')).get('successes'))
self.assertEqual([], ou.logger.get_lines_for_level('error'))
self.assertEqual(
sorted([args for args, kw in ou.logger.log_dict['timing']]),
sorted(ou.logger.statsd_client.calls['timing']),
sorted([
('updater.timing.status.201', mock.ANY),
('updater.timing.status.500', mock.ANY),
('updater.timing.status.500', mock.ANY),
(('updater.timing.status.201', mock.ANY), {}),
(('updater.timing.status.500', mock.ANY), {}),
(('updater.timing.status.500', mock.ANY), {}),
]))

# only 1/2 updates succeeds
@ -611,16 +611,16 @@ class TestObjectUpdater(unittest.TestCase):
if err:
raise err
self.assertTrue(os.path.exists(op_path))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'failures': 1})
self.assertEqual([0, 2],
pickle.load(open(op_path, 'rb')).get('successes'))
self.assertEqual([], ou.logger.get_lines_for_level('error'))
self.assertEqual(
sorted([args for args, kw in ou.logger.log_dict['timing']]),
sorted(ou.logger.statsd_client.calls['timing']),
sorted([
('updater.timing.status.404', mock.ANY),
('updater.timing.status.201', mock.ANY),
(('updater.timing.status.404', mock.ANY), {}),
(('updater.timing.status.201', mock.ANY), {}),
]))

# final update has Timeout
@ -630,7 +630,7 @@ class TestObjectUpdater(unittest.TestCase):
mock_connect.return_value.getresponse.side_effect = exc
ou.run_once()
self.assertTrue(os.path.exists(op_path))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'failures': 1})
self.assertEqual([0, 2],
pickle.load(open(op_path, 'rb')).get('successes'))
@ -638,9 +638,10 @@ class TestObjectUpdater(unittest.TestCase):
self.assertIn(
'Timeout waiting on remote server 127.0.0.1:%d/sda1: 99 seconds'
% bindsock.getsockname()[1], ou.logger.get_lines_for_level('info'))
self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
('updater.timing.status.499', mock.ANY),
])
self.assertEqual(
sorted(ou.logger.statsd_client.calls['timing']),
sorted([
(('updater.timing.status.499', mock.ANY), {})]))

# final update has ConnectionTimeout
ou.logger.clear()
@ -649,7 +650,7 @@ class TestObjectUpdater(unittest.TestCase):
mock_connect.return_value.getresponse.side_effect = exc
ou.run_once()
self.assertTrue(os.path.exists(op_path))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'failures': 1})
self.assertEqual([0, 2],
pickle.load(open(op_path, 'rb')).get('successes'))
@ -657,9 +658,11 @@ class TestObjectUpdater(unittest.TestCase):
self.assertIn(
'Timeout connecting to remote server 127.0.0.1:%d/sda1: 9 seconds'
% bindsock.getsockname()[1], ou.logger.get_lines_for_level('info'))
self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
('updater.timing.status.500', mock.ANY),
])
self.assertEqual(
sorted(ou.logger.statsd_client.calls['timing']),
sorted([
(('updater.timing.status.500', mock.ANY), {})
]))

# final update succeeds
event = spawn(accept, [201])
@ -676,11 +679,13 @@ class TestObjectUpdater(unittest.TestCase):
self.assertTrue(os.path.exists(os.path.dirname(os.path.dirname(
op_path))))
self.assertEqual([], ou.logger.get_lines_for_level('error'))
self.assertEqual(ou.logger.get_increment_counts(),
self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
{'unlinks': 1, 'successes': 1})
self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
('updater.timing.status.201', mock.ANY),
])
self.assertEqual(
sorted(ou.logger.statsd_client.calls['timing']),
sorted([
(('updater.timing.status.201', mock.ANY), {}),
]))

def test_obj_put_legacy_updates(self):
ts = (normalize_timestamp(t) for t in
@ -698,7 +703,7 @@ class TestObjectUpdater(unittest.TestCase):
account, container, obj = 'a', 'c', 'o'
# write an async
for op in ('PUT', 'DELETE'):
self.logger._clear()
self.logger.clear()
daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
dfmanager = DiskFileManager(conf, daemon.logger)
# don't include storage-policy-index in headers_out pickle
@ -728,9 +733,9 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(method, op)
self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
str(int(policy)))
self.assertEqual(daemon.logger.get_increment_counts(),
{'successes': 1, 'unlinks': 1,
'async_pendings': 1})
self.assertEqual(
daemon.logger.statsd_client.get_increment_counts(),
{'successes': 1, 'unlinks': 1, 'async_pendings': 1})

def _write_async_update(self, dfmanager, timestamp, policy,
headers=None, container_path=None):
@ -791,7 +796,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(method, 'PUT')
self.assertDictEqual(expected, headers)
self.assertEqual(
daemon.logger.get_increment_counts(),
daemon.logger.statsd_client.get_increment_counts(),
{'successes': 1, 'unlinks': 1, 'async_pendings': 1})
self.assertFalse(os.listdir(async_dir))
daemon.logger.clear()
@ -908,7 +913,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(
{'redirects': 1, 'successes': 1,
'unlinks': 1, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
self.assertFalse(os.listdir(async_dir)) # no async file

def test_obj_put_async_root_update_redirected_previous_success(self):
@ -940,7 +945,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'failures': 1, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
async_path, async_data = self._check_async_file(async_dir)
self.assertEqual(dict(orig_async_data, successes=[1]), async_data)

@ -968,7 +973,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(
{'redirects': 1, 'successes': 1, 'failures': 1, 'unlinks': 1,
'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
self.assertFalse(os.listdir(async_dir)) # no async file

def _check_async_file(self, async_dir):
@ -1016,7 +1021,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'failures': 1, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
# async file still intact
async_path, async_data = self._check_async_file(async_dir)
self.assertEqual(orig_async_path, async_path)
@ -1095,7 +1100,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'redirects': 2, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
# update failed, we still have pending file with most recent redirect
# response Location header value added to data
async_path, async_data = self._check_async_file(async_dir)
@ -1121,7 +1126,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(
{'redirects': 2, 'successes': 1, 'unlinks': 1,
'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
self.assertFalse(os.listdir(async_dir)) # no async file

def test_obj_put_async_update_redirection_loop(self):
@ -1169,7 +1174,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'redirects': 2, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
# update failed, we still have pending file with most recent redirect
# response Location header value added to data
async_path, async_data = self._check_async_file(async_dir)
@ -1201,7 +1206,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'redirects': 4, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
# update failed, we still have pending file with most recent redirect
# response Location header value from root added to persisted data
async_path, async_data = self._check_async_file(async_dir)
@ -1231,7 +1236,7 @@ class TestObjectUpdater(unittest.TestCase):
[req['path'] for req in conn.requests])
self.assertEqual(
{'redirects': 6, 'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
# update failed, we still have pending file, but container_path is None
# because most recent redirect location was a repeat
async_path, async_data = self._check_async_file(async_dir)
@ -1255,7 +1260,7 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(
{'redirects': 6, 'successes': 1, 'unlinks': 1,
'async_pendings': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
self.assertFalse(os.listdir(async_dir)) # no async file

def test_obj_update_quarantine(self):
@ -1287,7 +1292,7 @@ class TestObjectUpdater(unittest.TestCase):

self.assertEqual(
{'quarantines': 1},
daemon.logger.get_increment_counts())
daemon.logger.statsd_client.get_increment_counts())
self.assertFalse(os.listdir(async_dir)) # no asyncs

def test_obj_update_gone_missing(self):
@ -1319,7 +1324,8 @@ class TestObjectUpdater(unittest.TestCase):
with mocked_http_conn():
with mock.patch('swift.obj.updater.dump_recon_cache'):
daemon._load_update(self.sda1, op_path)
self.assertEqual({}, daemon.logger.get_increment_counts())
self.assertEqual(
{}, daemon.logger.statsd_client.get_increment_counts())
self.assertEqual(os.listdir(async_dir), [ohash[-3:]])
self.assertFalse(os.listdir(odir))

@ -1399,7 +1405,7 @@ class TestObjectUpdater(unittest.TestCase):
info_lines[-1])
self.assertEqual({'skips': 9, 'successes': 2, 'unlinks': 2,
'deferrals': 9},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

@mock.patch('swift.obj.updater.dump_recon_cache')
def test_per_container_rate_limit_unlimited(self, mock_recon):
@ -1437,7 +1443,7 @@ class TestObjectUpdater(unittest.TestCase):
'0 errors, 0 redirects, 0 skips, 0 deferrals, 0 drains',
info_lines[-1])
self.assertEqual({'successes': 11, 'unlinks': 11},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

@mock.patch('swift.obj.updater.dump_recon_cache')
def test_per_container_rate_limit_some_limited(self, mock_recon):
@ -1506,7 +1512,7 @@ class TestObjectUpdater(unittest.TestCase):
info_lines[-1])
self.assertEqual({'skips': 2, 'successes': 2, 'unlinks': 2,
'deferrals': 2},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

@mock.patch('swift.obj.updater.dump_recon_cache')
def test_per_container_rate_limit_defer_2_skip_1(self, mock_recon):
@ -1556,7 +1562,8 @@ class TestObjectUpdater(unittest.TestCase):

def fake_get_time(bucket_iter):
captured_skips_stats.append(
daemon.logger.get_increment_counts().get('skips', 0))
daemon.logger.statsd_client.get_increment_counts().get(
'skips', 0))
captured_queues.append(list(bucket_iter.buckets[0].deque))
# make each update delay before the iter being called again
now[0] += latencies.pop(0)
@ -1623,7 +1630,7 @@ class TestObjectUpdater(unittest.TestCase):
info_lines[-1])
self.assertEqual(
{'skips': 1, 'successes': 3, 'unlinks': 3, 'deferrals': 2,
'drains': 1}, self.logger.get_increment_counts())
'drains': 1}, self.logger.statsd_client.get_increment_counts())

@mock.patch('swift.obj.updater.dump_recon_cache')
def test_per_container_rate_limit_defer_3_skip_1(self, mock_recon):
@ -1673,7 +1680,8 @@ class TestObjectUpdater(unittest.TestCase):

def fake_get_time(bucket_iter):
captured_skips_stats.append(
daemon.logger.get_increment_counts().get('skips', 0))
daemon.logger.statsd_client.get_increment_counts().get(
'skips', 0))
captured_queues.append(list(bucket_iter.buckets[0].deque))
# make each update delay before the iter being called again
now[0] += latencies.pop(0)
@ -1743,7 +1751,7 @@ class TestObjectUpdater(unittest.TestCase):
info_lines[-1])
self.assertEqual(
{'skips': 1, 'successes': 4, 'unlinks': 4, 'deferrals': 3,
'drains': 2}, self.logger.get_increment_counts())
'drains': 2}, self.logger.statsd_client.get_increment_counts())

@mock.patch('swift.obj.updater.dump_recon_cache')
def test_per_container_rate_limit_unsent_deferrals(self, mock_recon):
@ -1799,7 +1807,8 @@ class TestObjectUpdater(unittest.TestCase):
if not captured_skips_stats:
daemon.begin = now[0]
captured_skips_stats.append(
daemon.logger.get_increment_counts().get('skips', 0))
daemon.logger.statsd_client.get_increment_counts().get(
'skips', 0))
captured_queues.append(list(bucket_iter.buckets[0].deque))
# insert delay each time iter is called
now[0] += latencies.pop(0)
@ -1870,8 +1879,9 @@ class TestObjectUpdater(unittest.TestCase):
info_lines[-1])
self.assertEqual(
{'successes': 5, 'unlinks': 5, 'deferrals': 4, 'drains': 2},
self.logger.get_increment_counts())
self.assertEqual([('skips', 2)], self.logger.get_update_stats())
self.logger.statsd_client.get_increment_counts())
self.assertEqual(
2, self.logger.statsd_client.get_stats_counts()['skips'])


class TestObjectUpdaterFunctions(unittest.TestCase):

@ -553,7 +553,8 @@ class TestFuncs(BaseTest):
self.assertEqual(resp['object_count'], 0)
self.assertEqual(resp['versions'], None)
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.miss'])

# container info is cached in cache.
@ -583,7 +584,8 @@ class TestFuncs(BaseTest):
[(k, str, v, str)
for k, v in subdict.items()])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.hit'])

def test_get_cache_key(self):
@ -668,27 +670,27 @@ class TestFuncs(BaseTest):
record_cache_op_metrics(
self.logger, 'shard_listing', 'infocache_hit')
self.assertEqual(
self.logger.get_increment_counts().get(
self.logger.statsd_client.get_increment_counts().get(
'shard_listing.infocache.hit'),
1)
record_cache_op_metrics(
self.logger, 'shard_listing', 'hit')
self.assertEqual(
self.logger.get_increment_counts().get(
self.logger.statsd_client.get_increment_counts().get(
'shard_listing.cache.hit'),
1)
resp = FakeResponse(status_int=200)
record_cache_op_metrics(
self.logger, 'shard_updating', 'skip', resp)
self.assertEqual(
self.logger.get_increment_counts().get(
self.logger.statsd_client.get_increment_counts().get(
'shard_updating.cache.skip.200'),
1)
resp = FakeResponse(status_int=503)
record_cache_op_metrics(
self.logger, 'shard_updating', 'disabled', resp)
self.assertEqual(
self.logger.get_increment_counts().get(
self.logger.statsd_client.get_increment_counts().get(
'shard_updating.cache.disabled.503'),
1)


@ -2767,7 +2767,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.miss',
'container.shard_listing.cache.bypass.200'])

@ -2805,7 +2806,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.hit',
'container.shard_listing.cache.miss.200'])

@ -2829,7 +2831,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.hit',
'container.shard_listing.cache.hit'])

@ -2866,7 +2869,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.hit',
'container.shard_listing.cache.skip.200'])

@ -2890,7 +2894,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.cache.hit',
'container.shard_listing.cache.hit'])

@ -2913,7 +2918,8 @@ class TestContainerController(TestRingBase):
self.assertEqual(self.ns_bound_list,
req.environ['swift.infocache'][cache_key])
self.assertEqual(
[x[0][0] for x in self.logger.logger.log_dict['increment']],
[x[0][0] for x in
self.logger.logger.statsd_client.calls['increment']],
['container.info.infocache.hit',
'container.shard_listing.infocache.hit'])

@ -3068,7 +3074,7 @@ class TestContainerController(TestRingBase):
self.assertEqual(404, resp.status_int)
self.assertEqual({'container.info.cache.hit': 1,
'container.shard_listing.cache.miss.404': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

def test_GET_shard_ranges_read_from_cache_error(self):
self._setup_shard_range_stubs()
@ -3101,7 +3107,7 @@ class TestContainerController(TestRingBase):
self.assertEqual(404, resp.status_int)
self.assertEqual({'container.info.cache.hit': 1,
'container.shard_listing.cache.error.404': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

def _do_test_GET_shard_ranges_read_from_cache(self, params, record_type):
# pre-warm cache with container metadata and shard ranges and verify
@ -3124,7 +3130,7 @@ class TestContainerController(TestRingBase):
self.memcache.calls)
self.assertEqual({'container.info.cache.hit': 1,
'container.shard_listing.cache.hit': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())
return resp

def test_GET_shard_ranges_read_from_cache(self):
@ -3220,7 +3226,7 @@ class TestContainerController(TestRingBase):
self.memcache.calls[2][1][1]['sharding_state'])
self.assertEqual({'container.info.cache.miss': 1,
'container.shard_listing.cache.bypass.200': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())
return resp

def test_GET_shard_ranges_write_to_cache(self):
@ -3315,7 +3321,7 @@ class TestContainerController(TestRingBase):
self.memcache.calls[2][1][1]['sharding_state'])
self.assertEqual({'container.info.cache.miss': 1,
'container.shard_listing.cache.force_skip.200': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())

def _do_test_GET_shard_ranges_no_cache_write(self, resp_hdrs):
# verify that there is a cache lookup to check container info but then
@ -3488,7 +3494,7 @@ class TestContainerController(TestRingBase):
self.memcache.calls[1][1][1]['sharding_state'])
self.assertEqual({'container.info.cache.miss': 1,
'container.shard_listing.cache.bypass.200': 1},
self.logger.get_increment_counts())
self.logger.statsd_client.get_increment_counts())
self.memcache.delete_all()

def test_GET_shard_ranges_bad_response_body(self):

@ -1257,7 +1257,7 @@ class TestProxyServer(unittest.TestCase):
self.assertTrue(log_kwargs['exc_info'])
self.assertIs(caught_exc, log_kwargs['exc_info'][1])
incremented_limit_samples.append(
logger.get_increment_counts().get(
logger.statsd_client.get_increment_counts().get(
'error_limiter.incremented_limit', 0))
self.assertEqual([0] * 10 + [1], incremented_limit_samples)
self.assertEqual(
@ -1294,7 +1294,7 @@ class TestProxyServer(unittest.TestCase):
self.assertIn(expected_msg, line)
self.assertIn(node_to_string(node), line)
incremented_limit_samples.append(
logger.get_increment_counts().get(
logger.statsd_client.get_increment_counts().get(
'error_limiter.incremented_limit', 0))
|
||||
self.assertEqual([0] * 10 + [1], incremented_limit_samples)
|
||||
@ -1310,8 +1310,9 @@ class TestProxyServer(unittest.TestCase):
|
||||
line = logger.get_lines_for_level('error')[-2]
|
||||
self.assertIn(expected_msg, line)
|
||||
self.assertIn(node_to_string(node), line)
|
||||
self.assertEqual(2, logger.get_increment_counts().get(
|
||||
'error_limiter.incremented_limit', 0))
|
||||
self.assertEqual(
|
||||
2, logger.statsd_client.get_increment_counts().get(
|
||||
'error_limiter.incremented_limit', 0))
|
||||
self.assertEqual(
|
||||
('Node will be error limited for 60.00s: %s' %
|
||||
node_to_string(node)),
|
||||
@ -3609,7 +3610,7 @@ class TestReplicatedObjectController(
|
||||
error_node = object_ring.get_part_nodes(1)[0]
|
||||
self.app.error_limit(error_node, 'test')
|
||||
self.assertEqual(
|
||||
1, self.logger.get_increment_counts().get(
|
||||
1, self.logger.statsd_client.get_increment_counts().get(
|
||||
'error_limiter.forced_limit', 0))
|
||||
line = self.logger.get_lines_for_level('error')[-1]
|
||||
self.assertEqual(
|
||||
@ -3618,7 +3619,7 @@ class TestReplicatedObjectController(
|
||||
|
||||
# no error limited checking yet.
|
||||
self.assertEqual(
|
||||
0, self.logger.get_increment_counts().get(
|
||||
0, self.logger.statsd_client.get_increment_counts().get(
|
||||
'error_limiter.is_limited', 0))
|
||||
set_http_connect(200, 200, # account, container
|
||||
201, 201, 201, # 3 working backends
|
||||
@ -3630,7 +3631,7 @@ class TestReplicatedObjectController(
|
||||
self.assertTrue(res.status.startswith('201 '))
|
||||
# error limited happened during PUT.
|
||||
self.assertEqual(
|
||||
1, self.logger.get_increment_counts().get(
|
||||
1, self.logger.statsd_client.get_increment_counts().get(
|
||||
'error_limiter.is_limited', 0))
|
||||
|
||||
# this is kind of a hokey test, but in FakeRing, the port is even when
|
||||
@ -4328,7 +4329,7 @@ class TestReplicatedObjectController(
|
||||
resp = req.get_response(self.app)
|
||||
|
||||
self.assertEqual(resp.status_int, 202)
|
||||
stats = self.app.logger.get_increment_counts()
|
||||
stats = self.app.logger.statsd_client.get_increment_counts()
|
||||
self.assertEqual(
|
||||
{'account.info.cache.disabled.200': 1,
|
||||
'account.info.infocache.hit': 2,
|
||||
@ -4425,7 +4426,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 2,
                           'container.info.cache.miss.200': 1,
@ -4538,7 +4539,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@ -4636,7 +4638,8 @@ class TestReplicatedObjectController(

         # verify request hitted infocache.
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.disabled.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.disabled.200': 1,
@ -4737,7 +4740,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@ -4782,7 +4786,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@ -4850,7 +4855,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(stats, {
             'account.info.cache.hit': 2,
             'account.info.cache.miss.200': 1,
@ -4896,7 +4901,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)

         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(
             {'account.info.cache.disabled.200': 1,
              'account.info.infocache.hit': 2,
@ -5500,7 +5505,8 @@ class TestReplicatedObjectController(
             collected_nodes.append(node)
         self.assertEqual(len(collected_nodes), 7)
         self.assertEqual(self.app.logger.log_dict['warning'], [])
-        self.assertEqual(self.app.logger.get_increments(), [])
+        self.assertEqual(
+            self.app.logger.statsd_client.get_increments(), [])

         # one error-limited primary node -> one handoff warning
         self.app.log_handoffs = True
@ -5520,7 +5526,7 @@ class TestReplicatedObjectController(
             self.app.logger.get_lines_for_level('warning'), [
                 'Handoff requested (5)'])
         self.assertEqual(
-            self.app.logger.get_increments(),
+            self.app.logger.statsd_client.get_increments(),
             ['error_limiter.is_limited', 'handoff_count'])

         # two error-limited primary nodes -> two handoff warnings
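The two hunks above exercise both read paths: `get_increments()` yields the ordered stream of counter names, while `get_increment_counts()` collapses that stream into totals. Under the sketch near the top of this section, the distinction plays out roughly like this (the `increment` calls and the logger/client wiring are hypothetical):

```python
# Hypothetical usage; assumes a FakeLogger with a fake statsd_client attached.
client = logger.statsd_client
client.increment('handoff_count')
client.increment('handoff_count')
client.increment('error_limiter.is_limited')

# Ordered stream of raw counter events:
assert client.get_increments() == [
    'handoff_count', 'handoff_count', 'error_limiter.is_limited']

# Collated totals, which most assertions in this patch compare against:
assert client.get_increment_counts() == {
    'handoff_count': 2, 'error_limiter.is_limited': 1}
```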
@ -5543,7 +5549,7 @@ class TestReplicatedObjectController(
                 'Handoff requested (5)',
                 'Handoff requested (6)',
             ])
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(2, stats.get('error_limiter.is_limited', 0))
         self.assertEqual(2, stats.get('handoff_count', 0))

@ -5571,7 +5577,7 @@ class TestReplicatedObjectController(
                 'Handoff requested (9)',
                 'Handoff requested (10)',
             ])
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(4, stats.get('error_limiter.is_limited', 0))
         self.assertEqual(4, stats.get('handoff_count', 0))
         self.assertEqual(1, stats.get('handoff_all_count', 0))
@ -5608,15 +5614,15 @@ class TestReplicatedObjectController(
         self.assertIn(first_nodes[0], second_nodes)

         self.assertEqual(
-            0, self.logger.get_increment_counts().get(
+            0, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
         self.assertEqual(
-            0, self.logger.get_increment_counts().get(
+            0, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.forced_limit', 0))

         self.app.error_limit(first_nodes[0], 'test')
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.forced_limit', 0))
         line = self.logger.get_lines_for_level('error')[-1]
         self.assertEqual(
@ -5627,13 +5633,13 @@ class TestReplicatedObjectController(
             object_ring, 0, self.logger, request=Request.blank('')))
         self.assertNotIn(first_nodes[0], second_nodes)
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
         third_nodes = list(self.app.iter_nodes(
             object_ring, 0, self.logger, request=Request.blank('')))
         self.assertNotIn(first_nodes[0], third_nodes)
         self.assertEqual(
-            2, self.logger.get_increment_counts().get(
+            2, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))

     def test_iter_nodes_gives_extra_if_error_limited_inline(self):
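For anyone carrying out-of-tree tests across this change, the migration is the same one-liner at every call site, with no difference in what is asserted:

```python
# Before: counter helpers lived directly on the FakeLogger.
self.assertEqual(
    1, self.logger.get_increment_counts().get(
        'error_limiter.forced_limit', 0))

# After: the helpers live on the fake statsd client attached to the logger.
self.assertEqual(
    1, self.logger.statsd_client.get_increment_counts().get(
        'error_limiter.forced_limit', 0))
```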