Clean up a bunch of deprecation warnings

pytest still complains about some 20k warnings, but the vast majority
are actually because of eventlet, and a lot of those will get cleaned up
when upper-constraints picks up v0.33.2.

Change-Id: If48cda4ae206266bb41a4065cd90c17cbac84b7f
This commit is contained in:
Tim Burke 2022-07-26 15:11:43 -07:00
parent 9dc12a8902
commit 20b48a6900
12 changed files with 217 additions and 156 deletions

View File

@ -250,7 +250,7 @@ def validate_hash_conf():
if six.PY3: if six.PY3:
# Use Latin1 to accept arbitrary bytes in the hash prefix/suffix # Use Latin1 to accept arbitrary bytes in the hash prefix/suffix
with open(SWIFT_CONF_FILE, encoding='latin1') as swift_conf_file: with open(SWIFT_CONF_FILE, encoding='latin1') as swift_conf_file:
hash_conf.readfp(swift_conf_file) hash_conf.read_file(swift_conf_file)
else: else:
with open(SWIFT_CONF_FILE) as swift_conf_file: with open(SWIFT_CONF_FILE) as swift_conf_file:
hash_conf.readfp(swift_conf_file) hash_conf.readfp(swift_conf_file)
@ -1950,7 +1950,7 @@ class StatsdClient(object):
warnings.warn( warnings.warn(
'set_prefix() is deprecated; use the ``tail_prefix`` argument of ' 'set_prefix() is deprecated; use the ``tail_prefix`` argument of '
'the constructor when instantiating the class instead.', 'the constructor when instantiating the class instead.',
DeprecationWarning DeprecationWarning, stacklevel=2
) )
self._set_prefix(tail_prefix) self._set_prefix(tail_prefix)
@ -2262,8 +2262,13 @@ class LogAdapter(logging.LoggerAdapter, object):
in the proxy-server to differentiate the Account, Container, and Object in the proxy-server to differentiate the Account, Container, and Object
controllers. controllers.
""" """
warnings.warn(
'set_statsd_prefix() is deprecated; use the '
'``statsd_tail_prefix`` argument to ``get_logger`` instead.',
DeprecationWarning, stacklevel=2
)
if self.logger.statsd_client: if self.logger.statsd_client:
self.logger.statsd_client.set_prefix(prefix) self.logger.statsd_client._set_prefix(prefix)
def statsd_delegate(statsd_func_name): def statsd_delegate(statsd_func_name):
""" """
@ -3139,7 +3144,10 @@ def readconf(conf_path, section_name=None, log_name=None, defaults=None,
if hasattr(conf_path, 'readline'): if hasattr(conf_path, 'readline'):
if hasattr(conf_path, 'seek'): if hasattr(conf_path, 'seek'):
conf_path.seek(0) conf_path.seek(0)
c.readfp(conf_path) if six.PY2:
c.readfp(conf_path)
else:
c.read_file(conf_path)
else: else:
if os.path.isdir(conf_path): if os.path.isdir(conf_path):
# read all configs in directory # read all configs in directory
@ -3580,7 +3588,7 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1, rate_buffer=5):
""" """
warnings.warn( warnings.warn(
'ratelimit_sleep() is deprecated; use the ``EventletRateLimiter`` ' 'ratelimit_sleep() is deprecated; use the ``EventletRateLimiter`` '
'class instead.', DeprecationWarning 'class instead.', DeprecationWarning, stacklevel=2
) )
rate_limit = EventletRateLimiter(max_rate, rate_buffer=rate_buffer, rate_limit = EventletRateLimiter(max_rate, rate_buffer=rate_buffer,
running_time=running_time) running_time=running_time)

View File

@ -25,6 +25,7 @@ from swift import gettext_ as _
import sys import sys
from textwrap import dedent from textwrap import dedent
import time import time
import warnings
import eventlet import eventlet
import eventlet.debug import eventlet.debug
@ -128,7 +129,10 @@ class ConfigString(NamedConfigLoader):
self.parser.optionxform = str # Don't lower-case keys self.parser.optionxform = str # Don't lower-case keys
# Defaults don't need interpolation (crazy PasteDeploy...) # Defaults don't need interpolation (crazy PasteDeploy...)
self.parser.defaults = lambda: dict(self.parser._defaults, **defaults) self.parser.defaults = lambda: dict(self.parser._defaults, **defaults)
self.parser.readfp(self.contents) if six.PY2:
self.parser.readfp(self.contents)
else:
self.parser.read_file(self.contents)
def readline(self, *args, **kwargs): def readline(self, *args, **kwargs):
return self.contents.readline(*args, **kwargs) return self.contents.readline(*args, **kwargs)
@ -654,6 +658,9 @@ def run_server(conf, logger, sock, global_conf=None, ready_callback=None,
} }
if ready_callback: if ready_callback:
ready_callback() ready_callback()
# Yes, eventlet, we know -- we have to support bad clients, though
warnings.filterwarnings(
'ignore', message='capitalize_response_headers is disabled')
try: try:
wsgi.server(sock, app, wsgi_logger, **server_kwargs) wsgi.server(sock, app, wsgi_logger, **server_kwargs)
except socket.error as err: except socket.error as err:

View File

@ -51,8 +51,9 @@ warnings.filterwarnings('ignore', message=(
'Therefore, support for it is deprecated in cryptography ' 'Therefore, support for it is deprecated in cryptography '
'and will be removed in a future release.')) 'and will be removed in a future release.'))
import unittest
if sys.version_info < (3, 2): if sys.version_info < (3, 2):
import unittest
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
unittest.TestCase.assertRegex = unittest.TestCase.assertRegexpMatches unittest.TestCase.assertRegex = unittest.TestCase.assertRegexpMatches
@ -132,3 +133,34 @@ def annotate_failure(msg):
err_val = '%s Failed with %s' % (msg, err) err_val = '%s Failed with %s' % (msg, err)
err_typ = AssertionError err_typ = AssertionError
reraise(err_typ, err_val, err_tb) reraise(err_typ, err_val, err_tb)
class BaseTestCase(unittest.TestCase):

    def _assertDictContainsSubset(self, subset, dictionary, msg=None):
        """Checks whether dictionary is a superset of subset."""
        # Near-verbatim port of the python3.4 implementation of
        # unittest.case.TestCase.assertDictContainsSubset; keeping our own
        # copy avoids the DeprecationWarning the stdlib one emits on py3.
        missing = [key for key in subset if key not in dictionary]
        mismatched = [
            '%s, expected: %s, actual: %s' % (
                safe_repr(key), safe_repr(expected),
                safe_repr(dictionary[key]))
            for key, expected in subset.items()
            if key in dictionary and dictionary[key] != expected]
        if not missing and not mismatched:
            return
        # Assemble the failure description exactly like the stdlib did:
        # "Missing: ..." and/or "Mismatched values: ...", '; '-separated.
        parts = []
        if missing:
            parts.append('Missing: %s' % ','.join(
                safe_repr(m) for m in missing))
        if mismatched:
            parts.append('Mismatched values: %s' % ','.join(mismatched))
        standardMsg = '; '.join(parts)
        self.fail(self._formatMessage(msg, standardMsg))

View File

@ -72,7 +72,7 @@ GOOD_RESPONSE_V3 = {'token': {
}} }}
class TestResponse(requests.Response): class FakeResponse(requests.Response):
"""Utility class to wrap requests.Response. """Utility class to wrap requests.Response.
Class used to wrap requests.Response and provide some convenience to Class used to wrap requests.Response and provide some convenience to
@ -81,7 +81,7 @@ class TestResponse(requests.Response):
def __init__(self, data): def __init__(self, data):
self._text = None self._text = None
super(TestResponse, self).__init__() super(FakeResponse, self).__init__()
if isinstance(data, dict): if isinstance(data, dict):
self.status_code = data.get('status_code', 200) self.status_code = data.get('status_code', 200)
headers = data.get('headers') headers = data.get('headers')
@ -328,7 +328,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
'insecure': 'True', 'auth_uri': 'http://example.com'}) 'insecure': 'True', 'auth_uri': 'http://example.com'})
text_return_value = json.dumps(GOOD_RESPONSE_V2) text_return_value = json.dumps(GOOD_RESPONSE_V2)
MOCK_REQUEST.return_value = TestResponse({ MOCK_REQUEST.return_value = FakeResponse({
'status_code': 201, 'status_code': 201,
'text': text_return_value}) 'text': text_return_value})
@ -413,7 +413,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
'auth_uri': 'http://example.com', 'auth_uri': 'http://example.com',
}) })
MOCK_REQUEST.return_value = TestResponse({ MOCK_REQUEST.return_value = FakeResponse({
'status_code': 201, 'status_code': 201,
'text': json.dumps(GOOD_RESPONSE_V2)}) 'text': json.dumps(GOOD_RESPONSE_V2)})
@ -536,7 +536,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
fake_cache_response = ({}, {'id': 'tenant_id'}, 'secret') fake_cache_response = ({}, {'id': 'tenant_id'}, 'secret')
cache.get.return_value = fake_cache_response cache.get.return_value = fake_cache_response
MOCK_REQUEST.return_value = TestResponse({ MOCK_REQUEST.return_value = FakeResponse({
'status_code': 201, 'status_code': 201,
'text': json.dumps(GOOD_RESPONSE_V2)}) 'text': json.dumps(GOOD_RESPONSE_V2)})
@ -578,7 +578,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
keystone_client = MOCK_KEYSTONE.return_value keystone_client = MOCK_KEYSTONE.return_value
keystone_client.ec2.get.return_value = mock.Mock(secret='secret') keystone_client.ec2.get.return_value = mock.Mock(secret='secret')
MOCK_REQUEST.return_value = TestResponse({ MOCK_REQUEST.return_value = FakeResponse({
'status_code': 201, 'status_code': 201,
'text': json.dumps(GOOD_RESPONSE_V2).encode('ascii')}) 'text': json.dumps(GOOD_RESPONSE_V2).encode('ascii')})

View File

@ -287,7 +287,7 @@ class FakeAccountBroker(FakeBroker):
info = {'account': TEST_ACCOUNT_NAME} info = {'account': TEST_ACCOUNT_NAME}
class TestReplicator(db_replicator.Replicator): class ConcreteReplicator(db_replicator.Replicator):
server_type = 'container' server_type = 'container'
ring_file = 'container.ring.gz' ring_file = 'container.ring.gz'
brokerclass = FakeBroker brokerclass = FakeBroker
@ -323,11 +323,11 @@ class TestDBReplicator(unittest.TestCase):
def test_creation(self): def test_creation(self):
# later config should be extended to assert more config options # later config should be extended to assert more config options
replicator = TestReplicator({'node_timeout': '3.5'}) replicator = ConcreteReplicator({'node_timeout': '3.5'})
self.assertEqual(replicator.node_timeout, 3.5) self.assertEqual(replicator.node_timeout, 3.5)
self.assertEqual(replicator.databases_per_second, 50.0) self.assertEqual(replicator.databases_per_second, 50.0)
replicator = TestReplicator({'databases_per_second': '0.1'}) replicator = ConcreteReplicator({'databases_per_second': '0.1'})
self.assertEqual(replicator.node_timeout, 10) self.assertEqual(replicator.node_timeout, 10)
self.assertEqual(replicator.databases_per_second, 0.1) self.assertEqual(replicator.databases_per_second, 0.1)
@ -365,7 +365,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(None, conn.sock) self.assertEqual(None, conn.sock)
def test_rsync_file(self): def test_rsync_file(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
with _mock_process(-1): with _mock_process(-1):
self.assertEqual( self.assertEqual(
False, False,
@ -376,7 +376,7 @@ class TestDBReplicator(unittest.TestCase):
replicator._rsync_file('/some/file', 'remote:/some/file')) replicator._rsync_file('/some/file', 'remote:/some/file'))
def test_rsync_file_popen_args(self): def test_rsync_file_popen_args(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
with _mock_process(0) as process: with _mock_process(0) as process:
replicator._rsync_file('/some/file', 'remote:/some_file') replicator._rsync_file('/some/file', 'remote:/some_file')
exp_args = ([ exp_args = ([
@ -387,7 +387,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(exp_args, process.args) self.assertEqual(exp_args, process.args)
def test_rsync_file_popen_args_whole_file_false(self): def test_rsync_file_popen_args_whole_file_false(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
with _mock_process(0) as process: with _mock_process(0) as process:
replicator._rsync_file('/some/file', 'remote:/some_file', False) replicator._rsync_file('/some/file', 'remote:/some_file', False)
exp_args = ([ exp_args = ([
@ -398,7 +398,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(exp_args, process.args) self.assertEqual(exp_args, process.args)
def test_rsync_file_popen_args_different_region_and_rsync_compress(self): def test_rsync_file_popen_args_different_region_and_rsync_compress(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
for rsync_compress in (False, True): for rsync_compress in (False, True):
replicator.rsync_compress = rsync_compress replicator.rsync_compress = rsync_compress
for different_region in (False, True): for different_region in (False, True):
@ -415,7 +415,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertFalse('--compress' in process.args[0]) self.assertFalse('--compress' in process.args[0])
def test_rsync_db(self): def test_rsync_db(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator._rsync_file = lambda *args, **kwargs: True replicator._rsync_file = lambda *args, **kwargs: True
fake_device = {'replication_ip': '127.0.0.1', 'device': 'sda1'} fake_device = {'replication_ip': '127.0.0.1', 'device': 'sda1'}
replicator._rsync_db(FakeBroker(), fake_device, ReplHttp(), 'abcd') replicator._rsync_db(FakeBroker(), fake_device, ReplHttp(), 'abcd')
@ -425,7 +425,7 @@ class TestDBReplicator(unittest.TestCase):
'replication_ip': '127.0.0.1', 'replication_port': '0', 'replication_ip': '127.0.0.1', 'replication_port': '0',
'device': 'sda1'} 'device': 'sda1'}
class MyTestReplicator(TestReplicator): class MyTestReplicator(ConcreteReplicator):
def __init__(self, db_file, remote_file): def __init__(self, db_file, remote_file):
super(MyTestReplicator, self).__init__({}) super(MyTestReplicator, self).__init__({})
self.db_file = db_file self.db_file = db_file
@ -445,7 +445,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertTrue(replicator._rsync_file_called) self.assertTrue(replicator._rsync_file_called)
def test_rsync_db_rsync_file_failure(self): def test_rsync_db_rsync_file_failure(self):
class MyTestReplicator(TestReplicator): class MyTestReplicator(ConcreteReplicator):
def __init__(self): def __init__(self):
super(MyTestReplicator, self).__init__({}) super(MyTestReplicator, self).__init__({})
self._rsync_file_called = False self._rsync_file_called = False
@ -465,7 +465,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(True, replicator._rsync_file_called) self.assertEqual(True, replicator._rsync_file_called)
def test_rsync_db_change_after_sync(self): def test_rsync_db_change_after_sync(self):
class MyTestReplicator(TestReplicator): class MyTestReplicator(ConcreteReplicator):
def __init__(self, broker): def __init__(self, broker):
super(MyTestReplicator, self).__init__({}) super(MyTestReplicator, self).__init__({})
self.broker = broker self.broker = broker
@ -505,7 +505,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(2, replicator._rsync_file_call_count) self.assertEqual(2, replicator._rsync_file_call_count)
def test_in_sync(self): def test_in_sync(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
self.assertEqual(replicator._in_sync( self.assertEqual(replicator._in_sync(
{'id': 'a', 'point': 0, 'max_row': 0, 'hash': 'b'}, {'id': 'a', 'point': 0, 'max_row': 0, 'hash': 'b'},
{'id': 'a', 'point': -1, 'max_row': 0, 'hash': 'b'}, {'id': 'a', 'point': -1, 'max_row': 0, 'hash': 'b'},
@ -520,8 +520,8 @@ class TestDBReplicator(unittest.TestCase):
FakeBroker(), -1)), False) FakeBroker(), -1)), False)
def test_run_once_no_local_device_in_ring(self): def test_run_once_no_local_device_in_ring(self):
replicator = TestReplicator({'recon_cache_path': self.recon_cache}, replicator = ConcreteReplicator({'recon_cache_path': self.recon_cache},
logger=self.logger) logger=self.logger)
with patch('swift.common.db_replicator.whataremyips', with patch('swift.common.db_replicator.whataremyips',
return_value=['127.0.0.1']): return_value=['127.0.0.1']):
replicator.run_once() replicator.run_once()
@ -535,14 +535,15 @@ class TestDBReplicator(unittest.TestCase):
base = 'swift.common.db_replicator.' base = 'swift.common.db_replicator.'
with patch(base + 'whataremyips', return_value=['1.1.1.1']), \ with patch(base + 'whataremyips', return_value=['1.1.1.1']), \
patch(base + 'ring', FakeRingWithNodes()): patch(base + 'ring', FakeRingWithNodes()):
replicator = TestReplicator({'bind_port': 6200, replicator = ConcreteReplicator({
'recon_cache_path': self.recon_cache}, 'bind_port': 6200,
logger=self.logger) 'recon_cache_path': self.recon_cache
}, logger=self.logger)
replicator.run_once() replicator.run_once()
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
def test_run_once_no_ips(self): def test_run_once_no_ips(self):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
self._patch(patch.object, db_replicator, 'whataremyips', self._patch(patch.object, db_replicator, 'whataremyips',
lambda *a, **kw: []) lambda *a, **kw: [])
@ -558,7 +559,7 @@ class TestDBReplicator(unittest.TestCase):
# returned by itself. # returned by itself.
conf = {'mount_check': 'true', 'bind_ip': '1.1.1.1', conf = {'mount_check': 'true', 'bind_ip': '1.1.1.1',
'bind_port': 6200} 'bind_port': 6200}
replicator = TestReplicator(conf, logger=self.logger) replicator = ConcreteReplicator(conf, logger=self.logger)
self.assertEqual(replicator.mount_check, True) self.assertEqual(replicator.mount_check, True)
self.assertEqual(replicator.port, 6200) self.assertEqual(replicator.port, 6200)
@ -581,7 +582,7 @@ class TestDBReplicator(unittest.TestCase):
def test_run_once_node_is_mounted(self): def test_run_once_node_is_mounted(self):
db_replicator.ring = FakeRingWithSingleNode() db_replicator.ring = FakeRingWithSingleNode()
conf = {'mount_check': 'true', 'bind_port': 6200} conf = {'mount_check': 'true', 'bind_port': 6200}
replicator = TestReplicator(conf, logger=self.logger) replicator = ConcreteReplicator(conf, logger=self.logger)
self.assertEqual(replicator.mount_check, True) self.assertEqual(replicator.mount_check, True)
self.assertEqual(replicator.port, 6200) self.assertEqual(replicator.port, 6200)
@ -622,25 +623,25 @@ class TestDBReplicator(unittest.TestCase):
def test_usync(self): def test_usync(self):
fake_http = ReplHttp() fake_http = ReplHttp()
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890') replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890')
def test_usync_http_error_above_300(self): def test_usync_http_error_above_300(self):
fake_http = ReplHttp(set_status=301) fake_http = ReplHttp(set_status=301)
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
self.assertFalse( self.assertFalse(
replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890')) replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890'))
def test_usync_http_error_below_200(self): def test_usync_http_error_below_200(self):
fake_http = ReplHttp(set_status=101) fake_http = ReplHttp(set_status=101)
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
self.assertFalse( self.assertFalse(
replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890')) replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890'))
@mock.patch('swift.common.db_replicator.dump_recon_cache') @mock.patch('swift.common.db_replicator.dump_recon_cache')
@mock.patch('swift.common.db_replicator.time.time', return_value=1234.5678) @mock.patch('swift.common.db_replicator.time.time', return_value=1234.5678)
def test_stats(self, mock_time, mock_recon_cache): def test_stats(self, mock_time, mock_recon_cache):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator._zero_stats() replicator._zero_stats()
self.assertEqual(replicator.stats['start'], mock_time.return_value) self.assertEqual(replicator.stats['start'], mock_time.return_value)
replicator._report_stats() replicator._report_stats()
@ -696,7 +697,7 @@ class TestDBReplicator(unittest.TestCase):
# verify return values from replicate_object # verify return values from replicate_object
db_replicator.ring = FakeRingWithNodes() db_replicator.ring = FakeRingWithNodes()
db_path = '/path/to/file' db_path = '/path/to/file'
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
info = FakeBroker().get_replication_info() info = FakeBroker().get_replication_info()
# make remote appear to be in sync # make remote appear to be in sync
rinfo = {'point': info['max_row'], 'id': 'remote_id'} rinfo = {'point': info['max_row'], 'id': 'remote_id'}
@ -796,7 +797,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertFalse(replicator.logger.get_lines_for_level('warning')) self.assertFalse(replicator.logger.get_lines_for_level('warning'))
def test_replicate_object_quarantine(self): def test_replicate_object_quarantine(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
self._patch(patch.object, replicator.brokerclass, 'db_file', self._patch(patch.object, replicator.brokerclass, 'db_file',
'/a/b/c/d/e/hey') '/a/b/c/d/e/hey')
self._patch(patch.object, replicator.brokerclass, self._patch(patch.object, replicator.brokerclass,
@ -821,7 +822,7 @@ class TestDBReplicator(unittest.TestCase):
replicator._replicate_object('0', 'file', 'node_id') replicator._replicate_object('0', 'file', 'node_id')
def test_replicate_object_delete_because_deleted(self): def test_replicate_object_delete_because_deleted(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
try: try:
replicator.delete_db = self.stub_delete_db replicator.delete_db = self.stub_delete_db
replicator.brokerclass.stub_replication_info = { replicator.brokerclass.stub_replication_info = {
@ -832,7 +833,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(['/path/to/file'], self.delete_db_calls) self.assertEqual(['/path/to/file'], self.delete_db_calls)
def test_replicate_object_delete_because_not_shouldbehere(self): def test_replicate_object_delete_because_not_shouldbehere(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
@ -850,7 +851,7 @@ class TestDBReplicator(unittest.TestCase):
def test_handoff_delete(self): def test_handoff_delete(self):
def do_test(config, repl_to_node_results, expect_delete): def do_test(config, repl_to_node_results, expect_delete):
self.delete_db_calls = [] self.delete_db_calls = []
replicator = TestReplicator(config) replicator = ConcreteReplicator(config)
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
mock_repl_to_node = mock.Mock() mock_repl_to_node = mock.Mock()
@ -893,7 +894,7 @@ class TestDBReplicator(unittest.TestCase):
do_test(cfg, repl_results, expected_delete) do_test(cfg, repl_results, expected_delete)
def test_replicate_object_delete_delegated_to_cleanup_post_replicate(self): def test_replicate_object_delete_delegated_to_cleanup_post_replicate(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
@ -934,7 +935,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(2, replicator.stats['success']) self.assertEqual(2, replicator.stats['success'])
def test_cleanup_post_replicate(self): def test_cleanup_post_replicate(self):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
broker = FakeBroker() broker = FakeBroker()
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
@ -1000,7 +1001,7 @@ class TestDBReplicator(unittest.TestCase):
replicator.logger.clear() replicator.logger.clear()
def test_replicate_object_with_exception(self): def test_replicate_object_with_exception(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
replicator.delete_db = self.stub_delete_db replicator.delete_db = self.stub_delete_db
@ -1033,7 +1034,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(4, replicator._repl_to_node.call_count) self.assertEqual(4, replicator._repl_to_node.call_count)
def test_replicate_object_with_exception_run_out_of_nodes(self): def test_replicate_object_with_exception_run_out_of_nodes(self):
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
replicator.delete_db = self.stub_delete_db replicator.delete_db = self.stub_delete_db
@ -1044,7 +1045,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(5, replicator._repl_to_node.call_count) self.assertEqual(5, replicator._repl_to_node.call_count)
def test_replicate_account_out_of_place(self): def test_replicate_account_out_of_place(self):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator.brokerclass = FakeAccountBroker replicator.brokerclass = FakeAccountBroker
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
@ -1060,7 +1061,7 @@ class TestDBReplicator(unittest.TestCase):
self.assertEqual(error_msgs, [expected]) self.assertEqual(error_msgs, [expected])
def test_replicate_container_out_of_place(self): def test_replicate_container_out_of_place(self):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator.ring = FakeRingWithNodes().Ring('path') replicator.ring = FakeRingWithNodes().Ring('path')
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
replicator.delete_db = self.stub_delete_db replicator.delete_db = self.stub_delete_db
@ -1076,7 +1077,7 @@ class TestDBReplicator(unittest.TestCase):
'be on partition 0; will replicate out and remove.']) 'be on partition 0; will replicate out and remove.'])
def test_replicate_container_out_of_place_no_node(self): def test_replicate_container_out_of_place_no_node(self):
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator.ring = FakeRingWithSingleNode().Ring('path') replicator.ring = FakeRingWithSingleNode().Ring('path')
replicator._repl_to_node = lambda *args: True replicator._repl_to_node = lambda *args: True
@ -1101,7 +1102,7 @@ class TestDBReplicator(unittest.TestCase):
def test_replicate_object_different_region(self): def test_replicate_object_different_region(self):
db_replicator.ring = FakeRingWithNodes() db_replicator.ring = FakeRingWithNodes()
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator._repl_to_node = mock.Mock() replicator._repl_to_node = mock.Mock()
# For node_id = 1, one replica in same region(1) and other is in a # For node_id = 1, one replica in same region(1) and other is in a
# different region(2). Refer: FakeRingWithNodes # different region(2). Refer: FakeRingWithNodes
@ -1115,7 +1116,7 @@ class TestDBReplicator(unittest.TestCase):
def test_delete_db(self): def test_delete_db(self):
db_replicator.lock_parent_directory = lock_parent_directory db_replicator.lock_parent_directory = lock_parent_directory
replicator = TestReplicator({}, logger=self.logger) replicator = ConcreteReplicator({}, logger=self.logger)
replicator._zero_stats() replicator._zero_stats()
replicator.extract_device = lambda _: 'some_device' replicator.extract_device = lambda _: 'some_device'
@ -1175,7 +1176,7 @@ class TestDBReplicator(unittest.TestCase):
rmtree(temp_dir) rmtree(temp_dir)
def test_extract_device(self): def test_extract_device(self):
replicator = TestReplicator({'devices': '/some/root'}) replicator = ConcreteReplicator({'devices': '/some/root'})
self.assertEqual('some_device', replicator.extract_device( self.assertEqual('some_device', replicator.extract_device(
'/some/root/some_device/deeper/and/deeper')) '/some/root/some_device/deeper/and/deeper'))
self.assertEqual('UNKNOWN', replicator.extract_device( self.assertEqual('UNKNOWN', replicator.extract_device(
@ -1784,7 +1785,7 @@ class TestDBReplicator(unittest.TestCase):
node = "node" node = "node"
partition = "partition" partition = "partition"
db_file = __file__ db_file = __file__
replicator = TestReplicator({}) replicator = ConcreteReplicator({})
replicator._http_connect(node, partition, db_file) replicator._http_connect(node, partition, db_file)
expected_hsh = os.path.basename(db_file).split('.', 1)[0] expected_hsh = os.path.basename(db_file).split('.', 1)[0]
expected_hsh = expected_hsh.split('_', 1)[0] expected_hsh = expected_hsh.split('_', 1)[0]
@ -1890,7 +1891,7 @@ class TestHandoffsOnly(unittest.TestCase):
rmtree(self.root, ignore_errors=True) rmtree(self.root, ignore_errors=True)
def test_scary_warnings(self): def test_scary_warnings(self):
replicator = TestReplicator({ replicator = ConcreteReplicator({
'handoffs_only': 'yes', 'handoffs_only': 'yes',
'devices': self.root, 'devices': self.root,
'bind_port': 6201, 'bind_port': 6201,
@ -1914,7 +1915,7 @@ class TestHandoffsOnly(unittest.TestCase):
'disable them.')]) 'disable them.')])
def test_skips_primary_partitions(self): def test_skips_primary_partitions(self):
replicator = TestReplicator({ replicator = ConcreteReplicator({
'handoffs_only': 'yes', 'handoffs_only': 'yes',
'devices': self.root, 'devices': self.root,
'bind_port': 6201, 'bind_port': 6201,
@ -1938,7 +1939,7 @@ class TestHandoffsOnly(unittest.TestCase):
'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)]) 'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)])
def test_override_partitions(self): def test_override_partitions(self):
replicator = TestReplicator({ replicator = ConcreteReplicator({
'devices': self.root, 'devices': self.root,
'bind_port': 6201, 'bind_port': 6201,
'mount_check': 'no', 'mount_check': 'no',
@ -1961,7 +1962,7 @@ class TestHandoffsOnly(unittest.TestCase):
'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)]) 'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)])
def test_override_devices(self): def test_override_devices(self):
replicator = TestReplicator({ replicator = ConcreteReplicator({
'devices': self.root, 'devices': self.root,
'bind_port': 6201, 'bind_port': 6201,
'mount_check': 'no', 'mount_check': 'no',
@ -1984,7 +1985,7 @@ class TestHandoffsOnly(unittest.TestCase):
'abababab2b5368158355e799323b498d.db'), 0)]) 'abababab2b5368158355e799323b498d.db'), 0)])
def test_override_devices_and_partitions(self): def test_override_devices_and_partitions(self):
replicator = TestReplicator({ replicator = ConcreteReplicator({
'devices': self.root, 'devices': self.root,
'bind_port': 6201, 'bind_port': 6201,
'mount_check': 'no', 'mount_check': 'no',
@ -2008,7 +2009,7 @@ class TestReplToNode(unittest.TestCase):
db_replicator.ring = FakeRing() db_replicator.ring = FakeRing()
self.delete_db_calls = [] self.delete_db_calls = []
self.broker = FakeBroker() self.broker = FakeBroker()
self.replicator = TestReplicator({'per_diff': 10}) self.replicator = ConcreteReplicator({'per_diff': 10})
self.fake_node = {'ip': '127.0.0.1', 'device': 'sda1', 'port': 1000} self.fake_node = {'ip': '127.0.0.1', 'device': 'sda1', 'port': 1000}
self.fake_info = {'id': 'a', 'point': -1, 'max_row': 20, 'hash': 'b', self.fake_info = {'id': 'a', 'point': -1, 'max_row': 20, 'hash': 'b',
'created_at': 100, 'put_timestamp': 0, 'created_at': 100, 'put_timestamp': 0,

View File

@ -74,9 +74,10 @@ class TestStoragePolicies(unittest.TestCase):
conf_str = "\n".join(line.strip() for line in conf_str.split("\n")) conf_str = "\n".join(line.strip() for line in conf_str.split("\n"))
if six.PY2: if six.PY2:
conf = ConfigParser() conf = ConfigParser()
conf.readfp(six.StringIO(conf_str))
else: else:
conf = ConfigParser(strict=False) conf = ConfigParser(strict=False)
conf.readfp(six.StringIO(conf_str)) conf.read_file(six.StringIO(conf_str))
return conf return conf
def assertRaisesWithMessage(self, exc_class, message, f, *args, **kwargs): def assertRaisesWithMessage(self, exc_class, message, f, *args, **kwargs):

View File

@ -2429,9 +2429,11 @@ class TestUtils(unittest.TestCase):
def _test_validate_hash_conf(self, sections, options, should_raise_error): def _test_validate_hash_conf(self, sections, options, should_raise_error):
class FakeConfigParser(object): class FakeConfigParser(object):
def readfp(self, fp): def read_file(self, fp):
pass pass
readfp = read_file
def get(self, section, option): def get(self, section, option):
if section not in sections: if section not in sections:
raise NoSectionError('section error') raise NoSectionError('section error')
@ -2770,54 +2772,63 @@ log_name = %(yarr)s'''
"Expected %d < 100" % diff_from_target_ms) "Expected %d < 100" % diff_from_target_ms)
def test_ratelimit_sleep(self): def test_ratelimit_sleep(self):
with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'ratelimit_sleep\(\) is deprecated')
def testfunc(): def testfunc():
running_time = 0 running_time = 0
for i in range(100): for i in range(100):
running_time = utils.ratelimit_sleep(running_time, -5) running_time = utils.ratelimit_sleep(running_time, -5)
self.verify_under_pseudo_time(testfunc, target_runtime_ms=1) self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
def testfunc(): def testfunc():
running_time = 0 running_time = 0
for i in range(100): for i in range(100):
running_time = utils.ratelimit_sleep(running_time, 0) running_time = utils.ratelimit_sleep(running_time, 0)
self.verify_under_pseudo_time(testfunc, target_runtime_ms=1) self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
def testfunc(): def testfunc():
running_time = 0 running_time = 0
for i in range(50): for i in range(50):
running_time = utils.ratelimit_sleep(running_time, 200) running_time = utils.ratelimit_sleep(running_time, 200)
self.verify_under_pseudo_time(testfunc, target_runtime_ms=250) self.verify_under_pseudo_time(testfunc, target_runtime_ms=250)
def test_ratelimit_sleep_with_incr(self): def test_ratelimit_sleep_with_incr(self):
with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'ratelimit_sleep\(\) is deprecated')
def testfunc(): def testfunc():
running_time = 0 running_time = 0
vals = [5, 17, 0, 3, 11, 30, vals = [5, 17, 0, 3, 11, 30,
40, 4, 13, 2, -1] * 2 # adds up to 248 40, 4, 13, 2, -1] * 2 # adds up to 248
total = 0 total = 0
for i in vals: for i in vals:
running_time = utils.ratelimit_sleep(running_time, running_time = utils.ratelimit_sleep(running_time,
500, incr_by=i) 500, incr_by=i)
total += i total += i
self.assertEqual(248, total) self.assertEqual(248, total)
self.verify_under_pseudo_time(testfunc, target_runtime_ms=500) self.verify_under_pseudo_time(testfunc, target_runtime_ms=500)
def test_ratelimit_sleep_with_sleep(self): def test_ratelimit_sleep_with_sleep(self):
with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'ratelimit_sleep\(\) is deprecated')
def testfunc(): def testfunc():
running_time = 0 running_time = 0
sleeps = [0] * 7 + [.2] * 3 + [0] * 30 sleeps = [0] * 7 + [.2] * 3 + [0] * 30
for i in sleeps: for i in sleeps:
running_time = utils.ratelimit_sleep(running_time, 40, running_time = utils.ratelimit_sleep(running_time, 40,
rate_buffer=1) rate_buffer=1)
time.sleep(i) time.sleep(i)
self.verify_under_pseudo_time(testfunc, target_runtime_ms=900) self.verify_under_pseudo_time(testfunc, target_runtime_ms=900)
def test_search_tree(self): def test_search_tree(self):
# file match & ext miss # file match & ext miss
@ -5425,12 +5436,18 @@ class TestStatsdLogging(unittest.TestCase):
# note: set_statsd_prefix is deprecated # note: set_statsd_prefix is deprecated
logger2 = utils.get_logger({'log_statsd_host': 'some.host.com'}, logger2 = utils.get_logger({'log_statsd_host': 'some.host.com'},
'other-name', log_route='some-route') 'other-name', log_route='some-route')
logger.set_statsd_prefix('some-name.more-specific') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
logger.set_statsd_prefix('some-name.more-specific')
self.assertEqual(logger.logger.statsd_client._prefix, self.assertEqual(logger.logger.statsd_client._prefix,
'some-name.more-specific.') 'some-name.more-specific.')
self.assertEqual(logger2.logger.statsd_client._prefix, self.assertEqual(logger2.logger.statsd_client._prefix,
'some-name.more-specific.') 'some-name.more-specific.')
logger.set_statsd_prefix('') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
logger.set_statsd_prefix('')
self.assertEqual(logger.logger.statsd_client._prefix, '') self.assertEqual(logger.logger.statsd_client._prefix, '')
self.assertEqual(logger2.logger.statsd_client._prefix, '') self.assertEqual(logger2.logger.statsd_client._prefix, '')
@ -5452,10 +5469,16 @@ class TestStatsdLogging(unittest.TestCase):
'tomato.sauce.some-name.more-specific.') 'tomato.sauce.some-name.more-specific.')
# note: set_statsd_prefix is deprecated # note: set_statsd_prefix is deprecated
logger.set_statsd_prefix('some-name.more-specific') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
logger.set_statsd_prefix('some-name.more-specific')
self.assertEqual(logger.logger.statsd_client._prefix, self.assertEqual(logger.logger.statsd_client._prefix,
'tomato.sauce.some-name.more-specific.') 'tomato.sauce.some-name.more-specific.')
logger.set_statsd_prefix('') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
logger.set_statsd_prefix('')
self.assertEqual(logger.logger.statsd_client._prefix, 'tomato.sauce.') self.assertEqual(logger.logger.statsd_client._prefix, 'tomato.sauce.')
self.assertEqual(logger.logger.statsd_client._host, 'another.host.com') self.assertEqual(logger.logger.statsd_client._host, 'another.host.com')
self.assertEqual(logger.logger.statsd_client._port, 9876) self.assertEqual(logger.logger.statsd_client._port, 9876)
@ -5491,10 +5514,10 @@ class TestStatsdLogging(unittest.TestCase):
logger.set_statsd_prefix('some-name.more-specific') logger.set_statsd_prefix('some-name.more-specific')
msgs = [str(warning.message) msgs = [str(warning.message)
for warning in cm for warning in cm
if str(warning.message).startswith('set_prefix')] if str(warning.message).startswith('set_statsd_prefix')]
self.assertEqual( self.assertEqual(
['set_prefix() is deprecated; use the ``tail_prefix`` argument of ' ['set_statsd_prefix() is deprecated; use the '
'the constructor when instantiating the class instead.'], '``statsd_tail_prefix`` argument to ``get_logger`` instead.'],
msgs) msgs)
def test_ipv4_or_ipv6_hostname_defaults_to_ipv4(self): def test_ipv4_or_ipv6_hostname_defaults_to_ipv4(self):
@ -6240,7 +6263,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.logger.update_stats, 'another.counter', 42) self.logger.update_stats, 'another.counter', 42)
# Each call can override the sample_rate (also, bonus prefix test) # Each call can override the sample_rate (also, bonus prefix test)
self.logger.set_statsd_prefix('pfx') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
self.logger.set_statsd_prefix('pfx')
self.assertStat('pfx.some.counter:1|c|@0.972', self.logger.increment, self.assertStat('pfx.some.counter:1|c|@0.972', self.logger.increment,
'some.counter', sample_rate=0.972) 'some.counter', sample_rate=0.972)
self.assertStat('pfx.some.counter:-1|c|@0.972', self.logger.decrement, self.assertStat('pfx.some.counter:-1|c|@0.972', self.logger.decrement,
@ -6256,7 +6282,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
sample_rate=0.972) sample_rate=0.972)
# Can override sample_rate with non-keyword arg # Can override sample_rate with non-keyword arg
self.logger.set_statsd_prefix('') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
self.logger.set_statsd_prefix('')
self.assertStat('some.counter:1|c|@0.939', self.logger.increment, self.assertStat('some.counter:1|c|@0.939', self.logger.increment,
'some.counter', 0.939) 'some.counter', 0.939)
self.assertStat('some.counter:-1|c|@0.939', self.logger.decrement, self.assertStat('some.counter:-1|c|@0.939', self.logger.decrement,
@ -6304,7 +6333,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
sample_rate=0.9912) sample_rate=0.9912)
# Can override sample_rate with non-keyword arg # Can override sample_rate with non-keyword arg
self.logger.set_statsd_prefix('') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
self.logger.set_statsd_prefix('')
self.assertStat('some.counter:1|c|@0.987654', self.logger.increment, self.assertStat('some.counter:1|c|@0.987654', self.logger.increment,
'some.counter', 0.987654) 'some.counter', 0.987654)
self.assertStat('some.counter:-1|c|@0.987654', self.logger.decrement, self.assertStat('some.counter:-1|c|@0.987654', self.logger.decrement,
@ -6337,7 +6369,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('alpha.beta.pfx.another.counter:3|c', self.assertStat('alpha.beta.pfx.another.counter:3|c',
self.logger.update_stats, 'another.counter', 3) self.logger.update_stats, 'another.counter', 3)
self.logger.set_statsd_prefix('') with warnings.catch_warnings():
warnings.filterwarnings(
'ignore', r'set_statsd_prefix\(\) is deprecated')
self.logger.set_statsd_prefix('')
self.assertStat('alpha.beta.some.counter:1|c|@0.9912', self.assertStat('alpha.beta.some.counter:1|c|@0.9912',
self.logger.increment, 'some.counter', self.logger.increment, 'some.counter',
sample_rate=0.9912) sample_rate=0.9912)

View File

@ -2139,8 +2139,8 @@ class TestContainerBroker(unittest.TestCase):
iters = 100 iters = 100
for i in range(iters): for i in range(iters):
policy_index = random.randint(0, iters * 0.1) policy_index = random.randint(0, iters // 10)
name = 'object-%s' % random.randint(0, iters * 0.1) name = 'object-%s' % random.randint(0, iters // 10)
size = random.randint(0, iters) size = random.randint(0, iters)
broker.put_object(name, next(ts).internal, size, 'text/plain', broker.put_object(name, next(ts).internal, size, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe', '5af83e3196bf99f440f31f2e1a6c9afe',

View File

@ -25,7 +25,7 @@ from contextlib import closing
from gzip import GzipFile from gzip import GzipFile
from tempfile import mkdtemp from tempfile import mkdtemp
import time import time
import warnings
from eventlet import spawn, wsgi from eventlet import spawn, wsgi
import mock import mock
@ -215,6 +215,10 @@ def setup_servers(the_object_server=object_server, extra_conf=None):
logging_prosv = proxy_logging.ProxyLoggingMiddleware( logging_prosv = proxy_logging.ProxyLoggingMiddleware(
listing_formats.ListingFilter(prosrv, {}, logger=prosrv.logger), listing_formats.ListingFilter(prosrv, {}, logger=prosrv.logger),
conf, logger=prosrv.logger) conf, logger=prosrv.logger)
# Yes, eventlet, we know -- we have to support bad clients, though
warnings.filterwarnings(
'ignore', module='eventlet',
message='capitalize_response_headers is disabled')
prospa = spawn(wsgi.server, prolis, logging_prosv, nl, prospa = spawn(wsgi.server, prolis, logging_prosv, nl,
protocol=SwiftHttpProtocol, protocol=SwiftHttpProtocol,
capitalize_response_headers=False) capitalize_response_headers=False)

View File

@ -27,6 +27,8 @@ from shutil import rmtree
from tempfile import mkdtemp from tempfile import mkdtemp
import textwrap import textwrap
from os.path import dirname, basename from os.path import dirname, basename
from test import BaseTestCase
from test.debug_logger import debug_logger from test.debug_logger import debug_logger
from test.unit import ( from test.unit import (
DEFAULT_TEST_EC_TYPE, make_timestamp_iter, patch_policies, DEFAULT_TEST_EC_TYPE, make_timestamp_iter, patch_policies,
@ -111,7 +113,7 @@ class FakeRing2(object):
return (1, nodes) return (1, nodes)
class TestAuditorBase(unittest.TestCase): class TestAuditorBase(BaseTestCase):
def setUp(self): def setUp(self):
skip_if_no_xattrs() skip_if_no_xattrs()
@ -1715,23 +1717,23 @@ class TestAuditWatchers(TestAuditorBase):
# irrelevant; what matters is that it finds all the things. # irrelevant; what matters is that it finds all the things.
calls[2:5] = sorted(calls[2:5], key=lambda item: item[1]['name']) calls[2:5] = sorted(calls[2:5], key=lambda item: item[1]['name'])
self.assertDictContainsSubset({'name': '/a/c/o0', self._assertDictContainsSubset({'name': '/a/c/o0',
'X-Object-Meta-Flavor': 'banana'}, 'X-Object-Meta-Flavor': 'banana'},
calls[2][1]) calls[2][1])
self.assertIn('node/sda/objects/0/', calls[2][2]) # data_file_path self.assertIn('node/sda/objects/0/', calls[2][2]) # data_file_path
self.assertTrue(calls[2][2].endswith('.data')) # data_file_path self.assertTrue(calls[2][2].endswith('.data')) # data_file_path
self.assertEqual({}, calls[2][3]) self.assertEqual({}, calls[2][3])
self.assertDictContainsSubset({'name': '/a/c/o1', self._assertDictContainsSubset({'name': '/a/c/o1',
'X-Object-Meta-Flavor': 'orange'}, 'X-Object-Meta-Flavor': 'orange'},
calls[3][1]) calls[3][1])
self.assertIn('node/sda/objects/0/', calls[3][2]) # data_file_path self.assertIn('node/sda/objects/0/', calls[3][2]) # data_file_path
self.assertTrue(calls[3][2].endswith('.data')) # data_file_path self.assertTrue(calls[3][2].endswith('.data')) # data_file_path
self.assertEqual({}, calls[3][3]) self.assertEqual({}, calls[3][3])
self.assertDictContainsSubset({'name': '/a/c_ec/o', self._assertDictContainsSubset({'name': '/a/c_ec/o',
'X-Object-Meta-Flavor': 'peach'}, 'X-Object-Meta-Flavor': 'peach'},
calls[4][1]) calls[4][1])
self.assertIn('node/sda/objects-2/0/', calls[4][2]) # data_file_path self.assertIn('node/sda/objects-2/0/', calls[4][2]) # data_file_path
self.assertTrue(calls[4][2].endswith('.data')) # data_file_path self.assertTrue(calls[4][2].endswith('.data')) # data_file_path
self.assertEqual({}, calls[4][3]) self.assertEqual({}, calls[4][3])

View File

@ -19,7 +19,6 @@ import six.moves.cPickle as pickle
import os import os
import errno import errno
import itertools import itertools
from unittest.util import safe_repr
import mock import mock
import unittest import unittest
import email import email
@ -40,6 +39,7 @@ import pyeclib.ec_iface
from eventlet import hubs, timeout, tpool from eventlet import hubs, timeout, tpool
from swift.obj.diskfile import MD5_OF_EMPTY_STRING, update_auditor_status from swift.obj.diskfile import MD5_OF_EMPTY_STRING, update_auditor_status
from test import BaseTestCase
from test.debug_logger import debug_logger from test.debug_logger import debug_logger
from test.unit import (mock as unit_mock, temptree, mock_check_drive, from test.unit import (mock as unit_mock, temptree, mock_check_drive,
patch_policies, EMPTY_ETAG, make_timestamp_iter, patch_policies, EMPTY_ETAG, make_timestamp_iter,
@ -1012,35 +1012,6 @@ class BaseDiskFileTestMixin(object):
return '.'.join([ return '.'.join([
mgr_cls.__module__, mgr_cls.__name__, manager_attribute_name]) mgr_cls.__module__, mgr_cls.__name__, manager_attribute_name])
def _assertDictContainsSubset(self, subset, dictionary, msg=None):
"""Checks whether dictionary is a superset of subset."""
# This is almost identical to the method in python3.4 version of
# unitest.case.TestCase.assertDictContainsSubset, reproduced here to
# avoid the deprecation warning in the original when using python3.
missing = []
mismatched = []
for key, value in subset.items():
if key not in dictionary:
missing.append(key)
elif value != dictionary[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(dictionary[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
class DiskFileManagerMixin(BaseDiskFileTestMixin): class DiskFileManagerMixin(BaseDiskFileTestMixin):
""" """
@ -2008,7 +1979,7 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):
@patch_policies @patch_policies
class TestDiskFileManager(DiskFileManagerMixin, unittest.TestCase): class TestDiskFileManager(DiskFileManagerMixin, BaseTestCase):
mgr_cls = diskfile.DiskFileManager mgr_cls = diskfile.DiskFileManager
@ -2319,7 +2290,7 @@ class TestDiskFileManager(DiskFileManagerMixin, unittest.TestCase):
@patch_policies(with_ec_default=True) @patch_policies(with_ec_default=True)
class TestECDiskFileManager(DiskFileManagerMixin, unittest.TestCase): class TestECDiskFileManager(DiskFileManagerMixin, BaseTestCase):
mgr_cls = diskfile.ECDiskFileManager mgr_cls = diskfile.ECDiskFileManager

View File

@ -40,7 +40,7 @@ from eventlet.green import httplib
from swift import __version__ as swift_version from swift import __version__ as swift_version
from swift.common.http import is_success from swift.common.http import is_success
from test import listen_zero from test import listen_zero, BaseTestCase
from test.debug_logger import debug_logger from test.debug_logger import debug_logger
from test.unit import mocked_http_conn, \ from test.unit import mocked_http_conn, \
make_timestamp_iter, DEFAULT_TEST_EC_TYPE, skip_if_no_xattrs, \ make_timestamp_iter, DEFAULT_TEST_EC_TYPE, skip_if_no_xattrs, \
@ -134,7 +134,7 @@ class TestTpoolSize(unittest.TestCase):
@patch_policies(test_policies) @patch_policies(test_policies)
class TestObjectController(unittest.TestCase): class TestObjectController(BaseTestCase):
"""Test swift.obj.server.ObjectController""" """Test swift.obj.server.ObjectController"""
def setUp(self): def setUp(self):
@ -3996,7 +3996,7 @@ class TestObjectController(unittest.TestCase):
'X-Backend-Durable-Timestamp': ts_0.internal, 'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '0', 'X-Object-Sysmeta-Ec-Frag-Index': '0',
'X-Object-Meta-Test': 'abc'} 'X-Object-Meta-Test': 'abc'}
self.assertDictContainsSubset(expect, resp.headers) self._assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads( self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments'])) resp.headers['X-Backend-Fragments']))
@ -4007,7 +4007,7 @@ class TestObjectController(unittest.TestCase):
'X-Backend-Timestamp': ts_2.internal, 'X-Backend-Timestamp': ts_2.internal,
'X-Backend-Data-Timestamp': ts_2.internal, 'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_2.internal} 'X-Backend-Durable-Timestamp': ts_2.internal}
self.assertDictContainsSubset(expect, resp.headers) self._assertDictContainsSubset(expect, resp.headers)
self.assertNotIn('X-Object-Meta-Test', resp.headers) self.assertNotIn('X-Object-Meta-Test', resp.headers)
# Sanity check: Request with no preferences should default to the # Sanity check: Request with no preferences should default to the
@ -4067,7 +4067,7 @@ class TestObjectController(unittest.TestCase):
'X-Backend-Data-Timestamp': ts_2.internal, 'X-Backend-Data-Timestamp': ts_2.internal,
'X-Backend-Durable-Timestamp': ts_0.internal, 'X-Backend-Durable-Timestamp': ts_0.internal,
'X-Object-Sysmeta-Ec-Frag-Index': '2'} 'X-Object-Sysmeta-Ec-Frag-Index': '2'}
self.assertDictContainsSubset(expect, resp.headers) self._assertDictContainsSubset(expect, resp.headers)
self.assertEqual(backend_frags, json.loads( self.assertEqual(backend_frags, json.loads(
resp.headers['X-Backend-Fragments'])) resp.headers['X-Backend-Fragments']))
self.assertNotIn('X-Object-Meta-Test', resp.headers) self.assertNotIn('X-Object-Meta-Test', resp.headers)