Merge "Get rid of contextlib.nested() for py3"
commit ec9095bf58
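contextlib.nested() was deprecated in Python 2.7 and does not exist in Python 3, so every call site in the diff below is rewritten as a single multi-manager `with` statement, continued across lines with backslashes. A minimal sketch of the rewrite pattern, using illustrative patch targets (os.path.exists and time.time are stand-ins, not taken from this diff):

    # Old form, Python 2 only:
    #
    #     from contextlib import nested
    #     with nested(mock.patch('os.path.exists', return_value=True),
    #                 mock.patch('time.time', return_value=0.0)):
    #         ...
    #
    # New form used throughout the commit: one `with` statement whose
    # context managers continue across lines with backslashes.
    import os
    import time
    from unittest import mock  # the tests themselves import the external mock package

    with mock.patch('os.path.exists', return_value=True), \
            mock.patch('time.time', return_value=0.0):
        assert os.path.exists('/no/such/path')  # patched to return True
        assert time.time() == 0.0               # patched to return 0.0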
@@ -22,7 +22,6 @@ import unittest
from logging import DEBUG
from mock import patch, call, DEFAULT
from contextlib import nested
import six

from swift.account import reaper
@@ -420,15 +419,14 @@ class TestReaper(unittest.TestCase):
self.reap_obj_fail = False
self.amount_delete_fail = 0
self.max_delete_fail = 0
ctx = [patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container),
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container),
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring),
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object)]
with nested(*ctx):
with patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container), \
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container), \
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring), \
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object):
r.reap_container('a', 'partition', acc_nodes, 'c')
self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 1)
self.assertEqual(r.stats_containers_deleted, 1)
@@ -439,15 +437,14 @@ class TestReaper(unittest.TestCase):
self.reap_obj_fail = False
self.amount_delete_fail = 0
self.max_delete_fail = 2
ctx = [patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container),
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container),
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring),
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object)]
with nested(*ctx):
with patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container), \
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container), \
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring), \
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object):
r.reap_container('a', 'partition', acc_nodes, 'c')
self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 2)
self.assertEqual(r.stats_containers_possibly_remaining, 1)
@@ -458,15 +455,14 @@ class TestReaper(unittest.TestCase):
self.reap_obj_fail = False
self.amount_delete_fail = 0
self.max_delete_fail = 3
ctx = [patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container),
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container),
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring),
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object)]
with nested(*ctx):
with patch('swift.account.reaper.direct_get_container',
self.fake_direct_get_container), \
patch('swift.account.reaper.direct_delete_container',
self.fake_direct_delete_container), \
patch('swift.account.reaper.AccountReaper.get_container_ring',
self.fake_container_ring), \
patch('swift.account.reaper.AccountReaper.reap_object',
self.fake_reap_object):
r.reap_container('a', 'partition', acc_nodes, 'c')
self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 3)
self.assertEqual(r.stats_containers_remaining, 1)
@@ -537,11 +533,10 @@ class TestReaper(unittest.TestCase):
self.r = r = self.init_reaper({}, fakelogger=True)
self.called_amount = 0
r.start_time = time.time()
ctx = [patch('swift.account.reaper.AccountReaper.reap_container',
self.fake_reap_container),
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring)]
with nested(*ctx):
with patch('swift.account.reaper.AccountReaper.reap_container',
self.fake_reap_container), \
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring):
nodes = r.get_account_ring().get_part_nodes()
self.assertTrue(r.reap_account(broker, 'partition', nodes))
self.assertTrue(r.logger.get_lines_for_level(
@@ -553,13 +548,12 @@ class TestReaper(unittest.TestCase):
self.called_amount = 0
conf = {'devices': devices}
r = self.init_reaper(conf)
ctx = [patch('swift.account.reaper.AccountBroker',
FakeAccountBroker),
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring),
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account)]
with nested(*ctx):
with patch('swift.account.reaper.AccountBroker',
FakeAccountBroker), \
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring), \
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account):
r.reap_device('sda1')
self.assertEqual(self.called_amount, 1)

@@ -568,13 +562,12 @@ class TestReaper(unittest.TestCase):
self.called_amount = 0
conf = {'devices': devices}
r = self.init_reaper(conf=conf)
ctx = [patch('swift.account.reaper.AccountBroker',
FakeAccountBroker),
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring),
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account)]
with nested(*ctx):
with patch('swift.account.reaper.AccountBroker',
FakeAccountBroker), \
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring), \
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account):
r.reap_device('sda1')
self.assertEqual(self.called_amount, 0)

@@ -583,13 +576,12 @@ class TestReaper(unittest.TestCase):
self.called_amount = 0
conf = {'devices': devices}
r = self.init_reaper(conf, myips=['10.10.1.2'])
ctx = [patch('swift.account.reaper.AccountBroker',
FakeAccountBroker),
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring),
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account)]
with nested(*ctx):
with patch('swift.account.reaper.AccountBroker',
FakeAccountBroker), \
patch('swift.account.reaper.AccountReaper.get_account_ring',
self.fake_account_ring), \
patch('swift.account.reaper.AccountReaper.reap_account',
self.fake_reap_account):
r.reap_device('sda1')
self.assertEqual(self.called_amount, 0)

@@ -632,14 +624,14 @@ class TestReaper(unittest.TestCase):
account_nodes, container):
container_reaped[0] += 1

ctx = [patch('swift.account.reaper.AccountBroker',
FakeAccountBroker),
patch('swift.account.reaper.AccountBroker.list_containers_iter',
fake_list_containers_iter),
patch('swift.account.reaper.AccountReaper.reap_container',
fake_reap_container), ]
fake_ring = FakeRing()
with nested(*ctx):
with patch('swift.account.reaper.AccountBroker',
FakeAccountBroker), \
patch(
'swift.account.reaper.AccountBroker.list_containers_iter',
fake_list_containers_iter), \
patch('swift.account.reaper.AccountReaper.reap_container',
fake_reap_container):
fake_broker = FakeAccountBroker(['c', 'd', 'e'])
r.reap_account(fake_broker, 10, fake_ring.nodes, 0)
self.assertEqual(container_reaped[0], 1)

@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from contextlib import nested
import json
import mock
import os
@@ -240,11 +239,8 @@ class TestRecon(unittest.TestCase):
mock_scout.return_value = scout_instance
stdout = StringIO()
mock_hash = mock.MagicMock()
patches = [
mock.patch('sys.stdout', new=stdout),
mock.patch('swift.cli.recon.md5', new=mock_hash),
]
with nested(*patches):
with mock.patch('sys.stdout', new=stdout), \
mock.patch('swift.cli.recon.md5', new=mock_hash):
mock_hash.return_value.hexdigest.return_value = \
empty_file_hash
self.recon_instance.get_ringmd5(hosts, self.swift_dir)
@@ -295,11 +291,9 @@ class TestRecon(unittest.TestCase):
return url, response, status, 0, 0

stdout = StringIO()
patches = [
mock.patch('swift.cli.recon.Scout.scout', mock_scout_quarantine),
mock.patch('sys.stdout', new=stdout),
]
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout',
mock_scout_quarantine), \
mock.patch('sys.stdout', new=stdout):
self.recon_instance.quarantine_check(hosts)

output = stdout.getvalue()
@@ -332,11 +326,9 @@ class TestRecon(unittest.TestCase):
return url, response, status, 0, 0

stdout = StringIO()
patches = [
mock.patch('swift.cli.recon.Scout.scout', mock_scout_driveaudit),
mock.patch('sys.stdout', new=stdout),
]
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout',
mock_scout_driveaudit), \
mock.patch('sys.stdout', new=stdout):
self.recon_instance.driveaudit_check(hosts)

output = stdout.getvalue()
@@ -394,19 +386,15 @@ class TestReconCommands(unittest.TestCase):
return url, response, status

stdout = StringIO()
patches = [
mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type),
mock.patch('sys.stdout', new=stdout),
]

res_object = 'Invalid: http://127.0.0.1:6010/ is object-server'
res_container = 'Invalid: http://127.0.0.1:6011/ is container-server'
res_account = 'Invalid: http://127.0.0.1:6012/ is account-server'
valid = "1/1 hosts ok, 0 error[s] while checking hosts."

# Test for object server type - default
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type_check(hosts)

output = stdout.getvalue()
@@ -415,7 +403,9 @@ class TestReconCommands(unittest.TestCase):
stdout.truncate(0)

# Test ok for object server type - default
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type_check([hosts[0]])

output = stdout.getvalue()
@@ -423,7 +413,9 @@ class TestReconCommands(unittest.TestCase):
stdout.truncate(0)

# Test for account server type
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type = 'account'
self.recon.server_type_check(hosts)

@@ -433,7 +425,9 @@ class TestReconCommands(unittest.TestCase):
stdout.truncate(0)

# Test ok for account server type
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type = 'account'
self.recon.server_type_check([hosts[2]])

@@ -442,7 +436,9 @@ class TestReconCommands(unittest.TestCase):
stdout.truncate(0)

# Test for container server type
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type = 'container'
self.recon.server_type_check(hosts)

@@ -452,7 +448,9 @@ class TestReconCommands(unittest.TestCase):
stdout.truncate(0)

# Test ok for container server type
with nested(*patches):
with mock.patch('swift.cli.recon.Scout.scout_server_type',
mock_scout_server_type), \
mock.patch('sys.stdout', new=stdout):
self.recon.server_type = 'container'
self.recon.server_type_check([hosts[1]])

@@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import contextlib
import hashlib
import json
import mock
@@ -701,12 +700,11 @@ class TestDloGetManifest(DloTestCase):
'/v1/AUTH_test/mancon/manifest',
environ={'REQUEST_METHOD': 'GET'})

with contextlib.nested(
mock.patch('swift.common.request_helpers.time.time',
mock_time),
with mock.patch('swift.common.request_helpers.time.time',
mock_time), \
mock.patch('swift.common.request_helpers.is_success',
mock_is_success),
mock.patch.object(dlo, 'is_success', mock_is_success)):
mock_is_success), \
mock.patch.object(dlo, 'is_success', mock_is_success):
status, headers, body, exc = self.call_dlo(
req, expect_exception=True)

@@ -19,7 +19,6 @@ from six.moves import range
import hashlib
import time
import unittest
from contextlib import nested
from mock import patch
from hashlib import md5
from swift.common import swob, utils
@@ -2250,13 +2249,13 @@ class TestSloGetManifest(SloTestCase):
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})

with nested(patch.object(slo, 'is_success', mock_is_success),
patch('swift.common.request_helpers.time.time',
mock_time),
patch('swift.common.request_helpers.is_success',
mock_is_success)):
status, headers, body, exc = self.call_slo(
req, expect_exception=True)
with patch.object(slo, 'is_success', mock_is_success), \
patch('swift.common.request_helpers.time.time',
mock_time), \
patch('swift.common.request_helpers.is_success',
mock_is_success):
status, headers, body, exc = self.call_slo(
req, expect_exception=True)

self.assertIsInstance(exc, SegmentError)
self.assertEqual(status, '200 OK')

@@ -16,7 +16,7 @@

import json
import unittest
from contextlib import contextmanager, nested
from contextlib import contextmanager
from base64 import b64encode
from time import time
import mock
@@ -273,8 +273,8 @@ class TestAuth(unittest.TestCase):
headers={'X-Auth-Token': 't',
'AUTHORIZATION': 'AWS s3:s3:pass'})

with nested(mock.patch('base64.urlsafe_b64decode'),
mock.patch('base64.encodestring')) as (msg, sign):
with mock.patch('base64.urlsafe_b64decode') as msg, \
mock.patch('base64.encodestring') as sign:
msg.return_value = ''
sign.return_value = 'pass'
resp = req.get_response(local_auth)

@@ -46,7 +46,6 @@ import traceback
import unittest
import fcntl
import shutil
from contextlib import nested

from getpass import getuser
from shutil import rmtree
@@ -1546,9 +1545,8 @@ class TestUtils(unittest.TestCase):
def my_ifaddress_error(interface):
raise ValueError

with nested(
patch('netifaces.interfaces', my_interfaces),
patch('netifaces.ifaddresses', my_ifaddress_error)):
with patch('netifaces.interfaces', my_interfaces), \
patch('netifaces.ifaddresses', my_ifaddress_error):
self.assertEqual(utils.whataremyips(), [])

def test_whataremyips_ipv6(self):
@@ -1562,9 +1560,8 @@ class TestUtils(unittest.TestCase):
return {AF_INET6:
[{'netmask': 'ffff:ffff:ffff:ffff::',
'addr': '%s%%%s' % (test_ipv6_address, test_interface)}]}
with nested(
patch('netifaces.interfaces', my_ipv6_interfaces),
patch('netifaces.ifaddresses', my_ipv6_ifaddresses)):
with patch('netifaces.interfaces', my_ipv6_interfaces), \
patch('netifaces.ifaddresses', my_ipv6_ifaddresses):
myips = utils.whataremyips()
self.assertEqual(len(myips), 1)
self.assertEqual(myips[0], test_ipv6_address)
@@ -1879,10 +1876,9 @@ log_name = %(yarr)s'''
curr_time[0] += 0.001
curr_time[0] += duration

with nested(
patch('time.time', my_time),
patch('time.sleep', my_sleep),
patch('eventlet.sleep', my_sleep)):
with patch('time.time', my_time), \
patch('time.sleep', my_sleep), \
patch('eventlet.sleep', my_sleep):
start = time.time()
func(*args, **kwargs)
# make sure it's accurate to 10th of a second, converting the time
@@ -3828,9 +3824,8 @@ class TestRateLimitedIterator(unittest.TestCase):
curr_time[0] += 0.001
curr_time[0] += duration

with nested(
patch('time.time', my_time),
patch('eventlet.sleep', my_sleep)):
with patch('time.time', my_time), \
patch('eventlet.sleep', my_sleep):
return func(*args, **kwargs)

def test_rate_limiting(self):

@@ -22,7 +22,6 @@ import socket
import unittest
import os
from textwrap import dedent
from contextlib import nested
from collections import defaultdict

from eventlet import listen
@@ -413,13 +412,12 @@ class TestWSGI(unittest.TestCase):
with open(conf_file, 'w') as f:
f.write(contents.replace('TEMPDIR', t))
_fake_rings(t)
with nested(
mock.patch('swift.proxy.server.Application.'
'modify_wsgi_pipeline'),
mock.patch('swift.common.wsgi.wsgi'),
mock.patch('swift.common.wsgi.eventlet'),
mock.patch('swift.common.wsgi.inspect',
getargspec=argspec_stub)) as (_, _wsgi, _, _):
with mock.patch('swift.proxy.server.Application.'
'modify_wsgi_pipeline'), \
mock.patch('swift.common.wsgi.wsgi') as _wsgi, \
mock.patch('swift.common.wsgi.eventlet'), \
mock.patch('swift.common.wsgi.inspect',
getargspec=argspec_stub):
conf = wsgi.appconfig(conf_file)
logger = logging.getLogger('test')
sock = listen(('localhost', 0))
@@ -658,15 +656,15 @@ class TestWSGI(unittest.TestCase):
self.assertEqual(kwargs['global_conf'],
{'log_name': 'log_name', 'test1': 'one'})

with nested(
mock.patch.object(wsgi, '_initrp', _initrp),
mock.patch.object(wsgi, 'get_socket'),
mock.patch.object(wsgi, 'drop_privileges'),
mock.patch.object(wsgi, 'loadapp', _loadapp),
mock.patch.object(wsgi, 'capture_stdio'),
mock.patch.object(wsgi, 'run_server')):
with mock.patch.object(wsgi, '_initrp', _initrp), \
mock.patch.object(wsgi, 'get_socket'), \
mock.patch.object(wsgi, 'drop_privileges'), \
mock.patch.object(wsgi, 'loadapp', _loadapp), \
mock.patch.object(wsgi, 'capture_stdio'), \
mock.patch.object(wsgi, 'run_server'):
wsgi.run_wsgi('conf_file', 'app_section',
global_conf_callback=_global_conf_callback)

self.assertEqual(calls['_global_conf_callback'], 1)
self.assertEqual(calls['_loadapp'], 1)

@@ -683,13 +681,12 @@ class TestWSGI(unittest.TestCase):
def _loadapp(uri, name=None, **kwargs):
calls['_loadapp'] += 1

with nested(
mock.patch.object(wsgi, '_initrp', _initrp),
mock.patch.object(wsgi, 'get_socket'),
mock.patch.object(wsgi, 'drop_privileges'),
mock.patch.object(wsgi, 'loadapp', _loadapp),
mock.patch.object(wsgi, 'capture_stdio'),
mock.patch.object(wsgi, 'run_server')):
with mock.patch.object(wsgi, '_initrp', _initrp), \
mock.patch.object(wsgi, 'get_socket'), \
mock.patch.object(wsgi, 'drop_privileges'), \
mock.patch.object(wsgi, 'loadapp', _loadapp), \
mock.patch.object(wsgi, 'capture_stdio'), \
mock.patch.object(wsgi, 'run_server'):
rc = wsgi.run_wsgi('conf_file', 'app_section')
self.assertEqual(calls['_initrp'], 1)
self.assertEqual(calls['_loadapp'], 1)
@@ -764,13 +761,12 @@ class TestWSGI(unittest.TestCase):
def _loadapp(uri, name=None, **kwargs):
calls['_loadapp'] += 1

with nested(
mock.patch.object(wsgi, '_initrp', _initrp),
mock.patch.object(wsgi, 'get_socket'),
mock.patch.object(wsgi, 'drop_privileges'),
mock.patch.object(wsgi, 'loadapp', _loadapp),
mock.patch.object(wsgi, 'capture_stdio'),
mock.patch.object(wsgi, 'run_server')):
with mock.patch.object(wsgi, '_initrp', _initrp), \
mock.patch.object(wsgi, 'get_socket'), \
mock.patch.object(wsgi, 'drop_privileges'), \
mock.patch.object(wsgi, 'loadapp', _loadapp), \
mock.patch.object(wsgi, 'capture_stdio'), \
mock.patch.object(wsgi, 'run_server'):
rc = wsgi.run_wsgi('conf_file', 'app_section')
self.assertEqual(calls['_initrp'], 1)
self.assertEqual(calls['_loadapp'], 0)

@@ -12,7 +12,6 @@
# limitations under the License.

import json
import contextlib
import mock
import operator
import time
@@ -567,10 +566,8 @@ class TestReconcilerUtils(unittest.TestCase):
mock_direct_delete = mock.MagicMock()
mock_direct_delete.side_effect = stub_resp

with contextlib.nested(
mock.patch(mock_path, mock_direct_delete),
mock.patch('eventlet.greenpool.DEBUG', False),
):
with mock.patch(mock_path, mock_direct_delete), \
mock.patch('eventlet.greenpool.DEBUG', False):
rv = reconciler.direct_delete_container_entry(
self.fake_ring, 'a', 'c', 'o')
self.assertEqual(rv, None)
@@ -623,11 +620,9 @@ class TestReconcilerUtils(unittest.TestCase):

fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
now = time.time()
with contextlib.nested(
mock.patch(mock_path, fake_hc),
with mock.patch(mock_path, fake_hc), \
mock.patch('swift.container.reconciler.time.time',
lambda: now),
):
lambda: now):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'PUT',
force=True)

@@ -16,7 +16,6 @@

import os
import unittest
from contextlib import nested
from textwrap import dedent

import mock
@@ -492,10 +491,9 @@ class TestContainerSync(unittest.TestCase):
metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
'x-container-sync-key': ('key', 1)},
items_since=[{'ROWID': 1, 'name': 'o'}])
with nested(
mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb),
mock.patch('swift.container.sync.hash_path', fake_hash_path)):
with mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb), \
mock.patch('swift.container.sync.hash_path', fake_hash_path):
cs._myips = ['10.0.0.0'] # Match
cs._myport = 1000 # Match
cs.allowed_sync_hosts = ['127.0.0.1']
@@ -520,10 +518,9 @@ class TestContainerSync(unittest.TestCase):
'x-container-sync-key':
('key', 1)},
items_since=[{'ROWID': 1, 'name': 'o'}])
with nested(
mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb),
mock.patch('swift.container.sync.hash_path', fake_hash_path)):
with mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb), \
mock.patch('swift.container.sync.hash_path', fake_hash_path):
cs._myips = ['10.0.0.0'] # Match
cs._myport = 1000 # Match
cs.allowed_sync_hosts = ['127.0.0.1']
@@ -567,11 +564,10 @@ class TestContainerSync(unittest.TestCase):
'x-container-sync-key': ('key', 1)},
items_since=[{'ROWID': 1, 'name': 'o', 'created_at': '1.2',
'deleted': True}])
with nested(
mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb),
with mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb), \
mock.patch('swift.container.sync.delete_object',
fake_delete_object)):
fake_delete_object):
cs._myips = ['10.0.0.0'] # Match
cs._myport = 1000 # Match
cs.allowed_sync_hosts = ['127.0.0.1']
@@ -592,11 +588,10 @@ class TestContainerSync(unittest.TestCase):
'x-container-sync-key': ('key', 1)},
items_since=[{'ROWID': 1, 'name': 'o', 'created_at': '1.2',
'deleted': True}])
with nested(
mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb),
with mock.patch('swift.container.sync.ContainerBroker',
lambda p: fcb), \
mock.patch('swift.container.sync.delete_object',
lambda *x, **y: None)):
lambda *x, **y: None):
cs._myips = ['10.0.0.0'] # Match
cs._myport = 1000 # Match
cs.allowed_sync_hosts = ['127.0.0.1']

@@ -33,7 +33,7 @@ from shutil import rmtree
from time import time
from tempfile import mkdtemp
from hashlib import md5
from contextlib import closing, nested, contextmanager
from contextlib import closing, contextmanager
from gzip import GzipFile

from eventlet import hubs, timeout, tpool
@@ -689,11 +689,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_dev_path_fail(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value=None)
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = ['1381679759.90941.data']
readmeta.return_value = {'name': '/a/c/o'}
self.assertRaises(
@@ -703,12 +702,12 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_not_dir(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata'),
mock.patch(self._manager_mock('quarantine_renamer'))) as \
(dfclass, hclistdir, readmeta, quarantine_renamer):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta, \
mock.patch(self._manager_mock(
'quarantine_renamer')) as quarantine_renamer:
osexc = OSError()
osexc.errno = errno.ENOTDIR
hclistdir.side_effect = osexc
@@ -723,11 +722,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_no_dir(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
osexc = OSError()
osexc.errno = errno.ENOENT
hclistdir.side_effect = osexc
@@ -739,11 +737,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_other_oserror(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
osexc = OSError()
hclistdir.side_effect = osexc
readmeta.return_value = {'name': '/a/c/o'}
@@ -754,11 +751,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_no_actual_files(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = []
readmeta.return_value = {'name': '/a/c/o'}
self.assertRaises(
@@ -768,11 +764,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_read_metadata_problem(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = ['1381679759.90941.data']
readmeta.side_effect = EOFError()
self.assertRaises(
@@ -782,11 +777,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_no_meta_name(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = ['1381679759.90941.data']
readmeta.return_value = {}
try:
@@ -799,11 +793,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash_bad_meta_name(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')), \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = ['1381679759.90941.data']
readmeta.return_value = {'name': 'bad'}
try:
@@ -816,11 +809,10 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):

def test_get_diskfile_from_hash(self):
self.df_mgr.get_dev_path = mock.MagicMock(return_value='/srv/dev/')
with nested(
mock.patch(self._manager_mock('diskfile_cls')),
mock.patch(self._manager_mock('hash_cleanup_listdir')),
mock.patch('swift.obj.diskfile.read_metadata')) as \
(dfclass, hclistdir, readmeta):
with mock.patch(self._manager_mock('diskfile_cls')) as dfclass, \
mock.patch(self._manager_mock(
'hash_cleanup_listdir')) as hclistdir, \
mock.patch('swift.obj.diskfile.read_metadata') as readmeta:
hclistdir.return_value = ['1381679759.90941.data']
readmeta.return_value = {'name': '/a/c/o'}
self.df_mgr.get_diskfile_from_hash(
@@ -924,9 +916,8 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):
expected_items = [
(os.path.join(part_path, hash_[-3:], hash_), hash_, timestamps)
for hash_, timestamps in expected.items()]
with nested(
mock.patch('os.listdir', _listdir),
mock.patch('os.unlink')):
with mock.patch('os.listdir', _listdir), \
mock.patch('os.unlink'):
df_mgr = self.df_router[policy]
hash_items = list(df_mgr.yield_hashes(
device, part, policy, **kwargs))

@@ -26,7 +26,7 @@ import random
import struct
from eventlet import Timeout, sleep

from contextlib import closing, nested, contextmanager
from contextlib import closing, contextmanager
from gzip import GzipFile
from shutil import rmtree
from swift.common import utils
@@ -1060,25 +1060,24 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):

def test_process_job_all_timeout(self):
self.reconstructor._reset_stats()
with mock_ssync_sender():
with nested(mocked_http_conn(*[Timeout()] * 8)):
found_jobs = []
for part_info in self.reconstructor.collect_parts():
jobs = self.reconstructor.build_reconstruction_jobs(
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.reconstructor.process_job(job)
for line in self.logger.get_lines_for_level('error'):
self.assertTrue('Timeout (Nones)' in line)
self.assertStatCount(
'update_stats', 'suffix.hashes', 0)
self.assertStatCount(
'update_stats', 'suffix.syncs', 0)
self.assertEqual(self.reconstructor.suffix_sync, 0)
self.assertEqual(self.reconstructor.suffix_count, 0)
self.assertEqual(len(found_jobs), 6)
with mock_ssync_sender(), mocked_http_conn(*[Timeout()] * 8):
found_jobs = []
for part_info in self.reconstructor.collect_parts():
jobs = self.reconstructor.build_reconstruction_jobs(
part_info)
found_jobs.extend(jobs)
for job in jobs:
self.logger._clear()
self.reconstructor.process_job(job)
for line in self.logger.get_lines_for_level('error'):
self.assertTrue('Timeout (Nones)' in line)
self.assertStatCount(
'update_stats', 'suffix.hashes', 0)
self.assertStatCount(
'update_stats', 'suffix.syncs', 0)
self.assertEqual(self.reconstructor.suffix_sync, 0)
self.assertEqual(self.reconstructor.suffix_count, 0)
self.assertEqual(len(found_jobs), 6)


@patch_policies(with_ec_default=True)
@@ -1174,10 +1173,10 @@ class TestObjectReconstructor(unittest.TestCase):
'replication_port': self.port,
})
self.reconstructor.bind_ip = '0.0.0.0' # use whataremyips
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs):
part_infos = list(self.reconstructor.collect_parts())
found_parts = sorted(int(p['partition']) for p in part_infos)
expected_parts = sorted(itertools.chain(
@@ -1226,10 +1225,10 @@ class TestObjectReconstructor(unittest.TestCase):
'replication_port': self.port,
})
self.reconstructor.bind_ip = '0.0.0.0' # use whataremyips
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs):
part_infos = list(self.reconstructor.collect_parts())
found_parts = sorted(int(p['partition']) for p in part_infos)
expected_parts = sorted(itertools.chain(
@@ -1266,10 +1265,10 @@ class TestObjectReconstructor(unittest.TestCase):
'replication_port': self.port,
} for dev in local_devs]
self.reconstructor.bind_ip = '0.0.0.0' # use whataremyips
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs):
part_infos = list(self.reconstructor.collect_parts())
found_parts = sorted(int(p['partition']) for p in part_infos)
expected_parts = sorted(itertools.chain(
@@ -1297,10 +1296,10 @@ class TestObjectReconstructor(unittest.TestCase):
'replication_ip': self.ip,
'replication_port': self.port
} for dev in local_devs]
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs):
part_infos = list(self.reconstructor.collect_parts())
self.assertEqual(2, len(part_infos)) # sanity
self.assertEqual(set(int(p['partition']) for p in part_infos),
@@ -1312,12 +1311,12 @@ class TestObjectReconstructor(unittest.TestCase):
paths.append(os.path.join(devices, device))
return False

with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs),
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs), \
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount):
part_infos = list(self.reconstructor.collect_parts())
self.assertEqual(2, len(part_infos)) # sanity, same jobs
self.assertEqual(set(int(p['partition']) for p in part_infos),
@@ -1331,12 +1330,12 @@ class TestObjectReconstructor(unittest.TestCase):
self.assertTrue(self.reconstructor.mount_check)
for policy in POLICIES:
self.assertTrue(self.reconstructor._df_router[policy].mount_check)
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs),
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs), \
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount):
part_infos = list(self.reconstructor.collect_parts())
self.assertEqual([], part_infos) # sanity, no jobs

@@ -1351,12 +1350,12 @@ class TestObjectReconstructor(unittest.TestCase):
else:
return False

with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs),
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs), \
mock.patch('swift.obj.diskfile.check_mount',
fake_check_mount):
part_infos = list(self.reconstructor.collect_parts())
self.assertEqual(1, len(part_infos)) # only sda picked up (part 0)
self.assertEqual(part_infos[0]['partition'], 0)
@@ -1373,14 +1372,14 @@ class TestObjectReconstructor(unittest.TestCase):
fake_unlink = mock.MagicMock()
self.reconstructor.reclaim_age = 1000
now = time.time()
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch('swift.obj.reconstructor.time.time',
return_value=now),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs),
mock.patch('swift.obj.reconstructor.unlink_older_than',
fake_unlink)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch('swift.obj.reconstructor.time.time',
return_value=now), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs), \
mock.patch('swift.obj.reconstructor.unlink_older_than',
fake_unlink):
self.assertEqual([], list(self.reconstructor.collect_parts()))
# each local device hash unlink_older_than called on it,
# with now - self.reclaim_age
@@ -1406,10 +1405,10 @@ class TestObjectReconstructor(unittest.TestCase):
datadir_path = os.path.join(self.devices, self.local_dev['device'],
diskfile.get_data_dir(self.policy))
utils.mkdirs(os.path.dirname(datadir_path))
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch('swift.obj.reconstructor.mkdirs',
side_effect=OSError('kaboom!'))):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch('swift.obj.reconstructor.mkdirs',
side_effect=OSError('kaboom!')):
self.assertEqual([], list(self.reconstructor.collect_parts()))
error_lines = self.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
@@ -1511,10 +1510,10 @@ class TestObjectReconstructor(unittest.TestCase):
('sda', 843),
]),
)
with nested(mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]),
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs)):
with mock.patch('swift.obj.reconstructor.whataremyips',
return_value=[self.ip]), \
mock.patch.object(self.policy.object_ring, '_devs',
new=stub_ring_devs):
for kwargs, expected_parts in expected:
part_infos = list(self.reconstructor.collect_parts(**kwargs))
expected_paths = set(
@@ -1851,12 +1850,11 @@ class TestObjectReconstructor(unittest.TestCase):

ssync_calls = []

with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)

expected_suffix_calls = set([
('10.0.0.1', '/sdb/0'),
@@ -1904,12 +1902,11 @@ class TestObjectReconstructor(unittest.TestCase):
codes, body_iter = zip(*responses)

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)

expected_suffix_calls = set([
('10.0.0.1', '/sdb/0'),
@@ -1975,12 +1972,11 @@ class TestObjectReconstructor(unittest.TestCase):
codes, body_iter = zip(*responses)

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)

expected_suffix_calls = set([
('10.0.0.1', '/sdb/0'),
@@ -2041,12 +2037,11 @@ class TestObjectReconstructor(unittest.TestCase):

ssync_calls = []

with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*codes, body_iter=body_iter) as request_log:
self.reconstructor.process_job(job)

expected_suffix_calls = set([
('10.0.0.1', '/sdb/0'),
@@ -2114,14 +2109,13 @@ class TestObjectReconstructor(unittest.TestCase):
])

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback),
with mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)

found_suffix_calls = set((r['ip'], r['path'])
for r in request_log.requests)
@@ -2176,12 +2170,11 @@ class TestObjectReconstructor(unittest.TestCase):
for r in expected_suffix_calls]

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*codes) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*codes) as request_log:
self.reconstructor.process_job(job)

found_suffix_calls = set((r['ip'], r['path'])
for r in request_log.requests)
@@ -2217,12 +2210,11 @@ class TestObjectReconstructor(unittest.TestCase):
}

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls),
with mock_ssync_sender(ssync_calls), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(200, body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(200, body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)

expected_suffix_calls = set([
(sync_to[0]['ip'], '/%s/0/123-abc' % sync_to[0]['device']),
@@ -2279,14 +2271,13 @@ class TestObjectReconstructor(unittest.TestCase):
])

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback),
with mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)

found_suffix_calls = set((r['ip'], r['path'])
for r in request_log.requests)
@@ -2339,14 +2330,13 @@ class TestObjectReconstructor(unittest.TestCase):
])

ssync_calls = []
with nested(
mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback),
with mock_ssync_sender(ssync_calls,
response_callback=ssync_response_callback), \
mock.patch('swift.obj.diskfile.ECDiskFileManager._get_hashes',
return_value=(None, stub_hashes))):
with mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)
return_value=(None, stub_hashes)), \
mocked_http_conn(*[200] * len(expected_suffix_calls),
body=pickle.dumps({})) as request_log:
self.reconstructor.process_job(job)

found_suffix_calls = set((r['ip'], r['path'])
for r in request_log.requests)

@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import contextlib
import hashlib
import os
import shutil
@@ -367,17 +366,12 @@ class TestReceiver(unittest.TestCase):
self.assertFalse(mocked_replication_semaphore.release.called)

def test_SSYNC_mount_check(self):
with contextlib.nested(
mock.patch.object(
self.controller, 'replication_semaphore'),
with mock.patch.object(self.controller, 'replication_semaphore'), \
mock.patch.object(
self.controller._diskfile_router[POLICIES.legacy],
'mount_check', False),
'mount_check', False), \
mock.patch('swift.obj.diskfile.check_mount',
return_value=False)) as (
mocked_replication_semaphore,
mocked_mount_check,
mocked_check_mount):
return_value=False) as mocked_check_mount:
req = swob.Request.blank(
'/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
resp = req.get_response(self.controller)
@@ -387,17 +381,12 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(mocked_check_mount.called)

with contextlib.nested(
mock.patch.object(
self.controller, 'replication_semaphore'),
with mock.patch.object(self.controller, 'replication_semaphore'), \
mock.patch.object(
self.controller._diskfile_router[POLICIES.legacy],
'mount_check', True),
'mount_check', True), \
mock.patch('swift.obj.diskfile.check_mount',
return_value=False)) as (
mocked_replication_semaphore,
mocked_mount_check,
mocked_check_mount):
return_value=False) as mocked_check_mount:
req = swob.Request.blank(
'/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
resp = req.get_response(self.controller)
@@ -932,13 +921,11 @@ class TestReceiver(unittest.TestCase):
return self.mock_socket

self.controller.client_timeout = 0.01
with contextlib.nested(
mock.patch.object(
ssync_receiver.eventlet.greenio, 'shutdown_safe'),
with mock.patch.object(ssync_receiver.eventlet.greenio,
'shutdown_safe') as mock_shutdown_safe, \
mock.patch.object(
self.controller, 'DELETE',
return_value=swob.HTTPNoContent())) as (
mock_shutdown_safe, mock_delete):
return_value=swob.HTTPNoContent()):
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
@@ -1584,10 +1571,9 @@ class TestReceiver(unittest.TestCase):
_requests.append(request)
return swob.HTTPNoContent()

with contextlib.nested(
mock.patch.object(self.controller, 'PUT', _PUT),
mock.patch.object(self.controller, 'POST', _POST),
mock.patch.object(self.controller, 'DELETE', _DELETE)):
with mock.patch.object(self.controller, 'PUT', _PUT), \
mock.patch.object(self.controller, 'POST', _POST), \
mock.patch.object(self.controller, 'DELETE', _DELETE):
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',

@@ -22,7 +22,7 @@ import os
import pickle
import sys
import unittest
from contextlib import closing, contextmanager, nested
from contextlib import closing, contextmanager
from gzip import GzipFile
from shutil import rmtree
import gc
@@ -2049,10 +2049,8 @@ class TestObjectController(unittest.TestCase):
commit_confirmation = \
'swift.proxy.controllers.obj.ECPutter.send_commit_confirmation'

with nested(
mock.patch('swift.obj.server.md5', busted_md5_constructor),
mock.patch(commit_confirmation, mock_committer)) as \
(_junk, commit_call):
with mock.patch('swift.obj.server.md5', busted_md5_constructor), \
mock.patch(commit_confirmation, mock_committer):
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/quorum HTTP/1.1\r\n'
'Host: localhost\r\n'
@@ -2102,10 +2100,8 @@ class TestObjectController(unittest.TestCase):
commit_confirmation = \
'swift.proxy.controllers.obj.ECPutter.send_commit_confirmation'

with nested(
mock.patch(read_footer),
mock.patch(commit_confirmation, mock_committer)) as \
(read_footer_call, commit_call):
with mock.patch(read_footer) as read_footer_call, \
mock.patch(commit_confirmation, mock_committer):
# Emulate missing footer MIME doc in all object-servers
read_footer_call.side_effect = HTTPBadRequest(
body="couldn't find footer MIME doc")
@@ -3891,11 +3887,10 @@ class TestObjectController(unittest.TestCase):

def test_iter_nodes_gives_extra_if_error_limited_inline(self):
object_ring = self.app.get_object_ring(None)
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
with mock.patch.object(self.app, 'sort_nodes', lambda n: n), \
mock.patch.object(self.app, 'request_node_count',
lambda r: 6),
mock.patch.object(object_ring, 'max_more_nodes', 99)):
lambda r: 6), \
mock.patch.object(object_ring, 'max_more_nodes', 99):
first_nodes = list(self.app.iter_nodes(object_ring, 0))
second_nodes = []
for node in self.app.iter_nodes(object_ring, 0):
@@ -3909,18 +3904,16 @@ class TestObjectController(unittest.TestCase):
object_ring = self.app.get_object_ring(None)
node_list = [dict(id=n, ip='1.2.3.4', port=n, device='D')
for n in range(10)]
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
with mock.patch.object(self.app, 'sort_nodes', lambda n: n), \
mock.patch.object(self.app, 'request_node_count',
lambda r: 3)):
lambda r: 3):
got_nodes = list(self.app.iter_nodes(object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list[:3], got_nodes)

with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
with mock.patch.object(self.app, 'sort_nodes', lambda n: n), \
mock.patch.object(self.app, 'request_node_count',
lambda r: 1000000)):
lambda r: 1000000):
got_nodes = list(self.app.iter_nodes(object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list, got_nodes)
@@ -6189,20 +6182,18 @@ class TestECMismatchedFA(unittest.TestCase):
# Server obj1 will have the first version of the object (obj2 also
# gets it, but that gets stepped on later)
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj3srv, 'PUT', bad_disk),
with mock.patch.object(obj3srv, 'PUT', bad_disk), \
mock.patch(
'swift.common.storage_policy.ECStoragePolicy.quorum')):
'swift.common.storage_policy.ECStoragePolicy.quorum'):
type(ec_policy).quorum = mock.PropertyMock(return_value=2)
resp = put_req1.get_response(prosrv)
self.assertEqual(resp.status_int, 201)

# Servers obj2 and obj3 will have the second version of the object.
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj1srv, 'PUT', bad_disk),
with mock.patch.object(obj1srv, 'PUT', bad_disk), \
mock.patch(
'swift.common.storage_policy.ECStoragePolicy.quorum')):
'swift.common.storage_policy.ECStoragePolicy.quorum'):
type(ec_policy).quorum = mock.PropertyMock(return_value=2)
resp = put_req2.get_response(prosrv)
self.assertEqual(resp.status_int, 201)
@@ -6212,9 +6203,8 @@ class TestECMismatchedFA(unittest.TestCase):
environ={"REQUEST_METHOD": "GET"},
headers={"X-Auth-Token": "t"})
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj1srv, 'GET', bad_disk),
mock.patch.object(obj2srv, 'GET', bad_disk)):
with mock.patch.object(obj1srv, 'GET', bad_disk), \
mock.patch.object(obj2srv, 'GET', bad_disk):
resp = get_req.get_response(prosrv)
self.assertEqual(resp.status_int, 503)
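
A closing note, not part of the commit itself: where the old tests built a list of patches and applied it with nested(*ctx), Python 3's contextlib.ExitStack (3.3+, with the contextlib2 backport available for Python 2) is the usual replacement when the number of context managers is only known at runtime. A minimal sketch with illustrative targets (the patch list below is a stand-in, not taken from the Swift test suite):

    import contextlib
    import os
    import time
    from unittest import mock

    # Stand-in for the removed `ctx = [...]` / `patches = [...]` lists.
    patches = [
        mock.patch('os.path.exists', return_value=True),
        mock.patch('time.time', return_value=0.0),
    ]

    with contextlib.ExitStack() as stack:
        for p in patches:
            stack.enter_context(p)  # each patch stays active until the block exits
        assert os.path.exists('/no/such/path') and time.time() == 0.0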