Stop syncing empty suffixes list
Change-Id: I918ab4ccbf4d081b26f4117937410cdad1caf8d3
Closes-Bug: #1862645
Closes-Bug: #1886782

parent 2e001431fd
commit 907942eb47
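
What the change does, as a minimal standalone sketch (the helper name and the dict-based stats below are illustrative only, not Swift's actual ObjectReplicator.update()): when every local suffix hash already matches the remote node's, the suffix comparison yields an empty list; rather than still bumping the rsync counter and calling sync() with nothing to push, the replicator now counts a hashmatch and moves on to the next node.

    # Illustrative sketch only; stats is a plain dict here, not the
    # replicator's real stats object.
    def suffixes_to_sync(local_hash, remote_hash, stats):
        suffixes = [suffix for suffix in local_hash
                    if local_hash[suffix] != remote_hash.get(suffix, -1)]
        if not suffixes:
            stats['hashmatch'] += 1   # nothing differs; skip the sync entirely
            return []
        stats['rsync'] += 1           # at least one suffix must be pushed
        return suffixes

    stats = {'hashmatch': 0, 'rsync': 0}
    local = {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}
    assert suffixes_to_sync(local, dict(local), stats) == []
    assert stats == {'hashmatch': 1, 'rsync': 0}
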
@@ -687,6 +687,9 @@ class ObjectReplicator(Daemon):
                     suffixes = [suffix for suffix in local_hash if
                                 local_hash[suffix] !=
                                 remote_hash.get(suffix, -1)]
+                    if not suffixes:
+                        stats.hashmatch += 1
+                        continue
                     stats.rsync += 1
                     success, _junk = self.sync(node, job, suffixes)
                     with Timeout(self.http_timeout):
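
For context on the comparison being guarded (an aside, not part of the change): remote_hash.get(suffix, -1) falls back to -1, so a suffix the remote node has no hash for always counts as differing and stays in the list to sync. A small illustration, where the second suffix name '1f0' is made up:

    local_hash = {'a83': 'c130a2c17ed45102aada0f4eee69494ff',
                  '1f0': 'ba47fd314242ec8c7efb91f5d57336e4'}
    remote_hash = {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}  # '1f0' missing remotely
    suffixes = [suffix for suffix in local_hash
                if local_hash[suffix] != remote_hash.get(suffix, -1)]
    assert suffixes == ['1f0']   # only the missing/differing suffix is pushed
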
@@ -1919,7 +1919,7 @@ class TestObjectReplicator(unittest.TestCase):
         # Check successful http_connection and exception with
         # incorrect pickle.loads(resp.read())
         resp.status = 200
-        resp.read.return_value = 'garbage'
+        resp.read.return_value = b'garbage'
         expect = 'Error syncing with node: %r: '
         for job in jobs:
             set_default(self)
@@ -1969,6 +1969,7 @@ class TestObjectReplicator(unittest.TestCase):
         self.assertEqual(stats.suffix_sync, 2)
         self.assertEqual(stats.suffix_hash, 1)
         self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 0)

         # Efficient Replication Case
         set_default(self)
@@ -1989,6 +1990,7 @@ class TestObjectReplicator(unittest.TestCase):
         self.assertEqual(stats.suffix_sync, 1)
         self.assertEqual(stats.suffix_hash, 1)
         self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 0)

         mock_http.reset_mock()
         self.logger.clear()
@@ -2015,6 +2017,39 @@ class TestObjectReplicator(unittest.TestCase):
                                   '/a83', headers=self.headers))
         mock_http.assert_has_calls(reqs, any_order=True)

+    @mock.patch('swift.obj.replicator.tpool.execute')
+    @mock.patch('swift.obj.replicator.http_connect', autospec=True)
+    @mock.patch('swift.obj.replicator._do_listdir')
+    def test_update_local_hash_changes_during_replication(
+            self, mock_do_listdir, mock_http, mock_tpool_execute):
+        mock_http.return_value = answer = mock.MagicMock()
+        answer.getresponse.return_value = resp = mock.MagicMock()
+        resp.status = 200
+        resp.read.return_value = pickle.dumps({
+            'a83': 'c130a2c17ed45102aada0f4eee69494ff'})
+
+        self.replicator.sync = fake_sync = \
+            mock.MagicMock(return_value=(True, []))
+        local_job = [
+            job for job in self.replicator.collect_jobs()
+            if not job['delete']
+            and job['partition'] == '0' and int(job['policy']) == 0
+        ][0]
+
+        mock_tpool_execute.side_effect = [
+            (1, {'a83': 'ba47fd314242ec8c7efb91f5d57336e4'}),
+            (1, {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}),
+        ]
+        self.replicator.update(local_job)
+        self.assertEqual(fake_sync.call_count, 0)
+        self.assertEqual(mock_http.call_count, 2)
+        stats = self.replicator.total_stats
+        self.assertEqual(stats.attempted, 1)
+        self.assertEqual(stats.suffix_sync, 0)
+        self.assertEqual(stats.suffix_hash, 1)
+        self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 2)
+
     def test_rsync_compress_different_region(self):
         self.assertEqual(self.replicator.sync_method, self.replicator.rsync)
         jobs = self.replicator.collect_jobs()