Fix misleading hash invalidations test comments

...and refactor two extremely similar tests to use a single
helper method - the only parameterization being whether or not
hashes.pkl exists at the start of the test.

Change-Id: I601218a9a031e7fc77bc53ea735e89700ec1647d
Related-Change: Ia43ec2cf7ab715ec37f0044625a10aeb6420f6e3
Alistair Coles 2017-01-31 13:30:20 +00:00
parent fa456a590c
commit 6249945a4f
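
To illustrate the refactor described above, here is a minimal sketch
(shared helper body elided) using the method names that appear in the
diff below:

import unittest


class TestSuffixHashes(unittest.TestCase):

    def _test_invalidate_hash_racing_get_hashes_diff_suffix(self, existing):
        # shared test body (elided); `existing` controls whether hashes.pkl
        # is created before the simulated race - see the diff below
        ...

    def test_invalidate_hash_racing_get_hashes_diff_suffix_new_part(self):
        self._test_invalidate_hash_racing_get_hashes_diff_suffix(False)

    def test_invalidate_hash_racing_get_hashes_diff_suffix_existing_part(self):
        self._test_invalidate_hash_racing_get_hashes_diff_suffix(True)

The leading underscore keeps the shared helper out of unittest's test
discovery, so only the two thin wrapper tests are collected and run.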


@@ -6148,11 +6148,15 @@ class TestSuffixHashes(unittest.TestCase):
             }
             self.assertEqual(open_log, expected)
-    def test_invalidates_hashes_of_new_partition(self):
-        # a suffix can be changed or created by second process when new pkl
-        # is calculated - that suffix must be correct on next get_hashes call
+    def _test_invalidate_hash_racing_get_hashes_diff_suffix(self, existing):
+        # a suffix can be changed or created by second process while new pkl is
+        # being calculated - verify that suffix is correct after next
+        # get_hashes call
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
+            if existing:
+                # force hashes.pkl to exist
+                df_mgr.get_hashes('sda1', '0', [], policy)
             orig_listdir = os.listdir
             df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
                                      policy=policy)
@@ -6163,7 +6167,7 @@ class TestSuffixHashes(unittest.TestCase):
             df.delete(self.ts())
             def mock_listdir(*args, **kwargs):
-                # simulating an invalidation occuring in another process while
+                # simulating an invalidation occurring in another process while
                 # get_hashes is executing
                 result = orig_listdir(*args, **kwargs)
                 if not non_local['df2touched']:
@@ -6185,52 +6189,11 @@ class TestSuffixHashes(unittest.TestCase):
             self.assertIn(suffix, hashes)
             self.assertIn(suffix2, hashes)
-    def test_hash_invalidations_survive_racing_get_hashes_diff_suffix(self):
-        # get_hashes must repeat path listing and return all hashes when
-        # another concurrent process created new pkl before hashes are stored
-        # by the first process
-        non_local = {}
-        for policy in self.iter_policies():
-            df_mgr = self.df_router[policy]
-            # force hashes.pkl to exist; when it does not exist that's fine,
-            # it's just a different race; in that case the invalidation file
-            # gets appended, but we don't restart hashing suffixes (the
-            # invalidation get's squashed in and the suffix gets rehashed on
-            # the next REPLICATE call)
-            df_mgr.get_hashes('sda1', '0', [], policy)
-            orig_listdir = os.listdir
-            df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
-                                     policy=policy)
-            suffix = os.path.basename(os.path.dirname(df._datadir))
-            df2 = self.get_different_suffix_df(df)
-            suffix2 = os.path.basename(os.path.dirname(df2._datadir))
-            non_local['df2touched'] = False
+    def test_invalidate_hash_racing_get_hashes_diff_suffix_new_part(self):
+        self._test_invalidate_hash_racing_get_hashes_diff_suffix(False)
-            df.delete(self.ts())
-            def mock_listdir(*args, **kwargs):
-                # simulating hashes.pkl modification by another process while
-                # get_hashes is executing
-                # df2 is created to check path hashes recalculation
-                result = orig_listdir(*args, **kwargs)
-                if not non_local['df2touched']:
-                    non_local['df2touched'] = True
-                    df2.delete(self.ts())
-                return result
-            with mock.patch('swift.obj.diskfile.os.listdir',
-                            mock_listdir):
-                # creates pkl file but leaves invalidation alone
-                hashes = df_mgr.get_hashes('sda1', '0', [], policy)
-            # suffix2 just sits in the invalidations file
-            self.assertIn(suffix, hashes)
-            self.assertNotIn(suffix2, hashes)
-            # it'll show up next hash
-            hashes = df_mgr.get_hashes('sda1', '0', [], policy)
-            self.assertIn(suffix, hashes)
-            self.assertIn(suffix2, hashes)
+    def test_invalidate_hash_racing_get_hashes_diff_suffix_existing_part(self):
+        self._test_invalidate_hash_racing_get_hashes_diff_suffix(True)
     def test_hash_invalidations_survive_racing_get_hashes_same_suffix(self):
         # verify that when two processes concurrently call get_hashes, then any