From e772cf95c65b98bb17b094585ff3ff2040c148c9 Mon Sep 17 00:00:00 2001
From: Clay Gerrard
Date: Tue, 10 Jan 2017 20:27:06 -0800
Subject: [PATCH] Extract test pattern to helper
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

An existing test in diskfile established prior art for a pattern to
create a diskfile with a different suffix - I'd like to make use of it
in new tests in multiple unrelated change sets.

Also add a test to demonstrate some existing robustness and prevent
regression.

Author: Pavel Kvasnička
Co-Author: Alistair Coles
Related-Change-Id: I64cadb1a3feb4d819d545137eecfc295389794f0
Change-Id: I3a661fae5c7cfeb2dbcdb7f46941f55244d0b9ad
---
 test/unit/obj/test_diskfile.py | 70 +++++++++++++++++++++++++++++-----
 1 file changed, 60 insertions(+), 10 deletions(-)

diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py
index 146f26ff21..8fd67edaff 100644
--- a/test/unit/obj/test_diskfile.py
+++ b/test/unit/obj/test_diskfile.py
@@ -5699,6 +5699,19 @@ class TestSuffixHashes(unittest.TestCase):
         with self.policy_in_message():
             unittest.TestCase.assertEqual(self, *args)
 
+    def get_different_suffix_df(self, df, df_mgr, device, partition,
+                                account, container, **kwargs):
+        # returns diskfile in the same partition with different suffix
+        suffix_dir = os.path.dirname(df._datadir)
+        i = 0
+        while True:
+            df2 = df_mgr.get_diskfile(device, partition, account, container,
+                                      'o%d' % i, **kwargs)
+            i += 1
+            suffix_dir2 = os.path.dirname(df2._datadir)
+            if suffix_dir != suffix_dir2:
+                return df2
+
     def check_cleanup_ondisk_files(self, policy, input_files, output_files):
         orig_unlink = os.unlink
         file_list = list(input_files)
@@ -6096,16 +6109,11 @@ class TestSuffixHashes(unittest.TestCase):
 
             # invalidate a different suffix hash in same partition but not in
             # existing hashes.pkl
-            i = 0
-            while True:
-                df2 = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o%d' % i,
-                                          policy=policy)
-                i += 1
-                suffix_dir2 = os.path.dirname(df2._datadir)
-                if suffix_dir != suffix_dir2:
-                    break
-
+            df2 = self.get_different_suffix_df(df, df_mgr,
+                                               'sda1', '0', 'a', 'c',
+                                               policy=policy)
             df2.delete(self.ts())
+            suffix_dir2 = os.path.dirname(df2._datadir)
             suffix2 = os.path.basename(suffix_dir2)
             # suffix2 should be in invalidations file
             with open(invalidations_file, 'rb') as f:
@@ -6129,6 +6137,48 @@ class TestSuffixHashes(unittest.TestCase):
             # consolidate hashes
             assert_consolidation([suffix, suffix2])
 
+    def test_consolidate_hashes_raises_exception(self):
+        # verify that if consolidate_hashes raises an exception then suffixes
+        # are rehashed and a hashes.pkl is written
+        for policy in self.iter_policies():
+            df_mgr = self.df_router[policy]
+            # create something to hash
+            df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
+                                     policy=policy)
+            df.delete(self.ts())
+            suffix_dir = os.path.dirname(df._datadir)
+            suffix = os.path.basename(suffix_dir)
+            # no pre-existing hashes.pkl
+            with mock.patch.object(df_mgr, '_hash_suffix',
+                                   return_value='fake hash'):
+                with mock.patch.object(df_mgr, 'consolidate_hashes',
+                                       side_effect=Exception()):
+                    hashes = df_mgr.get_hashes('sda1', '0', [], policy)
+            self.assertEqual({suffix: 'fake hash'}, hashes)
+
+            # sanity check hashes file
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
+            hashes_file = os.path.join(part_path, diskfile.HASH_FILE)
+
+            with open(hashes_file, 'rb') as f:
+                self.assertEqual(hashes, pickle.load(f))
+
+            # repeat with pre-existing hashes.pkl
+            with mock.patch.object(df_mgr, '_hash_suffix',
+                                   return_value='new fake hash'):
+                with mock.patch.object(df_mgr, 'consolidate_hashes',
+                                       side_effect=Exception()):
+                    hashes = df_mgr.get_hashes('sda1', '0', [], policy)
+            self.assertEqual({suffix: 'new fake hash'}, hashes)
+
+            # sanity check hashes file
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
+            hashes_file = os.path.join(part_path, diskfile.HASH_FILE)
+            with open(hashes_file, 'rb') as f:
+                self.assertEqual(hashes, pickle.load(f))
+
     # invalidate_hash tests - error handling

     def test_invalidate_hash_bad_pickle(self):
@@ -6354,8 +6404,8 @@ class TestSuffixHashes(unittest.TestCase):
         self.assertFalse(os.path.exists(df._datadir))
 
     def test_hash_suffix_one_reclaim_and_one_valid_tombstone(self):
+        paths, suffix = find_paths_with_matching_suffixes(2, 1)
         for policy in self.iter_policies():
-            paths, suffix = find_paths_with_matching_suffixes(2, 1)
             df_mgr = self.df_router[policy]
             a, c, o = paths[suffix][0]
             df1 = df_mgr.get_diskfile(
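
A note on the pattern for readers who don't have Swift's on-disk layout
in their head: get_different_suffix_df() works because an object's
suffix directory is derived from the object name, so probing candidate
names ('o0', 'o1', ...) quickly finds one that lands in a different
suffix. Below is a minimal standalone sketch of that probing loop. It
assumes a simplified placement hash (MD5 of the /account/container/object
path, suffix = last three hex digits) and omits the per-cluster
HASH_PATH_PREFIX/HASH_PATH_SUFFIX salting that real deployments apply;
name_hash, suffix_of and different_suffix_name are illustrative names,
not Swift APIs.

    import hashlib

    def name_hash(account, container, obj):
        # simplified stand-in for Swift's placement hash; real clusters
        # salt the path with HASH_PATH_PREFIX/HASH_PATH_SUFFIX
        path = '/%s/%s/%s' % (account, container, obj)
        return hashlib.md5(path.encode('utf8')).hexdigest()

    def suffix_of(account, container, obj):
        # the suffix directory name is the last three hex digits of the
        # placement hash
        return name_hash(account, container, obj)[-3:]

    def different_suffix_name(account, container, obj):
        # probe 'o0', 'o1', ... until a candidate hashes into a different
        # suffix directory -- the same loop get_different_suffix_df() runs
        # against real diskfiles
        target = suffix_of(account, container, obj)
        i = 0
        while True:
            candidate = 'o%d' % i
            if suffix_of(account, container, candidate) != target:
                return candidate
            i += 1

With 4096 (16**3) possible suffixes, each probe collides with the target
suffix with probability ~1/4096, so the loop terminates after one or two
iterations in practice, which is why the helper gets away with an
unbounded while loop.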