Avoid infinite recursion in swift.obj.replicator.get_hashes.

Fixes bug 1089140.

Turns out that if an exception bails out of the pickle loading (e.g. a
zero-byte hashes_file), the if clause that determines whether or not to
write out a fresh hashes_file can evaluate to false, leading to
infinite recursion.

This patch fixes the infinite recursion generally by ensuring that a
new hashes_file is written whenever any exception is thrown while
loading the old one.

Change-Id: I344c5f8e261ce7c667bdafe1687263a4150b21dc
Author: Darrell Bishop
Date: 2012-12-11 15:32:09 -08:00
parent 1135c9e256
commit ea95d0092a
2 changed files with 23 additions and 1 deletion
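
For context, here is a minimal standalone sketch of the failure mode described
above. It is not the actual swift.obj.replicator code; the name
get_hashes_sketch and the simplified rewrite logic are illustrative assumptions
only.

    import os
    import pickle

    def get_hashes_sketch(hashes_file):
        # Hypothetical, stripped-down rendering of the pre-patch pattern;
        # not the real swift.obj.replicator.get_hashes.
        hashes = {}
        mtime = -1
        try:
            with open(hashes_file, 'rb') as fp:
                hashes = pickle.load(fp)           # EOFError on a zero-byte file
            mtime = os.path.getmtime(hashes_file)  # never reached; mtime stays -1
        except Exception:
            pass                                   # old code only set do_listdir here
        # Old guard: the zero-byte file still exists and its real mtime is
        # not -1, so neither clause is true and the rewrite is skipped...
        if not os.path.exists(hashes_file) or \
                os.path.getmtime(hashes_file) == mtime:
            with open(hashes_file, 'wb') as fp:
                pickle.dump(hashes, fp)
            return hashes
        # ...and the retry sees exactly the same state, forever.
        return get_hashes_sketch(hashes_file)

Run against a zero-byte file, this recurses until the interpreter hits its
recursion limit; the force_rewrite flag added below breaks the cycle on the
first pass.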


@@ -176,6 +176,7 @@ def get_hashes(partition_dir, recalculate=[], do_listdir=False,
     hashed = 0
     hashes_file = join(partition_dir, HASH_FILE)
     modified = False
+    force_rewrite = False
     hashes = {}
     mtime = -1
     try:
@@ -184,6 +185,7 @@ def get_hashes(partition_dir, recalculate=[], do_listdir=False,
         mtime = os.path.getmtime(hashes_file)
     except Exception:
         do_listdir = True
+        force_rewrite = True
     if do_listdir:
         for suff in os.listdir(partition_dir):
             if len(suff) == 3:
@@ -203,7 +205,7 @@ def get_hashes(partition_dir, recalculate=[], do_listdir=False,
             modified = True
     if modified:
         with lock_path(partition_dir):
-            if not os.path.exists(hashes_file) or \
+            if force_rewrite or not os.path.exists(hashes_file) or \
                     os.path.getmtime(hashes_file) == mtime:
                 write_pickle(
                     hashes, hashes_file, partition_dir, PICKLE_PROTOCOL)
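
Why this small change is enough: after a failed load, force_rewrite is True,
and Python's or short-circuits, so the existence and mtime checks are never
consulted and write_pickle runs on the first pass. A quick sanity check of the
two guards with hypothetical values (not Swift code):

    force_rewrite = True    # set because pickle.load raised
    file_exists = True      # the zero-byte hashes_file is still on disk
    mtimes_match = False    # the file's real mtime is not the -1 sentinel

    old_guard = (not file_exists) or mtimes_match
    new_guard = force_rewrite or (not file_exists) or mtimes_match
    print(old_guard, new_guard)  # False True -- only the new guard rewrites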


@@ -238,6 +238,26 @@ class TestObjectReplicator(unittest.TestCase):
                 part, recalculate=['a83'])
         self.assertEquals(i[0], 2)
 
+    def test_get_hashes_unmodified_and_zero_bytes(self):
+        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
+        mkdirs(df.datadir)
+        part = os.path.join(self.objects, '0')
+        open(os.path.join(part, object_replicator.HASH_FILE), 'w')
+        # Now the hash file is zero bytes.
+        i = [0]
+        def getmtime(filename):
+            i[0] += 1
+            return 1
+        with mock({'os.path.getmtime': getmtime}):
+            hashed, hashes = object_replicator.get_hashes(
+                part, recalculate=[])
+        # getmtime will actually not get called. Initially, the pickle.load
+        # will raise an exception first and later, force_rewrite will
+        # short-circuit the if clause to determine whether to write out a fresh
+        # hashes_file.
+        self.assertEquals(i[0], 0)
+        self.assertTrue('a83' in hashes)
+
     def test_get_hashes_modified(self):
         df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
         mkdirs(df.datadir)
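
Note that mock({...}) in the new test is a small helper from Swift's own
unit-test utilities, not the standard library. Its implementation is not shown
in this diff; the following is only a rough, assumed sketch of what such a
dotted-path patcher could look like (the name mock_sketch is hypothetical):

    import contextlib
    import importlib

    @contextlib.contextmanager
    def mock_sketch(replacements):
        # Hypothetical stand-in for the test suite's mock() helper: patch
        # attributes addressed by dotted path for the duration of the block.
        saved = []
        try:
            for dotted, new_value in replacements.items():
                module_name, attr = dotted.rsplit('.', 1)
                module = importlib.import_module(module_name)
                saved.append((module, attr, getattr(module, attr)))
                setattr(module, attr, new_value)
            yield
        finally:
            for module, attr, original in reversed(saved):
                setattr(module, attr, original)

Used as with mock_sketch({'os.path.getmtime': getmtime}): ..., the replacement
is visible to any code that looks getmtime up through os.path, and the original
is restored even if the block raises.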