commit 90660c90de
parent ebee4d4555
Author:     Clay Gerrard
Committed:  Alistair Coles
Date:       2021-03-11 09:52:52 -06:00

    relinker: use abs path index in part power replace

    Change-Id: I799bdacb7ef5850096c2178bfb12959f4205920d

 8 changed files with 87 additions and 39 deletions
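
At a glance: replace_partition_in_path() no longer locates the partition
component by counting path components from the right (with an is_hash_dir flag
to distinguish a file path from a hash dir); as the diffs below show, callers
now pass the devices root and the function indexes from the left. A minimal
before/after sketch of the call shapes (bodies are stubs and the paths are
illustrative; the real function is shown in full in the swift.common.utils
hunk below):

    # Signature comparison only -- not the real implementation.
    def replace_partition_in_path_old(path, part_power, is_hash_dir=False):
        ...

    def replace_partition_in_path_new(devices, path, part_power):
        ...

    # old: replace_partition_in_path_old(file_path, 15)
    #      replace_partition_in_path_old(hash_dir, 15, is_hash_dir=True)
    # new: replace_partition_in_path_new('/srv/node', file_path, 15)
    #      replace_partition_in_path_new('/srv/node', hash_dir, 15)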


@@ -187,9 +187,9 @@ shows the mapping between old and new location::

   >>> from swift.common.utils import replace_partition_in_path
   >>> old='objects/16003/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data'
-  >>> replace_partition_in_path(old, 14)
+  >>> replace_partition_in_path('', '/sda/' + old, 14)
   'objects/16003/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data'
-  >>> replace_partition_in_path(old, 15)
+  >>> replace_partition_in_path('', '/sda/' + old, 15)
   'objects/32007/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data'

 Using the original partition power (14) it returned the same path; however
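
The doc example above passes an empty devices root. With a more typical layout
the call looks like the following sketch, which assumes devices are mounted
under /srv/node and follows the new implementation of
replace_partition_in_path shown below (the partition numbers are the same ones
used in the existing doc example):

    >>> from swift.common.utils import replace_partition_in_path
    >>> old = '/srv/node/sda/objects/16003/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data'
    >>> replace_partition_in_path('/srv/node', old, 15)
    '/srv/node/sda/objects/32007/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data'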


@@ -210,12 +210,12 @@ def hook_post_partition(logger, states, step, policy, diskfile_manager,
         device, step, num_parts_done, len(states["state"])))


-def hashes_filter(next_part_power, suff_path, hashes):
+def hashes_filter(devices, next_part_power, suff_path, hashes):
     hashes = list(hashes)
     for hsh in hashes:
         fname = os.path.join(suff_path, hsh)
         if fname == replace_partition_in_path(
-                fname, next_part_power, is_hash_dir=True):
+                devices, fname, next_part_power):
             hashes.remove(hsh)
     return hashes
@@ -280,7 +280,8 @@ def relink(conf, logger, device):
     relink_hook_post_partition = partial(hook_post_partition, logger,
                                          states, STEP_RELINK, policy,
                                          diskfile_mgr)
-    relink_hashes_filter = partial(hashes_filter, next_part_power)
+    relink_hashes_filter = partial(hashes_filter, conf['devices'],
+                                   next_part_power)

     locations = audit_location_generator(
         conf['devices'],
@@ -297,7 +298,8 @@ def relink(conf, logger, device):
         locations = RateLimitedIterator(
             locations, conf['files_per_second'])
     for fname, _, _ in locations:
-        newfname = replace_partition_in_path(fname, next_part_power)
+        newfname = replace_partition_in_path(conf['devices'], fname,
+                                             next_part_power)
         try:
             logger.debug('Relinking %s to %s', fname, newfname)
             diskfile.relink_paths(fname, newfname)
@@ -350,7 +352,8 @@ def cleanup(conf, logger, device):
     cleanup_hook_post_partition = partial(hook_post_partition, logger,
                                           states, STEP_CLEANUP, policy,
                                           diskfile_mgr)
-    cleanup_hashes_filter = partial(hashes_filter, next_part_power)
+    cleanup_hashes_filter = partial(hashes_filter, conf['devices'],
+                                    next_part_power)

     locations = audit_location_generator(
         conf['devices'],
@@ -375,8 +378,7 @@ def cleanup(conf, logger, device):
         # most up to date set of files. The new location may have newer
         # files if it has been updated since relinked.
         new_hash_path = replace_partition_in_path(
-            hash_path, part_power, is_hash_dir=True)
+            conf['devices'], hash_path, part_power)
         if new_hash_path == hash_path:
             continue
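
relink_hashes_filter and cleanup_hashes_filter are built with
functools.partial, so the callable handed to the rest of the relinker still
supplies only (suff_path, hashes); conf['devices'] is simply pre-bound ahead
of next_part_power. A small self-contained sketch of that binding (the conf
values and paths are illustrative):

    from functools import partial

    def hashes_filter(devices, next_part_power, suff_path, hashes):
        # stand-in with the same signature as the relinker's hashes_filter
        return [(devices, next_part_power, suff_path, h) for h in hashes]

    conf = {'devices': '/srv/node'}
    next_part_power = 11
    relink_hashes_filter = partial(hashes_filter, conf['devices'],
                                   next_part_power)

    # later invoked with just the two remaining arguments
    suff_path = '/srv/node/sda/objects/700/c77'
    hsh = 'af088baea4806dcaba30bf07d9e64c77'
    assert relink_hashes_filter(suff_path, [hsh]) == [
        ('/srv/node', 11, suff_path, hsh)]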


@@ -5799,21 +5799,27 @@ def get_partition_for_hash(hex_hash, part_power):
     return struct.unpack_from('>I', raw_hash)[0] >> part_shift


-def replace_partition_in_path(path, part_power, is_hash_dir=False):
+def replace_partition_in_path(devices, path, part_power):
     """
     Takes a path and a partition power and returns the same path, but with the
     correct partition number. Most useful when increasing the partition power.

-    :param path: full path to a file, for example object .data file
+    :param devices: directory where devices are mounted (e.g. /srv/node)
+    :param path: full path to a object file or hashdir
     :param part_power: partition power to compute correct partition number
-    :param is_hash_dir: if True then ``path`` is the path to a hash dir,
-        otherwise ``path`` is the path to a file in a hash dir.
     :returns: Path with re-computed partition power
     """
+    offset_parts = devices.rstrip(os.sep).split(os.sep)
     path_components = path.split(os.sep)
-    part = get_partition_for_hash(path_components[-1 if is_hash_dir else -2],
-                                  part_power)
-    path_components[-3 if is_hash_dir else -4] = "%d" % part
+    if offset_parts == path_components[:len(offset_parts)]:
+        offset = len(offset_parts)
+    else:
+        raise ValueError('Path %r is not under device dir %r' % (
+            path, devices))
+    part = get_partition_for_hash(path_components[offset + 4], part_power)
+    path_components[offset + 2] = "%d" % part
     return os.sep.join(path_components)
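
Because a hash dir and a file inside it share the same prefix up to the hash
component, anchoring the indices at the devices root makes the is_hash_dir
flag unnecessary: counting from the left, offset + 2 is always the partition
and offset + 4 is always the hash, whether or not a filename follows. A
self-contained sketch mirroring the new logic (it assumes part_shift in
get_partition_for_hash is 32 - part_power; the real helpers live in
swift.common.utils):

    import binascii
    import os
    import struct

    def get_partition_for_hash(hex_hash, part_power):
        # assumed to match the helper above: the top part_power bits of the
        # first four bytes of the hash
        raw_hash = binascii.unhexlify(hex_hash)
        part_shift = 32 - part_power
        return struct.unpack_from('>I', raw_hash)[0] >> part_shift

    def replace_partition_in_path(devices, path, part_power):
        # layout under devices: <dev>/<datadir>/<part>/<suffix>/<hash>[/<file>]
        offset_parts = devices.rstrip(os.sep).split(os.sep)
        path_components = path.split(os.sep)
        if offset_parts == path_components[:len(offset_parts)]:
            offset = len(offset_parts)
        else:
            raise ValueError('Path %r is not under device dir %r' % (
                path, devices))
        part = get_partition_for_hash(path_components[offset + 4], part_power)
        path_components[offset + 2] = "%d" % part
        return os.sep.join(path_components)

    old = ('/srv/node/sda/objects/16003/a38/'
           'fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data')
    print(replace_partition_in_path('/srv/node', old, 15))
    # prints: /srv/node/sda/objects/32007/a38/fa0fcec07328d068e24ccbf2a62f2a38/1467658208.57179.data

    # the same call works for the hash dir itself -- no flag required
    print(replace_partition_in_path('/srv/node', os.path.dirname(old), 15))
    # prints: /srv/node/sda/objects/32007/a38/fa0fcec07328d068e24ccbf2a62f2a38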


@@ -1859,7 +1859,7 @@ class BaseDiskFileWriter(object):
         new_target_path = None
         if self.next_part_power:
             new_target_path = replace_partition_in_path(
-                target_path, self.next_part_power)
+                self.manager.devices, target_path, self.next_part_power)
             if target_path != new_target_path:
                 try:
                     fsync_dir(os.path.dirname(target_path))
@@ -1959,7 +1959,7 @@
         else:
             prev_part_power = int(self.next_part_power) - 1
             old_target_path = replace_partition_in_path(
-                cur_path, prev_part_power)
+                self.manager.devices, cur_path, prev_part_power)
             old_target_dir = os.path.dirname(old_target_path)
             try:
                 self.manager.cleanup_ondisk_files(old_target_dir)
@@ -3095,9 +3095,10 @@ class ECDiskFileWriter(BaseDiskFileWriter):
         new_data_file_path = new_durable_data_file_path = None
         if self.next_part_power:
             new_data_file_path = replace_partition_in_path(
-                data_file_path, self.next_part_power)
+                self.manager.devices, data_file_path, self.next_part_power)
             new_durable_data_file_path = replace_partition_in_path(
-                durable_data_file_path, self.next_part_power)
+                self.manager.devices, durable_data_file_path,
+                self.next_part_power)
         try:
             try:
                 os.rename(data_file_path, durable_data_file_path)
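
For cleanup, the writer maps in the opposite direction: prev_part_power is
next_part_power - 1, and the current path is fed back through
replace_partition_in_path to find where the file lived under the old power.
Assuming the partition is the top part_power bits of the hash (as in the
sketch after the utils hunk above), dropping one bit simply halves the
partition number, which is why the doc example's partitions pair up the way
they do:

    # 32007 at part power 15 maps back to 16003 at part power 14
    assert 32007 >> 1 == 16003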


@@ -27,7 +27,7 @@ from swiftclient import client
 from swift.cli.relinker import main as relinker_main
 from swift.common.manager import Manager, Server
 from swift.common.ring import RingBuilder
-from swift.common.utils import replace_partition_in_path
+from swift.common.utils import replace_partition_in_path, readconf
 from swift.obj.diskfile import get_data_dir

 from test.probe.common import ECProbeTest, ProbeTest, ReplProbeTest
@@ -50,6 +50,8 @@ class TestPartPowerIncrease(ProbeTest):
         self.data = ' ' * getattr(self.policy, 'ec_segment_size', 1)
         self.conf_files = Server('object').conf_files()
+        self.devices = [readconf(conf_file)['app:object-server']['devices']
+                        for conf_file in self.conf_files]

     def tearDown(self):
         # Keep a backup copy of the modified .builder file
@@ -127,8 +129,13 @@ class TestPartPowerIncrease(ProbeTest):
         # Remember the new object locations
         new_locations = []
         for loc in org_locations:
+            for dev_root in self.devices:
+                if loc.startswith(dev_root):
+                    break
+            else:
+                self.fail('Unable to find device for %s' % loc)
             new_locations.append(replace_partition_in_path(
-                str(loc), self.object_ring.part_power + 1))
+                dev_root, str(loc), self.object_ring.part_power + 1))

         # Overwrite existing object - to ensure that older timestamp files
         # will be cleaned up properly later
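
The probe test picks the matching device root with a for/else: the else branch
runs only when the loop finishes without hitting break, i.e. when no
configured root is a prefix of the location. A stand-alone illustration of the
pattern (device roots and the location are made up):

    devices = ['/srv/node1', '/srv/node2']
    loc = '/srv/node2/sdb1/objects/700/c77/af088baea4806dcaba30bf07d9e64c77'

    for dev_root in devices:
        if loc.startswith(dev_root):
            break          # found the root; the else clause is skipped
    else:
        raise AssertionError('Unable to find device for %s' % loc)

    print(dev_root)        # prints: /srv/node2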


@@ -601,7 +601,7 @@ class TestRelinker(unittest.TestCase):
         # partition!
         self._setup_object(lambda part: part < 2 ** (PART_POWER - 1))
         with mock.patch('swift.cli.relinker.replace_partition_in_path',
-                        lambda *args, **kwargs: args[0]):
+                        lambda *args, **kwargs: args[1]):
             self.assertEqual(0, relinker.main([
                 'cleanup',
                 '--swift-dir', self.testdir,
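
Since devices is now the first positional parameter, a mock that should act as
the identity on the path must return the second positional argument rather
than the first, which is all this hunk changes. A tiny sketch (the path is
illustrative):

    # identity-on-path stand-in matching the new (devices, path, part_power) order
    fake_replace = lambda *args, **kwargs: args[1]

    path = '/srv/node/sda1/objects/700/c77/af088baea4806dcaba30bf07d9e64c77/x.data'
    assert fake_replace('/srv/node', path, 11) == path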


@@ -4341,31 +4341,66 @@ cluster_dfw1 = http://dfw1.host/v1/
         old = '/s/n/d/o/700/c77/af088baea4806dcaba30bf07d9e64c77/f'
         new = '/s/n/d/o/1400/c77/af088baea4806dcaba30bf07d9e64c77/f'
         # Expected outcome
-        self.assertEqual(utils.replace_partition_in_path(old, 11), new)
+        self.assertEqual(utils.replace_partition_in_path('/s/n/', old, 11),
+                         new)

         # Make sure there is no change if the part power didn't change
-        self.assertEqual(utils.replace_partition_in_path(old, 10), old)
-        self.assertEqual(utils.replace_partition_in_path(new, 11), new)
+        self.assertEqual(utils.replace_partition_in_path('/s/n', old, 10), old)
+        self.assertEqual(utils.replace_partition_in_path('/s/n/', new, 11),
+                         new)

         # Check for new part = part * 2 + 1
         old = '/s/n/d/o/693/c77/ad708baea4806dcaba30bf07d9e64c77/f'
         new = '/s/n/d/o/1387/c77/ad708baea4806dcaba30bf07d9e64c77/f'
         # Expected outcome
-        self.assertEqual(utils.replace_partition_in_path(old, 11), new)
+        self.assertEqual(utils.replace_partition_in_path('/s/n', old, 11), new)

         # Make sure there is no change if the part power didn't change
-        self.assertEqual(utils.replace_partition_in_path(old, 10), old)
-        self.assertEqual(utils.replace_partition_in_path(new, 11), new)
+        self.assertEqual(utils.replace_partition_in_path('/s/n', old, 10), old)
+        self.assertEqual(utils.replace_partition_in_path('/s/n/', new, 11),
+                         new)

-        # check hash_dir option
+        # check hash_dir
         old = '/s/n/d/o/700/c77/af088baea4806dcaba30bf07d9e64c77'
         exp = '/s/n/d/o/1400/c77/af088baea4806dcaba30bf07d9e64c77'
-        actual = utils.replace_partition_in_path(old, 11, is_hash_dir=True)
+        actual = utils.replace_partition_in_path('/s/n', old, 11)
         self.assertEqual(exp, actual)
-        actual = utils.replace_partition_in_path(exp, 11, is_hash_dir=True)
+        actual = utils.replace_partition_in_path('/s/n', exp, 11)
         self.assertEqual(exp, actual)

+        # check longer devices path
+        old = '/s/n/1/2/d/o/700/c77/af088baea4806dcaba30bf07d9e64c77'
+        exp = '/s/n/1/2/d/o/1400/c77/af088baea4806dcaba30bf07d9e64c77'
+        actual = utils.replace_partition_in_path('/s/n/1/2', old, 11)
+        self.assertEqual(exp, actual)
+        actual = utils.replace_partition_in_path('/s/n/1/2', exp, 11)
+        self.assertEqual(exp, actual)
+
+        # check empty devices path
+        old = '/d/o/700/c77/af088baea4806dcaba30bf07d9e64c77'
+        exp = '/d/o/1400/c77/af088baea4806dcaba30bf07d9e64c77'
+        actual = utils.replace_partition_in_path('', old, 11)
+        self.assertEqual(exp, actual)
+        actual = utils.replace_partition_in_path('', exp, 11)
+        self.assertEqual(exp, actual)
+
+        # check path validation
+        path = '/s/n/d/o/693/c77/ad708baea4806dcaba30bf07d9e64c77/f'
+        with self.assertRaises(ValueError) as cm:
+            utils.replace_partition_in_path('/s/n1', path, 11)
+        self.assertEqual(
+            "Path '/s/n/d/o/693/c77/ad708baea4806dcaba30bf07d9e64c77/f' "
+            "is not under device dir '/s/n1'", str(cm.exception))
+
+        # check path validation - path lacks leading /
+        path = 's/n/d/o/693/c77/ad708baea4806dcaba30bf07d9e64c77/f'
+        with self.assertRaises(ValueError) as cm:
+            utils.replace_partition_in_path('/s/n', path, 11)
+        self.assertEqual(
+            "Path 's/n/d/o/693/c77/ad708baea4806dcaba30bf07d9e64c77/f' "
+            "is not under device dir '/s/n'", str(cm.exception))
+
     def test_round_robin_iter(self):
         it1 = iter([1, 2, 3])
         it2 = iter([4, 5])


@@ -4780,9 +4780,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
             mock_cleanup.assert_any_call(df_dir)

             # Make sure the translated path is also cleaned up
-            expected_fname = utils.replace_partition_in_path(
-                os.path.join(df_dir, "dummy"), 11)
-            expected_dir = os.path.dirname(expected_fname)
+            expected_dir = utils.replace_partition_in_path(
+                self.conf['devices'], df_dir, 11)
             mock_cleanup.assert_any_call(expected_dir)

             mock_cleanup.reset_mock()
@@ -4802,9 +4801,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
             mock_cleanup.assert_any_call(df_dir)

             # Make sure the path using the old part power is also cleaned up
-            expected_fname = utils.replace_partition_in_path(
-                os.path.join(df_dir, "dummy"), 9)
-            expected_dir = os.path.dirname(expected_fname)
+            expected_dir = utils.replace_partition_in_path(
+                self.conf['devices'], df_dir, 9)
             mock_cleanup.assert_any_call(expected_dir)

             mock_cleanup.reset_mock()
@@ -4821,9 +4819,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
                                   partition=partition,
                                   next_part_power=11,
                                   expect_error=True)
-        expected_fname = utils.replace_partition_in_path(
-            os.path.join(df_dir, "dummy"), 11)
-        expected_dir = os.path.dirname(expected_fname)
+        expected_dir = utils.replace_partition_in_path(
+            self.conf['devices'], df_dir, 11)
         self.assertEqual(os.listdir(df_dir), os.listdir(expected_dir))