py3: port common/ring/ and common/utils.py
I can't imagine us *not* having a py3 proxy server at some point, and
that proxy server is going to need a ring. While we're at it (and since
they were so close anyway), port

 * cli/ringbuilder.py and
 * common/linkat.py
 * common/daemon.py

Change-Id: Iec8d97e0ce925614a86b516c4c6ed82809d0ba9b
parent b33941feda
commit 642f79965a
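The port leans on a handful of recurring py2/py3 idioms rather than any
deep redesign: wrap dict views and map()/zip()/range() results in
list() wherever list semantics are needed, use // for integer division,
and normalize anything that feeds a hash or the serialized ring format
to bytes. A minimal sketch of the bytes-normalization idiom (an
illustration of the pattern, not code from this commit):

    import six

    def to_utf8(value):
        # Accept text or bytes; always return bytes, the only type
        # hashlib digests will take on py3.
        if isinstance(value, six.binary_type):
            return value
        return value.encode('utf8')

    # Elsewhere the mechanical changes look like:
    #   pids = list(options_by_pid.keys())       # views -> real lists
    #   elapsed_hours = int(elapsed) // 3600     # true integer division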
@@ -1067,7 +1067,7 @@ swift-ring-builder <builder_file> dispersion <search_filter> [options]
     print('Worst tier is %.06f (%s)' % (report['max_dispersion'],
                                         report['worst_tier']))
     if report['graph']:
-        replica_range = range(int(math.ceil(builder.replicas + 1)))
+        replica_range = list(range(int(math.ceil(builder.replicas + 1))))
         part_count_width = '%%%ds' % max(len(str(builder.parts)), 5)
         replica_counts_tmpl = ' '.join(part_count_width for i in
                                        replica_range)
@@ -173,7 +173,7 @@ class DaemonStrategy(object):
             yield per_worker_options

     def spawned_pids(self):
-        return self.options_by_pid.keys()
+        return list(self.options_by_pid.keys())

     def register_worker_start(self, pid, per_worker_options):
         self.logger.debug('Spawned worker %s with %r', pid, per_worker_options)
@@ -17,6 +17,8 @@ import os
 import ctypes
 from ctypes.util import find_library

+import six
+
 __all__ = ['linkat']


@@ -70,6 +72,11 @@ class Linkat(object):
         if not isinstance(olddirfd, int) or not isinstance(newdirfd, int):
             raise TypeError("fd must be an integer.")

+        if isinstance(oldpath, six.text_type):
+            oldpath = oldpath.encode('utf8')
+        if isinstance(newpath, six.text_type):
+            newpath = newpath.encode('utf8')
+
         return self._c_linkat(olddirfd, oldpath, newdirfd, newpath, flags)

 linkat = Linkat()
@@ -696,7 +696,7 @@ class RingBuilder(object):
                                  (dev['id'], dev['port']))

         int_replicas = int(math.ceil(self.replicas))
-        rep2part_len = map(len, self._replica2part2dev)
+        rep2part_len = list(map(len, self._replica2part2dev))
         # check the assignments of each part's replicas
         for part in range(self.parts):
             devs_for_part = []
@@ -932,7 +932,7 @@ class RingBuilder(object):
            more recently than min_part_hours.
         """
         self._part_moved_bitmap = bytearray(max(2 ** (self.part_power - 3), 1))
-        elapsed_hours = int(time() - self._last_part_moves_epoch) / 3600
+        elapsed_hours = int(time() - self._last_part_moves_epoch) // 3600
         if elapsed_hours <= 0:
             return
         for part in range(self.parts):
@@ -1182,7 +1182,7 @@ class RingBuilder(object):
         """
         # pick a random starting point on the other side of the ring
         quarter_turn = (self.parts // 4)
-        random_half = random.randint(0, self.parts / 2)
+        random_half = random.randint(0, self.parts // 2)
         start = (self._last_part_gather_start + quarter_turn +
                  random_half) % self.parts
         self.logger.debug('Gather start is %(start)s '
@@ -455,7 +455,7 @@ class CompositeRingBuilder(object):
             metadata
         """
         try:
-            with open(path_to_file, 'rb') as fp:
+            with open(path_to_file, 'rt') as fp:
                 metadata = json.load(fp)
             builder_files = [metadata['component_builder_files'][comp['id']]
                              for comp in metadata['components']]
@@ -494,7 +494,7 @@ class CompositeRingBuilder(object):
         if not self.components or not self._builder_files:
             raise ValueError("No composed ring to save.")
         # persist relative paths to builder files
-        with open(path_to_file, 'wb') as fp:
+        with open(path_to_file, 'wt') as fp:
             metadata = self.to_dict()
             # future-proofing:
             # - saving abs path to component builder files and this file should
@@ -640,7 +640,7 @@ class CompositeRingBuilder(object):
         """
         self._load_components()
         self.update_last_part_moves()
-        component_builders = zip(self._builder_files, self._builders)
+        component_builders = list(zip(self._builder_files, self._builders))
         # don't let the same builder go first each time
         shuffle(component_builders)
         results = {}
@@ -78,7 +78,7 @@ class RingData(object):
         """

         json_len, = struct.unpack('!I', gz_file.read(4))
-        ring_dict = json.loads(gz_file.read(json_len))
+        ring_dict = json.loads(gz_file.read(json_len).decode('ascii'))
         ring_dict['replica2part2dev_id'] = []

         if metadata_only:
@@ -111,7 +111,7 @@ class RingData(object):

         # See if the file is in the new format
         magic = gz_file.read(4)
-        if magic == 'R1NG':
+        if magic == b'R1NG':
             format_version, = struct.unpack('!H', gz_file.read(2))
             if format_version == 1:
                 ring_data = cls.deserialize_v1(
@@ -132,7 +132,7 @@ class RingData(object):

     def serialize_v1(self, file_obj):
         # Write out new-style serialization magic and version:
-        file_obj.write(struct.pack('!4sH', 'R1NG', 1))
+        file_obj.write(struct.pack('!4sH', b'R1NG', 1))
         ring = self.to_dict()

         # Only include next_part_power if it is set in the
@@ -145,8 +145,8 @@ class RingData(object):
         if next_part_power is not None:
             _text['next_part_power'] = next_part_power

-        json_encoder = json.JSONEncoder(sort_keys=True)
-        json_text = json_encoder.encode(_text)
+        json_text = json.dumps(_text, sort_keys=True,
+                               ensure_ascii=True).encode('ascii')
         json_len = len(json_text)
         file_obj.write(struct.pack('!I', json_len))
         file_obj.write(json_text)
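For context on what serialize_v1 writes: a ring file is a gzip stream
framed as a 4-byte magic, a 2-byte format version, a 4-byte big-endian
JSON length, and then the JSON metadata itself, which is why the JSON
must be encoded to bytes before its length is taken. A rough sketch of
that framing, under the assumptions above:

    import json
    import struct

    def write_ring_header(file_obj, metadata):
        # Magic and version: 4-byte string + unsigned short, big-endian.
        file_obj.write(struct.pack('!4sH', b'R1NG', 1))
        # Encode first so json_len counts bytes, not characters.
        json_text = json.dumps(metadata, sort_keys=True,
                               ensure_ascii=True).encode('ascii')
        file_obj.write(struct.pack('!I', len(json_text)))
        file_obj.write(json_text)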
@@ -418,8 +418,8 @@ class Ring(object):
             (d['region'], d['zone'], d['ip']) for d in primary_nodes)

         parts = len(self._replica2part2dev_id[0])
-        start = struct.unpack_from(
-            '>I', md5(str(part)).digest())[0] >> self._part_shift
+        part_hash = md5(str(part).encode('ascii')).digest()
+        start = struct.unpack_from('>I', part_hash)[0] >> self._part_shift
         inc = int(parts / 65536) or 1
         # Multiple loops for execution speed; the checks and bookkeeping get
         # simpler as you go along
@@ -843,11 +843,13 @@ class Request(object):
                        'https': 443}.get(parsed_path.scheme, 80)
         if parsed_path.scheme and parsed_path.scheme not in ['http', 'https']:
             raise TypeError('Invalid scheme: %s' % parsed_path.scheme)
+        path_info = urllib.parse.unquote(
+            parsed_path.path.decode('utf8') if six.PY3 else parsed_path.path)
         env = {
             'REQUEST_METHOD': 'GET',
             'SCRIPT_NAME': '',
             'QUERY_STRING': parsed_path.query,
-            'PATH_INFO': urllib.parse.unquote(parsed_path.path),
+            'PATH_INFO': path_info,
             'SERVER_NAME': server_name,
             'SERVER_PORT': str(server_port),
             'HTTP_HOST': '%s:%d' % (server_name, server_port),
@@ -65,6 +65,9 @@ import codecs
 utf8_decoder = codecs.getdecoder('utf-8')
 utf8_encoder = codecs.getencoder('utf-8')
 import six
+if not six.PY2:
+    utf16_decoder = codecs.getdecoder('utf-16')
+    utf16_encoder = codecs.getencoder('utf-16')
 from six.moves import cPickle as pickle
 from six.moves.configparser import (ConfigParser, NoSectionError,
                                     NoOptionError, RawConfigParser)
@@ -161,8 +164,8 @@ def IOPRIO_PRIO_VALUE(class_, data):
 # Used by hash_path to offer a bit more security when generating hashes for
 # paths. It simply appends this value to all paths; guessing the hash a path
 # will end up with would also require knowing this suffix.
-HASH_PATH_SUFFIX = ''
-HASH_PATH_PREFIX = ''
+HASH_PATH_SUFFIX = b''
+HASH_PATH_PREFIX = b''

 SWIFT_CONF_FILE = '/etc/swift/swift.conf'

@@ -215,17 +218,29 @@ def validate_hash_conf():
     global HASH_PATH_PREFIX
     if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
         hash_conf = ConfigParser()
-        if hash_conf.read(SWIFT_CONF_FILE):
+
+        if six.PY3:
+            # Use Latin1 to accept arbitrary bytes in the hash prefix/suffix
+            confs_read = hash_conf.read(SWIFT_CONF_FILE, encoding='latin1')
+        else:
+            confs_read = hash_conf.read(SWIFT_CONF_FILE)
+
+        if confs_read:
             try:
                 HASH_PATH_SUFFIX = hash_conf.get('swift-hash',
                                                  'swift_hash_path_suffix')
+                if six.PY3:
+                    HASH_PATH_SUFFIX = HASH_PATH_SUFFIX.encode('latin1')
             except (NoSectionError, NoOptionError):
                 pass
             try:
                 HASH_PATH_PREFIX = hash_conf.get('swift-hash',
                                                  'swift_hash_path_prefix')
+                if six.PY3:
+                    HASH_PATH_PREFIX = HASH_PATH_PREFIX.encode('latin1')
             except (NoSectionError, NoOptionError):
                 pass
+
         if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
             raise InvalidHashPathConfigError()

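The encoding='latin1' read is worth a note: latin-1 maps every byte
0x00-0xFF to the code point of the same value, so decoding a config
value as latin-1 and later calling .encode('latin1') round-trips
arbitrary bytes through py3's text-only ConfigParser without loss. A
tiny self-contained demonstration:

    raw = bytes(range(256))        # every possible byte value
    text = raw.decode('latin1')    # what a latin-1 ConfigParser read yields
    assert text.encode('latin1') == raw   # lossless round-trip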
@@ -253,9 +268,13 @@ def get_hmac(request_method, path, expires, key, digest=sha1):
     :returns: hexdigest str of the HMAC for the request using the specified
               digest algorithm.
     """
+    parts = (request_method, str(expires), path)
+    if not isinstance(key, six.binary_type):
+        key = key.encode('utf8')
     return hmac.new(
-        key,
-        '%s\n%s\n%s' % (request_method, expires, path),
+        key, b'\n'.join(
+            x if isinstance(x, six.binary_type) else x.encode('utf8')
+            for x in parts),
         digest).hexdigest()


@@ -381,13 +400,13 @@ def config_positive_int_value(value):
     integer > 0. (not including zero) Raises ValueError otherwise.
     """
     try:
-        value = int(value)
-        if value < 1:
+        result = int(value)
+        if result < 1:
             raise ValueError()
     except (TypeError, ValueError):
         raise ValueError(
             'Config option must be an positive int number, not "%s".' % value)
-    return value
+    return result


 def config_auto_int_value(value, default):
@@ -530,7 +549,7 @@ def load_libc_function(func_name, log_error=True,

 def generate_trans_id(trans_id_suffix):
     return 'tx%s-%010x%s' % (
-        uuid.uuid4().hex[:21], time.time(), quote(trans_id_suffix))
+        uuid.uuid4().hex[:21], int(time.time()), quote(trans_id_suffix))


 def get_policy_index(req_headers, res_headers):
@@ -545,6 +564,8 @@ def get_policy_index(req_headers, res_headers):
     """
     header = 'X-Backend-Storage-Policy-Index'
     policy_index = res_headers.get(header, req_headers.get(header))
+    if isinstance(policy_index, six.binary_type) and not six.PY2:
+        policy_index = policy_index.decode('ascii')
     return str(policy_index) if policy_index is not None else None


@@ -752,7 +773,7 @@ class FallocateWrapper(object):
             if float(free) <= float(FALLOCATE_RESERVE):
                 raise OSError(
                     errno.ENOSPC,
-                    'FALLOCATE_RESERVE fail %s <= %s' %
+                    'FALLOCATE_RESERVE fail %g <= %g' %
                     (free, FALLOCATE_RESERVE))
         args = {
             'fallocate': (fd, mode, offset, length),
@@ -2274,16 +2295,19 @@ def hash_path(account, container=None, object=None, raw_digest=False):
     """
     if object and not container:
         raise ValueError('container is required if object is provided')
-    paths = [account]
+    paths = [account if isinstance(account, six.binary_type)
+             else account.encode('utf8')]
     if container:
-        paths.append(container)
+        paths.append(container if isinstance(container, six.binary_type)
+                     else container.encode('utf8'))
     if object:
-        paths.append(object)
+        paths.append(object if isinstance(object, six.binary_type)
+                     else object.encode('utf8'))
     if raw_digest:
-        return md5(HASH_PATH_PREFIX + '/' + '/'.join(paths)
+        return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
                    + HASH_PATH_SUFFIX).digest()
     else:
-        return md5(HASH_PATH_PREFIX + '/' + '/'.join(paths)
+        return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
                    + HASH_PATH_SUFFIX).hexdigest()

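With the prefix/suffix stored as bytes and each path component coerced
to bytes, hash_path now accepts either text or utf-8 bytes on both
interpreters and should produce the same digest for equivalent input. A
usage sketch (values illustrative):

    from swift.common.utils import hash_path

    # Text and its utf-8 encoding hash identically:
    assert hash_path(u'AUTH_test', u'cont', u'obj') == \
        hash_path(b'AUTH_test', b'cont', b'obj')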
@@ -2391,9 +2415,9 @@ def lock_file(filename, timeout=10, append=False, unlink=True):
     flags = os.O_CREAT | os.O_RDWR
     if append:
         flags |= os.O_APPEND
-        mode = 'a+'
+        mode = 'a+b'
     else:
-        mode = 'r+'
+        mode = 'r+b'
     while True:
         fd = os.open(filename, flags)
         file_obj = os.fdopen(fd, mode)
@@ -3214,7 +3238,7 @@ def dump_recon_cache(cache_dict, cache_file, logger, lock_timeout=2,
             try:
                 existing_entry = cf.readline()
                 if existing_entry:
-                    cache_entry = json.loads(existing_entry)
+                    cache_entry = json.loads(existing_entry.decode('utf8'))
             except ValueError:
                 # file doesn't have a valid entry, we'll recreate it
                 pass
@@ -3224,7 +3248,9 @@ def dump_recon_cache(cache_dict, cache_file, logger, lock_timeout=2,
             try:
                 with NamedTemporaryFile(dir=os.path.dirname(cache_file),
                                         delete=False) as tf:
-                    tf.write(json.dumps(cache_entry, sort_keys=True) + '\n')
+                    cache_data = json.dumps(cache_entry, ensure_ascii=True,
+                                            sort_keys=True)
+                    tf.write(cache_data.encode('ascii') + b'\n')
                 if set_owner:
                     os.chown(tf.name, pwd.getpwnam(set_owner).pw_uid, -1)
                 renamer(tf.name, cache_file, fsync=False)
@@ -3372,10 +3398,22 @@ def get_valid_utf8_str(str_or_unicode):

     :param str_or_unicode: a string or an unicode which can be invalid utf-8
     """
-    if isinstance(str_or_unicode, six.text_type):
-        (str_or_unicode, _len) = utf8_encoder(str_or_unicode, 'replace')
-    (valid_utf8_str, _len) = utf8_decoder(str_or_unicode, 'replace')
-    return valid_utf8_str.encode('utf-8')
+    if six.PY2:
+        if isinstance(str_or_unicode, six.text_type):
+            (str_or_unicode, _len) = utf8_encoder(str_or_unicode, 'replace')
+        (valid_unicode_str, _len) = utf8_decoder(str_or_unicode, 'replace')
+    else:
+        # Apparently under py3 we need to go to utf-16 to collapse surrogates?
+        if isinstance(str_or_unicode, six.binary_type):
+            try:
+                (str_or_unicode, _len) = utf8_decoder(str_or_unicode,
                                                      'surrogatepass')
+            except UnicodeDecodeError:
+                (str_or_unicode, _len) = utf8_decoder(str_or_unicode,
+                                                      'replace')
+        (str_or_unicode, _len) = utf16_encoder(str_or_unicode, 'surrogatepass')
+        (valid_unicode_str, _len) = utf16_decoder(str_or_unicode, 'replace')
+    return valid_unicode_str.encode('utf-8')


 def list_from_csv(comma_separated_str):
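The utf-16 round-trip in the py3 branch exists to collapse surrogate
pairs: decoding utf-8 with 'surrogatepass' can yield surrogate code
points, and re-encoding them as utf-16 with 'surrogatepass' then
decoding with 'replace' turns a valid pair into the real character and
anything invalid into U+FFFD. A small standalone demonstration of the
effect (mirroring the test vectors further down):

    # b'\xed\xa0\xbc\xed\xb2\xa1' encodes a surrogate pair as two
    # 3-byte utf-8 sequences; the canonical form is b'\xf0\x9f\x82\xa1'.
    s = b'\xed\xa0\xbc\xed\xb2\xa1'.decode('utf8', 'surrogatepass')
    out = s.encode('utf-16', 'surrogatepass').decode('utf-16', 'replace')
    assert out.encode('utf-8') == b'\xf0\x9f\x82\xa1'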
@@ -3832,7 +3870,10 @@ def quote(value, safe='/'):
     """
     Patched version of urllib.quote that encodes utf-8 strings before quoting
     """
-    return _quote(get_valid_utf8_str(value), safe)
+    quoted = _quote(get_valid_utf8_str(value), safe)
+    if isinstance(value, six.binary_type):
+        quoted = quoted.encode('utf-8')
+    return quoted


 def get_expirer_container(x_delete_at, expirer_divisor, acc, cont, obj):
@@ -3937,11 +3978,11 @@ def iter_multipart_mime_documents(wsgi_input, boundary, read_chunk_size=4096):
     :returns: A generator of file-like objects for each part.
     :raises MimeInvalid: if the document is malformed
     """
-    boundary = '--' + boundary
+    boundary = b'--' + boundary
     blen = len(boundary) + 2  # \r\n
     try:
         got = wsgi_input.readline(blen)
-        while got == '\r\n':
+        while got == b'\r\n':
             got = wsgi_input.readline(blen)
     except (IOError, ValueError) as e:
         raise swift.common.exceptions.ChunkReadError(str(e))
@@ -3949,8 +3990,8 @@ def iter_multipart_mime_documents(wsgi_input, boundary, read_chunk_size=4096):
     if got.strip() != boundary:
         raise swift.common.exceptions.MimeInvalid(
             'invalid starting boundary: wanted %r, got %r', (boundary, got))
-    boundary = '\r\n' + boundary
-    input_buffer = ''
+    boundary = b'\r\n' + boundary
+    input_buffer = b''
     done = False
     while not done:
         it = _MultipartMimeFileLikeObject(wsgi_input, boundary, input_buffer,
@@ -4361,7 +4402,12 @@ def strict_b64decode(value, allow_line_breaks=False):
     :raises ValueError: if ``value`` is not a string, contains invalid
                         characters, or has insufficient padding
     '''
-    if not isinstance(value, six.string_types):
+    if isinstance(value, bytes):
+        try:
+            value = value.decode('ascii')
+        except UnicodeDecodeError:
+            raise ValueError
+    if not isinstance(value, six.text_type):
         raise ValueError
     # b64decode will silently discard bad characters, but we want to
     # treat them as an error
@@ -4390,7 +4436,7 @@ def md5_hash_for_file(fname):
     """
     with open(fname, 'rb') as f:
         md5sum = md5()
-        for block in iter(lambda: f.read(MD5_BLOCK_READ_BYTES), ''):
+        for block in iter(lambda: f.read(MD5_BLOCK_READ_BYTES), b''):
             md5sum.update(block)
     return md5sum.hexdigest()

@@ -71,7 +71,7 @@ EMPTY_ETAG = md5().hexdigest()
 # try not to import this module from swift
 if not os.path.basename(sys.argv[0]).startswith('swift'):
     # never patch HASH_PATH_SUFFIX AGAIN!
-    utils.HASH_PATH_SUFFIX = 'endcap'
+    utils.HASH_PATH_SUFFIX = b'endcap'


 EC_TYPE_PREFERENCE = [
@@ -34,6 +34,11 @@ from swift.common.ring import RingBuilder

 from test.unit import Timeout

+try:
+    from itertools import zip_longest
+except ImportError:
+    from itertools import izip_longest as zip_longest
+

 class RunSwiftRingBuilderMixin(object):

@@ -115,8 +120,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
         output = output.replace(self.tempfile, '__RINGFILE__')
         stub = stub.replace('__BUILDER_ID__', builder_id)
         for i, (value, expected) in enumerate(
-                itertools.izip_longest(
-                    output.splitlines(), stub.splitlines())):
+                zip_longest(output.splitlines(), stub.splitlines())):
             # N.B. differences in trailing whitespace are ignored!
             value = (value or '').rstrip()
             expected = (expected or '').rstrip()
@@ -25,6 +25,7 @@ from collections import Counter, defaultdict
 from math import ceil
 from tempfile import mkdtemp
 from shutil import rmtree
+import sys
 import random
 import uuid
 import itertools
@@ -649,7 +650,7 @@ class TestRingBuilder(unittest.TestCase):

         self.assertEqual({0: 2, 1: 2, 2: 2}, dict(counts['zone']))
         # each part is assigned once to six unique devices
-        self.assertEqual((counts['dev_id'].values()), [1] * 6)
+        self.assertEqual(list(counts['dev_id'].values()), [1] * 6)
         self.assertEqual(len(set(counts['dev_id'].keys())), 6)

     def test_multitier_part_moves_with_0_min_part_hours(self):
@@ -2114,7 +2115,7 @@ class TestRingBuilder(unittest.TestCase):
         with self.assertRaises(AttributeError) as cm:
             rb.id
         self.assertIn('id attribute has not been initialised',
-                      cm.exception.message)
+                      cm.exception.args[0])

         builder_file = os.path.join(self.testdir, 'test_save.builder')
         orig_rb.save(builder_file)
@@ -2131,7 +2132,7 @@ class TestRingBuilder(unittest.TestCase):
         with self.assertRaises(AttributeError) as cm:
             loaded_rb.id
         self.assertIn('id attribute has not been initialised',
-                      cm.exception.message)
+                      cm.exception.args[0])

         # check saving assigns an id, and that it is persisted
         loaded_rb.save(builder_file)
@@ -2169,7 +2170,7 @@ class TestRingBuilder(unittest.TestCase):
         with self.assertRaises(AttributeError) as cm:
             rb.id
         self.assertIn('id attribute has not been initialised',
-                      cm.exception.message)
+                      cm.exception.args[0])
         # save must succeed for id to be assigned
         with self.assertRaises(IOError):
             rb.save(os.path.join(
@@ -2177,7 +2178,7 @@ class TestRingBuilder(unittest.TestCase):
         with self.assertRaises(AttributeError) as cm:
             rb.id
         self.assertIn('id attribute has not been initialised',
-                      cm.exception.message)
+                      cm.exception.args[0])

     def test_search_devs(self):
         rb = ring.RingBuilder(8, 3, 1)
@@ -2480,6 +2481,8 @@ class TestRingBuilder(unittest.TestCase):
             (0, 0, '127.0.0.1', 3): [0, 256, 0, 0],
         })

+    @unittest.skipIf(sys.version_info >= (3,),
+                     "Seed-specific tests don't work well on py3")
     def test_undispersable_zone_converge_on_balance(self):
         rb = ring.RingBuilder(8, 6, 0)
         dev_id = 0
@@ -2535,6 +2538,8 @@ class TestRingBuilder(unittest.TestCase):
         self.assertEqual(rb.get_balance(), 0.390625)
         self.assertEqual(rb.dispersion, 16.6015625)

+    @unittest.skipIf(sys.version_info >= (3,),
+                     "Seed-specific tests don't work well on py3")
     def test_undispersable_server_converge_on_balance(self):
         rb = ring.RingBuilder(8, 6, 0)
         dev_id = 0
@@ -230,7 +230,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             compose_rings(builders)
         self.assertIn('Same region found in different rings',
-                      cm.exception.message)
+                      cm.exception.args[0])

     def test_composite_only_one_ring_in_the_args_error(self):
         builders = self.create_sample_ringbuilders(1)
@@ -238,7 +238,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
             compose_rings(builders)
         self.assertIn(
             'Two or more component builders are required.',
-            cm.exception.message)
+            cm.exception.args[0])

     def test_composite_same_device_in_the_different_rings_error(self):
         builders = self.create_sample_ringbuilders(2)
@@ -267,7 +267,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         self.assertIn(
             'Duplicate ip/port/device combination %(ip)s/%(port)s/%(device)s '
             'found in builders at indexes 0 and 2' %
-            same_device, cm.exception.message)
+            same_device, cm.exception.args[0])

     def test_different_part_power_error(self):
         # create a ring builder
@@ -296,7 +296,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             compose_rings(builders)
         self.assertIn("All builders must have same value for 'part_power'",
-                      cm.exception.message)
+                      cm.exception.args[0])

     def test_compose_rings_float_replica_count_builder_error(self):
         builders = self.create_sample_ringbuilders(1)
@@ -322,8 +322,8 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):

         with self.assertRaises(ValueError) as cm:
             compose_rings(builders)
-        self.assertIn("Problem with builders", cm.exception.message)
-        self.assertIn("Non integer replica count", cm.exception.message)
+        self.assertIn("Problem with builders", cm.exception.args[0])
+        self.assertIn("Non integer replica count", cm.exception.args[0])

     def test_compose_rings_rebalance_needed(self):
         builders = self.create_sample_ringbuilders(2)
@@ -334,8 +334,8 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         self.assertTrue(builders[1].devs_changed)  # sanity check
         with self.assertRaises(ValueError) as cm:
             compose_rings(builders)
-        self.assertIn("Problem with builders", cm.exception.message)
-        self.assertIn("Builder needs rebalance", cm.exception.message)
+        self.assertIn("Problem with builders", cm.exception.args[0])
+        self.assertIn("Builder needs rebalance", cm.exception.args[0])
         # after rebalance, that works (sanity)
         builders[1].rebalance()
         compose_rings(builders)
@@ -367,7 +367,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):

     def test_ring_swap(self):
         # sanity
-        builders = sorted(self.create_sample_ringbuilders(2))
+        builders = self.create_sample_ringbuilders(2)
         rd = compose_rings(builders)
         rd.save(self.output_ring)
         got_ring = Ring(self.output_ring)
@@ -377,7 +377,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         self.assertDevices(got_ring, builders)

         # even if swapped, it works
-        reverse_builders = sorted(builders, reverse=True)
+        reverse_builders = builders[::-1]
         self.assertNotEqual(reverse_builders, builders)
         rd = compose_rings(reverse_builders)
         rd.save(self.output_ring)
@@ -396,7 +396,7 @@ class TestCompositeBuilder(BaseTestCompositeBuilder):
         self.assertDevices(got_ring, builders)

         self.assertIn("composite ring is not ordered by ring order",
-                      cm.exception.message)
+                      cm.exception.args[0])


 class TestCompositeRingBuilder(BaseTestCompositeBuilder):
@@ -429,7 +429,7 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             cb.compose(require_modified=True)
         self.assertIn('None of the component builders has been modified',
-                      cm.exception.message)
+                      cm.exception.args[0])
         self.assertEqual(1, cb.version)
         # ...but by default will compose again despite no changes to components
         cb.compose(force=True).save(self.output_ring)
@@ -530,12 +530,12 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
                     CompositeRingBuilder.load(bad_file)
                 self.assertIn(
                     "File does not contain valid composite ring data",
-                    cm.exception.message)
+                    cm.exception.args[0])
             except AssertionError as err:
                 raise AssertionError('With content %r: %s' % (content, err))

         for content in ('', 'not json', json.dumps({}), json.dumps([])):
-            check_bad_content(content)
+            check_bad_content(content.encode('ascii'))

         good_content = {
             'components': [
@@ -548,7 +548,7 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
         for missing in good_content:
             bad_content = dict(good_content)
             bad_content.pop(missing)
-            check_bad_content(json.dumps(bad_content))
+            check_bad_content(json.dumps(bad_content).encode('ascii'))

     def test_save_errors(self):
         cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
@@ -556,7 +556,7 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
         def do_test(cb):
             with self.assertRaises(ValueError) as cm:
                 cb.save(cb_file)
-            self.assertIn("No composed ring to save", cm.exception.message)
+            self.assertIn("No composed ring to save", cm.exception.args[0])

         do_test(CompositeRingBuilder())
         do_test(CompositeRingBuilder([]))
@@ -635,7 +635,7 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             cb.rebalance()
         self.assertIn('Two or more component builders are required',
-                      cm.exception.message)
+                      cm.exception.args[0])

         builders = self.create_sample_ringbuilders(2)
         cb, builder_files = self._make_composite_builder(builders)
@@ -668,7 +668,7 @@ class TestCompositeRingBuilder(BaseTestCompositeBuilder):
         # sanity, it is impossible to compose un-rebalanced component rings
         with self.assertRaises(ValueError) as cm:
             cb.compose()
-        self.assertIn("Builder needs rebalance", cm.exception.message)
+        self.assertIn("Builder needs rebalance", cm.exception.args[0])
         # but ok to compose after rebalance
         cb.rebalance()
         rd = cb.compose()
@@ -727,14 +727,14 @@ class TestLoadComponents(BaseTestCompositeBuilder):
             self._call_method_under_test(cb, builder_files,
                                          force=force)
         self.assertIn('Two or more component builders are required',
-                      cm.exception.message)
+                      cm.exception.args[0])

         cb = CompositeRingBuilder()
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, builder_files,
                                          force=force)
         self.assertIn('Two or more component builders are required',
-                      cm.exception.message)
+                      cm.exception.args[0])

         builders = self.create_sample_ringbuilders(3)
         builder_files = self.save_builders(builders)
@@ -755,7 +755,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, builder_files,
                                          force=force)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("Problem with builder at index %s" % no_id,
                       error_lines[0])
         self.assertIn("id attribute has not been initialised",
@@ -785,7 +785,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         def do_check(force):
             with self.assertRaises(ValueError) as cm:
                 self._call_method_under_test(cb, force=force)
-            error_lines = cm.exception.message.split('\n')
+            error_lines = cm.exception.args[0].split('\n')
             self.assertIn("Builder id %r used at indexes 0, 2" %
                           builders[0].id, error_lines[0])
             self.assertFalse(error_lines[1:])
@@ -799,7 +799,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         orig_version = cb.version
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, builder_files, **kwargs)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("None of the component builders has been modified",
                       error_lines[0])
         self.assertFalse(error_lines[1:])
@@ -839,7 +839,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         self.save_builders([old_builders[0], builders[1]])
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("Invalid builder change at index 0", error_lines[0])
         self.assertIn("Older builder version", error_lines[0])
         self.assertFalse(error_lines[1:])
@@ -849,7 +849,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         self.save_builders([old_builders[0], builders[1]])
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("Invalid builder change at index 0", error_lines[0])
         self.assertIn("Older builder version", error_lines[0])
         self.assertFalse(error_lines[1:])
@@ -869,7 +869,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(
                 cb, self.save_builders(bad_builders))
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertFalse(error_lines[1:])
         self.assertEqual(1, cb.version)
         # unless we ignore errors
@@ -892,7 +892,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         different_files = self.save_builders([builders[0], builders[2]])
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, different_files)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("Invalid builder change at index 1", error_lines[0])
         self.assertIn("Attribute mismatch for id", error_lines[0])
         self.assertFalse(error_lines[1:])
@@ -908,7 +908,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         builder_files.reverse()
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, builder_files)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         for i, line in enumerate(error_lines):
             self.assertIn("Invalid builder change at index %s" % i, line)
             self.assertIn("Attribute mismatch for id", line)
@@ -925,7 +925,7 @@ class TestLoadComponents(BaseTestCompositeBuilder):
         self.save_builders(builders)
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         for i, line in enumerate(error_lines):
             self.assertIn("Invalid builder change at index 0", line)
             self.assertIn("Attribute mismatch for replicas", line)
@@ -949,7 +949,7 @@ class TestComposeLoadComponents(TestLoadComponents):
         self.save_builders(builders)
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         for i, line in enumerate(error_lines):
             self.assertIn("Invalid builder change at index 0", line)
             self.assertIn("Attribute mismatch for replicas", line)
@@ -958,7 +958,7 @@ class TestComposeLoadComponents(TestLoadComponents):
         # validate will fail because the builder needs rebalancing
         with self.assertRaises(ValueError) as cm:
             self._call_method_under_test(cb, force=True)
-        error_lines = cm.exception.message.split('\n')
+        error_lines = cm.exception.args[0].split('\n')
         self.assertIn("Problem with builders", error_lines[0])
         self.assertIn("Builder needs rebalance", error_lines[1])
         self.assertFalse(error_lines[2:])
@@ -976,17 +976,18 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         if rebalance:
             rb.rebalance()
             self.assertEqual(self._partition_counts(rb),
-                             {0: 256, 1: 256, 2: 256})  # sanity check
+                             [256, 256, 256])  # sanity check
         return rb

     def _partition_counts(self, builder):
         """
-        Returns a dictionary mapping device id's to (number of
+        Returns an array mapping device id's to (number of
         partitions assigned to that device).
         """
-        return Counter(builder.devs[dev_id]['id']
-                       for part2dev_id in builder._replica2part2dev
-                       for dev_id in part2dev_id)
+        c = Counter(builder.devs[dev_id]['id']
+                    for part2dev_id in builder._replica2part2dev
+                    for dev_id in part2dev_id)
+        return [c[d['id']] for d in builder.devs]

     def get_moved_parts(self, after, before):
         def uniqueness(dev):
@@ -1020,7 +1021,7 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):

         # all cobuilders can perform initial rebalance
         cb.rebalance()
-        exp = {0: 256, 1: 256, 2: 256}
+        exp = [256, 256, 256]
         self.assertEqual(exp, self._partition_counts(builders[0]))
         self.assertEqual(exp, self._partition_counts(builders[1]))
         self.assertEqual(exp, self._partition_counts(builders[2]))
@@ -1057,13 +1058,13 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         rb1_parts_moved = self.get_moved_parts(builders[0], old_builders[0])
         self.assertEqual(192, len(rb1_parts_moved))
         self.assertEqual(self._partition_counts(builders[0]),
-                         {0: 192, 1: 192, 2: 192, 3: 192})
+                         [192, 192, 192, 192])

         rb2_parts_moved = self.get_moved_parts(builders[1], old_builders[1])
         self.assertEqual(64, len(rb2_parts_moved))
         counts = self._partition_counts(builders[1])
         self.assertEqual(counts[3], 64)
-        self.assertEqual([234, 235, 235], sorted(counts.values()[:3]))
+        self.assertEqual([234, 235, 235], sorted(counts[:3]))
         self.assertFalse(rb2_parts_moved.intersection(rb1_parts_moved))

         # rb3 can't rebalance - all parts moved while rebalancing rb1 and rb2
@@ -1191,7 +1192,7 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         rb1_parts_moved = self.get_moved_parts(rb1s[1], rb1s[0])
         self.assertEqual(192, len(rb1_parts_moved))
         self.assertEqual(self._partition_counts(rb1s[1]),
-                         {0: 192, 1: 192, 2: 192, 3: 192})
+                         [192, 192, 192, 192])

         # rebalancing rb2 - rb2 in isolation could potentially move all parts
         # so would move 192 parts to new device, but it is constrained by rb1
@@ -1200,7 +1201,7 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         self.assertEqual(64, len(rb2_parts_moved))
         counts = self._partition_counts(rb2s[3])
         self.assertEqual(counts[3], 64)
-        self.assertEqual([234, 235, 235], sorted(counts.values()[:3]))
+        self.assertEqual([234, 235, 235], sorted(counts[:3]))
         self.assertFalse(rb2_parts_moved.intersection(rb1_parts_moved))
         self.assertEqual(192, self.num_parts_can_move(rb2s[3]))
         self.assertEqual(64, self.num_parts_can_move(rb1s[3]))
@@ -1255,7 +1256,7 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         # rebalance - after that expect no more updates
         with mock_update_last_part_moves() as update_calls:
             cb.update_last_part_moves()
-        self.assertEqual(sorted([rb1, rb2]), sorted(update_calls))
+        self.assertEqual({rb1, rb2}, set(update_calls))

         with mock_update_last_part_moves() as update_calls:
             with mock_can_part_move() as can_part_move_calls:
@@ -1263,14 +1264,14 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
         self.assertFalse(update_calls)
         # rb1 has never been rebalanced so no calls propagate from its
         # can_part_move method to its superclass _can_part_move method
-        self.assertEqual([rb2], can_part_move_calls.keys())
+        self.assertEqual({rb2}, set(can_part_move_calls))

         with mock_update_last_part_moves() as update_calls:
             with mock_can_part_move() as can_part_move_calls:
                 rb1.rebalance()
         self.assertFalse(update_calls)
         # rb1 is being rebalanced so gets checked, and rb2 also gets checked
-        self.assertEqual(sorted([rb1, rb2]), sorted(can_part_move_calls))
+        self.assertEqual({rb1, rb2}, set(can_part_move_calls))
         self.assertEqual(768, len(can_part_move_calls[rb1]))
         self.assertEqual(768, len(can_part_move_calls[rb2]))
@@ -40,8 +40,8 @@ class TestRingBase(unittest.TestCase):
     def setUp(self):
         self._orig_hash_suffix = utils.HASH_PATH_SUFFIX
         self._orig_hash_prefix = utils.HASH_PATH_PREFIX
-        utils.HASH_PATH_SUFFIX = 'endcap'
-        utils.HASH_PATH_PREFIX = ''
+        utils.HASH_PATH_SUFFIX = b'endcap'
+        utils.HASH_PATH_PREFIX = b''

     def tearDown(self):
         utils.HASH_PATH_SUFFIX = self._orig_hash_suffix
@@ -150,8 +150,8 @@ class TestRingData(unittest.TestCase):
             [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
         rd.save(ring_fname1)
         rd.save(ring_fname2)
-        with open(ring_fname1) as ring1:
-            with open(ring_fname2) as ring2:
+        with open(ring_fname1, 'rb') as ring1:
+            with open(ring_fname2, 'rb') as ring2:
                 self.assertEqual(ring1.read(), ring2.read())

     def test_permissions(self):
@@ -160,8 +160,12 @@ class TestRingData(unittest.TestCase):
             [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])],
             [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
         rd.save(ring_fname)
-        self.assertEqual(oct(stat.S_IMODE(os.stat(ring_fname).st_mode)),
-                         '0644')
+        ring_mode = stat.S_IMODE(os.stat(ring_fname).st_mode)
+        expected_mode = (stat.S_IRUSR | stat.S_IWUSR |
+                         stat.S_IRGRP | stat.S_IROTH)
+        self.assertEqual(
+            ring_mode, expected_mode,
+            'Ring has mode 0%o, expected 0%o' % (ring_mode, expected_mode))

     def test_replica_count(self):
         rd = ring.RingData(
@@ -227,8 +231,8 @@ class TestRing(TestRingBase):
         self.assertEqual(self.ring.reload_time, self.intended_reload_time)
         self.assertEqual(self.ring.serialized_path, self.testgz)
         # test invalid endcap
-        with mock.patch.object(utils, 'HASH_PATH_SUFFIX', ''), \
-                mock.patch.object(utils, 'HASH_PATH_PREFIX', ''), \
+        with mock.patch.object(utils, 'HASH_PATH_SUFFIX', b''), \
+                mock.patch.object(utils, 'HASH_PATH_PREFIX', b''), \
                 mock.patch.object(utils, 'SWIFT_CONF_FILE', ''):
            self.assertRaises(SystemExit, ring.Ring, self.testdir, 'whatever')

@@ -490,6 +494,8 @@ class TestRing(TestRingBase):
         self.ring.devs.append(new_dev)
         self.ring._rebuild_tier_data()

+    @unittest.skipIf(sys.version_info >= (3,),
+                     "Seed-specific tests don't work well on py3")
     def test_get_more_nodes(self):
         # Yes, these tests are deliberately very fragile. We want to make sure
         # that if someone changes the results the ring produces, they know it.
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import sys
 import unittest
 from collections import defaultdict

@@ -663,6 +664,8 @@ class TestUtils(unittest.TestCase):
         }
         self.assertEqual(device, expected)

+    @unittest.skipIf(sys.version_info >= (3,),
+                     "Seed-specific tests don't work well on py3")
     def test_dispersion_report(self):
         rb = ring.RingBuilder(8, 3, 0)
         rb.add_dev({'id': 0, 'region': 1, 'zone': 0, 'weight': 100,
@@ -921,8 +921,8 @@ class TestUtils(unittest.TestCase):
     """Tests for swift.common.utils """

     def setUp(self):
-        utils.HASH_PATH_SUFFIX = 'endcap'
-        utils.HASH_PATH_PREFIX = 'startcap'
+        utils.HASH_PATH_SUFFIX = b'endcap'
+        utils.HASH_PATH_PREFIX = b'startcap'

     def test_get_zero_indexed_base_string(self):
         self.assertEqual(utils.get_zero_indexed_base_string('something', 0),
@@ -1938,7 +1938,7 @@ class TestUtils(unittest.TestCase):
     def test_hash_path(self):
         # Yes, these tests are deliberately very fragile. We want to make sure
         # that if someones changes the results hash_path produces, they know it
-        with mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''):
+        with mock.patch('swift.common.utils.HASH_PATH_PREFIX', b''):
             self.assertEqual(utils.hash_path('a'),
                              '1c84525acb02107ea475dcd3d09c2c58')
             self.assertEqual(utils.hash_path('a', 'c'),
@@ -1948,10 +1948,10 @@ class TestUtils(unittest.TestCase):
             self.assertEqual(utils.hash_path('a', 'c', 'o', raw_digest=False),
                              '06fbf0b514e5199dfc4e00f42eb5ea83')
             self.assertEqual(utils.hash_path('a', 'c', 'o', raw_digest=True),
-                             '\x06\xfb\xf0\xb5\x14\xe5\x19\x9d\xfcN'
-                             '\x00\xf4.\xb5\xea\x83')
+                             b'\x06\xfb\xf0\xb5\x14\xe5\x19\x9d\xfcN'
+                             b'\x00\xf4.\xb5\xea\x83')
             self.assertRaises(ValueError, utils.hash_path, 'a', object='o')
-            utils.HASH_PATH_PREFIX = 'abcdef'
+            utils.HASH_PATH_PREFIX = b'abcdef'
             self.assertEqual(utils.hash_path('a', 'c', 'o', raw_digest=False),
                              '363f9b535bfb7d17a43a46a358afca0e')

@@ -1985,8 +1985,8 @@ class TestUtils(unittest.TestCase):
     def _test_validate_hash_conf(self, sections, options, should_raise_error):

         class FakeConfigParser(object):
-            def read(self, conf_path):
-                return True
+            def read(self, conf_path, encoding=None):
+                return [conf_path]

             def get(self, section, option):
                 if section not in sections:
@@ -1996,8 +1996,8 @@ class TestUtils(unittest.TestCase):
                 else:
                     return 'some_option_value'

-        with mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
-                mock.patch('swift.common.utils.HASH_PATH_SUFFIX', ''), \
+        with mock.patch('swift.common.utils.HASH_PATH_PREFIX', b''), \
+                mock.patch('swift.common.utils.HASH_PATH_SUFFIX', b''), \
                 mock.patch('swift.common.utils.ConfigParser',
                            FakeConfigParser):
             try:
@@ -2026,7 +2026,7 @@ foo = bar
 log_name = yarr'''
         # setup a real file
         fd, temppath = tempfile.mkstemp()
-        with os.fdopen(fd, 'wb') as f:
+        with os.fdopen(fd, 'w') as f:
             f.write(conf)
         make_filename = lambda: temppath
         # setup a file stream
@@ -2075,7 +2075,7 @@ foo = bar
 log_name = %(yarr)s'''
         # setup a real file
         fd, temppath = tempfile.mkstemp()
-        with os.fdopen(fd, 'wb') as f:
+        with os.fdopen(fd, 'w') as f:
             f.write(conf)
         make_filename = lambda: temppath
         # setup a file stream
@@ -2661,18 +2661,26 @@ cluster_dfw1 = http://dfw1.host/v1/
     def test_config_positive_int_value(self):
         expectations = {
             # value : expected,
-            '1': 1,
+            u'1': 1,
+            b'1': 1,
             1: 1,
-            '2': 2,
-            '1024': 1024,
-            '0': ValueError,
-            '-1': ValueError,
-            '0x01': ValueError,
-            'asdf': ValueError,
+            u'2': 2,
+            b'2': 2,
+            u'1024': 1024,
+            b'1024': 1024,
+            u'0': ValueError,
+            b'0': ValueError,
+            u'-1': ValueError,
+            b'-1': ValueError,
+            u'0x01': ValueError,
+            b'0x01': ValueError,
+            u'asdf': ValueError,
+            b'asdf': ValueError,
             None: ValueError,
             0: ValueError,
             -1: ValueError,
-            '1.2': ValueError,  # string expresses float should be value error
+            u'1.2': ValueError,  # string expresses float should be value error
+            b'1.2': ValueError,  # string expresses float should be value error
         }
         for value, expected in expectations.items():
             try:
@@ -2683,7 +2691,7 @@ cluster_dfw1 = http://dfw1.host/v1/
                 else:
                     self.assertEqual(
                         'Config option must be an positive int number, '
-                        'not "%s".' % value, e.message)
+                        'not "%s".' % value, e.args[0])
             else:
                 self.assertEqual(expected, rv)

@@ -2940,7 +2948,7 @@ cluster_dfw1 = http://dfw1.host/v1/
             fallocate(0, 1, 0, ctypes.c_uint64(0))
         self.assertEqual(
             str(catcher.exception),
-            '[Errno %d] FALLOCATE_RESERVE fail 100.0 <= 100.0'
+            '[Errno %d] FALLOCATE_RESERVE fail 100 <= 100'
             % errno.ENOSPC)
         self.assertEqual(catcher.exception.errno, errno.ENOSPC)

@@ -2955,7 +2963,7 @@ cluster_dfw1 = http://dfw1.host/v1/
             fallocate(0, 1, 0, ctypes.c_uint64(101))
         self.assertEqual(
             str(catcher.exception),
-            '[Errno %d] FALLOCATE_RESERVE fail 0.99 <= 1.0'
+            '[Errno %d] FALLOCATE_RESERVE fail 0.99 <= 1'
            % errno.ENOSPC)
         self.assertEqual(catcher.exception.errno, errno.ENOSPC)

@@ -2969,7 +2977,7 @@ cluster_dfw1 = http://dfw1.host/v1/
             fallocate(0, 1, 0, ctypes.c_uint64(100))
         self.assertEqual(
             str(catcher.exception),
-            '[Errno %d] FALLOCATE_RESERVE fail 98.0 <= 98.0'
+            '[Errno %d] FALLOCATE_RESERVE fail 98 <= 98'
             % errno.ENOSPC)
         self.assertEqual(catcher.exception.errno, errno.ENOSPC)

@@ -2993,7 +3001,7 @@ cluster_dfw1 = http://dfw1.host/v1/
             fallocate(0, 1, 0, ctypes.c_uint64(1000))
         self.assertEqual(
             str(catcher.exception),
-            '[Errno %d] FALLOCATE_RESERVE fail 2.0 <= 2.0'
+            '[Errno %d] FALLOCATE_RESERVE fail 2 <= 2'
             % errno.ENOSPC)
         self.assertEqual(catcher.exception.errno, errno.ENOSPC)

@@ -3126,11 +3134,11 @@ cluster_dfw1 = http://dfw1.host/v1/
     def test_lock_file(self):
         flags = os.O_CREAT | os.O_RDWR
         with NamedTemporaryFile(delete=False) as nt:
-            nt.write("test string")
+            nt.write(b"test string")
             nt.flush()
             nt.close()
             with utils.lock_file(nt.name, unlink=False) as f:
-                self.assertEqual(f.read(), "test string")
+                self.assertEqual(f.read(), b"test string")
                 # we have a lock, now let's try to get a newer one
                 fd = os.open(nt.name, flags)
                 self.assertRaises(IOError, fcntl.flock, fd,
@@ -3138,12 +3146,12 @@ cluster_dfw1 = http://dfw1.host/v1/

             with utils.lock_file(nt.name, unlink=False, append=True) as f:
                 f.seek(0)
-                self.assertEqual(f.read(), "test string")
+                self.assertEqual(f.read(), b"test string")
                 f.seek(0)
-                f.write("\nanother string")
+                f.write(b"\nanother string")
                 f.flush()
                 f.seek(0)
-                self.assertEqual(f.read(), "test string\nanother string")
+                self.assertEqual(f.read(), b"test string\nanother string")

                 # we have a lock, now let's try to get a newer one
                 fd = os.open(nt.name, flags)
@@ -3160,7 +3168,7 @@ cluster_dfw1 = http://dfw1.host/v1/
                     pass

             with utils.lock_file(nt.name, unlink=True) as f:
-                self.assertEqual(f.read(), "test string\nanother string")
+                self.assertEqual(f.read(), b"test string\nanother string")
                 # we have a lock, now let's try to get a newer one
                 fd = os.open(nt.name, flags)
                 self.assertRaises(
@@ -3482,22 +3490,28 @@ cluster_dfw1 = http://dfw1.host/v1/
             do_test(b'\xf0\x9f\x82\xa1', b'\xf0\x9f\x82\xa1'),
             do_test(b'\xed\xa0\xbc\xed\xb2\xa1', b'\xf0\x9f\x82\xa1'),

-    def test_quote(self):
-        res = utils.quote('/v1/a/c3/subdirx/')
-        assert res == '/v1/a/c3/subdirx/'
-        res = utils.quote('/v1/a&b/c3/subdirx/')
-        assert res == '/v1/a%26b/c3/subdirx/'
-        res = utils.quote('/v1/a&b/c3/subdirx/', safe='&')
-        assert res == '%2Fv1%2Fa&b%2Fc3%2Fsubdirx%2F'
-        unicode_sample = u'\uc77c\uc601'
-        account = 'abc_' + unicode_sample
-        valid_utf8_str = utils.get_valid_utf8_str(account)
-        account = 'abc_' + unicode_sample.encode('utf-8')[::-1]
-        invalid_utf8_str = utils.get_valid_utf8_str(account)
-        self.assertEqual('abc_%EC%9D%BC%EC%98%81',
-                         utils.quote(valid_utf8_str))
-        self.assertEqual('abc_%EF%BF%BD%EF%BF%BD%EC%BC%9D%EF%BF%BD',
-                         utils.quote(invalid_utf8_str))
+    def test_quote_bytes(self):
+        self.assertEqual(b'/v1/a/c3/subdirx/',
+                         utils.quote(b'/v1/a/c3/subdirx/'))
+        self.assertEqual(b'/v1/a%26b/c3/subdirx/',
+                         utils.quote(b'/v1/a&b/c3/subdirx/'))
+        self.assertEqual(b'%2Fv1%2Fa&b%2Fc3%2Fsubdirx%2F',
+                         utils.quote(b'/v1/a&b/c3/subdirx/', safe='&'))
+        self.assertEqual(b'abc_%EC%9D%BC%EC%98%81',
+                         utils.quote(u'abc_\uc77c\uc601'.encode('utf8')))
+        # Invalid utf8 is parsed as latin1, then re-encoded as utf8??
+        self.assertEqual(b'%EF%BF%BD%EF%BF%BD%EC%BC%9D%EF%BF%BD',
+                         utils.quote(u'\uc77c\uc601'.encode('utf8')[::-1]))
+
+    def test_quote_unicode(self):
+        self.assertEqual(u'/v1/a/c3/subdirx/',
+                         utils.quote(u'/v1/a/c3/subdirx/'))
+        self.assertEqual(u'/v1/a%26b/c3/subdirx/',
+                         utils.quote(u'/v1/a&b/c3/subdirx/'))
+        self.assertEqual(u'%2Fv1%2Fa&b%2Fc3%2Fsubdirx%2F',
+                         utils.quote(u'/v1/a&b/c3/subdirx/', safe='&'))
+        self.assertEqual(u'abc_%EC%9D%BC%EC%98%81',
+                         utils.quote(u'abc_\uc77c\uc601'))

     def test_get_hmac(self):
         self.assertEqual(
@@ -3797,7 +3811,7 @@ cluster_dfw1 = http://dfw1.host/v1/
     def test_link_fd_to_path_linkat_success(self):
         tempdir = mkdtemp()
         fd = os.open(tempdir, utils.O_TMPFILE | os.O_WRONLY)
-        data = "I'm whatever Gotham needs me to be"
+        data = b"I'm whatever Gotham needs me to be"
         _m_fsync_dir = mock.Mock()
         try:
             os.write(fd, data)
@@ -3805,8 +3819,8 @@ cluster_dfw1 = http://dfw1.host/v1/
             self.assertRaises(OSError, os.read, fd, 1)
             file_path = os.path.join(tempdir, uuid4().hex)
             with mock.patch('swift.common.utils.fsync_dir', _m_fsync_dir):
-                utils.link_fd_to_path(fd, file_path, 1)
-            with open(file_path, 'r') as f:
+                utils.link_fd_to_path(fd, file_path, 1)
+            with open(file_path, 'rb') as f:
                 self.assertEqual(f.read(), data)
             self.assertEqual(_m_fsync_dir.call_count, 2)
         finally:
@@ -3818,19 +3832,19 @@ cluster_dfw1 = http://dfw1.host/v1/
         tempdir = mkdtemp()
         # Create and write to a file
         fd, path = tempfile.mkstemp(dir=tempdir)
-        os.write(fd, "hello world")
+        os.write(fd, b"hello world")
         os.fsync(fd)
         os.close(fd)
         self.assertTrue(os.path.exists(path))

         fd = os.open(tempdir, utils.O_TMPFILE | os.O_WRONLY)
         try:
-            os.write(fd, "bye world")
+            os.write(fd, b"bye world")
             os.fsync(fd)
             utils.link_fd_to_path(fd, path, 0, fsync=False)
             # Original file now should have been over-written
-            with open(path, 'r') as f:
-                self.assertEqual(f.read(), "bye world")
+            with open(path, 'rb') as f:
+                self.assertEqual(f.read(), b"bye world")
         finally:
             os.close(fd)
             shutil.rmtree(tempdir)
@ -5904,7 +5918,7 @@ class TestParseContentDisposition(unittest.TestCase):
|
||||
class TestIterMultipartMimeDocuments(unittest.TestCase):
|
||||
|
||||
def test_bad_start(self):
|
||||
it = utils.iter_multipart_mime_documents(StringIO('blah'), 'unique')
|
||||
it = utils.iter_multipart_mime_documents(BytesIO(b'blah'), b'unique')
|
||||
exc = None
|
||||
try:
|
||||
next(it)
|
||||
@ -5914,144 +5928,104 @@ class TestIterMultipartMimeDocuments(unittest.TestCase):
|
||||
self.assertTrue('--unique' in str(exc))
    def test_empty(self):
        it = utils.iter_multipart_mime_documents(StringIO('--unique'),
                                                 'unique')
        it = utils.iter_multipart_mime_documents(BytesIO(b'--unique'),
                                                 b'unique')
        fp = next(it)
        self.assertEqual(fp.read(), '')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'')
        self.assertRaises(StopIteration, next, it)

    def test_basic(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nabcdefg\r\n--unique--'), 'unique')
            BytesIO(b'--unique\r\nabcdefg\r\n--unique--'), b'unique')
        fp = next(it)
        self.assertEqual(fp.read(), 'abcdefg')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'abcdefg')
        self.assertRaises(StopIteration, next, it)

    def test_basic2(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            'unique')
            BytesIO(b'--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            b'unique')
        fp = next(it)
        self.assertEqual(fp.read(), 'abcdefg')
        self.assertEqual(fp.read(), b'abcdefg')
        fp = next(it)
        self.assertEqual(fp.read(), 'hijkl')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'hijkl')
        self.assertRaises(StopIteration, next, it)

    def test_tiny_reads(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            'unique')
            BytesIO(b'--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            b'unique')
        fp = next(it)
        self.assertEqual(fp.read(2), 'ab')
        self.assertEqual(fp.read(2), 'cd')
        self.assertEqual(fp.read(2), 'ef')
        self.assertEqual(fp.read(2), 'g')
        self.assertEqual(fp.read(2), '')
        self.assertEqual(fp.read(2), b'ab')
        self.assertEqual(fp.read(2), b'cd')
        self.assertEqual(fp.read(2), b'ef')
        self.assertEqual(fp.read(2), b'g')
        self.assertEqual(fp.read(2), b'')
        fp = next(it)
        self.assertEqual(fp.read(), 'hijkl')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'hijkl')
        self.assertRaises(StopIteration, next, it)

    def test_big_reads(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            'unique')
            BytesIO(b'--unique\r\nabcdefg\r\n--unique\r\nhijkl\r\n--unique--'),
            b'unique')
        fp = next(it)
        self.assertEqual(fp.read(65536), 'abcdefg')
        self.assertEqual(fp.read(), '')
        self.assertEqual(fp.read(65536), b'abcdefg')
        self.assertEqual(fp.read(), b'')
        fp = next(it)
        self.assertEqual(fp.read(), 'hijkl')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'hijkl')
        self.assertRaises(StopIteration, next, it)

    def test_leading_crlfs(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('\r\n\r\n\r\n--unique\r\nabcdefg\r\n'
                     '--unique\r\nhijkl\r\n--unique--'),
            'unique')
            BytesIO(b'\r\n\r\n\r\n--unique\r\nabcdefg\r\n'
                    b'--unique\r\nhijkl\r\n--unique--'),
            b'unique')
        fp = next(it)
        self.assertEqual(fp.read(65536), 'abcdefg')
        self.assertEqual(fp.read(), '')
        self.assertEqual(fp.read(65536), b'abcdefg')
        self.assertEqual(fp.read(), b'')
        fp = next(it)
        self.assertEqual(fp.read(), 'hijkl')
        self.assertRaises(StopIteration, it.next)
        self.assertEqual(fp.read(), b'hijkl')
        self.assertRaises(StopIteration, next, it)

    def test_broken_mid_stream(self):
        # We go ahead and accept whatever is sent instead of rejecting the
        # whole request, in case the partial form is still useful.
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nabc'), 'unique')
            BytesIO(b'--unique\r\nabc'), b'unique')
        fp = next(it)
        self.assertEqual(fp.read(), 'abc')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.read(), b'abc')
        self.assertRaises(StopIteration, next, it)
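As the comment above says, a body that breaks off mid-document still yields the partial payload instead of failing the whole request. A short sketch of that behavior (not part of the patch; assumes swift is importable):

    # the stream ends before any closing boundary arrives
    from io import BytesIO
    from swift.common import utils

    it = utils.iter_multipart_mime_documents(BytesIO(b'--unique\r\nabc'),
                                             b'unique')
    assert next(it).read() == b'abc'  # the partial form data is preserved
    assert next(it, None) is None     # then the iterator simply ends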
    def test_readline(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nab\r\ncd\ref\ng\r\n--unique\r\nhi\r\n\r\n'
                     'jkl\r\n\r\n--unique--'), 'unique')
            BytesIO(b'--unique\r\nab\r\ncd\ref\ng\r\n--unique\r\nhi\r\n\r\n'
                    b'jkl\r\n\r\n--unique--'), b'unique')
        fp = next(it)
        self.assertEqual(fp.readline(), 'ab\r\n')
        self.assertEqual(fp.readline(), 'cd\ref\ng')
        self.assertEqual(fp.readline(), '')
        self.assertEqual(fp.readline(), b'ab\r\n')
        self.assertEqual(fp.readline(), b'cd\ref\ng')
        self.assertEqual(fp.readline(), b'')
        fp = next(it)
        self.assertEqual(fp.readline(), 'hi\r\n')
        self.assertEqual(fp.readline(), '\r\n')
        self.assertEqual(fp.readline(), 'jkl\r\n')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.readline(), b'hi\r\n')
        self.assertEqual(fp.readline(), b'\r\n')
        self.assertEqual(fp.readline(), b'jkl\r\n')
        self.assertRaises(StopIteration, next, it)

    def test_readline_with_tiny_chunks(self):
        it = utils.iter_multipart_mime_documents(
            StringIO('--unique\r\nab\r\ncd\ref\ng\r\n--unique\r\nhi\r\n'
                     '\r\njkl\r\n\r\n--unique--'),
            'unique',
            BytesIO(b'--unique\r\nab\r\ncd\ref\ng\r\n--unique\r\nhi\r\n'
                    b'\r\njkl\r\n\r\n--unique--'),
            b'unique',
            read_chunk_size=2)
        fp = next(it)
        self.assertEqual(fp.readline(), 'ab\r\n')
        self.assertEqual(fp.readline(), 'cd\ref\ng')
        self.assertEqual(fp.readline(), '')
        self.assertEqual(fp.readline(), b'ab\r\n')
        self.assertEqual(fp.readline(), b'cd\ref\ng')
        self.assertEqual(fp.readline(), b'')
        fp = next(it)
        self.assertEqual(fp.readline(), 'hi\r\n')
        self.assertEqual(fp.readline(), '\r\n')
        self.assertEqual(fp.readline(), 'jkl\r\n')
        exc = None
        try:
            next(it)
        except StopIteration as err:
            exc = err
        self.assertTrue(exc is not None)
        self.assertEqual(fp.readline(), b'hi\r\n')
        self.assertEqual(fp.readline(), b'\r\n')
        self.assertEqual(fp.readline(), b'jkl\r\n')
        self.assertRaises(StopIteration, next, it)
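Each yielded document is itself line-readable, and as the tests show, a bare \r is not treated as a line terminator; read_chunk_size only changes how the underlying stream is consumed, not the results. A sketch (not part of the patch; assumes swift is importable):

    from io import BytesIO
    from swift.common import utils

    it = utils.iter_multipart_mime_documents(
        BytesIO(b'--unique\r\nab\r\ncd\ref\r\n--unique--'), b'unique',
        read_chunk_size=2)  # force many tiny reads under the hood
    fp = next(it)
    assert fp.readline() == b'ab\r\n'
    assert fp.readline() == b'cd\ref'  # \r alone does not end a line
    assert fp.readline() == b''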


class TestParseMimeHeaders(unittest.TestCase):
@ -6230,7 +6204,7 @@ class TestHashForFileFunction(unittest.TestCase):
            pass

    def test_hash_for_file_smallish(self):
        stub_data = 'some data'
        stub_data = b'some data'
        with open(self.tempfilename, 'wb') as fd:
            fd.write(stub_data)
        with mock.patch('swift.common.utils.md5') as mock_md5:
@ -6246,9 +6220,9 @@ class TestHashForFileFunction(unittest.TestCase):
        block_size = utils.MD5_BLOCK_READ_BYTES
        truncate = 523
        start_char = ord('a')
        expected_blocks = [chr(i) * block_size
        expected_blocks = [chr(i).encode('utf8') * block_size
                           for i in range(start_char, start_char + num_blocks)]
        full_data = ''.join(expected_blocks)
        full_data = b''.join(expected_blocks)
        trimmed_data = full_data[:-truncate]
        # sanity
        self.assertEqual(len(trimmed_data), block_size * num_blocks - truncate)
@ -6272,7 +6246,7 @@ class TestHashForFileFunction(unittest.TestCase):
            else:
                self.assertEqual(block, expected_block[:-truncate])
            found_blocks.append(block)
        self.assertEqual(''.join(found_blocks), trimmed_data)
        self.assertEqual(b''.join(found_blocks), trimmed_data)
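These tests pin down the block-wise reads behind md5_hash_for_file. A minimal standalone sketch of that pattern (the helper name is illustrative, not swift's; swift reads utils.MD5_BLOCK_READ_BYTES at a time, assumed to be 4096 here):

    import hashlib

    def md5_for_path(path, block_size=4096):
        hasher = hashlib.md5()
        with open(path, 'rb') as fd:
            # iter() with a b'' sentinel reads fixed-size blocks until EOF
            for block in iter(lambda: fd.read(block_size), b''):
                hasher.update(block)
        return hasher.hexdigest()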

    def test_hash_for_file_empty(self):
        with open(self.tempfilename, 'wb'):
@ -6281,14 +6255,14 @@ class TestHashForFileFunction(unittest.TestCase):
            mock_hasher = mock_md5.return_value
            rv = utils.md5_hash_for_file(self.tempfilename)
        self.assertTrue(mock_hasher.hexdigest.called)
        self.assertEqual(rv, mock_hasher.hexdigest.return_value)
        self.assertIs(rv, mock_hasher.hexdigest.return_value)
        self.assertEqual([], mock_hasher.update.call_args_list)

    def test_hash_for_file_brittle(self):
        data_to_expected_hash = {
            '': 'd41d8cd98f00b204e9800998ecf8427e',
            'some data': '1e50210a0202497fb79bc38b6ade6c34',
            ('a' * 4096 * 10)[:-523]: '06a41551609656c85f14f659055dc6d3',
            b'': 'd41d8cd98f00b204e9800998ecf8427e',
            b'some data': '1e50210a0202497fb79bc38b6ade6c34',
            (b'a' * 4096 * 10)[:-523]: '06a41551609656c85f14f659055dc6d3',
        }
        # unlike some other places where the concrete implementation really
        # matters for backwards compatibility these brittle tests are probably
@ -6316,8 +6290,8 @@ class TestSetSwiftDir(unittest.TestCase):
    def setUp(self):
        self.swift_dir = tempfile.mkdtemp()
        self.swift_conf = os.path.join(self.swift_dir, 'swift.conf')
        self.policy_name = ''.join(random.sample(string.letters, 20))
        with open(self.swift_conf, "wb") as sc:
        self.policy_name = ''.join(random.sample(string.ascii_letters, 20))
        with open(self.swift_conf, "wt") as sc:
            sc.write('''
[swift-hash]
swift_hash_path_suffix = changeme
|
@ -75,7 +75,7 @@ def setup_servers(the_object_server=object_server, extra_conf=None):
               "orig_POLICIES": storage_policy._POLICIES,
               "orig_SysLogHandler": utils.SysLogHandler}

    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_SUFFIX = b'endcap'
    utils.SysLogHandler = mock.MagicMock()
    # Since we're starting up a lot here, we're going to test more than
    # just chunked puts; we're also going to test parts of
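The suffix becomes bytes because swift's hash_path mixes it into an md5 digest over byte strings, and on py3 hashlib rejects native str. A one-line illustration (not swift's code; path and suffix values are stand-ins):

    import hashlib
    # py3: hashlib.md5 accepts bytes only, so the suffix must be bytes too
    hashlib.md5(b'/a/c/o' + b'endcap').hexdigest()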
|
tox.ini
@ -29,10 +29,16 @@ setenv = VIRTUAL_ENV={envdir}

[testenv:py34]
commands =
    nosetests \
        test/unit/cli/test_ring_builder_analyzer.py \
        test/unit/cli/test_ringbuilder.py \
        test/unit/common/ring \
        test/unit/common/test_daemon.py \
        test/unit/common/test_exceptions.py \
        test/unit/common/test_header_key_dict.py \
        test/unit/common/test_linkat.py \
        test/unit/common/test_manager.py \
        test/unit/common/test_splice.py
        test/unit/common/test_splice.py \
        test/unit/common/test_utils.py

[testenv:py35]
commands = {[testenv:py34]commands}