Rebase to OpenStack Swift v1.10.0.172.g9fe7748
* Updated Proxy unit test
* Updated Functional tests
* Updated Tox to point to the new swift snapshot available on
  http://launchpad.net/gluster-swift

Change-Id: Ia91593c6a28d5a3fe70715ddc60546931ae71635
Signed-off-by: Luis Pabon <lpabon@redhat.com>
Reviewed-on: http://review.gluster.org/6445
parent 702610592f
commit bc9bdcf25b
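The central functional-test change in this rebase threads the request method and path into the test client's ResponseError, so a failure reports what was attempted rather than just the status line. A minimal sketch of the new behaviour, assuming the updated ResponseError class from the diff below is importable; FakeResponse is a hypothetical stand-in for an httplib response and is not part of the commit:

    # Hypothetical illustration only -- not part of the commit.
    class FakeResponse(object):
        status, reason = 404, 'Not Found'

        def getheaders(self):
            # ResponseError picks the transaction id out of these headers
            return [('x-trans-id', 'tx123')]

    err = ResponseError(FakeResponse(), 'GET', '/v1/AUTH_test/c/o')
    # repr() now includes method, path and txid:
    # 404: 'Not Found' ('GET' '/v1/AUTH_test/c/o') txid=tx123
    print repr(err)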
@@ -1,3 +1,4 @@
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -39,16 +40,28 @@ class RequestError(Exception):


 class ResponseError(Exception):
-    def __init__(self, response):
+    def __init__(self, response, method, path):
         self.status = response.status
         self.reason = response.reason
-        Exception.__init__(self)
+        self.method = method
+        self.path = path
+        self.headers = response.getheaders()
+
+        for name, value in self.headers:
+            if name.lower() == 'x-trans-id':
+                self.txid = value
+                break
+        else:
+            self.txid = None
+
+        super(ResponseError, self).__init__()

     def __str__(self):
-        return '%d: %s' % (self.status, self.reason)
+        return repr(self)

     def __repr__(self):
-        return '%d: %s' % (self.status, self.reason)
+        return '%d: %r (%r %r) txid=%s' % (
+            self.status, self.reason, self.method, self.path, self.txid)


 def listing_empty(method):
@@ -266,10 +279,6 @@ class Connection(object):
                           for (x, y) in parms.items()]
             path = '%s?%s' % (path, '&'.join(query_args))
-
-            query_args = ['%s=%s' % (urllib.quote(x),
-                          urllib.quote(str(y))) for (x, y) in parms.items()]
-            path = '%s?%s' % (path, '&'.join(query_args))

         self.connection = self.conn_class(self.storage_host,
                                           port=self.storage_port)
         #self.connection.set_debuglevel(3)
@@ -355,7 +364,8 @@ class Account(Base):
             elif status == 204:
                 return []

-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'GET',
+                                self.conn.make_path(self.path))

     def delete_containers(self):
         for c in listing_items(self.containers):
@@ -369,7 +379,8 @@ class Account(Base):
         if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                                   parms=parms, cfg=cfg) != 204:

-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'HEAD',
+                                self.conn.make_path(self.path))

         fields = [['object_count', 'x-account-object-count'],
                   ['container_count', 'x-account-container-count'],
@@ -457,7 +468,8 @@ class Container(Base):
             elif status == 204:
                 return []

-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'GET',
+                                self.conn.make_path(self.path))

     def info(self, hdrs={}, parms={}, cfg={}):
         self.conn.make_request('HEAD', self.path, hdrs=hdrs,
@@ -469,7 +481,8 @@ class Container(Base):

             return self.header_fields(fields)

-        raise ResponseError(self.conn.response)
+        raise ResponseError(self.conn.response, 'HEAD',
+                            self.conn.make_path(self.path))

     @property
     def path(self):
@@ -544,7 +557,8 @@ class File(Base):
         if self.conn.make_request('DELETE', self.path, hdrs=hdrs,
                                   parms=parms) != 204:

-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'DELETE',
+                                self.conn.make_path(self.path))

         return True

@@ -552,7 +566,8 @@ class File(Base):
         if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                                   parms=parms, cfg=cfg) != 200:

-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'HEAD',
+                                self.conn.make_path(self.path))

         fields = [['content_length', 'content-length'],
                   ['content_type', 'content-type'],
@@ -572,7 +587,8 @@ class File(Base):
         if status == 404:
             return False
         elif (status < 200) or (status > 299):
-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'HEAD',
+                                self.conn.make_path(self.path))

         for hdr in self.conn.response.getheaders():
             if hdr[0].lower() == 'content-type':
@@ -607,7 +623,7 @@ class File(Base):
         return data

     def read(self, size=-1, offset=0, hdrs=None, buffer=None,
-             callback=None, cfg={}):
+             callback=None, cfg={}, parms={}):

         if size > 0:
             range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1)
@@ -617,10 +633,11 @@ class File(Base):
             hdrs = {'Range': range_string}

         status = self.conn.make_request('GET', self.path, hdrs=hdrs,
-                                        cfg=cfg)
+                                        cfg=cfg, parms=parms)

-        if(status < 200) or (status > 299):
-            raise ResponseError(self.conn.response)
+        if (status < 200) or (status > 299):
+            raise ResponseError(self.conn.response, 'GET',
+                                self.conn.make_path(self.path))

         for hdr in self.conn.response.getheaders():
             if hdr[0].lower() == 'content-type':
@@ -643,8 +660,9 @@ class File(Base):
     def read_md5(self):
         status = self.conn.make_request('GET', self.path)

-        if(status < 200) or (status > 299):
-            raise ResponseError(self.conn.response)
+        if (status < 200) or (status > 299):
+            raise ResponseError(self.conn.response, 'GET',
+                                self.conn.make_path(self.path))

         checksum = hashlib.md5()

@@ -677,7 +695,8 @@ class File(Base):
         self.conn.make_request('POST', self.path, hdrs=headers, cfg=cfg)

         if self.conn.response.status not in (201, 202):
-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'POST',
+                                self.conn.make_path(self.path))

         return True

@@ -735,8 +754,13 @@ class File(Base):

         if (self.conn.response.status < 200) or \
            (self.conn.response.status > 299):
-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'PUT',
+                                self.conn.make_path(self.path))

+        try:
+            data.seek(0)
+        except IOError:
+            pass
         self.md5 = self.compute_md5sum(data)

         return True
@@ -744,6 +768,7 @@ class File(Base):
     def write_random(self, size=None, hdrs={}, parms={}, cfg={}):
         data = self.random_data(size)
         if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg):
-            raise ResponseError(self.conn.response)
+            raise ResponseError(self.conn.response, 'PUT',
+                                self.conn.make_path(self.path))
         self.md5 = self.compute_md5sum(StringIO.StringIO(data))
         return data
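The new parms keyword on File.read() above is what lets the DLO and SLO tests later in this commit fetch a manifest document itself instead of the concatenated segments. A small usage sketch mirroring test_get_manifest_document_itself below; the container/file wiring from the test environment is assumed and `env` is a hypothetical name, not part of the commit:

    # Hypothetical snippet; env.container comes from a functional test env.
    file_item = env.container.file('man1')
    # ?multipart-manifest=get is passed straight through to the GET request
    manifest_body = file_item.read(parms={'multipart-manifest': 'get'})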
@@ -18,6 +18,8 @@

 from datetime import datetime
 import os
+import hashlib
+import json
 import locale
 import random
 import StringIO
@@ -916,7 +918,7 @@ class TestFile(Base):
     set_up = False

     def testCopy(self):
-        # makes sure to test encoded characters"
+        # makes sure to test encoded characters
         source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
         file_item = self.env.container.file(source_filename)

@@ -1570,6 +1572,116 @@ class TestFileUTF8(Base2, TestFile):
     set_up = False


+class TestDloEnv(object):
+    @classmethod
+    def setUp(cls):
+        cls.conn = Connection(config)
+        cls.conn.authenticate()
+        cls.account = Account(cls.conn, config.get('account',
+                                                   config['username']))
+        cls.account.delete_containers()
+
+        cls.container = cls.account.container(Utils.create_name())
+
+        if not cls.container.create():
+            raise ResponseError(cls.conn.response)
+
+        # avoid getting a prefix that stops halfway through an encoded
+        # character
+        prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
+        cls.segment_prefix = prefix
+
+        for letter in ('a', 'b', 'c', 'd', 'e'):
+            file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter))
+            file_item.write(letter * 10)
+
+            file_item = cls.container.file("%s/seg_upper%s" % (prefix, letter))
+            file_item.write(letter.upper() * 10)
+
+        man1 = cls.container.file("man1")
+        man1.write('man1-contents',
+                   hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
+                         (cls.container.name, prefix)})
+
+        man1 = cls.container.file("man2")
+        man1.write('man2-contents',
+                   hdrs={"X-Object-Manifest": "%s/%s/seg_upper" %
+                         (cls.container.name, prefix)})
+
+        manall = cls.container.file("manall")
+        manall.write('manall-contents',
+                     hdrs={"X-Object-Manifest": "%s/%s/seg" %
+                           (cls.container.name, prefix)})
+
+
+class TestDlo(Base):
+    env = TestDloEnv
+    set_up = False
+
+    def test_get_manifest(self):
+        file_item = self.env.container.file('man1')
+        file_contents = file_item.read()
+        self.assertEqual(
+            file_contents,
+            "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
+
+        file_item = self.env.container.file('man2')
+        file_contents = file_item.read()
+        self.assertEqual(
+            file_contents,
+            "AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")
+
+        file_item = self.env.container.file('manall')
+        file_contents = file_item.read()
+        self.assertEqual(
+            file_contents,
+            ("aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" +
+             "AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE"))
+
+    def test_get_manifest_document_itself(self):
+        file_item = self.env.container.file('man1')
+        file_contents = file_item.read(parms={'multipart-manifest': 'get'})
+        self.assertEqual(file_contents, "man1-contents")
+
+    def test_get_range(self):
+        file_item = self.env.container.file('man1')
+        file_contents = file_item.read(size=25, offset=8)
+        self.assertEqual(file_contents, "aabbbbbbbbbbccccccccccddd")
+
+        file_contents = file_item.read(size=1, offset=47)
+        self.assertEqual(file_contents, "e")
+
+    def test_get_range_out_of_range(self):
+        file_item = self.env.container.file('man1')
+
+        self.assertRaises(ResponseError, file_item.read, size=7, offset=50)
+        self.assert_status(416)
+
+    def test_copy(self):
+        # Adding a new segment, copying the manifest, and then deleting the
+        # segment proves that the new object is really the concatenated
+        # segments and not just a manifest.
+        f_segment = self.env.container.file("%s/seg_lowerf" %
+                                            (self.env.segment_prefix))
+        f_segment.write('ffffffffff')
+        try:
+            man1_item = self.env.container.file('man1')
+            man1_item.copy(self.env.container.name, "copied-man1")
+        finally:
+            # try not to leave this around for other tests to stumble over
+            f_segment.delete()
+
+        file_item = self.env.container.file('copied-man1')
+        file_contents = file_item.read()
+        self.assertEqual(
+            file_contents,
+            "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
+
+
+class TestDloUTF8(Base2, TestDlo):
+    set_up = False
+
+
 class TestFileComparisonEnv:
     @classmethod
     def setUp(cls):
@@ -1656,5 +1768,228 @@ class TestFileComparison(Base):
 class TestFileComparisonUTF8(Base2, TestFileComparison):
     set_up = False


+class TestSloEnv(object):
+    slo_enabled = None  # tri-state: None initially, then True/False
+
+    @classmethod
+    def setUp(cls):
+        cls.conn = Connection(config)
+        cls.conn.authenticate()
+        cls.account = Account(cls.conn, config.get('account',
+                                                   config['username']))
+        cls.account.delete_containers()
+
+        cls.container = cls.account.container(Utils.create_name())
+
+        if not cls.container.create():
+            raise ResponseError(cls.conn.response)
+
+        # TODO(seriously, anyone can do this): make this use the /info API once
+        # it lands, both for detection of SLO and for minimum segment size
+        if cls.slo_enabled is None:
+            test_file = cls.container.file(".test-slo")
+            try:
+                # If SLO is enabled, this'll raise an error since
+                # X-Static-Large-Object is a reserved header.
+                #
+                # If SLO is not enabled, then this will get the usual 2xx
+                # response.
+                test_file.write(
+                    "some contents",
+                    hdrs={'X-Static-Large-Object': 'true'})
+            except ResponseError as err:
+                if err.status == 400:
+                    cls.slo_enabled = True
+                else:
+                    raise
+            else:
+                cls.slo_enabled = False
+                return
+
+        seg_info = {}
+        for letter, size in (('a', 1024 * 1024),
+                             ('b', 1024 * 1024),
+                             ('c', 1024 * 1024),
+                             ('d', 1024 * 1024),
+                             ('e', 1)):
+            seg_name = "seg_%s" % letter
+            file_item = cls.container.file(seg_name)
+            file_item.write(letter * size)
+            seg_info[seg_name] = {
+                'size_bytes': size,
+                'etag': file_item.md5,
+                'path': '/%s/%s' % (cls.container.name, seg_name)}
+
+        file_item = cls.container.file("manifest-abcde")
+        file_item.write(
+            json.dumps([seg_info['seg_a'], seg_info['seg_b'],
+                        seg_info['seg_c'], seg_info['seg_d'],
+                        seg_info['seg_e']]),
+            parms={'multipart-manifest': 'put'})
+
+        file_item = cls.container.file('manifest-cd')
+        cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']])
+        file_item.write(cd_json, parms={'multipart-manifest': 'put'})
+        cd_etag = hashlib.md5(seg_info['seg_c']['etag'] +
+                              seg_info['seg_d']['etag']).hexdigest()
+
+        file_item = cls.container.file("manifest-bcd-submanifest")
+        file_item.write(
+            json.dumps([seg_info['seg_b'],
+                        {'etag': cd_etag,
+                         'size_bytes': (seg_info['seg_c']['size_bytes'] +
+                                        seg_info['seg_d']['size_bytes']),
+                         'path': '/%s/%s' % (cls.container.name,
+                                             'manifest-cd')}]),
+            parms={'multipart-manifest': 'put'})
+        bcd_submanifest_etag = hashlib.md5(
+            seg_info['seg_b']['etag'] + cd_etag).hexdigest()
+
+        file_item = cls.container.file("manifest-abcde-submanifest")
+        file_item.write(
+            json.dumps([
+                seg_info['seg_a'],
+                {'etag': bcd_submanifest_etag,
+                 'size_bytes': (seg_info['seg_b']['size_bytes'] +
+                                seg_info['seg_c']['size_bytes'] +
+                                seg_info['seg_d']['size_bytes']),
+                 'path': '/%s/%s' % (cls.container.name,
+                                     'manifest-bcd-submanifest')},
+                seg_info['seg_e']]),
+            parms={'multipart-manifest': 'put'})
+
+
+class TestSlo(Base):
+    env = TestSloEnv
+    set_up = False
+
+    def setUp(self):
+        super(TestSlo, self).setUp()
+        if self.env.slo_enabled is False:
+            raise SkipTest("SLO not enabled")
+        elif self.env.slo_enabled is not True:
+            # just some sanity checking
+            raise Exception(
+                "Expected slo_enabled to be True/False, got %r" %
+                (self.env.slo_enabled,))
+
+    def test_slo_get_simple_manifest(self):
+        file_item = self.env.container.file('manifest-abcde')
+        file_contents = file_item.read()
+        self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
+        self.assertEqual('a', file_contents[0])
+        self.assertEqual('a', file_contents[1024 * 1024 - 1])
+        self.assertEqual('b', file_contents[1024 * 1024])
+        self.assertEqual('d', file_contents[-2])
+        self.assertEqual('e', file_contents[-1])
+
+    def test_slo_get_nested_manifest(self):
+        file_item = self.env.container.file('manifest-abcde-submanifest')
+        file_contents = file_item.read()
+        self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
+        self.assertEqual('a', file_contents[0])
+        self.assertEqual('a', file_contents[1024 * 1024 - 1])
+        self.assertEqual('b', file_contents[1024 * 1024])
+        self.assertEqual('d', file_contents[-2])
+        self.assertEqual('e', file_contents[-1])
+
+    def test_slo_ranged_get(self):
+        file_item = self.env.container.file('manifest-abcde')
+        file_contents = file_item.read(size=1024 * 1024 + 2,
+                                       offset=1024 * 1024 - 1)
+        self.assertEqual('a', file_contents[0])
+        self.assertEqual('b', file_contents[1])
+        self.assertEqual('b', file_contents[-2])
+        self.assertEqual('c', file_contents[-1])
+
+    def test_slo_ranged_submanifest(self):
+        file_item = self.env.container.file('manifest-abcde-submanifest')
+        file_contents = file_item.read(size=1024 * 1024 + 2,
+                                       offset=1024 * 1024 * 2 - 1)
+        self.assertEqual('b', file_contents[0])
+        self.assertEqual('c', file_contents[1])
+        self.assertEqual('c', file_contents[-2])
+        self.assertEqual('d', file_contents[-1])
+
+    def test_slo_etag_is_hash_of_etags(self):
+        expected_hash = hashlib.md5()
+        expected_hash.update(hashlib.md5('a' * 1024 * 1024).hexdigest())
+        expected_hash.update(hashlib.md5('b' * 1024 * 1024).hexdigest())
+        expected_hash.update(hashlib.md5('c' * 1024 * 1024).hexdigest())
+        expected_hash.update(hashlib.md5('d' * 1024 * 1024).hexdigest())
+        expected_hash.update(hashlib.md5('e').hexdigest())
+        expected_etag = expected_hash.hexdigest()
+
+        file_item = self.env.container.file('manifest-abcde')
+        self.assertEqual(expected_etag, file_item.info()['etag'])
+
+    def test_slo_etag_is_hash_of_etags_submanifests(self):
+
+        def hd(x):
+            return hashlib.md5(x).hexdigest()
+
+        expected_etag = hd(hd('a' * 1024 * 1024) +
+                           hd(hd('b' * 1024 * 1024) +
+                              hd(hd('c' * 1024 * 1024) +
+                                 hd('d' * 1024 * 1024))) +
+                           hd('e'))
+
+        file_item = self.env.container.file('manifest-abcde-submanifest')
+        self.assertEqual(expected_etag, file_item.info()['etag'])
+
+    def test_slo_etag_mismatch(self):
+        file_item = self.env.container.file("manifest-a-bad-etag")
+        try:
+            file_item.write(
+                json.dumps([{
+                    'size_bytes': 1024 * 1024,
+                    'etag': 'not it',
+                    'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+                parms={'multipart-manifest': 'put'})
+        except ResponseError as err:
+            self.assertEqual(400, err.status)
+        else:
+            self.fail("Expected ResponseError but didn't get it")
+
+    def test_slo_size_mismatch(self):
+        file_item = self.env.container.file("manifest-a-bad-size")
+        try:
+            file_item.write(
+                json.dumps([{
+                    'size_bytes': 1024 * 1024 - 1,
+                    'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
+                    'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
+                parms={'multipart-manifest': 'put'})
+        except ResponseError as err:
+            self.assertEqual(400, err.status)
+        else:
+            self.fail("Expected ResponseError but didn't get it")
+
+    def test_slo_copy(self):
+        file_item = self.env.container.file("manifest-abcde")
+        file_item.copy(self.env.container.name, "copied-abcde")
+
+        copied = self.env.container.file("copied-abcde")
+        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
+        self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
+
+    def test_slo_copy_the_manifest(self):
+        file_item = self.env.container.file("manifest-abcde")
+        file_item.copy(self.env.container.name, "copied-abcde",
+                       parms={'multipart-manifest': 'get'})
+
+        copied = self.env.container.file("copied-abcde")
+        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
+        try:
+            json.loads(copied_contents)
+        except ValueError:
+            self.fail("COPY didn't copy the manifest (invalid json on GET)")
+
+
+class TestSloUTF8(Base2, TestSlo):
+    set_up = False
+
+
 if __name__ == '__main__':
     unittest.main()
@@ -35,13 +35,6 @@ import logging.handlers
 from httplib import HTTPException


-class DebugLogger(object):
-    """A simple stdout logger for eventlet wsgi."""
-
-    def write(self, *args):
-        print args
-
-
 class FakeRing(object):

     def __init__(self, replicas=3, max_more_nodes=0):
@@ -254,6 +247,7 @@ class FakeLogger(logging.Logger):
         self.level = logging.NOTSET
         if 'facility' in kwargs:
             self.facility = kwargs['facility']
+        self.statsd_client = None

     def _clear(self):
         self.log_dict = defaultdict(list)
@@ -276,10 +270,12 @@ class FakeLogger(logging.Logger):
     error = _store_and_log_in('error')
     info = _store_and_log_in('info')
     warning = _store_and_log_in('warning')
+    warn = _store_and_log_in('warning')
     debug = _store_and_log_in('debug')

     def exception(self, *args, **kwargs):
-        self.log_dict['exception'].append((args, kwargs, str(exc_info()[1])))
+        self.log_dict['exception'].append((args, kwargs,
+                                           str(sys.exc_info()[1])))
         print 'FakeLogger Exception: %s' % self.log_dict

     # mock out the StatsD logging methods:
@@ -323,7 +319,7 @@ class FakeLogger(logging.Logger):
     def emit(self, record):
         pass

-    def handle(self, record):
+    def _handle(self, record):
         try:
             line = record.getMessage()
         except TypeError:
@@ -332,6 +328,9 @@ class FakeLogger(logging.Logger):
             raise
         self.lines_dict[record.levelno].append(line)

+    def handle(self, record):
+        self._handle(record)
+
     def flush(self):
         pass

@@ -339,6 +338,26 @@ class FakeLogger(logging.Logger):
         pass


+class DebugLogger(FakeLogger):
+    """A simple stdout logging version of FakeLogger"""
+
+    def __init__(self, *args, **kwargs):
+        FakeLogger.__init__(self, *args, **kwargs)
+        self.formatter = logging.Formatter("%(server)s: %(message)s")
+
+    def handle(self, record):
+        self._handle(record)
+        print self.formatter.format(record)
+
+    def write(self, *args):
+        print args
+
+
+def debug_logger(name='test'):
+    """get a named adapted debug logger"""
+    return LogAdapter(DebugLogger(), name)
+
+
 original_syslog_handler = logging.handlers.SysLogHandler


@@ -446,6 +465,8 @@ def fake_http_connect(*code_iter, **kwargs):
             self.body = body
             self.headers = headers or {}
             self.timestamp = timestamp
+            if kwargs.get('slow') and isinstance(kwargs['slow'], list):
+                kwargs['slow'][0] -= 1

         def getresponse(self):
             if kwargs.get('raise_exc'):
@@ -489,13 +510,18 @@ def fake_http_connect(*code_iter, **kwargs):
                     headers['x-container-timestamp'] = '1'
                 except StopIteration:
                     pass
-            if 'slow' in kwargs:
+            if self.am_slow():
                 headers['content-length'] = '4'
             headers.update(self.headers)
             return headers.items()

+        def am_slow(self):
+            if kwargs.get('slow') and isinstance(kwargs['slow'], list):
+                return kwargs['slow'][0] >= 0
+            return bool(kwargs.get('slow'))
+
         def read(self, amt=None):
-            if 'slow' in kwargs:
+            if self.am_slow():
                 if self.sent < 4:
                     self.sent += 1
                     sleep(0.1)
@@ -505,7 +531,7 @@ def fake_http_connect(*code_iter, **kwargs):
             return rv

         def send(self, amt=None):
-            if 'slow' in kwargs:
+            if self.am_slow():
                 if self.received < 4:
                     self.received += 1
                     sleep(0.1)
@@ -50,7 +50,7 @@ from swift.common.constraints import MAX_META_NAME_LENGTH, \
     MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
     MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH
 from swift.common import utils
-from swift.common.utils import mkdirs, normalize_timestamp
+from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
 from swift.common.wsgi import monkey_patch_mimetools
 from swift.proxy.controllers.obj import SegmentedIterable
 from swift.proxy.controllers.base import get_container_memcache_key, \
@@ -66,7 +66,7 @@ logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
 STATIC_TIME = time.time()
 _request_instances = weakref.WeakKeyDictionary()
 _test_coros = _test_servers = _test_sockets = _orig_container_listing_limit = \
-    _testdir = None
+    _testdir = _orig_SysLogHandler = None


 def request_init(self, *args, **kwargs):
@@ -78,7 +78,9 @@ def request_init(self, *args, **kwargs):
 def do_setup(the_object_server):
     utils.HASH_PATH_SUFFIX = 'endcap'
     global _testdir, _test_servers, _test_sockets, \
-        _orig_container_listing_limit, _test_coros
+        _orig_container_listing_limit, _test_coros, _orig_SysLogHandler
+    _orig_SysLogHandler = utils.SysLogHandler
+    utils.SysLogHandler = mock.MagicMock()
     Request._orig_init = Request.__init__
     Request.__init__ = request_init
     monkey_patch_mimetools()
@@ -201,6 +203,7 @@ def teardown():
             _orig_container_listing_limit
     rmtree(os.path.dirname(_testdir))
     Request.__init__ = Request._orig_init
+    utils.SysLogHandler = _orig_SysLogHandler


 def sortHeaderNames(headerNames):
@@ -656,6 +659,34 @@ class TestProxyServer(unittest.TestCase):
                       {'region': 2, 'zone': 1, 'ip': '127.0.0.1'}]
         self.assertEquals(exp_sorted, app_sorted)

+    def test_info_defaults(self):
+        app = proxy_server.Application({}, FakeMemcache(),
+                                       account_ring=FakeRing(),
+                                       container_ring=FakeRing(),
+                                       object_ring=FakeRing())
+
+        self.assertTrue(app.expose_info)
+        self.assertTrue(isinstance(app.disallowed_sections, list))
+        self.assertEqual(0, len(app.disallowed_sections))
+        self.assertTrue(app.admin_key is None)
+
+    def test_get_info_controller(self):
+        path = '/info'
+        app = proxy_server.Application({}, FakeMemcache(),
+                                       account_ring=FakeRing(),
+                                       container_ring=FakeRing(),
+                                       object_ring=FakeRing())
+
+        controller, path_parts = app.get_controller(path)
+
+        self.assertTrue('version' in path_parts)
+        self.assertTrue(path_parts['version'] is None)
+        self.assertTrue('disallowed_sections' in path_parts)
+        self.assertTrue('expose_info' in path_parts)
+        self.assertTrue('admin_key' in path_parts)
+
+        self.assertEqual(controller.__name__, 'InfoController')
+

 class TestObjectController(unittest.TestCase):

@@ -825,7 +856,7 @@ class TestObjectController(unittest.TestCase):

             controller = \
                 proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg')
-            controller.error_limit(
+            self.app.error_limit(
                 self.app.object_ring.get_part_nodes(1)[0], 'test')
             set_http_connect(200, 200,   # account, container
                              201, 201, 201,   # 3 working backends
@@ -2282,6 +2313,73 @@ class TestObjectController(unittest.TestCase):
                 got_exc = True
             self.assert_(got_exc)

+    def test_node_read_timeout_retry(self):
+        with save_globals():
+            self.app.account_ring.get_nodes('account')
+            for dev in self.app.account_ring.devs.values():
+                dev['ip'] = '127.0.0.1'
+                dev['port'] = 1
+            self.app.container_ring.get_nodes('account')
+            for dev in self.app.container_ring.devs.values():
+                dev['ip'] = '127.0.0.1'
+                dev['port'] = 1
+            self.app.object_ring.get_nodes('account')
+            for dev in self.app.object_ring.devs.values():
+                dev['ip'] = '127.0.0.1'
+                dev['port'] = 1
+            req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
+            self.app.update_request(req)
+
+            self.app.node_timeout = 0.1
+            set_http_connect(200, 200, 200, slow=[3])
+            resp = req.get_response(self.app)
+            got_exc = False
+            try:
+                resp.body
+            except ChunkReadTimeout:
+                got_exc = True
+            self.assert_(got_exc)
+
+            set_http_connect(200, 200, 200, body='lalala', slow=[2])
+            resp = req.get_response(self.app)
+            got_exc = False
+            try:
+                self.assertEquals(resp.body, 'lalala')
+            except ChunkReadTimeout:
+                got_exc = True
+            self.assert_(not got_exc)
+
+            set_http_connect(200, 200, 200, body='lalala', slow=[2],
+                             etags=['a', 'a', 'a'])
+            resp = req.get_response(self.app)
+            got_exc = False
+            try:
+                self.assertEquals(resp.body, 'lalala')
+            except ChunkReadTimeout:
+                got_exc = True
+            self.assert_(not got_exc)
+
+            set_http_connect(200, 200, 200, body='lalala', slow=[2],
+                             etags=['a', 'b', 'a'])
+            resp = req.get_response(self.app)
+            got_exc = False
+            try:
+                self.assertEquals(resp.body, 'lalala')
+            except ChunkReadTimeout:
+                got_exc = True
+            self.assert_(not got_exc)
+
+            req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
+            set_http_connect(200, 200, 200, body='lalala', slow=[2],
+                             etags=['a', 'b', 'b'])
+            resp = req.get_response(self.app)
+            got_exc = False
+            try:
+                resp.body
+            except ChunkReadTimeout:
+                got_exc = True
+            self.assert_(got_exc)
+
     def test_node_write_timeout(self):
         with save_globals():
             self.app.account_ring.get_nodes('account')
@@ -2320,44 +2418,35 @@ class TestObjectController(unittest.TestCase):
         with save_globals():
             try:
                 self.app.object_ring.max_more_nodes = 2
-                controller = proxy_server.ObjectController(self.app, 'account',
-                                                           'container',
-                                                           'object')
                 partition, nodes = self.app.object_ring.get_nodes('account',
                                                                   'container',
                                                                   'object')
                 collected_nodes = []
-                for node in controller.iter_nodes(self.app.object_ring,
-                                                  partition):
+                for node in self.app.iter_nodes(self.app.object_ring,
+                                                partition):
                     collected_nodes.append(node)
                 self.assertEquals(len(collected_nodes), 5)

                 self.app.object_ring.max_more_nodes = 20
+                self.app.request_node_count = lambda r: 20
-                controller = proxy_server.ObjectController(self.app, 'account',
-                                                           'container',
-                                                           'object')
                 partition, nodes = self.app.object_ring.get_nodes('account',
                                                                   'container',
                                                                   'object')
                 collected_nodes = []
-                for node in controller.iter_nodes(self.app.object_ring,
-                                                  partition):
+                for node in self.app.iter_nodes(self.app.object_ring,
+                                                partition):
                     collected_nodes.append(node)
                 self.assertEquals(len(collected_nodes), 9)

                 self.app.log_handoffs = True
                 self.app.logger = FakeLogger()
                 self.app.object_ring.max_more_nodes = 2
-                controller = proxy_server.ObjectController(self.app, 'account',
-                                                           'container',
-                                                           'object')
                 partition, nodes = self.app.object_ring.get_nodes('account',
                                                                   'container',
                                                                   'object')
                 collected_nodes = []
-                for node in controller.iter_nodes(self.app.object_ring,
-                                                  partition):
+                for node in self.app.iter_nodes(self.app.object_ring,
+                                                partition):
                     collected_nodes.append(node)
                 self.assertEquals(len(collected_nodes), 5)
                 self.assertEquals(
@@ -2368,15 +2457,12 @@ class TestObjectController(unittest.TestCase):
                 self.app.log_handoffs = False
                 self.app.logger = FakeLogger()
                 self.app.object_ring.max_more_nodes = 2
-                controller = proxy_server.ObjectController(self.app, 'account',
-                                                           'container',
-                                                           'object')
                 partition, nodes = self.app.object_ring.get_nodes('account',
                                                                   'container',
                                                                   'object')
                 collected_nodes = []
-                for node in controller.iter_nodes(self.app.object_ring,
-                                                  partition):
+                for node in self.app.iter_nodes(self.app.object_ring,
+                                                partition):
                     collected_nodes.append(node)
                 self.assertEquals(len(collected_nodes), 5)
                 self.assertEquals(self.app.logger.log_dict['warning'], [])
@@ -2385,21 +2471,19 @@ class TestObjectController(unittest.TestCase):

     def test_iter_nodes_calls_sort_nodes(self):
         with mock.patch.object(self.app, 'sort_nodes') as sort_nodes:
-            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
-            for node in controller.iter_nodes(self.app.object_ring, 0):
+            for node in self.app.iter_nodes(self.app.object_ring, 0):
                 pass
             sort_nodes.assert_called_once_with(
                 self.app.object_ring.get_part_nodes(0))

     def test_iter_nodes_skips_error_limited(self):
         with mock.patch.object(self.app, 'sort_nodes', lambda n: n):
-            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
-            first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
-            second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
+            first_nodes = list(self.app.iter_nodes(self.app.object_ring, 0))
+            second_nodes = list(self.app.iter_nodes(self.app.object_ring, 0))
             self.assertTrue(first_nodes[0] in second_nodes)

-            controller.error_limit(first_nodes[0], 'test')
-            second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
+            self.app.error_limit(first_nodes[0], 'test')
+            second_nodes = list(self.app.iter_nodes(self.app.object_ring, 0))
             self.assertTrue(first_nodes[0] not in second_nodes)

     def test_iter_nodes_gives_extra_if_error_limited_inline(self):
@@ -2408,33 +2492,31 @@ class TestObjectController(unittest.TestCase):
                 mock.patch.object(self.app, 'request_node_count',
                                   lambda r: 6),
                 mock.patch.object(self.app.object_ring, 'max_more_nodes', 99)):
-            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
-            first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
+            first_nodes = list(self.app.iter_nodes(self.app.object_ring, 0))
             second_nodes = []
-            for node in controller.iter_nodes(self.app.object_ring, 0):
+            for node in self.app.iter_nodes(self.app.object_ring, 0):
                 if not second_nodes:
-                    controller.error_limit(node, 'test')
+                    self.app.error_limit(node, 'test')
                 second_nodes.append(node)
             self.assertEquals(len(first_nodes), 6)
             self.assertEquals(len(second_nodes), 7)

     def test_iter_nodes_with_custom_node_iter(self):
-        controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
         node_list = [dict(id=n) for n in xrange(10)]
         with nested(
                 mock.patch.object(self.app, 'sort_nodes', lambda n: n),
                 mock.patch.object(self.app, 'request_node_count',
                                   lambda r: 3)):
-            got_nodes = list(controller.iter_nodes(self.app.object_ring, 0,
-                                                   node_iter=iter(node_list)))
+            got_nodes = list(self.app.iter_nodes(self.app.object_ring, 0,
+                                                 node_iter=iter(node_list)))
             self.assertEqual(node_list[:3], got_nodes)

         with nested(
                 mock.patch.object(self.app, 'sort_nodes', lambda n: n),
                 mock.patch.object(self.app, 'request_node_count',
                                   lambda r: 1000000)):
-            got_nodes = list(controller.iter_nodes(self.app.object_ring, 0,
-                                                   node_iter=iter(node_list)))
+            got_nodes = list(self.app.iter_nodes(self.app.object_ring, 0,
+                                                 node_iter=iter(node_list)))
             self.assertEqual(node_list, got_nodes)

     def test_best_response_sets_headers(self):
@@ -4609,6 +4691,7 @@ class TestObjectController(unittest.TestCase):
         fd.read(1)
         fd.close()
         sock.close()
+        sleep(0)  # let eventlet do it's thing
         # Make sure the GC is run again for pythons without reference counting
         for i in xrange(4):
             gc.collect()
@@ -6743,6 +6826,65 @@ class TestSegmentedIterable(unittest.TestCase):
         self.assertEquals(''.join(segit.app_iter_range(5, 7)), '34')


+class TestProxyObjectPerformance(unittest.TestCase):
+
+    def setUp(self):
+        # This is just a simple test that can be used to verify and debug the
+        # various data paths between the proxy server and the object
+        # server. Used as a play ground to debug buffer sizes for sockets.
+        prolis = _test_sockets[0]
+        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
+        # Client is transmitting in 2 MB chunks
+        fd = sock.makefile('wb', 2 * 1024 * 1024)
+        # Small, fast for testing
+        obj_len = 2 * 64 * 1024
+        # Use 1 GB or more for measurements
+        #obj_len = 2 * 512 * 1024 * 1024
+        self.path = '/v1/a/c/o.large'
+        fd.write('PUT %s HTTP/1.1\r\n'
+                 'Host: localhost\r\n'
+                 'Connection: close\r\n'
+                 'X-Storage-Token: t\r\n'
+                 'Content-Length: %s\r\n'
+                 'Content-Type: application/octet-stream\r\n'
+                 '\r\n' % (self.path, str(obj_len)))
+        fd.write('a' * obj_len)
+        fd.flush()
+        headers = readuntil2crlfs(fd)
+        exp = 'HTTP/1.1 201'
+        self.assertEqual(headers[:len(exp)], exp)
+        self.obj_len = obj_len
+
+    def test_GET_debug_large_file(self):
+        for i in range(0, 10):
+            start = time.time()
+
+            prolis = _test_sockets[0]
+            sock = connect_tcp(('localhost', prolis.getsockname()[1]))
+            # Client is reading in 2 MB chunks
+            fd = sock.makefile('wb', 2 * 1024 * 1024)
+            fd.write('GET %s HTTP/1.1\r\n'
+                     'Host: localhost\r\n'
+                     'Connection: close\r\n'
+                     'X-Storage-Token: t\r\n'
+                     '\r\n' % self.path)
+            fd.flush()
+            headers = readuntil2crlfs(fd)
+            exp = 'HTTP/1.1 200'
+            self.assertEqual(headers[:len(exp)], exp)
+
+            total = 0
+            while True:
+                buf = fd.read(100000)
+                if not buf:
+                    break
+                total += len(buf)
+            self.assertEqual(total, self.obj_len)
+
+            end = time.time()
+            print "Run %02d took %07.03f" % (i, end - start)
+
+
 if __name__ == '__main__':
     setup()
     try:
tox.ini
@@ -11,7 +11,7 @@ setenv = VIRTUAL_ENV={envdir}
          NOSE_OPENSTACK_SHOW_ELAPSED=1
          NOSE_OPENSTACK_STDOUT=1
 deps =
-  https://launchpad.net/gluster-swift/icehouse/1.10.1/+download/swift-1.10.0.33.g4bfe674.tar.gz
+  https://launchpad.net/gluster-swift/icehouse/1.10.2/+download/swift-1.10.0.172.g9fe7748.tar.gz
   --download-cache={homedir}/.pipcache
   -r{toxinidir}/tools/test-requires
 changedir = {toxinidir}/test/unit