py3: bulk -- alternate approach
Related-Change: I3603247e0c36299de2107aa8e494a3f87647696f
Change-Id: Iaf14c1ab3e236e6774492ed711fccef066a16fca
This commit is contained in: parent fd73353bb1, commit 5cd2a2ac2f
swift/common/middleware/bulk.py

@@ -191,7 +191,7 @@ payload sent to the proxy (the list of objects/containers to be deleted).
 """

 import json
-from six.moves.urllib.parse import quote, unquote
+import six
 import tarfile
 from xml.sax import saxutils
 from time import time
@@ -200,7 +200,8 @@ import zlib
 from swift.common.swob import Request, HTTPBadGateway, \
     HTTPCreated, HTTPBadRequest, HTTPNotFound, HTTPUnauthorized, HTTPOk, \
     HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPNotAcceptable, \
-    HTTPLengthRequired, HTTPException, HTTPServerError, wsgify
+    HTTPLengthRequired, HTTPException, HTTPServerError, wsgify, \
+    bytes_to_wsgi, str_to_wsgi, wsgi_unquote, wsgi_quote, wsgi_to_str
 from swift.common.utils import get_logger, register_swift_info, \
     StreamingPile
 from swift.common import constraints
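Everything below leans on the swob WSGI-string helpers imported here. As a rough mental model (a sketch for illustration only, not the exact swift.common.swob code): a WSGI string is a native str that, on py3, smuggles raw bytes through latin-1 code points.

# Sketch of the helper semantics this patch assumes; the real
# implementations live in swift.common.swob.
import six

def bytes_to_wsgi(byte_str):
    # py3: map each raw byte to the latin-1 code point of the same value
    return byte_str if six.PY2 else byte_str.decode('latin-1')

def wsgi_to_str(wsgi_str):
    # py3: recover the underlying bytes, then reinterpret them as UTF-8,
    # preserving undecodable bytes via surrogateescape
    if six.PY2:
        return wsgi_str
    return wsgi_str.encode('latin-1').decode('utf-8', 'surrogateescape')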
@@ -234,7 +235,7 @@ def get_response_body(data_format, data_dict, error_list, root_tag):
     """
     if data_format == 'application/json':
         data_dict['Errors'] = error_list
-        return json.dumps(data_dict)
+        return json.dumps(data_dict).encode('ascii')
     if data_format and data_format.endswith('/xml'):
         output = ['<', root_tag, '>\n']
         for key in sorted(data_dict):
@@ -251,7 +252,9 @@ def get_response_body(data_format, data_dict, error_list, root_tag):
                 saxutils.escape(status), '</status></object>\n',
             ])
         output.extend(['</errors>\n</', root_tag, '>\n'])
-        return ''.join(output)
+        if six.PY2:
+            return ''.join(output)
+        return ''.join(output).encode('utf-8')

     output = []
     for key in sorted(data_dict):
@@ -260,7 +263,9 @@ def get_response_body(data_format, data_dict, error_list, root_tag):
     output.extend(
         '%s, %s\n' % (name, status)
        for name, status in error_list)
-    return ''.join(output)
+    if six.PY2:
+        return ''.join(output)
+    return ''.join(output).encode('utf-8')


 def pax_key_to_swift_header(pax_key):
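With both branches above now encoding on py3, get_response_body returns bytes for every data_format, so callers can hand the body straight to the WSGI layer. A quick illustration with made-up arguments:

body = get_response_body(
    'application/json', {'Number Deleted': 1}, [], 'delete')
# py2: a native str; py3: b'{"Number Deleted": 1, "Errors": []}'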
@@ -269,10 +274,14 @@ def pax_key_to_swift_header(pax_key):
         return "Content-Type"
     elif pax_key.startswith(u"SCHILY.xattr.user.meta."):
         useful_part = pax_key[len(u"SCHILY.xattr.user.meta."):]
-        return "X-Object-Meta-" + useful_part.encode("utf-8")
+        if six.PY2:
+            return "X-Object-Meta-" + useful_part.encode("utf-8")
+        return str_to_wsgi("X-Object-Meta-" + useful_part)
     elif pax_key.startswith(u"LIBARCHIVE.xattr.user.meta."):
         useful_part = pax_key[len(u"LIBARCHIVE.xattr.user.meta."):]
-        return "X-Object-Meta-" + useful_part.encode("utf-8")
+        if six.PY2:
+            return "X-Object-Meta-" + useful_part.encode("utf-8")
+        return str_to_wsgi("X-Object-Meta-" + useful_part)
     else:
         # You can get things like atime/mtime/ctime or filesystem ACLs in
         # pax headers; those aren't really user metadata. The same goes for
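For example (hypothetical pax key), a user-metadata key from an uploaded tar now becomes a WSGI-safe header on py3 instead of the old str + bytes concatenation, which would raise a TypeError there:

pax_key_to_swift_header(u'SCHILY.xattr.user.meta.color')
# py2 and py3 alike: 'X-Object-Meta-color' as a native str (WSGI-safe)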
@@ -308,7 +317,7 @@ class Bulk(object):
         :raises CreateContainerError: when unable to create container
         """
         head_cont_req = make_subrequest(
-            req.environ, method='HEAD', path=quote(container_path),
+            req.environ, method='HEAD', path=wsgi_quote(container_path),
             headers={'X-Auth-Token': req.headers.get('X-Auth-Token')},
             swift_source='EA')
         resp = head_cont_req.get_response(self.app)
@@ -316,7 +325,7 @@ class Bulk(object):
             return False
         if resp.status_int == HTTP_NOT_FOUND:
             create_cont_req = make_subrequest(
-                req.environ, method='PUT', path=quote(container_path),
+                req.environ, method='PUT', path=wsgi_quote(container_path),
                 headers={'X-Auth-Token': req.headers.get('X-Auth-Token')},
                 swift_source='EA')
             resp = create_cont_req.get_response(self.app)
@@ -333,7 +342,7 @@
         :returns: a list of the contents of req.body when separated by newline.
         :raises HTTPException: on failures
         """
-        line = ''
+        line = b''
         data_remaining = True
         objs_to_delete = []
         if req.content_length is None and \
@@ -341,21 +350,31 @@
             raise HTTPLengthRequired(request=req)

         while data_remaining:
-            if '\n' in line:
-                obj_to_delete, line = line.split('\n', 1)
-                obj_to_delete = obj_to_delete.strip()
-                objs_to_delete.append(
-                    {'name': unquote(obj_to_delete)})
+            if b'\n' in line:
+                obj_to_delete, line = line.split(b'\n', 1)
+                if six.PY2:
+                    obj_to_delete = wsgi_unquote(obj_to_delete.strip())
+                else:
+                    # yeah, all this chaining is pretty terrible...
+                    # but it gets even worse trying to use UTF-8 and
+                    # errors='surrogateescape' when dealing with terrible
+                    # input like b'\xe2%98\x83'
+                    obj_to_delete = wsgi_to_str(wsgi_unquote(
+                        bytes_to_wsgi(obj_to_delete.strip())))
+                objs_to_delete.append({'name': obj_to_delete})
             else:
                 data = req.body_file.read(self.max_path_length)
                 if data:
                     line += data
                 else:
                     data_remaining = False
-                    obj_to_delete = line.strip()
+                    if six.PY2:
+                        obj_to_delete = wsgi_unquote(line.strip())
+                    else:
+                        obj_to_delete = wsgi_to_str(wsgi_unquote(
+                            bytes_to_wsgi(line.strip())))
                     if obj_to_delete:
-                        objs_to_delete.append(
-                            {'name': unquote(obj_to_delete)})
+                        objs_to_delete.append({'name': obj_to_delete})
             if len(objs_to_delete) > self.max_deletes_per_request:
                 raise HTTPRequestEntityTooLarge(
                     'Maximum Bulk Deletes: %d per request' %
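Tracing the py3 branch with the "terrible input" from the comment shows why the chaining is worth it. A sketch, assuming the latin-1 helper semantics noted earlier:

raw = b'\xe2%98\x83'          # two raw UTF-8 bytes around a %-escape
step1 = bytes_to_wsgi(raw)    # '\xe2%98\x83' -- bytes as a latin-1 str
step2 = wsgi_unquote(step1)   # '\xe2\x98\x83' -- %98 percent-decoded
step3 = wsgi_to_str(step2)    # u'\u2603' -- UTF-8 decode: a snowman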
@@ -376,15 +395,15 @@

         :params req: a swob Request
         :params objs_to_delete: a list of dictionaries that specifies the
-            objects to be deleted. If None, uses self.get_objs_to_delete to
-            query request.
+            (native string) objects to be deleted. If None, uses
+            self.get_objs_to_delete to query request.
         """
         last_yield = time()
         if out_content_type and out_content_type.endswith('/xml'):
-            to_yield = '<?xml version="1.0" encoding="UTF-8"?>\n'
+            to_yield = b'<?xml version="1.0" encoding="UTF-8"?>\n'
         else:
-            to_yield = ' '
-        separator = ''
+            to_yield = b' '
+        separator = b''
         failed_files = []
         resp_dict = {'Response Status': HTTPOk().status,
                      'Response Body': '',
@@ -399,6 +418,8 @@
             vrs, account, _junk = req.split_path(2, 3, True)
         except ValueError:
             raise HTTPNotFound(request=req)
+        vrs = wsgi_to_str(vrs)
+        account = wsgi_to_str(account)

         incoming_format = req.headers.get('Content-Type')
         if incoming_format and \
@@ -422,13 +443,13 @@
                     resp_dict['Number Not Found'] += 1
                 else:
                     failed_files.append([
-                        quote(obj_name),
+                        wsgi_quote(str_to_wsgi(obj_name)),
                         obj_to_delete['error']['message']])
                 continue
             delete_path = '/'.join(['', vrs, account,
                                     obj_name.lstrip('/')])
             if not constraints.check_utf8(delete_path):
-                failed_files.append([quote(obj_name),
+                failed_files.append([wsgi_quote(str_to_wsgi(obj_name)),
                                      HTTPPreconditionFailed().status])
                 continue
             yield (obj_name, delete_path)
@@ -443,7 +464,8 @@

         def do_delete(obj_name, delete_path):
             delete_obj_req = make_subrequest(
-                req.environ, method='DELETE', path=quote(delete_path),
+                req.environ, method='DELETE',
+                path=wsgi_quote(str_to_wsgi(delete_path)),
                 headers={'X-Auth-Token': req.headers.get('X-Auth-Token')},
                 body='', agent='%(orig)s ' + user_agent,
                 swift_source=swift_source)
@@ -456,7 +478,7 @@
             if last_yield + self.yield_frequency < time():
                 last_yield = time()
                 yield to_yield
-                to_yield, separator = ' ', '\r\n\r\n'
+                to_yield, separator = b' ', b'\r\n\r\n'
             self._process_delete(resp, pile, obj_name,
                                  resp_dict, failed_files,
                                  failed_file_response, retry)
@@ -466,7 +488,7 @@
             if last_yield + self.yield_frequency < time():
                 last_yield = time()
                 yield to_yield
-                to_yield, separator = ' ', '\r\n\r\n'
+                to_yield, separator = b' ', b'\r\n\r\n'
             # Don't pass in the pile, as we shouldn't retry
             self._process_delete(
                 resp, None, obj_name, resp_dict,
@@ -484,7 +506,7 @@

         except HTTPException as err:
             resp_dict['Response Status'] = err.status
-            resp_dict['Response Body'] = err.body
+            resp_dict['Response Body'] = err.body.decode('utf-8')
         except Exception:
             self.logger.exception('Error in bulk delete.')
             resp_dict['Response Status'] = HTTPServerError().status
@@ -511,10 +533,10 @@
         failed_files = []
         last_yield = time()
         if out_content_type and out_content_type.endswith('/xml'):
-            to_yield = '<?xml version="1.0" encoding="UTF-8"?>\n'
+            to_yield = b'<?xml version="1.0" encoding="UTF-8"?>\n'
         else:
-            to_yield = ' '
-        separator = ''
+            to_yield = b' '
+        separator = b''
         containers_accessed = set()
         req.environ['eventlet.minimum_write_chunk_size'] = 0
         try:
@@ -539,13 +561,16 @@
                 if last_yield + self.yield_frequency < time():
                     last_yield = time()
                     yield to_yield
-                    to_yield, separator = ' ', '\r\n\r\n'
-                tar_info = next(tar)
+                    to_yield, separator = b' ', b'\r\n\r\n'
+                tar_info = tar.next()
                 if tar_info is None or \
                         len(failed_files) >= self.max_failed_extractions:
                     break
                 if tar_info.isfile():
                     obj_path = tar_info.name
+                    if not six.PY2:
+                        obj_path = obj_path.encode('utf-8', 'surrogateescape')
+                        obj_path = bytes_to_wsgi(obj_path)
                     if obj_path.startswith('./'):
                         obj_path = obj_path[2:]
                     obj_path = obj_path.lstrip('/')
@@ -557,14 +582,14 @@
                     destination = '/'.join(
                         ['', vrs, account, obj_path])
                     container = obj_path.split('/', 1)[0]
-                    if not constraints.check_utf8(destination):
+                    if not constraints.check_utf8(wsgi_to_str(destination)):
                         failed_files.append(
-                            [quote(obj_path[:self.max_path_length]),
+                            [wsgi_quote(obj_path[:self.max_path_length]),
                              HTTPPreconditionFailed().status])
                         continue
                     if tar_info.size > constraints.MAX_FILE_SIZE:
                         failed_files.append([
-                            quote(obj_path[:self.max_path_length]),
+                            wsgi_quote(obj_path[:self.max_path_length]),
                             HTTPRequestEntityTooLarge().status])
                         continue
                     container_failure = None
@@ -581,13 +606,13 @@
                             # the object PUT to this container still may
                             # succeed if acls are set
                             container_failure = [
-                                quote(cont_path[:self.max_path_length]),
+                                wsgi_quote(cont_path[:self.max_path_length]),
                                 err.status]
                             if err.status_int == HTTP_UNAUTHORIZED:
                                 raise HTTPUnauthorized(request=req)
                     except ValueError:
                         failed_files.append([
-                            quote(obj_path[:self.max_path_length]),
+                            wsgi_quote(obj_path[:self.max_path_length]),
                             HTTPBadRequest().status])
                         continue

@@ -598,7 +623,8 @@
                     }

                     create_obj_req = make_subrequest(
-                        req.environ, method='PUT', path=quote(destination),
+                        req.environ, method='PUT',
+                        path=wsgi_quote(destination),
                         headers=create_headers,
                         agent='%(orig)s BulkExpand', swift_source='EA')
                     create_obj_req.environ['wsgi.input'] = tar_file
@@ -621,13 +647,13 @@
                             failed_files.append(container_failure)
                         if resp.status_int == HTTP_UNAUTHORIZED:
                             failed_files.append([
-                                quote(obj_path[:self.max_path_length]),
+                                wsgi_quote(obj_path[:self.max_path_length]),
                                 HTTPUnauthorized().status])
                             raise HTTPUnauthorized(request=req)
                         if resp.status_int // 100 == 5:
                             failed_response_type = HTTPBadGateway
                         failed_files.append([
-                            quote(obj_path[:self.max_path_length]),
+                            wsgi_quote(obj_path[:self.max_path_length]),
                             resp.status])

             if failed_files:
@@ -638,7 +664,7 @@

         except HTTPException as err:
             resp_dict['Response Status'] = err.status
-            resp_dict['Response Body'] = err.body
+            resp_dict['Response Body'] = err.body.decode('utf-8')
         except (tarfile.TarError, zlib.error) as tar_error:
             resp_dict['Response Status'] = HTTPBadRequest().status
             resp_dict['Response Body'] = 'Invalid Tar File: %s' % tar_error
@@ -656,7 +682,7 @@
         elif resp.status_int == HTTP_NOT_FOUND:
             resp_dict['Number Not Found'] += 1
         elif resp.status_int == HTTP_UNAUTHORIZED:
-            failed_files.append([quote(obj_name),
+            failed_files.append([wsgi_quote(str_to_wsgi(obj_name)),
                                  HTTPUnauthorized().status])
         elif resp.status_int == HTTP_CONFLICT and pile and \
                 self.retry_count > 0 and self.retry_count > retry:
@@ -671,7 +697,8 @@
         else:
             if resp.status_int // 100 == 5:
                 failed_file_response['type'] = HTTPBadGateway
-            failed_files.append([quote(obj_name), resp.status])
+            failed_files.append([wsgi_quote(str_to_wsgi(obj_name)),
+                                 resp.status])

     @wsgify
     def __call__(self, req):
test/unit/common/middleware/test_bulk.py

@@ -16,7 +16,6 @@

 from collections import Counter
 import numbers
-from six.moves import urllib
 import unittest
 import os
 import tarfile
@@ -28,6 +27,7 @@ from shutil import rmtree
 from tempfile import mkdtemp
 from eventlet import sleep
 from mock import patch, call
+from test.unit import debug_logger
 from test.unit.common.middleware.helpers import FakeSwift
 from swift.common import utils, constraints
 from swift.common.header_key_dict import HeaderKeyDict
@@ -100,42 +100,64 @@ def build_dir_tree(start_path, tree_obj):
     if isinstance(tree_obj, list):
         for obj in tree_obj:
             build_dir_tree(start_path, obj)
+        return
     if isinstance(tree_obj, dict):
         for dir_name, obj in tree_obj.items():
             dir_path = os.path.join(start_path, dir_name)
             os.mkdir(dir_path)
             build_dir_tree(dir_path, obj)
-    if isinstance(tree_obj, six.text_type):
+        return
+    if six.PY2 and isinstance(tree_obj, six.text_type):
         tree_obj = tree_obj.encode('utf8')
     if isinstance(tree_obj, str):
         obj_path = os.path.join(start_path, tree_obj)
         with open(obj_path, 'w+') as tree_file:
             tree_file.write('testing')
+        return
+    raise TypeError("can't build tree from %r" % tree_obj)


 def build_tar_tree(tar, start_path, tree_obj, base_path=''):
-    if isinstance(start_path, six.text_type):
-        start_path = start_path.encode('utf8')
-    if isinstance(tree_obj, six.text_type):
-        tree_obj = tree_obj.encode('utf8')
+    if six.PY2:
+        if isinstance(start_path, six.text_type):
+            start_path = start_path.encode('utf8')
+        if isinstance(tree_obj, six.text_type):
+            tree_obj = tree_obj.encode('utf8')
+    else:
+        if isinstance(start_path, bytes):
+            start_path = start_path.decode('utf8', 'surrogateescape')
+        if isinstance(tree_obj, bytes):
+            tree_obj = tree_obj.decode('utf8', 'surrogateescape')
+
     if isinstance(tree_obj, list):
         for obj in tree_obj:
             build_tar_tree(tar, start_path, obj, base_path=base_path)
+        return
     if isinstance(tree_obj, dict):
         for dir_name, obj in tree_obj.items():
+            if six.PY2 and isinstance(dir_name, six.text_type):
+                dir_name = dir_name.encode('utf8')
+            elif not six.PY2 and isinstance(dir_name, bytes):
+                dir_name = dir_name.decode('utf8', 'surrogateescape')
             dir_path = os.path.join(start_path, dir_name)
             tar_info = tarfile.TarInfo(dir_path[len(base_path):])
             tar_info.type = tarfile.DIRTYPE
             tar.addfile(tar_info)
             build_tar_tree(tar, dir_path, obj, base_path=base_path)
-    if isinstance(tree_obj, six.text_type):
-        tree_obj = tree_obj.encode('utf8')
+        return
     if isinstance(tree_obj, str):
         obj_path = os.path.join(start_path, tree_obj)
         tar_info = tarfile.TarInfo('./' + obj_path[len(base_path):])
         tar.addfile(tar_info)
+        return
+    raise TypeError("can't build tree from %r" % tree_obj)


 class TestUntarMetadata(unittest.TestCase):
     def setUp(self):
         self.app = FakeSwift()
         self.bulk = bulk.filter_factory({})(self.app)
+        self.bulk.logger = debug_logger()
         self.testdir = mkdtemp(suffix='tmp_test_bulk')

     def tearDown(self):
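The reworked helpers normalize bytes/str names up front, so a single tree definition drives both py2 and py3. A hypothetical call mixing the two:

with tarfile.open('sample.tar', 'w') as tar:
    # str and bytes names may be mixed; non-UTF-8 bytes survive on py3
    # via surrogateescape
    build_tar_tree(tar, 'start/', [{'dir1': ['file1', b'file2']}],
                   base_path='start/')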
@@ -174,7 +196,7 @@ class TestUntarMetadata(unittest.TestCase):
         #
         # Still, we'll support uploads with both. Just heap more code on the
         # problem until you can forget it's under there.
-        with open(os.path.join(self.testdir, "obj1")) as fh1:
+        with open(os.path.join(self.testdir, "obj1"), 'rb') as fh1:
             tar_info1 = tar_file.gettarinfo(fileobj=fh1,
                                             arcname="obj1")
             tar_info1.pax_headers[u'SCHILY.xattr.user.mime_type'] = \
@@ -186,7 +208,7 @@
                 u'gigantic bucket of coffee'
             tar_file.addfile(tar_info1, fh1)

-        with open(os.path.join(self.testdir, "obj2")) as fh2:
+        with open(os.path.join(self.testdir, "obj2"), 'rb') as fh2:
             tar_info2 = tar_file.gettarinfo(fileobj=fh2,
                                             arcname="obj2")
             tar_info2.pax_headers[
@@ -235,6 +257,7 @@ class TestUntar(unittest.TestCase):
     def setUp(self):
         self.app = FakeApp()
         self.bulk = bulk.filter_factory({})(self.app)
+        self.bulk.logger = debug_logger()
         self.testdir = mkdtemp(suffix='tmp_test_bulk')

     def tearDown(self):
@@ -247,7 +270,7 @@
             req, compress_format, out_content_type=out_content_type)
         first_chunk = next(iter)
         self.assertEqual(req.environ['eventlet.minimum_write_chunk_size'], 0)
-        resp_body = first_chunk + ''.join(iter)
+        resp_body = first_chunk + b''.join(iter)
         return resp_body

     def test_create_container_for_path(self):
@@ -273,7 +296,7 @@
                      {'sub_dir2': ['sub2_file1', u'test obj \u2661']},
                      'sub_file1',
                      {'sub_dir3': [{'sub4_dir1': '../sub4 file1'}]},
-                     {'sub_dir4': None},
+                     {'sub_dir4': []},
                      ]}]

         build_dir_tree(self.testdir, dir_tree)
@@ -289,7 +312,7 @@
             tar.close()
             req = Request.blank('/tar_works/acc/cont/')
             req.environ['wsgi.input'] = open(
-                os.path.join(self.testdir, 'tar_works.tar' + extension))
+                os.path.join(self.testdir, 'tar_works.tar' + extension), 'rb')
             req.headers['transfer-encoding'] = 'chunked'
             resp_body = self.handle_extract_and_iter(req, compress_format)
             resp_data = utils.json.loads(resp_body)
@@ -298,15 +321,15 @@
             # test out xml
             req = Request.blank('/tar_works/acc/cont/')
             req.environ['wsgi.input'] = open(
-                os.path.join(self.testdir, 'tar_works.tar' + extension))
+                os.path.join(self.testdir, 'tar_works.tar' + extension), 'rb')
             req.headers['transfer-encoding'] = 'chunked'
             resp_body = self.handle_extract_and_iter(
                 req, compress_format, 'application/xml')
-            self.assertTrue(
-                '<response_status>201 Created</response_status>' in
+            self.assertIn(
+                b'<response_status>201 Created</response_status>',
                 resp_body)
-            self.assertTrue(
-                '<number_files_created>6</number_files_created>' in
+            self.assertIn(
+                b'<number_files_created>6</number_files_created>',
                 resp_body)

             # test out nonexistent format
@@ -314,16 +337,16 @@
                             headers={'Accept': 'good_xml'})
         req.environ['REQUEST_METHOD'] = 'PUT'
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar' + extension))
+            os.path.join(self.testdir, 'tar_works.tar' + extension), 'rb')
         req.headers['transfer-encoding'] = 'chunked'

         def fake_start_response(*args, **kwargs):
             pass

         app_iter = self.bulk(req.environ, fake_start_response)
-        resp_body = ''.join([i for i in app_iter])
+        resp_body = b''.join(app_iter)

-        self.assertTrue('Response Status: 406' in resp_body)
+        self.assertIn(b'Response Status: 406', resp_body)

     def test_extract_call(self):
         base_name = 'base_works_gz'
@@ -344,13 +367,13 @@

         req = Request.blank('/tar_works/acc/cont/?extract-archive=tar.gz')
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar.gz'))
+            os.path.join(self.testdir, 'tar_works.tar.gz'), 'rb')
         self.bulk(req.environ, fake_start_response)
         self.assertEqual(self.app.calls, 1)

         self.app.calls = 0
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar.gz'))
+            os.path.join(self.testdir, 'tar_works.tar.gz'), 'rb')
         req.headers['transfer-encoding'] = 'Chunked'
         req.method = 'PUT'
         app_iter = self.bulk(req.environ, fake_start_response)
@@ -362,9 +385,9 @@
         req.method = 'PUT'
         req.headers['transfer-encoding'] = 'Chunked'
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar.gz'))
+            os.path.join(self.testdir, 'tar_works.tar.gz'), 'rb')
         t = self.bulk(req.environ, fake_start_response)
-        self.assertEqual(t[0], "Unsupported archive format")
+        self.assertEqual(t, [b"Unsupported archive format"])

         tar = tarfile.open(name=os.path.join(self.testdir,
                                              'tar_works.tar'),
@@ -376,20 +399,20 @@
         req.method = 'PUT'
         req.headers['transfer-encoding'] = 'Chunked'
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar'))
+            os.path.join(self.testdir, 'tar_works.tar'), 'rb')
         app_iter = self.bulk(req.environ, fake_start_response)
         list(app_iter)  # iter over resp
         self.assertEqual(self.app.calls, 7)

     def test_bad_container(self):
-        req = Request.blank('/invalid/', body='')
+        req = Request.blank('/invalid/', body=b'')
         resp_body = self.handle_extract_and_iter(req, '')
-        self.assertTrue('404 Not Found' in resp_body)
+        self.assertIn(b'404 Not Found', resp_body)

     def test_content_length_required(self):
         req = Request.blank('/create_cont_fail/acc/cont')
         resp_body = self.handle_extract_and_iter(req, '')
-        self.assertTrue('411 Length Required' in resp_body)
+        self.assertIn(b'411 Length Required', resp_body)

     def test_bad_tar(self):
         req = Request.blank('/create_cont_fail/acc/cont', body='')
@@ -399,7 +422,7 @@

         with patch.object(tarfile, 'open', bad_open):
             resp_body = self.handle_extract_and_iter(req, '')
-            self.assertTrue('400 Bad Request' in resp_body)
+            self.assertIn(b'400 Bad Request', resp_body)

     def build_tar(self, dir_tree=None):
         if not dir_tree:
@@ -424,7 +447,7 @@
         self.build_tar(dir_tree)
         req = Request.blank('/tar_works/acc/')
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         resp_data = utils.json.loads(resp_body)
@@ -435,7 +458,7 @@
         req = Request.blank('/unauth/acc/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         self.assertEqual(self.app.calls, 1)
@@ -448,7 +471,7 @@
         req = Request.blank('/create_obj_unauth/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         self.assertEqual(self.app.calls, 2)
@@ -463,7 +486,7 @@
         req = Request.blank('/tar_works/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         self.assertEqual(self.app.calls, 6)
@@ -478,7 +501,7 @@
         req = Request.blank('/tar_works/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, 'gz')
         self.assertEqual(self.app.calls, 0)
@@ -494,8 +517,8 @@
         self.app.calls = 0
         req = Request.blank('/tar_works/acc/cont/',
                             headers={'Accept': 'application/json'})
-        req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+        req.environ['wsgi.input'] = open(
+            os.path.join(self.testdir, 'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         self.assertEqual(self.app.calls, 5)
@@ -519,7 +542,7 @@
         req = Request.blank('/tar_works/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(
-            os.path.join(self.testdir, 'tar_works.tar'))
+            os.path.join(self.testdir, 'tar_works.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         resp_data = utils.json.loads(resp_body)
@@ -557,7 +580,7 @@
         req = Request.blank('/create_cont_fail/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         resp_data = utils.json.loads(resp_body)
@@ -569,7 +592,7 @@
         req = Request.blank('/create_cont_fail/acc/cont/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'

         def bad_create(req, path):
@@ -586,13 +609,13 @@

     def test_extract_tar_fail_unicode(self):
         dir_tree = [{'sub_dir1': ['sub1_file1']},
-                    {'sub_dir2': ['sub2\xdefile1', 'sub2_file2']},
-                    {'sub_\xdedir3': [{'sub4_dir1': 'sub4_file1'}]}]
+                    {'sub_dir2': [b'sub2\xdefile1', 'sub2_file2']},
+                    {b'sub_\xdedir3': [{'sub4_dir1': 'sub4_file1'}]}]
         self.build_tar(dir_tree)
         req = Request.blank('/tar_works/acc/',
                             headers={'Accept': 'application/json'})
         req.environ['wsgi.input'] = open(os.path.join(self.testdir,
-                                                      'tar_fails.tar'))
+                                                      'tar_fails.tar'), 'rb')
         req.headers['transfer-encoding'] = 'chunked'
         resp_body = self.handle_extract_and_iter(req, '')
         resp_data = utils.json.loads(resp_body)
@@ -608,13 +631,13 @@
         txt_body = bulk.get_response_body(
             'bad_formay', {'hey': 'there'}, [['json > xml', '202 Accepted']],
             "doesn't matter for text")
-        self.assertTrue('hey: there' in txt_body)
+        self.assertIn(b'hey: there', txt_body)
         xml_body = bulk.get_response_body(
             'text/xml', {'hey': 'there'}, [['json > xml', '202 Accepted']],
             'root_tag')
-        self.assertTrue('>' in xml_body)
-        self.assertTrue(xml_body.startswith('<root_tag>\n'))
-        self.assertTrue(xml_body.endswith('\n</root_tag>\n'))
+        self.assertIn(b'>', xml_body)
+        self.assertTrue(xml_body.startswith(b'<root_tag>\n'))
+        self.assertTrue(xml_body.endswith(b'\n</root_tag>\n'))


 class TestDelete(unittest.TestCase):
@@ -623,6 +646,7 @@ class TestDelete(unittest.TestCase):
     def setUp(self):
         self.app = FakeApp()
         self.bulk = bulk.filter_factory(self.conf)(self.app)
+        self.bulk.logger = debug_logger()

     def tearDown(self):
         self.app.calls = 0
@@ -633,7 +657,7 @@
             req, out_content_type=out_content_type)
         first_chunk = next(iter)
         self.assertEqual(req.environ['eventlet.minimum_write_chunk_size'], 0)
-        resp_body = first_chunk + ''.join(iter)
+        resp_body = first_chunk + b''.join(iter)
         return resp_body

     def test_bulk_delete_uses_predefined_object_errors(self):
@@ -645,7 +669,7 @@
             {'name': '/c/file_c', 'error': {'code': HTTP_UNAUTHORIZED,
                                             'message': 'unauthorized'}},
             {'name': '/c/file_d'}]
-        resp_body = ''.join(self.bulk.handle_delete_iter(
+        resp_body = b''.join(self.bulk.handle_delete_iter(
             req, objs_to_delete=objs_to_delete,
             out_content_type='application/json'))
         self.assertEqual(set(self.app.delete_paths),
@@ -756,41 +780,41 @@
         req.environ['wsgi.input'] = BytesIO(data)
         req.content_length = len(data)
         resp_body = self.handle_delete_and_iter(req)
-        self.assertTrue('413 Request Entity Too Large' in resp_body)
+        self.assertIn(b'413 Request Entity Too Large', resp_body)

     def test_bulk_delete_works_unicode(self):
         body = (u'/c/ obj \u2661\r\n'.encode('utf8') +
-                'c/ objbadutf8\r\n' +
-                '/c/f\xdebadutf8\n')
+                b'c/ objbadutf8\r\n' +
+                b'/c/f\xdebadutf8\n')
         req = Request.blank('/delete_works/AUTH_Acc', body=body,
                             headers={'Accept': 'application/json'})
         req.method = 'POST'
         resp_body = self.handle_delete_and_iter(req)
         self.assertEqual(
-            Counter(self.app.delete_paths),
-            Counter(['/delete_works/AUTH_Acc/c/ obj \xe2\x99\xa1',
-                     '/delete_works/AUTH_Acc/c/ objbadutf8']))
+            dict(Counter(self.app.delete_paths)),
+            dict(Counter(['/delete_works/AUTH_Acc/c/ obj \xe2\x99\xa1',
+                          '/delete_works/AUTH_Acc/c/ objbadutf8'])))

         self.assertEqual(self.app.calls, 2)
         resp_data = utils.json.loads(resp_body)
         self.assertEqual(resp_data['Number Deleted'], 1)
         self.assertEqual(len(resp_data['Errors']), 2)
         self.assertEqual(
-            Counter(map(tuple, resp_data['Errors'])),
-            Counter([(urllib.parse.quote('c/ objbadutf8'),
-                      '412 Precondition Failed'),
-                     (urllib.parse.quote('/c/f\xdebadutf8'),
-                      '412 Precondition Failed')]))
+            dict(Counter(map(tuple, resp_data['Errors']))),
+            dict(Counter([('c/%20objbadutf8',
+                           '412 Precondition Failed'),
+                          ('/c/f%DEbadutf8',
+                           '412 Precondition Failed')])))

     def test_bulk_delete_no_body(self):
         req = Request.blank('/unauth/AUTH_acc/')
         resp_body = self.handle_delete_and_iter(req)
-        self.assertTrue('411 Length Required' in resp_body)
+        self.assertIn(b'411 Length Required', resp_body)

     def test_bulk_delete_no_files_in_body(self):
         req = Request.blank('/unauth/AUTH_acc/', body=' ')
         resp_body = self.handle_delete_and_iter(req)
-        self.assertTrue('400 Bad Request' in resp_body)
+        self.assertIn(b'400 Bad Request', resp_body)

     def test_bulk_delete_unauth(self):
         req = Request.blank('/unauth/AUTH_acc/', body='/c/f\n/c/f_ok\n',
@@ -818,7 +842,7 @@
     def test_bulk_delete_bad_path(self):
         req = Request.blank('/delete_cont_fail/')
         resp_body = self.handle_delete_and_iter(req)
-        self.assertTrue('404 Not Found' in resp_body)
+        self.assertIn(b'404 Not Found', resp_body)

     def test_bulk_delete_container_delete(self):
         req = Request.blank('/delete_cont_fail/AUTH_Acc', body='c\n',
@@ -889,7 +913,7 @@
         req = Request.blank('/delete_works/AUTH_Acc', body=body)
         req.method = 'POST'
         resp_body = self.handle_delete_and_iter(req)
-        self.assertTrue('400 Bad Request' in resp_body)
+        self.assertIn(b'400 Bad Request', resp_body)

     def test_bulk_delete_max_failures(self):
         body = '\n'.join([
tox.ini
@@ -43,6 +43,7 @@ commands =
         test/unit/common/middleware/s3api/ \
         test/unit/common/middleware/test_account_quotas.py \
         test/unit/common/middleware/test_acl.py \
+        test/unit/common/middleware/test_bulk.py \
        test/unit/common/middleware/test_catch_errors.py \
        test/unit/common/middleware/test_cname_lookup.py \
        test/unit/common/middleware/test_container_sync.py \