Merge "Move listing formatting out to proxy middleware"
This commit is contained in:
commit
23de16b0bf
@@ -9,7 +9,7 @@ eventlet_debug = true
[pipeline:main]
# Yes, proxy-logging appears twice. This is so that
# middleware-originated requests get logged too.
pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk tempurl ratelimit crossdomain container_sync tempauth staticweb copy container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server
pipeline = catch_errors gatekeeper healthcheck proxy-logging cache listing_formats bulk tempurl ratelimit crossdomain container_sync tempauth staticweb copy container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server

[filter:catch_errors]
use = egg:swift#catch_errors
@@ -71,6 +71,9 @@ allow_versioned_writes = true
[filter:copy]
use = egg:swift#copy

[filter:listing_formats]
use = egg:swift#listing_formats

[app:proxy-server]
use = egg:swift#proxy
allow_account_management = true
@@ -94,7 +94,7 @@ bind_port = 8080
[pipeline:main]
# This sample pipeline uses tempauth and is used for SAIO dev work and
# testing. See below for a pipeline using keystone.
pipeline = catch_errors gatekeeper healthcheck proxy-logging cache container_sync bulk tempurl ratelimit tempauth copy container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server
pipeline = catch_errors gatekeeper healthcheck proxy-logging cache listing_formats container_sync bulk tempurl ratelimit tempauth copy container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server

# The following pipeline shows keystone integration. Comment out the one
# above and uncomment this one. Additional steps for integrating keystone are
@@ -913,3 +913,9 @@ use = egg:swift#encryption
# disable_encryption to True. However, all encryption middleware should remain
# in the pipeline in order for existing encrypted data to be read.
# disable_encryption = False

# listing_formats should be just right of the first proxy-logging middleware,
# and left of most other middlewares. If it is not already present, it will
# be automatically inserted for you.
[filter:listing_formats]
use = egg:swift#listing_formats
@@ -106,6 +106,7 @@ paste.filter_factory =
keymaster = swift.common.middleware.crypto.keymaster:filter_factory
encryption = swift.common.middleware.crypto:filter_factory
kms_keymaster = swift.common.middleware.crypto.kms_keymaster:filter_factory
listing_formats = swift.common.middleware.listing_formats:filter_factory

[build_sphinx]
all_files = 1
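The `egg:swift#listing_formats` filter used in the pipelines above resolves, via this setup.cfg entry, to `swift.common.middleware.listing_formats:filter_factory`. A minimal sketch of the equivalent manual wiring, assuming only names introduced in this change (the `backend_app` variable is hypothetical; paste deploy normally does this for you):

from swift.common.middleware import listing_formats

def wire_listing_formats(backend_app):
    # filter_factory() hands back the ListingFilter class; calling it with
    # the next app in the pipeline yields the WSGI middleware instance.
    factory = listing_formats.filter_factory({})
    return factory(backend_app)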
@@ -24,7 +24,7 @@ import swift.common.db
from swift.account.backend import AccountBroker, DATADIR
from swift.account.utils import account_listing_response, get_response_headers
from swift.common.db import DatabaseConnectionError, DatabaseAlreadyExists
from swift.common.request_helpers import get_param, get_listing_content_type, \
from swift.common.request_helpers import get_param, \
split_and_validate_path
from swift.common.utils import get_logger, hash_path, public, \
Timestamp, storage_directory, config_true_value, \
@@ -33,6 +33,7 @@ from swift.common.constraints import valid_timestamp, check_utf8, check_drive
from swift.common import constraints
from swift.common.db_replicator import ReplicatorRpc
from swift.common.base_storage_server import BaseStorageServer
from swift.common.middleware import listing_formats
from swift.common.swob import HTTPAccepted, HTTPBadRequest, \
HTTPCreated, HTTPForbidden, HTTPInternalServerError, \
HTTPMethodNotAllowed, HTTPNoContent, HTTPNotFound, \
@@ -167,7 +168,7 @@ class AccountController(BaseStorageServer):
def HEAD(self, req):
"""Handle HTTP HEAD request."""
drive, part, account = split_and_validate_path(req, 3)
out_content_type = get_listing_content_type(req)
out_content_type = listing_formats.get_listing_content_type(req)
if not check_drive(self.root, drive, self.mount_check):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_account_broker(drive, part, account,
@@ -201,7 +202,7 @@ class AccountController(BaseStorageServer):
constraints.ACCOUNT_LISTING_LIMIT)
marker = get_param(req, 'marker', '')
end_marker = get_param(req, 'end_marker')
out_content_type = get_listing_content_type(req)
out_content_type = listing_formats.get_listing_content_type(req)

if not check_drive(self.root, drive, self.mount_check):
return HTTPInsufficientStorage(drive=drive, request=req)
@@ -14,8 +14,8 @@
# limitations under the License.

import json
from xml.sax import saxutils

from swift.common.middleware import listing_formats
from swift.common.swob import HTTPOk, HTTPNoContent
from swift.common.utils import Timestamp
from swift.common.storage_policy import POLICIES
@@ -78,43 +78,27 @@ def account_listing_response(account, req, response_content_type, broker=None,

account_list = broker.list_containers_iter(limit, marker, end_marker,
prefix, delimiter, reverse)
if response_content_type == 'application/json':
data = []
for (name, object_count, bytes_used, put_timestamp, is_subdir) \
in account_list:
if is_subdir:
data.append({'subdir': name})
else:
data.append(
{'name': name, 'count': object_count,
'bytes': bytes_used,
'last_modified': Timestamp(put_timestamp).isoformat})
data = []
for (name, object_count, bytes_used, put_timestamp, is_subdir) \
in account_list:
if is_subdir:
data.append({'subdir': name.decode('utf8')})
else:
data.append(
{'name': name.decode('utf8'), 'count': object_count,
'bytes': bytes_used,
'last_modified': Timestamp(put_timestamp).isoformat})
if response_content_type.endswith('/xml'):
account_list = listing_formats.account_to_xml(data, account)
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
elif response_content_type.endswith('/json'):
account_list = json.dumps(data)
elif response_content_type.endswith('/xml'):
output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
'<account name=%s>' % saxutils.quoteattr(account)]
for (name, object_count, bytes_used, put_timestamp, is_subdir) \
in account_list:
if is_subdir:
output_list.append(
'<subdir name=%s />' % saxutils.quoteattr(name))
else:
item = '<container><name>%s</name><count>%s</count>' \
'<bytes>%s</bytes><last_modified>%s</last_modified>' \
'</container>' % \
(saxutils.escape(name), object_count,
bytes_used, Timestamp(put_timestamp).isoformat)
output_list.append(item)
output_list.append('</account>')
account_list = '\n'.join(output_list)
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
elif data:
account_list = listing_formats.listing_to_text(data)
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
else:
if not account_list:
resp = HTTPNoContent(request=req, headers=resp_headers)
resp.content_type = response_content_type
resp.charset = 'utf-8'
return resp
account_list = '\n'.join(r[0] for r in account_list) + '\n'
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
ret = HTTPNoContent(request=req, headers=resp_headers)
ret.content_type = response_content_type
ret.charset = 'utf-8'
return ret
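Taken together, the hunks above collapse the per-format branches into a single `data` list of dicts that is rendered once per requested format. A short sketch of the record shape and the three renderings, using only names introduced in this change (the sample values are made up):

data = [{'subdir': 'photos/'},
        {'name': 'c1', 'count': 2, 'bytes': 10,
         'last_modified': '1970-01-01T00:00:00.000000'}]
# json.dumps(data)                           -> the JSON listing body
# listing_formats.account_to_xml(data, 'a')  -> '<account name="a">...</account>'
# listing_formats.listing_to_text(data)      -> 'photos/\nc1\n'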
@@ -105,11 +105,6 @@ reload_constraints()
MAX_BUFFERED_SLO_SEGMENTS = 10000


#: Query string format= values to their corresponding content-type values
FORMAT2CONTENT_TYPE = {'plain': 'text/plain', 'json': 'application/json',
'xml': 'application/xml'}


# By default the maximum number of allowed headers depends on the number of max
# allowed metadata settings plus a default value of 36 for swift internally
# generated headers and regular http headers. If for some reason this is not
@@ -88,19 +88,20 @@ def _get_direct_account_container(path, stype, node, part,
Do not use directly use the get_direct_account or
get_direct_container instead.
"""
qs = 'format=json'
params = ['format=json']
if marker:
qs += '&marker=%s' % quote(marker)
params.append('marker=%s' % quote(marker))
if limit:
qs += '&limit=%d' % limit
params.append('limit=%d' % limit)
if prefix:
qs += '&prefix=%s' % quote(prefix)
params.append('prefix=%s' % quote(prefix))
if delimiter:
qs += '&delimiter=%s' % quote(delimiter)
params.append('delimiter=%s' % quote(delimiter))
if end_marker:
qs += '&end_marker=%s' % quote(end_marker)
params.append('end_marker=%s' % quote(end_marker))
if reverse:
qs += '&reverse=%s' % quote(reverse)
params.append('reverse=%s' % quote(reverse))
qs = '&'.join(params)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'GET', path, query_string=qs,
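The refactor above swaps string concatenation for a params list joined once at the end; a quick worked example with hypothetical values (quote is the same helper already used in the hunk above):

params = ['format=json']
params.append('marker=%s' % quote('m'))
params.append('limit=%d' % 10)
qs = '&'.join(params)  # 'format=json&marker=m&limit=10'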
@@ -772,12 +772,14 @@ class SimpleClient(object):
if name:
url = '%s/%s' % (url.rstrip('/'), quote(name))
else:
url += '?format=json'
params = ['format=json']
if prefix:
url += '&prefix=%s' % prefix
params.append('prefix=%s' % prefix)

if marker:
url += '&marker=%s' % quote(marker)
params.append('marker=%s' % quote(marker))

url += '?' + '&'.join(params)

req = urllib2.Request(url, headers=headers, data=contents)
if proxy:
@@ -15,7 +15,6 @@

import base64
import json
import xml.etree.cElementTree as ElementTree

from swift import gettext_ as _
from swift.common.http import is_success
@@ -23,7 +22,7 @@ from swift.common.middleware.crypto.crypto_utils import CryptoWSGIContext, \
load_crypto_meta, extract_crypto_meta, Crypto
from swift.common.exceptions import EncryptionException
from swift.common.request_helpers import get_object_transient_sysmeta, \
get_listing_content_type, get_sys_meta_prefix, get_user_meta_prefix
get_sys_meta_prefix, get_user_meta_prefix
from swift.common.swob import Request, HTTPException, HTTPInternalServerError
from swift.common.utils import get_logger, config_true_value, \
parse_content_range, closing_if_possible, parse_content_type, \
@@ -352,15 +351,12 @@ class DecrypterContContext(BaseDecrypterContext):

if is_success(self._get_status_int()):
# only decrypt body of 2xx responses
out_content_type = get_listing_content_type(req)
if out_content_type == 'application/json':
handler = self.process_json_resp
keys = self.get_decryption_keys(req)
elif out_content_type.endswith('/xml'):
handler = self.process_xml_resp
keys = self.get_decryption_keys(req)
else:
handler = keys = None
handler = keys = None
for header, value in self._response_headers:
if header.lower() == 'content-type' and \
value.split(';', 1)[0] == 'application/json':
handler = self.process_json_resp
keys = self.get_decryption_keys(req)

if handler and keys:
try:
@@ -398,24 +394,6 @@ class DecrypterContContext(BaseDecrypterContext):
obj_dict['hash'] = self.decrypt_value_with_meta(ciphertext, key)
return obj_dict

def process_xml_resp(self, key, resp_iter):
"""
Parses xml body listing and decrypt encrypted entries. Updates
Content-Length header with new body length and return a body iter.
"""
with closing_if_possible(resp_iter):
resp_body = ''.join(resp_iter)
tree = ElementTree.fromstring(resp_body)
for elem in tree.iter('hash'):
ciphertext = elem.text.encode('utf8')
plain = self.decrypt_value_with_meta(ciphertext, key)
elem.text = plain.decode('utf8')
new_body = ElementTree.tostring(tree, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)
self.update_content_length(len(new_body))
return [new_body]


class Decrypter(object):
"""Middleware for decrypting data and user metadata."""
@@ -151,7 +151,7 @@ class GetContext(WSGIContext):
method='GET',
headers={'x-auth-token': req.headers.get('x-auth-token')},
agent=('%(orig)s ' + 'DLO MultipartGET'), swift_source='DLO')
con_req.query_string = 'format=json&prefix=%s' % quote(prefix)
con_req.query_string = 'prefix=%s' % quote(prefix)
if marker:
con_req.query_string += '&marker=%s' % quote(marker)
swift/common/middleware/listing_formats.py (new file, 211 lines)
@@ -0,0 +1,211 @@
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import six
from xml.etree.cElementTree import Element, SubElement, tostring

from swift.common.constraints import valid_api_version
from swift.common.http import HTTP_NO_CONTENT
from swift.common.request_helpers import get_param
from swift.common.swob import HTTPException, HTTPNotAcceptable, Request, \
RESPONSE_REASONS


#: Mapping of query string ``format=`` values to their corresponding
#: content-type values.
FORMAT2CONTENT_TYPE = {'plain': 'text/plain', 'json': 'application/json',
'xml': 'application/xml'}
#: Maximum size of a valid JSON container listing body. If we receive
#: a container listing response larger than this, assume it's a staticweb
#: response and pass it on to the client.
# Default max object length is 1024, default container listing limit is 1e4;
# add a fudge factor for things like hash, last_modified, etc.
MAX_CONTAINER_LISTING_CONTENT_LENGTH = 1024 * 10000 * 2


def get_listing_content_type(req):
"""
Determine the content type to use for an account or container listing
response.

:param req: request object
:returns: content type as a string (e.g. text/plain, application/json)
:raises HTTPNotAcceptable: if the requested content type is not acceptable
:raises HTTPBadRequest: if the 'format' query param is provided and
not valid UTF-8
"""
query_format = get_param(req, 'format')
if query_format:
req.accept = FORMAT2CONTENT_TYPE.get(
query_format.lower(), FORMAT2CONTENT_TYPE['plain'])
out_content_type = req.accept.best_match(
['text/plain', 'application/json', 'application/xml', 'text/xml'])
if not out_content_type:
raise HTTPNotAcceptable(request=req)
return out_content_type


def account_to_xml(listing, account_name):
doc = Element('account', name=account_name.decode('utf-8'))
doc.text = '\n'
for record in listing:
if 'subdir' in record:
name = record.pop('subdir')
sub = SubElement(doc, 'subdir', name=name)
else:
sub = SubElement(doc, 'container')
for field in ('name', 'count', 'bytes', 'last_modified'):
SubElement(sub, field).text = six.text_type(
record.pop(field))
sub.tail = '\n'
return tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)


def container_to_xml(listing, base_name):
doc = Element('container', name=base_name.decode('utf-8'))
for record in listing:
if 'subdir' in record:
name = record.pop('subdir')
sub = SubElement(doc, 'subdir', name=name)
SubElement(sub, 'name').text = name
else:
sub = SubElement(doc, 'object')
for field in ('name', 'hash', 'bytes', 'content_type',
'last_modified'):
SubElement(sub, field).text = six.text_type(
record.pop(field))
return tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)


def listing_to_text(listing):
def get_lines():
for item in listing:
if 'name' in item:
yield item['name'].encode('utf-8') + b'\n'
else:
yield item['subdir'].encode('utf-8') + b'\n'
return b''.join(get_lines())


class ListingFilter(object):
def __init__(self, app):
self.app = app

def __call__(self, env, start_response):
req = Request(env)
try:
# account and container only
version, acct, cont = req.split_path(2, 3)
except ValueError:
return self.app(env, start_response)

if not valid_api_version(version) or req.method not in ('GET', 'HEAD'):
return self.app(env, start_response)

# OK, definitely have an account/container request.
# Get the desired content-type, then force it to a JSON request.
try:
out_content_type = get_listing_content_type(req)
except HTTPException as err:
return err(env, start_response)

params = req.params
params['format'] = 'json'
req.params = params

status, headers, resp_iter = req.call_application(self.app)

header_to_index = {}
resp_content_type = resp_length = None
for i, (header, value) in enumerate(headers):
header = header.lower()
if header == 'content-type':
header_to_index[header] = i
resp_content_type = value.partition(';')[0]
elif header == 'content-length':
header_to_index[header] = i
resp_length = int(value)

if not status.startswith('200 '):
start_response(status, headers)
return resp_iter

if resp_content_type != 'application/json':
start_response(status, headers)
return resp_iter

if resp_length is None or \
resp_length > MAX_CONTAINER_LISTING_CONTENT_LENGTH:
start_response(status, headers)
return resp_iter

def set_header(header, value):
if value is None:
del headers[header_to_index[header]]
else:
headers[header_to_index[header]] = (
headers[header_to_index[header]][0], str(value))

if req.method == 'HEAD':
set_header('content-type', out_content_type + '; charset=utf-8')
set_header('content-length', None)  # don't know, can't determine
start_response(status, headers)
return resp_iter

body = b''.join(resp_iter)
try:
listing = json.loads(body)
# Do a couple sanity checks
if not isinstance(listing, list):
raise ValueError
if not all(isinstance(item, dict) for item in listing):
raise ValueError
except ValueError:
# Static web listing that's returning invalid JSON?
# Just pass it straight through; that's about all we *can* do.
start_response(status, headers)
return [body]

try:
if out_content_type.endswith('/xml'):
if cont:
body = container_to_xml(listing, cont)
else:
body = account_to_xml(listing, acct)
elif out_content_type == 'text/plain':
body = listing_to_text(listing)
# else, json -- we continue down here to be sure we set charset
except KeyError:
# listing was in a bad format -- funky static web listing??
start_response(status, headers)
return [body]

if not body:
status = '%s %s' % (HTTP_NO_CONTENT,
RESPONSE_REASONS[HTTP_NO_CONTENT][0])

set_header('content-type', out_content_type + '; charset=utf-8')
set_header('content-length', len(body))
start_response(status, headers)
return [body]


def filter_factory(global_conf, **local_conf):
return ListingFilter
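A minimal end-to-end sketch of the new middleware, assuming Python 2 and swob as used elsewhere in this change; the toy `backend` app below is hypothetical and simply returns a JSON container listing, which ListingFilter re-renders as plain text:

import json

from swift.common.middleware import listing_formats
from swift.common.swob import Request

LISTING = json.dumps([{'name': 'obj1', 'hash': 'etag', 'bytes': 3,
                       'content_type': 'text/plain',
                       'last_modified': '1970-01-01T00:00:00.000000'}])


def backend(env, start_response):
    # stand-in for the proxy app; always answers with a JSON listing
    start_response('200 OK', [('Content-Type', 'application/json'),
                              ('Content-Length', str(len(LISTING)))])
    return [LISTING]


app = listing_formats.ListingFilter(backend)
resp = Request.blank('/v1/AUTH_test/c?format=txt').get_response(app)
# resp.body == 'obj1\n' and Content-Type is 'text/plain; charset=utf-8'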
@@ -260,7 +260,7 @@ class _StaticWebContext(WSGIContext):
env, 'GET', '/%s/%s/%s' % (
self.version, self.account, self.container),
self.agent, swift_source='SW')
tmp_env['QUERY_STRING'] = 'delimiter=/&format=json'
tmp_env['QUERY_STRING'] = 'delimiter=/'
if prefix:
tmp_env['QUERY_STRING'] += '&prefix=%s' % quote(prefix)
else:
@@ -465,8 +465,8 @@ class _StaticWebContext(WSGIContext):
env, 'GET', '/%s/%s/%s' % (
self.version, self.account, self.container),
self.agent, swift_source='SW')
tmp_env['QUERY_STRING'] = 'limit=1&format=json&delimiter' \
'=/&limit=1&prefix=%s' % quote(self.obj + '/')
tmp_env['QUERY_STRING'] = 'limit=1&delimiter=/&prefix=%s' % (
quote(self.obj + '/'), )
resp = self._app_call(tmp_env)
body = ''.join(resp)
if not is_success(self._get_status_int()) or not body or \
@@ -329,8 +329,7 @@ class VersionedWritesContext(WSGIContext):
env, method='GET', swift_source='VW',
path='/v1/%s/%s' % (account_name, lcontainer))
lreq.environ['QUERY_STRING'] = \
'format=json&prefix=%s&marker=%s' % (
quote(lprefix), quote(marker))
'prefix=%s&marker=%s' % (quote(lprefix), quote(marker))
if end_marker:
lreq.environ['QUERY_STRING'] += '&end_marker=%s' % (
quote(end_marker))
@@ -31,10 +31,9 @@ from swift.common.header_key_dict import HeaderKeyDict

from swift import gettext_ as _
from swift.common.storage_policy import POLICIES
from swift.common.constraints import FORMAT2CONTENT_TYPE
from swift.common.exceptions import ListingIterError, SegmentError
from swift.common.http import is_success
from swift.common.swob import HTTPBadRequest, HTTPNotAcceptable, \
from swift.common.swob import HTTPBadRequest, \
HTTPServiceUnavailable, Range, is_chunked, multi_range_iterator
from swift.common.utils import split_path, validate_device_partition, \
close_if_possible, maybe_multipart_byteranges_to_document_iters, \
@@ -70,28 +69,6 @@ def get_param(req, name, default=None):
return value


def get_listing_content_type(req):
"""
Determine the content type to use for an account or container listing
response.

:param req: request object
:returns: content type as a string (e.g. text/plain, application/json)
:raises HTTPNotAcceptable: if the requested content type is not acceptable
:raises HTTPBadRequest: if the 'format' query param is provided and
not valid UTF-8
"""
query_format = get_param(req, 'format')
if query_format:
req.accept = FORMAT2CONTENT_TYPE.get(
query_format.lower(), FORMAT2CONTENT_TYPE['plain'])
out_content_type = req.accept.best_match(
['text/plain', 'application/json', 'application/xml', 'text/xml'])
if not out_content_type:
raise HTTPNotAcceptable(request=req)
return out_content_type


def get_name_and_placement(request, minsegs=1, maxsegs=None,
rest_with_last=False):
"""
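With the helper removed from request_helpers, callers in this change import it from its new home instead; a one-line sketch of the updated import (the backend servers above use the module-qualified form):

from swift.common.middleware.listing_formats import get_listing_content_type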
@@ -19,7 +19,6 @@ import time
import traceback
import math
from swift import gettext_ as _
from xml.etree.cElementTree import Element, SubElement, tostring

from eventlet import Timeout

@@ -29,7 +28,7 @@ from swift.container.backend import ContainerBroker, DATADIR
from swift.container.replicator import ContainerReplicatorRpc
from swift.common.db import DatabaseAlreadyExists
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.request_helpers import get_param, get_listing_content_type, \
from swift.common.request_helpers import get_param, \
split_and_validate_path, is_sys_or_user_meta
from swift.common.utils import get_logger, hash_path, public, \
Timestamp, storage_directory, validate_sync_to, \
@@ -40,6 +39,7 @@ from swift.common import constraints
from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout
from swift.common.http import HTTP_NOT_FOUND, is_success
from swift.common.middleware import listing_formats
from swift.common.storage_policy import POLICIES
from swift.common.base_storage_server import BaseStorageServer
from swift.common.header_key_dict import HeaderKeyDict
@@ -418,7 +418,7 @@ class ContainerController(BaseStorageServer):
"""Handle HTTP HEAD request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
out_content_type = get_listing_content_type(req)
out_content_type = listing_formats.get_listing_content_type(req)
if not check_drive(self.root, drive, self.mount_check):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_container_broker(drive, part, account, container,
@@ -451,8 +451,8 @@ class ContainerController(BaseStorageServer):
"""
(name, created, size, content_type, etag) = record[:5]
if content_type is None:
return {'subdir': name}
response = {'bytes': size, 'hash': etag, 'name': name,
return {'subdir': name.decode('utf8')}
response = {'bytes': size, 'hash': etag, 'name': name.decode('utf8'),
'content_type': content_type}
response['last_modified'] = Timestamp(created).isoformat
override_bytes_from_content_type(response, logger=self.logger)
@@ -482,7 +482,7 @@ class ContainerController(BaseStorageServer):
request=req,
body='Maximum limit is %d'
% constraints.CONTAINER_LISTING_LIMIT)
out_content_type = get_listing_content_type(req)
out_content_type = listing_formats.get_listing_content_type(req)
if not check_drive(self.root, drive, self.mount_check):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_container_broker(drive, part, account, container,
@@ -504,36 +504,20 @@ class ContainerController(BaseStorageServer):
if value and (key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)):
resp_headers[key] = value
ret = Response(request=req, headers=resp_headers,
content_type=out_content_type, charset='utf-8')
if out_content_type == 'application/json':
ret.body = json.dumps([self.update_data_record(record)
for record in container_list])
elif out_content_type.endswith('/xml'):
doc = Element('container', name=container.decode('utf-8'))
for obj in container_list:
record = self.update_data_record(obj)
if 'subdir' in record:
name = record['subdir'].decode('utf-8')
sub = SubElement(doc, 'subdir', name=name)
SubElement(sub, 'name').text = name
else:
obj_element = SubElement(doc, 'object')
for field in ["name", "hash", "bytes", "content_type",
"last_modified"]:
SubElement(obj_element, field).text = str(
record.pop(field)).decode('utf-8')
for field in sorted(record):
SubElement(obj_element, field).text = str(
record[field]).decode('utf-8')
ret.body = tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)
listing = [self.update_data_record(record)
for record in container_list]
if out_content_type.endswith('/xml'):
body = listing_formats.container_to_xml(listing, container)
elif out_content_type.endswith('/json'):
body = json.dumps(listing)
else:
if not container_list:
return HTTPNoContent(request=req, headers=resp_headers)
ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
body = listing_formats.listing_to_text(listing)

ret = Response(request=req, headers=resp_headers, body=body,
content_type=out_content_type, charset='utf-8')
ret.last_modified = math.ceil(float(resp_headers['X-PUT-Timestamp']))
if not ret.body:
ret.status_int = 204
return ret

@public
@@ -18,7 +18,6 @@ from six.moves.urllib.parse import unquote
from swift import gettext_ as _

from swift.account.utils import account_listing_response
from swift.common.request_helpers import get_listing_content_type
from swift.common.middleware.acl import parse_acl, format_acl
from swift.common.utils import public
from swift.common.constraints import check_metadata
@@ -26,6 +25,7 @@ from swift.common import constraints
from swift.common.http import HTTP_NOT_FOUND, HTTP_GONE
from swift.proxy.controllers.base import Controller, clear_info_cache, \
set_info_cache
from swift.common.middleware import listing_formats
from swift.common.swob import HTTPBadRequest, HTTPMethodNotAllowed
from swift.common.request_helpers import get_sys_meta_prefix

@@ -67,6 +67,9 @@ class AccountController(Controller):
concurrency = self.app.account_ring.replica_count \
if self.app.concurrent_gets else 1
node_iter = self.app.iter_nodes(self.app.account_ring, partition)
params = req.params
params['format'] = 'json'
req.params = params
resp = self.GETorHEAD_base(
req, _('Account'), node_iter, partition,
req.swift_entity_path.rstrip('/'), concurrency)
@@ -86,8 +89,10 @@ class AccountController(Controller):
# creates the account if necessary. If we feed it a perfect
# lie, it'll just try to create the container without
# creating the account, and that'll fail.
resp = account_listing_response(self.account_name, req,
get_listing_content_type(req))
req.params = {}  # clear our format override
resp = account_listing_response(
self.account_name, req,
listing_formats.get_listing_content_type(req))
resp.headers['X-Backend-Fake-Account-Listing'] = 'yes'

# Cache this. We just made a request to a storage node and got
@@ -100,6 +100,9 @@ class ContainerController(Controller):
concurrency = self.app.container_ring.replica_count \
if self.app.concurrent_gets else 1
node_iter = self.app.iter_nodes(self.app.container_ring, part)
params = req.params
params['format'] = 'json'
req.params = params
resp = self.GETorHEAD_base(
req, _('Container'), node_iter, part,
req.swift_entity_path, concurrency)
@@ -66,16 +66,19 @@ required_filters = [
'after_fn': lambda pipe: (['catch_errors']
if pipe.startswith('catch_errors')
else [])},
{'name': 'listing_formats', 'after_fn': lambda _junk: [
'catch_errors', 'gatekeeper', 'proxy_logging', 'memcache']},
# Put copy before dlo, slo and versioned_writes
{'name': 'copy', 'after_fn': lambda _junk: [
'staticweb', 'tempauth', 'keystoneauth',
'catch_errors', 'gatekeeper', 'proxy_logging']},
{'name': 'dlo', 'after_fn': lambda _junk: [
'copy', 'staticweb', 'tempauth', 'keystoneauth',
'catch_errors', 'gatekeeper', 'proxy_logging']},
{'name': 'versioned_writes', 'after_fn': lambda _junk: [
'slo', 'dlo', 'copy', 'staticweb', 'tempauth',
'keystoneauth', 'catch_errors', 'gatekeeper', 'proxy_logging']},
# Put copy before dlo, slo and versioned_writes
{'name': 'copy', 'after_fn': lambda _junk: [
'staticweb', 'tempauth', 'keystoneauth',
'catch_errors', 'gatekeeper', 'proxy_logging']}]
]


def _label_for_policy(policy):
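Each `required_filters` entry lists the filters the middleware must sit to the right of; when `listing_formats` is absent from a configured pipeline, the proxy inserts it automatically (see the conf-sample comment earlier in this change). A hedged illustration of that placement rule, using a hypothetical helper rather than the proxy's actual implementation:

def insertion_index(pipeline, after=('catch_errors', 'gatekeeper',
                                     'proxy_logging', 'memcache')):
    # insert just past the last already-present filter it must follow
    present = [i for i, name in enumerate(pipeline) if name in after]
    return max(present) + 1 if present else 0

pipeline = ['catch_errors', 'gatekeeper', 'healthcheck', 'proxy_logging',
            'memcache', 'tempauth', 'proxy_server']
pipeline.insert(insertion_index(pipeline), 'listing_formats')
# -> [..., 'memcache', 'listing_formats', 'tempauth', 'proxy_server']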
@@ -16,7 +16,6 @@ import base64
import json
import os
import unittest
from xml.dom import minidom

import mock

@@ -961,138 +960,6 @@ class TestDecrypterContainerRequests(unittest.TestCase):
self.assertIn("Cipher must be AES_CTR_256",
self.decrypter.logger.get_lines_for_level('error')[0])

def _assert_element(self, name, expected, element):
self.assertEqual(element.tagName, name)
self._assert_element_contains_dict(expected, element)

def _assert_element_contains_dict(self, expected, element):
for k, v in expected.items():
entry = element.getElementsByTagName(k)
self.assertIsNotNone(entry, 'Key %s not found' % k)
actual = entry[0].childNodes[0].nodeValue
self.assertEqual(v, actual,
"Expected %s but got %s for key %s"
% (v, actual, k))

def test_GET_container_xml(self):
content_type_1 = u'\uF10F\uD20D\uB30B\u9409'
content_type_2 = 'text/plain; param=foo'
pt_etag1 = 'c6e8196d7f0fff6444b90861fe8d609d'
pt_etag2 = 'ac0374ed4d43635f803c82469d0b5a10'
key = fetch_crypto_keys()['container']

fake_body = '''<?xml version="1.0" encoding="UTF-8"?>
<container name="testc">\
<subdir name="test-subdir"><name>test-subdir</name></subdir>\
<object><hash>\
''' + encrypt_and_append_meta(pt_etag1.encode('utf8'), key) + '''\
</hash><content_type>\
''' + content_type_1 + '''\
</content_type><name>testfile</name><bytes>16</bytes>\
<last_modified>2015-04-19T02:37:39.601660</last_modified></object>\
<object><hash>\
''' + encrypt_and_append_meta(pt_etag2.encode('utf8'), key) + '''\
</hash><content_type>\
''' + content_type_2 + '''\
</content_type><name>testfile2</name><bytes>24</bytes>\
<last_modified>2015-04-19T02:37:39.684740</last_modified></object>\
</container>'''

resp = self._make_cont_get_req(fake_body, 'xml')
self.assertEqual('200 OK', resp.status)
body = resp.body
self.assertEqual(len(body), int(resp.headers['Content-Length']))

tree = minidom.parseString(body)
containers = tree.getElementsByTagName('container')
self.assertEqual(1, len(containers))
self.assertEqual('testc',
containers[0].attributes.getNamedItem("name").value)

results = containers[0].childNodes
self.assertEqual(3, len(results))

self._assert_element('subdir', {"name": "test-subdir"}, results[0])

obj_dict_1 = {"bytes": "16",
"last_modified": "2015-04-19T02:37:39.601660",
"hash": pt_etag1,
"name": "testfile",
"content_type": content_type_1}
self._assert_element('object', obj_dict_1, results[1])
obj_dict_2 = {"bytes": "24",
"last_modified": "2015-04-19T02:37:39.684740",
"hash": pt_etag2,
"name": "testfile2",
"content_type": content_type_2}
self._assert_element('object', obj_dict_2, results[2])

def test_GET_container_xml_with_crypto_override(self):
content_type_1 = 'image/jpeg'
content_type_2 = 'text/plain; param=foo'

fake_body = '''<?xml version="1.0" encoding="UTF-8"?>
<container name="testc">\
<object><hash>c6e8196d7f0fff6444b90861fe8d609d</hash>\
<content_type>''' + content_type_1 + '''\
</content_type><name>testfile</name><bytes>16</bytes>\
<last_modified>2015-04-19T02:37:39.601660</last_modified></object>\
<object><hash>ac0374ed4d43635f803c82469d0b5a10</hash>\
<content_type>''' + content_type_2 + '''\
</content_type><name>testfile2</name><bytes>24</bytes>\
<last_modified>2015-04-19T02:37:39.684740</last_modified></object>\
</container>'''

resp = self._make_cont_get_req(fake_body, 'xml', override=True)

self.assertEqual('200 OK', resp.status)
body = resp.body
self.assertEqual(len(body), int(resp.headers['Content-Length']))

tree = minidom.parseString(body)
containers = tree.getElementsByTagName('container')
self.assertEqual(1, len(containers))
self.assertEqual('testc',
containers[0].attributes.getNamedItem("name").value)

objs = tree.getElementsByTagName('object')
self.assertEqual(2, len(objs))

obj_dict_1 = {"bytes": "16",
"last_modified": "2015-04-19T02:37:39.601660",
"hash": "c6e8196d7f0fff6444b90861fe8d609d",
"name": "testfile",
"content_type": content_type_1}
self._assert_element_contains_dict(obj_dict_1, objs[0])
obj_dict_2 = {"bytes": "24",
"last_modified": "2015-04-19T02:37:39.684740",
"hash": "ac0374ed4d43635f803c82469d0b5a10",
"name": "testfile2",
"content_type": content_type_2}
self._assert_element_contains_dict(obj_dict_2, objs[1])

def test_cont_get_xml_req_with_cipher_mismatch(self):
bad_crypto_meta = fake_get_crypto_meta()
bad_crypto_meta['cipher'] = 'unknown_cipher'

fake_body = '''<?xml version="1.0" encoding="UTF-8"?>
<container name="testc"><object>\
<hash>''' + encrypt_and_append_meta('c6e8196d7f0fff6444b90861fe8d609d',
fetch_crypto_keys()['container'],
crypto_meta=bad_crypto_meta) + '''\
</hash>\
<content_type>image/jpeg</content_type>\
<name>testfile</name><bytes>16</bytes>\
<last_modified>2015-04-19T02:37:39.601660</last_modified></object>\
</container>'''

resp = self._make_cont_get_req(fake_body, 'xml')

self.assertEqual('500 Internal Error', resp.status)
self.assertEqual('Error decrypting container listing', resp.body)
self.assertIn("Cipher must be AES_CTR_256",
self.decrypter.logger.get_lines_for_level('error')[0])


class TestModuleMethods(unittest.TestCase):
def test_purge_crypto_sysmeta_headers(self):
@@ -129,11 +129,11 @@ class DloTestCase(unittest.TestCase):
"last_modified": lm,
"content_type": "application/png"}]
self.app.register(
'GET', '/v1/AUTH_test/c?format=json',
'GET', '/v1/AUTH_test/c',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps(full_container_listing))
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=seg',
'GET', '/v1/AUTH_test/c?prefix=seg',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps(segs))

@@ -148,11 +148,11 @@ class DloTestCase(unittest.TestCase):
'X-Object-Manifest': 'c/seg_'},
'manyseg')
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=seg_',
'GET', '/v1/AUTH_test/c?prefix=seg_',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps(segs[:3]))
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=seg_&marker=seg_03',
'GET', '/v1/AUTH_test/c?prefix=seg_&marker=seg_03',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps(segs[3:]))

@@ -163,7 +163,7 @@ class DloTestCase(unittest.TestCase):
'X-Object-Manifest': 'c/noseg_'},
'noseg')
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=noseg_',
'GET', '/v1/AUTH_test/c?prefix=noseg_',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps([]))

@@ -278,7 +278,7 @@ class TestDloHeadManifest(DloTestCase):
self.assertEqual(
self.app.calls,
[('HEAD', '/v1/AUTH_test/mancon/manifest-no-segments'),
('GET', '/v1/AUTH_test/c?format=json&prefix=noseg_')])
('GET', '/v1/AUTH_test/c?prefix=noseg_')])


class TestDloGetManifest(DloTestCase):
@@ -444,7 +444,7 @@ class TestDloGetManifest(DloTestCase):
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/mancon/manifest-many-segments'),
('GET', '/v1/AUTH_test/c?format=json&prefix=seg_'),
('GET', '/v1/AUTH_test/c?prefix=seg_'),
('GET', '/v1/AUTH_test/c/seg_01?multipart-manifest=get'),
('GET', '/v1/AUTH_test/c/seg_02?multipart-manifest=get'),
('GET', '/v1/AUTH_test/c/seg_03?multipart-manifest=get')])
@@ -601,7 +601,7 @@ class TestDloGetManifest(DloTestCase):

def test_error_listing_container_first_listing_request(self):
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=seg_',
'GET', '/v1/AUTH_test/c?prefix=seg_',
swob.HTTPNotFound, {}, None)

req = swob.Request.blank('/v1/AUTH_test/mancon/manifest-many-segments',
@@ -613,7 +613,7 @@ class TestDloGetManifest(DloTestCase):

def test_error_listing_container_second_listing_request(self):
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=seg_&marker=seg_03',
'GET', '/v1/AUTH_test/c?prefix=seg_&marker=seg_03',
swob.HTTPNotFound, {}, None)

req = swob.Request.blank('/v1/AUTH_test/mancon/manifest-many-segments',
@@ -648,7 +648,7 @@ class TestDloGetManifest(DloTestCase):
swob.HTTPOk, {'Content-Length': '0', 'Etag': 'blah',
'X-Object-Manifest': 'c/quotetags'}, None)
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=quotetags',
'GET', '/v1/AUTH_test/c?prefix=quotetags',
swob.HTTPOk, {'Content-Type': 'application/json; charset=utf-8'},
json.dumps([{"hash": "\"abc\"", "bytes": 5, "name": "quotetags1",
"last_modified": "2013-11-22T02:42:14.261620",
@@ -673,7 +673,7 @@ class TestDloGetManifest(DloTestCase):
segs = [{"hash": md5hex("AAAAA"), "bytes": 5, "name": u"é1"},
{"hash": md5hex("AAAAA"), "bytes": 5, "name": u"é2"}]
self.app.register(
'GET', '/v1/AUTH_test/c?format=json&prefix=%C3%A9',
'GET', '/v1/AUTH_test/c?prefix=%C3%A9',
swob.HTTPOk, {'Content-Type': 'application/json'},
json.dumps(segs))

@@ -745,7 +745,7 @@ class TestDloGetManifest(DloTestCase):
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/mancon/manifest'),
('GET', '/v1/AUTH_test/c?format=json&prefix=seg'),
('GET', '/v1/AUTH_test/c?prefix=seg'),
('GET', '/v1/AUTH_test/c/seg_01?multipart-manifest=get'),
('GET', '/v1/AUTH_test/c/seg_02?multipart-manifest=get'),
('GET', '/v1/AUTH_test/c/seg_03?multipart-manifest=get')])
test/unit/common/middleware/test_listing_formats.py (new file, 345 lines)
@@ -0,0 +1,345 @@
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import unittest

from swift.common.swob import Request, HTTPOk
from swift.common.middleware import listing_formats
from test.unit.common.middleware.helpers import FakeSwift


class TestListingFormats(unittest.TestCase):
def setUp(self):
self.fake_swift = FakeSwift()
self.app = listing_formats.ListingFilter(self.fake_swift)
self.fake_account_listing = json.dumps([
{'name': 'bar', 'bytes': 0, 'count': 0,
'last_modified': '1970-01-01T00:00:00.000000'},
{'subdir': 'foo_'},
])
self.fake_container_listing = json.dumps([
{'name': 'bar', 'hash': 'etag', 'bytes': 0,
'content_type': 'text/plain',
'last_modified': '1970-01-01T00:00:00.000000'},
{'subdir': 'foo/'},
])

def test_valid_account(self):
self.fake_swift.register('GET', '/v1/a', HTTPOk, {
'Content-Length': str(len(self.fake_account_listing)),
'Content-Type': 'application/json'}, self.fake_account_listing)

req = Request.blank('/v1/a')
resp = req.get_response(self.app)
self.assertEqual(resp.body, 'bar\nfoo_\n')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=txt')
resp = req.get_response(self.app)
self.assertEqual(resp.body, 'bar\nfoo_\n')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=json')
resp = req.get_response(self.app)
self.assertEqual(resp.body, self.fake_account_listing)
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=xml')
resp = req.get_response(self.app)
self.assertEqual(resp.body.split('\n'), [
'<?xml version="1.0" encoding="UTF-8"?>',
'<account name="a">',
'<container><name>bar</name><count>0</count><bytes>0</bytes>'
'<last_modified>1970-01-01T00:00:00.000000</last_modified>'
'</container>',
'<subdir name="foo_" />',
'</account>',
])
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

def test_valid_container(self):
self.fake_swift.register('GET', '/v1/a/c', HTTPOk, {
'Content-Length': str(len(self.fake_container_listing)),
'Content-Type': 'application/json'}, self.fake_container_listing)

req = Request.blank('/v1/a/c')
resp = req.get_response(self.app)
self.assertEqual(resp.body, 'bar\nfoo/\n')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=txt')
resp = req.get_response(self.app)
self.assertEqual(resp.body, 'bar\nfoo/\n')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=json')
resp = req.get_response(self.app)
self.assertEqual(resp.body, self.fake_container_listing)
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=xml')
resp = req.get_response(self.app)
self.assertEqual(
resp.body,
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<container name="c">'
'<object><name>bar</name><hash>etag</hash><bytes>0</bytes>'
'<content_type>text/plain</content_type>'
'<last_modified>1970-01-01T00:00:00.000000</last_modified>'
'</object>'
'<subdir name="foo/"><name>foo/</name></subdir>'
'</container>'
)
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

def test_blank_account(self):
self.fake_swift.register('GET', '/v1/a', HTTPOk, {
'Content-Length': '2', 'Content-Type': 'application/json'}, '[]')

req = Request.blank('/v1/a')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '204 No Content')
self.assertEqual(resp.body, '')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=txt')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '204 No Content')
self.assertEqual(resp.body, '')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=json')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '200 OK')
self.assertEqual(resp.body, '[]')
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

req = Request.blank('/v1/a?format=xml')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '200 OK')
self.assertEqual(resp.body.split('\n'), [
'<?xml version="1.0" encoding="UTF-8"?>',
'<account name="a">',
'</account>',
])
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a?format=json'))

def test_blank_container(self):
self.fake_swift.register('GET', '/v1/a/c', HTTPOk, {
'Content-Length': '2', 'Content-Type': 'application/json'}, '[]')

req = Request.blank('/v1/a/c')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '204 No Content')
self.assertEqual(resp.body, '')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=txt')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '204 No Content')
self.assertEqual(resp.body, '')
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=json')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '200 OK')
self.assertEqual(resp.body, '[]')
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

req = Request.blank('/v1/a/c?format=xml')
resp = req.get_response(self.app)
self.assertEqual(resp.status, '200 OK')
self.assertEqual(resp.body.split('\n'), [
'<?xml version="1.0" encoding="UTF-8"?>',
'<container name="c" />',
])
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/a/c?format=json'))

def test_pass_through(self):
def do_test(path):
self.fake_swift.register(
'GET', path, HTTPOk, {
'Content-Length': str(len(self.fake_container_listing)),
'Content-Type': 'application/json'},
self.fake_container_listing)
req = Request.blank(path + '?format=xml')
resp = req.get_response(self.app)
self.assertEqual(resp.body, self.fake_container_listing)
self.assertEqual(resp.headers['Content-Type'], 'application/json')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', path + '?format=xml'))  # query param is unchanged

do_test('/')
do_test('/v1')
do_test('/auth/v1.0')
do_test('/v1/a/c/o')

def test_static_web_not_json(self):
body = 'doesnt matter'
self.fake_swift.register(
'GET', '/v1/staticweb/not-json', HTTPOk,
{'Content-Length': str(len(body)),
'Content-Type': 'text/plain'},
body)

resp = Request.blank('/v1/staticweb/not-json').get_response(self.app)
self.assertEqual(resp.body, body)
self.assertEqual(resp.headers['Content-Type'], 'text/plain')
# We *did* try, though
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/staticweb/not-json?format=json'))
# TODO: add a similar test that has *no* content-type
# FakeSwift seems to make this hard to do

def test_static_web_not_really_json(self):
body = 'raises ValueError'
self.fake_swift.register(
'GET', '/v1/staticweb/not-json', HTTPOk,
{'Content-Length': str(len(body)),
'Content-Type': 'application/json'},
body)

resp = Request.blank('/v1/staticweb/not-json').get_response(self.app)
self.assertEqual(resp.body, body)
self.assertEqual(resp.headers['Content-Type'], 'application/json')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/staticweb/not-json?format=json'))

def test_static_web_pretend_to_be_giant_json(self):
body = json.dumps(self.fake_container_listing * 1000000)
self.assertGreater(  # sanity
len(body), listing_formats.MAX_CONTAINER_LISTING_CONTENT_LENGTH)

self.fake_swift.register(
'GET', '/v1/staticweb/not-json', HTTPOk,
{'Content-Type': 'application/json'},
body)

resp = Request.blank('/v1/staticweb/not-json').get_response(self.app)
self.assertEqual(resp.body, body)
self.assertEqual(resp.headers['Content-Type'], 'application/json')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/staticweb/not-json?format=json'))
# TODO: add a similar test for chunked transfers
# (staticweb referencing a DLO that doesn't fit in a single listing?)

def test_static_web_bad_json(self):
def do_test(body_obj):
body = json.dumps(body_obj)
self.fake_swift.register(
'GET', '/v1/staticweb/bad-json', HTTPOk,
{'Content-Length': str(len(body)),
'Content-Type': 'application/json'},
body)

def do_sub_test(path):
resp = Request.blank(path).get_response(self.app)
self.assertEqual(resp.body, body)
# NB: no charset is added; we pass through whatever we got
self.assertEqual(resp.headers['Content-Type'],
'application/json')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/staticweb/bad-json?format=json'))

do_sub_test('/v1/staticweb/bad-json')
do_sub_test('/v1/staticweb/bad-json?format=txt')
do_sub_test('/v1/staticweb/bad-json?format=xml')
do_sub_test('/v1/staticweb/bad-json?format=json')

do_test({})
do_test({'non-empty': 'hash'})
do_test(None)
do_test(0)
do_test('some string')
do_test([None])
do_test([0])
do_test(['some string'])

def test_static_web_bad_but_not_terrible_json(self):
body = json.dumps([{'no name': 'nor subdir'}])
self.fake_swift.register(
'GET', '/v1/staticweb/bad-json', HTTPOk,
{'Content-Length': str(len(body)),
'Content-Type': 'application/json'},
body)

def do_test(path, expect_charset=False):
resp = Request.blank(path).get_response(self.app)
self.assertEqual(resp.body, body)
if expect_charset:
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
else:
self.assertEqual(resp.headers['Content-Type'],
'application/json')
self.assertEqual(self.fake_swift.calls[-1], (
'GET', '/v1/staticweb/bad-json?format=json'))

do_test('/v1/staticweb/bad-json')
do_test('/v1/staticweb/bad-json?format=txt')
do_test('/v1/staticweb/bad-json?format=xml')
# The response we get is *just close enough* to being valid that we
# assume it is and slap on the missing charset. If you set up staticweb
# to serve back such responses, your clients are already hosed.
do_test('/v1/staticweb/bad-json?format=json', expect_charset=True)
@ -279,7 +279,7 @@ class FakeApp(object):
if ((env['PATH_INFO'] in (
'/v1/a/c3', '/v1/a/c4', '/v1/a/c8', '/v1/a/c9'))
and (env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=subdir/')):
'delimiter=/&prefix=subdir/')):
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
@ -296,14 +296,14 @@ class FakeApp(object):
{"subdir":"subdir3/subsubdir/"}]
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
'delimiter=/&format=json&prefix=subdiry/':
'delimiter=/&prefix=subdiry/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
'Content-Type': 'application/json; charset=utf-8'})
body = '[]'
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
'limit=1&format=json&delimiter=/&limit=1&prefix=subdirz/':
'limit=1&delimiter=/&prefix=subdirz/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
@ -315,7 +315,7 @@ class FakeApp(object):
"last_modified":"2011-03-24T04:27:52.709100"}]
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c6' and env['QUERY_STRING'] == \
'limit=1&format=json&delimiter=/&limit=1&prefix=subdir/':
'limit=1&delimiter=/&prefix=subdir/':
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
@ -329,9 +329,9 @@ class FakeApp(object):
'''.strip()
elif env['PATH_INFO'] == '/v1/a/c10' and (
env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=%E2%98%83/' or
'delimiter=/&prefix=%E2%98%83/' or
env['QUERY_STRING'] ==
'delimiter=/&format=json&prefix=%E2%98%83/%E2%98%83/'):
'delimiter=/&prefix=%E2%98%83/%E2%98%83/'):
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'X-Container-Read': '.r:*',
@ -346,7 +346,7 @@ class FakeApp(object):
'''.strip()
elif 'prefix=' in env['QUERY_STRING']:
return Response(status='204 No Content')(env, start_response)
elif 'format=json' in env['QUERY_STRING']:
else:
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'Content-Type': 'application/json; charset=utf-8'})
@ -397,15 +397,6 @@ class FakeApp(object):
"content_type":"text/plain",
"last_modified":"2011-03-24T04:27:52.935560"}]
'''.strip()
else:
headers.update({'X-Container-Object-Count': '12',
'X-Container-Bytes-Used': '73763',
'Content-Type': 'text/plain; charset=utf-8'})
body = '\n'.join(['401error.html', '404error.html', 'index.html',
'listing.css', 'one.txt', 'subdir/1.txt',
'subdir/2.txt', u'subdir/\u2603.txt', 'subdir2',
'subdir3/subsubdir/index.html', 'two.txt',
u'\u2603/\u2603/one.txt'])
return Response(status='200 Ok', headers=headers,
body=body)(env, start_response)

@ -481,8 +472,8 @@ class TestStaticWeb(unittest.TestCase):
def test_container2(self):
resp = Request.blank('/v1/a/c2').get_response(self.test_staticweb)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_type, 'text/plain')
self.assertEqual(len(resp.body.split('\n')),
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(len(json.loads(resp.body)),
int(resp.headers['x-container-object-count']))

def test_container2_web_mode_explicitly_off(self):
@ -490,8 +481,8 @@ class TestStaticWeb(unittest.TestCase):
'/v1/a/c2',
headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_type, 'text/plain')
self.assertEqual(len(resp.body.split('\n')),
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(len(json.loads(resp.body)),
int(resp.headers['x-container-object-count']))

def test_container2_web_mode_explicitly_on(self):
@ -507,7 +498,7 @@ class TestStaticWeb(unittest.TestCase):

def test_container2json(self):
resp = Request.blank(
'/v1/a/c2?format=json').get_response(self.test_staticweb)
'/v1/a/c2').get_response(self.test_staticweb)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(len(json.loads(resp.body)),
@ -515,7 +506,7 @@ class TestStaticWeb(unittest.TestCase):

def test_container2json_web_mode_explicitly_off(self):
resp = Request.blank(
'/v1/a/c2?format=json',
'/v1/a/c2',
headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_type, 'application/json')
@ -524,7 +515,7 @@ class TestStaticWeb(unittest.TestCase):

def test_container2json_web_mode_explicitly_on(self):
resp = Request.blank(
'/v1/a/c2?format=json',
'/v1/a/c2',
headers={'x-web-mode': 'true'}).get_response(self.test_staticweb)
self.assertEqual(resp.status_int, 404)
|
@ -567,7 +567,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPNotFound, {}, None)

cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
@ -583,7 +583,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(['VW', None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('DELETE', '/v1/a/c/o'),
@ -594,7 +594,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {}, '[]')

cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
@ -607,7 +607,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('DELETE', '/v1/a/c/o'),
@ -616,7 +616,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_delete_latest_version_no_marker_success(self):
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -655,7 +655,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
req_headers = self.app.headers[-1]
self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'),
@ -666,7 +666,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_delete_latest_version_restores_marker_success(self):
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -714,7 +714,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
# in the base versioned container.
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -749,7 +749,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('HEAD', '/v1/a/c/o'),
@ -770,7 +770,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):

def test_delete_latest_version_doubled_up_markers_success(self):
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/'
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
@ -888,7 +888,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -914,7 +914,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/1'),
@ -925,7 +925,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_DELETE_on_expired_versioned_object(self):
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -962,7 +962,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(5, self.app.call_count)

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'),
@ -975,7 +975,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
authorize_call = []
self.app.register(
'GET',
'/v1/a/ver_cont?format=json&prefix=001o/&marker=&reverse=on',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
@ -1004,7 +1004,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(req, authorize_call[0])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
])
@ -1041,7 +1041,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
@ -1055,7 +1055,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/'
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)
self.app.register(
@ -1086,7 +1086,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
req_headers = self.app.headers[-1]
self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
@ -1097,7 +1097,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):

def test_DELETE_on_expired_versioned_object(self):
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
@ -1111,7 +1111,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/'
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)

@ -1139,7 +1139,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(6, self.app.call_count)

prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
@ -1154,7 +1154,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
@ -1168,7 +1168,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/'
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)
self.app.register(
@ -1189,7 +1189,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.assertEqual(status, '403 Forbidden')
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(req, authorize_call[0])
prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
@ -1206,7 +1206,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):

# first container server can reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[2:]))))
# but all objects are already gone
@ -1222,21 +1222,21 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):

# second container server can't reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/2&reverse=on',
swob.HTTPOk, {}, json.dumps(old_versions[3:]))

# subsequent requests shouldn't reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&end_marker=001o/2',
swob.HTTPOk, {}, json.dumps(old_versions[:1]))
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/0&end_marker=001o/2',
swob.HTTPOk, {}, json.dumps(old_versions[1:2]))
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/1&end_marker=001o/2',
swob.HTTPOk, {}, '[]')
self.app.register(
@ -1255,7 +1255,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'),
@ -1281,7 +1281,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):

# first container server can reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[-2:]))))
# but both objects are already gone
@ -1294,21 +1294,21 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):

# second container server can't reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/3&reverse=on',
swob.HTTPOk, {}, json.dumps(old_versions[4:]))

# subsequent requests shouldn't reverse
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[:2]))
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/1&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[2:3]))
self.app.register(
'GET', '/v1/a/ver_cont?format=json&prefix=001o/&'
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/2&end_marker=001o/3',
swob.HTTPOk, {}, '[]')
self.app.register(
@ -1327,7 +1327,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
prefix_listing_prefix = '/v1/a/ver_cont?format=json&prefix=001o/&'
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'),
|
@ -136,22 +136,26 @@ class TestWSGI(unittest.TestCase):
_fake_rings(t)
app, conf, logger, log_name = wsgi.init_request_processor(
conf_file, 'proxy-server')
# verify pipeline is catch_errors -> dlo -> proxy-server
# verify pipeline is: catch_errors -> gatekeeper -> listing_formats ->
# copy -> dlo -> proxy-server
expected = swift.common.middleware.catch_errors.CatchErrorMiddleware
self.assertTrue(isinstance(app, expected))
self.assertIsInstance(app, expected)

app = app.app
expected = swift.common.middleware.gatekeeper.GatekeeperMiddleware
self.assertTrue(isinstance(app, expected))
self.assertIsInstance(app, expected)

app = app.app
expected = \
swift.common.middleware.copy.ServerSideCopyMiddleware
expected = swift.common.middleware.listing_formats.ListingFilter
self.assertIsInstance(app, expected)

app = app.app
expected = swift.common.middleware.copy.ServerSideCopyMiddleware
self.assertIsInstance(app, expected)

app = app.app
expected = swift.common.middleware.dlo.DynamicLargeObject
self.assertTrue(isinstance(app, expected))
self.assertIsInstance(app, expected)

app = app.app
expected = \
@ -160,7 +164,7 @@ class TestWSGI(unittest.TestCase):

app = app.app
expected = swift.proxy.server.Application
self.assertTrue(isinstance(app, expected))
self.assertIsInstance(app, expected)
# config settings applied to app instance
self.assertEqual(0.2, app.conn_timeout)
# appconfig returns values from 'proxy-server' section
@ -1478,6 +1482,7 @@ class TestPipelineModification(unittest.TestCase):
self.assertEqual(self.pipeline_modules(app),
['swift.common.middleware.catch_errors',
'swift.common.middleware.gatekeeper',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.dlo',
'swift.common.middleware.versioned_writes',
@ -1510,6 +1515,7 @@ class TestPipelineModification(unittest.TestCase):
self.assertEqual(self.pipeline_modules(app),
['swift.common.middleware.catch_errors',
'swift.common.middleware.gatekeeper',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.dlo',
'swift.common.middleware.versioned_writes',
@ -1549,6 +1555,7 @@ class TestPipelineModification(unittest.TestCase):
self.assertEqual(self.pipeline_modules(app),
['swift.common.middleware.catch_errors',
'swift.common.middleware.gatekeeper',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.slo',
'swift.common.middleware.dlo',
@ -1649,6 +1656,7 @@ class TestPipelineModification(unittest.TestCase):
self.assertEqual(self.pipeline_modules(app), [
'swift.common.middleware.catch_errors',
'swift.common.middleware.gatekeeper',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.dlo',
'swift.common.middleware.versioned_writes',
@ -1664,6 +1672,7 @@ class TestPipelineModification(unittest.TestCase):
'swift.common.middleware.gatekeeper',
'swift.common.middleware.healthcheck',
'swift.common.middleware.catch_errors',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.dlo',
'swift.common.middleware.versioned_writes',
@ -1678,6 +1687,7 @@ class TestPipelineModification(unittest.TestCase):
'swift.common.middleware.healthcheck',
'swift.common.middleware.catch_errors',
'swift.common.middleware.gatekeeper',
'swift.common.middleware.listing_formats',
'swift.common.middleware.copy',
'swift.common.middleware.dlo',
'swift.common.middleware.versioned_writes',
@ -1713,7 +1723,7 @@ class TestPipelineModification(unittest.TestCase):
tempdir, policy.ring_name + '.ring.gz')

app = wsgi.loadapp(conf_path)
proxy_app = app.app.app.app.app.app.app
proxy_app = app.app.app.app.app.app.app.app
self.assertEqual(proxy_app.account_ring.serialized_path,
account_ring_path)
self.assertEqual(proxy_app.container_ring.serialized_path,
|
@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -2112,6 +2113,54 @@ class TestContainerController(unittest.TestCase):
resp.content_type, 'application/json',
'Invalid content_type for Accept: %s' % accept)

def test_GET_non_ascii(self):
# make a container
req = Request.blank(
'/sda1/p/a/jsonc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)

noodles = [u"Spätzle", u"ラーメン"]
for n in noodles:
req = Request.blank(
'/sda1/p/a/jsonc/%s' % n.encode("utf-8"),
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)  # sanity check

json_body = [{"name": noodles[0],
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": noodles[1],
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}]

# JSON
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)  # sanity check
self.assertEqual(json.loads(resp.body), json_body)

# Plain text
text_body = u''.join(n + u"\n" for n in noodles).encode('utf-8')
req = Request.blank(
'/sda1/p/a/jsonc?format=text',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)  # sanity check
self.assertEqual(resp.body, text_body)

def test_GET_plain(self):
# make a container
req = Request.blank(
@ -2496,6 +2545,39 @@ class TestContainerController(unittest.TestCase):
{"subdir": "US-TX-"},
{"subdir": "US-UT-"}])

def test_GET_delimiter_non_ascii(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for obj_name in [u"a/❥/1", u"a/❥/2", u"a/ꙮ/1", u"a/ꙮ/2"]:
req = Request.blank(
'/sda1/p/a/c/%s' % obj_name.encode('utf-8'),
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)

# JSON
req = Request.blank(
'/sda1/p/a/c?prefix=a/&delimiter=/&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
json.loads(resp.body),
[{"subdir": u"a/❥/"},
{"subdir": u"a/ꙮ/"}])

# Plain text
req = Request.blank(
'/sda1/p/a/c?prefix=a/&delimiter=/&format=text',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.body, u"a/❥/\na/ꙮ/\n".encode("utf-8"))

def test_GET_leading_delimiter(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
|
@ -37,7 +37,7 @@ from swift.account import server as account_server
from swift.common import storage_policy
from swift.common.ring import RingData
from swift.common.storage_policy import StoragePolicy, ECStoragePolicy
from swift.common.middleware import proxy_logging
from swift.common.middleware import listing_formats, proxy_logging
from swift.common import utils
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
from swift.container import server as container_server
@ -210,8 +210,8 @@ def setup_servers(the_object_server=object_server, extra_conf=None):
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv, obj3srv,
obj4srv, obj5srv, obj6srv)
nl = NullLogger()
logging_prosv = proxy_logging.ProxyLoggingMiddleware(prosrv, conf,
logger=prosrv.logger)
logging_prosv = proxy_logging.ProxyLoggingMiddleware(
listing_formats.ListingFilter(prosrv), conf, logger=prosrv.logger)
prospa = spawn(wsgi.server, prolis, logging_prosv, nl)
acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl)
acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl)
|
@ -59,7 +59,7 @@ from swift.proxy import server as proxy_server
from swift.proxy.controllers.obj import ReplicatedObjectController
from swift.obj import server as object_server
from swift.common.middleware import proxy_logging, versioned_writes, \
copy
copy, listing_formats
from swift.common.middleware.acl import parse_acl, format_acl
from swift.common.exceptions import ChunkReadTimeout, DiskFileNotExist, \
APIVersionError, ChunkWriteTimeout
@ -9176,10 +9176,11 @@ class TestAccountControllerFakeGetResponse(unittest.TestCase):
"""
def setUp(self):
conf = {'account_autocreate': 'yes'}
self.app = proxy_server.Application(conf, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
self.app.memcache = FakeMemcacheReturnsNone()
self.app = listing_formats.ListingFilter(
proxy_server.Application(conf, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing()))
self.app.app.memcache = FakeMemcacheReturnsNone()

def test_GET_autocreate_accept_json(self):
with save_globals():
@ -9569,12 +9570,15 @@ class TestSocketObjectVersions(unittest.TestCase):
])
conf = {'devices': _testdir, 'swift_dir': _testdir,
'mount_check': 'false', 'allowed_headers': allowed_headers}
prosrv = versioned_writes.VersionedWritesMiddleware(
prosrv = listing_formats.ListingFilter(
copy.ServerSideCopyMiddleware(
proxy_logging.ProxyLoggingMiddleware(
_test_servers[0], conf,
logger=_test_servers[0].logger), conf),
{})
versioned_writes.VersionedWritesMiddleware(
proxy_logging.ProxyLoggingMiddleware(
_test_servers[0], conf,
logger=_test_servers[0].logger), {}),
{}
)
)
self.coro = spawn(wsgi.server, prolis, prosrv, NullLogger())
# replace global prosrv with one that's filtered with version
# middleware
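For readers following the test changes above, here is a minimal sketch of how the new filter gets wrapped around a proxy app in these tests. It assumes only what the diffs here show: that swift.common.middleware.listing_formats exposes a ListingFilter class taking the wrapped WSGI app, and that ProxyLoggingMiddleware accepts (app, conf, logger=...). This is an illustration, not part of the patch.

# Hypothetical helper mirroring the new setup_servers() wiring in
# test/unit/helpers.py: listing_formats wraps the proxy app, and
# proxy-logging wraps the listing_formats filter.
from swift.common.middleware import listing_formats, proxy_logging

def wrap_for_tests(prosrv, conf):
    # prosrv is a swift.proxy.server.Application instance; conf is its dict.
    return proxy_logging.ProxyLoggingMiddleware(
        listing_formats.ListingFilter(prosrv), conf, logger=prosrv.logger)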