Rework to support RFC 2616 Sec 4.4 Message Length
RFC 2616 Sec 4.4 Message Length describes how the content-length and transfer-encoding headers interact. Basically, if chunked transfer-encoding is used, the content-length header value is ignored, and if the content-length header is present while the request is not using chunked transfer-encoding, then the content-length must match the body length. The only Transfer-Coding value we support in the Transfer-Encoding header (to date) is "chunked". RFC 2616 Sec 14.41 specifies that if "multiple encodings have been applied to an entity, the transfer-codings MUST be listed in the order in which they were applied." Since we only support "chunked", if the Transfer-Encoding header value has multiple transfer-codings, we return a 501 (Not Implemented) (see RFC 2616 Sec 3.6) without checking whether chunked is the last one specified. Finally, if transfer-encoding is anything but "chunked", we return a 400 (Bad Request) to the client. This patch adds a new method, message_length, to the swob request object, which applies an algorithm based on RFC 2616 Sec 4.4 leveraging the existing content_length property. In addition to these changes, the proxy server will now notice when the message length specified by the content-length header is greater than the configured object maximum size and fail the request with a 413, "Request Entity Too Large", before reading the entire body. This work flows from https://review.openstack.org/27152. Change-Id: I5d2a30b89092680dee9d946e1aafd017eaaef8c0 Signed-off-by: Peter Portante <peter.portante@redhat.com>
This commit is contained in:
parent
1b283d47cd
commit
5174b7f85d
@ -163,7 +163,7 @@ def _header_property(header):
|
|||||||
(Used by both request and response)
|
(Used by both request and response)
|
||||||
If a value of None is given, the header is deleted.
|
If a value of None is given, the header is deleted.
|
||||||
|
|
||||||
:param header: name of the header, e.g. "Content-Length"
|
:param header: name of the header, e.g. "Transfer-Encoding"
|
||||||
"""
|
"""
|
||||||
def getter(self):
|
def getter(self):
|
||||||
return self.headers.get(header, None)
|
return self.headers.get(header, None)
|
||||||
@ -905,6 +905,46 @@ class Request(object):
|
|||||||
self.environ.get('SCRIPT_NAME', '') + self.environ['PATH_INFO'],
|
self.environ.get('SCRIPT_NAME', '') + self.environ['PATH_INFO'],
|
||||||
minsegs, maxsegs, rest_with_last)
|
minsegs, maxsegs, rest_with_last)
|
||||||
|
|
||||||
|
def message_length(self):
    """
    Properly determine the message length for this request.

    Applies the algorithm of RFC 2616 Sec 4.4: a chunked
    transfer-encoding means the message length is unknown up front (the
    content-length header, if any, must be ignored); otherwise the
    content-length header, when present, is authoritative.

    :returns: an integer if the headers explicitly contain the message
              length, or None if the headers don't contain a length
    :raises ValueError: if either the transfer-encoding or
                        content-length header has a bad value
    :raises AttributeError: if an unsupported transfer-coding is
                            specified in the transfer-encoding header
                            (callers map this to 501 Not Implemented)
    """
    te = self.headers.get('transfer-encoding')
    if te:
        encodings = te.split(',')
        if len(encodings) > 1:
            # RFC 2616 Sec 3.6 requires "chunked" to be the last of
            # multiple codings, but since "chunked" is the only coding
            # we implement, any multi-coding value is unsupported --
            # reject it without inspecting the individual codings.
            raise AttributeError('Unsupported Transfer-Coding header'
                                 ' value specified in Transfer-Encoding'
                                 ' header')
        if encodings[-1].lower() == 'chunked':
            chunked = True
        else:
            # A single, non-chunked coding is a malformed request.
            raise ValueError('Invalid Transfer-Encoding header value')
    else:
        chunked = False
    if chunked:
        # Chunked transfer encoding: per RFC 2616 Sec 4.4 the
        # content-length header must be ignored, so the length is
        # unknown.
        fsize = None
    else:
        # Not chunked, so we can pay attention to the content-length
        # header (if any).
        fsize = self.headers.get('content-length', None)
        if fsize is not None:
            try:
                fsize = int(fsize)
            except ValueError:
                raise ValueError('Invalid Content-Length header value')
    return fsize
|
||||||
|
|
||||||
|
|
||||||
def content_range_header_value(start, stop, size):
|
def content_range_header_value(start, stop, size):
|
||||||
return 'bytes %s-%s/%s' % (start, (stop - 1), size)
|
return 'bytes %s-%s/%s' % (start, (stop - 1), size)
|
||||||
@ -1139,6 +1179,7 @@ HTTPUnprocessableEntity = status_map[422]
|
|||||||
HTTPClientDisconnect = status_map[499]
|
HTTPClientDisconnect = status_map[499]
|
||||||
HTTPServerError = status_map[500]
|
HTTPServerError = status_map[500]
|
||||||
HTTPInternalServerError = status_map[500]
|
HTTPInternalServerError = status_map[500]
|
||||||
|
HTTPNotImplemented = status_map[501]
|
||||||
HTTPBadGateway = status_map[502]
|
HTTPBadGateway = status_map[502]
|
||||||
HTTPServiceUnavailable = status_map[503]
|
HTTPServiceUnavailable = status_map[503]
|
||||||
HTTPInsufficientStorage = status_map[507]
|
HTTPInsufficientStorage = status_map[507]
|
||||||
|
@ -716,6 +716,11 @@ class ObjectController(object):
|
|||||||
if new_delete_at and new_delete_at < time.time():
|
if new_delete_at and new_delete_at < time.time():
|
||||||
return HTTPBadRequest(body='X-Delete-At in past', request=request,
|
return HTTPBadRequest(body='X-Delete-At in past', request=request,
|
||||||
content_type='text/plain')
|
content_type='text/plain')
|
||||||
|
try:
|
||||||
|
fsize = request.message_length()
|
||||||
|
except ValueError as e:
|
||||||
|
return HTTPBadRequest(body=e.message, request=request,
|
||||||
|
content_type='text/plain')
|
||||||
if self.mount_check and not check_mount(self.devices, device):
|
if self.mount_check and not check_mount(self.devices, device):
|
||||||
return HTTPInsufficientStorage(drive=device, request=request)
|
return HTTPInsufficientStorage(drive=device, request=request)
|
||||||
disk_file = DiskFile(self.devices, device, partition, account,
|
disk_file = DiskFile(self.devices, device, partition, account,
|
||||||
@ -726,9 +731,6 @@ class ObjectController(object):
|
|||||||
orig_timestamp = disk_file.metadata.get('X-Timestamp')
|
orig_timestamp = disk_file.metadata.get('X-Timestamp')
|
||||||
upload_expiration = time.time() + self.max_upload_time
|
upload_expiration = time.time() + self.max_upload_time
|
||||||
etag = md5()
|
etag = md5()
|
||||||
fsize = request.headers.get('content-length', None)
|
|
||||||
if fsize is not None:
|
|
||||||
fsize = int(fsize)
|
|
||||||
elapsed_time = 0
|
elapsed_time = 0
|
||||||
try:
|
try:
|
||||||
with disk_file.writer(size=fsize) as writer:
|
with disk_file.writer(size=fsize) as writer:
|
||||||
|
@ -53,7 +53,7 @@ from swift.proxy.controllers.base import Controller, delay_denial, \
|
|||||||
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPNotFound, \
|
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPNotFound, \
|
||||||
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPRequestTimeout, \
|
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPRequestTimeout, \
|
||||||
HTTPServerError, HTTPServiceUnavailable, Request, Response, \
|
HTTPServerError, HTTPServiceUnavailable, Request, Response, \
|
||||||
HTTPClientDisconnect
|
HTTPClientDisconnect, HTTPNotImplemented
|
||||||
|
|
||||||
|
|
||||||
def segment_listing_iter(listing):
|
def segment_listing_iter(listing):
|
||||||
@ -703,6 +703,16 @@ class ObjectController(Controller):
|
|||||||
return aresp
|
return aresp
|
||||||
if not containers:
|
if not containers:
|
||||||
return HTTPNotFound(request=req)
|
return HTTPNotFound(request=req)
|
||||||
|
try:
|
||||||
|
ml = req.message_length()
|
||||||
|
except ValueError as e:
|
||||||
|
return HTTPBadRequest(request=req, content_type='text/plain',
|
||||||
|
body=e.message)
|
||||||
|
except AttributeError as e:
|
||||||
|
return HTTPNotImplemented(request=req, content_type='text/plain',
|
||||||
|
body=e.message)
|
||||||
|
if ml is not None and ml > MAX_FILE_SIZE:
|
||||||
|
return HTTPRequestEntityTooLarge(request=req)
|
||||||
if 'x-delete-after' in req.headers:
|
if 'x-delete-after' in req.headers:
|
||||||
try:
|
try:
|
||||||
x_delete_after = int(req.headers['x-delete-after'])
|
x_delete_after = int(req.headers['x-delete-after'])
|
||||||
@ -867,7 +877,8 @@ class ObjectController(Controller):
|
|||||||
node_iter = GreenthreadSafeIterator(
|
node_iter = GreenthreadSafeIterator(
|
||||||
self.iter_nodes(self.app.object_ring, partition))
|
self.iter_nodes(self.app.object_ring, partition))
|
||||||
pile = GreenPile(len(nodes))
|
pile = GreenPile(len(nodes))
|
||||||
chunked = req.headers.get('transfer-encoding')
|
te = req.headers.get('transfer-encoding', '')
|
||||||
|
chunked = ('chunked' in te)
|
||||||
|
|
||||||
outgoing_headers = self._backend_requests(
|
outgoing_headers = self._backend_requests(
|
||||||
req, len(nodes), container_partition, containers,
|
req, len(nodes), container_partition, containers,
|
||||||
|
@ -276,6 +276,9 @@ class TestAccept(unittest.TestCase):
|
|||||||
'text/xml'])
|
'text/xml'])
|
||||||
self.assertEquals(match, None)
|
self.assertEquals(match, None)
|
||||||
|
|
||||||
|
def test_repr(self):
|
||||||
|
acc = swift.common.swob.Accept("application/json")
|
||||||
|
self.assertEquals(repr(acc), "application/json")
|
||||||
|
|
||||||
class TestRequest(unittest.TestCase):
|
class TestRequest(unittest.TestCase):
|
||||||
def test_blank(self):
|
def test_blank(self):
|
||||||
@ -437,6 +440,11 @@ class TestRequest(unittest.TestCase):
|
|||||||
'QUERY_STRING': 'hello=equal&acl'})
|
'QUERY_STRING': 'hello=equal&acl'})
|
||||||
self.assertEqual(req.path_qs, '/hi/there?hello=equal&acl')
|
self.assertEqual(req.path_qs, '/hi/there?hello=equal&acl')
|
||||||
|
|
||||||
|
def test_url(self):
|
||||||
|
req = swift.common.swob.Request.blank('/hi/there?hello=equal&acl')
|
||||||
|
self.assertEqual(req.url,
|
||||||
|
'http://localhost/hi/there?hello=equal&acl')
|
||||||
|
|
||||||
def test_wsgify(self):
|
def test_wsgify(self):
|
||||||
used_req = []
|
used_req = []
|
||||||
|
|
||||||
@ -566,6 +574,60 @@ class TestRequest(unittest.TestCase):
|
|||||||
exp_url = '%(sche)s://%(host)s%(pi)s?%(qs)s' % locals()
|
exp_url = '%(sche)s://%(host)s%(pi)s?%(qs)s' % locals()
|
||||||
self.assertEqual(req.as_referer(), 'POST ' + exp_url)
|
self.assertEqual(req.as_referer(), 'POST ' + exp_url)
|
||||||
|
|
||||||
|
def test_message_length_just_content_length(self):
    # With no entity headers at all there is no known message length.
    req = swift.common.swob.Request.blank(
        u'/',
        environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'})
    self.assertEquals(req.message_length(), None)

    # Supplying a body sets content-length, which message_length
    # reports back as an integer.
    req = swift.common.swob.Request.blank(
        u'/',
        environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'},
        body='x' * 42)
    self.assertEquals(req.message_length(), 42)

    # A non-integer content-length must raise ValueError.
    req.headers['Content-Length'] = 'abc'
    try:
        req.message_length()
    except ValueError as err:
        self.assertEquals(err.message,
                          "Invalid Content-Length header value")
    else:
        self.fail("Expected a ValueError raised for 'abc'")
|
||||||
|
|
||||||
|
def test_message_length_transfer_encoding(self):
    # Chunked transfer-encoding means the message length is unknown,
    # regardless of any body/content-length.
    req = swift.common.swob.Request.blank(
        u'/',
        environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'},
        headers={'transfer-encoding': 'chunked'},
        body='x' * 42)
    self.assertEquals(req.message_length(), None)

    # Multiple transfer-codings are not implemented -> AttributeError
    # (mapped to 501 by callers).
    req.headers['Transfer-Encoding'] = 'gzip,chunked'
    try:
        req.message_length()
    except AttributeError as err:
        self.assertEquals(err.message,
                          "Unsupported Transfer-Coding header"
                          " value specified in Transfer-Encoding header")
    else:
        self.fail("Expected an AttributeError raised for 'gzip'")

    # A single unsupported coding is a bad request -> ValueError.
    req.headers['Transfer-Encoding'] = 'gzip'
    try:
        req.message_length()
    except ValueError as err:
        self.assertEquals(err.message,
                          "Invalid Transfer-Encoding header value")
    else:
        self.fail("Expected a ValueError raised for 'gzip'")

    # Multiple codings are rejected even when "chunked" is absent.
    req.headers['Transfer-Encoding'] = 'gzip,identity'
    try:
        req.message_length()
    except AttributeError as err:
        self.assertEquals(err.message,
                          "Unsupported Transfer-Coding header"
                          " value specified in Transfer-Encoding header")
    else:
        self.fail("Expected an AttributeError raised for 'gzip,identity'")
|
||||||
|
|
||||||
|
|
||||||
class TestStatusMap(unittest.TestCase):
|
class TestStatusMap(unittest.TestCase):
|
||||||
def test_status_map(self):
|
def test_status_map(self):
|
||||||
|
@ -671,15 +671,6 @@ class TestObjectController(unittest.TestCase):
|
|||||||
resp = self.object_controller.PUT(req)
|
resp = self.object_controller.PUT(req)
|
||||||
self.assertEquals(resp.status_int, 201)
|
self.assertEquals(resp.status_int, 201)
|
||||||
|
|
||||||
def test_PUT_zero_content_length_mismatched(self):
|
|
||||||
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
||||||
headers={'X-Timestamp': normalize_timestamp(time()),
|
|
||||||
'Content-Type': 'application/octet-stream'})
|
|
||||||
req.body = 'VERIFY'
|
|
||||||
req.headers['Content-Length'] = '0'
|
|
||||||
resp = self.object_controller.PUT(req)
|
|
||||||
self.assertEquals(resp.status_int, 499)
|
|
||||||
|
|
||||||
def test_PUT_common(self):
|
def test_PUT_common(self):
|
||||||
timestamp = normalize_timestamp(time())
|
timestamp = normalize_timestamp(time())
|
||||||
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
||||||
@ -1528,13 +1519,46 @@ class TestObjectController(unittest.TestCase):
|
|||||||
'Transfer-Encoding: chunked\r\n\r\n'
|
'Transfer-Encoding: chunked\r\n\r\n'
|
||||||
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n')
|
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n')
|
||||||
fd.flush()
|
fd.flush()
|
||||||
readuntil2crlfs(fd)
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 201'
|
||||||
|
self.assertEquals(headers[:len(exp)], exp)
|
||||||
sock = connect_tcp(('localhost', port))
|
sock = connect_tcp(('localhost', port))
|
||||||
fd = sock.makefile()
|
fd = sock.makefile()
|
||||||
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
|
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
|
||||||
'Connection: close\r\n\r\n')
|
'Connection: close\r\n\r\n')
|
||||||
fd.flush()
|
fd.flush()
|
||||||
readuntil2crlfs(fd)
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 200'
|
||||||
|
self.assertEquals(headers[:len(exp)], exp)
|
||||||
|
response = fd.read()
|
||||||
|
self.assertEquals(response, 'oh hai')
|
||||||
|
killer.kill()
|
||||||
|
|
||||||
|
def test_chunked_content_length_mismatch_zero(self):
|
||||||
|
listener = listen(('localhost', 0))
|
||||||
|
port = listener.getsockname()[1]
|
||||||
|
killer = spawn(wsgi.server, listener, self.object_controller,
|
||||||
|
NullLogger())
|
||||||
|
sock = connect_tcp(('localhost', port))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
|
||||||
|
'Content-Type: text/plain\r\n'
|
||||||
|
'Connection: close\r\nX-Timestamp: 1.0\r\n'
|
||||||
|
'Content-Length: 0\r\n'
|
||||||
|
'Transfer-Encoding: chunked\r\n\r\n'
|
||||||
|
'2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 201'
|
||||||
|
self.assertEquals(headers[:len(exp)], exp)
|
||||||
|
sock = connect_tcp(('localhost', port))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
|
||||||
|
'Connection: close\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 200'
|
||||||
|
self.assertEquals(headers[:len(exp)], exp)
|
||||||
response = fd.read()
|
response = fd.read()
|
||||||
self.assertEquals(response, 'oh hai')
|
self.assertEquals(response, 'oh hai')
|
||||||
killer.kill()
|
killer.kill()
|
||||||
|
@ -805,6 +805,174 @@ class TestObjectController(unittest.TestCase):
|
|||||||
res = controller.PUT(req)
|
res = controller.PUT(req)
|
||||||
self.assertTrue(res.status.startswith('201 '))
|
self.assertTrue(res.status.startswith('201 '))
|
||||||
|
|
||||||
|
def test_PUT_message_length_using_content_length(self):
|
||||||
|
prolis = _test_sockets[0]
|
||||||
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
||||||
|
fd = sock.makefile()
|
||||||
|
obj = 'j' * 20
|
||||||
|
fd.write('PUT /v1/a/c/o.content-length HTTP/1.1\r\n'
|
||||||
|
'Host: localhost\r\n'
|
||||||
|
'Connection: close\r\n'
|
||||||
|
'X-Storage-Token: t\r\n'
|
||||||
|
'Content-Length: %s\r\n'
|
||||||
|
'Content-Type: application/octet-stream\r\n'
|
||||||
|
'\r\n%s' % (str(len(obj)), obj))
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 201'
|
||||||
|
self.assertEqual(headers[:len(exp)], exp)
|
||||||
|
|
||||||
|
def test_PUT_message_length_using_transfer_encoding(self):
|
||||||
|
prolis = _test_sockets[0]
|
||||||
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
|
||||||
|
'Host: localhost\r\n'
|
||||||
|
'Connection: close\r\n'
|
||||||
|
'X-Storage-Token: t\r\n'
|
||||||
|
'Content-Type: application/octet-stream\r\n'
|
||||||
|
'Transfer-Encoding: chunked\r\n\r\n'
|
||||||
|
'2\r\n'
|
||||||
|
'oh\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' say\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' can\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' you\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' see\r\n'
|
||||||
|
'3\r\n'
|
||||||
|
' by\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' the\r\n'
|
||||||
|
'8\r\n'
|
||||||
|
' dawns\'\n\r\n'
|
||||||
|
'0\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 201'
|
||||||
|
self.assertEqual(headers[:len(exp)], exp)
|
||||||
|
|
||||||
|
def test_PUT_message_length_using_both(self):
|
||||||
|
prolis = _test_sockets[0]
|
||||||
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
|
||||||
|
'Host: localhost\r\n'
|
||||||
|
'Connection: close\r\n'
|
||||||
|
'X-Storage-Token: t\r\n'
|
||||||
|
'Content-Type: application/octet-stream\r\n'
|
||||||
|
'Content-Length: 33\r\n'
|
||||||
|
'Transfer-Encoding: chunked\r\n\r\n'
|
||||||
|
'2\r\n'
|
||||||
|
'oh\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' say\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' can\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' you\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' see\r\n'
|
||||||
|
'3\r\n'
|
||||||
|
' by\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' the\r\n'
|
||||||
|
'8\r\n'
|
||||||
|
' dawns\'\n\r\n'
|
||||||
|
'0\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 201'
|
||||||
|
self.assertEqual(headers[:len(exp)], exp)
|
||||||
|
|
||||||
|
def test_PUT_bad_message_length(self):
|
||||||
|
prolis = _test_sockets[0]
|
||||||
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
|
||||||
|
'Host: localhost\r\n'
|
||||||
|
'Connection: close\r\n'
|
||||||
|
'X-Storage-Token: t\r\n'
|
||||||
|
'Content-Type: application/octet-stream\r\n'
|
||||||
|
'Content-Length: 33\r\n'
|
||||||
|
'Transfer-Encoding: gzip\r\n\r\n'
|
||||||
|
'2\r\n'
|
||||||
|
'oh\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' say\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' can\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' you\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' see\r\n'
|
||||||
|
'3\r\n'
|
||||||
|
' by\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' the\r\n'
|
||||||
|
'8\r\n'
|
||||||
|
' dawns\'\n\r\n'
|
||||||
|
'0\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 400'
|
||||||
|
self.assertEqual(headers[:len(exp)], exp)
|
||||||
|
|
||||||
|
def test_PUT_message_length_unsup_xfr_encoding(self):
|
||||||
|
prolis = _test_sockets[0]
|
||||||
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
||||||
|
fd = sock.makefile()
|
||||||
|
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
|
||||||
|
'Host: localhost\r\n'
|
||||||
|
'Connection: close\r\n'
|
||||||
|
'X-Storage-Token: t\r\n'
|
||||||
|
'Content-Type: application/octet-stream\r\n'
|
||||||
|
'Content-Length: 33\r\n'
|
||||||
|
'Transfer-Encoding: gzip,chunked\r\n\r\n'
|
||||||
|
'2\r\n'
|
||||||
|
'oh\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' say\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' can\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' you\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' see\r\n'
|
||||||
|
'3\r\n'
|
||||||
|
' by\r\n'
|
||||||
|
'4\r\n'
|
||||||
|
' the\r\n'
|
||||||
|
'8\r\n'
|
||||||
|
' dawns\'\n\r\n'
|
||||||
|
'0\r\n\r\n')
|
||||||
|
fd.flush()
|
||||||
|
headers = readuntil2crlfs(fd)
|
||||||
|
exp = 'HTTP/1.1 501'
|
||||||
|
self.assertEqual(headers[:len(exp)], exp)
|
||||||
|
|
||||||
|
def test_PUT_message_length_too_large(self):
    # Shrink the configured maximum so the 33-byte body is "too
    # large"; the proxy must reject the PUT with 413 based on the
    # content-length header alone, before reading the whole body.
    swift.proxy.controllers.obj.MAX_FILE_SIZE = 10
    try:
        prolis = _test_sockets[0]
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        sock_file = sock.makefile()
        sock_file.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
                        'Host: localhost\r\n'
                        'Connection: close\r\n'
                        'X-Storage-Token: t\r\n'
                        'Content-Type: application/octet-stream\r\n'
                        'Content-Length: 33\r\n\r\n'
                        'oh say can you see by the dawns\'\n')
        sock_file.flush()
        status_line = readuntil2crlfs(sock_file)
        expected = 'HTTP/1.1 413'
        self.assertEqual(status_line[:len(expected)], expected)
    finally:
        # Always restore the real limit so other tests are unaffected.
        swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE
|
||||||
|
|
||||||
def test_expirer_DELETE_on_versioned_object(self):
|
def test_expirer_DELETE_on_versioned_object(self):
|
||||||
test_errors = []
|
test_errors = []
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user