s3api: Implement object versioning API
Translate AWS S3 Object Versioning API requests to the native Swift Object Versioning API, specifically:

* bucket versioning status
* bucket versioned objects listing params
* object GETorHEAD & DELETE versionId
* multi_delete versionId

Change-Id: I8296681b61996e073b3ba12ad46f99042dc15c37
Co-Authored-By: Tim Burke <tim.burke@gmail.com>
Co-Authored-By: Clay Gerrard <clay.gerrard@gmail.com>
parent 2759d5d51c
commit 6097660f0c
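At a high level, the change maps S3 versioning query parameters onto Swift's native object-versioning parameters (for example `versionId` becomes `version-id`, and `?versions` bucket listings become container version listings with `key-marker`/`version-id-marker` translated to `marker`/`version_marker`). The sketch below is illustrative only -- the helper name is hypothetical and not part of the patch -- but the parameter mapping follows the controller changes in this commit:

# Hypothetical helper, for illustration only; the real translation happens
# inside the s3api controllers touched by this commit.
def s3_to_swift_version_params(s3_params):
    """Map S3 versioning query params to native Swift query params."""
    query = {}
    version_id = s3_params.get('versionId')
    if version_id is not None:
        # object GET/HEAD/DELETE: ?versionId=... -> ?version-id=...
        query['version-id'] = version_id
    if 'versions' in s3_params:
        # bucket versioned-objects listing
        query['versions'] = s3_params['versions']
        if 'key-marker' in s3_params:
            query['marker'] = s3_params['key-marker']
        if 'version-id-marker' in s3_params:
            query['version_marker'] = s3_params['version-id-marker']
    return query

# e.g. s3_to_swift_version_params({'versionId': '1574360804.21584'})
# -> {'version-id': '1574360804.21584'}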
@@ -91,23 +91,16 @@ ceph_s3:
   s3tests.functional.test_s3.test_put_object_ifnonmatch_overwrite_existed_failed: {status: KNOWN}
   s3tests.functional.test_s3.test_set_cors: {status: KNOWN}
   s3tests.functional.test_s3.test_stress_bucket_acls_changes: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioned_concurrent_object_create_and_remove: {status: KNOWN}
   s3tests.functional.test_s3.test_versioned_concurrent_object_create_concurrent_remove: {status: KNOWN}
   s3tests.functional.test_s3.test_versioned_object_acl: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_bucket_create_suspend: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_copy_obj_version: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_multi_object_delete: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker_create: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_create_overwrite_multipart: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_create_read_remove: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_create_read_remove_head: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_all: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_special_names: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_list_marker: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite_suspended: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_plain_null_version_removal: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_suspend_versions: {status: KNOWN}
   s3tests.functional.test_s3.test_versioning_obj_suspend_versions_simple: {status: KNOWN}
   s3tests.functional.test_s3_website.check_can_test_website: {status: KNOWN}
@@ -177,9 +170,6 @@ ceph_s3:
   s3tests.functional.test_s3.test_lifecycle_set_multipart: {status: KNOWN}
   s3tests.functional.test_s3.test_lifecycle_set_noncurrent: {status: KNOWN}
   s3tests.functional.test_s3.test_multipart_copy_invalid_range: {status: KNOWN}
-  s3tests.functional.test_s3.test_multipart_copy_versioned: {status: KNOWN}
-  s3tests.functional.test_s3.test_object_copy_versioned_bucket: {status: KNOWN}
-  s3tests.functional.test_s3.test_object_copy_versioning_multipart_upload: {status: KNOWN}
   s3tests.functional.test_s3.test_post_object_empty_conditions: {status: KNOWN}
   s3tests.functional.test_s3.test_post_object_tags_anonymous_request: {status: KNOWN}
   s3tests.functional.test_s3.test_post_object_tags_authenticated_request: {status: KNOWN}
@@ -1,5 +1,6 @@
 ceph_s3:
   <nose.suite.ContextSuite context=s3tests.functional>:teardown: {status: KNOWN}
+  <nose.suite.ContextSuite context=s3tests_boto3.functional>:teardown: {status: KNOWN}
   <nose.suite.ContextSuite context=test_routing_generator>:setup: {status: KNOWN}
   s3tests.functional.test_headers.test_bucket_create_bad_authorization_invalid_aws2: {status: KNOWN}
   s3tests.functional.test_headers.test_bucket_create_bad_authorization_none: {status: KNOWN}
@@ -45,7 +46,6 @@ ceph_s3:
   s3tests.functional.test_s3.test_append_object_position_wrong: {status: KNOWN}
   s3tests.functional.test_s3.test_append_normal_object: {status: KNOWN}
   s3tests.functional.test_s3.test_append_object: {status: KNOWN}
-  s3tests.functional.test_s3.test_versioning_obj_read_not_exist_null: {status: KNOWN}
   s3tests_boto3.functional.test_headers.test_bucket_create_bad_authorization_empty: {status: KNOWN}
   s3tests_boto3.functional.test_headers.test_bucket_create_bad_authorization_invalid_aws2: {status: KNOWN}
   s3tests_boto3.functional.test_headers.test_bucket_create_bad_authorization_none: {status: KNOWN}
@@ -151,16 +151,12 @@ ceph_s3:
   s3tests_boto3.functional.test_s3.test_list_buckets_invalid_auth: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_logging_toggle: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_multipart_copy_invalid_range: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_multipart_copy_versioned: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_multipart_resend_first_finishes_last: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_multipart_upload: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_multipart_upload_empty: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_acl_canned_bucketownerread: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_anon_put: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_anon_put_write_access: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_object_copy_versioned_bucket: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_object_copy_versioned_url_encoding: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_object_copy_versioning_multipart_upload: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_delete_key_bucket_gone: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_header_acl_grants: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_object_lock_delete_object_with_legal_hold_off: {status: KNOWN}
@@ -265,24 +261,6 @@ ceph_s3:
   s3tests_boto3.functional.test_s3.test_sse_kms_transfer_1MB: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_sse_kms_transfer_1b: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_sse_kms_transfer_1kb: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioned_concurrent_object_create_and_remove: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioned_concurrent_object_create_concurrent_remove: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioned_object_acl: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioned_object_acl_no_version_specified: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_bucket_atomic_upload_return_version_id: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_bucket_create_suspend: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_versioning_bucket_multipart_upload_return_version_id: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_copy_obj_version: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_multi_object_delete: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_multi_object_delete_with_marker: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_versioning_multi_object_delete_with_marker_create: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_create_overwrite_multipart: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_create_read_remove: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_create_read_remove_head: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_create_versions_remove_all: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_create_versions_remove_special_names: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_list_marker: {status: KNOWN}
   s3tests_boto3.functional.test_s3.test_versioning_obj_plain_null_version_overwrite: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_plain_null_version_overwrite_suspended: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_plain_null_version_removal: {status: KNOWN}
-  s3tests_boto3.functional.test_s3.test_versioning_obj_suspend_versions: {status: KNOWN}
@@ -62,7 +62,7 @@ Amazon S3 operations
 +------------------------------------------------+------------------+--------------+
 | `Object tagging`_ | Core-API | Yes |
 +------------------------------------------------+------------------+--------------+
-| `Versioning`_ | Versioning | No |
+| `Versioning`_ | Versioning | Yes |
 +------------------------------------------------+------------------+--------------+
 | `Bucket notification`_ | Notifications | No |
 +------------------------------------------------+------------------+--------------+
@@ -128,9 +128,14 @@ class BaseAclHandler(object):
             raise Exception('No permission to be checked exists')

         if resource == 'object':
+            version_id = self.req.params.get('versionId')
+            if version_id is None:
+                query = {}
+            else:
+                query = {'version-id': version_id}
             resp = self.req.get_acl_response(app, 'HEAD',
                                              container, obj,
-                                             headers)
+                                             headers, query=query)
             acl = resp.object_acl
         elif resource == 'container':
             resp = self.req.get_acl_response(app, 'HEAD',
@@ -460,4 +465,9 @@ ACL_MAP = {
     # Initiate Multipart Upload
     ('POST', 'HEAD', 'container'):
     {'Permission': 'WRITE'},
+    # Versioning
+    ('PUT', 'POST', 'container'):
+    {'Permission': 'WRITE'},
+    ('DELETE', 'GET', 'container'):
+    {'Permission': 'WRITE'},
 }
@@ -21,13 +21,16 @@ from six.moves.urllib.parse import quote

 from swift.common import swob
 from swift.common.http import HTTP_OK
-from swift.common.utils import json, public, config_true_value
+from swift.common.middleware.versioned_writes.object_versioning import \
+    DELETE_MARKER_CONTENT_TYPE
+from swift.common.utils import json, public, config_true_value, Timestamp, \
+    get_swift_info

 from swift.common.middleware.s3api.controllers.base import Controller
-from swift.common.middleware.s3api.etree import Element, SubElement, tostring, \
-    fromstring, XMLSyntaxError, DocumentInvalid
-from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
-    InvalidArgument, \
+from swift.common.middleware.s3api.etree import Element, SubElement, \
+    tostring, fromstring, XMLSyntaxError, DocumentInvalid
+from swift.common.middleware.s3api.s3response import \
+    HTTPOk, S3NotImplemented, InvalidArgument, \
     MalformedXML, InvalidLocationConstraint, NoSuchBucket, \
     BucketNotEmpty, InternalError, ServiceUnavailable, NoSuchKey
 from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX
@@ -94,36 +97,38 @@ class BucketController(Controller):

         return HTTPOk(headers=resp.headers)

-    @public
-    def GET(self, req):
-        """
-        Handle GET Bucket (List Objects) request
-        """
-
-        max_keys = req.get_validated_param(
-            'max-keys', self.conf.max_bucket_listing)
-        # TODO: Separate max_bucket_listing and default_bucket_listing
-        tag_max_keys = max_keys
-        max_keys = min(max_keys, self.conf.max_bucket_listing)
-
+    def _parse_request_options(self, req, max_keys):
         encoding_type = req.params.get('encoding-type')
         if encoding_type is not None and encoding_type != 'url':
             err_msg = 'Invalid Encoding Method specified in Request'
             raise InvalidArgument('encoding-type', encoding_type, err_msg)

+        # in order to judge that truncated is valid, check whether
+        # max_keys + 1 th element exists in swift.
         query = {
-            'format': 'json',
             'limit': max_keys + 1,
         }
         if 'prefix' in req.params:
-            query.update({'prefix': req.params['prefix']})
+            query['prefix'] = req.params['prefix']
         if 'delimiter' in req.params:
-            query.update({'delimiter': req.params['delimiter']})
+            query['delimiter'] = req.params['delimiter']
         fetch_owner = False
         if 'versions' in req.params:
+            query['versions'] = req.params['versions']
             listing_type = 'object-versions'
             if 'key-marker' in req.params:
-                query.update({'marker': req.params['key-marker']})
+                query['marker'] = req.params['key-marker']
+                version_marker = req.params.get('version-id-marker')
+                if version_marker is not None:
+                    if version_marker != 'null':
+                        try:
+                            Timestamp(version_marker)
+                        except ValueError:
+                            raise InvalidArgument(
+                                'version-id-marker',
+                                req.params['version-id-marker'],
+                                'Invalid version id specified')
+                    query['version_marker'] = version_marker
             elif 'version-id-marker' in req.params:
                 err_msg = ('A version-id marker cannot be specified without '
                            'a key marker.')
@@ -132,30 +137,23 @@ class BucketController(Controller):
         elif int(req.params.get('list-type', '1')) == 2:
             listing_type = 'version-2'
             if 'start-after' in req.params:
-                query.update({'marker': req.params['start-after']})
+                query['marker'] = req.params['start-after']
             # continuation-token overrides start-after
             if 'continuation-token' in req.params:
                 decoded = b64decode(req.params['continuation-token'])
                 if not six.PY2:
                     decoded = decoded.decode('utf8')
-                query.update({'marker': decoded})
+                query['marker'] = decoded
             if 'fetch-owner' in req.params:
                 fetch_owner = config_true_value(req.params['fetch-owner'])
         else:
             listing_type = 'version-1'
             if 'marker' in req.params:
-                query.update({'marker': req.params['marker']})
+                query['marker'] = req.params['marker']

-        resp = req.get_response(self.app, query=query)
+        return encoding_type, query, listing_type, fetch_owner

-        objects = json.loads(resp.body)
-
-        # in order to judge that truncated is valid, check whether
-        # max_keys + 1 th element exists in swift.
-        is_truncated = max_keys > 0 and len(objects) > max_keys
-        objects = objects[:max_keys]
-
-        if listing_type == 'object-versions':
+    def _build_versions_result(self, req, objects, is_truncated):
         elem = Element('ListVersionsResult')
         SubElement(elem, 'Name').text = req.container_name
         SubElement(elem, 'Prefix').text = req.params.get('prefix')
@@ -166,15 +164,23 @@
             if 'name' in objects[-1]:
                 SubElement(elem, 'NextKeyMarker').text = \
                     objects[-1]['name']
+                SubElement(elem, 'NextVersionIdMarker').text = \
+                    objects[-1].get('version') or 'null'
             if 'subdir' in objects[-1]:
                 SubElement(elem, 'NextKeyMarker').text = \
                     objects[-1]['subdir']
                 SubElement(elem, 'NextVersionIdMarker').text = 'null'
-        else:
+        return elem
+
+    def _build_base_listing_element(self, req):
         elem = Element('ListBucketResult')
         SubElement(elem, 'Name').text = req.container_name
         SubElement(elem, 'Prefix').text = req.params.get('prefix')
-        if listing_type == 'version-1':
+        return elem
+
+    def _build_list_bucket_result_type_one(self, req, objects, encoding_type,
+                                           is_truncated):
+        elem = self._build_base_listing_element(req)
         SubElement(elem, 'Marker').text = req.params.get('marker')
         if is_truncated and 'delimiter' in req.params:
             if 'name' in objects[-1]:
|
|||||||
if encoding_type == 'url':
|
if encoding_type == 'url':
|
||||||
name = quote(name.encode('utf-8'))
|
name = quote(name.encode('utf-8'))
|
||||||
SubElement(elem, 'NextMarker').text = name
|
SubElement(elem, 'NextMarker').text = name
|
||||||
elif listing_type == 'version-2':
|
# XXX: really? no NextMarker when no delimiter??
|
||||||
|
return elem
|
||||||
|
|
||||||
|
def _build_list_bucket_result_type_two(self, req, objects, is_truncated):
|
||||||
|
elem = self._build_base_listing_element(req)
|
||||||
if is_truncated:
|
if is_truncated:
|
||||||
if 'name' in objects[-1]:
|
if 'name' in objects[-1]:
|
||||||
SubElement(elem, 'NextContinuationToken').text = \
|
SubElement(elem, 'NextContinuationToken').text = \
|
||||||
b64encode(objects[-1]['name'].encode('utf-8'))
|
b64encode(objects[-1]['name'].encode('utf8'))
|
||||||
if 'subdir' in objects[-1]:
|
if 'subdir' in objects[-1]:
|
||||||
SubElement(elem, 'NextContinuationToken').text = \
|
SubElement(elem, 'NextContinuationToken').text = \
|
||||||
b64encode(objects[-1]['subdir'].encode('utf-8'))
|
b64encode(objects[-1]['subdir'].encode('utf8'))
|
||||||
if 'continuation-token' in req.params:
|
if 'continuation-token' in req.params:
|
||||||
SubElement(elem, 'ContinuationToken').text = \
|
SubElement(elem, 'ContinuationToken').text = \
|
||||||
req.params['continuation-token']
|
req.params['continuation-token']
|
||||||
@@ -199,41 +209,58 @@ class BucketController(Controller):
         if 'start-after' in req.params:
             SubElement(elem, 'StartAfter').text = \
                 req.params['start-after']
         SubElement(elem, 'KeyCount').text = str(len(objects))
+        return elem

+    def _finish_result(self, req, elem, tag_max_keys, encoding_type,
+                       is_truncated):
         SubElement(elem, 'MaxKeys').text = str(tag_max_keys)
-
         if 'delimiter' in req.params:
             SubElement(elem, 'Delimiter').text = req.params['delimiter']
-
         if encoding_type == 'url':
             SubElement(elem, 'EncodingType').text = encoding_type
-
         SubElement(elem, 'IsTruncated').text = \
             'true' if is_truncated else 'false'

-        for o in objects:
-            if 'subdir' not in o:
+    def _add_subdir(self, elem, o, encoding_type):
+        common_prefixes = SubElement(elem, 'CommonPrefixes')
+        name = o['subdir']
+        if encoding_type == 'url':
+            name = quote(name.encode('utf-8'))
+        SubElement(common_prefixes, 'Prefix').text = name
+
+    def _add_object(self, req, elem, o, encoding_type, listing_type,
+                    fetch_owner):
         name = o['name']
         if encoding_type == 'url':
             name = quote(name.encode('utf-8'))

         if listing_type == 'object-versions':
+            if o['content_type'] == DELETE_MARKER_CONTENT_TYPE:
+                contents = SubElement(elem, 'DeleteMarker')
+            else:
                 contents = SubElement(elem, 'Version')
             SubElement(contents, 'Key').text = name
-            SubElement(contents, 'VersionId').text = 'null'
+            SubElement(contents, 'VersionId').text = o.get(
+                'version_id') or 'null'
+            if 'object_versioning' in get_swift_info():
+                SubElement(contents, 'IsLatest').text = (
+                    'true' if o['is_latest'] else 'false')
+            else:
                 SubElement(contents, 'IsLatest').text = 'true'
         else:
             contents = SubElement(elem, 'Contents')
             SubElement(contents, 'Key').text = name
         SubElement(contents, 'LastModified').text = \
             o['last_modified'][:-3] + 'Z'
+        if contents.tag != 'DeleteMarker':
             if 's3_etag' in o:
                 # New-enough MUs are already in the right format
                 etag = o['s3_etag']
             elif 'slo_etag' in o:
                 # SLOs may be in something *close* to the MU format
-                etag = '"%s-N"' % swob.normalize_etag(o['slo_etag'])
+                etag = '"%s-N"' % o['slo_etag'].strip('"')
             else:
+                # Normal objects just use the MD5
                 etag = o['hash']
             if len(etag) < 2 or etag[::len(etag) - 1] != '""':
                 # Normal objects just use the MD5
@@ -249,15 +276,51 @@ class BucketController(Controller):
             owner = SubElement(contents, 'Owner')
             SubElement(owner, 'ID').text = req.user_id
             SubElement(owner, 'DisplayName').text = req.user_id
+        if contents.tag != 'DeleteMarker':
             SubElement(contents, 'StorageClass').text = 'STANDARD'

+    def _add_objects_to_result(self, req, elem, objects, encoding_type,
+                               listing_type, fetch_owner):
         for o in objects:
             if 'subdir' in o:
-                common_prefixes = SubElement(elem, 'CommonPrefixes')
-                name = o['subdir']
-                if encoding_type == 'url':
-                    name = quote(name.encode('utf-8'))
-                SubElement(common_prefixes, 'Prefix').text = name
+                self._add_subdir(elem, o, encoding_type)
+            else:
+                self._add_object(req, elem, o, encoding_type, listing_type,
+                                 fetch_owner)
+
+    @public
+    def GET(self, req):
+        """
+        Handle GET Bucket (List Objects) request
+        """
+        max_keys = req.get_validated_param(
+            'max-keys', self.conf.max_bucket_listing)
+        tag_max_keys = max_keys
+        # TODO: Separate max_bucket_listing and default_bucket_listing
+        max_keys = min(max_keys, self.conf.max_bucket_listing)
+
+        encoding_type, query, listing_type, fetch_owner = \
+            self._parse_request_options(req, max_keys)
+
+        resp = req.get_response(self.app, query=query)
+
+        objects = json.loads(resp.body)
+
+        is_truncated = max_keys > 0 and len(objects) > max_keys
+        objects = objects[:max_keys]
+
+        if listing_type == 'object-versions':
+            elem = self._build_versions_result(req, objects, is_truncated)
+        elif listing_type == 'version-2':
+            elem = self._build_list_bucket_result_type_two(
+                req, objects, is_truncated)
+        else:
+            elem = self._build_list_bucket_result_type_one(
+                req, objects, encoding_type, is_truncated)
+        self._finish_result(
+            req, elem, tag_max_keys, encoding_type, is_truncated)
+        self._add_objects_to_result(
+            req, elem, objects, encoding_type, listing_type, fetch_owner)

         body = tostring(elem)

@@ -297,6 +360,7 @@ class BucketController(Controller):
         """
         Handle DELETE Bucket request
         """
+        # NB: object_versioning is responsible for cleaning up its container
         if self.conf.allow_multipart_uploads:
             self._delete_segments_bucket(req)
         resp = req.get_response(self.app)
@@ -17,15 +17,15 @@ import copy
 import json

 from swift.common.constraints import MAX_OBJECT_NAME_LENGTH
-from swift.common.utils import public, StreamingPile
+from swift.common.utils import public, StreamingPile, get_swift_info

 from swift.common.middleware.s3api.controllers.base import Controller, \
     bucket_operation
 from swift.common.middleware.s3api.etree import Element, SubElement, \
     fromstring, tostring, XMLSyntaxError, DocumentInvalid
-from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
-    NoSuchKey, ErrorResponse, MalformedXML, UserKeyMustBeSpecified, \
-    AccessDenied, MissingRequestBodyError
+from swift.common.middleware.s3api.s3response import HTTPOk, \
+    S3NotImplemented, NoSuchKey, ErrorResponse, MalformedXML, \
+    UserKeyMustBeSpecified, AccessDenied, MissingRequestBodyError


 class MultiObjectDeleteController(Controller):
@@ -35,12 +35,10 @@ class MultiObjectDeleteController(Controller):
     """
     def _gen_error_body(self, error, elem, delete_list):
         for key, version in delete_list:
-            if version is not None:
-                # TODO: delete the specific version of the object
-                raise S3NotImplemented()
-
             error_elem = SubElement(elem, 'Error')
             SubElement(error_elem, 'Key').text = key
+            if version is not None:
+                SubElement(error_elem, 'VersionId').text = version
             SubElement(error_elem, 'Code').text = error.__class__.__name__
             SubElement(error_elem, 'Message').text = error._msg

@@ -105,21 +103,32 @@ class MultiObjectDeleteController(Controller):
             body = self._gen_error_body(error, elem, delete_list)
             return HTTPOk(body=body)

-        if any(version is not None for _key, version in delete_list):
-            # TODO: support deleting specific versions of objects
+        if 'object_versioning' not in get_swift_info() and any(
+                version not in ('null', None)
+                for _key, version in delete_list):
             raise S3NotImplemented()

         def do_delete(base_req, key, version):
             req = copy.copy(base_req)
             req.environ = copy.copy(base_req.environ)
             req.object_name = key
+            if version:
+                req.params = {'version-id': version, 'symlink': 'get'}

             try:
-                query = req.gen_multipart_manifest_delete_query(self.app)
+                try:
+                    query = req.gen_multipart_manifest_delete_query(
+                        self.app, version=version)
+                except NoSuchKey:
+                    query = {}
+                if version:
+                    query['version-id'] = version
+                query['symlink'] = 'get'
+
                 resp = req.get_response(self.app, method='DELETE', query=query,
                                         headers={'Accept': 'application/json'})
                 # Have to read the response to actually do the SLO delete
-                if query:
+                if query.get('multipart-manifest'):
                     try:
                         delete_result = json.loads(resp.body)
                         if delete_result['Errors']:
@@ -144,6 +153,12 @@ class MultiObjectDeleteController(Controller):
                         pass
             except ErrorResponse as e:
                 return key, {'code': e.__class__.__name__, 'message': e._msg}
+            except Exception:
+                self.logger.exception(
+                    'Unexpected Error handling DELETE of %r %r' % (
+                        req.container_name, key))
+                return key, {'code': 'Server Error', 'message': 'Server Error'}
+
             return key, None

         with StreamingPile(self.conf.multi_delete_concurrency) as pile:
@@ -100,10 +100,19 @@ def _get_upload_info(req, app, upload_id):
     container = req.container_name + MULTIUPLOAD_SUFFIX
     obj = '%s/%s' % (req.object_name, upload_id)

+    # XXX: if we leave the copy-source header, somewhere later we might
+    # drop in a ?version-id=... query string that's utterly inappropriate
+    # for the upload marker. Until we get around to fixing that, just pop
+    # it off for now...
+    copy_source = req.headers.pop('X-Amz-Copy-Source', None)
     try:
         return req.get_response(app, 'HEAD', container=container, obj=obj)
     except NoSuchKey:
         raise NoSuchUpload(upload_id=upload_id)
+    finally:
+        # ...making sure to restore any copy-source before returning
+        if copy_source is not None:
+            req.headers['X-Amz-Copy-Source'] = copy_source


 def _check_upload_info(req, app, upload_id):
@@ -13,16 +13,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
+
 from swift.common.http import HTTP_OK, HTTP_PARTIAL_CONTENT, HTTP_NO_CONTENT
 from swift.common.request_helpers import update_etag_is_at_header
 from swift.common.swob import Range, content_range_header_value, \
     normalize_etag
-from swift.common.utils import public, list_from_csv
+from swift.common.utils import public, list_from_csv, get_swift_info

+from swift.common.middleware.versioned_writes.object_versioning import \
+    DELETE_MARKER_CONTENT_TYPE
 from swift.common.middleware.s3api.utils import S3Timestamp, sysmeta_header
 from swift.common.middleware.s3api.controllers.base import Controller
 from swift.common.middleware.s3api.s3response import S3NotImplemented, \
-    InvalidRange, NoSuchKey, InvalidArgument, HTTPNoContent
+    InvalidRange, NoSuchKey, InvalidArgument, HTTPNoContent, \
+    PreconditionFailed


 class ObjectController(Controller):
@@ -78,11 +83,20 @@ class ObjectController(Controller):
         # Update where to look
         update_etag_is_at_header(req, sysmeta_header('object', 'etag'))

-        resp = req.get_response(self.app)
+        object_name = req.object_name
+        version_id = req.params.get('versionId')
+        if version_id not in ('null', None) and \
+                'object_versioning' not in get_swift_info():
+            raise S3NotImplemented()
+        query = {} if version_id is None else {'version-id': version_id}
+        resp = req.get_response(self.app, query=query)

         if req.method == 'HEAD':
             resp.app_iter = None

+        if 'x-amz-meta-deleted' in resp.headers:
+            raise NoSuchKey(object_name)
+
         for key in ('content-type', 'content-language', 'expires',
                     'cache-control', 'content-disposition',
                     'content-encoding'):
@@ -125,12 +139,14 @@ class ObjectController(Controller):
                     req.headers['X-Amz-Copy-Source-Range'],
                     'Illegal copy header')
             req.check_copy_source(self.app)
+        if not req.headers.get('Content-Type'):
+            # can't setdefault because it can be None for some reason
+            req.headers['Content-Type'] = 'binary/octet-stream'
         resp = req.get_response(self.app)

         if 'X-Amz-Copy-Source' in req.headers:
             resp.append_copy_resp_body(req.controller_name,
                                        req_timestamp.s3xmlformat)
-
         # delete object metadata from response
         for key in list(resp.headers.keys()):
             if key.lower().startswith('x-amz-meta-'):
@@ -143,20 +159,63 @@ class ObjectController(Controller):
     def POST(self, req):
         raise S3NotImplemented()

+    def _restore_on_delete(self, req):
+        resp = req.get_response(self.app, 'GET', req.container_name, '',
+                                query={'prefix': req.object_name,
+                                       'versions': True})
+        if resp.status_int != HTTP_OK:
+            return resp
+        old_versions = json.loads(resp.body)
+        resp = None
+        for item in old_versions:
+            if item['content_type'] == DELETE_MARKER_CONTENT_TYPE:
+                resp = None
+                break
+            try:
+                resp = req.get_response(self.app, 'PUT', query={
+                    'version-id': item['version_id']})
+            except PreconditionFailed:
+                self.logger.debug('skipping failed PUT?version-id=%s' %
+                                  item['version_id'])
+                continue
+            # if that worked, we'll go ahead and fix up the status code
+            resp.status_int = HTTP_NO_CONTENT
+            break
+        return resp
+
     @public
     def DELETE(self, req):
         """
         Handle DELETE Object request
         """
+        if 'versionId' in req.params and \
+                req.params['versionId'] != 'null' and \
+                'object_versioning' not in get_swift_info():
+            raise S3NotImplemented()
+
         try:
-            query = req.gen_multipart_manifest_delete_query(self.app)
+            try:
+                query = req.gen_multipart_manifest_delete_query(
+                    self.app, version=req.params.get('versionId'))
+            except NoSuchKey:
+                query = {}
+
             req.headers['Content-Type'] = None  # Ignore client content-type
+
+            if 'versionId' in req.params:
+                query['version-id'] = req.params['versionId']
+                query['symlink'] = 'get'
+
             resp = req.get_response(self.app, query=query)
-            if query and resp.status_int == HTTP_OK:
+            if query.get('multipart-manifest') and resp.status_int == HTTP_OK:
                 for chunk in resp.app_iter:
                     pass  # drain the bulk-deleter response
                 resp.status = HTTP_NO_CONTENT
                 resp.body = b''
+            if resp.sw_headers.get('X-Object-Current-Version-Id') == 'null':
+                new_resp = self._restore_on_delete(req)
+                if new_resp:
+                    resp = new_resp
         except NoSuchKey:
             # expect to raise NoSuchBucket when the bucket doesn't exist
             req.get_container_info(self.app)
@@ -13,12 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from swift.common.utils import public
+from swift.common.utils import public, get_swift_info, config_true_value

 from swift.common.middleware.s3api.controllers.base import Controller, \
     bucket_operation
-from swift.common.middleware.s3api.etree import Element, tostring
-from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented
+from swift.common.middleware.s3api.etree import Element, tostring, \
+    fromstring, XMLSyntaxError, DocumentInvalid, SubElement
+from swift.common.middleware.s3api.s3response import HTTPOk, \
+    S3NotImplemented, MalformedXML
+
+MAX_PUT_VERSIONING_BODY_SIZE = 10240


 class VersioningController(Controller):
@@ -36,13 +40,16 @@ class VersioningController(Controller):
         """
         Handles GET Bucket versioning.
         """
-        req.get_response(self.app, method='HEAD')
+        sysmeta = req.get_container_info(self.app).get('sysmeta', {})

-        # Just report there is no versioning configured here.
         elem = Element('VersioningConfiguration')
+        if sysmeta.get('versions-enabled'):
+            SubElement(elem, 'Status').text = (
+                'Enabled' if config_true_value(sysmeta['versions-enabled'])
+                else 'Suspended')
         body = tostring(elem)

-        return HTTPOk(body=body, content_type="text/plain")
+        return HTTPOk(body=body, content_type=None)

     @public
     @bucket_operation
@@ -50,4 +57,25 @@ class VersioningController(Controller):
         """
         Handles PUT Bucket versioning.
         """
+        if 'object_versioning' not in get_swift_info():
             raise S3NotImplemented()
+
+        xml = req.xml(MAX_PUT_VERSIONING_BODY_SIZE)
+        try:
+            elem = fromstring(xml, 'VersioningConfiguration')
+            status = elem.find('./Status').text
+        except (XMLSyntaxError, DocumentInvalid):
+            raise MalformedXML()
+        except Exception as e:
+            self.logger.error(e)
+            raise
+
+        if status not in ['Enabled', 'Suspended']:
+            raise MalformedXML()
+
+        # Set up versioning
+        # NB: object_versioning responsible for ensuring its container exists
+        req.headers['X-Versions-Enabled'] = str(status == 'Enabled').lower()
+        req.get_response(self.app, 'POST')
+
+        return HTTPOk()
@@ -877,20 +877,16 @@ class S3Request(swob.Request):
         except KeyError:
             return None

-        if '?' in src_path:
-            src_path, qs = src_path.split('?', 1)
-            query = parse_qsl(qs, True)
-            if not query:
-                pass  # ignore it
-            elif len(query) > 1 or query[0][0] != 'versionId':
+        src_path, qs = src_path.partition('?')[::2]
+        parsed = parse_qsl(qs, True)
+        if not parsed:
+            query = {}
+        elif len(parsed) == 1 and parsed[0][0] == 'versionId':
+            query = {'version-id': parsed[0][1]}
+        else:
             raise InvalidArgument('X-Amz-Copy-Source',
                                   self.headers['X-Amz-Copy-Source'],
                                   'Unsupported copy source parameter.')
-            elif query[0][1] != 'null':
-                # TODO: once we support versioning, we'll need to translate
-                # src_path to the proper location in the versions container
-                raise S3NotImplemented('Versioning is not yet supported')
-            self.headers['X-Amz-Copy-Source'] = src_path

         src_path = unquote(src_path)
         src_path = src_path if src_path.startswith('/') else ('/' + src_path)
@@ -900,19 +896,15 @@ class S3Request(swob.Request):
         headers.update(self._copy_source_headers())

         src_resp = self.get_response(app, 'HEAD', src_bucket, src_obj,
-                                     headers=headers)
+                                     headers=headers, query=query)
         if src_resp.status_int == 304:  # pylint: disable-msg=E1101
             raise PreconditionFailed()

-        self.headers['X-Amz-Copy-Source'] = \
-            '/' + self.headers['X-Amz-Copy-Source'].lstrip('/')
-        source_container, source_obj = \
-            split_path(self.headers['X-Amz-Copy-Source'], 1, 2, True)
-
-        if (self.container_name == source_container and
-                self.object_name == source_obj and
+        if (self.container_name == src_bucket and
+                self.object_name == src_obj and
                 self.headers.get('x-amz-metadata-directive',
-                                 'COPY') == 'COPY'):
+                                 'COPY') == 'COPY' and
+                not query):
             raise InvalidRequest("This copy request is illegal "
                                  "because it is trying to copy an "
                                  "object to itself without "
@@ -920,6 +912,12 @@ class S3Request(swob.Request):
                                  "storage class, website redirect "
                                  "location or encryption "
                                  "attributes.")
+        # We've done some normalizing; write back so it's ready for
+        # to_swift_req
+        self.headers['X-Amz-Copy-Source'] = quote(src_path)
+        if query:
+            self.headers['X-Amz-Copy-Source'] += \
+                '?versionId=' + query['version-id']
         return src_resp

     def _canonical_uri(self):
@@ -1064,6 +1062,7 @@ class S3Request(swob.Request):
         account = self.account

         env = self.environ.copy()
+        env['swift.infocache'] = self.environ.setdefault('swift.infocache', {})

         def sanitize(value):
             if set(value).issubset(string.printable):
@@ -1109,8 +1108,10 @@ class S3Request(swob.Request):
                 env['HTTP_X_OBJECT_META_' + key[16:]] = sanitize(env[key])
                 del env[key]

-        if 'HTTP_X_AMZ_COPY_SOURCE' in env:
-            env['HTTP_X_COPY_FROM'] = env['HTTP_X_AMZ_COPY_SOURCE']
+        copy_from_version_id = ''
+        if 'HTTP_X_AMZ_COPY_SOURCE' in env and env['REQUEST_METHOD'] == 'PUT':
+            env['HTTP_X_COPY_FROM'], copy_from_version_id = env[
+                'HTTP_X_AMZ_COPY_SOURCE'].partition('?versionId=')[::2]
             del env['HTTP_X_AMZ_COPY_SOURCE']
             env['CONTENT_LENGTH'] = '0'
             if env.pop('HTTP_X_AMZ_METADATA_DIRECTIVE', None) == 'REPLACE':
@@ -1143,16 +1144,16 @@ class S3Request(swob.Request):
         path = '/v1/%s' % (account)
         env['PATH_INFO'] = path

-        query_string = ''
-        if query is not None:
         params = []
+        if query is not None:
             for key, value in sorted(query.items()):
                 if value is not None:
                     params.append('%s=%s' % (key, quote(str(value))))
                 else:
                     params.append(key)
-            query_string = '&'.join(params)
-        env['QUERY_STRING'] = query_string
+        if copy_from_version_id and not (query and query.get('version-id')):
+            params.append('version-id=' + copy_from_version_id)
+        env['QUERY_STRING'] = '&'.join(params)

         return swob.Request.blank(quote(path), environ=env, body=body,
                                   headers=headers)
@@ -1292,6 +1293,7 @@ class S3Request(swob.Request):
                 HTTP_REQUEST_ENTITY_TOO_LARGE: EntityTooLarge,
                 HTTP_LENGTH_REQUIRED: MissingContentLength,
                 HTTP_REQUEST_TIMEOUT: RequestTimeout,
+                HTTP_PRECONDITION_FAILED: PreconditionFailed,
             },
             'POST': {
                 HTTP_NOT_FOUND: not_found_handler,
@@ -1445,14 +1447,16 @@ class S3Request(swob.Request):
         return headers_to_container_info(
             headers, resp.status_int)  # pylint: disable-msg=E1101

-    def gen_multipart_manifest_delete_query(self, app, obj=None):
+    def gen_multipart_manifest_delete_query(self, app, obj=None, version=None):
         if not self.allow_multipart_uploads:
-            return None
-        query = {'multipart-manifest': 'delete'}
+            return {}
         if not obj:
             obj = self.object_name
-        resp = self.get_response(app, 'HEAD', obj=obj)
-        return query if resp.is_slo else None
+        query = {'symlink': 'get'}
+        if version is not None:
+            query['version-id'] = version
+        resp = self.get_response(app, 'HEAD', obj=obj, query=query)
+        return {'multipart-manifest': 'delete'} if resp.is_slo else {}

     def set_acl_handler(self, handler):
         pass
@@ -25,6 +25,8 @@ from swift.common.request_helpers import is_sys_meta
 from swift.common.middleware.s3api.utils import snake_to_camel, \
     sysmeta_prefix, sysmeta_header
 from swift.common.middleware.s3api.etree import Element, SubElement, tostring
+from swift.common.middleware.versioned_writes.object_versioning import \
+    DELETE_MARKER_CONTENT_TYPE


 class HeaderKeyDict(header_key_dict.HeaderKeyDict):
@@ -109,9 +111,16 @@ class S3Response(S3ResponseBase, swob.Response):
                           'etag', 'last-modified', 'x-robots-tag',
                           'cache-control', 'expires'):
                 headers[key] = val
+            elif _key == 'x-object-version-id':
+                headers['x-amz-version-id'] = val
+            elif _key == 'x-copied-from-version-id':
+                headers['x-amz-copy-source-version-id'] = val
             elif _key == 'x-static-large-object':
                 # for delete slo
                 self.is_slo = config_true_value(val)
+            elif _key == 'x-backend-content-type' and \
+                    val == DELETE_MARKER_CONTENT_TYPE:
+                headers['x-amz-delete-marker'] = 'true'

         # Check whether we stored the AWS-style etag on upload
         override_etag = s3_sysmeta_headers.get(
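The branches added above are where native Swift versioning metadata surfaces in S3 responses. A minimal illustrative sketch of that header mapping as a standalone helper (not the middleware's actual code path; the delete-marker content type value is an assumption mirroring the constant imported above):

    # Assumed value of the constant imported from object_versioning.
    DELETE_MARKER_CONTENT_TYPE = 'application/x-deleted;swift_versions_deleted=1'

    def translate_version_headers(swift_headers):
        """Map Swift versioning response headers to their S3 equivalents."""
        s3_headers = {}
        for key, val in swift_headers.items():
            _key = key.lower()
            if _key == 'x-object-version-id':
                s3_headers['x-amz-version-id'] = val
            elif _key == 'x-copied-from-version-id':
                s3_headers['x-amz-copy-source-version-id'] = val
            elif (_key == 'x-backend-content-type'
                    and val == DELETE_MARKER_CONTENT_TYPE):
                s3_headers['x-amz-delete-marker'] = 'true'
        return s3_headers

    # e.g. translate_version_headers({'X-Object-Version-Id': '1573823274.52'})
    #      -> {'x-amz-version-id': '1573823274.52'}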
@@ -217,7 +226,7 @@ class ErrorResponse(S3ResponseBase, swob.HTTPException):

     def _dict_to_etree(self, parent, d):
         for key, value in d.items():
-            tag = re.sub('\W', '', snake_to_camel(key))
+            tag = re.sub(r'\W', '', snake_to_camel(key))
             elem = SubElement(parent, tag)

             if isinstance(value, (dict, MutableMapping)):
@@ -481,7 +490,7 @@ class MalformedPOSTRequest(ErrorResponse):
 class MalformedXML(ErrorResponse):
     _status = '400 Bad Request'
     _msg = 'The XML you provided was not well-formed or did not validate ' \
-           'against our published schema.'
+           'against our published schema'


 class MaxMessageLengthExceeded(ErrorResponse):
@@ -95,8 +95,8 @@ def validate_bucket_name(name, dns_compliant_bucket_names):
     elif name.endswith('.'):
         # Bucket names must not end with dot
         return False
-    elif re.match("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
-                  "{3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
+    elif re.match(r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
+                  r"{3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
                   name):
         # Bucket names cannot be formatted as an IP Address
         return False
@@ -741,7 +741,7 @@ def get_cluster_info():
         conn = Connection(config)
         conn.authenticate()
         cluster_info.update(conn.cluster_info())
-    except (ResponseError, socket.error):
+    except (ResponseError, socket.error, SkipTest):
         # Failed to get cluster_information via /info API, so fall back on
         # test.conf data
         pass
@@ -1039,10 +1039,13 @@ def teardown_package():
     global config

     if config:
+        try:
             conn = Connection(config)
             conn.authenticate()
             account = Account(conn, config.get('account', config['username']))
             account.delete_containers()
+        except (SkipTest):
+            pass

     global in_process
     global _test_socks
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import logging
 import os
 import test.functional as tf
 import boto3
@@ -27,6 +28,12 @@ import traceback
 RETRY_COUNT = 3


+if os.environ.get('SWIFT_TEST_QUIET_BOTO_LOGS'):
+    logging.getLogger('boto').setLevel(logging.INFO)
+    logging.getLogger('botocore').setLevel(logging.INFO)
+    logging.getLogger('boto3').setLevel(logging.INFO)
+
+
 def setUpModule():
     tf.setup_package()

@@ -88,8 +95,9 @@ class Connection(object):
             for upload in bucket.list_multipart_uploads():
                 upload.cancel_upload()

-            for obj in bucket.list():
-                bucket.delete_key(obj.name)
+            for obj in bucket.list_versions():
+                bucket.delete_key(
+                    obj.name, version_id=obj.version_id)

             self.conn.delete_bucket(bucket.name)
         except S3ResponseError as e:
@@ -84,9 +84,12 @@ class TestS3ApiMultiUpload(S3ApiBase):
         return status, headers, body

     def _upload_part_copy(self, src_bucket, src_obj, dst_bucket, dst_key,
-                          upload_id, part_num=1, src_range=None):
+                          upload_id, part_num=1, src_range=None,
+                          src_version_id=None):

         src_path = '%s/%s' % (src_bucket, src_obj)
+        if src_version_id:
+            src_path += '?versionId=%s' % src_version_id
         query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
         req_headers = {'X-Amz-Copy-Source': src_path}
         if src_range:
@@ -877,6 +880,133 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertTrue('content-length' in headers)
         self.assertEqual(headers['content-length'], '0')

+    def test_object_multi_upload_part_copy_version(self):
+        bucket = 'bucket'
+        keys = ['obj1']
+        uploads = []
+
+        results_generator = self._initiate_multi_uploads_result_generator(
+            bucket, keys)
+
+        # Initiate Multipart Upload
+        for expected_key, (status, headers, body) in \
+                zip(keys, results_generator):
+            self.assertEqual(status, 200)
+            self.assertCommonResponseHeaders(headers)
+            self.assertTrue('content-type' in headers)
+            self.assertEqual(headers['content-type'], 'application/xml')
+            self.assertTrue('content-length' in headers)
+            self.assertEqual(headers['content-length'], str(len(body)))
+            elem = fromstring(body, 'InitiateMultipartUploadResult')
+            self.assertEqual(elem.find('Bucket').text, bucket)
+            key = elem.find('Key').text
+            self.assertEqual(expected_key, key)
+            upload_id = elem.find('UploadId').text
+            self.assertTrue(upload_id is not None)
+            self.assertTrue((key, upload_id) not in uploads)
+            uploads.append((key, upload_id))
+
+        self.assertEqual(len(uploads), len(keys))  # sanity
+
+        key, upload_id = uploads[0]
+        src_bucket = 'bucket2'
+        src_obj = 'obj4'
+        src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
+            self.min_segment_size
+        etags = [md5(src_content).hexdigest()]
+
+        # prepare null-version src obj
+        self.conn.make_request('PUT', src_bucket)
+        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
+        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
+        self.assertCommonResponseHeaders(headers)
+
+        # Turn on versioning
+        elem = Element('VersioningConfiguration')
+        SubElement(elem, 'Status').text = 'Enabled'
+        xml = tostring(elem)
+        status, headers, body = self.conn.make_request(
+            'PUT', src_bucket, body=xml, query='versioning')
+        self.assertEqual(status, 200)
+
+        src_obj2 = 'obj5'
+        src_content2 = b'stub'
+        etags.append(md5(src_content2).hexdigest())
+
+        # prepare src obj w/ real version
+        self.conn.make_request('PUT', src_bucket, src_obj2, body=src_content2)
+        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj2)
+        self.assertCommonResponseHeaders(headers)
+        version_id2 = headers['x-amz-version-id']
+
+        status, headers, body, resp_etag = \
+            self._upload_part_copy(src_bucket, src_obj, bucket,
+                                   key, upload_id, 1,
+                                   src_version_id='null')
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        self.assertTrue('etag' not in headers)
+        elem = fromstring(body, 'CopyPartResult')
+
+        last_modifieds = [elem.find('LastModified').text]
+        self.assertTrue(last_modifieds[0] is not None)
+
+        self.assertEqual(resp_etag, etags[0])
+
+        status, headers, body, resp_etag = \
+            self._upload_part_copy(src_bucket, src_obj2, bucket,
+                                   key, upload_id, 2,
+                                   src_version_id=version_id2)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        self.assertTrue('etag' not in headers)
+        elem = fromstring(body, 'CopyPartResult')
+
+        last_modifieds.append(elem.find('LastModified').text)
+        self.assertTrue(last_modifieds[1] is not None)
+
+        self.assertEqual(resp_etag, etags[1])
+
+        # Check last-modified timestamp
+        key, upload_id = uploads[0]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key, query=query)
+
+        elem = fromstring(body, 'ListPartsResult')
+
+        # FIXME: COPY result drops milli/microseconds but GET doesn't
+        last_modified_gets = [p.find('LastModified').text
+                              for p in elem.iterfind('Part')]
+        self.assertEqual(
+            [lm.rsplit('.', 1)[0] for lm in last_modified_gets],
+            [lm.rsplit('.', 1)[0] for lm in last_modifieds])
+
+        # There should be *exactly* two parts in the result
+        self.assertEqual(2, len(last_modified_gets))
+
+        # Abort Multipart Upload
+        key, upload_id = uploads[0]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('DELETE', bucket, key, query=query)
+
+        # sanity checks
+        self.assertEqual(status, 204)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], '0')
+

 class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
     @classmethod
@@ -892,6 +1022,11 @@ class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):

     def test_object_multi_upload_part_copy_range(self):
         if StrictVersion(boto.__version__) < StrictVersion('3.0'):
+            # boto 2 doesn't sort headers properly; see
+            # https://github.com/boto/boto/pull/3032
+            # or https://github.com/boto/boto/pull/3176
+            # or https://github.com/boto/boto/pull/3751
+            # or https://github.com/boto/boto/pull/3824
             self.skipTest('This stuff got the issue of boto<=2.x')

     def test_delete_bucket_multi_upload_object_exisiting(self):
@@ -650,7 +650,8 @@ class TestS3ApiObject(S3ApiBase):
     def test_get_object_range(self):
         obj = 'object'
         content = b'abcdefghij'
-        headers = {'x-amz-meta-test': 'swift'}
+        headers = {'x-amz-meta-test': 'swift',
+                   'content-type': 'application/octet-stream'}
         self.conn.make_request(
             'PUT', self.bucket, obj, headers=headers, body=content)

@@ -664,6 +665,7 @@ class TestS3ApiObject(S3ApiBase):
         self.assertTrue('x-amz-meta-test' in headers)
         self.assertEqual('swift', headers['x-amz-meta-test'])
         self.assertEqual(body, b'bcdef')
+        self.assertEqual('application/octet-stream', headers['content-type'])

         headers = {'Range': 'bytes=5-'}
         status, headers, body = \
test/functional/s3api/test_versioning.py (new file, 166 lines)
@@ -0,0 +1,166 @@
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from swift.common.middleware.s3api.etree import fromstring, tostring, \
    Element, SubElement
import test.functional as tf
from test.functional.s3api import S3ApiBase
from test.functional.s3api.utils import get_error_code


def setUpModule():
    tf.setup_package()


def tearDownModule():
    tf.teardown_package()


class TestS3ApiVersioning(S3ApiBase):
    def setUp(self):
        super(TestS3ApiVersioning, self).setUp()
        if 'object_versioning' not in tf.cluster_info:
            # Alternatively, maybe we should assert we get 501s...
            raise tf.SkipTest('S3 versioning requires that Swift object '
                              'versioning be enabled')
        status, headers, body = self.conn.make_request('PUT', 'bucket')
        self.assertEqual(status, 200)

    def tearDown(self):
        # TODO: is this necessary on AWS? or can you delete buckets while
        # versioning is enabled?
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Suspended'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request('DELETE', 'bucket')
        self.assertEqual(status, 204)
        super(TestS3ApiVersioning, self).tearDown()

    def test_versioning_put(self):
        # Versioning not configured
        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.getchildren(), [])

        # Enable versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.find('./Status').text, 'Enabled')

        # Suspend versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Suspended'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.find('./Status').text, 'Suspended')

        # Resume versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.find('./Status').text, 'Enabled')

    def test_versioning_immediately_suspend(self):
        # Versioning not configured
        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.getchildren(), [])

        # Suspend versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Suspended'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.find('./Status').text, 'Suspended')

        # Enable versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        elem = fromstring(body)
        self.assertEqual(elem.find('./Status').text, 'Enabled')

    def test_versioning_put_error(self):
        # Root tag is not VersioningConfiguration
        elem = Element('foo')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'MalformedXML')

        # Status is not "Enabled" or "Suspended"
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = '...'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'MalformedXML')

        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = ''
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'MalformedXML')
@@ -16,8 +16,11 @@
 import logging
 import os
 import unittest
+import uuid
+import time

 import boto3
+from botocore.exceptions import ClientError
 from six.moves import urllib

 from swift.common.utils import config_true_value
@@ -80,11 +83,14 @@ def get_s3_client(user=1, signature_version='s3v4', addressing_style='path'):
    path -- produces URLs like ``http(s)://host.domain/bucket/key``
    virtual -- produces URLs like ``http(s)://bucket.host.domain/key``
    '''
-    endpoint = get_opt_or_error('endpoint')
+    endpoint = get_opt('endpoint', None)
+    if endpoint:
         scheme = urllib.parse.urlsplit(endpoint).scheme
         if scheme not in ('http', 'https'):
             raise ConfigError('unexpected scheme in endpoint: %r; '
                               'expected http or https' % scheme)
+    else:
+        scheme = None
     region = get_opt('region', 'us-east-1')
     access_key = get_opt_or_error('access_key%d' % user)
     secret_key = get_opt_or_error('secret_key%d' % user)
@@ -112,6 +118,9 @@ def get_s3_client(user=1, signature_version='s3v4', addressing_style='path'):
     )


+TEST_PREFIX = 's3api-test-'
+
+
 class BaseS3TestCase(unittest.TestCase):
     # Default to v4 signatures (as aws-cli does), but subclasses can override
     signature_version = 's3v4'
@@ -121,15 +130,77 @@ class BaseS3TestCase(unittest.TestCase):
         return get_s3_client(user, cls.signature_version)

     @classmethod
-    def clear_bucket(cls, client, bucket):
-        for key in client.list_objects(Bucket=bucket).get('Contents', []):
-            client.delete_key(Bucket=bucket, Key=key['Name'])
+    def _remove_all_object_versions_from_bucket(cls, client, bucket_name):
+        resp = client.list_object_versions(Bucket=bucket_name)
+        objs_to_delete = (resp.get('Versions', []) +
+                          resp.get('DeleteMarkers', []))
+        while objs_to_delete:
+            multi_delete_body = {
+                'Objects': [
+                    {'Key': obj['Key'], 'VersionId': obj['VersionId']}
+                    for obj in objs_to_delete
+                ],
+                'Quiet': False,
+            }
+            del_resp = client.delete_objects(Bucket=bucket_name,
+                                             Delete=multi_delete_body)
+            if any(del_resp.get('Errors', [])):
+                raise Exception('Unable to delete %r' % del_resp['Errors'])
+            if not resp['IsTruncated']:
+                break
+            key_marker = resp['NextKeyMarker']
+            version_id_marker = resp['NextVersionIdMarker']
+            resp = client.list_object_versions(
+                Bucket=bucket_name, KeyMarker=key_marker,
+                VersionIdMarker=version_id_marker)
+            objs_to_delete = (resp.get('Versions', []) +
+                              resp.get('DeleteMarkers', []))
+
+    @classmethod
+    def clear_bucket(cls, client, bucket_name):
+        timeout = time.time() + 10
+        backoff = 0.1
+        cls._remove_all_object_versions_from_bucket(client, bucket_name)
+        try:
+            client.delete_bucket(Bucket=bucket_name)
+        except ClientError as e:
+            if 'BucketNotEmpty' not in str(e):
+                raise
+            # Something's gone sideways. Try harder
+            client.put_bucket_versioning(
+                Bucket=bucket_name,
+                VersioningConfiguration={'Status': 'Suspended'})
+            while True:
+                cls._remove_all_object_versions_from_bucket(
+                    client, bucket_name)
+                # also try some version-unaware operations...
+                for key in client.list_objects(Bucket=bucket_name).get(
+                        'Contents', []):
+                    client.delete_object(Bucket=bucket_name, Key=key['Key'])
+
+                # *then* try again
+                try:
+                    client.delete_bucket(Bucket=bucket_name)
+                except ClientError as e:
+                    if 'BucketNotEmpty' not in str(e):
+                        raise
+                    if time.time() > timeout:
+                        raise Exception('Timeout clearing %r' % bucket_name)
+                    time.sleep(backoff)
+                    backoff *= 2
+                else:
+                    break
+
+    def create_name(self, slug):
+        return '%s%s-%s' % (TEST_PREFIX, slug, uuid.uuid4().hex)

     @classmethod
     def clear_account(cls, client):
         for bucket in client.list_buckets()['Buckets']:
+            if not bucket['Name'].startswith(TEST_PREFIX):
+                # these tests run against real s3 accounts
+                continue
             cls.clear_bucket(client, bucket['Name'])
-            client.delete_bucket(Bucket=bucket['Name'])

     def tearDown(self):
         client = self.get_s3_client(1)
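For reference, a minimal usage sketch of the cleanup helper added above, assuming a reachable S3-compatible endpoint; the endpoint URL and credentials below are placeholders, not values taken from this change:

    import boto3

    from test.s3api import BaseS3TestCase

    # Placeholder endpoint/credentials for a local test cluster.
    client = boto3.client(
        's3',
        endpoint_url='http://localhost:8080',
        aws_access_key_id='test:tester',
        aws_secret_access_key='testing')

    # clear_bucket() drops every object version and delete marker (retrying
    # with backoff while the bucket still reports BucketNotEmpty) and then
    # deletes the bucket itself.
    BaseS3TestCase.clear_bucket(client, 's3api-test-example-bucket')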
@@ -14,7 +14,6 @@
 # limitations under the License.

 import unittest
-import uuid

 from test.s3api import BaseS3TestCase, ConfigError

@@ -43,7 +42,7 @@ class TestGetServiceSigV4(BaseS3TestCase):

     def test_service_with_buckets(self):
         c = self.get_s3_client(1)
-        buckets = [str(uuid.uuid4()) for _ in range(5)]
+        buckets = [self.create_name('bucket%s' % i) for i in range(5)]
         for bucket in buckets:
             c.create_bucket(Bucket=bucket)

@@ -65,7 +64,7 @@ class TestGetServiceSigV4(BaseS3TestCase):
             c2 = self.get_s3_client(2)
         except ConfigError as err:
             raise unittest.SkipTest(str(err))
-        buckets2 = [str(uuid.uuid4()) for _ in range(2)]
+        buckets2 = [self.create_name('bucket%s' % i) for i in range(2)]
         for bucket in buckets2:
             c2.create_bucket(Bucket=bucket)
         self.assertEqual(sorted(buckets2), [
test/s3api/test_versioning.py (new file, 758 lines; truncated here)
@@ -0,0 +1,758 @@
# Copyright (c) 2019 SwiftStack, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import hashlib
from collections import defaultdict

from botocore.exceptions import ClientError
import six

from swift.common.header_key_dict import HeaderKeyDict
from test.s3api import BaseS3TestCase


def retry(f, timeout=10):
    timelimit = time.time() + timeout
    while True:
        try:
            f()
        except (ClientError, AssertionError):
            if time.time() > timelimit:
                raise
            continue
        else:
            break


class TestObjectVersioning(BaseS3TestCase):

    maxDiff = None

    def setUp(self):
        self.client = self.get_s3_client(1)
        self.bucket_name = self.create_name('versioning')
        resp = self.client.create_bucket(Bucket=self.bucket_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])

        def enable_versioning():
            resp = self.client.put_bucket_versioning(
                Bucket=self.bucket_name,
                VersioningConfiguration={'Status': 'Enabled'})
            self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        retry(enable_versioning)

    def tearDown(self):
        resp = self.client.put_bucket_versioning(
            Bucket=self.bucket_name,
            VersioningConfiguration={'Status': 'Suspended'})
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.clear_bucket(self.client, self.bucket_name)
        super(TestObjectVersioning, self).tearDown()

    def test_setup(self):
        bucket_name = self.create_name('new-bucket')
        resp = self.client.create_bucket(Bucket=bucket_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        expected_location = '/%s' % bucket_name
        self.assertEqual(expected_location, resp['Location'])
        headers = HeaderKeyDict(resp['ResponseMetadata']['HTTPHeaders'])
        self.assertEqual('0', headers['content-length'])
        self.assertEqual(expected_location, headers['location'])

        # get versioning
        resp = self.client.get_bucket_versioning(Bucket=bucket_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertNotIn('Status', resp)

        # put versioning
        versioning_config = {
            'Status': 'Enabled',
        }
        resp = self.client.put_bucket_versioning(
            Bucket=bucket_name,
            VersioningConfiguration=versioning_config)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])

        # ... now it's enabled
        def check_status():
            resp = self.client.get_bucket_versioning(Bucket=bucket_name)
            self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
            try:
                self.assertEqual('Enabled', resp['Status'])
            except KeyError:
                self.fail('Status was not in %r' % resp)
        retry(check_status)

        # send over some bogus junk
        versioning_config['Status'] = 'Disabled'
        with self.assertRaises(ClientError) as ctx:
            self.client.put_bucket_versioning(
                Bucket=bucket_name,
                VersioningConfiguration=versioning_config)
        expected_err = 'An error occurred (MalformedXML) when calling the ' \
            'PutBucketVersioning operation: The XML you provided was ' \
            'not well-formed or did not validate against our published schema'
        self.assertEqual(expected_err, str(ctx.exception))

        # disable it
        versioning_config['Status'] = 'Suspended'
        resp = self.client.put_bucket_versioning(
            Bucket=bucket_name,
            VersioningConfiguration=versioning_config)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])

        # ... now it's disabled again
        def check_status():
            resp = self.client.get_bucket_versioning(Bucket=bucket_name)
            self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
            self.assertEqual('Suspended', resp['Status'])
        retry(check_status)

    def test_upload_fileobj_versioned(self):
        obj_data = self.create_name('some-data').encode('ascii')
        obj_etag = hashlib.md5(obj_data).hexdigest()
        obj_name = self.create_name('versioned-obj')
        self.client.upload_fileobj(six.BytesIO(obj_data),
                                   self.bucket_name, obj_name)

        # object is in the listing
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual([{
            'ETag': '"%s"' % obj_etag,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # object version listing
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            obj.pop('VersionId')
        self.assertEqual([{
            'ETag': '"%s"' % obj_etag,
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # overwrite the object
        new_obj_data = self.create_name('some-new-data').encode('ascii')
        new_obj_etag = hashlib.md5(new_obj_data).hexdigest()
        self.client.upload_fileobj(six.BytesIO(new_obj_data),
                                   self.bucket_name, obj_name)

        # new object is in the listing
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual([{
            'ETag': '"%s"' % new_obj_etag,
            'Key': obj_name,
            'Size': len(new_obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # both object versions in the versions listing
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            obj.pop('VersionId')
        self.assertEqual([{
            'ETag': '"%s"' % new_obj_etag,
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(new_obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % obj_etag,
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

    def test_delete_versioned_objects(self):
        etags = []
        obj_name = self.create_name('versioned-obj')
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            etags.insert(0, hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(six.BytesIO(obj_data),
                                       self.bucket_name, obj_name)

        # only one object appears in the listing
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual([{
            'ETag': '"%s"' % etags[0],
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # but everything is layed out in the object versions listing
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            versions.append(obj.pop('VersionId'))
        self.assertEqual([{
            'ETag': '"%s"' % etags[0],
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % etags[1],
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % etags[2],
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # we can delete a specific version
        resp = self.client.delete_object(Bucket=self.bucket_name,
                                         Key=obj_name,
                                         VersionId=versions[1])

        # and that just pulls it out of the versions listing
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            obj.pop('VersionId')
        self.assertEqual([{
            'ETag': '"%s"' % etags[0],
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % etags[2],
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # ... but the current listing is unaffected
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual([{
            'ETag': '"%s"' % etags[0],
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # OTOH, if you delete specifically the latest version
        # we can delete a specific version
        resp = self.client.delete_object(Bucket=self.bucket_name,
                                         Key=obj_name,
                                         VersionId=versions[0])

        # the versions listing has a new IsLatest
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            obj.pop('VersionId')
        self.assertEqual([{
            'ETag': '"%s"' % etags[2],
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # and the stack pops
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual([{
            'ETag': '"%s"' % etags[2],
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

    def test_delete_versioned_deletes(self):
        etags = []
        obj_name = self.create_name('versioned-obj')
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            etags.insert(0, hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(six.BytesIO(obj_data),
                                       self.bucket_name, obj_name)
            # and make a delete marker
            self.client.delete_object(Bucket=self.bucket_name, Key=obj_name)

        # current listing is empty
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        self.assertEqual([], objs)

        # but everything is in layed out in the versions listing
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            versions.append(obj.pop('VersionId'))
        self.assertEqual([{
            'ETag': '"%s"' % etag,
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        } for etag in etags], objs)
        # ... plus the delete markers
        delete_markers = resp.get('DeleteMarkers', [])
        marker_versions = []
        for marker in delete_markers:
            marker.pop('LastModified')
            marker.pop('Owner')
            marker_versions.append(marker.pop('VersionId'))
        self.assertEqual([{
            'Key': obj_name,
            'IsLatest': is_latest,
        } for is_latest in (True, False, False)], delete_markers)

        # delete an old delete markers
        resp = self.client.delete_object(Bucket=self.bucket_name,
                                         Key=obj_name,
                                         VersionId=marker_versions[2])

        # since IsLatest is still marker we'll raise NoSuchKey
        with self.assertRaises(ClientError) as caught:
            resp = self.client.get_object(Bucket=self.bucket_name,
                                          Key=obj_name)
        expected_err = 'An error occurred (NoSuchKey) when calling the ' \
            'GetObject operation: The specified key does not exist.'
        self.assertEqual(expected_err, str(caught.exception))

        # now delete the delete marker (IsLatest)
        resp = self.client.delete_object(Bucket=self.bucket_name,
                                         Key=obj_name,
                                         VersionId=marker_versions[0])

        # most recent version is now latest
        resp = self.client.get_object(Bucket=self.bucket_name,
                                      Key=obj_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[0], resp['ETag'])

        # now delete the IsLatest object version
        resp = self.client.delete_object(Bucket=self.bucket_name,
                                         Key=obj_name,
                                         VersionId=versions[0])

        # and object is deleted again
        with self.assertRaises(ClientError) as caught:
            resp = self.client.get_object(Bucket=self.bucket_name,
                                          Key=obj_name)
        expected_err = 'An error occurred (NoSuchKey) when calling the ' \
            'GetObject operation: The specified key does not exist.'
        self.assertEqual(expected_err, str(caught.exception))

        # delete marker IsLatest
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        delete_markers = resp.get('DeleteMarkers', [])
        for marker in delete_markers:
            marker.pop('LastModified')
            marker.pop('Owner')
        self.assertEqual([{
            'Key': obj_name,
            'IsLatest': True,
            'VersionId': marker_versions[1],
        }], delete_markers)

    def test_multipart_upload(self):
        obj_name = self.create_name('versioned-obj')
        obj_data = b'data'

        mu = self.client.create_multipart_upload(
            Bucket=self.bucket_name,
            Key=obj_name)
        part_md5 = self.client.upload_part(
            Bucket=self.bucket_name,
            Key=obj_name,
            UploadId=mu['UploadId'],
            PartNumber=1,
            Body=obj_data)['ETag']
        complete_response = self.client.complete_multipart_upload(
            Bucket=self.bucket_name,
            Key=obj_name,
            UploadId=mu['UploadId'],
            MultipartUpload={'Parts': [
                {'PartNumber': 1, 'ETag': part_md5},
            ]})
        obj_etag = complete_response['ETag']

        delete_response = self.client.delete_object(
            Bucket=self.bucket_name,
            Key=obj_name)
        marker_version_id = delete_response['VersionId']

        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            versions.append(obj.pop('VersionId'))
        self.assertEqual([{
            'ETag': obj_etag,
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        markers = resp.get('DeleteMarkers', [])
        for marker in markers:
            marker.pop('LastModified')
            marker.pop('Owner')
        self.assertEqual([{
            'IsLatest': True,
            'Key': obj_name,
            'VersionId': marker_version_id,
        }], markers)

        # Can still get the old version
        resp = self.client.get_object(
            Bucket=self.bucket_name,
            Key=obj_name,
            VersionId=versions[0])
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(obj_etag, resp['ETag'])

        delete_response = self.client.delete_object(
            Bucket=self.bucket_name,
            Key=obj_name,
            VersionId=versions[0])

        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        self.assertEqual([], resp.get('Versions', []))

        markers = resp.get('DeleteMarkers', [])
        for marker in markers:
            marker.pop('LastModified')
            marker.pop('Owner')
        self.assertEqual([{
            'IsLatest': True,
            'Key': obj_name,
            'VersionId': marker_version_id,
        }], markers)

    def test_get_versioned_object(self):
        etags = []
        obj_name = self.create_name('versioned-obj')
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            # TODO: pull etag from response instead
            etags.insert(0, hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(
                six.BytesIO(obj_data), self.bucket_name, obj_name)

        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            versions.append(obj.pop('VersionId'))
        self.assertEqual([{
            'ETag': '"%s"' % etags[0],
            'IsLatest': True,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % etags[1],
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }, {
            'ETag': '"%s"' % etags[2],
            'IsLatest': False,
            'Key': obj_name,
            'Size': len(obj_data),
            'StorageClass': 'STANDARD',
        }], objs)

        # un-versioned get_object returns IsLatest
        resp = self.client.get_object(Bucket=self.bucket_name, Key=obj_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[0], resp['ETag'])

        # but you can get any object by version
        for i, version in enumerate(versions):
            resp = self.client.get_object(
                Bucket=self.bucket_name, Key=obj_name, VersionId=version)
            self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
            self.assertEqual('"%s"' % etags[i], resp['ETag'])

        # and head_object works about the same
        resp = self.client.head_object(Bucket=self.bucket_name, Key=obj_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[0], resp['ETag'])
        self.assertEqual(versions[0], resp['VersionId'])
        for version, etag in zip(versions, etags):
            resp = self.client.head_object(
                Bucket=self.bucket_name, Key=obj_name, VersionId=version)
            self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
            self.assertEqual(version, resp['VersionId'])
            self.assertEqual('"%s"' % etag, resp['ETag'])

    def test_get_versioned_object_invalid_params(self):
        with self.assertRaises(ClientError) as ctx:
            self.client.list_object_versions(Bucket=self.bucket_name,
                                             KeyMarker='',
                                             VersionIdMarker='bogus')
        expected_err = 'An error occurred (InvalidArgument) when calling ' \
            'the ListObjectVersions operation: Invalid version id specified'
        self.assertEqual(expected_err, str(ctx.exception))

        with self.assertRaises(ClientError) as ctx:
            self.client.list_object_versions(
                Bucket=self.bucket_name,
                VersionIdMarker='a' * 32)
        expected_err = 'An error occurred (InvalidArgument) when calling ' \
            'the ListObjectVersions operation: A version-id marker cannot ' \
            'be specified without a key marker.'
        self.assertEqual(expected_err, str(ctx.exception))

    def test_get_versioned_object_key_marker(self):
        obj00_name = self.create_name('00-versioned-obj')
        obj01_name = self.create_name('01-versioned-obj')
        names = [obj00_name] * 3 + [obj01_name] * 3
        latest = [True, False, False, True, False, False]
        etags = []
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(
                six.BytesIO(obj_data), self.bucket_name, obj01_name)
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(
                six.BytesIO(obj_data), self.bucket_name, obj00_name)
        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        versions = []
        objs = []
        for o in resp.get('Versions', []):
            versions.append(o['VersionId'])
            objs.append({
                'Key': o['Key'],
                'VersionId': o['VersionId'],
                'IsLatest': o['IsLatest'],
                'ETag': o['ETag'],
            })
        expected = [{
            'Key': name,
            'VersionId': version,
            'IsLatest': is_latest,
            'ETag': etag,
        } for name, etag, version, is_latest in zip(
            names, etags, versions, latest)]
        self.assertEqual(expected, objs)

        # on s3 this makes expected[0]['IsLatest'] magicaly change to False?
        # resp = self.client.list_object_versions(Bucket=self.bucket_name,
        #                                         KeyMarker='',
        #                                         VersionIdMarker=versions[0])
        # objs = [{
        #     'Key': o['Key'],
        #     'VersionId': o['VersionId'],
        #     'IsLatest': o['IsLatest'],
        #     'ETag': o['ETag'],
        # } for o in resp.get('Versions', [])]
        # self.assertEqual(expected, objs)

        # KeyMarker skips past that key
        resp = self.client.list_object_versions(Bucket=self.bucket_name,
                                                KeyMarker=obj00_name)
        objs = [{
            'Key': o['Key'],
            'VersionId': o['VersionId'],
            'IsLatest': o['IsLatest'],
            'ETag': o['ETag'],
        } for o in resp.get('Versions', [])]
        self.assertEqual(expected[3:], objs)

        # KeyMarker with VersionIdMarker skips past that version
        resp = self.client.list_object_versions(Bucket=self.bucket_name,
                                                KeyMarker=obj00_name,
                                                VersionIdMarker=versions[0])
        objs = [{
            'Key': o['Key'],
            'VersionId': o['VersionId'],
            'IsLatest': o['IsLatest'],
            'ETag': o['ETag'],
        } for o in resp.get('Versions', [])]
        self.assertEqual(expected[1:], objs)

        # KeyMarker with bogus version skips past that key
        resp = self.client.list_object_versions(
            Bucket=self.bucket_name,
            KeyMarker=obj00_name,
            VersionIdMarker=versions[4])
        objs = [{
            'Key': o['Key'],
            'VersionId': o['VersionId'],
            'IsLatest': o['IsLatest'],
            'ETag': o['ETag'],
        } for o in resp.get('Versions', [])]
        self.assertEqual(expected[3:], objs)

    def test_list_objects(self):
        etags = defaultdict(list)
        for i in range(3):
            obj_name = self.create_name('versioned-obj')
            for i in range(3):
                obj_data = self.create_name('some-data-%s' % i).encode('ascii')
                etags[obj_name].insert(0, hashlib.md5(obj_data).hexdigest())
                self.client.upload_fileobj(
                    six.BytesIO(obj_data), self.bucket_name, obj_name)

        # both unversioned list_objects responses are similar
        expected = []
        for name, obj_etags in sorted(etags.items()):
            expected.append({
                'ETag': '"%s"' % obj_etags[0],
                'Key': name,
                'Size': len(obj_data),
                'StorageClass': 'STANDARD',
            })
        resp = self.client.list_objects(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
            # one difference seems to be the Owner key
            self.assertEqual({'DisplayName', 'ID'},
                             set(obj.pop('Owner').keys()))
        self.assertEqual(expected, objs)
        resp = self.client.list_objects_v2(Bucket=self.bucket_name)
        objs = resp.get('Contents', [])
        for obj in objs:
            obj.pop('LastModified')
        self.assertEqual(expected, objs)

        # versioned listings has something for everyone
        expected = []
        for name, obj_etags in sorted(etags.items()):
            is_latest = True
            for etag in obj_etags:
                expected.append({
                    'ETag': '"%s"' % etag,
                    'IsLatest': is_latest,
                    'Key': name,
                    'Size': len(obj_data),
                    'StorageClass': 'STANDARD',
                })
                is_latest = False

        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            obj.pop('LastModified')
            obj.pop('Owner')
            versions.append(obj.pop('VersionId'))
        self.assertEqual(expected, objs)

    def test_copy_object(self):
        etags = []
        obj_name = self.create_name('versioned-obj')
        for i in range(3):
            obj_data = self.create_name('some-data-%s' % i).encode('ascii')
            etags.insert(0, hashlib.md5(obj_data).hexdigest())
            self.client.upload_fileobj(
                six.BytesIO(obj_data), self.bucket_name, obj_name)

        resp = self.client.list_object_versions(Bucket=self.bucket_name)
        objs = resp.get('Versions', [])
        versions = []
        for obj in objs:
            versions.append(obj.pop('VersionId'))

        # CopySource can just be Bucket/Key string
        first_target = self.create_name('target-obj1')
        copy_resp = self.client.copy_object(
            Bucket=self.bucket_name, Key=first_target,
            CopySource='%s/%s' % (self.bucket_name, obj_name))
        self.assertEqual(versions[0], copy_resp['CopySourceVersionId'])

        # and you'll just get the most recent version
        resp = self.client.head_object(Bucket=self.bucket_name,
                                       Key=first_target)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[0], resp['ETag'])

        # or you can be more explicit
        explicit_target = self.create_name('target-%s' % versions[0])
        copy_source = {'Bucket': self.bucket_name, 'Key': obj_name,
                       'VersionId': versions[0]}
        copy_resp = self.client.copy_object(
            Bucket=self.bucket_name, Key=explicit_target,
            CopySource=copy_source)
        self.assertEqual(versions[0], copy_resp['CopySourceVersionId'])
        # and you still get the same thing
        resp = self.client.head_object(Bucket=self.bucket_name,
                                       Key=explicit_target)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual('"%s"' % etags[0], resp['ETag'])

        # but you can also copy from a specific version
        version_target = self.create_name('target-%s' % versions[2])
        copy_source['VersionId'] = versions[2]
        copy_resp = self.client.copy_object(
            Bucket=self.bucket_name, Key=version_target,
            CopySource=copy_source)
        self.assertEqual(versions[2], copy_resp['CopySourceVersionId'])
        resp = self.client.head_object(Bucket=self.bucket_name,
|
Key=version_target)
|
||||||
|
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
|
||||||
|
self.assertEqual('"%s"' % etags[2], resp['ETag'])
|
@ -16,6 +16,7 @@
import unittest
from datetime import datetime
import email
import mock
import time

from swift.common import swob
@ -24,8 +25,8 @@ from swift.common.middleware.s3api.s3api import filter_factory
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import Config

from test.unit import debug_logger
from test.unit.common.middleware.s3api.helpers import FakeSwift


class FakeApp(object):
@ -80,7 +81,7 @@ class S3ApiTestCase(unittest.TestCase):
        self.app = FakeApp()
        self.swift = self.app.swift
        self.s3api = filter_factory({}, **self.conf)(self.app)
        self.s3api.logger = self.swift.logger = debug_logger()

        self.swift.register('HEAD', '/v1/AUTH_test',
                            swob.HTTPOk, {}, None)
@ -100,6 +101,19 @@ class S3ApiTestCase(unittest.TestCase):
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNoContent, {}, None)

        self.mock_get_swift_info_result = {'object_versioning': {}}
        for s3api_path in (
                'controllers.obj',
                'controllers.bucket',
                'controllers.multi_delete',
                'controllers.versioning',
        ):
            patcher = mock.patch(
                'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
                return_value=self.mock_get_swift_info_result)
            patcher.start()
            self.addCleanup(patcher.stop)

    def _get_error_code(self, body):
        elem = fromstring(body, 'Error')
        return elem.find('./Code').text
@ -15,25 +15,21 @@

# This stuff can't live in test/unit/__init__.py due to its swob dependency.

from copy import deepcopy
from hashlib import md5
from swift.common import swob
from swift.common.utils import split_path
from swift.common.request_helpers import is_sys_meta

from test.unit.common.middleware.helpers import FakeSwift as BaseFakeSwift


class FakeSwift(BaseFakeSwift):
    """
    A good-enough fake Swift proxy server to use in testing middleware.
    """
    ALLOWED_METHODS = BaseFakeSwift.ALLOWED_METHODS + ['TEST']

    def __init__(self, s3_acl=False):
        super(FakeSwift, self).__init__()
        self.req_method_paths = []
        self.swift_sources = []
        self.uploaded = {}
        # mapping of (method, path) --> (response class, headers, body)
        self._responses = {}
        self.s3_acl = s3_acl
        self.remote_user = 'authorized'

@ -69,88 +65,7 @@ class FakeSwift(object):
    def __call__(self, env, start_response):
        if self.s3_acl:
            self._fake_auth_middleware(env)
        return super(FakeSwift, self).__call__(env, start_response)
        req = swob.Request(env)
        method = env['REQUEST_METHOD']
        path = env['PATH_INFO']
        _, acc, cont, obj = split_path(env['PATH_INFO'], 0, 4,
                                       rest_with_last=True)
        if env.get('QUERY_STRING'):
            path += '?' + env['QUERY_STRING']

        if 'swift.authorize' in env:
            resp = env['swift.authorize'](req)
            if resp:
                return resp(env, start_response)

        headers = req.headers
        self._calls.append((method, path, headers))
        self.swift_sources.append(env.get('swift.source'))

        try:
            resp_class, raw_headers, body = self._responses[(method, path)]
            headers = swob.HeaderKeyDict(raw_headers)
        except KeyError:
            # FIXME: suppress print state error for python3 compatibility.
            # pylint: disable-msg=E1601
            if (env.get('QUERY_STRING')
                    and (method, env['PATH_INFO']) in self._responses):
                resp_class, raw_headers, body = self._responses[
                    (method, env['PATH_INFO'])]
                headers = swob.HeaderKeyDict(raw_headers)
            elif method == 'HEAD' and ('GET', path) in self._responses:
                resp_class, raw_headers, _ = self._responses[('GET', path)]
                body = None
                headers = swob.HeaderKeyDict(raw_headers)
            elif method == 'GET' and obj and path in self.uploaded:
                resp_class = swob.HTTPOk
                headers, body = self.uploaded[path]
            else:
                print("Didn't find %r in allowed responses" %
                      ((method, path),))
                raise

        # simulate object PUT
        if method == 'PUT' and obj:
            input = env['wsgi.input'].read()
            etag = md5(input).hexdigest()
            if env.get('HTTP_ETAG', etag) != etag:
                raise Exception('Client sent a bad ETag! Got %r, but '
                                'md5(body) = %r' % (env['HTTP_ETAG'], etag))
            headers.setdefault('Etag', etag)
            headers.setdefault('Content-Length', len(input))

            # keep it for subsequent GET requests later
            self.uploaded[path] = (deepcopy(headers), input)
            if "CONTENT_TYPE" in env:
                self.uploaded[path][0]['Content-Type'] = env["CONTENT_TYPE"]

        # range requests ought to work, but copies are special
        support_range_and_conditional = not (
            method == 'PUT' and
            'X-Copy-From' in req.headers and
            'Range' in req.headers)
        if isinstance(body, list):
            app_iter = body
            body = None
        else:
            app_iter = None
        resp = resp_class(
            req=req, headers=headers, body=body, app_iter=app_iter,
            conditional_response=support_range_and_conditional)
        return resp(env, start_response)

    @property
    def calls(self):
        return [(method, path) for method, path, headers in self._calls]

    @property
    def calls_with_headers(self):
        return self._calls

    @property
    def call_count(self):
        return len(self._calls)

    def register(self, method, path, response_class, headers, body):
        # assuming the path format like /v1/account/container/object
@ -167,7 +82,8 @@ class FakeSwift(object):

        if body is not None and not isinstance(body, (bytes, list)):
            body = body.encode('utf8')
        return super(FakeSwift, self).register(
            method, path, response_class, headers, body)

    def register_unconditionally(self, method, path, response_class, headers,
                                 body):
@ -21,6 +21,8 @@ import six
from six.moves.urllib.parse import quote

from swift.common import swob
from swift.common.middleware.versioned_writes.object_versioning import \
    DELETE_MARKER_CONTENT_TYPE
from swift.common.swob import Request
from swift.common.utils import json

@ -30,6 +32,7 @@ from swift.common.middleware.s3api.subresource import Owner, encode_acl, \
    ACLPublicRead
from swift.common.middleware.s3api.s3request import MAX_32BIT_INT

from test.unit.common.middleware.helpers import normalize_path
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from test.unit.common.middleware.s3api.helpers import UnreadableInput
@ -41,25 +44,43 @@ PFS_ETAG = '"pfsv2/AUTH_test/01234567/89abcdef-32"'
class TestS3ApiBucket(S3ApiTestCase):
    def setup_objects(self):
        self.objects = (('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
                        (u'lily-\u062a', '2011-01-05T02:19:14.275290', 0, 390),
                        ('mu', '2011-01-05T02:19:14.275290',
                         'md5-of-the-manifest; s3_etag=0', '3909'),
                        ('pfs-obj', '2011-01-05T02:19:14.275290',
                         PFS_ETAG, '3909'),
                        ('rose', '2011-01-05T02:19:14.275290', 0, 303),
                        ('slo', '2011-01-05T02:19:14.275290',
                         'md5-of-the-manifest', '3909'),
                        ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                        ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                        ('with%20space', '2011-01-05T02:19:14.275290', 0, 390))

        self.objects_list = [
            {'name': item[0], 'last_modified': str(item[1]),
             'content_type': 'application/octet-stream',
             'hash': str(item[2]), 'bytes': str(item[3])}
            for item in self.objects]
        self.objects_list[5]['slo_etag'] = '"0"'
        self.versioned_objects = [{
            'name': 'rose',
            'version_id': '2',
            'hash': '0',
            'bytes': '0',
            'last_modified': '2010-03-01T17:09:51.510928',
            'content_type': DELETE_MARKER_CONTENT_TYPE,
            'is_latest': False,
        }, {
            'name': 'rose',
            'version_id': '1',
            'hash': '1234',
            'bytes': '6',
            'last_modified': '2010-03-01T17:09:50.510928',
            'content_type': 'application/octet-stream',
            'is_latest': False,
        }]

        listing_body = json.dumps(self.objects_list)
        self.prefixes = ['rose', 'viola', 'lily']
        object_list_subdir = [{"subdir": p} for p in self.prefixes]

@ -79,7 +100,7 @@ class TestS3ApiBucket(S3ApiTestCase):
            json.dumps([]))
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket+segments?format=json&marker=',
            swob.HTTPOk, {'Content-Type': 'application/json'}, listing_body)
        self.swift.register(
            'HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent, {}, None)
        self.swift.register(
@ -89,20 +110,14 @@ class TestS3ApiBucket(S3ApiTestCase):
            {}, None)
        self.swift.register(
            'GET', '/v1/AUTH_test/junk', swob.HTTPOk,
            {'Content-Type': 'application/json'}, listing_body)
        self.swift.register(
            'GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
            {'Content-Type': 'application/json; charset=utf-8'},
            json.dumps(object_list_subdir))
        self.swift.register(
            'GET',
            '/v1/AUTH_test/subdirs?delimiter=/&limit=3',
            swob.HTTPOk, {}, json.dumps([
                {'subdir': 'nothing/'},
                {'subdir': u'but-\u062a/'},
@ -189,11 +204,13 @@ class TestS3ApiBucket(S3ApiTestCase):
            items.append((o.find('./Key').text, o.find('./ETag').text))
            self.assertEqual('2011-01-05T02:19:14.275Z',
                             o.find('./LastModified').text)
        expected = [
            (i[0].encode('utf-8') if six.PY2 else i[0],
             PFS_ETAG if i[0] == 'pfs-obj' else
             '"0-N"' if i[0] == 'slo' else '"0"')
            for i in self.objects
        ]
        self.assertEqual(items, expected)

    def test_bucket_GET_url_encoded(self):
        bucket_name = 'junk'
@ -483,15 +500,16 @@

    def test_bucket_GET_with_delimiter_max_keys(self):
        bucket_name = 'junk'
        req = Request.blank('/%s?delimiter=a&max-keys=4' % bucket_name,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./NextMarker').text,
                         self.objects_list[3]['name'])
        self.assertEqual(elem.find('./MaxKeys').text, '4')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')

    def test_bucket_GET_v2_with_delimiter_max_keys(self):
@ -567,6 +585,14 @@ class TestS3ApiBucket(S3ApiTestCase):
        self.assertIsNotNone(o.find('./Owner'))

    def test_bucket_GET_with_versions_versioning_not_configured(self):
        for obj in self.objects:
            self.swift.register(
                'HEAD', '/v1/AUTH_test/junk/%s' % quote(obj[0].encode('utf8')),
                swob.HTTPOk, {}, None)
        # self.swift.register('HEAD', '/v1/AUTH_test/junk/viola',
        #                     swob.HTTPOk, {}, None)

        self._add_versions_request(versioned_objects=[])
        req = Request.blank('/junk?versions',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
@ -585,11 +611,10 @@ class TestS3ApiBucket(S3ApiTestCase):
        versions = elem.findall('./Version')
        objects = list(self.objects)
        if six.PY2:
            expected = [v[0].encode('utf-8') for v in objects]
        else:
            expected = [v[0] for v in objects]
        self.assertEqual([v.find('./Key').text for v in versions], expected)
        self.assertEqual([v.find('./IsLatest').text for v in versions],
                         ['true' for v in objects])
        self.assertEqual([v.find('./VersionId').text for v in versions],
@ -612,6 +637,446 @@ class TestS3ApiBucket(S3ApiTestCase):
        self.assertEqual([v.find('./StorageClass').text for v in versions],
                         ['STANDARD' for v in objects])
|
|
||||||
|
def _add_versions_request(self, orig_objects=None, versioned_objects=None,
|
||||||
|
bucket='junk'):
|
||||||
|
if orig_objects is None:
|
||||||
|
orig_objects = self.objects_list
|
||||||
|
if versioned_objects is None:
|
||||||
|
versioned_objects = self.versioned_objects
|
||||||
|
all_versions = versioned_objects + [
|
||||||
|
dict(i, version_id='null', is_latest=True)
|
||||||
|
for i in orig_objects]
|
||||||
|
all_versions.sort(key=lambda o: (
|
||||||
|
o['name'], '' if o['version_id'] == 'null' else o['version_id']))
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/AUTH_test/%s' % bucket, swob.HTTPOk,
|
||||||
|
{'Content-Type': 'application/json'}, json.dumps(all_versions))
|
||||||
|
|
||||||
|
def _assert_delete_markers(self, elem):
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 1)
|
||||||
|
self.assertEqual(delete_markers[0].find('./IsLatest').text, 'false')
|
||||||
|
self.assertEqual(delete_markers[0].find('./VersionId').text, '2')
|
||||||
|
self.assertEqual(delete_markers[0].find('./Key').text, 'rose')
|
||||||
|
|
||||||
|
def test_bucket_GET_with_versions(self):
|
||||||
|
self._add_versions_request()
|
||||||
|
req = Request.blank('/junk?versions',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./Name').text, 'junk')
|
||||||
|
self._assert_delete_markers(elem)
|
||||||
|
versions = elem.findall('./Version')
|
||||||
|
self.assertEqual(len(versions), len(self.objects) + 1)
|
||||||
|
|
||||||
|
expected = []
|
||||||
|
for o in self.objects_list:
|
||||||
|
name = o['name']
|
||||||
|
if six.PY2:
|
||||||
|
name = name.encode('utf8')
|
||||||
|
expected.append((name, 'true', 'null'))
|
||||||
|
if name == 'rose':
|
||||||
|
expected.append((name, 'false', '1'))
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in versions
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
def test_bucket_GET_with_versions_with_max_keys(self):
|
||||||
|
self._add_versions_request()
|
||||||
|
req = Request.blank('/junk?versions&max-keys=7',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./MaxKeys').text, '7')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'true')
|
||||||
|
self._assert_delete_markers(elem)
|
||||||
|
versions = elem.findall('./Version')
|
||||||
|
self.assertEqual(len(versions), 6)
|
||||||
|
|
||||||
|
expected = []
|
||||||
|
for o in self.objects_list[:5]:
|
||||||
|
name = o['name']
|
||||||
|
if six.PY2:
|
||||||
|
name = name.encode('utf8')
|
||||||
|
expected.append((name, 'true', 'null'))
|
||||||
|
if name == 'rose':
|
||||||
|
expected.append((name, 'false', '1'))
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in versions
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
def test_bucket_GET_with_versions_with_max_keys_and_key_marker(self):
|
||||||
|
self._add_versions_request(orig_objects=self.objects_list[4:])
|
||||||
|
req = Request.blank('/junk?versions&max-keys=3&key-marker=ros',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./MaxKeys').text, '3')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'true')
|
||||||
|
self._assert_delete_markers(elem)
|
||||||
|
versions = elem.findall('./Version')
|
||||||
|
self.assertEqual(len(versions), 2)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('rose', 'true', 'null'),
|
||||||
|
('rose', 'false', '1'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in versions
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
def test_bucket_GET_versions_with_key_marker_and_version_id_marker(self):
|
||||||
|
container_listing = [{
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:53.152780",
|
||||||
|
"name": "subdir/foo",
|
||||||
|
}]
|
||||||
|
versions_listing = [{
|
||||||
|
'bytes': 0,
|
||||||
|
'content_type': DELETE_MARKER_CONTENT_TYPE,
|
||||||
|
'hash': '0',
|
||||||
|
"last_modified": "2019-08-19T19:05:33.565940",
|
||||||
|
'name': 'subdir/bar',
|
||||||
|
"version_id": "1565241533.55320",
|
||||||
|
'is_latest': True,
|
||||||
|
}, {
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:53.508510",
|
||||||
|
"name": "subdir/bar",
|
||||||
|
"version_id": "1564984393.68962",
|
||||||
|
'is_latest': False,
|
||||||
|
}, {
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:42.673260",
|
||||||
|
"name": "subdir/foo",
|
||||||
|
"version_id": "1565984382.67326",
|
||||||
|
'is_latest': False,
|
||||||
|
}]
|
||||||
|
self._add_versions_request(container_listing, versions_listing,
|
||||||
|
bucket='mybucket')
|
||||||
|
req = Request.blank(
|
||||||
|
'/mybucket?versions&key-marker=subdir/bar&'
|
||||||
|
'version-id-marker=1566589611.065522',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(['subdir/bar'], [
|
||||||
|
o.find('Key').text for o in delete_markers])
|
||||||
|
expected = [
|
||||||
|
('subdir/bar', 'false', '1564984393.68962'),
|
||||||
|
('subdir/foo', 'true', 'null'),
|
||||||
|
('subdir/foo', 'false', '1565984382.67326'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
self._add_versions_request(container_listing, versions_listing[1:],
|
||||||
|
bucket='mybucket')
|
||||||
|
req = Request.blank(
|
||||||
|
'/mybucket?versions&key-marker=subdir/bar&'
|
||||||
|
'version-id-marker=1565241533.55320',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(0, len(delete_markers))
|
||||||
|
expected = [
|
||||||
|
('subdir/bar', 'false', '1564984393.68962'),
|
||||||
|
('subdir/foo', 'true', 'null'),
|
||||||
|
('subdir/foo', 'false', '1565984382.67326'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
self._add_versions_request([], versions_listing[-1:],
|
||||||
|
bucket='mybucket')
|
||||||
|
req = Request.blank(
|
||||||
|
'/mybucket?versions&key-marker=subdir/foo&'
|
||||||
|
'version-id-marker=null',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(0, len(delete_markers))
|
||||||
|
expected = [
|
||||||
|
('subdir/foo', 'false', '1565984382.67326'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
def test_bucket_GET_versions_with_version_id_marker(self):
|
||||||
|
self._add_versions_request()
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
|
||||||
|
# sanity
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
expected = [('rose', 'false', '2')]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./DeleteMarker')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
expected = [
|
||||||
|
('lily', 'true', 'null'),
|
||||||
|
(b'lily-\xd8\xaa', 'true', 'null'),
|
||||||
|
('mu', 'true', 'null'),
|
||||||
|
('pfs-obj', 'true', 'null'),
|
||||||
|
('rose', 'true', 'null'),
|
||||||
|
('rose', 'false', '1'),
|
||||||
|
('slo', 'true', 'null'),
|
||||||
|
('viola', 'true', 'null'),
|
||||||
|
('with space', 'true', 'null'),
|
||||||
|
('with%20space', 'true', 'null'),
|
||||||
|
]
|
||||||
|
if not six.PY2:
|
||||||
|
item = list(expected[1])
|
||||||
|
item[0] = item[0].decode('utf8')
|
||||||
|
expected[1] = tuple(item)
|
||||||
|
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
self._add_versions_request(self.objects_list[5:])
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions&key-marker=rose&version-id-marker=null',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 1)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('rose', 'false', '1'),
|
||||||
|
('slo', 'true', 'null'),
|
||||||
|
('viola', 'true', 'null'),
|
||||||
|
('with space', 'true', 'null'),
|
||||||
|
('with%20space', 'true', 'null'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
# N.B. versions are sorted most recent to oldest
|
||||||
|
self._add_versions_request(self.objects_list[5:],
|
||||||
|
self.versioned_objects[1:])
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions&key-marker=rose&version-id-marker=2',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 0)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('rose', 'false', '1'),
|
||||||
|
('slo', 'true', 'null'),
|
||||||
|
('viola', 'true', 'null'),
|
||||||
|
('with space', 'true', 'null'),
|
||||||
|
('with%20space', 'true', 'null'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
self._add_versions_request(self.objects_list[5:],
|
||||||
|
self.versioned_objects[2:])
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions&key-marker=rose&version-id-marker=1',
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./IsTruncated').text, 'false')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 0)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('slo', 'true', 'null'),
|
||||||
|
('viola', 'true', 'null'),
|
||||||
|
('with space', 'true', 'null'),
|
||||||
|
('with%20space', 'true', 'null'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
def test_bucket_GET_versions_non_existent_version_id_marker(self):
|
||||||
|
self._add_versions_request(orig_objects=self.objects_list[5:])
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions&key-marker=rose&'
|
||||||
|
'version-id-marker=null',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200', body)
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./Name').text, 'junk')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 1)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('rose', 'false', '1'),
|
||||||
|
('slo', 'true', 'null'),
|
||||||
|
('viola', 'true', 'null'),
|
||||||
|
('with space', 'true', 'null'),
|
||||||
|
('with%20space', 'true', 'null'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
self.assertEqual(self.swift.calls, [
|
||||||
|
('GET', normalize_path('/v1/AUTH_test/junk?'
|
||||||
|
'limit=1001&marker=rose&version_marker=null&versions=')),
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_bucket_GET_versions_prefix(self):
|
||||||
|
container_listing = [{
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:53.152780",
|
||||||
|
"name": "subdir/foo",
|
||||||
|
}]
|
||||||
|
versions_listing = [{
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:53.508510",
|
||||||
|
"name": "subdir/bar",
|
||||||
|
"version_id": "1565984393.68962",
|
||||||
|
"is_latest": True,
|
||||||
|
}, {
|
||||||
|
'bytes': 0,
|
||||||
|
'content_type': DELETE_MARKER_CONTENT_TYPE,
|
||||||
|
'hash': '0',
|
||||||
|
"last_modified": "2019-08-19T19:05:33.565940",
|
||||||
|
'name': 'subdir/bar',
|
||||||
|
'version_id': '1566241533.55320',
|
||||||
|
'is_latest': False,
|
||||||
|
}, {
|
||||||
|
"bytes": 8192,
|
||||||
|
"content_type": "binary/octet-stream",
|
||||||
|
"hash": "221994040b14294bdf7fbc128e66633c",
|
||||||
|
"last_modified": "2019-08-16T19:39:42.673260",
|
||||||
|
"name": "subdir/foo",
|
||||||
|
"version_id": "1565984382.67326",
|
||||||
|
'is_latest': False,
|
||||||
|
}]
|
||||||
|
self._add_versions_request(container_listing, versions_listing)
|
||||||
|
req = Request.blank(
|
||||||
|
'/junk?versions&prefix=subdir/',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body, 'ListVersionsResult')
|
||||||
|
self.assertEqual(elem.find('./Name').text, 'junk')
|
||||||
|
delete_markers = elem.findall('./DeleteMarker')
|
||||||
|
self.assertEqual(len(delete_markers), 1)
|
||||||
|
|
||||||
|
expected = [
|
||||||
|
('subdir/bar', 'true', '1565984393.68962'),
|
||||||
|
('subdir/foo', 'true', 'null'),
|
||||||
|
('subdir/foo', 'false', '1565984382.67326'),
|
||||||
|
]
|
||||||
|
discovered = [
|
||||||
|
tuple(e.find('./%s' % key).text for key in (
|
||||||
|
'Key', 'IsLatest', 'VersionId'))
|
||||||
|
for e in elem.findall('./Version')
|
||||||
|
]
|
||||||
|
self.assertEqual(expected, discovered)
|
||||||
|
|
||||||
|
self.assertEqual(self.swift.calls, [
|
||||||
|
('GET', normalize_path('/v1/AUTH_test/junk'
|
||||||
|
'?limit=1001&prefix=subdir/&versions=')),
|
||||||
|
])
|
||||||
|
|
||||||
    @s3acl
    def test_bucket_PUT_error(self):
        code = self._test_method_error('PUT', '/bucket', swob.HTTPCreated,
@ -847,6 +1312,24 @@ class TestS3ApiBucket(S3ApiTestCase):
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')

    @s3acl
    def test_bucket_DELETE_with_empty_versioning(self):
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+versioning',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket+versioning',
                            swob.HTTPNoContent, {}, None)
        # overwrite default HEAD to return x-container-object-count
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
            {'X-Container-Object-Count': 0}, None)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')

    @s3acl
    def test_bucket_DELETE_error_while_segment_bucket_delete(self):
        # An error occurred while deleting segment objects
@ -23,6 +23,7 @@ import mock
from swift.common import swob
from swift.common.swob import Request

from test.unit import make_timestamp_iter
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput
from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
@ -38,6 +39,7 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
                            swob.HTTPOk, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        self.ts = make_timestamp_iter()

    @s3acl
    def test_object_multi_DELETE_to_object(self):
@ -98,10 +100,11 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
        self.assertEqual(len(elem.findall('Deleted')), 3)
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket/Key1?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key1'),
            ('HEAD', '/v1/AUTH_test/bucket/Key2?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key2'),
            ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key3?multipart-manifest=delete'),
        ])

@ -161,11 +164,12 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
        )
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket/Key1?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key1'),
            ('HEAD', '/v1/AUTH_test/bucket/Key2?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key2'),
            ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
            ('HEAD', '/v1/AUTH_test/bucket/Key4?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key4?multipart-manifest=delete'),
        ])
@ -221,18 +225,42 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
|
|||||||
self.assertEqual(self._get_error_code(body), 'UserKeyMustBeSpecified')
|
self.assertEqual(self._get_error_code(body), 'UserKeyMustBeSpecified')
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_multi_DELETE_versioned(self):
|
def test_object_multi_DELETE_versioned_enabled(self):
|
||||||
self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
|
self.swift.register(
|
||||||
swob.HTTPNoContent, {}, None)
|
'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
|
||||||
self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
|
'X-Container-Sysmeta-Versions-Enabled': 'True',
|
||||||
swob.HTTPNotFound, {}, None)
|
}, None)
|
||||||
|
t1 = next(self.ts)
|
||||||
|
key1 = '/v1/AUTH_test/bucket/Key1' \
|
||||||
|
'?symlink=get&version-id=%s' % t1.normal
|
||||||
|
self.swift.register('HEAD', key1, swob.HTTPOk, {}, None)
|
||||||
|
self.swift.register('DELETE', key1, swob.HTTPNoContent, {}, None)
|
||||||
|
t2 = next(self.ts)
|
||||||
|
key2 = '/v1/AUTH_test/bucket/Key2' \
|
||||||
|
'?symlink=get&version-id=%s' % t2.normal
|
||||||
|
# this 404 could just mean it's a delete marker
|
||||||
|
self.swift.register('HEAD', key2, swob.HTTPNotFound, {}, None)
|
||||||
|
self.swift.register('DELETE', key2, swob.HTTPNoContent, {}, None)
|
||||||
|
key3 = '/v1/AUTH_test/bucket/Key3'
|
||||||
|
self.swift.register('HEAD', key3 + '?symlink=get',
|
||||||
|
swob.HTTPOk, {}, None)
|
||||||
|
self.swift.register('DELETE', key3, swob.HTTPNoContent, {}, None)
|
||||||
|
key4 = '/v1/AUTH_test/bucket/Key4?symlink=get&version-id=null'
|
||||||
|
self.swift.register('HEAD', key4, swob.HTTPOk, {}, None)
|
||||||
|
self.swift.register('DELETE', key4, swob.HTTPNoContent, {}, None)
|
||||||
|
|
||||||
elem = Element('Delete')
|
elem = Element('Delete')
|
||||||
SubElement(elem, 'Quiet').text = 'true'
|
items = (
|
||||||
for key in ['Key1', 'Key2']:
|
('Key1', t1.normal),
|
||||||
|
('Key2', t2.normal),
|
||||||
|
('Key3', None),
|
||||||
|
('Key4', 'null'),
|
||||||
|
)
|
||||||
|
for key, version in items:
|
||||||
obj = SubElement(elem, 'Object')
|
obj = SubElement(elem, 'Object')
|
||||||
SubElement(obj, 'Key').text = key
|
SubElement(obj, 'Key').text = key
|
||||||
SubElement(obj, 'VersionId').text = 'not-supported'
|
if version:
|
||||||
|
SubElement(obj, 'VersionId').text = version
|
||||||
body = tostring(elem, use_s3ns=False)
|
body = tostring(elem, use_s3ns=False)
|
||||||
content_md5 = base64.b64encode(md5(body).digest()).strip()
|
content_md5 = base64.b64encode(md5(body).digest()).strip()
|
||||||
|
|
||||||
@ -243,7 +271,80 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
|
|||||||
'Content-MD5': content_md5},
|
'Content-MD5': content_md5},
|
||||||
body=body)
|
body=body)
|
||||||
status, headers, body = self.call_s3api(req)
|
status, headers, body = self.call_s3api(req)
|
||||||
self.assertEqual(self._get_error_code(body), 'NotImplemented')
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
|
||||||
|
self.assertEqual(self.swift.calls, [
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket'),
|
||||||
|
('HEAD', key1),
|
||||||
|
('DELETE', key1),
|
||||||
|
('HEAD', key2),
|
||||||
|
('DELETE', key2),
|
||||||
|
('HEAD', key3 + '?symlink=get'),
|
||||||
|
('DELETE', key3),
|
||||||
|
('HEAD', key4),
|
||||||
|
('DELETE', key4),
|
||||||
|
])
|
||||||
|
|
||||||
|
elem = fromstring(body)
|
||||||
|
self.assertEqual({'Key1', 'Key2', 'Key3', 'Key4'}, set(
|
||||||
|
e.findtext('Key') for e in elem.findall('Deleted')))
|
||||||
|
|
||||||
|
@s3acl
|
||||||
|
def test_object_multi_DELETE_versioned_suspended(self):
|
||||||
|
self.swift.register(
|
||||||
|
'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {}, None)
|
||||||
|
t1 = next(self.ts)
|
||||||
|
key1 = '/v1/AUTH_test/bucket/Key1' + \
|
||||||
|
'?symlink=get&version-id=%s' % t1.normal
|
||||||
|
self.swift.register('HEAD', key1, swob.HTTPOk, {}, None)
|
||||||
|
self.swift.register('DELETE', key1, swob.HTTPNoContent, {}, None)
|
||||||
|
t2 = next(self.ts)
|
||||||
|
key2 = '/v1/AUTH_test/bucket/Key2' + \
|
||||||
|
'?symlink=get&version-id=%s' % t2.normal
|
||||||
|
self.swift.register('HEAD', key2, swob.HTTPNotFound, {}, None)
|
||||||
|
self.swift.register('DELETE', key2, swob.HTTPNotFound, {}, None)
|
||||||
|
key3 = '/v1/AUTH_test/bucket/Key3'
|
||||||
|
self.swift.register('HEAD', key3, swob.HTTPOk, {}, None)
|
||||||
|
self.swift.register('DELETE', key3, swob.HTTPNoContent, {}, None)
|
||||||
|
|
||||||
|
elem = Element('Delete')
|
||||||
|
items = (
|
||||||
|
('Key1', t1),
|
||||||
|
('Key2', t2),
|
||||||
|
('Key3', None),
|
||||||
|
)
|
||||||
|
for key, ts in items:
|
||||||
|
obj = SubElement(elem, 'Object')
|
||||||
|
SubElement(obj, 'Key').text = key
|
||||||
|
if ts:
|
||||||
|
SubElement(obj, 'VersionId').text = ts.normal
|
||||||
|
body = tostring(elem, use_s3ns=False)
|
||||||
|
content_md5 = base64.b64encode(md5(body).digest()).strip()
|
||||||
|
|
||||||
|
req = Request.blank('/bucket?delete',
|
||||||
|
environ={'REQUEST_METHOD': 'POST'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header(),
|
||||||
|
'Content-MD5': content_md5},
|
||||||
|
body=body)
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200')
|
||||||
|
elem = fromstring(body)
|
||||||
|
self.assertEqual(len(elem.findall('Deleted')), 3)
|
||||||
|
|
||||||
|
self.assertEqual(self.swift.calls, [
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket'),
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/Key1'
|
||||||
|
'?symlink=get&version-id=%s' % t1.normal),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/Key1'
|
||||||
|
'?symlink=get&version-id=%s' % t1.normal),
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/Key2'
|
||||||
|
'?symlink=get&version-id=%s' % t2.normal),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/Key2'
|
||||||
|
'?symlink=get&version-id=%s' % t2.normal),
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/Key3'),
|
||||||
|
])
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_multi_DELETE_with_invalid_md5(self):
|
def test_object_multi_DELETE_with_invalid_md5(self):
|
||||||
@ -282,9 +383,12 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
|
|||||||
def test_object_multi_DELETE_lots_of_keys(self):
|
def test_object_multi_DELETE_lots_of_keys(self):
|
||||||
elem = Element('Delete')
|
elem = Element('Delete')
|
||||||
for i in range(self.conf.max_multi_delete_objects):
|
for i in range(self.conf.max_multi_delete_objects):
|
||||||
|
status = swob.HTTPOk if i % 2 else swob.HTTPNotFound
|
||||||
name = 'x' * 1000 + str(i)
|
name = 'x' * 1000 + str(i)
|
||||||
self.swift.register('HEAD', '/v1/AUTH_test/bucket/%s' % name,
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/%s' % name,
|
||||||
swob.HTTPNotFound, {}, None)
|
status, {}, None)
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/%s' % name,
|
||||||
|
swob.HTTPNoContent, {}, None)
|
||||||
obj = SubElement(elem, 'Object')
|
obj = SubElement(elem, 'Object')
|
||||||
SubElement(obj, 'Key').text = name
|
SubElement(obj, 'Key').text = name
|
||||||
body = tostring(elem, use_s3ns=False)
|
body = tostring(elem, use_s3ns=False)
|
||||||
|
@ -20,7 +20,7 @@ from mock import patch
import os
import time
import unittest
from six.moves.urllib.parse import quote, quote_plus

from swift.common import swob
from swift.common.swob import Request
@ -346,7 +346,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
            query[key] = arg
        self.assertEqual(query['format'], 'json')
        self.assertEqual(query['limit'], '1001')
        self.assertEqual(query['marker'], quote_plus('object/Y'))

    @s3acl
    def test_bucket_multipart_uploads_GET_with_key_marker(self):
@ -380,7 +380,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
            query[key] = arg
        self.assertEqual(query['format'], 'json')
        self.assertEqual(query['limit'], '1001')
        self.assertEqual(query['marker'], quote_plus('object/~'))

    @s3acl
    def test_bucket_multipart_uploads_GET_with_prefix(self):
@ -550,7 +550,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
            query[key] = arg
        self.assertEqual(query['format'], 'json')
        self.assertEqual(query['limit'], '1001')
        self.assertEqual(query['prefix'], quote_plus('dir/'))
        self.assertTrue(query.get('delimiter') is None)

    @patch('swift.common.middleware.s3api.controllers.'
@ -22,6 +22,7 @@ from os.path import join
import time
from mock import patch
import six
import json

from swift.common import swob
from swift.common.swob import Request
@ -32,6 +33,8 @@ from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
    Owner, Grant
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from swift.common.middleware.versioned_writes.object_versioning import \
    DELETE_MARKER_CONTENT_TYPE


class TestS3ApiObj(S3ApiTestCase):
@ -57,6 +60,9 @@ class TestS3ApiObj(S3ApiTestCase):
        self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers,
                            self.object_body)
        self.swift.register('GET', '/v1/AUTH_test/bucket/object?symlink=get',
                            swob.HTTPOk, self.response_headers,
                            self.object_body)
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPCreated,
                            {'etag': self.etag,
@ -370,6 +376,82 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
self.assertTrue('content-encoding' in headers)
|
self.assertTrue('content-encoding' in headers)
|
||||||
self.assertEqual(headers['content-encoding'], 'gzip')
|
self.assertEqual(headers['content-encoding'], 'gzip')
|
||||||
|
|
||||||
|
@s3acl
|
||||||
|
def test_object_GET_version_id_not_implemented(self):
|
||||||
|
# GET version that is not null
|
||||||
|
req = Request.blank('/bucket/object?versionId=2',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
|
||||||
|
with patch('swift.common.middleware.s3api.controllers.obj.'
|
||||||
|
'get_swift_info', return_value={}):
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '501', body)
|
||||||
|
|
||||||
|
# GET current version
|
||||||
|
req = Request.blank('/bucket/object?versionId=null',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
with patch('swift.common.middleware.s3api.controllers.obj.'
|
||||||
|
'get_swift_info', return_value={}):
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200', body)
|
||||||
|
self.assertEqual(body, self.object_body)
|
||||||
|
|
||||||
|
@s3acl
|
||||||
|
def test_object_GET_version_id(self):
|
||||||
|
# GET current version
|
||||||
|
req = Request.blank('/bucket/object?versionId=null',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200', body)
|
||||||
|
self.assertEqual(body, self.object_body)
|
||||||
|
|
||||||
|
# GET current version that is not null
|
||||||
|
req = Request.blank('/bucket/object?versionId=2',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200', body)
|
||||||
|
self.assertEqual(body, self.object_body)
|
||||||
|
|
||||||
|
# GET version in archive
|
||||||
|
headers = self.response_headers.copy()
|
||||||
|
headers['Content-Length'] = 6
|
||||||
|
account = 'test:tester'
|
||||||
|
grants = [Grant(User(account), 'FULL_CONTROL')]
|
||||||
|
headers.update(
|
||||||
|
encode_acl('object', ACL(Owner(account, account), grants)))
|
||||||
|
self.swift.register(
|
||||||
|
'HEAD', '/v1/AUTH_test/bucket/object?version-id=1', swob.HTTPOk,
|
||||||
|
headers, None)
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/AUTH_test/bucket/object?version-id=1', swob.HTTPOk,
|
||||||
|
headers, 'hello1')
|
||||||
|
req = Request.blank('/bucket/object?versionId=1',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '200', body)
|
||||||
|
self.assertEqual(body, b'hello1')
|
||||||
|
|
||||||
|
# Version not found
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/AUTH_test/bucket/object?version-id=A',
|
||||||
|
swob.HTTPNotFound, {}, None)
|
||||||
|
req = Request.blank('/bucket/object?versionId=A',
|
||||||
|
environ={'REQUEST_METHOD': 'GET'},
|
||||||
|
headers={'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '404')
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_PUT_error(self):
|
def test_object_PUT_error(self):
|
||||||
code = self._test_method_error('PUT', '/bucket/object',
|
code = self._test_method_error('PUT', '/bucket/object',
|
||||||
@ -393,9 +475,10 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
code = self._test_method_error('PUT', '/bucket/object',
|
code = self._test_method_error('PUT', '/bucket/object',
|
||||||
swob.HTTPLengthRequired)
|
swob.HTTPLengthRequired)
|
||||||
self.assertEqual(code, 'MissingContentLength')
|
self.assertEqual(code, 'MissingContentLength')
|
||||||
|
# Swift can 412 if the versions container is missing
|
||||||
code = self._test_method_error('PUT', '/bucket/object',
|
code = self._test_method_error('PUT', '/bucket/object',
|
||||||
swob.HTTPPreconditionFailed)
|
swob.HTTPPreconditionFailed)
|
||||||
self.assertEqual(code, 'InternalError')
|
self.assertEqual(code, 'PreconditionFailed')
|
||||||
code = self._test_method_error('PUT', '/bucket/object',
|
code = self._test_method_error('PUT', '/bucket/object',
|
||||||
swob.HTTPServiceUnavailable)
|
swob.HTTPServiceUnavailable)
|
||||||
self.assertEqual(code, 'ServiceUnavailable')
|
self.assertEqual(code, 'ServiceUnavailable')
|
||||||
@ -432,11 +515,6 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
swob.HTTPCreated,
|
swob.HTTPCreated,
|
||||||
{'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
|
{'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
|
||||||
self.assertEqual(code, 'InvalidArgument')
|
self.assertEqual(code, 'InvalidArgument')
|
||||||
code = self._test_method_error(
|
|
||||||
'PUT', '/bucket/object',
|
|
||||||
swob.HTTPCreated,
|
|
||||||
{'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo'})
|
|
||||||
self.assertEqual(code, 'NotImplemented')
|
|
||||||
code = self._test_method_error(
|
code = self._test_method_error(
|
||||||
'PUT', '/bucket/object',
|
'PUT', '/bucket/object',
|
||||||
swob.HTTPCreated,
|
swob.HTTPCreated,
|
||||||
@ -447,6 +525,35 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
swob.HTTPRequestTimeout)
|
swob.HTTPRequestTimeout)
|
||||||
self.assertEqual(code, 'RequestTimeout')
|
self.assertEqual(code, 'RequestTimeout')
|
||||||
|
|
||||||
|
def test_object_PUT_with_version(self):
|
||||||
|
self.swift.register('GET',
|
||||||
|
'/v1/AUTH_test/bucket/src_obj?version-id=foo',
|
||||||
|
swob.HTTPOk, self.response_headers,
|
||||||
|
self.object_body)
|
||||||
|
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPCreated, {
|
||||||
|
'etag': self.etag,
|
||||||
|
'last-modified': self.last_modified,
|
||||||
|
}, None)
|
||||||
|
|
||||||
|
req = Request.blank('/bucket/object', method='PUT', body='', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header(),
|
||||||
|
'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo',
|
||||||
|
})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
|
||||||
|
self.assertEqual('200 OK', status)
|
||||||
|
elem = fromstring(body, 'CopyObjectResult')
|
||||||
|
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
|
||||||
|
|
||||||
|
self.assertEqual(self.swift.calls, [
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/src_obj?version-id=foo'),
|
||||||
|
('PUT', '/v1/AUTH_test/bucket/object?version-id=foo'),
|
||||||
|
])
|
||||||
|
_, _, headers = self.swift.calls_with_headers[-1]
|
||||||
|
self.assertEqual(headers['x-copy-from'], '/bucket/src_obj')
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_PUT(self):
|
def test_object_PUT(self):
|
||||||
etag = self.response_headers['etag']
|
etag = self.response_headers['etag']
|
||||||
@ -643,7 +750,7 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_PUT_copy(self):
|
def test_object_PUT_copy(self):
|
||||||
def do_test(src_path=None):
|
def do_test(src_path):
|
||||||
date_header = self.get_date_header()
|
date_header = self.get_date_header()
|
||||||
timestamp = mktime(date_header)
|
timestamp = mktime(date_header)
|
||||||
allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
|
allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
|
||||||
@ -990,6 +1097,257 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
_, path = self.swift.calls[-1]
|
_, path = self.swift.calls[-1]
|
||||||
self.assertEqual(path.count('?'), 0)
|
self.assertEqual(path.count('?'), 0)
|
||||||
|
|
||||||
|
def test_object_DELETE_old_version_id(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': '1574360804.34906'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293')
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_current_version_id(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': 'null'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
old_versions = [{
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574341899.21751',
|
||||||
|
'content_type': 'application/found',
|
||||||
|
}, {
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574333192.15190',
|
||||||
|
'content_type': 'application/older',
|
||||||
|
}]
|
||||||
|
self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
|
||||||
|
json.dumps(old_versions))
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('GET', '/v1/AUTH_test/bucket'
|
||||||
|
'?prefix=object&versions=True'),
|
||||||
|
('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574341899.21751'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_version_id_not_implemented(self):
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
|
||||||
|
with patch('swift.common.middleware.s3api.controllers.obj.'
|
||||||
|
'get_swift_info', return_value={}):
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '501', body)
|
||||||
|
|
||||||
|
def test_object_DELETE_current_version_id_is_delete_marker(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': 'null'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
old_versions = [{
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574341899.21751',
|
||||||
|
'content_type': 'application/x-deleted;swift_versions_deleted=1',
|
||||||
|
}]
|
||||||
|
self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
|
||||||
|
json.dumps(old_versions))
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('GET', '/v1/AUTH_test/bucket'
|
||||||
|
'?prefix=object&versions=True'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_current_version_id_is_missing(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': 'null'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
old_versions = [{
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574341899.21751',
|
||||||
|
'content_type': 'application/missing',
|
||||||
|
}, {
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574333192.15190',
|
||||||
|
'content_type': 'application/found',
|
||||||
|
}]
|
||||||
|
self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
|
||||||
|
json.dumps(old_versions))
|
||||||
|
self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574341899.21751',
|
||||||
|
swob.HTTPPreconditionFailed, {}, None)
|
||||||
|
self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574333192.15190',
|
||||||
|
swob.HTTPCreated, {}, None)
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('GET', '/v1/AUTH_test/bucket'
|
||||||
|
'?prefix=object&versions=True'),
|
||||||
|
('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574341899.21751'),
|
||||||
|
('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574333192.15190'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_current_version_id_GET_error(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': 'null'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
self.swift.register('GET', '/v1/AUTH_test/bucket',
|
||||||
|
swob.HTTPServerError, {}, '')
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '500')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('GET', '/v1/AUTH_test/bucket'
|
||||||
|
'?prefix=object&versions=True'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_current_version_id_PUT_error(self):
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPOk, self.response_headers, None)
|
||||||
|
resp_headers = {'X-Object-Current-Version-Id': 'null'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
old_versions = [{
|
||||||
|
'name': 'object',
|
||||||
|
'version_id': '1574341899.21751',
|
||||||
|
'content_type': 'application/foo',
|
||||||
|
}]
|
||||||
|
self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
|
||||||
|
json.dumps(old_versions))
|
||||||
|
self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574341899.21751',
|
||||||
|
swob.HTTPServerError, {}, None)
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574358170.12293',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '500')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574358170.12293'),
|
||||||
|
('GET', '/v1/AUTH_test/bucket'
|
||||||
|
'?prefix=object&versions=True'),
|
||||||
|
('PUT', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?version-id=1574341899.21751'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
def test_object_DELETE_in_versioned_container_without_version(self):
|
||||||
|
resp_headers = {
|
||||||
|
'X-Object-Version-Id': '1574360804.34906',
|
||||||
|
'X-Backend-Content-Type': DELETE_MARKER_CONTENT_TYPE}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
|
||||||
|
swob.HTTPNoContent, {
|
||||||
|
'X-Container-Sysmeta-Versions-Enabled': True},
|
||||||
|
None)
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPNotFound, self.response_headers, None)
|
||||||
|
req = Request.blank('/bucket/object', method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
|
||||||
|
('HEAD', '/v1/AUTH_test'),
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
self.assertEqual('1574360804.34906', headers.get('x-amz-version-id'))
|
||||||
|
self.assertEqual('true', headers.get('x-amz-delete-marker'))
|
||||||
|
|
||||||
|
def test_object_DELETE_in_versioned_container_with_version_id(self):
|
||||||
|
resp_headers = {
|
||||||
|
'X-Object-Version-Id': '1574701081.61553'}
|
||||||
|
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPNoContent, resp_headers, None)
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
|
||||||
|
swob.HTTPNoContent, {
|
||||||
|
'X-Container-Sysmeta-Versions-Enabled': True},
|
||||||
|
None)
|
||||||
|
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
|
||||||
|
swob.HTTPNotFound, self.response_headers, None)
|
||||||
|
req = Request.blank('/bucket/object?versionId=1574701081.61553',
|
||||||
|
method='DELETE', headers={
|
||||||
|
'Authorization': 'AWS test:tester:hmac',
|
||||||
|
'Date': self.get_date_header()})
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
self.assertEqual([
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574701081.61553'),
|
||||||
|
('HEAD', '/v1/AUTH_test'),
|
||||||
|
('HEAD', '/v1/AUTH_test/bucket'),
|
||||||
|
('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
|
'?symlink=get&version-id=1574701081.61553'),
|
||||||
|
], self.swift.calls)
|
||||||
|
|
||||||
|
self.assertEqual('1574701081.61553', headers.get('x-amz-version-id'))
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_object_DELETE_multipart(self):
|
def test_object_DELETE_multipart(self):
|
||||||
req = Request.blank('/bucket/object',
|
req = Request.blank('/bucket/object',
|
||||||
@ -999,7 +1357,7 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
status, headers, body = self.call_s3api(req)
|
status, headers, body = self.call_s3api(req)
|
||||||
self.assertEqual(status.split()[0], '204')
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
|
||||||
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
|
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
|
||||||
self.swift.calls)
|
self.swift.calls)
|
||||||
self.assertEqual(('DELETE', '/v1/AUTH_test/bucket/object'),
|
self.assertEqual(('DELETE', '/v1/AUTH_test/bucket/object'),
|
||||||
self.swift.calls[-1])
|
self.swift.calls[-1])
|
||||||
@ -1017,10 +1375,11 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
status, headers, body = self.call_s3api(req)
|
status, headers, body = self.call_s3api(req)
|
||||||
self.assertEqual(status.split()[0], '204')
|
self.assertEqual(status.split()[0], '204')
|
||||||
|
|
||||||
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
|
self.assertEqual(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
|
||||||
self.swift.calls)
|
self.swift.calls[0])
|
||||||
self.assertNotIn(('DELETE', '/v1/AUTH_test/bucket/object'),
|
# the s3acl retests w/ a get_container_info HEAD @ self.swift.calls[1]
|
||||||
self.swift.calls)
|
self.assertEqual(('DELETE', '/v1/AUTH_test/bucket/object'),
|
||||||
|
self.swift.calls[-1])
|
||||||
|
|
||||||
@s3acl
|
@s3acl
|
||||||
def test_slo_object_DELETE(self):
|
def test_slo_object_DELETE(self):
|
||||||
@ -1039,7 +1398,7 @@ class TestS3ApiObj(S3ApiTestCase):
|
|||||||
self.assertEqual(status.split()[0], '204')
|
self.assertEqual(status.split()[0], '204')
|
||||||
self.assertEqual(body, b'')
|
self.assertEqual(body, b'')
|
||||||
|
|
||||||
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
|
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
|
||||||
self.swift.calls)
|
self.swift.calls)
|
||||||
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
|
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
|
||||||
'?multipart-manifest=delete'),
|
'?multipart-manifest=delete'),
|
||||||
|
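Taken together, the object tests above pin down one translation: an S3-style '?versionId=X' query on the client request is expected to surface as a '?version-id=X' parameter (alongside 'symlink=get') on the backend Swift request, with 'versionId=null' treated as the current object and a 501 returned when the object_versioning middleware is absent from the pipeline. A rough sketch of that parameter mapping, using a hypothetical params_to_swift() helper rather than the controller's actual code:

from six.moves.urllib.parse import parse_qs, urlencode

def params_to_swift(s3_query_string):
    # Hypothetical helper, not the controller's real code: map the S3
    # 'versionId' parameter onto Swift's native 'version-id' parameter
    # and leave everything else untouched.
    params = {k: v[0] for k, v in parse_qs(s3_query_string).items()}
    if 'versionId' in params:
        params['version-id'] = params.pop('versionId')
    return urlencode(sorted(params.items()))

# The DELETE tests above expect backend queries shaped like this:
print(params_to_swift('symlink=get&versionId=1574358170.12293'))
# -> symlink=get&version-id=1574358170.12293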
@ -15,42 +15,180 @@
 
 import unittest
 
-from swift.common.swob import Request
+from mock import patch
+
+from swift.common.swob import Request, HTTPNoContent
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+    Element, SubElement
 
 from test.unit.common.middleware.s3api import S3ApiTestCase
-from swift.common.middleware.s3api.etree import fromstring
 
 
 class TestS3ApiVersioning(S3ApiTestCase):
 
-    def setUp(self):
-        super(TestS3ApiVersioning, self).setUp()
-
-    def test_object_versioning_GET(self):
-        req = Request.blank('/bucket/object?versioning',
+    def _versioning_GET(self, path):
+        req = Request.blank('%s?versioning' % path,
                             environ={'REQUEST_METHOD': 'GET'},
                             headers={'Authorization': 'AWS test:tester:hmac',
                                      'Date': self.get_date_header()})
 
         status, headers, body = self.call_s3api(req)
-        self.assertEqual(status.split()[0], '200')
-        fromstring(body, 'VersioningConfiguration')
+        return status, headers, body
 
-    def test_object_versioning_PUT(self):
-        req = Request.blank('/bucket/object?versioning',
+    def _versioning_GET_not_configured(self, path):
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+                            HTTPNoContent, {}, None)
+
+        status, headers, body = self._versioning_GET(path)
+        self.assertEqual(status.split()[0], '200')
+        elem = fromstring(body, 'VersioningConfiguration')
+        self.assertEqual(elem.getchildren(), [])
+
+    def _versioning_GET_enabled(self, path):
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent, {
+            'X-Container-Sysmeta-Versions-Enabled': 'True',
+        }, None)
+
+        status, headers, body = self._versioning_GET(path)
+        self.assertEqual(status.split()[0], '200')
+        elem = fromstring(body, 'VersioningConfiguration')
+        status = elem.find('./Status').text
+        self.assertEqual(status, 'Enabled')
+
+    def _versioning_GET_suspended(self, path):
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent, {
+            'X-Container-Sysmeta-Versions-Enabled': 'False',
+        }, None)
+
+        status, headers, body = self._versioning_GET('/bucket/object')
+        self.assertEqual(status.split()[0], '200')
+        elem = fromstring(body, 'VersioningConfiguration')
+        status = elem.find('./Status').text
+        self.assertEqual(status, 'Suspended')
+
+    def _versioning_PUT_error(self, path):
+        # Root tag is not VersioningConfiguration
+        elem = Element('foo')
+        SubElement(elem, 'Status').text = 'Enabled'
+        xml = tostring(elem)
+
+        req = Request.blank('%s?versioning' % path,
                             environ={'REQUEST_METHOD': 'PUT'},
                             headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+                                     'Date': self.get_date_header()},
+                            body=xml)
         status, headers, body = self.call_s3api(req)
-        self.assertEqual(self._get_error_code(body), 'NotImplemented')
+        self.assertEqual(status.split()[0], '400')
 
-    def test_bucket_versioning_GET(self):
-        req = Request.blank('/bucket?versioning',
-                            environ={'REQUEST_METHOD': 'GET'},
+        # Status is not "Enabled" or "Suspended"
+        elem = Element('VersioningConfiguration')
+        SubElement(elem, 'Status').text = 'enabled'
+        xml = tostring(elem)
+
+        req = Request.blank('%s?versioning' % path,
+                            environ={'REQUEST_METHOD': 'PUT'},
                             headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+                                     'Date': self.get_date_header()},
+                            body=xml)
         status, headers, body = self.call_s3api(req)
-        fromstring(body, 'VersioningConfiguration')
+        self.assertEqual(status.split()[0], '400')
+
+    def _versioning_PUT_enabled(self, path):
+        elem = Element('VersioningConfiguration')
+        SubElement(elem, 'Status').text = 'Enabled'
+        xml = tostring(elem)
+
+        self.swift.register('POST', '/v1/AUTH_test/bucket', HTTPNoContent,
+                            {'X-Container-Sysmeta-Versions-Enabled': 'True'},
+                            None)
+
+        req = Request.blank('%s?versioning' % path,
+                            environ={'REQUEST_METHOD': 'PUT'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()},
+                            body=xml)
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+
+        calls = self.swift.calls_with_headers
+        self.assertEqual(calls[-1][0], 'POST')
+        self.assertIn(('X-Versions-Enabled', 'true'),
+                      list(calls[-1][2].items()))
+
+    def _versioning_PUT_suspended(self, path):
+        elem = Element('VersioningConfiguration')
+        SubElement(elem, 'Status').text = 'Suspended'
+        xml = tostring(elem)
+
+        self.swift.register('POST', '/v1/AUTH_test/bucket', HTTPNoContent,
+                            {'x-container-sysmeta-versions-enabled': 'False'},
+                            None)
+
+        req = Request.blank('%s?versioning' % path,
+                            environ={'REQUEST_METHOD': 'PUT'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()},
+                            body=xml)
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+
+        calls = self.swift.calls_with_headers
+        self.assertEqual(calls[-1][0], 'POST')
+        self.assertIn(('X-Versions-Enabled', 'false'),
+                      list(calls[-1][2].items()))
+
+    def test_object_versioning_GET_not_configured(self):
+        self._versioning_GET_not_configured('/bucket/object')
+
+    def test_object_versioning_GET_enabled(self):
+        self._versioning_GET_enabled('/bucket/object')
+
+    def test_object_versioning_GET_suspended(self):
+        self._versioning_GET_suspended('/bucket/object')
+
+    def test_object_versioning_PUT_error(self):
+        self._versioning_PUT_error('/bucket/object')
+
+    def test_object_versioning_PUT_enabled(self):
+        self._versioning_PUT_enabled('/bucket/object')
+
+    def test_object_versioning_PUT_suspended(self):
+        self._versioning_PUT_suspended('/bucket/object')
+
+    def test_bucket_versioning_GET_not_configured(self):
+        self._versioning_GET_not_configured('/bucket')
+
+    def test_bucket_versioning_GET_enabled(self):
+        self._versioning_GET_enabled('/bucket')
+
+    def test_bucket_versioning_GET_suspended(self):
+        self._versioning_GET_suspended('/bucket')
+
+    def test_bucket_versioning_PUT_error(self):
+        self._versioning_PUT_error('/bucket')
+
+    def test_object_versioning_PUT_not_implemented(self):
+        elem = Element('VersioningConfiguration')
+        SubElement(elem, 'Status').text = 'Enabled'
+        xml = tostring(elem)
+
+        req = Request.blank('/bucket?versioning',
+                            environ={'REQUEST_METHOD': 'PUT'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()},
+                            body=xml)
+
+        with patch('swift.common.middleware.s3api.controllers.versioning.'
+                   'get_swift_info', return_value={}):
+            status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '501', body)
+
+    def test_bucket_versioning_PUT_enabled(self):
+        self._versioning_PUT_enabled('/bucket')
+
+    def test_bucket_versioning_PUT_suspended(self):
+        self._versioning_PUT_suspended('/bucket')
 
 
 if __name__ == '__main__':
     unittest.main()
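As a usage-level illustration (not part of the change itself): the round trip exercised by the versioning tests corresponds to the standard S3 bucket-versioning calls. Assuming a boto3 client pointed at an s3api endpoint, with the endpoint URL and credentials below as placeholders:

import boto3

# Placeholders: adjust endpoint_url and credentials for your deployment.
s3 = boto3.client('s3', endpoint_url='http://localhost:8080',
                  aws_access_key_id='test:tester',
                  aws_secret_access_key='testing')

# Per the tests above, this surfaces as a container POST carrying
# an X-Versions-Enabled: true header on the Swift side.
s3.put_bucket_versioning(Bucket='bucket',
                         VersioningConfiguration={'Status': 'Enabled'})

# And this surfaces as a container HEAD; 'Enabled'/'Suspended' is derived
# from the X-Container-Sysmeta-Versions-Enabled metadata checked above.
print(s3.get_bucket_versioning(Bucket='bucket').get('Status'))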