From 2759d5d51c5e3c684081e066f9efeb495c141524 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Fri, 13 Sep 2019 12:25:24 -0500 Subject: [PATCH] New Object Versioning mode This patch adds a new object versioning mode. This new mode provides a new set of APIs for users to interact with older versions of an object. It also changes the naming scheme of older versions and adds a version-id to each object. This new mode is not backwards compatible or interchangeable with the other two modes (i.e., stack and history), especially due to the changes in the namimg scheme of older versions. This new mode will also serve as a foundation for adding S3 versioning compatibility in the s3api middleware. Note that this does not (yet) support using a versioned container as a source in container-sync. Container sync should be enhanced to sync previous versions of objects. Change-Id: Ic7d39ba425ca324eeb4543a2ce8d03428e2225a1 Co-Authored-By: Clay Gerrard Co-Authored-By: Tim Burke Co-Authored-By: Thiago da Silva --- doc/saio/swift/proxy-server.conf | 1 + doc/source/logs.rst | 1 + doc/source/middleware.rst | 13 +- etc/proxy-server.conf-sample | 2 + swift/common/middleware/bulk.py | 9 +- swift/common/middleware/container_sync.py | 8 + swift/common/middleware/copy.py | 12 + swift/common/middleware/listing_formats.py | 5 + swift/common/middleware/slo.py | 6 + swift/common/middleware/symlink.py | 18 +- .../middleware/versioned_writes/__init__.py | 51 + .../legacy.py} | 23 +- .../versioned_writes/object_versioning.py | 1467 ++++++++ swift/common/swob.py | 27 + swift/common/utils.py | 5 + swift/container/backend.py | 2 +- swift/container/sync.py | 19 + swift/obj/server.py | 9 +- swift/proxy/controllers/base.py | 14 +- swift/proxy/controllers/obj.py | 23 +- test/functional/__init__.py | 3 + test/functional/swift_test_client.py | 18 +- test/functional/test_object_versioning.py | 2669 ++++++++++++++ test/probe/common.py | 40 +- test/probe/test_container_sync.py | 150 + test/probe/test_object_versioning.py | 233 ++ test/probe/test_sharder.py | 290 +- .../middleware/test_object_versioning.py | 3214 +++++++++++++++++ test/unit/common/middleware/test_slo.py | 87 +- .../middleware/test_versioned_writes.py | 7 +- test/unit/common/test_utils.py | 26 + test/unit/common/test_wsgi.py | 7 +- test/unit/obj/test_server.py | 11 +- 33 files changed, 8307 insertions(+), 163 deletions(-) create mode 100644 swift/common/middleware/versioned_writes/__init__.py rename swift/common/middleware/{versioned_writes.py => versioned_writes/legacy.py} (98%) create mode 100644 swift/common/middleware/versioned_writes/object_versioning.py create mode 100644 test/functional/test_object_versioning.py create mode 100644 test/probe/test_object_versioning.py create mode 100644 test/unit/common/middleware/test_object_versioning.py diff --git a/doc/saio/swift/proxy-server.conf b/doc/saio/swift/proxy-server.conf index fb1b4fc80b..ec52958ade 100644 --- a/doc/saio/swift/proxy-server.conf +++ b/doc/saio/swift/proxy-server.conf @@ -68,6 +68,7 @@ use = egg:swift#gatekeeper [filter:versioned_writes] use = egg:swift#versioned_writes allow_versioned_writes = true +allow_object_versioning = true [filter:copy] use = egg:swift#copy diff --git a/doc/source/logs.rst b/doc/source/logs.rst index eeba1f8cc7..e2cd553dc4 100644 --- a/doc/source/logs.rst +++ b/doc/source/logs.rst @@ -140,6 +140,7 @@ SSC :ref:`copy` SYM :ref:`symlink` SH :ref:`sharding_doc` S3 :ref:`s3api` +OV :ref:`object_versioning` ======================= ============================= diff --git 
a/doc/source/middleware.rst b/doc/source/middleware.rst index 2c3fa72e23..ca48bda952 100644 --- a/doc/source/middleware.rst +++ b/doc/source/middleware.rst @@ -278,12 +278,12 @@ Name Check (Forbidden Character Filter) :members: :show-inheritance: -.. _versioned_writes: +.. _object_versioning: Object Versioning ================= -.. automodule:: swift.common.middleware.versioned_writes +.. automodule:: swift.common.middleware.versioned_writes.object_versioning :members: :show-inheritance: @@ -371,6 +371,15 @@ TempURL :members: :show-inheritance: +.. _versioned_writes: + +Versioned Writes +================= + +.. automodule:: swift.common.middleware.versioned_writes.legacy + :members: + :show-inheritance: + XProfile ============== diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index 94905d46ec..c1468047f9 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -1072,6 +1072,8 @@ use = egg:swift#versioned_writes # in the container configuration file, which will be eventually # deprecated. See documentation for more details. # allow_versioned_writes = false +# Enables Swift object-versioning API +# allow_object_versioning = false # Note: Put after auth and before dlo and slo middlewares. # If you don't put it in the pipeline, it will be inserted for you. diff --git a/swift/common/middleware/bulk.py b/swift/common/middleware/bulk.py index 0ca2535da6..86798d755f 100644 --- a/swift/common/middleware/bulk.py +++ b/swift/common/middleware/bulk.py @@ -457,7 +457,8 @@ class Bulk(object): failed_files.append([wsgi_quote(str_to_wsgi(obj_name)), HTTPPreconditionFailed().status]) continue - yield (obj_name, delete_path) + yield (obj_name, delete_path, + obj_to_delete.get('version_id')) def objs_then_containers(objs_to_delete): # process all objects first @@ -467,13 +468,17 @@ class Bulk(object): yield delete_filter(lambda name: '/' not in name.strip('/'), objs_to_delete) - def do_delete(obj_name, delete_path): + def do_delete(obj_name, delete_path, version_id): delete_obj_req = make_subrequest( req.environ, method='DELETE', path=wsgi_quote(str_to_wsgi(delete_path)), headers={'X-Auth-Token': req.headers.get('X-Auth-Token')}, body='', agent='%(orig)s ' + user_agent, swift_source=swift_source) + if version_id is None: + delete_obj_req.params = {} + else: + delete_obj_req.params = {'version-id': version_id} return (delete_obj_req.get_response(self.app), obj_name, 0) with StreamingPile(self.delete_concurrency) as pile: diff --git a/swift/common/middleware/container_sync.py b/swift/common/middleware/container_sync.py index bde33ca70c..74c63afa47 100644 --- a/swift/common/middleware/container_sync.py +++ b/swift/common/middleware/container_sync.py @@ -87,6 +87,14 @@ class ContainerSync(object): info = get_container_info( req.environ, self.app, swift_source='CS') sync_to = req.headers.get('x-container-sync-to') + if req.method in ('PUT', 'POST') and cont and not obj: + versions_cont = info.get( + 'sysmeta', {}).get('versions-container') + if sync_to and versions_cont: + raise HTTPBadRequest( + 'Cannot configure container sync on a container ' + 'with object versioning configured.', + request=req) if not self.allow_full_urls: if sync_to and not sync_to.startswith('//'): diff --git a/swift/common/middleware/copy.py b/swift/common/middleware/copy.py index f256133b53..598653c69a 100644 --- a/swift/common/middleware/copy.py +++ b/swift/common/middleware/copy.py @@ -319,6 +319,9 @@ class ServerSideCopyMiddleware(object): if 'last-modified' in source_resp.headers: 
resp_headers['X-Copied-From-Last-Modified'] = \ source_resp.headers['last-modified'] + if 'X-Object-Version-Id' in source_resp.headers: + resp_headers['X-Copied-From-Version-Id'] = \ + source_resp.headers['X-Object-Version-Id'] # Existing sys and user meta of source object is added to response # headers in addition to the new ones. _copy_headers(sink_req.headers, resp_headers) @@ -374,6 +377,8 @@ class ServerSideCopyMiddleware(object): sink_req.headers.update(req.headers) params = sink_req.params + params_updated = False + if params.get('multipart-manifest') == 'get': if 'X-Static-Large-Object' in source_resp.headers: params['multipart-manifest'] = 'put' @@ -381,6 +386,13 @@ class ServerSideCopyMiddleware(object): del params['multipart-manifest'] sink_req.headers['X-Object-Manifest'] = \ source_resp.headers['X-Object-Manifest'] + params_updated = True + + if 'version-id' in params: + del params['version-id'] + params_updated = True + + if params_updated: sink_req.params = params # Set swift.source, data source, content length and etag diff --git a/swift/common/middleware/listing_formats.py b/swift/common/middleware/listing_formats.py index 8c07965af6..926bdbf2c0 100644 --- a/swift/common/middleware/listing_formats.py +++ b/swift/common/middleware/listing_formats.py @@ -170,7 +170,12 @@ class ListingFilter(object): params['format'] = 'json' req.params = params + # Give other middlewares a chance to be in charge + env.setdefault('swift.format_listing', True) status, headers, resp_iter = req.call_application(self.app) + if not env.get('swift.format_listing'): + start_response(status, headers) + return resp_iter header_to_index = {} resp_content_type = resp_length = None diff --git a/swift/common/middleware/slo.py b/swift/common/middleware/slo.py index bbe2cdca0b..24d3da1d5a 100644 --- a/swift/common/middleware/slo.py +++ b/swift/common/middleware/slo.py @@ -1417,6 +1417,9 @@ class StaticLargeObject(object): segments = [{ 'sub_slo': True, 'name': obj_path}] + if 'version-id' in req.params: + segments[0]['version_id'] = req.params['version-id'] + while segments: # We chose not to set the limit at max_manifest_segments # in the case this value was decreased by operators. 
@@ -1469,6 +1472,9 @@ class StaticLargeObject(object): new_env['REQUEST_METHOD'] = 'GET' del(new_env['wsgi.input']) new_env['QUERY_STRING'] = 'multipart-manifest=get' + if 'version-id' in req.params: + new_env['QUERY_STRING'] += \ + '&version-id=' + req.params['version-id'] new_env['CONTENT_LENGTH'] = 0 new_env['HTTP_USER_AGENT'] = \ '%s MultipartDELETE' % new_env.get('HTTP_USER_AGENT') diff --git a/swift/common/middleware/symlink.py b/swift/common/middleware/symlink.py index 1854e712ad..4add4acda7 100644 --- a/swift/common/middleware/symlink.py +++ b/swift/common/middleware/symlink.py @@ -202,7 +202,8 @@ import os from cgi import parse_header from swift.common.utils import get_logger, register_swift_info, split_path, \ - MD5_OF_EMPTY_STRING, close_if_possible, closing_if_possible + MD5_OF_EMPTY_STRING, close_if_possible, closing_if_possible, \ + config_true_value from swift.common.constraints import check_account_format from swift.common.wsgi import WSGIContext, make_subrequest from swift.common.request_helpers import get_sys_meta_prefix, \ @@ -228,6 +229,8 @@ TGT_ETAG_SYSMETA_SYMLINK_HDR = \ get_sys_meta_prefix('object') + 'symlink-target-etag' TGT_BYTES_SYSMETA_SYMLINK_HDR = \ get_sys_meta_prefix('object') + 'symlink-target-bytes' +SYMLOOP_EXTEND = get_sys_meta_prefix('object') + 'symloop-extend' +ALLOW_RESERVED_NAMES = get_sys_meta_prefix('object') + 'allow-reserved-names' def _validate_and_prep_request_headers(req): @@ -477,7 +480,13 @@ class SymlinkObjectContext(WSGIContext): raise LinkIterError() # format: /// new_req = build_traversal_req(symlink_target) - self._loop_count += 1 + if not config_true_value( + self._response_header_value(SYMLOOP_EXTEND)): + self._loop_count += 1 + if config_true_value( + self._response_header_value(ALLOW_RESERVED_NAMES)): + new_req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + return self._recursive_get_head(new_req, target_etag=resp_etag) else: final_etag = self._response_header_value('etag') @@ -516,6 +525,8 @@ class SymlinkObjectContext(WSGIContext): new_req = make_subrequest( req.environ, path=wsgi_quote(symlink_target_path), method='HEAD', swift_source='SYM') + if req.allow_reserved_names: + new_req.headers['X-Backend-Allow-Reserved-Names'] = 'true' self._last_target_path = symlink_target_path resp = self._recursive_get_head(new_req, target_etag=etag, follow_softlinks=False) @@ -659,6 +670,9 @@ class SymlinkObjectContext(WSGIContext): req.environ['swift.leave_relative_location'] = True errmsg = 'The requested POST was applied to a symlink. POST ' +\ 'directly to the target to apply requested metadata.' + for key, value in self._response_headers: + if key.lower().startswith('x-object-sysmeta-'): + headers[key] = value raise HTTPTemporaryRedirect( body=errmsg, headers=headers) else: diff --git a/swift/common/middleware/versioned_writes/__init__.py b/swift/common/middleware/versioned_writes/__init__.py new file mode 100644 index 0000000000..1bc41c3e20 --- /dev/null +++ b/swift/common/middleware/versioned_writes/__init__.py @@ -0,0 +1,51 @@ +# Copyright (c) 2019 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +""" +Implements middleware for object versioning which comprises an instance of a +:class:`~swift.common.middleware.versioned_writes.legacy. +VersionedWritesMiddleware` combined with an instance of an +:class:`~swift.common.middleware.versioned_writes.object_versioning. +ObjectVersioningMiddleware`. +""" +from swift.common.middleware.versioned_writes. \ + legacy import CLIENT_VERSIONS_LOC, CLIENT_HISTORY_LOC, \ + VersionedWritesMiddleware +from swift.common.middleware.versioned_writes. \ + object_versioning import ObjectVersioningMiddleware + +from swift.common.utils import config_true_value, register_swift_info, \ + get_swift_info + + +def filter_factory(global_conf, **local_conf): + """Provides a factory function for loading versioning middleware.""" + conf = global_conf.copy() + conf.update(local_conf) + if config_true_value(conf.get('allow_versioned_writes')): + register_swift_info('versioned_writes', allowed_flags=( + CLIENT_VERSIONS_LOC, CLIENT_HISTORY_LOC)) + + allow_object_versioning = config_true_value(conf.get( + 'allow_object_versioning')) + if allow_object_versioning: + register_swift_info('object_versioning') + + def versioning_filter(app): + if allow_object_versioning: + if 'symlink' not in get_swift_info(): + raise ValueError('object versioning requires symlinks') + app = ObjectVersioningMiddleware(app, conf) + return VersionedWritesMiddleware(app, conf) + return versioning_filter diff --git a/swift/common/middleware/versioned_writes.py b/swift/common/middleware/versioned_writes/legacy.py similarity index 98% rename from swift/common/middleware/versioned_writes.py rename to swift/common/middleware/versioned_writes/legacy.py index 5c9bce20b1..59fe6a73f6 100644 --- a/swift/common/middleware/versioned_writes.py +++ b/swift/common/middleware/versioned_writes/legacy.py @@ -14,6 +14,11 @@ # limitations under the License. """ +.. note:: + This middleware supports two legacy modes of object versioning that are + now replaced by a new mode. It is recommended to use the new + :ref:`Object Versioning <object_versioning>` mode for new containers. + Object versioning in swift is implemented by setting a flag on the container to tell swift to version all objects in the container.
The value of the flag is the URL-encoded container name where the versions are stored (commonly referred @@ -225,7 +230,7 @@ import json import time from swift.common.utils import get_logger, Timestamp, \ - register_swift_info, config_true_value, close_if_possible, FileLikeIter + config_true_value, close_if_possible, FileLikeIter from swift.common.request_helpers import get_sys_meta_prefix, \ copy_header_subset from swift.common.wsgi import WSGIContext, make_pre_authed_request @@ -457,6 +462,7 @@ class VersionedWritesContext(WSGIContext): put_path_info = "/%s/%s/%s/%s" % ( api_version, account_name, versions_cont, vers_obj_name) + req.environ['QUERY_STRING'] = '' put_resp = self._put_versioned_obj(req, put_path_info, get_resp) self._check_response_error(req, put_resp) @@ -601,6 +607,7 @@ class VersionedWritesContext(WSGIContext): break obj_to_restore = bytes_to_wsgi( version_to_restore['name'].encode('utf-8')) + req.environ['QUERY_STRING'] = '' restored_path = self._restore_data( req, versions_cont, api_version, account_name, container_name, object_name, obj_to_restore) @@ -632,6 +639,7 @@ class VersionedWritesContext(WSGIContext): # current object and delete the previous version prev_obj_name = bytes_to_wsgi( previous_version['name'].encode('utf-8')) + req.environ['QUERY_STRING'] = '' restored_path = self._restore_data( req, versions_cont, api_version, account_name, container_name, object_name, prev_obj_name) @@ -856,16 +864,3 @@ class VersionedWritesMiddleware(object): return error_response(env, start_response) else: return self.app(env, start_response) - - -def filter_factory(global_conf, **local_conf): - conf = global_conf.copy() - conf.update(local_conf) - if config_true_value(conf.get('allow_versioned_writes')): - register_swift_info('versioned_writes', allowed_flags=( - CLIENT_VERSIONS_LOC, CLIENT_HISTORY_LOC)) - - def obj_versions_filter(app): - return VersionedWritesMiddleware(app, conf) - - return obj_versions_filter diff --git a/swift/common/middleware/versioned_writes/object_versioning.py b/swift/common/middleware/versioned_writes/object_versioning.py new file mode 100644 index 0000000000..c2c0bfff1f --- /dev/null +++ b/swift/common/middleware/versioned_writes/object_versioning.py @@ -0,0 +1,1467 @@ +# Copyright (c) 2020 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Object versioning in Swift has three different modes. There are two +:ref:`legacy modes <versioned_writes>` that have a similar API with slight +differences in behavior, and this middleware introduces a new mode with a +completely redesigned API and implementation. + +In terms of the implementation, this middleware relies heavily on the use of +static symlinks to reduce the amount of backend data movement that was part of +the two legacy modes. It also introduces a new API for enabling the feature +and for interacting with older versions of an object. + +Compatibility between modes +=========================== + +This new mode is not backwards compatible or interchangeable with the +two legacy modes.
This means that existing containers that are being versioned +by the two legacy modes cannot enable the new mode. The new mode can only be +enabled on a new container or a container that has neither the +``X-Versions-Location`` nor the ``X-History-Location`` header set. Attempting +to enable the new mode on a container with either header will result in a +``400 Bad Request`` response. + +Enable Object Versioning in a Container +======================================= + +After the introduction of this feature, containers in a Swift cluster will be +in one of three possible states: 1. object versioning never enabled, +2. object versioning enabled, or 3. object versioning disabled. Once versioning +has been enabled on a container, the container will always carry a flag stating +whether versioning is currently enabled or disabled. + +Clients enable object versioning on a container by performing either a PUT or +POST request with the header ``X-Versions-Enabled: true``. Upon enabling +versioning for the first time, the middleware will create a hidden container +where object versions are stored. This hidden container will inherit the same +Storage Policy as its parent container. + +To disable, clients send a POST request with the header +``X-Versions-Enabled: false``. When versioning is disabled, the old versions +remain unchanged. + +To delete a versioned container, versioning must be disabled and all versions +of all objects must be deleted before the container can be deleted. At that +point, the hidden container will also be deleted. + +Object CRUD Operations to a Versioned Container +=============================================== + +When data is ``PUT`` into a versioned container (a container with the +versioning flag enabled), the actual object is written to a hidden container +and a symlink object is written to the parent container. Every object is +assigned a version id. This id can be retrieved from the +``X-Object-Version-Id`` header in the PUT response. + +.. note:: + + When object versioning is disabled on a container, new data will no longer + be versioned, but older versions remain untouched. Any new data ``PUT`` + will result in an object with a ``null`` version-id. The versioning API can + be used to both list and operate on previous versions even while versioning + is disabled. + + If versioning is re-enabled and an overwrite occurs on a ``null`` id object, + the object will be versioned off with a regular version-id. + +A ``GET`` to a versioned object will return the current version of the object. +The ``X-Object-Version-Id`` header is also returned in the response. + +A ``POST`` to a versioned object will update the most current object metadata +as normal, but will not create a new version of the object. In other words, +new versions are only created when the content of the object changes. + +On ``DELETE``, the middleware will write a zero-byte "delete marker" object +version that notes **when** the delete took place. The symlink object will also +be deleted from the versioned container. The object will no longer appear in +container listings for the versioned container and future requests for it will +return ``404 Not Found``. However, the content of previous versions will still +be recoverable. + +Object Versioning API +===================== + +Clients can now operate on previous versions of an object using this new +versioning API.
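The enable-and-write flow described in the sections above can be illustrated with a short client-side sketch. This is a minimal example using the ``requests`` library against a hypothetical cluster; the storage URL, token, container and object names are placeholders, and error handling is omitted::

    import requests

    # Hypothetical placeholders -- substitute a real storage URL and token.
    storage_url = 'http://127.0.0.1:8080/v1/AUTH_test'
    auth = {'X-Auth-Token': 'AUTH_tk_example'}

    # Enable versioning on the container (POST works for an existing
    # container; a PUT with the same header creates a versioned container).
    headers = dict(auth)
    headers['X-Versions-Enabled'] = 'true'
    requests.post(storage_url + '/con', headers=headers)

    # Write an object; the middleware assigns it a version id.
    resp = requests.put(storage_url + '/con/obj', headers=auth, data=b'v1')
    print(resp.headers.get('X-Object-Version-Id'))

    # Overwrite it; the previous version is kept in the hidden container.
    resp = requests.put(storage_url + '/con/obj', headers=auth, data=b'v2')
    print(resp.headers.get('X-Object-Version-Id'))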
+ +First, to list previous versions, issue a ``GET`` request to the versioned +container with the query parameter:: + + ?versions + +To list a container with a large number of object versions, clients can +also use the ``version_marker`` parameter together with the ``marker`` +parameter. While the ``marker`` parameter is used to specify an object name, +the ``version_marker`` parameter is used to specify the version id. + +All other pagination parameters can be used in conjunction with the +``versions`` parameter. + +During container listings, delete markers can be identified with the +content-type ``application/x-deleted;swift_versions_deleted=1``. The most +current version of an object can be identified by the field ``is_latest``. + +To operate on previous versions, clients can use the query parameter:: + + ?version-id=<version-id> + +where ``<version-id>`` is the value from the ``X-Object-Version-Id`` header. + +Only COPY, HEAD, GET and DELETE operations can be performed on previous +versions. Either a PUT or POST request with a ``version-id`` parameter will +result in a ``400 Bad Request`` response. + +A HEAD/GET request to a delete-marker will result in a ``404 Not Found`` +response. + +When issuing a DELETE request with a ``version-id`` parameter, no delete +marker is written. A DELETE request with a ``version-id`` parameter to +the current object will result in both the symlink and the backing data +being deleted. A DELETE to any other version will result in only that version +being deleted, with no changes made to the symlink pointing to the current +version. + +How to Enable Object Versioning in a Swift Cluster +================================================== + +To enable this new mode in a Swift cluster, the ``versioned_writes`` and +``symlink`` middlewares must be added to the proxy pipeline; you must also set +the option ``allow_object_versioning`` to ``True``.
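As a companion to the API summary above, here is a minimal sketch of listing versions and then reading and deleting one specific version, again using ``requests`` with hypothetical placeholder names; a real client would also need to handle pagination and URL-encoding::

    import requests

    # Hypothetical placeholders -- substitute a real storage URL and token.
    storage_url = 'http://127.0.0.1:8080/v1/AUTH_test'
    auth = {'X-Auth-Token': 'AUTH_tk_example'}

    # List all versions of all objects in the container (JSON listing).
    listing = requests.get(storage_url + '/con', headers=auth,
                           params={'versions': '', 'format': 'json'}).json()
    for entry in listing:
        print(entry['name'], entry['version_id'], entry['is_latest'])

    # Fetch, then delete, one specific version of an object.
    version_id = listing[0]['version_id']
    requests.get(storage_url + '/con/obj', headers=auth,
                 params={'version-id': version_id})
    requests.delete(storage_url + '/con/obj', headers=auth,
                    params={'version-id': version_id})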
+""" + +import calendar +import itertools +import json +import six +import time + +from cgi import parse_header +from six.moves.urllib.parse import unquote + +from swift.common.constraints import MAX_FILE_SIZE, valid_api_version +from swift.common.http import is_success, is_client_error, HTTP_NOT_FOUND, \ + HTTP_CONFLICT +from swift.common.request_helpers import get_sys_meta_prefix, \ + copy_header_subset, get_reserved_name, split_reserved_name +from swift.common.middleware.symlink import TGT_OBJ_SYMLINK_HDR, \ + TGT_ETAG_SYSMETA_SYMLINK_HDR, SYMLOOP_EXTEND, ALLOW_RESERVED_NAMES, \ + TGT_BYTES_SYSMETA_SYMLINK_HDR, TGT_ACCT_SYMLINK_HDR +from swift.common.swob import HTTPPreconditionFailed, HTTPServiceUnavailable, \ + HTTPBadRequest, str_to_wsgi, bytes_to_wsgi, wsgi_quote, \ + wsgi_to_str, wsgi_unquote, Request, HTTPNotFound, HTTPException, \ + HTTPRequestEntityTooLarge, HTTPInternalServerError, HTTPNotAcceptable, \ + HTTPConflict +from swift.common.storage_policy import POLICIES +from swift.common.utils import get_logger, Timestamp, \ + config_true_value, close_if_possible, closing_if_possible, \ + FileLikeIter, split_path, parse_content_type, RESERVED_STR +from swift.common.wsgi import WSGIContext, make_pre_authed_request +from swift.proxy.controllers.base import get_container_info + + +DELETE_MARKER_CONTENT_TYPE = 'application/x-deleted;swift_versions_deleted=1' +CLIENT_VERSIONS_ENABLED = 'x-versions-enabled' +SYSMETA_VERSIONS_ENABLED = \ + get_sys_meta_prefix('container') + 'versions-enabled' +SYSMETA_VERSIONS_CONT = get_sys_meta_prefix('container') + 'versions-container' +SYSMETA_PARENT_CONT = get_sys_meta_prefix('container') + 'parent-container' +SYSMETA_VERSIONS_SYMLINK = get_sys_meta_prefix('object') + 'versions-symlink' + + +def build_listing(*to_splice, **kwargs): + reverse = kwargs.pop('reverse') + if kwargs: + raise TypeError('Invalid keyword arguments received: %r' % kwargs) + + def merge_key(item): + if 'subdir' in item: + return item['subdir'] + return item['name'] + + return json.dumps(sorted( + itertools.chain(*to_splice), + key=merge_key, + reverse=reverse, + )).encode('ascii') + + +def non_expiry_header(header): + return header.lower() not in ('x-delete-at', 'x-delete-after') + + +class ByteCountingReader(object): + """ + Counts bytes read from file_like so we know how big the object is that + the client just PUT. + + This is particularly important when the client sends a chunk-encoded body, + so we don't have a Content-Length header available. 
+ """ + def __init__(self, file_like): + self.file_like = file_like + self.bytes_read = 0 + + def read(self, amt=-1): + chunk = self.file_like.read(amt) + self.bytes_read += len(chunk) + return chunk + + +class ObjectVersioningContext(WSGIContext): + def __init__(self, wsgi_app, logger): + super(ObjectVersioningContext, self).__init__(wsgi_app) + self.logger = logger + + def _build_versions_object_prefix(self, object_name): + return get_reserved_name(object_name, '') + + def _build_versions_container_name(self, container_name): + return get_reserved_name('versions', container_name) + + def _build_versions_object_name(self, object_name, ts): + inv = ~Timestamp(ts) + return get_reserved_name(object_name, inv.internal) + + def _split_version_from_name(self, versioned_name): + try: + name, inv = split_reserved_name(versioned_name) + ts = ~Timestamp(inv) + except ValueError: + return versioned_name, None + return name, ts + + def _split_versions_container_name(self, versions_container): + try: + versions, container_name = split_reserved_name(versions_container) + except ValueError: + return versions_container + + if versions != 'versions': + return versions_container + + return container_name + + +class ObjectContext(ObjectVersioningContext): + + def _get_source_object(self, req, path_info): + # make a pre_auth request in case the user has write access + # to container, but not READ. This was allowed in previous version + # (i.e., before middleware) so keeping the same behavior here + get_req = make_pre_authed_request( + req.environ, path=wsgi_quote(path_info) + '?symlink=get', + headers={'X-Newest': 'True'}, method='GET', swift_source='OV') + source_resp = get_req.get_response(self.app) + + if source_resp.content_length is None or \ + source_resp.content_length > MAX_FILE_SIZE: + close_if_possible(source_resp.app_iter) + return HTTPRequestEntityTooLarge(request=req) + + return source_resp + + def _put_versioned_obj(self, req, put_path_info, source_resp): + # Create a new Request object to PUT to the versions container, copying + # all headers from the source object apart from x-timestamp. 
+ put_req = make_pre_authed_request( + req.environ, path=wsgi_quote(put_path_info), method='PUT', + headers={'X-Backend-Allow-Reserved-Names': 'true'}, + swift_source='OV') + copy_header_subset(source_resp, put_req, + lambda k: k.lower() != 'x-timestamp') + put_req.environ['wsgi.input'] = FileLikeIter(source_resp.app_iter) + slo_size = put_req.headers.get('X-Object-Sysmeta-Slo-Size') + if slo_size: + put_req.headers['Content-Type'] += '; swift_bytes=%s' % slo_size + put_req.environ['swift.content_type_overridden'] = True + put_resp = put_req.get_response(self.app) + close_if_possible(source_resp.app_iter) + return put_resp + + def _put_versioned_obj_from_client(self, req, versions_cont, api_version, + account_name, object_name): + vers_obj_name = self._build_versions_object_name( + object_name, req.timestamp.internal) + put_path_info = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, vers_obj_name) + # Consciously *do not* set swift_source here -- this req is in charge + # of reading bytes from the client, don't let it look like that data + # movement is due to some internal-to-swift thing + put_req = make_pre_authed_request( + req.environ, path=wsgi_quote(put_path_info), method='PUT', + headers={'X-Backend-Allow-Reserved-Names': 'true'}, + swift_source='OV') + # move the client request body over + # note that the WSGI environ may be *further* manipulated; hold on to + # a reference to the byte counter so we can get the bytes_read + if req.message_length() is None: + put_req.headers['transfer-encoding'] = \ + req.headers.get('transfer-encoding') + else: + put_req.content_length = req.content_length + byte_counter = ByteCountingReader(req.environ['wsgi.input']) + put_req.environ['wsgi.input'] = byte_counter + req.body = b'' + # move metadata over, including sysmeta + + copy_header_subset(req, put_req, non_expiry_header) + if 'swift.content_type_overridden' in req.environ: + put_req.environ['swift.content_type_overridden'] = \ + req.environ.pop('swift.content_type_overridden') + + # do the write + put_resp = put_req.get_response(self.app) + + if put_resp.status_int == HTTP_NOT_FOUND: + close_if_possible(put_resp.app_iter) + raise HTTPInternalServerError( + request=req, content_type='text/plain', + body=b'The versions container does not exist. You may ' + b'want to re-enable object versioning.') + + self._check_response_error(req, put_resp) + with closing_if_possible(put_resp.app_iter), closing_if_possible( + put_req.environ['wsgi.input']): + for chunk in put_resp.app_iter: + pass + put_bytes = byte_counter.bytes_read + # N.B. this is essentially the same hack that symlink does in + # _validate_etag_and_update_sysmeta to deal with SLO + slo_size = put_req.headers.get('X-Object-Sysmeta-Slo-Size') + if slo_size: + put_bytes = slo_size + put_content_type = parse_content_type( + put_req.headers['Content-Type'])[0] + + return (put_resp, vers_obj_name, put_bytes, put_content_type) + + def _put_symlink_to_version(self, req, versions_cont, put_vers_obj_name, + api_version, account_name, object_name, + put_etag, put_bytes, put_content_type): + + req.method = 'PUT' + # inch x-timestamp forward, just in case + req.ensure_x_timestamp() + req.headers['X-Timestamp'] = Timestamp( + req.timestamp, offset=1).internal + req.headers[TGT_ETAG_SYSMETA_SYMLINK_HDR] = put_etag + req.headers[TGT_BYTES_SYSMETA_SYMLINK_HDR] = put_bytes + # N.B. 
in stack mode DELETE we use content_type from listing + req.headers['Content-Type'] = put_content_type + req.headers[TGT_OBJ_SYMLINK_HDR] = wsgi_quote('%s/%s' % ( + versions_cont, put_vers_obj_name)) + req.headers[SYSMETA_VERSIONS_SYMLINK] = 'true' + req.headers[SYMLOOP_EXTEND] = 'true' + req.headers[ALLOW_RESERVED_NAMES] = 'true' + req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + not_for_symlink_headers = ( + 'ETag', 'X-If-Delete-At', TGT_ACCT_SYMLINK_HDR, + 'X-Object-Manifest', 'X-Static-Large-Object', + 'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size', + ) + for header in not_for_symlink_headers: + req.headers.pop(header, None) + + # *do* set swift_source here; this PUT is an implementation detail + req.environ['swift.source'] = 'OV' + req.body = b'' + resp = req.get_response(self.app) + resp.headers['ETag'] = put_etag + resp.headers['X-Object-Version-Id'] = self._split_version_from_name( + put_vers_obj_name)[1].internal + return resp + + def _check_response_error(self, req, resp): + """ + Raise Error Response in case of error + """ + if is_success(resp.status_int): + return + close_if_possible(resp.app_iter) + if is_client_error(resp.status_int): + # missing container or bad permissions + if resp.status_int == 404: + raise HTTPPreconditionFailed(request=req) + raise HTTPException(body=resp.body, status=resp.status, + headers=resp.headers) + # could not version the data, bail + raise HTTPServiceUnavailable(request=req) + + def _copy_current(self, req, versions_cont, api_version, account_name, + object_name): + ''' + Check if the current version of the object is a versions-symlink + if not, it's because this object was added to the container when + versioning was not enabled. We'll need to copy it into the versions + containers now. + + :param req: original request. + :param versions_cont: container where previous versions of the object + are stored. + :param api_version: api version. + :param account_name: account name. 
+ :param object_name: name of object of original request + ''' + # validate the write access to the versioned container before + # making any backend requests + if 'swift.authorize' in req.environ: + container_info = get_container_info( + req.environ, self.app, swift_source='OV') + req.acl = container_info.get('write_acl') + aresp = req.environ['swift.authorize'](req) + if aresp: + raise aresp + + get_resp = self._get_source_object(req, req.path_info) + + if get_resp.status_int == HTTP_NOT_FOUND: + # nothing to version, proceed with original request + for chunk in get_resp.app_iter: + # Should be short; just avoiding the 499 + pass + close_if_possible(get_resp.app_iter) + return get_resp + + # check for any other errors + self._check_response_error(req, get_resp) + + if get_resp.headers.get(SYSMETA_VERSIONS_SYMLINK) == 'true': + # existing object is a VW symlink; no action required + close_if_possible(get_resp.app_iter) + return get_resp + + # if there's an existing object, then copy it to + # X-Versions-Location + ts_source = get_resp.headers.get( + 'x-timestamp', + calendar.timegm(time.strptime( + get_resp.headers['last-modified'], + '%a, %d %b %Y %H:%M:%S GMT'))) + vers_obj_name = self._build_versions_object_name( + object_name, ts_source) + + put_path_info = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, vers_obj_name) + put_resp = self._put_versioned_obj(req, put_path_info, get_resp) + + if put_resp.status_int == HTTP_NOT_FOUND: + close_if_possible(put_resp.app_iter) + raise HTTPInternalServerError( + request=req, content_type='text/plain', + body=b'The versions container does not exist. You may ' + b'want to re-enable object versioning.') + + self._check_response_error(req, put_resp) + close_if_possible(put_resp.app_iter) + return put_resp + + def handle_put(self, req, versions_cont, api_version, + account_name, object_name, is_enabled): + """ + Check if the current version of the object is a versions-symlink + if not, it's because this object was added to the container when + versioning was not enabled. We'll need to copy it into the versions + containers now that versioning is enabled. + + Also, put the new data from the client into the versions container + and add a static symlink in the versioned container. + + :param req: original request. + :param versions_cont: container where previous versions of the object + are stored. + :param api_version: api version. + :param account_name: account name. + :param object_name: name of object of original request + """ + # handle object request for a disabled versioned container. + if not is_enabled: + return req.get_response(self.app) + + # attempt to copy current object to versions container + self._copy_current(req, versions_cont, api_version, account_name, + object_name) + + # write client's put directly to versioned container + req.ensure_x_timestamp() + put_resp, put_vers_obj_name, put_bytes, put_content_type = \ + self._put_versioned_obj_from_client(req, versions_cont, + api_version, account_name, + object_name) + + # and add an static symlink to original container + target_etag = put_resp.headers['Etag'] + return self._put_symlink_to_version(req, versions_cont, + put_vers_obj_name, api_version, + account_name, object_name, + target_etag, put_bytes, + put_content_type) + + def handle_delete(self, req, versions_cont, api_version, + account_name, container_name, + object_name, is_enabled): + """ + Handle DELETE requests. 
+ + Copy current version of object to versions_container and write a + delete marker before proceeding with original request. + + :param req: original request. + :param versions_cont: container where previous versions of the object + are stored. + :param api_version: api version. + :param account_name: account name. + :param object_name: name of object of original request + """ + # handle object request for a disabled versioned container. + if not is_enabled: + return req.get_response(self.app) + + self._copy_current(req, versions_cont, api_version, + account_name, object_name) + + req.ensure_x_timestamp() + marker_name = self._build_versions_object_name( + object_name, req.timestamp.internal) + marker_path = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, marker_name) + marker_headers = { + # Definitive source of truth is Content-Type, and since we add + # a swift_* param, we know users haven't set it themselves. + # This is still open to users POSTing to update the content-type + # but they're just shooting themselves in the foot then. + 'content-type': DELETE_MARKER_CONTENT_TYPE, + 'content-length': '0', + 'x-auth-token': req.headers.get('x-auth-token'), + 'X-Backend-Allow-Reserved-Names': 'true', + } + marker_req = make_pre_authed_request( + req.environ, path=wsgi_quote(marker_path), + headers=marker_headers, method='PUT', swift_source='OV') + marker_req.environ['swift.content_type_overridden'] = True + marker_resp = marker_req.get_response(self.app) + self._check_response_error(req, marker_resp) + close_if_possible(marker_resp.app_iter) + + # successfully copied and created delete marker; safe to delete + resp = req.get_response(self.app) + if resp.is_success or resp.status_int == 404: + resp.headers['X-Object-Version-Id'] = \ + self._split_version_from_name(marker_name)[1].internal + resp.headers['X-Backend-Content-Type'] = DELETE_MARKER_CONTENT_TYPE + close_if_possible(resp.app_iter) + return resp + + def handle_post(self, req, versions_cont, account): + ''' + Handle a POST request to an object in a versioned container. + + If the response is a 307 because the POST went to a symlink, + follow the symlink and send the request to the versioned object + + :param req: original request. + :param versions_cont: container where previous versions of the object + are stored. + :param account: account name. 
+ ''' + # create eventual post request before + # encryption middleware changes the request headers + post_req = make_pre_authed_request( + req.environ, path=wsgi_quote(req.path_info), method='POST', + headers={'X-Backend-Allow-Reserved-Names': 'true'}, + swift_source='OV') + copy_header_subset(req, post_req, non_expiry_header) + + # send original request + resp = req.get_response(self.app) + + # if it's a versioning symlink, send post to versioned object + if resp.status_int == 307 and config_true_value( + resp.headers.get(SYSMETA_VERSIONS_SYMLINK, 'false')): + loc = wsgi_unquote(resp.headers['Location']) + + # Only follow if the version container matches + if split_path(loc, 4, 4, True)[1:3] == [ + account, versions_cont]: + close_if_possible(resp.app_iter) + post_req.path_info = loc + resp = post_req.get_response(self.app) + return resp + + def _check_head(self, req, auth_token_header): + obj_head_headers = { + 'X-Newest': 'True', + } + obj_head_headers.update(auth_token_header) + head_req = make_pre_authed_request( + req.environ, path=wsgi_quote(req.path_info) + '?symlink=get', + method='HEAD', headers=obj_head_headers, swift_source='OV') + hresp = head_req.get_response(self.app) + head_is_tombstone = False + symlink_target = None + if hresp.status_int == HTTP_NOT_FOUND: + head_is_tombstone = True + else: + head_is_tombstone = False + # if there's any other kind of error with a broken link... + # I guess give up? + self._check_response_error(req, hresp) + if hresp.headers.get(SYSMETA_VERSIONS_SYMLINK) == 'true': + symlink_target = hresp.headers.get(TGT_OBJ_SYMLINK_HDR) + close_if_possible(hresp.app_iter) + return head_is_tombstone, symlink_target + + def handle_delete_version(self, req, versions_cont, api_version, + account_name, container_name, + object_name, is_enabled, version): + if version == 'null': + # let the request go directly through to the is_latest link + return + auth_token_header = {'X-Auth-Token': req.headers.get('X-Auth-Token')} + head_is_tombstone, symlink_target = self._check_head( + req, auth_token_header) + + versions_obj = self._build_versions_object_name( + object_name, version) + req_obj_path = '%s/%s' % (versions_cont, versions_obj) + if head_is_tombstone or not symlink_target or ( + wsgi_unquote(symlink_target) != wsgi_unquote(req_obj_path)): + # If there's no current version (i.e., tombstone or unversioned + # object) or if current version links to another version, then + # just delete the version requested to be deleted + req.path_info = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, versions_obj) + req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + if head_is_tombstone or not symlink_target: + resp_version_id = 'null' + else: + _, vers_obj_name = wsgi_unquote(symlink_target).split('/', 1) + resp_version_id = self._split_version_from_name( + vers_obj_name)[1].internal + else: + # if version-id is the latest version, delete the link too + # First, kill the link... 
+ req.environ['QUERY_STRING'] = '' + link_resp = req.get_response(self.app) + self._check_response_error(req, link_resp) + close_if_possible(link_resp.app_iter) + + # *then* the backing data + req.path_info = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, versions_obj) + req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + resp_version_id = 'null' + resp = req.get_response(self.app) + resp.headers['X-Object-Version-Id'] = version + resp.headers['X-Object-Current-Version-Id'] = resp_version_id + return resp + + def handle_put_version(self, req, versions_cont, api_version, account_name, + container, object_name, is_enabled, version): + """ + Handle a PUT?version-id request and create/update the is_latest link to + point to the specific version. Expects a valid 'version' id. + """ + if req.content_length is None: + has_body = (req.body_file.read(1) != b'') + else: + has_body = (req.content_length != 0) + if has_body: + raise HTTPBadRequest( + body='PUT version-id requests require a zero byte body', + request=req, + content_type='text/plain') + versions_obj_name = self._build_versions_object_name( + object_name, version) + versioned_obj_path = "/%s/%s/%s/%s" % ( + api_version, account_name, versions_cont, versions_obj_name) + obj_head_headers = {'X-Backend-Allow-Reserved-Names': 'true'} + head_req = make_pre_authed_request( + req.environ, path=wsgi_quote(versioned_obj_path) + '?symlink=get', + method='HEAD', headers=obj_head_headers, swift_source='OV') + head_resp = head_req.get_response(self.app) + if head_resp.status_int == HTTP_NOT_FOUND: + close_if_possible(head_resp.app_iter) + if is_success(get_container_info( + head_req.environ, self.app, swift_source='OV')['status']): + raise HTTPNotFound( + request=req, content_type='text/plain', + body=b'The specified version does not exist') + else: + raise HTTPInternalServerError( + request=req, content_type='text/plain', + body=b'The versions container does not exist. You may ' + b'want to re-enable object versioning.') + + self._check_response_error(req, head_resp) + close_if_possible(head_resp.app_iter) + + put_etag = head_resp.headers['ETag'] + put_bytes = head_resp.content_length + put_content_type = head_resp.headers['Content-Type'] + resp = self._put_symlink_to_version( + req, versions_cont, versions_obj_name, api_version, account_name, + object_name, put_etag, put_bytes, put_content_type) + return resp + + def handle_versioned_request(self, req, versions_cont, api_version, + account, container, obj, is_enabled, version): + """ + Handle 'version-id' request for object resource. When a request + contains a ``version-id=`` parameter, the request is acted upon + the actual version of that object. Version-aware operations + require that the container is versioned, but do not require that + the versioning is currently enabled. Users should be able to + operate on older versions of an object even if versioning is + currently suspended. + + PUT and POST requests are not allowed as that would overwrite + the contents of the versioned object. 
+ + :param req: The original request + :param versions_cont: container holding versions of the requested obj + :param api_version: should be v1 unless swift bumps api version + :param account: account name string + :param container: container name string + :param object: object name string + :param is_enabled: is versioning currently enabled + :param version: version of the object to act on + """ + # ?version-id requests are allowed for GET, HEAD, DELETE reqs + if req.method == 'POST': + raise HTTPBadRequest( + '%s to a specific version is not allowed' % req.method, + request=req) + elif not versions_cont and version != 'null': + raise HTTPBadRequest( + 'version-aware operations require that the container is ' + 'versioned', request=req) + if version != 'null': + try: + Timestamp(version) + except ValueError: + raise HTTPBadRequest('Invalid version parameter', request=req) + + if req.method == 'DELETE': + return self.handle_delete_version( + req, versions_cont, api_version, account, + container, obj, is_enabled, version) + elif req.method == 'PUT': + return self.handle_put_version( + req, versions_cont, api_version, account, + container, obj, is_enabled, version) + if version == 'null': + resp = req.get_response(self.app) + if resp.is_success: + if get_reserved_name('versions', '') in wsgi_unquote( + resp.headers.get('Content-Location', '')): + # Have a latest version, but it's got a real version-id. + # Since the user specifically asked for null, return 404 + close_if_possible(resp.app_iter) + raise HTTPNotFound(request=req) + resp.headers['X-Object-Version-Id'] = 'null' + if req.method == 'HEAD': + close_if_possible(resp.app_iter) + return resp + else: + # Re-write the path; most everything else goes through normally + req.path_info = "/%s/%s/%s/%s" % ( + api_version, account, versions_cont, + self._build_versions_object_name(obj, version)) + req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + + resp = req.get_response(self.app) + if resp.is_success: + resp.headers['X-Object-Version-Id'] = version + + # Well, except for some delete marker business... 
+ is_del_marker = DELETE_MARKER_CONTENT_TYPE == resp.headers.get( + 'X-Backend-Content-Type', resp.headers['Content-Type']) + + if req.method == 'HEAD': + close_if_possible(resp.app_iter) + + if is_del_marker: + hdrs = {'X-Object-Version-Id': version, + 'Content-Type': DELETE_MARKER_CONTENT_TYPE} + raise HTTPNotFound(request=req, headers=hdrs) + return resp + + def handle_request(self, req, versions_cont, api_version, account, + container, obj, is_enabled): + if req.method == 'PUT': + return self.handle_put( + req, versions_cont, api_version, account, obj, + is_enabled) + elif req.method == 'POST': + return self.handle_post(req, versions_cont, account) + elif req.method == 'DELETE': + return self.handle_delete( + req, versions_cont, api_version, account, + container, obj, is_enabled) + + # GET/HEAD/OPTIONS + resp = req.get_response(self.app) + + resp.headers['X-Object-Version-Id'] = 'null' + # Check for a "real" version + loc = wsgi_unquote(resp.headers.get('Content-Location', '')) + if loc: + _, acct, cont, version_obj = split_path(loc, 4, 4, True) + if acct == account and cont == versions_cont: + _, version = self._split_version_from_name(version_obj) + if version is not None: + resp.headers['X-Object-Version-Id'] = version.internal + content_loc = wsgi_quote('/%s/%s/%s/%s' % ( + api_version, account, container, obj, + )) + '?version-id=%s' % (version.internal,) + resp.headers['Content-Location'] = content_loc + symlink_target = wsgi_unquote(resp.headers.get('X-Symlink-Target', '')) + if symlink_target: + cont, version_obj = split_path('/%s' % symlink_target, 2, 2, True) + if cont == versions_cont: + _, version = self._split_version_from_name(version_obj) + if version is not None: + resp.headers['X-Object-Version-Id'] = version.internal + symlink_target = wsgi_quote('%s/%s' % (container, obj)) + \ + '?version-id=%s' % (version.internal,) + resp.headers['X-Symlink-Target'] = symlink_target + return resp + + +class ContainerContext(ObjectVersioningContext): + def handle_request(self, req, start_response): + """ + Handle request for container resource. + + On PUT, POST set version location and enabled flag sysmeta. + For container listings of a versioned container, update the object's + bytes and etag to use the target's instead of using the symlink info. 
+ """ + app_resp = self._app_call(req.environ) + _, account, container, _ = req.split_path(3, 4, True) + location = '' + curr_bytes = 0 + bytes_idx = -1 + for i, (header, value) in enumerate(self._response_headers): + if header == 'X-Container-Bytes-Used': + curr_bytes = value + bytes_idx = i + if header.lower() == SYSMETA_VERSIONS_CONT: + location = value + if header.lower() == SYSMETA_VERSIONS_ENABLED: + self._response_headers.extend([ + (CLIENT_VERSIONS_ENABLED.title(), value)]) + + if location: + location = wsgi_unquote(location) + + # update bytes header + if bytes_idx > -1: + head_req = make_pre_authed_request( + req.environ, method='HEAD', swift_source='OV', + path=wsgi_quote('/v1/%s/%s' % (account, location)), + headers={'X-Backend-Allow-Reserved-Names': 'true'}) + vresp = head_req.get_response(self.app) + if vresp.is_success: + ver_bytes = vresp.headers.get('X-Container-Bytes-Used', 0) + self._response_headers[bytes_idx] = ( + 'X-Container-Bytes-Used', + str(int(curr_bytes) + int(ver_bytes))) + close_if_possible(vresp.app_iter) + elif is_success(self._get_status_int()): + # If client is doing a version-aware listing for a container that + # (as best we could tell) has never had versioning enabled, + # err on the side of there being data anyway -- the metadata we + # found may not be the most up-to-date. + + # Note that any extra listing request we make will likely 404. + try: + location = self._build_versions_container_name(container) + except ValueError: + # may be internal listing to a reserved namespace container + pass + # else, we won't need location anyway + + if is_success(self._get_status_int()) and req.method == 'GET': + with closing_if_possible(app_resp): + body = b''.join(app_resp) + try: + listing = json.loads(body) + except ValueError: + app_resp = [body] + else: + for item in listing: + if not all(x in item for x in ( + 'symlink_path', + 'symlink_etag', + 'symlink_bytes')): + continue + path = wsgi_unquote(bytes_to_wsgi( + item['symlink_path'].encode('utf-8'))) + _, tgt_acct, tgt_container, tgt_obj = split_path( + path, 4, 4, True) + if tgt_container != location: + # if the archive container changed, leave the extra + # info unmodified + continue + _, meta = parse_header(item['hash']) + tgt_bytes = int(item.pop('symlink_bytes')) + item['bytes'] = tgt_bytes + item['version_symlink'] = True + item['hash'] = item.pop('symlink_etag') + ''.join( + '; %s=%s' % (k, v) for k, v in meta.items()) + tgt_obj, version = self._split_version_from_name(tgt_obj) + if version is not None and 'versions' not in req.params: + sp = wsgi_quote('/v1/%s/%s/%s' % ( + tgt_acct, container, tgt_obj, + )) + '?version-id=' + version.internal + item['symlink_path'] = sp + + if 'versions' in req.params: + return self._list_versions( + req, start_response, location, + listing) + + body = json.dumps(listing).encode('ascii') + self.update_content_length(len(body)) + app_resp = [body] + + start_response(self._response_status, + self._response_headers, + self._response_exc_info) + return app_resp + + def handle_delete(self, req, start_response): + """ + Handle request to delete a user's container. + + As part of deleting a container, this middleware will also delete + the hidden container holding object versions. + + Before a user's container can be deleted, swift must check + if there are still old object versions from that container. + Only after disabling versioning and deleting *all* object versions + can a container be deleted. 
+ """ + container_info = get_container_info(req.environ, self.app, + swift_source='OV') + + versions_cont = unquote(container_info.get( + 'sysmeta', {}).get('versions-container', '')) + + if versions_cont: + account = req.split_path(3, 3, True)[1] + # using a HEAD request here as opposed to get_container_info + # to make sure we get an up-to-date value + versions_req = make_pre_authed_request( + req.environ, method='HEAD', swift_source='OV', + path=wsgi_quote('/v1/%s/%s' % ( + account, str_to_wsgi(versions_cont))), + headers={'X-Backend-Allow-Reserved-Names': 'true'}) + vresp = versions_req.get_response(self.app) + close_if_possible(vresp.app_iter) + if vresp.is_success and int(vresp.headers.get( + 'X-Container-Object-Count', 0)) > 0: + raise HTTPConflict( + 'Delete all versions before deleting container.', + request=req) + elif not vresp.is_success and vresp.status_int != 404: + raise HTTPInternalServerError( + 'Error deleting versioned container') + else: + versions_req.method = 'DELETE' + resp = versions_req.get_response(self.app) + close_if_possible(resp.app_iter) + if not is_success(resp.status_int) and resp.status_int != 404: + raise HTTPInternalServerError( + 'Error deleting versioned container') + + app_resp = self._app_call(req.environ) + + start_response(self._response_status, + self._response_headers, + self._response_exc_info) + return app_resp + + def enable_versioning(self, req, start_response): + container_info = get_container_info(req.environ, self.app, + swift_source='OV') + + # if container is already configured to use old style versioning, + # we don't allow user to enable object versioning here. They must + # choose which middleware to use, only one style of versioning + # is supported for a given container + versions_cont = container_info.get( + 'sysmeta', {}).get('versions-location') + legacy_versions_cont = container_info.get('versions') + if versions_cont or legacy_versions_cont: + raise HTTPBadRequest( + 'Cannot enable object versioning on a container ' + 'that is already using the legacy versioned writes ' + 'feature.', + request=req) + + # versioning and container-sync do not yet work well together + # container-sync needs to be enhanced to sync previous versions + sync_to = container_info.get('sync_to') + if sync_to: + raise HTTPBadRequest( + 'Cannot enable object versioning on a container ' + 'configured as source of container syncing.', + request=req) + + versions_cont = container_info.get( + 'sysmeta', {}).get('versions-container') + is_enabled = config_true_value( + req.headers[CLIENT_VERSIONS_ENABLED]) + + req.headers[SYSMETA_VERSIONS_ENABLED] = is_enabled + + # TODO: a POST request to a primary container that doesn't exist + # will fail, so we will create and delete the versions container + # for no reason + if config_true_value(is_enabled): + (version, account, container, _) = req.split_path(3, 4, True) + + # Attempt to use same policy as primary container, otherwise + # use default policy + if is_success(container_info['status']): + primary_policy_idx = container_info['storage_policy'] + if POLICIES[primary_policy_idx].is_deprecated: + # Do an auth check now, so we don't leak information + # about the container + aresp = req.environ['swift.authorize'](req) + if aresp: + raise aresp + + # Proxy controller would catch the deprecated policy, too, + # but waiting until then would mean the error message + # would be a generic "Error enabling object versioning". 
+ raise HTTPBadRequest( + 'Cannot enable object versioning on a container ' + 'that uses a deprecated storage policy.', + request=req) + hdrs = {'X-Storage-Policy': POLICIES[primary_policy_idx].name} + else: + if req.method == 'PUT' and \ + 'X-Storage-Policy' in req.headers: + hdrs = {'X-Storage-Policy': + req.headers['X-Storage-Policy']} + else: + hdrs = {} + hdrs['X-Backend-Allow-Reserved-Names'] = 'true' + + versions_cont = self._build_versions_container_name(container) + versions_cont_path = "/%s/%s/%s" % ( + version, account, versions_cont) + ver_cont_req = make_pre_authed_request( + req.environ, path=wsgi_quote(versions_cont_path), + method='PUT', headers=hdrs, swift_source='OV') + resp = ver_cont_req.get_response(self.app) + # Should always be short; consume the body + for chunk in resp.app_iter: + pass + close_if_possible(resp.app_iter) + if is_success(resp.status_int) or resp.status_int == HTTP_CONFLICT: + req.headers[SYSMETA_VERSIONS_CONT] = wsgi_quote(versions_cont) + else: + raise HTTPInternalServerError( + 'Error enabling object versioning') + + # make original request + app_resp = self._app_call(req.environ) + + # if we just created a versions container but the original + # request failed, delete the versions container + # and let user retry later + if not is_success(self._get_status_int()) and \ + SYSMETA_VERSIONS_CONT in req.headers: + versions_cont_path = "/%s/%s/%s" % ( + version, account, versions_cont) + ver_cont_req = make_pre_authed_request( + req.environ, path=wsgi_quote(versions_cont_path), + method='DELETE', headers=hdrs, swift_source='OV') + + # TODO: what if this one fails?? + resp = ver_cont_req.get_response(self.app) + close_if_possible(resp.app_iter) + + if self._response_headers is None: + self._response_headers = [] + for key, val in self._response_headers: + if key.lower() == SYSMETA_VERSIONS_ENABLED: + self._response_headers.extend([ + (CLIENT_VERSIONS_ENABLED.title(), val)]) + + start_response(self._response_status, + self._response_headers, + self._response_exc_info) + return app_resp + + def _list_versions(self, req, start_response, location, primary_listing): + # Only supports JSON listings + req.environ['swift.format_listing'] = False + if not req.accept.best_match(['application/json']): + raise HTTPNotAcceptable(request=req) + + params = req.params + if 'version_marker' in params: + if 'marker' not in params: + raise HTTPBadRequest('version_marker param requires marker') + + if params['version_marker'] != 'null': + try: + ts = Timestamp(params.pop('version_marker')) + except ValueError: + raise HTTPBadRequest('invalid version_marker param') + + params['marker'] = self._build_versions_object_name( + params['marker'], ts) + elif 'marker' in params: + params['marker'] = self._build_versions_object_prefix( + params['marker']) + ':' # just past all numbers + + delim = params.get('delimiter', '') + # Exclude the set of chars used in version_id from user delimiters + if set(delim).intersection('0123456789.%s' % RESERVED_STR): + raise HTTPBadRequest('invalid delimiter param') + + null_listing = [] + subdir_set = set() + current_versions = {} + is_latest_set = set() + for item in primary_listing: + if 'name' not in item: + subdir_set.add(item['subdir']) + else: + if item.get('version_symlink'): + path = wsgi_to_str(wsgi_unquote(bytes_to_wsgi( + item['symlink_path'].encode('utf-8')))) + current_versions[path] = item + else: + null_listing.append(dict( + item, version_id='null', is_latest=True)) + is_latest_set.add(item['name']) + + account = req.split_path(3, 
3, True)[1] + versions_req = make_pre_authed_request( + req.environ, method='GET', swift_source='OV', + path=wsgi_quote('/v1/%s/%s' % (account, location)), + headers={'X-Backend-Allow-Reserved-Names': 'true'}, + ) + # NB: Not using self._build_versions_object_name here because + # we don't want to bookend the prefix with RESERVED_NAME as user + # could be using just part of object name as the prefix. + if 'prefix' in params: + params['prefix'] = get_reserved_name(params['prefix']) + + # NB: no end_marker support (yet) + versions_req.params = { + k: params.get(k, '') + for k in ('prefix', 'marker', 'limit', 'delimiter', 'reverse')} + versions_resp = versions_req.get_response(self.app) + + if versions_resp.status_int == HTTP_NOT_FOUND: + subdir_listing = [{'subdir': s} for s in subdir_set] + broken_listing = [] + for item in current_versions.values(): + linked_name = wsgi_to_str(wsgi_unquote(bytes_to_wsgi( + item['symlink_path'].encode('utf8')))).split('/', 4)[-1] + name, ts = self._split_version_from_name(linked_name) + if ts is None: + continue + name = name.decode('utf8') if six.PY2 else name + is_latest = False + if name not in is_latest_set: + is_latest_set.add(name) + is_latest = True + broken_listing.append({ + 'name': name, + 'is_latest': is_latest, + 'version_id': ts.internal, + 'content_type': item['content_type'], + 'bytes': item['bytes'], + 'hash': item['hash'], + 'last_modified': item['last_modified'], + }) + body = build_listing( + null_listing, subdir_listing, broken_listing, + reverse=config_true_value(params.get('reverse', 'no'))) + self.update_content_length(len(body)) + app_resp = [body] + close_if_possible(versions_resp.app_iter) + elif is_success(versions_resp.status_int): + try: + listing = json.loads(versions_resp.body) + except ValueError: + app_resp = [body] + else: + versions_listing = [] + for item in listing: + if 'name' not in item: + # remove reserved chars from subdir + subdir = split_reserved_name(item['subdir'])[0] + subdir_set.add(subdir) + else: + name, ts = self._split_version_from_name(item['name']) + if ts is None: + continue + path = '/v1/%s/%s/%s' % ( + wsgi_to_str(account), + wsgi_to_str(location), + item['name'].encode('utf8') + if six.PY2 else item['name']) + + if path in current_versions: + item['is_latest'] = True + is_latest_set.add(name) + del current_versions[path] + elif (item['content_type'] == + DELETE_MARKER_CONTENT_TYPE + and name not in is_latest_set): + item['is_latest'] = True + is_latest_set.add(name) + else: + item['is_latest'] = False + + item['name'] = name + item['version_id'] = ts.internal + versions_listing.append(item) + + subdir_listing = [{'subdir': s} for s in subdir_set] + broken_listing = [] + for item in current_versions.values(): + link_path = wsgi_to_str(wsgi_unquote(bytes_to_wsgi( + item['symlink_path'].encode('utf-8')))) + name, ts = self._split_version_from_name( + link_path.split('/', 1)[1]) + if ts is None: + continue + broken_listing.append({ + 'name': name.decode('utf8') if six.PY2 else name, + 'is_latest': True, + 'version_id': ts.internal, + 'content_type': item['content_type'], + 'bytes': item['bytes'], + 'hash': item['hash'], + 'last_modified': item['last_modified'], + }) + + body = build_listing( + null_listing, versions_listing, + subdir_listing, broken_listing, + reverse=config_true_value(params.get('reverse', 'no'))) + self.update_content_length(len(body)) + app_resp = [body] + else: + return versions_resp(versions_req.environ, start_response) + + start_response(self._response_status, + 
self._response_headers, + self._response_exc_info) + return app_resp + + +class AccountContext(ObjectVersioningContext): + def list_containers(self, req, api_version, account, start_response): + app_resp = self._app_call(req.environ) + + if is_success(self._get_status_int()): + with closing_if_possible(app_resp): + body = b''.join(app_resp) + try: + listing = json.loads(body) + except ValueError: + app_resp = [body] + else: + # list hidden versions containers + # It might be necessary to issue multiple listing requests + # because of paging limitations, hence the while loop. + params = req.params + versions_dict = {} + versions_req = make_pre_authed_request( + req.environ, method='GET', swift_source='OV', + path=wsgi_quote('/v1/%s' % account), + headers={'X-Backend-Allow-Reserved-Names': 'true'}, + ) + if 'prefix' in params: + try: + params['prefix'] = \ + self._build_versions_container_name( + params['prefix']) + except ValueError: + # don't touch params['prefix'], + # RESERVED_STR probably came from looping around + pass + else: + params['prefix'] = get_reserved_name('versions') + + for p in ('marker', 'end_marker'): + if p in params: + try: + params[p] = \ + self._build_versions_container_name( + params[p]) + except ValueError: + # don't touch params[p] + pass + + versions_req.params = params + versions_resp = versions_req.get_response(self.app) + try: + versions_listing = json.loads(versions_resp.body) + except ValueError: + close_if_possible(versions_resp.app_iter) + versions_listing = [] + else: + close_if_possible(versions_resp.app_iter) + + # create a dict from versions listing to facilitate + # look-up by name. Ignore 'subdir' items + for item in [item for item in versions_listing + if 'name' in item]: + name = self._split_versions_container_name( + item['name']) + container_name = bytes_to_wsgi(name.encode('utf-8')) + versions_dict[container_name] = item + + # update bytes from original listing with bytes from + # versions cont + if len(versions_dict) > 0: + # ignore 'subdir' items + for item in [item for item in listing if 'name' in item]: + container_name = bytes_to_wsgi( + item['name'].encode('utf-8')) + if container_name in versions_dict: + v_info = versions_dict.pop(container_name) + item['bytes'] = item['bytes'] + v_info['bytes'] + + # if there are items left in versions_dict, it indicates an + # error scenario where there are orphan hidden containers + # (possibly storing data) that should have been deleted + # along with the primary container. 
In this case, let's add + # those containers to listing so users can be aware and + # clean them up + for key, item in versions_dict.items(): + item['name'] = key + item['count'] = 0 # None of these are current + listing.append(item) + + body = build_listing( + listing, + reverse=config_true_value(params.get('reverse', 'no'))) + self.update_content_length(len(body)) + app_resp = [body] + + start_response(self._response_status, + self._response_headers, + self._response_exc_info) + return app_resp + + +class ObjectVersioningMiddleware(object): + + def __init__(self, app, conf): + self.app = app + self.conf = conf + self.logger = get_logger(conf, log_route='object_versioning') + + def account_request(self, req, api_version, account, start_response): + account_ctx = AccountContext(self.app, self.logger) + if req.method == 'GET': + return account_ctx.list_containers( + req, api_version, account, start_response) + else: + return self.app(req.environ, start_response) + + def container_request(self, req, start_response): + container_ctx = ContainerContext(self.app, self.logger) + if req.method in ('PUT', 'POST') and \ + CLIENT_VERSIONS_ENABLED in req.headers: + return container_ctx.enable_versioning(req, start_response) + elif req.method == 'DELETE': + return container_ctx.handle_delete(req, start_response) + + # send request and translate sysmeta headers from response + return container_ctx.handle_request(req, start_response) + + def object_request(self, req, api_version, account, container, obj): + """ + Handle request for object resource. + + Note that account, container, obj should be unquoted by caller + if the url path is under url encoding (e.g. %FF) + + :param req: swift.common.swob.Request instance + :param api_version: should be v1 unless swift bumps api version + :param account: account name string + :param container: container name string + :param object: object name string + """ + resp = None + container_info = get_container_info( + req.environ, self.app, swift_source='OV') + + versions_cont = container_info.get( + 'sysmeta', {}).get('versions-container', '') + is_enabled = config_true_value(container_info.get( + 'sysmeta', {}).get('versions-enabled')) + + if versions_cont: + versions_cont = wsgi_unquote(str_to_wsgi( + versions_cont)).split('/')[0] + + if req.params.get('version-id'): + vw_ctx = ObjectContext(self.app, self.logger) + resp = vw_ctx.handle_versioned_request( + req, versions_cont, api_version, account, container, obj, + is_enabled, req.params['version-id']) + elif versions_cont: + # handle object request for a enabled versioned container + vw_ctx = ObjectContext(self.app, self.logger) + resp = vw_ctx.handle_request( + req, versions_cont, api_version, account, container, obj, + is_enabled) + + if resp: + return resp + else: + return self.app + + def __call__(self, env, start_response): + req = Request(env) + try: + (api_version, account, container, obj) = req.split_path(2, 4, True) + bad_path = False + except ValueError: + bad_path = True + + # use of bad_path bool is to avoid recursive tracebacks + if bad_path or not valid_api_version(api_version): + return self.app(env, start_response) + + try: + if not container: + return self.account_request(req, api_version, account, + start_response) + if container and not obj: + return self.container_request(req, start_response) + else: + return self.object_request( + req, api_version, account, container, + obj)(env, start_response) + except HTTPException as error_response: + return error_response(env, start_response) diff --git 
a/swift/common/swob.py b/swift/common/swob.py index 9dd89fedb1..61b66793c4 100644 --- a/swift/common/swob.py +++ b/swift/common/swob.py @@ -1012,6 +1012,33 @@ class Request(object): self.query_string = urllib.parse.urlencode(param_pairs, encoding='latin-1') + def ensure_x_timestamp(self): + """ + Similar to :attr:`timestamp`, but the ``X-Timestamp`` header will be + set if not present. + + :raises HTTPBadRequest: if X-Timestamp is already set but not a valid + :class:`~swift.common.utils.Timestamp` + :returns: the request's X-Timestamp header, + as a :class:`~swift.common.utils.Timestamp` + """ + # The container sync feature includes an x-timestamp header with + # requests. If present this is checked and preserved, otherwise a fresh + # timestamp is added. + if 'HTTP_X_TIMESTAMP' in self.environ: + try: + self._timestamp = Timestamp(self.environ['HTTP_X_TIMESTAMP']) + except ValueError: + raise HTTPBadRequest( + request=self, content_type='text/plain', + body='X-Timestamp should be a UNIX timestamp float value; ' + 'was %r' % self.environ['HTTP_X_TIMESTAMP']) + else: + self._timestamp = Timestamp.now() + # Always normalize it to the internal form + self.environ['HTTP_X_TIMESTAMP'] = self._timestamp.internal + return self._timestamp + @property def timestamp(self): """ diff --git a/swift/common/utils.py b/swift/common/utils.py index de2fa4f13f..392312800d 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -1376,6 +1376,11 @@ class Timestamp(object): def __hash__(self): return hash(self.internal) + def __invert__(self): + if self.offset: + raise ValueError('Cannot invert timestamps with offsets') + return Timestamp((999999999999999 - self.raw) * PRECISION) + def encode_timestamps(t1, t2=None, t3=None, explicit=False): """ diff --git a/swift/container/backend.py b/swift/container/backend.py index f6ca5130a7..0a18fe48f8 100644 --- a/swift/container/backend.py +++ b/swift/container/backend.py @@ -1236,7 +1236,7 @@ class ContainerBroker(DatabaseBroker): limit, marker, end_marker, prefix=None, delimiter=None, path=None, reverse=False, include_deleted=include_deleted, transform_func=self._record_to_dict, since_row=since_row, - all_policies=True + all_policies=True, allow_reserved=True ) def _transform_record(self, record): diff --git a/swift/container/sync.py b/swift/container/sync.py index 92c3d740c0..fcb7f18bcf 100644 --- a/swift/container/sync.py +++ b/swift/container/sync.py @@ -44,6 +44,8 @@ from swift.common.utils import ( from swift.common.daemon import Daemon from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND from swift.common.wsgi import ConfigString +from swift.common.middleware.versioned_writes.object_versioning import ( + SYSMETA_VERSIONS_CONT, SYSMETA_VERSIONS_SYMLINK) # The default internal client config body is to support upgrades without @@ -358,6 +360,13 @@ class ContainerSync(Daemon): break else: return + if broker.metadata.get(SYSMETA_VERSIONS_CONT): + self.container_skips += 1 + self.logger.increment('skips') + self.logger.warning('Skipping container %s/%s with ' + 'object versioning configured' % ( + info['account'], info['container'])) + return if not broker.is_deleted(): sync_to = None user_key = None @@ -594,6 +603,16 @@ class ContainerSync(Daemon): headers = {} body = None exc = err + + # skip object_versioning links; this is in case the container + # metadata is out of date + if headers.get(SYSMETA_VERSIONS_SYMLINK): + self.logger.info( + 'Skipping versioning symlink %s/%s/%s ' % ( + info['account'], info['container'], + row['name'])) + return 
True + timestamp = Timestamp(headers.get('x-timestamp', 0)) if timestamp < ts_meta: if exc: diff --git a/swift/obj/server.py b/swift/obj/server.py index fdb413b44a..5a883e4c1d 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -764,8 +764,9 @@ class ObjectController(BaseStorageServer): 'PUT', account, container, obj, request, update_headers, device, policy) - # Add sysmeta to response - resp_headers = {} + # Add current content-type and sysmeta to response + resp_headers = { + 'X-Backend-Content-Type': content_type_headers['Content-Type']} for key, value in orig_metadata.items(): if is_sys_meta('object', key): resp_headers[key] = value @@ -1276,7 +1277,9 @@ class ObjectController(BaseStorageServer): device, policy) return response_class( request=request, - headers={'X-Backend-Timestamp': response_timestamp.internal}) + headers={'X-Backend-Timestamp': response_timestamp.internal, + 'X-Backend-Content-Type': orig_metadata.get( + 'Content-Type', '')}) @public @replication diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py index 831864aba1..8065006a78 100644 --- a/swift/proxy/controllers/base.py +++ b/swift/proxy/controllers/base.py @@ -57,7 +57,8 @@ from swift.common.http import is_informational, is_success, is_redirection, \ HTTP_INSUFFICIENT_STORAGE, HTTP_UNAUTHORIZED, HTTP_CONTINUE, HTTP_GONE from swift.common.swob import Request, Response, Range, \ HTTPException, HTTPRequestedRangeNotSatisfiable, HTTPServiceUnavailable, \ - status_map, wsgi_to_str, str_to_wsgi, wsgi_quote, normalize_etag + status_map, wsgi_to_str, str_to_wsgi, wsgi_quote, wsgi_unquote, \ + normalize_etag from swift.common.request_helpers import strip_sys_meta_prefix, \ strip_user_meta_prefix, is_user_meta, is_sys_meta, is_sys_or_user_meta, \ http_response_to_document_iters, is_object_transient_sysmeta, \ @@ -396,6 +397,17 @@ def get_container_info(env, app, swift_source=None): if info.get('sharding_state') is None: info['sharding_state'] = 'unsharded' + versions_cont = info.get('sysmeta', {}).get('versions-container', '') + if versions_cont: + versions_cont = wsgi_unquote(str_to_wsgi( + versions_cont)).split('/')[0] + versions_req = _prepare_pre_auth_info_request( + env, ("/%s/%s/%s" % (version, wsgi_account, versions_cont)), + (swift_source or 'GET_CONTAINER_INFO')) + versions_req.headers['X-Backend-Allow-Reserved-Names'] = 'true' + versions_info = get_container_info(versions_req.environ, app) + info['bytes'] = info['bytes'] + versions_info['bytes'] + return info diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index 9a5b347012..7609556f90 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -303,7 +303,7 @@ class BaseObjectController(Controller): if error_response: return error_response - req.headers['X-Timestamp'] = Timestamp.now().internal + req.ensure_x_timestamp() req, delete_at_container, delete_at_part, \ delete_at_nodes = self._config_obj_expiration(req) @@ -547,23 +547,6 @@ class BaseObjectController(Controller): if detect_content_type: req.headers.pop('x-detect-content-type') - def _update_x_timestamp(self, req): - # The container sync feature includes an x-timestamp header with - # requests. If present this is checked and preserved, otherwise a fresh - # timestamp is added. 
- if 'x-timestamp' in req.headers: - try: - req_timestamp = Timestamp(req.headers['X-Timestamp']) - except ValueError: - raise HTTPBadRequest( - request=req, content_type='text/plain', - body='X-Timestamp should be a UNIX timestamp float value; ' - 'was %r' % req.headers['x-timestamp']) - req.headers['X-Timestamp'] = req_timestamp.internal - else: - req.headers['X-Timestamp'] = Timestamp.now().internal - return None - def _check_failure_put_connections(self, putters, req, min_conns): """ Identify any failed connections and check minimum connection count. @@ -785,7 +768,7 @@ class BaseObjectController(Controller): # update content type in case it is missing self._update_content_type(req) - self._update_x_timestamp(req) + req.ensure_x_timestamp() # check constraints on object name and request headers error_response = check_object_creation(req, self.object_name) or \ @@ -845,7 +828,7 @@ class BaseObjectController(Controller): partition, nodes = obj_ring.get_nodes( self.account_name, self.container_name, self.object_name) - self._update_x_timestamp(req) + req.ensure_x_timestamp() # Include local handoff nodes if write-affinity is enabled. node_count = len(nodes) diff --git a/test/functional/__init__.py b/test/functional/__init__.py index a437e021e0..660f73078e 100644 --- a/test/functional/__init__.py +++ b/test/functional/__init__.py @@ -327,6 +327,7 @@ def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs): if not six.PY2: root_secret = root_secret.decode('ascii') conf.set('filter:keymaster', 'encryption_root_secret', root_secret) + conf.set('filter:versioned_writes', 'allow_object_versioning', 'true') except NoSectionError as err: msg = 'Error problem with proxy conf file %s: %s' % \ (proxy_conf_file, err) @@ -456,6 +457,8 @@ def _load_s3api(proxy_conf_file, swift_conf_file, **kwargs): "s3api tempauth") conf.set(section, 'pipeline', pipeline) conf.set('filter:s3api', 's3_acl', 'true') + + conf.set('filter:versioned_writes', 'allow_object_versioning', 'true') except NoSectionError as err: msg = 'Error problem with proxy conf file %s: %s' % \ (proxy_conf_file, err) diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py index bd1b42f5ba..8cac6b1b3a 100644 --- a/test/functional/swift_test_client.py +++ b/test/functional/swift_test_client.py @@ -699,7 +699,7 @@ class Container(Base): if cfg is None: cfg = {} format_type = parms.get('format', None) - if format_type not in [None, 'json', 'xml']: + if format_type not in [None, 'plain', 'json', 'xml']: raise RequestError('Invalid format: %s' % format_type) if format_type is None and 'format' in parms: del parms['format'] @@ -707,12 +707,13 @@ class Container(Base): status = self.conn.make_request('GET', self.path, hdrs=hdrs, parms=parms, cfg=cfg) if status == 200: - if format_type == 'json': + if format_type == 'json' or 'versions' in parms: files = json.loads(self.conn.response.read()) if six.PY2: for file_item in files: - for key in ('name', 'subdir', 'content_type'): + for key in ('name', 'subdir', 'content_type', + 'version_id'): if key in file_item: file_item[key] = file_item[key].encode('utf-8') return files @@ -785,8 +786,10 @@ class Container(Base): # versioning is enabled. 
['versions', 'x-versions-location'], ['versions', 'x-history-location'], + ['versions_enabled', 'x-versions-enabled'], ['tempurl_key', 'x-container-meta-temp-url-key'], - ['tempurl_key2', 'x-container-meta-temp-url-key-2']] + ['tempurl_key2', 'x-container-meta-temp-url-key-2'], + ['container_quota_bytes', 'x-container-meta-quota-bytes']] return self.header_fields(required_fields, optional_fields) @@ -853,7 +856,8 @@ class File(Base): data.seek(0) return checksum.hexdigest() - def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None): + def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None, + return_resp=False): if hdrs is None: hdrs = {} if parms is None: @@ -875,6 +879,8 @@ class File(Base): cfg=cfg, parms=parms) != 201: raise ResponseError(self.conn.response, 'COPY', self.conn.make_path(self.path)) + if return_resp: + return self.conn.response return True def copy_account(self, dest_account, dest_cont, dest_file, @@ -942,6 +948,8 @@ class File(Base): ['last_modified', 'last-modified'], ['etag', 'etag']] optional_fields = [['x_object_manifest', 'x-object-manifest'], + ['x_manifest_etag', 'x-manifest-etag'], + ['x_object_version_id', 'x-object-version-id'], ['x_symlink_target', 'x-symlink-target']] header_fields = self.header_fields(fields, diff --git a/test/functional/test_object_versioning.py b/test/functional/test_object_versioning.py new file mode 100644 index 0000000000..5548f85f47 --- /dev/null +++ b/test/functional/test_object_versioning.py @@ -0,0 +1,2669 @@ +#!/usr/bin/python -u +# Copyright (c) 2010-2012 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
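
The functional tests that follow exercise the new API end to end. As a rough
illustration of the flow they depend on (this is not code from the test suite;
the storage URL and token are placeholders): enable versioning on a container,
write objects and note the X-Object-Version-Id response header, list versions
with ?versions, and read a specific version with ?version-id.

    import requests

    STORAGE_URL = 'http://127.0.0.1:8080/v1/AUTH_test'  # placeholder account URL
    HDRS = {'X-Auth-Token': '<auth token>'}             # placeholder token
    container = STORAGE_URL + '/demo-container'         # placeholder name

    # enable versioning when creating the container (or later, via POST)
    requests.put(container, headers=dict(HDRS, **{'X-Versions-Enabled': 'True'}))

    # every write now returns an X-Object-Version-Id header
    v1 = requests.put(container + '/obj', data=b'version1', headers=HDRS)
    v1_id = v1.headers['X-Object-Version-Id']
    requests.put(container + '/obj', data=b'version2', headers=HDRS)

    # ?versions lists all versions, newest first, with version_id and is_latest
    listing = requests.get(container, headers=HDRS,
                           params={'format': 'json', 'versions': ''}).json()
    assert [item['is_latest'] for item in listing] == [True, False]

    # ?version-id reads (or deletes) one specific version
    old = requests.get(container + '/obj', headers=HDRS,
                       params={'version-id': v1_id})
    assert old.content == b'version1'
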
+ +import hashlib +import hmac +import json +import time +import six + +from copy import deepcopy +from hashlib import md5 +from six.moves.urllib.parse import quote, unquote + +import test.functional as tf + +from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value +from swift.common.middleware.versioned_writes.object_versioning import \ + DELETE_MARKER_CONTENT_TYPE + +from test.functional.tests import Base, Base2, BaseEnv, Utils +from test.functional import cluster_info, SkipTest +from test.functional.swift_test_client import Connection, \ + ResponseError +from test.functional.test_tempurl import TestContainerTempurlEnv, \ + TestTempurlEnv + + +def setUpModule(): + tf.setup_package() + + +def tearDownModule(): + tf.teardown_package() + + +class TestObjectVersioningEnv(BaseEnv): + account2 = None + versions_header_key = 'X-Versions-Enabled' + + @classmethod + def setUp(cls): + super(TestObjectVersioningEnv, cls).setUp() + + if not tf.skip2: + # Second connection for ACL tests + config2 = deepcopy(tf.config) + config2['account'] = tf.config['account2'] + config2['username'] = tf.config['username2'] + config2['password'] = tf.config['password2'] + cls.conn2 = Connection(config2) + cls.conn2.authenticate() + + if six.PY2: + # avoid getting a prefix that stops halfway through an encoded + # character + prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8") + else: + prefix = Utils.create_name()[:10] + + cls.container = cls.account.container(prefix + "-objs") + container_headers = {cls.versions_header_key: 'True'} + if not cls.container.create(hdrs=container_headers): + raise ResponseError(cls.conn.response) + + cls.unversioned_container = cls.account.container( + prefix + "-unversioned") + if not cls.unversioned_container.create(): + raise ResponseError(cls.conn.response) + + if not tf.skip2: + # setup another account to test ACLs + config2 = deepcopy(tf.config) + config2['account'] = tf.config['account2'] + config2['username'] = tf.config['username2'] + config2['password'] = tf.config['password2'] + cls.conn2 = Connection(config2) + cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate() + cls.account2 = cls.conn2.get_account() + cls.account2.delete_containers() + + if not tf.skip3: + # setup another account with no access to anything to test ACLs + config3 = deepcopy(tf.config) + config3['account'] = tf.config['account'] + config3['username'] = tf.config['username3'] + config3['password'] = tf.config['password3'] + cls.conn3 = Connection(config3) + cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate() + cls.account3 = cls.conn3.get_account() + + # the allowed headers are configurable in object server, so we cannot + # assert that content-encoding or content-disposition get *copied* to + # the object version unless they were set on the original PUT, so + # populate expected_headers by making a HEAD on the original object + precheck_container = cls.account.container('header-precheck-cont') + if not precheck_container.create(): + raise ResponseError(cls.conn.response) + test_obj = precheck_container.file('test_allowed_headers') + put_headers = {'Content-Type': 'text/jibberish01', + 'Content-Encoding': 'gzip', + 'Content-Disposition': 'attachment; filename=myfile'} + test_obj.write(b"aaaaa", hdrs=put_headers) + test_obj.initialize() + resp_headers = { + h.lower(): v for h, v in test_obj.conn.response.getheaders()} + cls.expected_headers = {} + for k, v in put_headers.items(): + if k.lower() in resp_headers: + cls.expected_headers[k] = v + 
precheck_container.delete_recursive() + + @classmethod + def tearDown(cls): + if cls.account: + cls.account.delete_containers() + if cls.account2: + cls.account2.delete_containers() + + +class TestObjectVersioningBase(Base): + env = TestObjectVersioningEnv + + def setUp(self): + super(TestObjectVersioningBase, self).setUp() + if 'object_versioning' not in tf.cluster_info: + raise SkipTest("Object Versioning not enabled") + + self._account_name = None + + # make sure versioning is enabled, + # since it gets disabled in tearDown + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'True'}) + + def _tear_down_files(self, container): + try: + # only delete files and not containers + # as they were configured in self.env + # get rid of any versions so they aren't restored + + container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + + # get rid of originals + container.delete_files() + + # delete older versions + listing_parms = {'versions': None, 'format': 'json'} + for obj_info in container.files(parms=listing_parms): + prev_version = container.file(obj_info['name']) + prev_version.delete( + parms={'version-id': obj_info['version_id']}) + + except ResponseError: + pass + + def tearDown(self): + super(TestObjectVersioningBase, self).tearDown() + self._tear_down_files(self.env.container) + + def assertTotalVersions(self, container, count): + listing_parms = {'versions': None} + self.assertEqual(count, len(container.files(parms=listing_parms))) + + def assertContentTypes(self, container, expected_content_types): + listing_parms = {'versions': None, + 'format': 'json', + 'reverse': 'true'} + self.assertEqual(expected_content_types, [ + o['content_type'] + for o in container.files(parms=listing_parms)]) + + +class TestObjectVersioning(TestObjectVersioningBase): + + @property + def account_name(self): + if not self._account_name: + self._account_name = self.env.conn.storage_path.rsplit('/', 1)[-1] + return self._account_name + + def test_disable_version(self): + # sanity + self.assertTrue( + config_true_value(self.env.container.info()['versions_enabled'])) + + # disable it + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + self.assertFalse( + config_true_value(self.env.container.info()['versions_enabled'])) + + # enabled it back + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'True'}) + self.assertTrue( + config_true_value(self.env.container.info()['versions_enabled'])) + + def assert_previous_version(self, object_name, version_id, content, + content_type, expected_headers={}, + not_expected_header_keys=[], + check_env_expected_headers=False): + ''' + Find previous version of an object using the ?versions API + then, assert object data and metadata using ?version-id API + ''' + prev_version = self.env.container.file(object_name) + prev_version.initialize(parms={'version-id': version_id}) + self.assertEqual(content, prev_version.read( + parms={'version-id': version_id})) + self.assertEqual(content_type, prev_version.content_type) + # make sure the new obj metadata did not leak to the prev. 
version + resp_headers = { + h.lower(): v for h, v in prev_version.conn.response.getheaders()} + + for k in not_expected_header_keys: + self.assertNotIn(k, resp_headers) + + for k, v in expected_headers.items(): + self.assertIn(k.lower(), resp_headers) + self.assertEqual(v, resp_headers[k.lower()]) + + # also check env expected_headers + if check_env_expected_headers: + for k, v in self.env.expected_headers.items(): + self.assertIn(k.lower(), resp_headers) + self.assertEqual(v, resp_headers[k.lower()]) + + def test_expiry(self): + # sanity + container = self.env.container + self.assertTrue( + config_true_value(self.env.container.info()['versions_enabled'])) + + versioned_obj1 = container.file(Utils.create_name()) + put_headers = {'Content-Type': 'text/blah-blah-blah', + 'X-Delete-After': '1', + 'X-Object-Meta-Color': 'blue'} + resp = versioned_obj1.write(b"aaaaa", hdrs=put_headers, + return_resp=True) + version_id1 = resp.getheader('x-object-version-id') + + versioned_obj2 = container.file(Utils.create_name()) + resp = versioned_obj2.write(b"aaaaa", hdrs={}, return_resp=True) + version_id2 = resp.getheader('x-object-version-id') + + # swift_test_client's File API doesn't really allow for POSTing + # arbitrary headers, so... + def put(url, token, parsed, conn): + conn.request('POST', '%s/%s/%s' % (parsed.path, container, + versioned_obj2.name), + '', {'X-Auth-Token': token, + 'Content-Length': '0', + 'X-Object-Meta-Color': 'red', + 'X-Delete-After': '1'}) + return tf.check_response(conn) + resp = tf.retry(put) + resp.read() + self.assertEqual(resp.status, 202) + + time.sleep(1) + + # Links have expired + with self.assertRaises(ResponseError) as cm: + versioned_obj1.info() + self.assertEqual(404, cm.exception.status) + + with self.assertRaises(ResponseError) as cm: + versioned_obj2.info() + self.assertEqual(404, cm.exception.status) + + # But data are still there + versioned_obj1.initialize(parms={'version-id': version_id1}) + self.assertEqual('text/blah-blah-blah', versioned_obj1.content_type) + self.assertEqual('blue', versioned_obj1.metadata['color']) + + versioned_obj2.initialize(parms={'version-id': version_id2}) + self.assertEqual('application/octet-stream', + versioned_obj2.content_type) + self.assertEqual('red', versioned_obj2.metadata['color']) + + # Note that links may still show up in listings, depending on how + # aggressive the object-expirer is. When doing a version-aware + # listing, though, we'll only ever have the two entries. 
+ self.assertTotalVersions(container, 2) + + def _test_overwriting_setup(self, obj_name=None): + # sanity + container = self.env.container + self.assertTrue( + config_true_value(self.env.container.info()['versions_enabled'])) + + expected_content_types = [] + self.assertTotalVersions(container, 0) + obj_name = obj_name or Utils.create_name() + + versioned_obj = container.file(obj_name) + put_headers = {'Content-Type': 'text/jibberish01', + 'Content-Encoding': 'gzip', + 'Content-Disposition': 'attachment; filename=myfile'} + resp = versioned_obj.write(b"aaaaa", hdrs=put_headers, + return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + expected_content_types.append('text/jibberish01') + self.assertContentTypes(container, expected_content_types) + obj_info = versioned_obj.info() + self.assertEqual('text/jibberish01', obj_info['content_type']) + + self.assertTotalVersions(container, 1) + resp = versioned_obj.write( + b"bbbbb", + hdrs={'Content-Type': 'text/jibberish02', + 'X-Object-Meta-Foo': 'Bar'}, + return_resp=True) + v2_version_id = resp.getheader('x-object-version-id') + versioned_obj.initialize() + self.assertEqual(versioned_obj.content_type, 'text/jibberish02') + self.assertEqual(versioned_obj.metadata['foo'], 'Bar') + resp_headers = { + h.lower(): v for h, v in versioned_obj.conn.response.getheaders()} + content_location = quote('/v1/%s/%s/%s' % ( + self.account_name, container.name, obj_name + )) + '?version-id=%s' % (v2_version_id,) + self.assertEqual(content_location, resp_headers['content-location']) + expected_content_types.append('text/jibberish02') + self.assertContentTypes(container, expected_content_types) + + # the old version got saved off + self.assertTotalVersions(container, 2) + + self.assert_previous_version( + obj_name, v1_version_id, b'aaaaa', 'text/jibberish01', + not_expected_header_keys=['X-Object-Meta-Foo'], + check_env_expected_headers=True) + + # check that POST does not create a new version + versioned_obj.sync_metadata(metadata={'fu': 'baz'}) + self.assertTotalVersions(container, 2) + + self.assert_previous_version( + obj_name, v2_version_id, b'bbbbb', 'text/jibberish02', + expected_headers={'X-Object-Meta-Fu': 'baz'}) + + # if we overwrite it again, there are three versions + resp = versioned_obj.write(b"ccccc", return_resp=True) + v3_version_id = resp.getheader('x-object-version-id') + expected_content_types.append('text/jibberish02') + self.assertContentTypes(container, expected_content_types) + self.assertTotalVersions(self.env.container, 3) + + # versioned_obj keeps the newest content + self.assertEqual(b"ccccc", versioned_obj.read()) + + # test copy from a different container + src_container = self.env.account.container(Utils.create_name()) + self.assertTrue(src_container.create()) + src_name = Utils.create_name() + src_obj = src_container.file(src_name) + src_obj.write(b"ddddd", hdrs={'Content-Type': 'text/jibberish04'}) + src_obj.copy(container.name, obj_name) + expected_content_types.append('text/jibberish04') + self.assertContentTypes(container, expected_content_types) + + self.assertEqual(b"ddddd", versioned_obj.read()) + versioned_obj.initialize() + self.assertEqual(versioned_obj.content_type, 'text/jibberish04') + + # make sure versions container has the previous version + self.assertTotalVersions(self.env.container, 4) + self.assert_previous_version( + obj_name, v3_version_id, b'ccccc', 'text/jibberish02') + + # test delete + # at first, delete will succeed with 204 + versioned_obj.delete() + expected_content_types.append( + 
'application/x-deleted;swift_versions_deleted=1') + + # after that, any time the delete doesn't restore the old version + # and we will get 404 NotFound + for x in range(3): + with self.assertRaises(ResponseError) as cm: + versioned_obj.delete() + self.assertEqual(404, cm.exception.status) + expected_content_types.append( + 'application/x-deleted;swift_versions_deleted=1') + + # finally, we have 4 versioned items and 4 delete markers total in + # the versions container + self.assertTotalVersions(self.env.container, 8) + self.assertContentTypes(self.env.container, expected_content_types) + + # update versioned_obj + versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving', + 'X-Object-Meta-Bar': 'foo'}) + + # verify the PUT object is kept successfully + obj_info = versioned_obj.info() + self.assertEqual('text/thanksgiving', obj_info['content_type']) + + # 8 plus one more write + self.assertTotalVersions(self.env.container, 9) + + # update versioned_obj + versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki', + 'X-Object-Meta-Food': 'chickin'}) + + # verify the PUT object is kept successfully + obj_info = versioned_obj.info() + self.assertEqual('text/teriyaki', obj_info['content_type']) + + # 9 plus one more write + self.assertTotalVersions(self.env.container, 10) + + versioned_obj.delete() + with self.assertRaises(ResponseError) as cm: + versioned_obj.read() + self.assertEqual(404, cm.exception.status) + + # 10 plus delete marker + self.assertTotalVersions(self.env.container, 11) + + return (versioned_obj, expected_content_types) + + def test_overwriting(self): + versioned_obj, expected_content_types = \ + self._test_overwriting_setup() + + def test_make_old_version_latest(self): + obj_name = Utils.create_name() + versioned_obj = self.env.container.file(obj_name) + versions = [{ + 'content_type': 'text/jibberish01', + 'body': b'aaaaa', + }, { + 'content_type': 'text/jibberish02', + 'body': b'bbbbbb', + }, { + 'content_type': 'text/jibberish03', + 'body': b'ccccccc', + }] + for version in versions: + resp = versioned_obj.write(version['body'], hdrs={ + 'Content-Type': version['content_type']}, return_resp=True) + version['version_id'] = resp.getheader('x-object-version-id') + expected = [{ + 'name': obj_name, + 'content_type': version['content_type'], + 'version_id': version['version_id'], + 'hash': md5(version['body']).hexdigest(), + 'bytes': len(version['body'],) + } for version in reversed(versions)] + for item, is_latest in zip(expected, (True, False, False)): + item['is_latest'] = is_latest + versions_listing = self.env.container.files(parms={ + 'versions': 'true', 'format': 'json'}) + for item in versions_listing: + item.pop('last_modified') + self.assertEqual(expected, versions_listing) + + versioned_obj.write(b'', parms={ + 'version-id': versions[1]['version_id']}) + self.assertEqual(b'bbbbbb', versioned_obj.read()) + for item, is_latest in zip(expected, (False, True, False)): + item['is_latest'] = is_latest + versions_listing = self.env.container.files(parms={ + 'versions': 'true', 'format': 'json'}) + for item in versions_listing: + item.pop('last_modified') + self.assertEqual(expected, versions_listing) + + def test_overwriting_with_url_encoded_object_name(self): + obj_name = Utils.create_name() + '%25ff' + versioned_obj, expected_content_types = \ + self._test_overwriting_setup(obj_name) + + def _test_versioning_dlo_setup(self): + if tf.in_process: + tf.skip_if_no_xattrs() + + container = self.env.container + obj_name = Utils.create_name() + + for i in 
('1', '2', '3'): + time.sleep(.01) # guarantee that the timestamp changes + obj_name_seg = 'segs_' + obj_name + '/' + i + versioned_obj = container.file(obj_name_seg) + versioned_obj.write(i.encode('ascii')) + # immediately overwrite + versioned_obj.write((i + i).encode('ascii')) + + # three objects 2 versions each + self.assertTotalVersions(self.env.container, 6) + + man_file = container.file(obj_name) + + # write a normal file first + resp = man_file.write( + b'old content', hdrs={'Content-Type': 'text/jibberish01'}, + return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + self.assertEqual(b'old content', man_file.read()) + + # guarantee that the timestamp changes + time.sleep(.01) + + # overwrite with a dlo manifest + dlo_prefix = quote(unquote('%s/segs_%s/' % ( + self.env.container.name, obj_name))) + resp = man_file.write( + b'', hdrs={'Content-Type': 'text/jibberish02', + 'X-Object-Manifest': dlo_prefix}, + return_resp=True) + v2_version_id = resp.getheader('x-object-version-id') + + self.assertTotalVersions(self.env.container, 8) + self.assertEqual(b'112233', man_file.read()) + + self.assert_previous_version( + obj_name, v1_version_id, b'old content', 'text/jibberish01') + + # overwrite the manifest with a normal file + man_file.write(b'new content') + self.assertTotalVersions(self.env.container, 9) + self.assertEqual(b'new content', man_file.read()) + + # new most-recent archive is the dlo + self.assert_previous_version( + obj_name, v2_version_id, b'112233', 'text/jibberish02', + expected_headers={'X-Object-Manifest': dlo_prefix}) + return obj_name, man_file + + def test_versioning_dlo(self): + obj_name, man_file = \ + self._test_versioning_dlo_setup() + + man_file.delete() + with self.assertRaises(ResponseError) as cm: + man_file.read() + self.assertEqual(404, cm.exception.status) + + # 9 plus one more write + self.assertTotalVersions(self.env.container, 10) + + expected = [b'old content', b'112233', b'new content'] + + bodies = [] + listing_parms = {'versions': None, 'format': 'json', + 'reverse': 'true', 'prefix': obj_name} + for obj_info in self.env.container.files(parms=listing_parms)[:3]: + bodies.append(man_file.read( + parms={'version-id': obj_info['version_id']})) + self.assertEqual(expected, bodies) + + def _check_overwriting_symlink(self): + # sanity + container = self.env.container + self.assertTrue( + config_true_value(self.env.container.info()['versions_enabled'])) + + tgt_a_name = Utils.create_name() + tgt_b_name = Utils.create_name() + expected_count = 0 + + tgt_a = container.file(tgt_a_name) + tgt_a.write(b'aaaaa', hdrs={'Content-Type': 'text/jibberish01'}) + expected_count += 1 + + tgt_b = container.file(tgt_b_name) + tgt_b.write(b"bbbbb") + expected_count += 1 + + symlink_name = Utils.create_name() + sym_tgt_header = quote(unquote('%s/%s' % (container.name, tgt_a_name))) + sym_headers_a = {'X-Symlink-Target': sym_tgt_header} + symlink = container.file(symlink_name) + resp = symlink.write(b'', hdrs=sym_headers_a, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + expected_count += 1 + self.assertEqual(b"aaaaa", symlink.read()) + + sym_headers_b = {'X-Symlink-Target': '%s/%s' % (container.name, + tgt_b_name)} + symlink.write(b"", hdrs=sym_headers_b) + expected_count += 1 + self.assertEqual(b"bbbbb", symlink.read()) + + self.assertTotalVersions(container, expected_count) + self.assert_previous_version( + symlink_name, v1_version_id, b'aaaaa', 'text/jibberish01') + return symlink, tgt_a + + def 
test_overwriting_symlink(self): + if 'symlink' not in cluster_info: + raise SkipTest("Symlinks not enabled") + + symlink, target = self._check_overwriting_symlink() + # test delete + symlink.delete() + with self.assertRaises(ResponseError) as cm: + symlink.read() + self.assertEqual(404, cm.exception.status) + + def _setup_symlink(self): + tgt_name = 'target-' + Utils.create_name() + target = self.env.container.file(tgt_name) + target.write(b'target object data', + hdrs={'Content-Type': 'text/jibberish01'}) + symlink = self.env.container.file('symlink') + resp = symlink.write(b'', hdrs={ + 'Content-Type': 'application/symlink', + 'X-Symlink-Target': '%s/%s' % ( + self.env.container.name, target.name)}, + return_resp=True) + symlink_version_id = resp.getheader('x-object-version-id') + return symlink, symlink_version_id, target + + def _check_copy_destination_symlink(self): + symlink, sym_version_id, target = self._setup_symlink() + self.assertEqual(b'target object data', symlink.read()) + symlink.write(b'this is not a symlink') + + # target, symlink, and new 'not a symlink' overwritten by write + self.assertTotalVersions(self.env.container, 3) + self.assert_previous_version( + symlink.name, sym_version_id, + b'target object data', 'text/jibberish01') + + # the symlink is still a symlink + prev_version = self.env.container.file(symlink.name) + prev_version.initialize(parms={'version-id': sym_version_id}) + self.assertEqual('application/symlink', + prev_version.info(parms={ + 'version-id': sym_version_id, + 'symlink': 'get'})['content_type']) + prev_version.copy(self.env.container.name, symlink.name, + parms={'version-id': sym_version_id, + 'symlink': 'get'}) + self.assertEqual(b'target object data', symlink.read()) + self.assertTotalVersions(self.env.container, 4) + + return symlink, target + + def test_copy_destination_restore_symlink(self): + if 'symlink' not in cluster_info: + raise SkipTest("Symlinks not enabled") + + symlink, target = self._check_copy_destination_symlink() + symlink.delete() + with self.assertRaises(ResponseError) as cm: + symlink.read() + self.assertEqual(404, cm.exception.status) + # symlink & target, plus overwrite and restore, then delete marker + self.assertTotalVersions(self.env.container, 5) + + def test_versioned_staticlink(self): + tgt_name = 'target-' + Utils.create_name() + link_name = 'staticlink-' + Utils.create_name() + target = self.env.container.file(tgt_name) + staticlink = self.env.container.file(link_name) + + target_resp = target.write(b'target object data', hdrs={ + 'Content-Type': 'text/jibberish01'}, return_resp=True) + staticlink.write(b'', hdrs={ + 'X-Symlink-Target': '%s/%s' % ( + self.env.container.name, target.name), + 'X-Symlink-Target-Etag': target_resp.getheader('etag'), + }, cfg={'no_content_type': True}) + self.assertEqual(b'target object data', staticlink.read()) + + listing_parms = {'format': 'json', 'versions': 'true'} + prev_versions = self.env.container.files(parms=listing_parms) + expected = [{ + 'name': link_name, + 'bytes': 0, + 'content_type': 'text/jibberish01', + 'is_latest': True, + }, { + 'name': tgt_name, + 'bytes': 18, + 'content_type': 'text/jibberish01', + 'is_latest': True, + }] + self.assertEqual(expected, [{ + k: i[k] for k in ( + 'name', 'bytes', 'content_type', 'is_latest', + )} for i in prev_versions]) + + target_resp = target.write(b'updated target data', hdrs={ + 'Content-Type': 'text/jibberish02'}, return_resp=True) + with self.assertRaises(ResponseError) as caught: + staticlink.read() + self.assertEqual(409, 
caught.exception.status) + staticlink.write(b'', hdrs={ + 'X-Symlink-Target': '%s/%s' % ( + self.env.container.name, target.name), + 'X-Symlink-Target-Etag': target_resp.getheader('etag'), + }, cfg={'no_content_type': True}) + self.assertEqual(b'updated target data', staticlink.read()) + + listing_parms = {'format': 'json', 'versions': 'true'} + prev_versions = self.env.container.files(parms=listing_parms) + expected = [{ + 'name': link_name, + 'bytes': 0, + 'content_type': 'text/jibberish02', + 'is_latest': True, + }, { + 'name': link_name, + 'bytes': 0, + 'content_type': 'text/jibberish01', + 'is_latest': False, + }, { + 'name': tgt_name, + 'bytes': 19, + 'content_type': 'text/jibberish02', + 'is_latest': True, + }, { + 'name': tgt_name, + 'bytes': 18, + 'content_type': 'text/jibberish01', + 'is_latest': False, + }] + self.assertEqual(expected, [{ + k: i[k] for k in ( + 'name', 'bytes', 'content_type', 'is_latest', + )} for i in prev_versions]) + + def test_link_to_versioned_object(self): + + # setup target object + tgt_name = 'target-' + Utils.create_name() + target = self.env.container.file(tgt_name) + target_resp = target.write(b'target object data', hdrs={ + 'Content-Type': 'text/jibberish01'}, return_resp=True) + + # setup dynamic link object from a non-versioned container + link_container_name = 'link-container-' + Utils.create_name() + link_name = 'link-' + Utils.create_name() + link_cont = self.env.account.container(link_container_name) + self.assertTrue(link_cont.create()) + link = link_cont.file(link_name) + self.assertTrue(link.write(b'', hdrs={ + 'X-Symlink-Target': '%s/%s' % ( + self.env.container.name, tgt_name), + }, cfg={'no_content_type': True})) + self.assertEqual(b'target object data', link.read()) + + # setup static link object from a non-versioned container + staticlink_name = 'staticlink-' + Utils.create_name() + staticlink = link_cont.file(staticlink_name) + self.assertTrue(staticlink.write(b'', hdrs={ + 'X-Symlink-Target': '%s/%s' % ( + self.env.container.name, tgt_name), + 'X-Symlink-Target-Etag': target_resp.getheader('etag'), + }, cfg={'no_content_type': True})) + self.assertEqual(b'target object data', link.read()) + + def test_versioned_post(self): + # first we'll create a versioned object + obj_name = Utils.create_name() + obj = self.env.container.file(obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish10' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + # send post request + obj.post(hdrs={'Content-Type': 'text/updated20'}) + + # head request should show updated content-type + obj_info = obj.info() + self.assertEqual(obj_info['content_type'], 'text/updated20') + + listing_parms = {'format': 'json', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(1, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj_name, + 'bytes': 8, + 'content_type': 'text/updated20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'version_id': v1_version_id, + 'is_latest': True, + }]) + + def test_unversioned_post(self): + # first we'll create a versioned object + obj_name = Utils.create_name() + obj = self.env.container.file(obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish10' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + # now, turn off versioning + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 
'False'}) + + obj.post(hdrs={'Content-Type': 'text/updated20'}) + + # head request should show updated content-type + obj_info = obj.info() + self.assertEqual(obj_info['content_type'], 'text/updated20') + + listing_parms = {'format': 'json', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(1, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj_name, + 'bytes': 8, + 'content_type': 'text/updated20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'is_latest': True, + 'version_id': v1_version_id, + 'is_latest': True, + }]) + + def test_unversioned_overwrite_and_delete(self): + # first we'll create a versioned object + obj_name = Utils.create_name() + obj = self.env.container.file(obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish18' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + self.assertTotalVersions(self.env.container, 1) + + # now, turn off versioning, and delete source obj + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + obj.delete() + + # no delete markers, archive listing is unchanged + self.assertTotalVersions(self.env.container, 1) + + # sanity, object is gone + self.assertRaises(ResponseError, obj.read) + self.assertEqual(404, obj.conn.response.status) + + # but, archive version is unmodified + self.assert_previous_version(obj_name, v1_version_id, b'version1', + 'text/jibberish18') + + # a new overwrites will not have a version-id + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish19' + }, return_resp=True) + self.assertIsNone(resp.getheader('x-object-version-id')) + self.assertTotalVersions(self.env.container, 2) + + resp = obj.write(b'version3', hdrs={ + 'Content-Type': 'text/jibberish20' + }, return_resp=True) + self.assertIsNone(resp.getheader('x-object-version-id')) + self.assertTotalVersions(self.env.container, 2) + + obj.delete() + self.assertTotalVersions(self.env.container, 1) + + obj.delete(tolerate_missing=True) + self.assertTotalVersions(self.env.container, 1) + + def test_versioned_overwrite_from_old_version(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish32' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + v1_etag = resp.getheader('etag') + + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish33' + }, return_resp=True) + v2_version_id = resp.getheader('x-object-version-id') + v2_etag = resp.getheader('etag') + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(2, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': versioned_obj_name, + 'bytes': 8, + 'content_type': 'text/jibberish32', + 'hash': v1_etag, + 'version_id': v1_version_id, + 'is_latest': False, + }, { + 'name': versioned_obj_name, + 'bytes': 8, + 'content_type': 'text/jibberish33', + 'hash': v2_etag, + 'version_id': v2_version_id, + 'is_latest': True, + }]) + + # restore old version1 back in place with a copy request + # should get a new version-id + old_version_obj = self.env.container.file(versioned_obj_name) 
+ resp = old_version_obj.copy(self.env.container.name, + versioned_obj_name, + parms={'version-id': v1_version_id}, + return_resp=True) + v3_version_id = resp.getheader('x-object-version-id') + + listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(3, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': versioned_obj_name, + 'bytes': 8, + 'content_type': 'text/jibberish32', + 'hash': v1_etag, + 'version_id': v1_version_id, + 'is_latest': False, + }, { + 'name': versioned_obj_name, + 'bytes': 8, + 'content_type': 'text/jibberish33', + 'hash': v2_etag, + 'version_id': v2_version_id, + 'is_latest': False, + }, { + 'name': versioned_obj_name, + 'bytes': 8, + 'content_type': 'text/jibberish32', + 'hash': v1_etag, + 'version_id': v3_version_id, + 'is_latest': True, + }]) + + self.assertEqual(b'version1', obj.read()) + obj_info = obj.info() + self.assertEqual('text/jibberish32', obj_info['content_type']) + self.assertEqual(v1_etag, obj_info['etag']) + + def test_delete_with_version_api_old_object(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish32' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'}) + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + obj.delete(parms={'version-id': v1_version_id}) + + self.assertEqual(b'version2', obj.read()) + self.assertTotalVersions(self.env.container, 1) + + def test_delete_with_version_api_current_object(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'}) + + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish33' + }, return_resp=True) + v2_version_id = resp.getheader('x-object-version-id') + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + obj.delete(parms={'version-id': v2_version_id}) + + with self.assertRaises(ResponseError) as cm: + obj.read() + self.assertEqual(404, cm.exception.status) + self.assertTotalVersions(self.env.container, 1) + + def test_delete_delete_marker_with_version_api(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'}) + + obj.delete() + resp_headers = { + h.lower(): v for h, v in obj.conn.response.getheaders()} + self.assertIn('x-object-version-id', resp_headers) + dm_version_id = resp_headers['x-object-version-id'] + + # sanity + with self.assertRaises(ResponseError) as cm: + obj.info(parms={'version-id': dm_version_id}) + resp_headers = { + h.lower(): v for h, v in cm.exception.headers} + self.assertEqual(dm_version_id, + resp_headers['x-object-version-id']) + self.assertEqual(DELETE_MARKER_CONTENT_TYPE, + resp_headers['content-type']) + + obj.delete(parms={'version-id': dm_version_id}) + resp_headers = { + h.lower(): v for h, v in obj.conn.response.getheaders()} + self.assertEqual(dm_version_id, + resp_headers['x-object-version-id']) + + def test_delete_with_version_api_last_object(self): + versioned_obj_name = Utils.create_name() + obj = 
self.env.container.file(versioned_obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish1' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + # sanity + self.assertEqual(b'version1', obj.read()) + self.assertTotalVersions(self.env.container, 1) + + # delete + obj.delete(parms={'version-id': v1_version_id}) + + with self.assertRaises(ResponseError) as cm: + obj.read() + self.assertEqual(404, cm.exception.status) + self.assertTotalVersions(self.env.container, 0) + + def test_delete_with_version_api_null_version(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'}) + obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'}) + + # sanity + self.assertEqual(b'version2', obj.read()) + self.assertTotalVersions(self.env.container, 2) + + obj.delete(parms={'version-id': 'null'}) + with self.assertRaises(ResponseError) as caught: + obj.read() + self.assertEqual(404, caught.exception.status) + + # no versions removed + self.assertTotalVersions(self.env.container, 2) + + def test_delete_with_version_api_old_object_disabled(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish32' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'}) + + # disabled versioning + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + obj.delete(parms={'version-id': v1_version_id}) + + self.assertEqual(b'version2', obj.read()) + self.assertTotalVersions(self.env.container, 1) + + def test_delete_with_version_api_current_object_disabled(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'}) + + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish33' + }, return_resp=True) + v2_version_id = resp.getheader('x-object-version-id') + + # disabled versioning + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + obj.delete(parms={'version-id': v2_version_id}) + + with self.assertRaises(ResponseError) as cm: + obj.read() + self.assertEqual(404, cm.exception.status) + self.assertTotalVersions(self.env.container, 1) + + def test_delete_with_version_api_old_object_current_unversioned(self): + versioned_obj_name = Utils.create_name() + obj = self.env.container.file(versioned_obj_name) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish32' + }, return_resp=True) + v1_version_id = resp.getheader('x-object-version-id') + + # disabled versioning + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + + # write unversioned object (i.e., version-id='null') + obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'}) + + # sanity + self.assertEqual(b'version2', obj.read()) + + self.assertTotalVersions(self.env.container, 2) + obj.delete(parms={'version-id': v1_version_id}) + + self.assertEqual(b'version2', obj.read()) + self.assertTotalVersions(self.env.container, 1) + + +class 
TestObjectVersioningUTF8(Base2, TestObjectVersioning): + pass + + +class TestContainerOperations(TestObjectVersioningBase): + + def _prep_object_versions(self): + + # object with multiple versions and currently deleted + obj1_v1 = {} + obj1_v1['name'] = 'c' + Utils.create_name() + obj = self.env.container.file(obj1_v1['name']) + + # v1 + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish11', + 'ETag': md5(b'version1').hexdigest(), + }, return_resp=True) + obj1_v1['id'] = resp.getheader('x-object-version-id') + + # v2 + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish12', + 'ETag': md5(b'version2').hexdigest(), + }, return_resp=True) + obj1_v2 = {} + obj1_v2['name'] = obj1_v1['name'] + obj1_v2['id'] = resp.getheader('x-object-version-id') + + # v3 + resp = obj.write(b'version3', hdrs={ + 'Content-Type': 'text/jibberish13', + 'ETag': md5(b'version3').hexdigest(), + }, return_resp=True) + obj1_v3 = {} + obj1_v3['name'] = obj1_v1['name'] + obj1_v3['id'] = resp.getheader('x-object-version-id') + + with self.assertRaises(ResponseError) as cm: + obj.write(b'version4', hdrs={ + 'Content-Type': 'text/jibberish11', + 'ETag': 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + }) + self.assertEqual(422, cm.exception.status) + + # v4 + obj.delete() + resp_headers = { + h.lower(): v for h, v in obj.conn.response.getheaders()} + obj1_v4 = {} + obj1_v4['name'] = obj1_v1['name'] + obj1_v4['id'] = resp_headers.get('x-object-version-id') + + # object with just a single version + obj2_v1 = {} + obj2_v1['name'] = 'b' + Utils.create_name() + obj = self.env.container.file(obj2_v1['name']) + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish20', + 'ETag': '966634ebf2fc135707d6753692bf4b1e', + }, return_resp=True) + obj2_v1['id'] = resp.getheader('x-object-version-id') + + # object never existed, just a delete marker + obj3_v1 = {} + obj3_v1['name'] = 'a' + Utils.create_name() + obj = self.env.container.file(obj3_v1['name']) + obj.delete(tolerate_missing=True) + self.assertEqual(obj.conn.response.status, 404) + resp_headers = { + h.lower(): v for h, v in obj.conn.response.getheaders()} + obj3_v1['id'] = resp_headers.get('x-object-version-id') + + return (obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1) + + def _prep_unversioned_objects(self): + objs = ( + 'deleted' + Utils.create_name(), + 'in' + Utils.create_name(), + 'order' + Utils.create_name(), + ) + + # object with multiple writes and currently deleted + obj = self.env.unversioned_container.file(objs[0]) + obj.write(b'data', hdrs={ + 'Content-Type': 'text/jibberish11', + 'ETag': md5(b'data').hexdigest(), + }) + obj.delete() + + obj = self.env.unversioned_container.file(objs[1]) + obj.write(b'first', hdrs={ + 'Content-Type': 'text/blah-blah-blah', + 'ETag': md5(b'first').hexdigest(), + }) + + obj = self.env.unversioned_container.file(objs[2]) + obj.write(b'second', hdrs={ + 'Content-Type': 'text/plain', + 'ETag': md5(b'second').hexdigest(), + }) + return objs + + def test_list_all_versions(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions in container + listing_parms = {'format': 'json', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(6, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj3_v1['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 
'is_latest': True, + 'version_id': obj3_v1['id'], + }, { + 'name': obj2_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'is_latest': True, + 'version_id': obj2_v1['id'], + }, { + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }, { + 'name': obj1_v3['name'], + 'bytes': 8, + 'content_type': 'text/jibberish13', + 'hash': md5(b'version3').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v3['id'], + }, { + 'name': obj1_v2['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v2['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }]) + + def test_list_all_versions_reverse(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions in container in reverse order + listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(6, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }, { + 'name': obj1_v2['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v2['id'], + }, { + 'name': obj1_v3['name'], + 'bytes': 8, + 'content_type': 'text/jibberish13', + 'hash': md5(b'version3').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v3['id'], + }, { + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }, { + 'name': obj2_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'is_latest': True, + 'version_id': obj2_v1['id'], + }, { + 'name': obj3_v1['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj3_v1['id'], + }]) + + def test_list_versions_prefix(self): + + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions for a given object + listing_parms = {'format': 'json', + 'versions': None, 'prefix': obj1_v1['name']} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(4, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }, { + 'name': obj1_v3['name'], + 'bytes': 8, + 'content_type': 'text/jibberish13', + 'hash': md5(b'version3').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v3['id'], + }, { + 'name': obj1_v2['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': 
obj1_v2['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }]) + + def test_list_versions_prefix_reverse(self): + + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions for a given object in reverse order + listing_parms = {'format': 'json', 'reverse': 'true', + 'versions': None, 'prefix': obj1_v1['name']} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(4, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v2['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish13', + 'hash': md5(b'version3').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v3['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }]) + + def test_list_limit(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions in container + listing_parms = {'format': 'json', + 'limit': 3, + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(3, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj3_v1['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj3_v1['id'], + }, { + 'name': obj2_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'is_latest': True, + 'version_id': obj2_v1['id'], + }, { + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }]) + + def test_list_limit_marker(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions in container + listing_parms = {'format': 'json', + 'limit': 2, + 'marker': obj2_v1['name'], + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(2, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }, { + 'name': obj1_v3['name'], + 'bytes': 8, + 'content_type': 'text/jibberish13', + 'hash': md5(b'version3').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v3['id'], + }]) + + def test_list_version_marker(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions starting with version_marker + listing_parms = {'format': 'json', + 'marker': obj1_v3['name'], + 'version_marker': 
obj1_v3['id'], + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(2, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v2['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v2['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }]) + + def test_list_version_marker_reverse(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list all versions starting with version_marker in reverse order + listing_parms = {'format': 'json', + 'marker': obj1_v3['name'], + 'version_marker': obj1_v3['id'], + 'reverse': 'true', + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(3, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }, { + 'name': obj2_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish20', + 'hash': '966634ebf2fc135707d6753692bf4b1e', + 'is_latest': True, + 'version_id': obj2_v1['id'], + }, { + 'name': obj3_v1['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj3_v1['id'], + }]) + + def test_list_prefix_version_marker(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list versions with prefix starting with version_marker + listing_parms = {'format': 'json', + 'prefix': obj1_v3['name'], + 'marker': obj1_v3['name'], + 'version_marker': obj1_v3['id'], + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(2, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v2['name'], + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v2['id'], + }, { + 'name': obj1_v1['name'], + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj1_v1['id'], + }]) + + def test_list_prefix_version_marker_reverse(self): + obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \ + self._prep_object_versions() + + # list versions with prefix starting with version_marker + # in reverse order + listing_parms = {'format': 'json', + 'prefix': obj1_v3['name'], + 'marker': obj1_v3['name'], + 'version_marker': obj1_v3['id'], + 'reverse': 'true', + 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(1, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj1_v4['name'], + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': True, + 'version_id': obj1_v4['id'], + }]) + + def test_unacceptable(self): + def do_test(format): + with self.assertRaises(ResponseError) as caught: + self.env.container.files(parms={ + 
                    'format': format, 'versions': None})
+            self.assertEqual(caught.exception.status, 406)
+
+        do_test('plain')
+        do_test('xml')
+
+        def do_test(accept):
+            with self.assertRaises(ResponseError) as caught:
+                self.env.container.files(hdrs={'Accept': accept},
+                                         parms={'versions': None})
+            self.assertEqual(caught.exception.status, 406)
+
+        do_test('text/plain')
+        do_test('text/xml')
+        do_test('application/xml')
+        do_test('foo/bar')
+
+    def testFileListingLimitMarkerPrefix(self):
+        cont = self.env.container
+
+        files = ['apple', 'banana', 'cacao', 'date', 'elderberry']
+        for f in files:
+            file_item = cont.file(f)
+            self.assertTrue(file_item.write_random())
+            # immediately overwrite
+            self.assertTrue(file_item.write_random())
+            time.sleep(.01)  # guarantee that the timestamp changes
+
+        # sanity
+        for i in range(len(files)):
+            f = files[i]
+            for j in range(1, len(files) - i):
+                self.assertEqual(cont.files(parms={'limit': j, 'marker': f}),
+                                 files[i + 1: i + j + 1])
+            self.assertEqual(cont.files(parms={'marker': f}), files[i + 1:])
+            self.assertEqual(cont.files(parms={'marker': f, 'prefix': f}), [])
+            self.assertEqual(cont.files(parms={'prefix': f}), [f])
+
+        # repeat items in files list
+        versions = [f2 for f1 in files for f2 in (f1,) * 2]
+
+        # now list versions too
+        v = 0
+        for i in range(len(files)):
+            f = files[i]
+            for j in range(1, len(files) - i):
+                self.assertEqual(versions[i + v + 2: i + j + v + 2], [
+                    item['name'] for item in cont.files(parms={
+                        'limit': j, 'marker': f, 'versions': None})])
+            self.assertEqual(versions[v + i + 2:], [
+                item['name'] for item in cont.files(parms={
+                    'marker': f, 'versions': None})])
+            self.assertEqual(cont.files(parms={'marker': f, 'prefix': f,
+                                               'versions': None}), [])
+            self.assertEqual([f, f], [
+                item['name'] for item in cont.files(parms={
+                    'prefix': f, 'versions': None})])
+            v = v + 1
+
+    def testPrefixAndLimit(self):
+        cont = self.env.container
+
+        prefix_file_count = 10
+        limit_count = 2
+        prefixs = ['apple/', 'banana/', 'cacao/']
+        prefix_files = {}
+
+        for prefix in prefixs:
+            prefix_files[prefix] = []
+
+            for i in range(prefix_file_count):
+                file_item = cont.file(prefix + Utils.create_name())
+                self.assertTrue(file_item.write_random())
+                self.assertTrue(file_item.write_random())
+                prefix_files[prefix].append(file_item.name)
+                time.sleep(.01)  # guarantee that the timestamp changes
+
+        versions_prefix_files = {}
+        for prefix in prefixs:
+            versions_prefix_files[prefix] = [f2 for f1 in prefix_files[prefix]
+                                             for f2 in (f1,) * 2]
+        # sanity
+        for format_type in [None, 'json', 'xml']:
+            for prefix in prefixs:
+                files = cont.files(parms={'prefix': prefix,
+                                          'format': format_type})
+                if isinstance(files[0], dict):
+                    files = [x.get('name', x.get('subdir')) for x in files]
+                self.assertEqual(files, sorted(prefix_files[prefix]))
+
+        # list versions
+        for format_type in [None, 'json']:
+            for prefix in prefixs:
+                files = cont.files(parms={'prefix': prefix,
+                                          'versions': None,
+                                          'format': format_type})
+                if isinstance(files[0], dict):
+                    files = [x.get('name', x.get('subdir')) for x in files]
+                self.assertEqual(files, sorted(versions_prefix_files[prefix]))
+
+        # list versions with limit
+        for format_type in [None, 'json']:
+            for prefix in prefixs:
+                files = cont.files(parms={'limit': limit_count,
+                                          'versions': None,
+                                          'prefix': prefix,
+                                          'format': format_type})
+                if isinstance(files[0], dict):
+                    files = [x.get('name', x.get('subdir')) for x in files]
+                self.assertEqual(len(files), limit_count)
+
+                for file_item in files:
+                    self.assertTrue(file_item.startswith(prefix))
+
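+    # The delimiter tests below check that latest-only listings hide delete
+    # markers while ?versions listings include them, and that entries roll
+    # up under subdir markers either way.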
+    def testListDelimiter(self):
+        cont = self.env.container
+
+        delimiter = '-'
+        files = ['test', delimiter.join(['test', 'bar']),
+                 delimiter.join(['test', 'foo'])]
+        for f in files:
+            file_item = cont.file(f)
+            self.assertTrue(file_item.write_random())
+
+        # object with no current version, just a delete marker
+        del_file = 'del-baz'
+        obj = self.env.container.file(del_file)
+        obj.delete(tolerate_missing=True)
+        self.assertEqual(obj.conn.response.status, 404)
+
+        # now, turn off versioning and write an un-versioned obj
+        self.env.container.update_metadata(
+            hdrs={self.env.versions_header_key: 'False'})
+
+        # a new write will not have a version-id
+        off_file = 'off-xyz'
+        obj = self.env.container.file(off_file)
+        resp = obj.write(b'unversioned', return_resp=True)
+        self.assertIsNone(resp.getheader('x-object-version-id'))
+
+        # sanity
+        # list latest, delete marker should not show-up
+        for format_type in [None, 'json', 'xml']:
+            results = cont.files(parms={'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['off-xyz', 'test', 'test-bar',
+                                       'test-foo'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['off-', 'test', 'test-'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'format': format_type,
+                                        'reverse': 'yes'})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['test-', 'test', 'off-'])
+
+        # list versions, we should see delete marker here
+        for format_type in [None, 'json']:
+            results = cont.files(parms={'versions': None,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['del-baz', 'off-xyz', 'test',
+                                       'test-bar', 'test-foo'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'versions': None,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['del-', 'off-', 'test', 'test-'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'versions': None,
+                                        'format': format_type,
+                                        'reverse': 'yes'})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['test-', 'test', 'off-', 'del-'])
+
+    def testListMultiCharDelimiter(self):
+        cont = self.env.container
+
+        delimiter = '-&'
+        files = ['test', delimiter.join(['test', 'bar']),
+                 delimiter.join(['test', 'foo'])]
+        for f in files:
+            file_item = cont.file(f)
+            self.assertTrue(file_item.write_random())
+
+        # object with no current version, just a delete marker
+        del_file = 'del-&baz'
+        obj = self.env.container.file(del_file)
+        obj.delete(tolerate_missing=True)
+        self.assertEqual(obj.conn.response.status, 404)
+
+        # now, turn off versioning and write an un-versioned obj
+        self.env.container.update_metadata(
+            hdrs={self.env.versions_header_key: 'False'})
+
+        # a new write will not have a version-id
+        off_file = 'off-&xyz'
+        obj = self.env.container.file(off_file)
+        resp = obj.write(b'unversioned', return_resp=True)
+        self.assertIsNone(resp.getheader('x-object-version-id'))
+
+        # sanity
+        # list latest, delete marker should not show-up
+        for format_type in [None, 'json', 'xml']:
+            results = cont.files(parms={'format': format_type})
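+            # listings may come back as dicts or bare names depending on the
+            # format, so normalize to names before comparing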
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['off-&xyz', 'test', 'test-&bar',
+                                       'test-&foo'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['off-&', 'test', 'test-&'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'format': format_type,
+                                        'reverse': 'yes'})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['test-&', 'test', 'off-&'])
+
+        # list versions, we should see delete marker here
+        for format_type in [None, 'json']:
+            results = cont.files(parms={'versions': None,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['del-&baz', 'off-&xyz', 'test',
+                                       'test-&bar', 'test-&foo'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'versions': None,
+                                        'format': format_type})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['del-&', 'off-&', 'test', 'test-&'])
+
+            results = cont.files(parms={'delimiter': delimiter,
+                                        'versions': None,
+                                        'format': format_type,
+                                        'reverse': 'yes'})
+            if isinstance(results[0], dict):
+                results = [x.get('name', x.get('subdir')) for x in results]
+            self.assertEqual(results, ['test-&', 'test', 'off-&', 'del-&'])
+
+    def test_bytes_count(self):
+
+        container = self.env.container
+
+        # first store a non-versioned object
+        # disable versioning
+        container.update_metadata(
+            hdrs={self.env.versions_header_key: 'False'})
+        self.assertFalse(
+            config_true_value(container.info()['versions_enabled']))
+
+        obj = container.file(Utils.create_name())
+        self.assertTrue(obj.write(b'not-versioned'))
+        self.assertTotalVersions(container, 1)
+
+        # enable versioning
+        container.update_metadata(
+            hdrs={self.env.versions_header_key: 'True'})
+        self.assertTrue(
+            config_true_value(container.info()['versions_enabled']))
+
+        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
+            self._prep_object_versions()
+
+        self.assertEqual(int(container.info()['bytes_used']), 32 + obj.size)
+        self.assertEqual(int(container.info()['object_count']), 2)
+        self.assertTotalVersions(container, 7)
+
+    def test_container_quota_bytes(self):
+        if 'container_quotas' not in tf.cluster_info:
+            raise SkipTest('Container quotas not enabled')
+
+        if tf.in_process:
+            tf.skip_if_no_xattrs()
+
+        container = self.env.container
+
+        # write two versions of 5 bytes each
+        obj = container.file(Utils.create_name())
+        self.assertTrue(obj.write(b'aaaaa'))
+        self.assertTrue(obj.write(b'bbbbb'))
+        self.assertTotalVersions(container, 2)
+
+        # set X-Container-Meta-Quota-Bytes to 10
+        container.update_metadata(
+            hdrs={'X-Container-Meta-Quota-Bytes': '10'})
+        self.assertEqual(container.info()['container_quota_bytes'], '10')
+
+        with self.assertRaises(ResponseError) as cm:
+            obj.write(b'ccccc')
+        self.assertEqual(413, cm.exception.status)
+
+        # reset container quota
+        container.update_metadata(
+            hdrs={'X-Container-Meta-Quota-Bytes': ''})
+
+    def test_list_unversioned_container(self):
+        _obj1, obj2, obj3 = self._prep_unversioned_objects()
+        # _obj1 got deleted, so won't show up at all
+        item2 = {
+            'name': obj2,
+            'bytes': 5,
+            'content_type': 'text/blah-blah-blah',
+            'hash': md5(b'first').hexdigest(),
+            'is_latest': True,
+ 'version_id': 'null', + } + item3 = { + 'name': obj3, + 'bytes': 6, + 'content_type': 'text/plain', + 'hash': md5(b'second').hexdigest(), + 'is_latest': True, + 'version_id': 'null', + } + + # version-aware listing works for unversioned containers + listing_parms = {'format': 'json', + 'versions': None} + listing = self.env.unversioned_container.files(parms=listing_parms) + for item in listing: + item.pop('last_modified') + self.assertEqual(listing, [item2, item3]) + + listing_parms = {'format': 'json', + 'prefix': obj2[:2], + 'versions': None} + listing = self.env.unversioned_container.files(parms=listing_parms) + for item in listing: + item.pop('last_modified') + self.assertEqual(listing, [item2]) + + listing_parms = {'format': 'json', + 'marker': obj2, + 'versions': None} + listing = self.env.unversioned_container.files(parms=listing_parms) + for item in listing: + item.pop('last_modified') + self.assertEqual(listing, [item3]) + + listing_parms = {'format': 'json', + 'delimiter': 'er', + 'versions': None} + listing = self.env.unversioned_container.files(parms=listing_parms) + for item in listing: + if 'name' in item: + item.pop('last_modified') + self.assertEqual(listing, [item2, {'subdir': 'order'}]) + + listing_parms = {'format': 'json', + 'reverse': 'true', + 'versions': None} + listing = self.env.unversioned_container.files(parms=listing_parms) + for item in listing: + item.pop('last_modified') + self.assertEqual(listing, [item3, item2]) + + def test_is_latest(self): + obj = self.env.container.file(Utils.create_name()) + + # v1 + resp = obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish11', + 'ETag': md5(b'version1').hexdigest(), + }, return_resp=True) + obj_v1 = resp.getheader('x-object-version-id') + + # v2 + resp = obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish12', + 'ETag': md5(b'version2').hexdigest(), + }, return_resp=True) + obj_v2 = resp.getheader('x-object-version-id') + + obj.delete() + resp_headers = { + h.lower(): v for h, v in obj.conn.response.getheaders()} + obj_v3 = resp_headers.get('x-object-version-id') + + resp = obj.write(b'version4', hdrs={ + 'Content-Type': 'text/jibberish14', + 'ETag': md5(b'version4').hexdigest(), + }, return_resp=True) + obj_v4 = resp.getheader('x-object-version-id') + + listing_parms = {'format': 'json', 'versions': None} + prev_versions = self.env.container.files(parms=listing_parms) + self.assertEqual(4, len(prev_versions)) + for pv in prev_versions: + pv.pop('last_modified') + self.assertEqual(prev_versions, [{ + 'name': obj.name, + 'bytes': 8, + 'content_type': 'text/jibberish14', + 'hash': md5(b'version4').hexdigest(), + 'is_latest': True, + 'version_id': obj_v4, + }, { + 'name': obj.name, + 'bytes': 0, + 'content_type': 'application/x-deleted;swift_versions_deleted=1', + 'hash': MD5_OF_EMPTY_STRING, + 'is_latest': False, + 'version_id': obj_v3, + }, { + 'name': obj.name, + 'bytes': 8, + 'content_type': 'text/jibberish12', + 'hash': md5(b'version2').hexdigest(), + 'is_latest': False, + 'version_id': obj_v2, + }, { + 'name': obj.name, + 'bytes': 8, + 'content_type': 'text/jibberish11', + 'hash': md5(b'version1').hexdigest(), + 'is_latest': False, + 'version_id': obj_v1, + }]) + + self.env.container.update_metadata( + hdrs={self.env.versions_header_key: 'False'}) + + # v5 - non-versioned + obj.write(b'version5', hdrs={ + 'Content-Type': 'text/jibberish15', + 'ETag': md5(b'version5').hexdigest(), + }) + + listing_parms = {'format': 'json', 'versions': None} + prev_versions = 
self.env.container.files(parms=listing_parms)
+        self.assertEqual(5, len(prev_versions))
+        for pv in prev_versions:
+            pv.pop('last_modified')
+        self.assertEqual(prev_versions, [{
+            'name': obj.name,
+            'bytes': 8,
+            'content_type': 'text/jibberish15',
+            'hash': md5(b'version5').hexdigest(),
+            'is_latest': True,
+            'version_id': 'null',
+        }, {
+            'name': obj.name,
+            'bytes': 8,
+            'content_type': 'text/jibberish14',
+            'hash': md5(b'version4').hexdigest(),
+            'is_latest': False,
+            'version_id': obj_v4,
+        }, {
+            'name': obj.name,
+            'bytes': 0,
+            'content_type': 'application/x-deleted;swift_versions_deleted=1',
+            'hash': MD5_OF_EMPTY_STRING,
+            'is_latest': False,
+            'version_id': obj_v3,
+        }, {
+            'name': obj.name,
+            'bytes': 8,
+            'content_type': 'text/jibberish12',
+            'hash': md5(b'version2').hexdigest(),
+            'is_latest': False,
+            'version_id': obj_v2,
+        }, {
+            'name': obj.name,
+            'bytes': 8,
+            'content_type': 'text/jibberish11',
+            'hash': md5(b'version1').hexdigest(),
+            'is_latest': False,
+            'version_id': obj_v1,
+        }])
+
+
+class TestContainerOperationsUTF8(Base2, TestContainerOperations):
+    pass
+
+
+class TestDeleteContainer(TestObjectVersioningBase):
+    def tearDown(self):
+        # do nothing since test will delete all data + container
+        pass
+
+    def test_delete_container(self):
+        # sanity
+        container = self.env.container
+        self.assertTrue(
+            config_true_value(container.info()['versions_enabled']))
+        self.assertTotalVersions(container, 0)
+
+        # write an object to be versioned
+        obj = container.file(Utils.create_name())
+        obj.write(b"foo")
+        self.assertTotalVersions(container, 1)
+
+        # delete object and attempt to delete container
+        obj.delete()
+        self.assertTotalVersions(container, 2)
+
+        # expect failure because versioning is enabled and
+        # old versions still exist
+        self.assertFalse(container.delete())
+
+        # disable it
+        container.update_metadata(
+            hdrs={self.env.versions_header_key: 'False'})
+        self.assertFalse(
+            config_true_value(container.info()['versions_enabled']))
+
+        # expect failure because old versions still exist
+        self.assertFalse(container.delete())
+
+        # delete older versions
+        self._tear_down_files(container)
+        self.assertTotalVersions(container, 0)
+
+        # and finally delete container
+        self.assertTrue(container.delete())
+
+
+class TestSloWithVersioning(TestObjectVersioningBase):
+
+    def setUp(self):
+        super(TestSloWithVersioning, self).setUp()
+
+        if 'slo' not in cluster_info:
+            raise SkipTest("SLO not enabled")
+        if tf.in_process:
+            tf.skip_if_no_xattrs()
+
+        # create a container with versioning
+        self.env.versions_header_key = 'X-Versions-Enabled'
+        self.container = self.env.account.container(Utils.create_name())
+        container_headers = {self.env.versions_header_key: 'True'}
+        if not self.container.create(hdrs=container_headers):
+            raise ResponseError(self.conn.response)
+
+        self.segments_container = self.env.account.container(
+            Utils.create_name())
+        if not self.segments_container.create():
+            raise ResponseError(self.conn.response)
+
+        # create some segments
+        self.seg_info = {}
+        for letter, size in (('a', 1024 * 1024),
+                             ('b', 1024 * 1024)):
+            seg_name = letter
+            file_item = self.segments_container.file(seg_name)
+            file_item.write((letter * size).encode('ascii'))
+            self.seg_info[seg_name] = {
+                'size_bytes': size,
+                'etag': file_item.md5,
+                'path': '/%s/%s' % (self.segments_container.name, seg_name)}
+
+    @property
+    def account_name(self):
+        if not self._account_name:
+            self._account_name = self.env.account.conn.storage_path.rsplit(
+                '/', 1)[-1]
+        return self._account_name
+
+    def 
_create_manifest(self, seg_name): + # create a manifest in the versioning container + file_item = self.container.file("my-slo-manifest") + resp = file_item.write( + json.dumps([self.seg_info[seg_name]]).encode('ascii'), + parms={'multipart-manifest': 'put'}, + return_resp=True) + version_id = resp.getheader('x-object-version-id') + return file_item, version_id + + def _assert_is_manifest(self, file_item, seg_name, version_id=None): + if version_id: + read_params = {'multipart-manifest': 'get', + 'version-id': version_id} + else: + read_params = {'multipart-manifest': 'get'} + manifest_body = file_item.read(parms=read_params) + resp_headers = { + h.lower(): v for h, v in file_item.conn.response.getheaders()} + self.assertIn('x-static-large-object', resp_headers) + self.assertEqual('application/json; charset=utf-8', + file_item.content_type) + try: + manifest = json.loads(manifest_body) + except ValueError: + self.fail("GET with multipart-manifest=get got invalid json") + + self.assertEqual(1, len(manifest)) + key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'} + for k_client, k_slo in key_map.items(): + self.assertEqual(self.seg_info[seg_name][k_client], + manifest[0][k_slo]) + + def _assert_is_object(self, file_item, seg_data, version_id=None): + if version_id: + file_contents = file_item.read(parms={'version-id': version_id}) + else: + file_contents = file_item.read() + self.assertEqual(1024 * 1024, len(file_contents)) + self.assertEqual(seg_data, file_contents[:1]) + self.assertEqual(seg_data, file_contents[-1:]) + + def tearDown(self): + self._tear_down_files(self.container) + + def test_slo_manifest_version(self): + file_item, v1_version_id = self._create_manifest('a') + # sanity check: read the manifest, then the large object + self._assert_is_manifest(file_item, 'a') + self._assert_is_object(file_item, b'a') + + # upload new manifest + file_item, v2_version_id = self._create_manifest('b') + # sanity check: read the manifest, then the large object + self._assert_is_manifest(file_item, 'b') + self._assert_is_object(file_item, b'b') + + # we wrote two versions + self.assertTotalVersions(self.container, 2) + + # check the version 1 is still a manifest + self._assert_is_manifest(file_item, 'a', v1_version_id) + self._assert_is_object(file_item, b'a', v1_version_id) + + # listing looks good + file_info = file_item.info() + manifest_info = file_item.info(parms={'multipart-manifest': 'get'}) + obj_list = self.container.files(parms={'format': 'json'}) + for o in obj_list: + o.pop('last_modified') + # TODO: add symlink_path back in expected + o.pop('symlink_path') + expected = { + 'bytes': file_info['content_length'], + 'content_type': 'application/octet-stream', + 'hash': manifest_info['etag'], + 'name': 'my-slo-manifest', + 'slo_etag': file_info['etag'], + 'version_symlink': True, + } + self.assertEqual([expected], obj_list) + + # delete the newest manifest + file_item.delete() + + # expect to have 3 versions now, last one being a delete-marker + self.assertTotalVersions(self.container, 3) + + # restore version 1 + file_item.copy(self.container.name, file_item.name, + parms={'multipart-manifest': 'get', + 'version-id': v1_version_id}) + self.assertTotalVersions(self.container, 4) + self._assert_is_manifest(file_item, 'a') + self._assert_is_object(file_item, b'a') + + # versioned container listing still looks slo-like + file_info = file_item.info() + manifest_info = file_item.info(parms={'multipart-manifest': 'get'}) + obj_list = self.container.files(parms={'format': 'json'}) + 
for o in obj_list: + o.pop('last_modified') + # TODO: add symlink_path back in expected + o.pop('symlink_path') + expected = { + 'bytes': file_info['content_length'], + 'content_type': 'application/octet-stream', + 'hash': manifest_info['etag'], + 'name': 'my-slo-manifest', + 'slo_etag': file_info['etag'], + 'version_symlink': True, + } + self.assertEqual([expected], obj_list) + + status = file_item.conn.make_request( + 'DELETE', file_item.path, + hdrs={'Accept': 'application/json'}, + parms={'multipart-manifest': 'delete', + 'version-id': v1_version_id}) + body = file_item.conn.response.read() + self.assertEqual(status, 200, body) + resp = json.loads(body) + self.assertEqual(resp['Response Status'], '200 OK') + self.assertEqual(resp['Errors'], []) + self.assertEqual(resp['Number Deleted'], 2) + + self.assertTotalVersions(self.container, 3) + # Since we included the ?multipart-manifest=delete, segments + # got cleaned up and now the current version is busted + with self.assertRaises(ResponseError) as caught: + file_item.read() + self.assertEqual(409, caught.exception.status) + + def test_links_to_slo(self): + file_item, v1_version_id = self._create_manifest('a') + slo_info = file_item.info() + + symlink_name = Utils.create_name() + sym_tgt_header = quote(unquote('%s/%s' % ( + self.container.name, file_item.name))) + symlink = self.container.file(symlink_name) + + # symlink to the slo + sym_headers = {'X-Symlink-Target': sym_tgt_header} + symlink.write(b'', hdrs=sym_headers) + self.assertEqual(slo_info, symlink.info()) + + # hardlink to the slo + sym_headers['X-Symlink-Target-Etag'] = slo_info['x_manifest_etag'] + symlink.write(b'', hdrs=sym_headers) + self.assertEqual(slo_info, symlink.info()) + + +class TestVersionsLocationWithVersioning(TestObjectVersioningBase): + + # create a container with versioned writes + location_header_key = 'X-Versions-Location' + + def setUp(self): + super(TestVersionsLocationWithVersioning, self).setUp() + + if six.PY2: + # avoid getting a prefix that stops halfway through an encoded + # character + prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8") + else: + prefix = Utils.create_name()[:10] + + self.versions_container = self.env.account.container( + prefix + "-versions") + if not self.versions_container.create(): + raise ResponseError(self.conn.response) + + self.container = self.env.account.container(prefix + "-objs") + container_headers = { + self.location_header_key: quote(self.versions_container.name)} + if not self.container.create(hdrs=container_headers): + raise ResponseError(self.conn.response) + + def _prep_object_versions(self): + + # object with multiple versions + object_name = Utils.create_name() + obj = self.container.file(object_name) + + # v1 + obj.write(b'version1', hdrs={ + 'Content-Type': 'text/jibberish11', + 'ETag': md5(b'version1').hexdigest(), + }) + + # v2 + obj.write(b'version2', hdrs={ + 'Content-Type': 'text/jibberish12', + 'ETag': md5(b'version2').hexdigest(), + }) + + # v3 + obj.write(b'version3', hdrs={ + 'Content-Type': 'text/jibberish13', + 'ETag': md5(b'version3').hexdigest(), + }) + + return obj + + def test_list_with_versions_param(self): + obj = self._prep_object_versions() + obj_name = obj.name + + listing_parms = {'format': 'json', 'versions': None} + current_versions = self.container.files(parms=listing_parms) + self.assertEqual(1, len(current_versions)) + for pv in current_versions: + pv.pop('last_modified') + self.assertEqual(current_versions, [{ + 'name': obj_name, + 'bytes': 8, + 'content_type': 
'text/jibberish13',
+            'hash': md5(b'version3').hexdigest(),
+            'is_latest': True,
+            'version_id': 'null'
+        }])
+
+        prev_versions = self.versions_container.files(parms=listing_parms)
+        self.assertEqual(2, len(prev_versions))
+
+        for pv in prev_versions:
+            pv.pop('last_modified')
+            name = pv.pop('name')
+            self.assertTrue(name.startswith('%03x%s/' % (len(obj_name),
+                                                         obj_name)))
+
+        self.assertEqual(prev_versions, [{
+            'bytes': 8,
+            'content_type': 'text/jibberish11',
+            'hash': md5(b'version1').hexdigest(),
+            'is_latest': True,
+            'version_id': 'null',
+        }, {
+            'bytes': 8,
+            'content_type': 'text/jibberish12',
+            'hash': md5(b'version2').hexdigest(),
+            'is_latest': True,
+            'version_id': 'null'
+        }])
+
+    def test_delete_with_null_version_id(self):
+        obj = self._prep_object_versions()
+
+        # sanity
+        self.assertEqual(b'version3', obj.read())
+
+        obj.delete(parms={'version-id': 'null'})
+        if self.location_header_key == 'X-Versions-Location':
+            self.assertEqual(b'version2', obj.read())
+        else:
+            with self.assertRaises(ResponseError) as caught:
+                obj.read()
+            self.assertEqual(404, caught.exception.status)
+
+
+class TestHistoryLocationWithVersioning(TestVersionsLocationWithVersioning):
+
+    # create a container with versioned writes
+    location_header_key = 'X-History-Location'
+
+
+class TestVersioningAccountTempurl(TestObjectVersioningBase):
+    env = TestTempurlEnv
+    digest_name = 'sha1'
+
+    def setUp(self):
+        self.env.versions_header_key = 'X-Versions-Enabled'
+        super(TestVersioningAccountTempurl, self).setUp()
+        if self.env.tempurl_enabled is False:
+            raise SkipTest("TempURL not enabled")
+        elif self.env.tempurl_enabled is not True:
+            # just some sanity checking
+            raise Exception(
+                "Expected tempurl_enabled to be True/False, got %r" %
+                (self.env.tempurl_enabled,))
+
+        if self.digest_name not in cluster_info['tempurl'].get(
+                'allowed_digests', ['sha1']):
+            raise SkipTest("tempurl does not support %s signatures" %
+                           self.digest_name)
+
+        self.digest = getattr(hashlib, self.digest_name)
+        self.expires = int(time.time()) + 86400
+        self.obj_tempurl_parms = self.tempurl_parms(
+            'GET', self.expires, self.env.conn.make_path(self.env.obj.path),
+            self.env.tempurl_key)
+
+    def tempurl_parms(self, method, expires, path, key):
+        path = unquote(path)
+        if not six.PY2:
+            method = method.encode('utf8')
+            path = path.encode('utf8')
+            key = key.encode('utf8')
+        sig = hmac.new(
+            key,
+            b'%s\n%d\n%s' % (method, expires, path),
+            self.digest).hexdigest()
+        return {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
+
+    def test_PUT(self):
+        obj = self.env.obj
+
+        # give out a signature which allows a PUT to obj
+        expires = int(time.time()) + 86400
+        put_parms = self.tempurl_parms(
+            'PUT', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+
+        # try to overwrite existing object
+        resp = obj.write(b"version2", parms=put_parms,
+                         cfg={'no_auth_token': True},
+                         return_resp=True)
+        resp_headers = {
+            h.lower(): v for h, v in resp.getheaders()}
+        self.assertIn('x-object-version-id', resp_headers)
+
+    def test_GET_latest(self):
+        obj = self.env.obj
+
+        expires = int(time.time()) + 86400
+        get_parms = self.tempurl_parms(
+            'GET', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+
+        # get v1 object (i.e., version-id=null, no symlinks involved)
+        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
+        self.assert_status([200])
+        self.assertEqual(contents, b"obj contents")
+
+        # give out a signature which allows a PUT to obj
+        expires = int(time.time()) + 86400
+        put_parms = self.tempurl_parms(
+            'PUT', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+
+        # try to overwrite existing object
+        resp = obj.write(b"version2", parms=put_parms,
+                         cfg={'no_auth_token': True},
+                         return_resp=True)
+        resp_headers = {
+            h.lower(): v for h, v in resp.getheaders()}
+        self.assertIn('x-object-version-id', resp_headers)
+
+        # get v2 object
+        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
+        self.assert_status([200])
+        self.assertEqual(contents, b"version2")
+
+    def test_GET_version_id(self):
+        # N.B.: The test is not intended to imply the desired behavior
+        # of a tempurl GET with version-id. Currently version-id is simply
+        # ignored as the latest version is always returned. In the future,
+        # users should be able to create a tempurl with version-id as a
+        # parameter.
+
+        # overwrite object a couple more times
+        obj = self.env.obj
+        resp = obj.write(b"version2", return_resp=True)
+        v2_version_id = resp.getheader('x-object-version-id')
+        obj.write(b"version3!!!")
+
+        expires = int(time.time()) + 86400
+        get_parms = self.tempurl_parms(
+            'GET', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+        get_parms['version-id'] = v2_version_id
+
+        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
+        self.assert_status([200])
+        self.assertEqual(contents, b"version3!!!")
+
+
+class TestVersioningContainerTempurl(TestObjectVersioningBase):
+    env = TestContainerTempurlEnv
+    digest_name = 'sha1'
+
+    def setUp(self):
+        self.env.versions_header_key = 'X-Versions-Enabled'
+        super(TestVersioningContainerTempurl, self).setUp()
+        if self.env.tempurl_enabled is False:
+            raise SkipTest("TempURL not enabled")
+        elif self.env.tempurl_enabled is not True:
+            # just some sanity checking
+            raise Exception(
+                "Expected tempurl_enabled to be True/False, got %r" %
+                (self.env.tempurl_enabled,))
+
+        if self.digest_name not in cluster_info['tempurl'].get(
+                'allowed_digests', ['sha1']):
+            raise SkipTest("tempurl does not support %s signatures" %
+                           self.digest_name)
+
+        self.digest = getattr(hashlib, self.digest_name)
+        expires = int(time.time()) + 86400
+        sig = self.tempurl_sig(
+            'GET', expires, self.env.conn.make_path(self.env.obj.path),
+            self.env.tempurl_key)
+        self.obj_tempurl_parms = {'temp_url_sig': sig,
+                                  'temp_url_expires': str(expires)}
+
+    def tempurl_sig(self, method, expires, path, key):
+        path = unquote(path)
+        if not six.PY2:
+            method = method.encode('utf8')
+            path = path.encode('utf8')
+            key = key.encode('utf8')
+        return hmac.new(
+            key,
+            b'%s\n%d\n%s' % (method, expires, path),
+            self.digest).hexdigest()
+
+    def test_PUT(self):
+        obj = self.env.obj
+
+        # give out a signature which allows a PUT to obj
+        expires = int(time.time()) + 86400
+        sig = self.tempurl_sig(
+            'PUT', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+        put_parms = {'temp_url_sig': sig,
+                     'temp_url_expires': str(expires)}
+
+        # try to overwrite existing object
+        resp = obj.write(b"version2", parms=put_parms,
+                         cfg={'no_auth_token': True},
+                         return_resp=True)
+        resp_headers = {
+            h.lower(): v for h, v in resp.getheaders()}
+        self.assertIn('x-object-version-id', resp_headers)
+
+    def test_GET_latest(self):
+        obj = self.env.obj
+
+        expires = int(time.time()) + 86400
+        sig = self.tempurl_sig(
+            'GET', expires, self.env.conn.make_path(obj.path),
+            self.env.tempurl_key)
+        get_parms = {'temp_url_sig': sig,
+                     'temp_url_expires': str(expires)}
+
+        # get v1 object (i.e., version-id=null, no symlinks involved)
+        contents = 
obj.read(parms=get_parms, cfg={'no_auth_token': True}) + self.assert_status([200]) + self.assertEqual(contents, b"obj contents") + + # overwrite existing object + obj.write(b"version2") + + # get v2 object (reading from versions container) + # cross container tempurl does not work for container tempurl key + try: + obj.read(parms=get_parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 401) + else: + self.fail('request did not error') + try: + obj.info(parms=get_parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 401) + else: + self.fail('request did not error') diff --git a/test/probe/common.py b/test/probe/common.py index 9652c163bc..f494e26b34 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -31,12 +31,13 @@ from six.moves.http_client import HTTPConnection from six.moves.urllib.parse import urlparse from swiftclient import get_auth, head_account, client -from swift.common import internal_client -from swift.obj.diskfile import get_data_dir +from swift.common import internal_client, direct_client +from swift.common.direct_client import DirectClientException from swift.common.ring import Ring from swift.common.utils import readconf, renamer, rsync_module_interpolation from swift.common.manager import Manager from swift.common.storage_policy import POLICIES, EC_POLICY, REPL_POLICY +from swift.obj.diskfile import get_data_dir from test.probe import CHECK_SERVER_TIMEOUT, VALIDATE_RSYNC @@ -556,6 +557,41 @@ class ReplProbeTest(ProbeTest): obj_required_devices = 4 policy_requirements = {'policy_type': REPL_POLICY} + def direct_container_op(self, func, account=None, container=None, + expect_failure=False): + account = account if account else self.account + container = container if container else self.container_to_shard + cpart, cnodes = self.container_ring.get_nodes(account, container) + unexpected_responses = [] + results = {} + for cnode in cnodes: + try: + results[cnode['id']] = func(cnode, cpart, account, container) + except DirectClientException as err: + if not expect_failure: + unexpected_responses.append((cnode, err)) + else: + if expect_failure: + unexpected_responses.append((cnode, 'success')) + if unexpected_responses: + self.fail('Unexpected responses: %s' % unexpected_responses) + return results + + def direct_delete_container(self, account=None, container=None, + expect_failure=False): + self.direct_container_op(direct_client.direct_delete_container, + account, container, expect_failure) + + def direct_head_container(self, account=None, container=None, + expect_failure=False): + return self.direct_container_op(direct_client.direct_head_container, + account, container, expect_failure) + + def direct_get_container(self, account=None, container=None, + expect_failure=False): + return self.direct_container_op(direct_client.direct_get_container, + account, container, expect_failure) + class ECProbeTest(ProbeTest): diff --git a/test/probe/test_container_sync.py b/test/probe/test_container_sync.py index 96f63065c6..cf0c2e9dae 100644 --- a/test/probe/test_container_sync.py +++ b/test/probe/test_container_sync.py @@ -739,5 +739,155 @@ class TestContainerSyncAndSymlink(BaseTestContainerSync): self.assertEqual(target_body, actual_target_body) +class TestContainerSyncAndVersioning(BaseTestContainerSync): + + def setUp(self): + super(TestContainerSyncAndVersioning, self).setUp() + if 'object_versioning' not in self.info: + raise unittest.SkipTest("Object Versioning not enabled") + + def 
_test_syncing(self, source_container, dest_container): + # test syncing and versioning + object_name = 'object-%s' % uuid.uuid4() + client.put_object(self.url, self.token, source_container, object_name, + 'version1') + + # cycle container-sync + Manager(['container-sync']).once() + + # overwrite source + client.put_object(self.url, self.token, source_container, object_name, + 'version2') + + # cycle container-sync + Manager(['container-sync']).once() + + resp_headers, listing = client.get_container( + self.url, self.token, dest_container, + query_string='versions') + + self.assertEqual(2, len(listing)) + + def test_enable_versioning_while_syncing_container(self): + + source_container, dest_container = self._setup_synced_containers() + version_hdr = {'X-Versions-Enabled': 'true'} + + # Cannot enable versioning on source container + with self.assertRaises(ClientException) as cm: + client.post_container(self.url, self.token, source_container, + headers=version_hdr) + self.assertEqual(400, cm.exception.http_status) # sanity check + self.assertEqual(b'Cannot enable object versioning on a container ' + b'configured as source of container syncing.', + cm.exception.http_response_content) + + # but destination is ok! + client.post_container(self.url, self.token, dest_container, + headers=version_hdr) + + headers = client.head_container(self.url, self.token, + dest_container) + self.assertEqual('True', headers.get('x-versions-enabled')) + self.assertEqual('secret', headers.get('x-container-sync-key')) + + self._test_syncing(source_container, dest_container) + + def test_enable_syncing_while_versioned(self): + source_container, dest_container = self._setup_synced_containers() + + container_name = 'versioned-%s' % uuid.uuid4() + version_hdr = {'X-Versions-Enabled': 'true'} + + client.put_container(self.url, self.token, container_name, + headers=version_hdr) + + # fails to configure as a container-sync source + sync_headers = {'X-Container-Sync-Key': 'secret'} + sync_to = '//%s/%s/%s/%s' % (self.realm, self.cluster, self.account, + dest_container) + sync_headers['X-Container-Sync-To'] = sync_to + with self.assertRaises(ClientException) as cm: + client.post_container(self.url, self.token, container_name, + headers=sync_headers) + self.assertEqual(400, cm.exception.http_status) # sanity check + + # but works if it's just a container-sync destination + sync_headers = {'X-Container-Sync-Key': 'secret'} + client.post_container(self.url, self.token, container_name, + headers=sync_headers) + + headers = client.head_container(self.url, self.token, + container_name) + self.assertEqual('True', headers.get('x-versions-enabled')) + self.assertEqual('secret', headers.get('x-container-sync-key')) + + # update source header to sync to versioned container + source_headers = {'X-Container-Sync-Key': 'secret'} + sync_to = '//%s/%s/%s/%s' % (self.realm, self.cluster, self.account, + container_name) + source_headers['X-Container-Sync-To'] = sync_to + client.post_container(self.url, self.token, source_container, + headers=source_headers) + + self._test_syncing(source_container, container_name) + + def test_skip_sync_when_misconfigured(self): + source_container, dest_container = self._setup_synced_containers() + + container_name = 'versioned-%s' % uuid.uuid4() + version_hdr = {'X-Versions-Enabled': 'true'} + + client.put_container(self.url, self.token, container_name, + headers=version_hdr) + + # some sanity checks + object_name = 'object-%s' % uuid.uuid4() + client.put_object(self.url, self.token, container_name, 
object_name, + 'version1') + client.put_object(self.url, self.token, container_name, object_name, + 'version2') + + resp_headers, listing = client.get_container( + self.url, self.token, container_name, + query_string='versions') + + self.assertEqual(2, len(listing)) + + sync_headers = {} + sync_to = '//%s/%s/%s/%s' % (self.realm, self.cluster, self.account, + dest_container) + sync_headers['X-Container-Sync-To'] = sync_to + sync_headers['X-Container-Sync-Key'] = 'secret' + + # use internal client to set container-sync headers + # since it doesn't have container_sync middleware in pipeline + # allowing us to bypass checks + int_client = self.make_internal_client() + # TODO: what a terrible hack, maybe we need to extend internal + # client to allow caller to become a swift_owner?? + int_client.app.app.app.app.swift_owner_headers = [] + int_client.set_container_metadata(self.account, container_name, + metadata=sync_headers) + + headers = client.head_container(self.url, self.token, + container_name) + + # This should never happen, but if it does because of eventual + # consistency or a messed up pipeline, container-sync should + # skip syncing container. + self.assertEqual('True', headers.get('x-versions-enabled')) + self.assertEqual('secret', headers.get('x-container-sync-key')) + self.assertEqual(sync_to, headers.get('x-container-sync-to')) + + # cycle container-sync + Manager(['container-sync']).once() + + with self.assertRaises(ClientException) as cm: + client.get_object( + self.url, self.token, dest_container, object_name) + self.assertEqual(404, cm.exception.http_status) # sanity check + + if __name__ == "__main__": unittest.main() diff --git a/test/probe/test_object_versioning.py b/test/probe/test_object_versioning.py new file mode 100644 index 0000000000..147cf84f4f --- /dev/null +++ b/test/probe/test_object_versioning.py @@ -0,0 +1,233 @@ +#!/usr/bin/python -u +# Copyright (c) 2010-2012 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
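+
+# Probe tests for the object versioning API: account-level accounting for
+# versioned containers, and recovery behaviour when either the visible
+# container or its hidden versions container is deleted directly on the
+# backend.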
+ +from unittest import main + +from swiftclient import client + +from swift.common.request_helpers import get_reserved_name + +from test.probe.common import ReplProbeTest + + +class TestObjectVersioning(ReplProbeTest): + + def _assert_account_level(self, container_name, hdr_cont_count, + hdr_obj_count, hdr_bytes, cont_count, + cont_bytes): + + headers, containers = client.get_account(self.url, self.token) + self.assertEqual(hdr_cont_count, headers['x-account-container-count']) + self.assertEqual(hdr_obj_count, headers['x-account-object-count']) + self.assertEqual(hdr_bytes, headers['x-account-bytes-used']) + self.assertEqual(len(containers), 1) + container = containers[0] + self.assertEqual(container_name, container['name']) + self.assertEqual(cont_count, container['count']) + self.assertEqual(cont_bytes, container['bytes']) + + def test_account_listing(self): + versions_header_key = 'X-Versions-Enabled' + + # Create container1 + container_name = 'container1' + obj_name = 'object1' + client.put_container(self.url, self.token, container_name) + + # Assert account level sees it + self._assert_account_level( + container_name, + hdr_cont_count='1', + hdr_obj_count='0', + hdr_bytes='0', + cont_count=0, + cont_bytes=0) + + # Enable versioning + hdrs = {versions_header_key: 'True'} + client.post_container(self.url, self.token, container_name, hdrs) + + # write multiple versions of same obj + client.put_object(self.url, self.token, container_name, obj_name, + 'version1') + client.put_object(self.url, self.token, container_name, obj_name, + 'version2') + + # Assert account level doesn't see object data yet, but it + # does see the update for the hidden container + self._assert_account_level( + container_name, + hdr_cont_count='2', + hdr_obj_count='0', + hdr_bytes='0', + cont_count=0, + cont_bytes=0) + + # Get to final state + self.get_to_final_state() + + # Assert account level now sees updated values + # N.B: Note difference in values between header and container listing + # header object count is counting both symlink + object versions + # listing count is counting only symlink (in primary container) + self._assert_account_level( + container_name, + hdr_cont_count='2', + hdr_obj_count='3', + hdr_bytes='16', + cont_count=1, + cont_bytes=16) + + client.delete_object(self.url, self.token, container_name, obj_name) + _headers, current_versions = client.get_container( + self.url, self.token, container_name) + self.assertEqual(len(current_versions), 0) + _headers, all_versions = client.get_container( + self.url, self.token, container_name, query_string='versions') + self.assertEqual(len(all_versions), 3) + + # directly delete primary container to leave an orphan hidden + # container + self.direct_delete_container(container=container_name) + + # Get to final state + self.get_to_final_state() + + # The container count decreases, as well as object count. But bytes + # do not. 
The discrepancy between header object count, container + # object count and bytes should indicate orphan hidden container is + # still around consuming storage + self._assert_account_level( + container_name, + hdr_cont_count='1', + hdr_obj_count='3', + hdr_bytes='16', + cont_count=0, + cont_bytes=16) + + # Can't HEAD or list anything, though + with self.assertRaises(client.ClientException) as caught: + client.head_container(self.url, self.token, container_name) + self.assertEqual(caught.exception.http_status, 404) + with self.assertRaises(client.ClientException) as caught: + client.get_container(self.url, self.token, container_name) + self.assertEqual(caught.exception.http_status, 404) + with self.assertRaises(client.ClientException) as caught: + client.get_container(self.url, self.token, container_name, + query_string='versions') + self.assertEqual(caught.exception.http_status, 404) + with self.assertRaises(client.ClientException) as caught: + client.get_object( + self.url, self.token, container_name, all_versions[1]['name'], + query_string='version-id=%s' % all_versions[1]['version_id']) + # A little funny -- maybe this should 404 instead? + self.assertEqual(caught.exception.http_status, 400) + + # Fix isn't too bad -- just make the container again! + client.put_container(self.url, self.token, container_name) + _headers, current_versions = client.get_container( + self.url, self.token, container_name) + self.assertEqual(len(current_versions), 0) + _headers, all_versions = client.get_container( + self.url, self.token, container_name, query_string='versions') + self.assertEqual(len(all_versions), 3) + + # ... but to actually *access* the versions, you have to enable + # versioning again + with self.assertRaises(client.ClientException) as caught: + client.get_object( + self.url, self.token, container_name, all_versions[1]['name'], + query_string='version-id=%s' % all_versions[1]['version_id']) + self.assertEqual(caught.exception.http_status, 400) + self.assertIn(b'version-aware operations require', + caught.exception.http_response_content) + client.post_container(self.url, self.token, container_name, + headers={'X-Versions-Enabled': 'true'}) + client.get_object( + self.url, self.token, container_name, all_versions[1]['name'], + query_string='version-id=%s' % all_versions[1]['version_id']) + + def test_missing_versions_container(self): + versions_header_key = 'X-Versions-Enabled' + + # Create container1 + container_name = 'container1' + obj_name = 'object1' + client.put_container(self.url, self.token, container_name) + + # Write some data + client.put_object(self.url, self.token, container_name, obj_name, + b'null version') + + # Enable versioning + hdrs = {versions_header_key: 'True'} + client.post_container(self.url, self.token, container_name, hdrs) + + # But directly delete hidden container to leave an orphan primary + # container + self.direct_delete_container(container=get_reserved_name( + 'versions', container_name)) + + # Could be worse; we can still list versions and GET data + _headers, all_versions = client.get_container( + self.url, self.token, container_name, query_string='versions') + self.assertEqual(len(all_versions), 1) + self.assertEqual(all_versions[0]['name'], obj_name) + self.assertEqual(all_versions[0]['version_id'], 'null') + + _headers, data = client.get_object( + self.url, self.token, container_name, obj_name) + self.assertEqual(data, b'null version') + + _headers, data = client.get_object( + self.url, self.token, container_name, obj_name, + 
query_string='version-id=null') + self.assertEqual(data, b'null version') + + # But most any write is going to fail + with self.assertRaises(client.ClientException) as caught: + client.put_object(self.url, self.token, container_name, obj_name, + b'new version') + self.assertEqual(caught.exception.http_status, 500) + with self.assertRaises(client.ClientException) as caught: + client.delete_object(self.url, self.token, container_name, + obj_name) + self.assertEqual(caught.exception.http_status, 500) + + # Version-aware delete can work, though! + client.delete_object(self.url, self.token, container_name, obj_name, + query_string='version-id=null') + + # Re-enabling versioning should square us + hdrs = {versions_header_key: 'True'} + client.post_container(self.url, self.token, container_name, hdrs) + + client.put_object(self.url, self.token, container_name, obj_name, + b'new version') + + _headers, all_versions = client.get_container( + self.url, self.token, container_name, query_string='versions') + self.assertEqual(len(all_versions), 1) + self.assertEqual(all_versions[0]['name'], obj_name) + self.assertNotEqual(all_versions[0]['version_id'], 'null') + + _headers, data = client.get_object( + self.url, self.token, container_name, obj_name) + self.assertEqual(data, b'new version') + + +if __name__ == '__main__': + main() diff --git a/test/probe/test_sharder.py b/test/probe/test_sharder.py index 4a0fb386ee..3d06b0bc18 100644 --- a/test/probe/test_sharder.py +++ b/test/probe/test_sharder.py @@ -25,7 +25,6 @@ from six.moves.urllib.parse import quote from swift.common import direct_client, utils from swift.common.manager import Manager from swift.common.memcached import MemcacheRing -from swift.common.direct_client import DirectClientException from swift.common.utils import ShardRange, parse_db_filename, get_db_files, \ quorum_size, config_true_value, Timestamp from swift.container.backend import ContainerBroker, UNSHARDED, SHARDING @@ -102,10 +101,12 @@ class BaseTestContainerSharding(ReplProbeTest): # perform checks for skipping test before starting services self._maybe_skip_test() - def _make_object_names(self, number): - return ['obj%s%04d' % (self.DELIM, x) for x in range(number)] + def _make_object_names(self, number, start=0): + return ['obj%s%04d' % (self.DELIM, x) + for x in range(start, start + number)] def _setup_container_name(self): + # Container where we're PUTting objects self.container_name = 'container%s%s' % (self.DELIM, uuid.uuid4()) def setUp(self): @@ -116,13 +117,18 @@ class BaseTestContainerSharding(ReplProbeTest): _, self.admin_token = get_auth( 'http://127.0.0.1:8080/auth/v1.0', 'admin:admin', 'admin') self._setup_container_name() - self.brain = BrainSplitter(self.url, self.token, self.container_name, - None, 'container') - self.brain.put_container(policy_index=int(self.policy)) + self.init_brain(self.container_name) self.sharders = Manager(['container-sharder']) self.internal_client = self.make_internal_client() self.memcache = MemcacheRing(['127.0.0.1:11211']) + def init_brain(self, container_name): + self.container_to_shard = container_name + self.brain = BrainSplitter( + self.url, self.token, self.container_to_shard, + None, 'container') + self.brain.put_container(policy_index=int(self.policy)) + def stop_container_servers(self, node_numbers=None): if node_numbers: ipports = [] @@ -139,45 +145,35 @@ class BaseTestContainerSharding(ReplProbeTest): wait_for_server_to_hangup(ipport) def put_objects(self, obj_names, contents=None): + results = [] for obj in obj_names: + 
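+ # on a container with versioning enabled, each PUT returns an
+ # x-object-version-id header; keep it so delete_objects() below can
+ # target that exact version with a version-id query parameter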
rdict = {} client.put_object(self.url, token=self.token, container=self.container_name, name=obj, - contents=contents) + contents=contents, response_dict=rdict) + results.append((obj, rdict['headers'].get('x-object-version-id'))) + return results - def delete_objects(self, obj_names): - for obj in obj_names: - client.delete_object( - self.url, self.token, self.container_name, obj) + def delete_objects(self, obj_names_and_versions): + for obj in obj_names_and_versions: + if isinstance(obj, tuple): + obj, version = obj + client.delete_object( + self.url, self.token, self.container_name, obj, + query_string='version-id=%s' % version) + else: + client.delete_object( + self.url, self.token, self.container_name, obj) def get_container_shard_ranges(self, account=None, container=None): account = account if account else self.account - container = container if container else self.container_name + container = container if container else self.container_to_shard path = self.internal_client.make_path(account, container) resp = self.internal_client.make_request( 'GET', path + '?format=json', {'X-Backend-Record-Type': 'shard'}, [200]) return [ShardRange.from_dict(sr) for sr in json.loads(resp.body)] - def direct_container_op(self, func, account=None, container=None, - expect_failure=False): - account = account if account else self.account - container = container if container else self.container_name - cpart, cnodes = self.container_ring.get_nodes(account, container) - unexpected_responses = [] - results = {} - for cnode in cnodes: - try: - results[cnode['id']] = func(cnode, cpart, account, container) - except DirectClientException as err: - if not expect_failure: - unexpected_responses.append((cnode, err)) - else: - if expect_failure: - unexpected_responses.append((cnode, 'success')) - if unexpected_responses: - self.fail('Unexpected responses: %s' % unexpected_responses) - return results - def direct_get_container_shard_ranges(self, account=None, container=None, expect_failure=False): collector = ShardCollector() @@ -185,21 +181,6 @@ class BaseTestContainerSharding(ReplProbeTest): collector, account, container, expect_failure) return collector.ranges - def direct_delete_container(self, account=None, container=None, - expect_failure=False): - self.direct_container_op(direct_client.direct_delete_container, - account, container, expect_failure) - - def direct_head_container(self, account=None, container=None, - expect_failure=False): - return self.direct_container_op(direct_client.direct_head_container, - account, container, expect_failure) - - def direct_get_container(self, account=None, container=None, - expect_failure=False): - return self.direct_container_op(direct_client.direct_get_container, - account, container, expect_failure) - def get_storage_dir(self, part, node, account=None, container=None): account = account or self.brain.account container = container or self.container_name @@ -371,7 +352,7 @@ class BaseTestContainerSharding(ReplProbeTest): def assert_container_state(self, node, expected_state, num_shard_ranges): headers, shard_ranges = direct_client.direct_get_container( - node, self.brain.part, self.account, self.container_name, + node, self.brain.part, self.account, self.container_to_shard, headers={'X-Backend-Record-Type': 'shard'}) self.assertEqual(num_shard_ranges, len(shard_ranges)) self.assertIn('X-Backend-Sharding-State', headers) @@ -560,11 +541,11 @@ class TestContainerShardingFunkyNames(TestContainerShardingNonUTF8): class TestContainerShardingUTF8(TestContainerShardingNonUTF8): - 
def _make_object_names(self, number): + def _make_object_names(self, number, start=0): # override default with names that include non-ascii chars name_length = self.cluster_info['swift']['max_object_name_length'] obj_names = [] - for x in range(number): + for x in range(start, start + number): name = (u'obj-\u00e4\u00ea\u00ec\u00f2\u00fb\u1234-%04d' % x) name = name.encode('utf8').ljust(name_length, b'o') if not six.PY2: @@ -583,6 +564,215 @@ class TestContainerShardingUTF8(TestContainerShardingNonUTF8): self.container_name = self.container_name.decode('utf8') +class TestContainerShardingObjectVersioning(BaseTestContainerSharding): + def _maybe_skip_test(self): + super(TestContainerShardingObjectVersioning, self)._maybe_skip_test() + try: + vw_config = utils.readconf(self.configs['proxy-server'], + 'filter:versioned_writes') + except ValueError: + raise SkipTest('No [filter:versioned_writes] section found in ' + 'proxy-server configs') + allow_object_versioning = config_true_value( + vw_config.get('allow_object_versioning', False)) + if not allow_object_versioning: + raise SkipTest('allow_object_versioning must be true ' + 'in all versioned_writes configs') + + def init_brain(self, container_name): + client.put_container(self.url, self.token, container_name, headers={ + 'X-Storage-Policy': self.policy.name, + 'X-Versions-Enabled': 'true', + }) + self.container_to_shard = '\x00versions\x00' + container_name + self.brain = BrainSplitter( + self.url, self.token, self.container_to_shard, + None, 'container') + + def test_sharding_listing(self): + # verify parameterised listing of a container during sharding + all_obj_names = self._make_object_names(3) * self.max_shard_size + all_obj_names.extend(self._make_object_names(self.max_shard_size, + start=3)) + obj_names = all_obj_names[::2] + obj_names_and_versions = self.put_objects(obj_names) + + def sort_key(obj_and_ver): + obj, ver = obj_and_ver + return obj, ~Timestamp(ver) + + obj_names_and_versions.sort(key=sort_key) + # choose some names approx in middle of each expected shard range + markers = [ + obj_names_and_versions[i] + for i in range(self.max_shard_size // 4, + 2 * self.max_shard_size, + self.max_shard_size // 2)] + + def check_listing(objects, **params): + params['versions'] = '' + qs = '&'.join('%s=%s' % param for param in params.items()) + headers, listing = client.get_container( + self.url, self.token, self.container_name, query_string=qs) + listing = [(x['name'].encode('utf-8') if six.PY2 else x['name'], + x['version_id']) + for x in listing] + if params.get('reverse'): + marker = ( + params.get('marker', ShardRange.MAX), + ~Timestamp(params['version_marker']) + if 'version_marker' in params else ~Timestamp('0'), + ) + end_marker = ( + params.get('end_marker', ShardRange.MIN), + Timestamp('0'), + ) + expected = [o for o in objects + if end_marker < sort_key(o) < marker] + expected.reverse() + else: + marker = ( + params.get('marker', ShardRange.MIN), + ~Timestamp(params['version_marker']) + if 'version_marker' in params else Timestamp('0'), + ) + end_marker = ( + params.get('end_marker', ShardRange.MAX), + ~Timestamp('0'), + ) + expected = [o for o in objects + if marker < sort_key(o) < end_marker] + if 'limit' in params: + expected = expected[:params['limit']] + self.assertEqual(expected, listing) + + def check_listing_fails(exp_status, **params): + params['versions'] = '' + qs = '&'.join('%s=%s' % param for param in params.items()) + with self.assertRaises(ClientException) as cm: + client.get_container( + self.url, self.token, 
self.container_name, query_string=qs) + self.assertEqual(exp_status, cm.exception.http_status) + return cm.exception + + def do_listing_checks(objects): + check_listing(objects) + check_listing(objects, + marker=markers[0][0], version_marker=markers[0][1]) + check_listing(objects, + marker=markers[0][0], version_marker=markers[0][1], + limit=self.max_shard_size // 10) + check_listing(objects, + marker=markers[0][0], version_marker=markers[0][1], + limit=self.max_shard_size // 4) + check_listing(objects, + marker=markers[0][0], version_marker=markers[0][1], + limit=self.max_shard_size // 2) + check_listing(objects, + marker=markers[1][0], version_marker=markers[1][1]) + check_listing(objects, + marker=markers[1][0], version_marker=markers[1][1], + limit=self.max_shard_size // 10) + check_listing(objects, + marker=markers[2][0], version_marker=markers[2][1], + limit=self.max_shard_size // 4) + check_listing(objects, + marker=markers[2][0], version_marker=markers[2][1], + limit=self.max_shard_size // 2) + check_listing(objects, reverse=True) + check_listing(objects, reverse=True, + marker=markers[1][0], version_marker=markers[1][1]) + + check_listing(objects, prefix='obj') + check_listing([], prefix='zzz') + # delimiter + headers, listing = client.get_container( + self.url, self.token, self.container_name, + query_string='delimiter=-') + self.assertEqual([{'subdir': 'obj-'}], listing) + headers, listing = client.get_container( + self.url, self.token, self.container_name, + query_string='delimiter=j-') + self.assertEqual([{'subdir': 'obj-'}], listing) + + limit = self.cluster_info['swift']['container_listing_limit'] + exc = check_listing_fails(412, limit=limit + 1) + self.assertIn(b'Maximum limit', exc.http_response_content) + exc = check_listing_fails(400, delimiter='%ff') + self.assertIn(b'not valid UTF-8', exc.http_response_content) + + # sanity checks + do_listing_checks(obj_names_and_versions) + + # Shard the container. 
Use an internal_client so we get an implicit + # X-Backend-Allow-Reserved-Names header + self.internal_client.set_container_metadata( + self.account, self.container_to_shard, { + 'X-Container-Sysmeta-Sharding': 'True', + }) + # First run the 'leader' in charge of scanning, which finds all shard + # ranges and cleaves first two + self.sharders.once(number=self.brain.node_numbers[0], + additional_args='--partitions=%s' % self.brain.part) + # Then run sharder on other nodes which will also cleave first two + # shard ranges + for n in self.brain.node_numbers[1:]: + self.sharders.once( + number=n, additional_args='--partitions=%s' % self.brain.part) + + # sanity check shard range states + for node in self.brain.nodes: + self.assert_container_state(node, 'sharding', 4) + shard_ranges = self.get_container_shard_ranges() + self.assertLengthEqual(shard_ranges, 4) + self.assert_shard_range_state(ShardRange.CLEAVED, shard_ranges[:2]) + self.assert_shard_range_state(ShardRange.CREATED, shard_ranges[2:]) + + self.assert_container_delete_fails() + self.assert_container_has_shard_sysmeta() # confirm no sysmeta deleted + self.assert_container_post_ok('sharding') + do_listing_checks(obj_names_and_versions) + + # put some new objects spread through entire namespace + new_obj_names = all_obj_names[1::4] + new_obj_names_and_versions = self.put_objects(new_obj_names) + + # new objects that fell into the first two cleaved shard ranges are + # reported in listing, new objects in the yet-to-be-cleaved shard + # ranges are not yet included in listing + exp_obj_names_and_versions = [ + o for o in obj_names_and_versions + new_obj_names_and_versions + if '\x00' + o[0] <= shard_ranges[1].upper] + exp_obj_names_and_versions += [ + o for o in obj_names_and_versions + if '\x00' + o[0] > shard_ranges[1].upper] + exp_obj_names_and_versions.sort(key=sort_key) + do_listing_checks(exp_obj_names_and_versions) + + # run all the sharders again and the last two shard ranges get cleaved + self.sharders.once(additional_args='--partitions=%s' % self.brain.part) + for node in self.brain.nodes: + self.assert_container_state(node, 'sharded', 4) + shard_ranges = self.get_container_shard_ranges() + self.assert_shard_range_state(ShardRange.ACTIVE, shard_ranges) + + exp_obj_names_and_versions = \ + obj_names_and_versions + new_obj_names_and_versions + exp_obj_names_and_versions.sort(key=sort_key) + do_listing_checks(exp_obj_names_and_versions) + self.assert_container_delete_fails() + self.assert_container_has_shard_sysmeta() + self.assert_container_post_ok('sharded') + + # delete original objects + self.delete_objects(obj_names_and_versions) + new_obj_names_and_versions.sort(key=sort_key) + do_listing_checks(new_obj_names_and_versions) + self.assert_container_delete_fails() + self.assert_container_has_shard_sysmeta() + self.assert_container_post_ok('sharded') + + class TestContainerSharding(BaseTestContainerSharding): def _test_sharded_listing(self, run_replicators=False): obj_names = self._make_object_names(self.max_shard_size) diff --git a/test/unit/common/middleware/test_object_versioning.py b/test/unit/common/middleware/test_object_versioning.py new file mode 100644 index 0000000000..d56674d43d --- /dev/null +++ b/test/unit/common/middleware/test_object_versioning.py @@ -0,0 +1,3214 @@ +# Copyright (c) 2019 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +import json +import os +import time +import mock +import unittest +from hashlib import md5 +import six +from six.moves import urllib +from swift.common import swob, utils +from swift.common.middleware import versioned_writes, copy, symlink, \ + listing_formats +from swift.common.swob import Request, wsgi_quote, str_to_wsgi +from swift.common.middleware.symlink import TGT_OBJ_SYSMETA_SYMLINK_HDR, \ + ALLOW_RESERVED_NAMES, SYMLOOP_EXTEND +from swift.common.middleware.versioned_writes.object_versioning import \ + SYSMETA_VERSIONS_CONT, SYSMETA_VERSIONS_ENABLED, \ + SYSMETA_VERSIONS_SYMLINK, DELETE_MARKER_CONTENT_TYPE +from swift.common.request_helpers import get_reserved_name +from swift.common.storage_policy import StoragePolicy +from swift.proxy.controllers.base import get_cache_key +from test.unit import patch_policies, FakeMemcache, make_timestamp_iter +from test.unit.common.middleware.helpers import FakeSwift + + +def local_tz(func): + ''' + Decorator to change the timezone when running a test. + + This uses the Eastern Time Zone definition from the time module's docs. + Note that the timezone affects things like time.time() and time.mktime(). + ''' + @functools.wraps(func) + def wrapper(*args, **kwargs): + tz = os.environ.get('TZ', '') + try: + os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0' + time.tzset() + return func(*args, **kwargs) + finally: + os.environ['TZ'] = tz + time.tzset() + return wrapper + + +class ObjectVersioningBaseTestCase(unittest.TestCase): + def setUp(self): + self.app = FakeSwift() + conf = {} + self.sym = symlink.filter_factory(conf)(self.app) + self.sym.logger = self.app.logger + self.ov = versioned_writes.object_versioning.\ + ObjectVersioningMiddleware(self.sym, conf) + self.ov.logger = self.app.logger + self.cp = copy.filter_factory({})(self.ov) + self.lf = listing_formats.ListingFilter(self.cp, {}, self.app.logger) + + self.ts = make_timestamp_iter() + cont_cache_version_on = {'sysmeta': { + 'versions-container': self.build_container_name('c'), + 'versions-enabled': 'true'}} + self.cache_version_on = FakeMemcache() + self.cache_version_on.set(get_cache_key('a'), {'status': 200}) + self.cache_version_on.set(get_cache_key('a', 'c'), + cont_cache_version_on) + self.cache_version_on.set( + get_cache_key('a', self.build_container_name('c')), + {'status': 200}) + + self.cache_version_on_but_busted = FakeMemcache() + self.cache_version_on_but_busted.set(get_cache_key('a'), + {'status': 200}) + self.cache_version_on_but_busted.set(get_cache_key('a', 'c'), + cont_cache_version_on) + self.cache_version_on_but_busted.set( + get_cache_key('a', self.build_container_name('c')), + {'status': 404}) + + cont_cache_version_off = {'sysmeta': { + 'versions-container': self.build_container_name('c'), + 'versions-enabled': 'false'}} + self.cache_version_off = FakeMemcache() + self.cache_version_off.set(get_cache_key('a'), {'status': 200}) + self.cache_version_off.set(get_cache_key('a', 'c'), + cont_cache_version_off) + self.cache_version_off.set( + get_cache_key('a', self.build_container_name('c')), + {'status': 200}) + + def tearDown(self): + 
self.assertEqual(self.app.unclosed_requests, {}) + + def call_ov(self, req): + self.authorized = [] + + def authorize(req): + self.authorized.append(req) + + if 'swift.authorize' not in req.environ: + req.environ['swift.authorize'] = authorize + + req.headers.setdefault("User-Agent", "Marula Kruger") + + status = [None] + headers = [None] + + def start_response(s, h, ei=None): + status[0] = s + headers[0] = h + + body_iter = self.lf(req.environ, start_response) + with utils.closing_if_possible(body_iter): + body = b''.join(body_iter) + + return status[0], headers[0], body + + def assertRequestEqual(self, req, other): + self.assertEqual(req.method, other.method) + self.assertEqual(req.path, other.path) + + def str_to_wsgi(self, native_str): + if six.PY2 and isinstance(native_str, six.text_type): + native_str = native_str.encode('utf8') + return str_to_wsgi(native_str) + + def build_container_name(self, cont): + return get_reserved_name('versions', cont) + + def build_object_name(self, obj, version): + return get_reserved_name(obj, version) + + def build_symlink_path(self, cont, obj, version): + cont = self.build_container_name(cont) + obj = self.build_object_name(obj, version) + return wsgi_quote(self.str_to_wsgi("%s/%s" % (cont, obj))) + + def build_versions_path(self, acc='a', cont='c', obj=None, version=None): + cont = self.build_container_name(cont) + if not obj: + return self.str_to_wsgi("/v1/%s/%s" % (acc, cont)) + obj = self.build_object_name(obj, version) + return self.str_to_wsgi("/v1/%s/%s/%s" % (acc, cont, obj)) + + +class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): + + def test_put_container(self): + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('HEAD', '/v1/a/c', swob.HTTPOk, {}, '') + self.app.register('PUT', self.build_versions_path(), swob.HTTPOk, {}, + 'passed') + self.app.register('PUT', '/v1/a/c', swob.HTTPAccepted, {}, 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'PUT'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '202 Accepted') + + # check for sysmeta header + calls = self.app.calls_with_headers + self.assertEqual(4, len(calls)) + method, path, headers = calls[3] + self.assertEqual('PUT', method) + self.assertEqual('/v1/a/c', path) + self.assertIn(SYSMETA_VERSIONS_CONT, headers) + self.assertEqual(headers[SYSMETA_VERSIONS_CONT], + wsgi_quote(self.str_to_wsgi( + self.build_container_name('c')))) + self.assertIn(SYSMETA_VERSIONS_ENABLED, headers) + self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + @patch_policies([StoragePolicy(0, 'zero', True), + StoragePolicy(1, 'one', False)]) + def test_same_policy_as_existing_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('GET', '/v1/a/c', swob.HTTPOk, { + 'x-backend-storage-policy-index': 1}, '') + self.app.register('PUT', self.build_versions_path(), swob.HTTPOk, {}, + 'passed') + self.app.register('POST', '/v1/a/c', swob.HTTPNoContent, {}, '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + + # check for sysmeta header + calls = self.app.calls_with_headers + self.assertEqual(4, len(calls)) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + # 
request to create versions container + method, path, headers = calls[2] + self.assertEqual('PUT', method) + self.assertEqual(self.build_versions_path(), path) + self.assertIn('X-Storage-Policy', headers) + self.assertEqual('one', headers['X-Storage-Policy']) + + # request to enable versioning on primary container + method, path, headers = calls[3] + self.assertEqual('POST', method) + self.assertEqual('/v1/a/c', path) + self.assertIn(SYSMETA_VERSIONS_CONT, headers) + self.assertEqual(headers[SYSMETA_VERSIONS_CONT], + wsgi_quote(self.str_to_wsgi( + self.build_container_name('c')))) + self.assertIn(SYSMETA_VERSIONS_ENABLED, headers) + self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True') + + @patch_policies([StoragePolicy(0, 'zero', True), + StoragePolicy(1, 'one', False, is_deprecated=True)]) + def test_existing_container_has_deprecated_policy(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('GET', '/v1/a/c', swob.HTTPOk, { + 'x-backend-storage-policy-index': 1}, '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(body, + b'Cannot enable object versioning on a container ' + b'that uses a deprecated storage policy.') + + calls = self.app.calls_with_headers + self.assertEqual(2, len(calls)) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + @patch_policies([StoragePolicy(0, 'zero', True), + StoragePolicy(1, 'one', False, is_deprecated=True)]) + def test_existing_container_has_deprecated_policy_unauthed(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('GET', '/v1/a/c', swob.HTTPOk, { + 'x-backend-storage-policy-index': 1}, '') + + def fake_authorize(req): + self.authorized.append(req) + return swob.HTTPForbidden() + + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST', + 'swift.authorize': fake_authorize}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '403 Forbidden') + + calls = self.app.calls_with_headers + self.assertEqual(2, len(calls)) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_same_policy_as_primary_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, '') + self.app.register('PUT', self.build_versions_path(), swob.HTTPOk, + {}, '') + self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true', + 'X-Storage-Policy': 'ec42'}, + environ={'REQUEST_METHOD': 'PUT'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + + # check for sysmeta header + calls = self.app.calls_with_headers + self.assertEqual(4, len(calls)) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + # request to create versions container + method, path, headers = calls[2] + self.assertEqual('PUT', method) + self.assertEqual(self.build_versions_path(), path) + self.assertIn('X-Storage-Policy', headers) + self.assertEqual('ec42', headers['X-Storage-Policy']) + + # request to enable versioning on primary container + method, path, headers = calls[3] + self.assertEqual('PUT', method) + self.assertEqual('/v1/a/c', path) + self.assertIn(SYSMETA_VERSIONS_CONT, headers) + 
self.assertEqual(headers[SYSMETA_VERSIONS_CONT], + wsgi_quote(self.str_to_wsgi( + self.build_container_name('c')))) + self.assertIn(SYSMETA_VERSIONS_ENABLED, headers) + self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True') + self.assertIn('X-Storage-Policy', headers) + self.assertEqual('ec42', headers['X-Storage-Policy']) + + def test_enable_versioning_failed_primary_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, 'passed') + self.app.register('PUT', self.build_versions_path(), + swob.HTTPOk, {}, 'passed') + self.app.register('DELETE', self.build_versions_path(), + swob.HTTPNoContent, {}, '') + self.app.register('PUT', '/v1/a/c', swob.HTTPInternalServerError, + {}, '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'PUT'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '500 Internal Error') + + def test_enable_versioning_failed_versions_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, '') + self.app.register('PUT', self.build_versions_path(), + swob.HTTPInternalServerError, {}, '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'PUT'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '500 Internal Error') + + def test_enable_versioning_existing_container(self): + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('HEAD', self.build_versions_path(), + swob.HTTPOk, {}, '') + self.app.register('PUT', self.build_versions_path(), + swob.HTTPAccepted, {}, '') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: False}, + 'passed') + self.app.register('POST', '/v1/a/c', swob.HTTPOk, {}, 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + + # check for sysmeta header + calls = self.app.calls_with_headers + self.assertEqual(5, len(calls)) + method, path, req_headers = calls[-1] + self.assertEqual('POST', method) + self.assertEqual('/v1/a/c', path) + self.assertIn(SYSMETA_VERSIONS_ENABLED, req_headers) + self.assertEqual(req_headers[SYSMETA_VERSIONS_ENABLED], + 'True') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_put_container_with_legacy_versioning(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {'x-container-sysmeta-versions-location': 'ver_cont'}, + '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + + def test_put_container_with_super_legacy_versioning(self): + # x-versions-location was used before versioned writes + # was pulled out to middleware + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {'x-versions-location': 'ver_cont'}, + '') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'true'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + + def 
test_get_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, b'[]') + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_get_reserved_container_passthrough(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register('GET', '/v1/a/%s' % get_reserved_name('foo'), + swob.HTTPOk, {}, b'[]') + req = Request.blank('/v1/a/%s' % get_reserved_name('foo')) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_head_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, None) + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'HEAD'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_delete_container_success(self): + self.app.register( + 'DELETE', '/v1/a/c', swob.HTTPNoContent, {}, '') + self.app.register( + 'DELETE', self.build_versions_path(), + swob.HTTPNoContent, {}, '') + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, '') + self.app.register( + 'HEAD', self.build_versions_path(), swob.HTTPOk, + {'x-container-object-count': 0}, '') + req = Request.blank( + '/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a'), + ('HEAD', '/v1/a/c'), + ('HEAD', self.build_versions_path()), + ('HEAD', self.build_versions_path()), # get_container_info + ('DELETE', self.build_versions_path()), + ('DELETE', '/v1/a/c'), + ]) + + def test_delete_container_fail_object_count(self): + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: False}, '') + self.app.register( + 'HEAD', + self.build_versions_path(), + swob.HTTPOk, + {'x-container-object-count': 1}, '') + req = Request.blank( + '/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '409 Conflict') + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a'), + ('HEAD', '/v1/a/c'), + ('HEAD', self.build_versions_path()), + ('HEAD', self.build_versions_path()), # get_container_info + ]) + + def test_delete_container_fail_delete_versions_cont(self): + # N.B.: Notice lack of a call to DELETE /v1/a/c + # Since deleting versions container failed, swift should + # not delete primary container + self.app.register( + 'DELETE', self.build_versions_path(), + swob.HTTPServerError, {}, '') + 
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: False}, '') + self.app.register( + 'HEAD', self.build_versions_path(), swob.HTTPOk, + {'x-container-object-count': 0}, '') + req = Request.blank( + '/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '500 Internal Error') + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a'), + ('HEAD', '/v1/a/c'), + ('HEAD', self.build_versions_path()), + ('HEAD', self.build_versions_path()), # get_container_info + ('DELETE', self.build_versions_path()), + ]) + + def test_get(self): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, { + 'Content-Location': self.build_versions_path( + obj='o', version='9999998765.99999')}, + 'body') + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertIn(('X-Object-Version-Id', '0000001234.00000'), headers) + self.assertIn( + ('Content-Location', '/v1/a/c/o?version-id=0000001234.00000'), + headers) + + def test_get_symlink(self): + self.app.register( + 'GET', '/v1/a/c/o?symlink=get', swob.HTTPOk, { + 'X-Symlink-Target': '%s/%s' % ( + self.build_container_name('c'), + self.build_object_name('o', '9999998765.99999'), + ), + 'X-Symlink-Target-Etag': 'versioned-obj-etag', + }, '') + req = Request.blank( + '/v1/a/c/o?symlink=get', + environ={'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertIn(('X-Object-Version-Id', '0000001234.00000'), headers) + self.assertIn( + ('X-Symlink-Target', 'c/o?version-id=0000001234.00000'), + headers) + + def test_put_object_no_versioning(self): + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + cache = FakeMemcache() + cache.set(get_cache_key('a'), {'status': 200}) + cache.set(get_cache_key('a', 'c'), {'status': 200}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_PUT_overwrite(self, mock_time): + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { + SYSMETA_VERSIONS_SYMLINK: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank( + '/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'ETag': md5( + put_body.encode('utf8')).hexdigest(), + 'Content-Length': len(put_body)}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 2) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual(['OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', self.build_versions_path( + obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + symlink_expected_headers = { + SYMLOOP_EXTEND: 'true', + ALLOW_RESERVED_NAMES: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in symlink_expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + def test_POST(self): + self.app.register( + 'POST', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPAccepted, {}, '') + self.app.register( + 'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, { + SYSMETA_VERSIONS_SYMLINK: 'true', + 'Location': self.build_versions_path( + obj='o', version='9999998765.99999')}, '') + + # TODO: in symlink middleware, swift.leave_relative_location + # is added by the middleware during the response + # adding to the client request here, need to understand how + # to modify the response environ. + req = Request.blank( + '/v1/a/c/o', method='POST', + headers={'Content-Type': 'text/jibberish01', + 'X-Object-Meta-Foo': 'bar'}, + environ={'swift.cache': self.cache_version_on, + 'swift.leave_relative_location': 'true', + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '202 Accepted') + + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual([None, 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('POST', '/v1/a/c/o'), + ('POST', self.build_versions_path( + obj='o', version='9999998765.99999')), + ]) + + expected_hdrs = { + 'content-type': 'text/jibberish01', + 'x-object-meta-foo': 'bar', + } + version_obj_post_headers = self.app._calls[1].headers + for k, v in expected_hdrs.items(): + self.assertEqual(version_obj_post_headers[k], v) + + def test_POST_mismatched_location(self): + # This is a defensive check; ideally a mismatched + # versions container should never happen. 
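+ # When the backend redirect points outside this container's own
+ # reserved versions container, the middleware must not follow it to
+ # copy metadata; the 307 is passed straight through and only the
+ # original POST reaches the backend (asserted below).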
+ self.app.register( + 'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, { + SYSMETA_VERSIONS_SYMLINK: 'true', + 'Location': self.build_versions_path( + cont='mismatched', obj='o', version='9999998765.99999')}, + '') + + # TODO: in symlink middleware, swift.leave_relative_location + # is added by the middleware during the response + # adding to the client request here, need to understand how + # to modify the response environ. + req = Request.blank( + '/v1/a/c/o', method='POST', + headers={'Content-Type': 'text/jibberish01', + 'X-Object-Meta-Foo': 'bar'}, + environ={'swift.cache': self.cache_version_on, + 'swift.leave_relative_location': 'true', + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '307 Temporary Redirect') + + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual([None], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('POST', '/v1/a/c/o'), + ]) + + def test_POST_regular_symlink(self): + self.app.register( + 'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, { + 'Location': '/v1/a/t/o'}, '') + + # TODO: in symlink middleware, swift.leave_relative_location + # is added by the middleware during the response + # adding to the client request here, need to understand how + # to modify the response environ. + req = Request.blank( + '/v1/a/c/o', method='POST', + headers={'Content-Type': 'text/jibberish01', + 'X-Object-Meta-Foo': 'bar'}, + environ={'swift.cache': self.cache_version_on, + 'swift.leave_relative_location': 'true', + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '307 Temporary Redirect') + + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual([None], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('POST', '/v1/a/c/o'), + ]) + + def test_denied_PUT_of_versioned_object(self): + authorize_call = [] + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed') + + def fake_authorize(req): + # we should deny the object PUT + authorize_call.append(req) + return swob.HTTPForbidden() + + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'PUT', + 'swift.cache': self.cache_version_on, + 'swift.authorize': fake_authorize, + 'CONTENT_LENGTH': '0'}) + # Save off a copy, as the middleware may modify the original + expected_req = Request(req.environ.copy()) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '403 Forbidden') + self.assertEqual(len(authorize_call), 1) + self.assertRequestEqual(expected_req, authorize_call[0]) + + self.assertEqual(self.app.calls, []) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_PUT_overwrite_tombstone(self, mock_time): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None) + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank( + '/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'Content-Length': len(put_body)}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + # authorized twice because of pre-flight check on PUT + self.assertEqual(len(self.authorized), 2) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual(['OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', self.build_versions_path( + obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_PUT_overwrite_object_with_DLO(self, mock_time): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old version') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = '' + req = Request.blank('/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'X-Object-Manifest': 'req/manifest'}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 2) + self.assertEqual(4, self.app.call_count) + self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + + self.assertEqual([ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999999939.99999')), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ], self.app.calls) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + self.assertNotIn('x-object-manifest', calls[1].headers) + self.assertEqual('req/manifest', + calls[-2].headers['X-Object-Manifest']) + + symlink_put_headers = calls[-1].headers + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + self.assertNotIn('x-object-manifest', symlink_put_headers) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_PUT_overwrite_DLO_with_object(self, mock_time): + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, + {'X-Object-Manifest': 'resp/manifest', + 'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, + 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank('/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain'}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 2) + self.assertEqual(4, self.app.call_count) + self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual([ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999999939.99999')), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ], self.app.calls) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + self.assertEqual('resp/manifest', + calls[1].headers['X-Object-Manifest']) + self.assertNotIn(TGT_OBJ_SYSMETA_SYMLINK_HDR, + calls[1].headers) + + self.assertNotIn('x-object-manifest', calls[2].headers) + self.assertNotIn(TGT_OBJ_SYSMETA_SYMLINK_HDR, + calls[2].headers) + + symlink_put_headers = calls[-1].headers + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + self.assertNotIn('x-object-manifest', symlink_put_headers) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_PUT_overwrite_SLO_with_object(self, mock_time): + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { + 'X-Static-Large-Object': 'True', + # N.B. 
object-server strips swift_bytes + 'Content-Type': 'application/octet-stream', + 'X-Object-Sysmeta-Container-Update-Override-Etag': + '656516af0f7474b857857dd2a327f3b9; ' + 'slo_etag=71e938d37c1d06dc634dd24660255a88', + 'X-Object-Sysmeta-Slo-Etag': '71e938d37c1d06dc634dd24660255a88', + 'X-Object-Sysmeta-Slo-Size': '10485760', + 'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT', + }, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank('/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain'}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 2) + self.assertEqual(4, self.app.call_count) + self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual([ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999999939.99999')), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ], self.app.calls) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + slo_headers = { + 'X-Static-Large-Object': 'True', + 'Content-Type': 'application/octet-stream; swift_bytes=10485760', + 'X-Object-Sysmeta-Container-Update-Override-Etag': + '656516af0f7474b857857dd2a327f3b9; ' + 'slo_etag=71e938d37c1d06dc634dd24660255a88', + 'X-Object-Sysmeta-Slo-Etag': '71e938d37c1d06dc634dd24660255a88', + 'X-Object-Sysmeta-Slo-Size': '10485760', + } + archive_put = calls[1] + for key, value in slo_headers.items(): + self.assertEqual(archive_put.headers[key], value) + + client_put = calls[2] + for key in slo_headers: + if key == 'Content-Type': + self.assertEqual('text/plain', client_put.headers[key]) + else: + self.assertNotIn(key, client_put.headers) + + symlink_put_headers = calls[-1].headers + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + self.assertNotIn('x-object-manifest', symlink_put_headers) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_PUT_overwrite_object(self, mock_time): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + + put_body = 'stuff' * 100 + req = Request.blank( + '/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'ETag': md5( + put_body.encode('utf8')).hexdigest(), + 'Content-Length': len(put_body)}, + environ={'swift.cache': self.cache_version_on, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + # authorized twice because of pre-flight check on PUT + self.assertEqual(len(self.authorized), 2) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999999939.99999')), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + put_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + def test_new_version_get_errors(self): + # GET on source fails, expect client error response, + # no PUT should happen + self.app.register('GET', '/v1/a/c/o', + swob.HTTPBadRequest, {}, None) + req = Request.blank('/v1/a/c/o', method='PUT', + environ={'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(1, self.app.call_count) + + # GET on source fails, expect server error response + self.app.register('GET', '/v1/a/c/o', + swob.HTTPBadGateway, {}, None) + req = Request.blank('/v1/a/c/o', method='PUT', + environ={'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '503 Service Unavailable') + self.assertEqual(2, self.app.call_count) + + def test_new_version_put_errors(self): + # PUT of version fails, expect client error response + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPUnauthorized, {}, None) + req = Request.blank('/v1/a/c/o', method='PUT', + environ={'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '401 Unauthorized') + self.assertEqual(2, self.app.call_count) + + # PUT of version fails, expect server error response + 
self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPBadGateway, {}, None) + req = Request.blank( + '/v1/a/c/o', headers={'Content-Type': 'text/plain'}, + environ={'REQUEST_METHOD': 'PUT', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '503 Service Unavailable') + self.assertEqual(4, self.app.call_count) + + # PUT fails because the reserved container is missing; server error + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPNotFound, {}, None) + req = Request.blank( + '/v1/a/c/o', headers={'Content-Type': 'text/plain'}, + environ={'REQUEST_METHOD': 'PUT', + 'swift.cache': self.cache_version_on_but_busted, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '500 Internal Error') + self.assertIn(b'container does not exist', body) + self.assertIn(b're-enable object versioning', body) + + +class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase): + def test_get_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: 'c\x01versions', + SYSMETA_VERSIONS_ENABLED: False}, b'[]') + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'False'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_head_container(self): + self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: 'c\x01versions', + SYSMETA_VERSIONS_ENABLED: False}, None) + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'HEAD'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'False'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_disable_versioning(self): + self.app.register('POST', '/v1/a/c', swob.HTTPOk, {}, 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Enabled': 'false'}, + environ={'REQUEST_METHOD': 'POST', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_PUT_overwrite_null_marker_versioning_disabled(self, mock_time): + # During object PUT with a versioning disabled, if the most + # recent versioned object is a DELETE marker will a *null* + # version-id, then the DELETE marker should be removed. 
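+        # the fake listing below mimics the versions container: the first
+        # entry (reversed timestamps sort newest first) is the delete
+        # marker, the second is an older plain version of 'o'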
+ listing_body = [{ + "hash": "y", + "last_modified": "2014-11-21T14:23:02.206740", + "bytes": 0, + "name": self.build_object_name('o', '0000000001.00000'), + "content_type": "application/x-deleted;swift_versions_deleted=1" + }, { + "hash": "x", + "last_modified": "2014-11-21T14:14:27.409100", + "bytes": 3, + "name": self.build_object_name('o', '0000000002.00000'), + "content_type": "text/plain" + }] + prefix_listing_path = \ + '/v1/a/c\x01versions?prefix=o---&marker=' + self.app.register( + 'GET', prefix_listing_path, swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + self.app.register( + 'HEAD', + self.build_versions_path(obj='o', version='0000000001.00000'), + swob.HTTPNoContent, + {'content-type': DELETE_MARKER_CONTENT_TYPE}, None) + self.app.register( + 'DELETE', + self.build_versions_path(obj='o', version='0000000001.00000'), + swob.HTTPNoContent, {}, None) + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank( + '/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'Content-Length': len(put_body)}, + environ={'swift.cache': self.cache_version_off, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + + # authorized twice because of pre-flight check on PUT + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + # TODO self.assertEqual(['OV', None, 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + + self.assertEqual(self.app.calls, [ + ('PUT', '/v1/a/c/o'), + ]) + + obj_put_headers = self.app.calls_with_headers[-1].headers + self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers) + + def test_put_object_versioning_disabled(self): + listing_body = [{ + "hash": "x", + "last_modified": "2014-11-21T14:14:27.409100", + "bytes": 3, + "name": self.build_object_name('o', '0000000001.00000'), + "content_type": "text/plain" + }] + prefix_listing_path = \ + '/v1/a/c\x01versions?prefix=o---&marker=' + self.app.register( + 'GET', prefix_listing_path, swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'PUT', + 'swift.cache': self.cache_version_off, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + self.assertEqual(self.app.calls, [ + ('PUT', '/v1/a/c/o'), + ]) + obj_put_headers = self.app._calls[-1].headers + self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_PUT_with_recent_versioned_marker_versioning_disabled(self, + mock_time): + # During object PUT with a versioning disabled, if the most + # recent versioned object is a DELETE marker will a non-null + # version-id, then the DELETE marker should not be removed. 
+ listing_body = [{ + "hash": "y", + "last_modified": "2014-11-21T14:23:02.206740", + "bytes": 0, + "name": self.build_object_name('o', '0000000001.00000'), + "content_type": "application/x-deleted;swift_versions_deleted=1" + }, { + "hash": "x", + "last_modified": "2014-11-21T14:14:27.409100", + "bytes": 3, + "name": self.build_object_name('o', '0000000002.00000'), + "content_type": "text/plain" + }] + prefix_listing_path = \ + '/v1/a/c\x01versions?prefix=o---&marker=' + self.app.register( + 'GET', prefix_listing_path, swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + self.app.register( + 'HEAD', + self.build_versions_path(obj='o', version='0000000001.00000'), + swob.HTTPNoContent, + {'content-type': DELETE_MARKER_CONTENT_TYPE}, None) + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + put_body = 'stuff' * 100 + req = Request.blank( + '/v1/a/c/o', method='PUT', body=put_body, + headers={'Content-Type': 'text/plain', + 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'Content-Length': len(put_body)}, + environ={'swift.cache': self.cache_version_off, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + + # authorized twice because of pre-flight check on PUT + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + # TODO self.assertEqual(['OV', None, 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + + self.assertEqual(self.app.calls, [ + ('PUT', '/v1/a/c/o'), + ]) + + obj_put_headers = self.app.calls_with_headers[-1].headers + self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_delete_object_with_versioning_disabled(self, mock_time): + # When versioning is disabled, swift will simply issue the + # original request to the versioned container + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed') + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': self.cache_version_off}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_POST_symlink(self): + self.app.register( + 'POST', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPAccepted, {}, '') + self.app.register( + 'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, { + SYSMETA_VERSIONS_SYMLINK: 'true', + 'Location': self.build_versions_path( + obj='o', version='9999998765.99999')}, '') + + # TODO: in symlink middleware, swift.leave_relative_location + # is added by the middleware during the response + # adding to the client request here, need to understand how + # to modify the response environ. 
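+        # the POST to the user-visible name hits the symlink and is answered
+        # with a 307 pointing at the versioned object; the middleware is
+        # expected to follow up with a POST to that location (second call in
+        # the assertion below) so the metadata lands on the actual version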
+ req = Request.blank( + '/v1/a/c/o', method='POST', + headers={'Content-Type': 'text/jibberish01', + 'X-Object-Meta-Foo': 'bar'}, + environ={'swift.cache': self.cache_version_off, + 'swift.leave_relative_location': 'true', + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '202 Accepted') + + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual([None, 'OV'], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('POST', '/v1/a/c/o'), + ('POST', + self.build_versions_path(obj='o', version='9999998765.99999')), + ]) + + expected_hdrs = { + 'content-type': 'text/jibberish01', + 'x-object-meta-foo': 'bar', + } + version_obj_post_headers = self.app._calls[1].headers + for k, v in expected_hdrs.items(): + self.assertEqual(version_obj_post_headers[k], v) + + def test_POST_unversioned_obj(self): + self.app.register( + 'POST', '/v1/a/c/o', swob.HTTPAccepted, {}, '') + + # TODO: in symlink middleware, swift.leave_relative_location + # is added by the middleware during the response + # adding to the client request here, need to understand how + # to modify the response environ. + req = Request.blank( + '/v1/a/c/o', method='POST', + headers={'Content-Type': 'text/jibberish01', + 'X-Object-Meta-Foo': 'bar'}, + environ={'swift.cache': self.cache_version_off, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '202 Accepted') + + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual([None], self.app.swift_sources) + self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids)) + self.assertEqual(self.app.calls, [ + ('POST', '/v1/a/c/o'), + ]) + + expected_hdrs = { + 'content-type': 'text/jibberish01', + 'x-object-meta-foo': 'bar', + } + version_obj_post_headers = self.app._calls[0].headers + for k, v in expected_hdrs.items(): + self.assertEqual(version_obj_post_headers[k], v) + + +class ObjectVersioningTestDelete(ObjectVersioningBaseTestCase): + def test_delete_object_with_versioning_never_enabled(self): + # should be a straight DELETE, versioning middleware + # does not get involved. + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed') + cache = FakeMemcache() + cache.set(get_cache_key('a'), {'status': 200}) + cache.set(get_cache_key('a', 'c'), {'status': 200}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': cache}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + called_method = \ + [method for (method, path, rheaders) in self.app._calls] + self.assertNotIn('PUT', called_method) + self.assertNotIn('GET', called_method) + self.assertEqual(1, self.app.call_count) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_put_delete_marker_no_object_success(self, mock_time): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPNotFound, + {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNotFound, {}, None) + + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '404 Not Found') + self.assertEqual(len(self.authorized), 2) + + req.environ['REQUEST_METHOD'] = 'PUT' + self.assertRequestEqual(req, self.authorized[0]) + + calls = self.app.calls_with_headers + self.assertEqual(['GET', 'PUT', 'DELETE'], [c.method for c in calls]) + self.assertEqual('application/x-deleted;swift_versions_deleted=1', + calls[1].headers.get('Content-Type')) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_delete_marker_over_object_success(self, mock_time): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None) + + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual(b'', body) + self.assertEqual(len(self.authorized), 2) + + req.environ['REQUEST_METHOD'] = 'PUT' + self.assertRequestEqual(req, self.authorized[0]) + + calls = self.app.calls_with_headers + self.assertEqual(['GET', 'PUT', 'PUT', 'DELETE'], + [c.method for c in calls]) + self.assertEqual( + self.build_versions_path(obj='o', version='9999999939.99999'), + calls[1].path) + self.assertEqual('application/x-deleted;swift_versions_deleted=1', + calls[2].headers.get('Content-Type')) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_delete_marker_over_versioned_object_success(self, mock_time): + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, + {SYSMETA_VERSIONS_SYMLINK: 'true'}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None) + + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual(b'', body) + self.assertEqual(len(self.authorized), 2) + + req.environ['REQUEST_METHOD'] = 'PUT' + self.assertRequestEqual(req, self.authorized[0]) + + calls = self.app.calls_with_headers + self.assertEqual(['GET', 'PUT', 'DELETE'], + [c.method for c in calls]) + self.assertEqual( + self.build_versions_path(obj='o', version='9999998765.99999'), + calls[1].path) + self.assertEqual('application/x-deleted;swift_versions_deleted=1', + calls[1].headers.get('Content-Type')) + + def test_denied_DELETE_of_versioned_object(self): + authorize_call = [] + + def fake_authorize(req): + authorize_call.append((req.method, req.path)) + return swob.HTTPForbidden() + + req = Request.blank('/v1/a/c/o', method='DELETE', body='', + headers={'X-If-Delete-At': 1}, + environ={'swift.cache': self.cache_version_on, + 'swift.authorize': fake_authorize, + 'swift.trans_id': 'fake_trans_id'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '403 Forbidden') + self.assertEqual(len(authorize_call), 1) + self.assertEqual(('DELETE', '/v1/a/c/o'), authorize_call[0]) + + +class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase): + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_COPY_overwrite_tombstone(self, mock_time): + self.cache_version_on.set(get_cache_key('a', 'src_cont'), + {'status': 200}) + src_body = 'stuff' * 100 + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None) + self.app.register( + 'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body) + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'c/o'}) + + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 3) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/src_cont/src_obj'), + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + src_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_COPY_overwrite_object(self, mock_time): + self.cache_version_on.set(get_cache_key('a', 'src_cont'), + {'status': 200}) + src_body = 'stuff' * 100 + self.app.register( + 'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body) + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old object') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'c/o'}) + + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 3) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/src_cont/src_obj'), + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999999939.99999')), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + src_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' + 'time.time', return_value=1234) + def test_COPY_overwrite_version_symlink(self, mock_time): + self.cache_version_on.set(get_cache_key('a', 'src_cont'), + {'status': 200}) + src_body = 'stuff' * 100 + self.app.register( + 'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body) + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { + SYSMETA_VERSIONS_SYMLINK: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'c/o'}) + + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 3) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/src_cont/src_obj'), + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + src_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + @mock.patch('swift.common.middleware.versioned_writes.object_versioning.' 
+ 'time.time', return_value=1234) + def test_copy_new_version_different_account(self, mock_time): + self.cache_version_on.set(get_cache_key('src_acc'), + {'status': 200}) + self.cache_version_on.set(get_cache_key('src_acc', 'src_cont'), + {'status': 200}) + src_body = 'stuff' * 100 + self.app.register( + 'GET', '/v1/src_acc/src_cont/src_obj', swob.HTTPOk, {}, src_body) + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, { + SYSMETA_VERSIONS_SYMLINK: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '') + self.app.register( + 'PUT', + self.build_versions_path(obj='o', version='9999998765.99999'), + swob.HTTPCreated, {}, 'passed') + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed') + req = Request.blank( + '/v1/src_acc/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', + 'swift.cache': self.cache_version_on, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'c/o', + 'Destination-Account': 'a'}) + + status, headers, body = self.call_ov(req) + self.assertEqual(status, '201 Created') + self.assertEqual(len(self.authorized), 3) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/src_acc/src_cont/src_obj'), + ('GET', '/v1/a/c/o?symlink=get'), + ('PUT', + self.build_versions_path(obj='o', version='9999998765.99999')), + ('PUT', '/v1/a/c/o'), + ]) + + expected_headers = { + TGT_OBJ_SYSMETA_SYMLINK_HDR: + self.build_symlink_path('c', 'o', '9999998765.99999'), + 'x-object-sysmeta-symlink-target-etag': md5( + src_body.encode('utf8')).hexdigest(), + 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), + } + symlink_put_headers = self.app._calls[-1].headers + for k, v in expected_headers.items(): + self.assertEqual(symlink_put_headers[k], v) + + def test_copy_object_versioning_disabled(self): + self.cache_version_off.set(get_cache_key('a', 'src_cont'), + {'status': 200}) + listing_body = [{ + "hash": "x", + "last_modified": "2014-11-21T14:14:27.409100", + "bytes": 3, + "name": self.build_object_name('o', '0000000001.00000'), + "content_type": "text/plain" + }] + prefix_listing_path = \ + '/v1/a/c\x01versions?prefix=o---&marker=' + self.app.register( + 'GET', prefix_listing_path, swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + src_body = 'stuff' * 100 + self.app.register( + 'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body) + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', + 'swift.cache': self.cache_version_off, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'c/o'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 2) + + self.assertEqual(self.app.calls, [ + ('GET', '/v1/a/src_cont/src_obj'), + ('PUT', '/v1/a/c/o'), + ]) + obj_put_headers = self.app._calls[-1].headers + self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers) + + +class ObjectVersioningTestVersionAPI(ObjectVersioningBaseTestCase): + + def test_fail_non_versioned_container(self): + self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '') + self.app.register('HEAD', '/v1/a/c', swob.HTTPOk, {}, '') + req = Request.blank( + '/v1/a/c/o', method='GET', + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(body, b'version-aware operations require' + b' that the container is versioned') + + def test_PUT_version(self): + timestamp = next(self.ts) + version_path = '%s?symlink=get' % 
self.build_versions_path(
+            obj='o', version=(~timestamp).normal)
+        etag = md5(b'old-version-etag').hexdigest()
+        self.app.register('HEAD', version_path, swob.HTTPNoContent, {
+            'Content-Length': 10,
+            'Content-Type': 'application/old-version',
+            'ETag': etag,
+        }, '')
+        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='PUT',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': timestamp.normal})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '201 Created')
+        self.assertEqual(self.app.calls, [
+            ('HEAD', version_path),
+            ('PUT', '/v1/a/c/o?version-id=%s' % timestamp.normal),
+        ])
+        obj_put_headers = self.app.calls_with_headers[-1].headers
+        symlink_expected_headers = {
+            SYSMETA_VERSIONS_SYMLINK: 'true',
+            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
+                'c', 'o', (~timestamp).normal),
+            'x-object-sysmeta-symlink-target-etag': etag,
+            'x-object-sysmeta-symlink-target-bytes': '10',
+        }
+        for k, v in symlink_expected_headers.items():
+            self.assertEqual(obj_put_headers[k], v)
+
+    def test_PUT_version_with_body(self):
+        req = Request.blank(
+            '/v1/a/c/o', method='PUT', body='foo',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': '1'})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '400 Bad Request')
+
+    def test_PUT_version_not_found(self):
+        timestamp = next(self.ts)
+        version_path = '%s?symlink=get' % self.build_versions_path(
+            obj='o', version=(~timestamp).normal)
+        self.app.register('HEAD', version_path, swob.HTTPNotFound, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='PUT',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': timestamp.normal})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '404 Not Found')
+        self.assertIn(b'version does not exist', body)
+
+    def test_PUT_version_container_not_found(self):
+        timestamp = next(self.ts)
+        version_path = '%s?symlink=get' % self.build_versions_path(
+            obj='o', version=(~timestamp).normal)
+        self.app.register('HEAD', version_path, swob.HTTPNotFound, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='PUT',
+            environ={'swift.cache': self.cache_version_on_but_busted},
+            params={'version-id': timestamp.normal})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '500 Internal Error')
+        self.assertIn(b'container does not exist', body)
+        self.assertIn(b're-enable object versioning', body)
+
+    def test_PUT_version_invalid(self):
+        invalid_versions = ('null', 'something', '-10')
+        for version_id in invalid_versions:
+            req = Request.blank(
+                '/v1/a/c/o', method='PUT',
+                environ={'swift.cache': self.cache_version_on},
+                params={'version-id': version_id})
+            status, headers, body = self.call_ov(req)
+            self.assertEqual(status, '400 Bad Request')
+
+    def test_POST_error(self):
+        req = Request.blank(
+            '/v1/a/c/o', method='POST',
+            headers={'Content-Type': 'text/plain',
+                     'X-Object-Meta-foo': 'bar'},
+            environ={'swift.cache': self.cache_version_on,
+                     'swift.trans_id': 'fake_trans_id'},
+            params={'version-id': '1'})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '400 Bad Request')
+
+    def test_GET(self):
+        self.app.register(
+            'GET',
+            self.build_versions_path(obj='o', version='9999999939.99999'),
+            swob.HTTPOk, {}, 'foobar')
+        req = Request.blank(
+            '/v1/a/c/o', method='GET',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': '0000000060.00000'})
+        status, headers, body =
self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Object-Version-Id', '0000000060.00000'), + headers) + self.assertEqual(b'foobar', body) + + def test_GET_404(self): + self.app.register( + 'GET', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPNotFound, {}, '') + req = Request.blank( + '/v1/a/c/o', method='GET', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '404 Not Found') + self.assertNotIn(('X-Object-Version-Id', '0000000060.00000'), + headers) + + def test_HEAD(self): + self.app.register( + 'HEAD', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPOk, { + 'X-Object-Meta-Foo': 'bar'}, + '') + req = Request.blank( + '/v1/a/c/o', method='HEAD', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertIn(('X-Object-Version-Id', '0000000060.00000'), + headers) + self.assertIn(('X-Object-Meta-Foo', 'bar'), headers) + + def test_GET_null_id(self): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, {}, 'foobar') + req = Request.blank( + '/v1/a/c/o', method='GET', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': 'null'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(1, len(self.authorized)) + self.assertEqual(1, len(self.app.calls)) + self.assertIn(('X-Object-Version-Id', 'null'), headers) + self.assertEqual(b'foobar', body) + + def test_GET_null_id_versioned_obj(self): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPOk, { + 'Content-Location': self.build_versions_path( + obj='o', version='9999998765.99999')}, + '') + req = Request.blank( + '/v1/a/c/o', method='GET', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': 'null'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '404 Not Found') + self.assertEqual(1, len(self.authorized)) + self.assertEqual(1, len(self.app.calls)) + self.assertNotIn(('X-Object-Version-Id', '0000001234.00000'), headers) + + def test_GET_null_id_404(self): + self.app.register( + 'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, '') + req = Request.blank( + '/v1/a/c/o', method='GET', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': 'null'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '404 Not Found') + self.assertEqual(1, len(self.authorized)) + self.assertEqual(1, len(self.app.calls)) + self.assertNotIn(('X-Object-Version-Id', 'null'), headers) + + def test_HEAD_null_id(self): + self.app.register( + 'HEAD', '/v1/a/c/o', swob.HTTPOk, {'X-Object-Meta-Foo': 'bar'}, '') + req = Request.blank( + '/v1/a/c/o', method='HEAD', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': 'null'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertEqual(1, len(self.authorized)) + self.assertEqual(1, len(self.app.calls)) + self.assertIn(('X-Object-Version-Id', 'null'), headers) + self.assertIn(('X-Object-Meta-Foo', 'bar'), headers) + + def test_HEAD_delete_marker(self): + self.app.register( + 'HEAD', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPOk, { + 'content-type': + 'application/x-deleted;swift_versions_deleted=1'}, + '') + req = Request.blank( + 
'/v1/a/c/o', method='HEAD', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + + # a HEAD/GET of a delete-marker returns a 404 + self.assertEqual(status, '404 Not Found') + self.assertEqual(len(self.authorized), 1) + self.assertIn(('X-Object-Version-Id', '0000000060.00000'), + headers) + + def test_DELETE_not_current_version(self): + # This tests when version-id does not point to the + # current version, in this case, there's no need to + # re-link symlink + self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, { + SYSMETA_VERSIONS_SYMLINK: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path( + 'c', 'o', '9999999940.99999')}, '') + self.app.register( + 'DELETE', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPNoContent, {}, 'foobar') + req = Request.blank( + '/v1/a/c/o', method='DELETE', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual('0000000059.00000', + dict(headers)['X-Object-Current-Version-Id']) + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a/c/o?symlink=get'), + ('DELETE', + '%s?version-id=0000000060.00000' % self.build_versions_path( + obj='o', version='9999999939.99999')), + ]) + + calls = self.app.calls_with_headers + self.assertIn('X-Newest', calls[0].headers) + self.assertEqual('True', calls[0].headers['X-Newest']) + + def test_DELETE_current_version(self): + self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, { + SYSMETA_VERSIONS_SYMLINK: 'true', + TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path( + 'c', 'o', '9999999939.99999')}, '') + self.app.register( + 'DELETE', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPNoContent, {}, '') + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '') + req = Request.blank( + '/v1/a/c/o', method='DELETE', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual('null', + dict(headers)['X-Object-Current-Version-Id']) + self.assertEqual('0000000060.00000', + dict(headers)['X-Object-Version-Id']) + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a/c/o?symlink=get'), + ('DELETE', '/v1/a/c/o'), + ('DELETE', + self.build_versions_path(obj='o', version='9999999939.99999')), + ]) + + def test_DELETE_current_version_is_delete_marker(self): + self.app.register('HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, '') + self.app.register( + 'DELETE', + self.build_versions_path(obj='o', version='9999999939.99999'), + swob.HTTPNoContent, {}, '') + req = Request.blank( + '/v1/a/c/o', method='DELETE', + environ={'swift.cache': self.cache_version_on}, + params={'version-id': '0000000060.00000'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '204 No Content') + self.assertEqual('null', + dict(headers)['X-Object-Current-Version-Id']) + self.assertEqual('0000000060.00000', + dict(headers)['X-Object-Version-Id']) + self.assertEqual(self.app.calls, [ + ('HEAD', '/v1/a/c/o?symlink=get'), + ('DELETE', + '%s?version-id=0000000060.00000' % self.build_versions_path( + obj='o', version='9999999939.99999')), + ]) + + def test_DELETE_current_obj_is_unversioned(self): + self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, '') + self.app.register( + 'DELETE', + 
self.build_versions_path(obj='o', version='9999999939.99999'),
+            swob.HTTPNoContent, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='DELETE',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': '0000000060.00000'})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '204 No Content')
+        self.assertEqual('null',
+                         dict(headers)['X-Object-Current-Version-Id'])
+        self.assertEqual('0000000060.00000',
+                         dict(headers)['X-Object-Version-Id'])
+        self.assertEqual(self.app.calls, [
+            ('HEAD', '/v1/a/c/o?symlink=get'),
+            ('DELETE',
+             '%s?version-id=0000000060.00000' % self.build_versions_path(
+                 obj='o', version='9999999939.99999')),
+        ])
+
+    def test_DELETE_null_version(self):
+        self.app.register(
+            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='DELETE',
+            environ={'swift.cache': self.cache_version_on},
+            params={'version-id': 'null'})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '204 No Content')
+        self.assertEqual(self.app.calls, [
+            ('DELETE', '/v1/a/c/o?version-id=null'),
+        ])
+
+
+class ObjectVersioningVersionAPIWhileDisabled(ObjectVersioningBaseTestCase):
+
+    def test_PUT_version_versioning_disabled(self):
+        timestamp = next(self.ts)
+        version_path = '%s?symlink=get' % self.build_versions_path(
+            obj='o', version=(~timestamp).normal)
+        etag = md5(b'old-version-etag').hexdigest()
+        self.app.register('HEAD', version_path, swob.HTTPNoContent, {
+            'Content-Length': 10,
+            'Content-Type': 'application/old-version',
+            'ETag': etag,
+        }, '')
+        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}, '')
+        req = Request.blank(
+            '/v1/a/c/o', method='PUT',
+            environ={'swift.cache': self.cache_version_off},
+            params={'version-id': timestamp.normal})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '201 Created')
+        self.assertEqual(self.app.calls, [
+            ('HEAD', version_path),
+            ('PUT', '/v1/a/c/o?version-id=%s' % timestamp.normal),
+        ])
+        obj_put_headers = self.app.calls_with_headers[-1].headers
+        symlink_expected_headers = {
+            SYSMETA_VERSIONS_SYMLINK: 'true',
+            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
+                'c', 'o', (~timestamp).normal),
+            'x-object-sysmeta-symlink-target-etag': etag,
+            'x-object-sysmeta-symlink-target-bytes': '10',
+        }
+        for k, v in symlink_expected_headers.items():
+            self.assertEqual(obj_put_headers[k], v)
+
+    def test_POST_error_versioning_disabled(self):
+        req = Request.blank(
+            '/v1/a/c/o', method='POST',
+            headers={'Content-Type': 'text/plain',
+                     'X-Object-Meta-foo': 'bar'},
+            environ={'swift.cache': self.cache_version_off,
+                     'swift.trans_id': 'fake_trans_id'},
+            params={'version-id': '1'})
+        status, headers, body = self.call_ov(req)
+        self.assertEqual(status, '400 Bad Request')
+
+    def test_DELETE_current_version(self):
+        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {
+            SYSMETA_VERSIONS_SYMLINK: 'true',
+            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
+                'c', 'o', '9999999939.99999')}, '')
+        self.app.register(
+            'DELETE',
+            self.build_versions_path(obj='o', version='9999999939.99999'),
+            swob.HTTPNoContent, {}, '')
+        self.app.register(
+            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '')
+
+        # request with versioning disabled
+        req = Request.blank(
+            '/v1/a/c/o', method='DELETE',
+            environ={'swift.cache': self.cache_version_off},
+            params={'version-id': '0000000060.00000'})
+        status, headers, body = self.call_ov(req)
+
+        self.assertEqual(status, '204 No Content')
+        self.assertEqual(self.app.calls, [
+            ('HEAD',
'/v1/a/c/o?symlink=get'), + ('DELETE', '/v1/a/c/o'), + ('DELETE', + self.build_versions_path(obj='o', version='9999999939.99999')), + ]) + + +class ObjectVersioningTestContainerOperations(ObjectVersioningBaseTestCase): + def test_container_listing_translation(self): + listing_body = [{ + 'bytes': 0, + 'name': 'my-normal-obj', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % self.build_symlink_path( + 'c', 'my-normal-obj', '9999999989.99999'), + 'last_modified': '2019-07-26T15:09:54.518990', + 'content_type': 'application/foo', + }, { + 'bytes': 8, + 'name': 'my-old-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '2019-07-26T15:54:38.326800', + 'content_type': 'application/bar', + }, { + 'bytes': 0, + 'name': 'my-slo-manifest', + 'hash': '387d1ab7d89eda2162bcf8e502667c86; ' + 'slo_etag=71e938d37c1d06dc634dd24660255a88; ' + 'symlink_target=%s; ' + 'symlink_target_etag=387d1ab7d89eda2162bcf8e502667c86; ' + # N.B. symlink_target_bytes is set to the slo_size + 'symlink_target_bytes=10485760' % self.build_symlink_path( + 'c', 'my-slo-manifest', '9999999979.99999'), + 'last_modified': '2019-07-26T15:00:28.499260', + 'content_type': 'application/baz', + }, { + 'bytes': 0, + 'name': 'unexpected-symlink', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=tgt_container/tgt_obj; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9', + 'last_modified': '2019-07-26T15:09:54.518990', + 'content_type': 'application/symlink', + }] + + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 9, + 'name': 'my-normal-obj', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '2019-07-26T15:09:54.518990', + 'content_type': 'application/foo', + 'symlink_path': + '/v1/a/c/my-normal-obj?version-id=0000000010.00000', + 'version_symlink': True, + }, { + 'bytes': 8, + 'name': 'my-old-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '2019-07-26T15:54:38.326800', + 'content_type': 'application/bar', + }, { + 'bytes': 10485760, + 'name': 'my-slo-manifest', + # since we don't have slo middleware in test pipeline, we expect + # slo_etag to stay in the hash key + 'hash': '387d1ab7d89eda2162bcf8e502667c86; ' + 'slo_etag=71e938d37c1d06dc634dd24660255a88', + 'last_modified': '2019-07-26T15:00:28.499260', + 'content_type': 'application/baz', + 'symlink_path': + '/v1/a/c/my-slo-manifest?version-id=0000000020.00000', + 'version_symlink': True, + }, { + 'bytes': 0, + 'name': 'unexpected-symlink', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e', + 'last_modified': '2019-07-26T15:09:54.518990', + 'content_type': 'application/foo', + 'symlink_bytes': 9, + 'symlink_path': '/v1/a/tgt_container/tgt_obj', + 'symlink_etag': 'e55cedc11adb39c404b7365f7d6291fa', + 'content_type': 'application/symlink', + }] + self.assertEqual(expected, json.loads(body)) + + def test_listing_translation_utf8(self): + listing_body = [{ + 'bytes': 0, + 'name': u'\N{SNOWMAN}-obj', + 'hash': 
'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % self.build_symlink_path( + u'\N{COMET}-container', u'\N{CLOUD}-target', + '9999999989.99999'), + 'last_modified': '2019-07-26T15:09:54.518990', + 'content_type': 'application/snowman', + }] + self.app.register( + 'GET', '/v1/a/\xe2\x98\x83-test', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: wsgi_quote( + self.str_to_wsgi(self.build_container_name( + u'\N{COMET}-container'))), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/\xe2\x98\x83-test', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 9, + 'name': u'\N{SNOWMAN}-obj', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '2019-07-26T15:09:54.518990', + 'symlink_path': + '/v1/a/%E2%98%83-test/%E2%98%81-target?' + 'version-id=0000000010.00000', + 'content_type': 'application/snowman', + 'version_symlink': True, + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 0, + 'name': 'obj', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % + self.build_symlink_path('c', 'obj', '9999999979.99999'), + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }] + + versions_listing_body = [{ + 'bytes': 9, + 'name': self.build_object_name('obj', '9999999979.99999'), + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }, { + 'bytes': 8, + 'name': self.build_object_name('obj', '9999999989.99999'), + 'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + }] + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'version_id': 'null', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + 'is_latest': True, + }, { + 'bytes': 9, + 'name': 'obj', + 'version_id': '0000000020.00000', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }, { + 'bytes': 8, + 'name': 'obj', + 'version_id': '0000000010.00000', + 'hash': 
'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }] + self.assertEqual(expected, json.loads(body)) + + # Can be explicitly JSON + req = Request.blank( + '/v1/a/c?versions&format=json', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on, + 'HTTP_ACCEPT': 'application/json'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + + # But everything else is unacceptable + req = Request.blank( + '/v1/a/c?versions&format=txt', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on, + 'HTTP_ACCEPT': 'text/plain'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions&format=xml', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on, + 'HTTP_ACCEPT': 'text/xml'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on, + 'HTTP_ACCEPT': 'application/xml'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions&format=asdf', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on, + 'HTTP_ACCEPT': 'foo/bar'}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '406 Not Acceptable') + + def test_list_versions_marker_missing_marker(self): + + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, '{}') + + req = Request.blank( + '/v1/a/c?versions&version_marker=1', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(body, b'version_marker param requires marker') + + req = Request.blank( + '/v1/a/c?versions&marker=obj&version_marker=id', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(body, b'invalid version_marker param') + + def test_list_versions_marker(self): + listing_body = [{ + 'bytes': 8, + 'name': 'non-versioned-obj', + 'hash': 'etag', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 0, + 'name': 'obj', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 
'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % + self.build_symlink_path('c', 'obj', '9999999969.99999'), + 'last_modified': '1970-01-01T00:00:30.000000', + 'content_type': 'text/plain', + }] + + versions_listing_body = [{ + 'bytes': 9, + 'name': self.build_object_name('obj', '9999999969.99999'), + 'hash': 'etagv3', + 'last_modified': '1970-01-01T00:00:30.000000', + 'content_type': 'text/plain', + }, { + 'bytes': 10, + 'name': self.build_object_name('obj', '9999999979.99999'), + 'hash': 'etagv2', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }, { + 'bytes': 8, + 'name': self.build_object_name('obj', '9999999989.99999'), + 'hash': 'etagv1', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + }] + + expected = [{ + 'bytes': 8, + 'name': 'non-versioned-obj', + 'hash': 'etag', + 'version_id': 'null', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 9, + 'name': 'obj', + 'version_id': '0000000030.00000', + 'hash': 'etagv3', + 'last_modified': '1970-01-01T00:00:30.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }, { + 'bytes': 10, + 'name': 'obj', + 'version_id': '0000000020.00000', + 'hash': 'etagv2', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }, { + 'bytes': 8, + 'name': 'obj', + 'version_id': '0000000010.00000', + 'hash': 'etagv1', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }] + + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body[1:]).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions&marker=obj', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual(expected[1:], json.loads(body)) + + # version_marker + self.app.register( + 'GET', + '%s?marker=%s' % ( + self.build_versions_path(), + self.build_object_name('obj', '9999999989.99999')), + swob.HTTPOk, {}, + json.dumps(versions_listing_body[2:]).encode('utf8')) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body[1:]).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions&marker=obj&version_marker=0000000010.00000', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + self.assertEqual(expected[3:], json.loads(body)) + + def test_list_versions_invalid_delimiter(self): + + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, '{}') + + req = Request.blank( + '/v1/a/c?versions&delimiter=1', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) 
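+        # only '/' is supported as a delimiter for a ?versions listing, so
+        # this request should be rejected outright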
+ status, headers, body = self.call_ov(req) + self.assertEqual(status, '400 Bad Request') + self.assertEqual(body, b'invalid delimiter param') + + def test_list_versions_delete_markers(self): + listing_body = [] + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + versions_listing_body = [{ + 'name': self.build_object_name('obj', '9999999979.99999'), + 'bytes': 0, + 'hash': utils.MD5_OF_EMPTY_STRING, + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': DELETE_MARKER_CONTENT_TYPE, + }, { + 'name': self.build_object_name('obj', '9999999989.99999'), + 'bytes': 0, + 'hash': utils.MD5_OF_EMPTY_STRING, + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': DELETE_MARKER_CONTENT_TYPE, + }] + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + req = Request.blank('/v1/a/c?versions', method='GET', + environ={'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + expected = [{ + 'name': 'obj', + 'bytes': 0, + 'version_id': '0000000020.00000', + 'hash': utils.MD5_OF_EMPTY_STRING, + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': DELETE_MARKER_CONTENT_TYPE, + 'is_latest': True, + }, { + 'name': 'obj', + 'bytes': 0, + 'version_id': '0000000010.00000', + 'hash': utils.MD5_OF_EMPTY_STRING, + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': DELETE_MARKER_CONTENT_TYPE, + 'is_latest': False, + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions_unversioned(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + # How did this get here??? 
Who knows -- maybe another container + # replica *does* know about versioning being enabled + 'bytes': 0, + 'name': 'obj', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % + self.build_symlink_path('c', 'obj', '9999999979.99999'), + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }] + + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPNotFound, {}, None) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_off}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertNotIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'version_id': 'null', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + 'is_latest': True, + }, { + 'bytes': 9, + 'name': 'obj', + 'version_id': '0000000020.00000', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions_delimiter(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 0, + 'name': 'obj', + 'hash': 'd41d8cd98f00b204e9800998ecf8427e; ' + 'symlink_target=%s; ' + 'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; ' + 'symlink_target_bytes=9' % + self.build_symlink_path('c', 'obj', '9999999979.99999'), + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }, { + 'subdir': 'subdir/' + }] + + versions_listing_body = [{ + 'bytes': 9, + 'name': self.build_object_name('obj', '9999999979.99999'), + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }, { + 'bytes': 8, + 'name': self.build_object_name('obj', '9999999989.99999'), + 'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + }, { + 'subdir': get_reserved_name('subdir/') + }] + + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions&delimiter=/', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'version_id': 'null', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + 'is_latest': True, + }, { + 'bytes': 9, + 'name': 'obj', + 
'version_id': '0000000020.00000', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }, { + 'bytes': 8, + 'name': 'obj', + 'version_id': '0000000010.00000', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }, { + 'subdir': 'subdir/' + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions_empty_primary(self): + versions_listing_body = [{ + 'bytes': 8, + 'name': self.build_object_name('obj', '9999999979.99999'), + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }, { + 'bytes': 8, + 'name': self.build_object_name('obj', '9999999989.99999'), + 'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + }] + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + '{}') + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'obj', + 'version_id': '0000000020.00000', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }, { + 'bytes': 8, + 'name': 'obj', + 'version_id': '0000000010.00000', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2', + 'last_modified': '1970-01-01T00:00:10.000000', + 'content_type': 'text/plain', + 'is_latest': False, + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions_error_versions_container(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 9, + 'name': 'obj', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }] + + self.app.register( + 'GET', self.build_versions_path(), + swob.HTTPInternalServerError, {}, '') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '500 Internal Error') + + def test_list_versions_empty_versions_container(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 9, + 'name': 'obj', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }] + + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPOk, {}, 
'{}') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'version_id': 'null', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + 'is_latest': True, + }, { + 'bytes': 9, + 'name': 'obj', + 'version_id': 'null', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_versions_404_versions_container(self): + listing_body = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + }, { + 'bytes': 9, + 'name': 'obj', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + }] + + self.app.register( + 'GET', self.build_versions_path(), swob.HTTPNotFound, {}, '') + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True}, + json.dumps(listing_body).encode('utf8')) + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + expected = [{ + 'bytes': 8, + 'name': 'my-other-object', + 'version_id': 'null', + 'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3', + 'last_modified': '1970-01-01T00:00:05.000000', + 'content_type': 'application/bar', + 'is_latest': True, + }, { + 'bytes': 9, + 'name': 'obj', + 'version_id': 'null', + 'hash': 'e55cedc11adb39c404b7365f7d6291fa', + 'last_modified': '1970-01-01T00:00:20.000000', + 'content_type': 'text/plain', + 'is_latest': True, + }] + self.assertEqual(expected, json.loads(body)) + + def test_bytes_count(self): + self.app.register( + 'HEAD', self.build_versions_path(), swob.HTTPOk, + {'X-Container-Bytes-Used': '17', + 'X-Container-Object-Count': '3'}, '') + self.app.register( + 'HEAD', '/v1/a/c', swob.HTTPOk, + {SYSMETA_VERSIONS_CONT: self.build_container_name('c'), + SYSMETA_VERSIONS_ENABLED: True, + 'X-Container-Bytes-Used': '8', + 'X-Container-Object-Count': '1'}, '') + req = Request.blank( + '/v1/a/c?versions', + environ={'REQUEST_METHOD': 'HEAD', + 'swift.cache': self.cache_version_on}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + self.assertIn(('X-Versions-Enabled', 'True'), headers) + self.assertIn(('X-Container-Bytes-Used', '25'), headers) + self.assertIn(('X-Container-Object-Count', '1'), headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + +class ObjectVersioningTestAccountOperations(ObjectVersioningBaseTestCase): 
+ + def test_list_containers(self): + listing_body = [{ + 'bytes': 10, + 'count': 2, + 'name': 'regular-cont', + 'last_modified': '1970-01-01T00:00:05.000000', + }, { + 'bytes': 0, + 'count': 3, + 'name': 'versioned-cont', + 'last_modified': '1970-01-01T00:00:20.000000', + }] + + versions_listing_body = [{ + 'bytes': 24, + 'count': 3, + 'name': self.build_container_name('versioned-cont'), + 'last_modified': '1970-01-01T00:00:20.000000', + }] + + cache = FakeMemcache() + + self.app.register( + 'GET', '/v1/a', swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + + params = { + 'format': 'json', + 'prefix': self.str_to_wsgi(get_reserved_name('versions')), + } + path = '/v1/a?%s' % urllib.parse.urlencode(params) + + self.app.register( + 'GET', path, swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + + req = Request.blank( + '/v1/a', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': cache}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + expected = [{ + 'bytes': 10, + 'count': 2, + 'name': 'regular-cont', + 'last_modified': '1970-01-01T00:00:05.000000', + }, { + 'bytes': 24, + 'count': 3, + 'name': 'versioned-cont', + 'last_modified': '1970-01-01T00:00:20.000000', + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_containers_prefix(self): + listing_body = [{ + 'bytes': 0, + 'count': 1, + 'name': 'versioned-cont', + 'last_modified': '1970-01-01T00:00:05.000000', + }] + + versions_listing_body = [{ + 'bytes': 24, + 'count': 3, + 'name': self.build_container_name('versioned-cont'), + 'last_modified': '1970-01-01T00:00:20.000000', + }] + + cache = FakeMemcache() + + path = '/v1/a?%s' % urllib.parse.urlencode({ + 'format': 'json', 'prefix': 'versioned-'}) + + self.app.register( + 'GET', path, swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + + path = '/v1/a?%s' % urllib.parse.urlencode({ + 'format': 'json', 'prefix': self.str_to_wsgi( + self.build_container_name('versioned-'))}) + + self.app.register( + 'GET', path, swob.HTTPOk, {}, + json.dumps(versions_listing_body).encode('utf8')) + + req = Request.blank( + '/v1/a?prefix=versioned-', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': cache}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + expected = [{ + 'bytes': 24, + 'count': 1, + 'name': 'versioned-cont', + 'last_modified': '1970-01-01T00:00:05.000000', + }] + self.assertEqual(expected, json.loads(body)) + + def test_list_orphan_hidden_containers(self): + + listing_body = [{ + 'bytes': 10, + 'count': 2, + 'name': 'alpha', + 'last_modified': '1970-01-01T00:00:05.000000', + }, { + 'bytes': 6, + 'count': 3, + 'name': 'bravo', + 'last_modified': '1970-01-01T00:00:20.000000', + }, { + 'bytes': 0, + 'count': 5, + 'name': 'charlie', + 'last_modified': '1970-01-01T00:00:30.000000', + }, { + 'bytes': 0, + 'count': 8, + 'name': 'zulu', + 'last_modified': '1970-01-01T00:00:40.000000', + }] + + versions_listing_body1 = [{ + 'bytes': 24, + 'count': 8, + 'name': self.build_container_name('bravo'), + 'last_modified': '1970-01-01T00:00:20.000000', + }, { + 'bytes': 123, + 'count': 23, + 'name': self.build_container_name('charlie'), + 'last_modified': '1970-01-01T00:00:30.000000', + }, { + 'bytes': 13, + 'count': 30, + 'name': self.build_container_name('kilo'), + 'last_modified': '1970-01-01T00:00:35.000000', + }, { + 'bytes': 83, + 'count': 13, + 'name': self.build_container_name('zulu'), + 'last_modified': '1970-01-01T00:00:40.000000', + }] + + cache = FakeMemcache() + + 
self.app.register( + 'GET', '/v1/a', swob.HTTPOk, {}, + json.dumps(listing_body).encode('utf8')) + + params = { + 'format': 'json', + 'prefix': self.str_to_wsgi(get_reserved_name('versions')), + } + path = '/v1/a?%s' % urllib.parse.urlencode(params) + + self.app.register( + 'GET', path, swob.HTTPOk, {}, + json.dumps(versions_listing_body1).encode('utf8')) + + req = Request.blank( + '/v1/a', + environ={'REQUEST_METHOD': 'GET', + 'swift.cache': cache}) + status, headers, body = self.call_ov(req) + self.assertEqual(status, '200 OK') + expected = [{ + 'bytes': 10, + 'count': 2, + 'name': 'alpha', + 'last_modified': '1970-01-01T00:00:05.000000', + }, { + 'bytes': 30, + 'count': 3, + 'name': 'bravo', + 'last_modified': '1970-01-01T00:00:20.000000', + }, { + 'bytes': 123, + 'count': 5, + 'name': 'charlie', + 'last_modified': '1970-01-01T00:00:30.000000', + }, { + 'bytes': 13, + 'count': 0, + 'name': 'kilo', + 'last_modified': '1970-01-01T00:00:35.000000', + }, { + 'bytes': 83, + 'count': 8, + 'name': 'zulu', + 'last_modified': '1970-01-01T00:00:40.000000', + }] + self.assertEqual(expected, json.loads(body)) + + +if __name__ == '__main__': + unittest.main() diff --git a/test/unit/common/middleware/test_slo.py b/test/unit/common/middleware/test_slo.py index 980352c8dc..95d7e68172 100644 --- a/test/unit/common/middleware/test_slo.py +++ b/test/unit/common/middleware/test_slo.py @@ -1310,12 +1310,9 @@ class TestSloDeleteManifest(SloTestCase): set(self.app.calls), set([('GET', '/v1/AUTH_test/deltest/man?multipart-manifest=get'), - ('DELETE', - '/v1/AUTH_test/deltest/gone?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/b_2?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/man?multipart-manifest=delete')])) + ('DELETE', '/v1/AUTH_test/deltest/gone'), + ('DELETE', '/v1/AUTH_test/deltest/b_2'), + ('DELETE', '/v1/AUTH_test/deltest/man')])) self.assertEqual(resp_data['Response Status'], '200 OK') self.assertEqual(resp_data['Number Deleted'], 2) self.assertEqual(resp_data['Number Not Found'], 1) @@ -1328,10 +1325,9 @@ class TestSloDeleteManifest(SloTestCase): self.assertEqual(set(self.app.calls), set([ ('GET', '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'), - ('DELETE', '/v1/AUTH_test/deltest/b_2?multipart-manifest=delete'), - ('DELETE', '/v1/AUTH_test/deltest/c_3?multipart-manifest=delete'), - ('DELETE', ('/v1/AUTH_test/deltest/' + - 'man-all-there?multipart-manifest=delete'))])) + ('DELETE', '/v1/AUTH_test/deltest/b_2'), + ('DELETE', '/v1/AUTH_test/deltest/c_3'), + ('DELETE', ('/v1/AUTH_test/deltest/man-all-there'))])) def test_handle_multipart_delete_non_ascii(self): if six.PY2: @@ -1356,10 +1352,9 @@ class TestSloDeleteManifest(SloTestCase): self.assertEqual(set(self.app.calls), set([ ('GET', '/v1/%s/deltest/man-all-there?multipart-manifest=get' % acct), - ('DELETE', '/v1/%s/deltest/b_2?multipart-manifest=delete' % acct), - ('DELETE', '/v1/%s/deltest/c_3?multipart-manifest=delete' % acct), - ('DELETE', ('/v1/%s/deltest/' - 'man-all-there?multipart-manifest=delete' % acct))])) + ('DELETE', '/v1/%s/deltest/b_2' % acct), + ('DELETE', '/v1/%s/deltest/c_3' % acct), + ('DELETE', ('/v1/%s/deltest/man-all-there' % acct))])) def test_handle_multipart_delete_nested(self): req = Request.blank( @@ -1369,24 +1364,16 @@ class TestSloDeleteManifest(SloTestCase): self.call_slo(req) self.assertEqual( set(self.app.calls), - set([('GET', '/v1/AUTH_test/deltest/' + - 'manifest-with-submanifest?multipart-manifest=get'), - ('GET', '/v1/AUTH_test/deltest/' + - 
'submanifest?multipart-manifest=get'), - ('DELETE', - '/v1/AUTH_test/deltest/a_1?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/b_2?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/c_3?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/' + - 'submanifest?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/d_3?multipart-manifest=delete'), - ('DELETE', - '/v1/AUTH_test/deltest/' + - 'manifest-with-submanifest?multipart-manifest=delete')])) + {('GET', '/v1/AUTH_test/deltest/' + + 'manifest-with-submanifest?multipart-manifest=get'), + ('GET', '/v1/AUTH_test/deltest/' + + 'submanifest?multipart-manifest=get'), + ('DELETE', '/v1/AUTH_test/deltest/a_1'), + ('DELETE', '/v1/AUTH_test/deltest/b_2'), + ('DELETE', '/v1/AUTH_test/deltest/c_3'), + ('DELETE', '/v1/AUTH_test/deltest/submanifest'), + ('DELETE', '/v1/AUTH_test/deltest/d_3'), + ('DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest')}) def test_handle_multipart_delete_nested_too_many_segments(self): req = Request.blank( @@ -1410,18 +1397,15 @@ class TestSloDeleteManifest(SloTestCase): 'HTTP_ACCEPT': 'application/json'}) status, headers, body = self.call_slo(req) resp_data = json.loads(body) - self.assertEqual( - set(self.app.calls), - set([('GET', '/v1/AUTH_test/deltest/' + - 'manifest-missing-submanifest?multipart-manifest=get'), - ('DELETE', '/v1/AUTH_test/deltest/' + - 'a_1?multipart-manifest=delete'), - ('GET', '/v1/AUTH_test/deltest/' + - 'missing-submanifest?multipart-manifest=get'), - ('DELETE', '/v1/AUTH_test/deltest/' + - 'd_3?multipart-manifest=delete'), - ('DELETE', '/v1/AUTH_test/deltest/' + - 'manifest-missing-submanifest?multipart-manifest=delete')])) + self.assertEqual(set(self.app.calls), { + ('GET', '/v1/AUTH_test/deltest/' + + 'manifest-missing-submanifest?multipart-manifest=get'), + ('DELETE', '/v1/AUTH_test/deltest/a_1'), + ('GET', '/v1/AUTH_test/deltest/' + + 'missing-submanifest?multipart-manifest=get'), + ('DELETE', '/v1/AUTH_test/deltest/d_3'), + ('DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest'), + }) self.assertEqual(resp_data['Response Status'], '200 OK') self.assertEqual(resp_data['Response Body'], '') self.assertEqual(resp_data['Number Deleted'], 3) @@ -1510,12 +1494,10 @@ class TestSloDeleteManifest(SloTestCase): set(self.app.calls), set([('GET', '/v1/AUTH_test/deltest/' + 'manifest-with-unauth-segment?multipart-manifest=get'), - ('DELETE', - '/v1/AUTH_test/deltest/a_1?multipart-manifest=delete'), - ('DELETE', '/v1/AUTH_test/deltest-unauth/' + - 'q_17?multipart-manifest=delete'), + ('DELETE', '/v1/AUTH_test/deltest/a_1'), + ('DELETE', '/v1/AUTH_test/deltest-unauth/q_17'), ('DELETE', '/v1/AUTH_test/deltest/' + - 'manifest-with-unauth-segment?multipart-manifest=delete')])) + 'manifest-with-unauth-segment')])) self.assertEqual(resp_data['Response Status'], '400 Bad Request') self.assertEqual(resp_data['Response Body'], '') self.assertEqual(resp_data['Number Deleted'], 2) @@ -1537,10 +1519,9 @@ class TestSloDeleteManifest(SloTestCase): self.assertEqual(set(self.app.calls), set([ ('GET', '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'), - ('DELETE', '/v1/AUTH_test/deltest/b_2?multipart-manifest=delete'), - ('DELETE', '/v1/AUTH_test/deltest/c_3?multipart-manifest=delete'), - ('DELETE', ('/v1/AUTH_test/deltest/' + - 'man-all-there?multipart-manifest=delete'))])) + ('DELETE', '/v1/AUTH_test/deltest/b_2'), + ('DELETE', '/v1/AUTH_test/deltest/c_3'), + ('DELETE', '/v1/AUTH_test/deltest/man-all-there')])) class 
TestSloHeadOldManifest(SloTestCase): diff --git a/test/unit/common/middleware/test_versioned_writes.py b/test/unit/common/middleware/test_versioned_writes.py index e4e7d4da6f..e9e37c8a1e 100644 --- a/test/unit/common/middleware/test_versioned_writes.py +++ b/test/unit/common/middleware/test_versioned_writes.py @@ -60,7 +60,8 @@ class VersionedWritesBaseTestCase(unittest.TestCase): def setUp(self): self.app = helpers.FakeSwift() conf = {'allow_versioned_writes': 'true'} - self.vw = versioned_writes.filter_factory(conf)(self.app) + self.vw = versioned_writes.legacy.VersionedWritesMiddleware( + self.app, conf) def tearDown(self): self.assertEqual(self.app.unclosed_requests, {}) @@ -842,7 +843,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase): self.assertTrue(path.startswith('/v1/a/ver_cont/001o/3')) self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers]) - @mock.patch('swift.common.middleware.versioned_writes.time.time', + @mock.patch('swift.common.middleware.versioned_writes.legacy.time.time', return_value=1234) def test_history_delete_marker_no_object_success(self, mock_time): self.app.register( @@ -872,7 +873,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase): self.assertEqual('application/x-deleted;swift_versions_deleted=1', calls[1].headers.get('Content-Type')) - @mock.patch('swift.common.middleware.versioned_writes.time.time', + @mock.patch('swift.common.middleware.versioned_writes.legacy.time.time', return_value=123456789.54321) def test_history_delete_marker_over_object_success(self, mock_time): self.app.register( diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index fa22dfbd9c..2039fa5b11 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -889,6 +889,32 @@ class TestTimestamp(unittest.TestCase): check_is_earlier(b'-9999.999') check_is_earlier(u'-1234_5678') + def test_inversion(self): + ts = utils.Timestamp(0) + self.assertIsInstance(~ts, utils.Timestamp) + self.assertEqual((~ts).internal, '9999999999.99999') + + ts = utils.Timestamp(123456.789) + self.assertIsInstance(~ts, utils.Timestamp) + self.assertEqual(ts.internal, '0000123456.78900') + self.assertEqual((~ts).internal, '9999876543.21099') + + timestamps = sorted(utils.Timestamp(random.random() * 1e10) + for _ in range(20)) + self.assertEqual([x.internal for x in timestamps], + sorted(x.internal for x in timestamps)) + self.assertEqual([(~x).internal for x in reversed(timestamps)], + sorted((~x).internal for x in timestamps)) + + ts = utils.Timestamp.now() + self.assertGreater(~ts, ts) # NB: will break around 2128 + + ts = utils.Timestamp.now(offset=1) + with self.assertRaises(ValueError) as caught: + ~ts + self.assertEqual(caught.exception.args[0], + 'Cannot invert timestamps with offsets') + class TestTimestampEncoding(unittest.TestCase): diff --git a/test/unit/common/test_wsgi.py b/test/unit/common/test_wsgi.py index 76743c7dfe..f2f71da001 100644 --- a/test/unit/common/test_wsgi.py +++ b/test/unit/common/test_wsgi.py @@ -1795,7 +1795,12 @@ class TestPipelineModification(unittest.TestCase): # anywhere other than an attribute named "app", but it works for now. 
pipe = [] for _ in range(1000): - pipe.append(app.__class__.__module__) + if app.__class__.__module__ == \ + 'swift.common.middleware.versioned_writes.legacy': + pipe.append('swift.common.middleware.versioned_writes') + else: + pipe.append(app.__class__.__module__) + if not hasattr(app, 'app'): break app = app.app diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 57b32df36f..89e8156f88 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -276,8 +276,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-4': 'Four', 'Content-Encoding': 'gzip', 'Foo': 'fooheader', - 'Bar': 'barheader', - 'Content-Type': 'application/x-test'} + 'Bar': 'barheader'} req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, headers=headers) @@ -286,6 +285,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(dict(resp.headers), { 'Content-Type': 'text/html; charset=UTF-8', 'Content-Length': str(len(resp.body)), + 'X-Backend-Content-Type': 'application/x-test', 'X-Object-Sysmeta-Color': 'blue', }) @@ -321,19 +321,20 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': post_timestamp, 'X-Object-Sysmeta-Color': 'red', - 'Content-Type': 'application/x-test'}) + 'Content-Type': 'application/x-test2'}) resp = req.get_response(self.object_controller) self.assertEqual(resp.status_int, 202) self.assertEqual(dict(resp.headers), { 'Content-Type': 'text/html; charset=UTF-8', 'Content-Length': str(len(resp.body)), + 'X-Backend-Content-Type': 'application/x-test2', 'X-Object-Sysmeta-Color': 'blue', }) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) self.assertEqual(dict(resp.headers), { - 'Content-Type': 'application/x-test', + 'Content-Type': 'application/x-test2', 'Content-Length': '6', 'Etag': etag, 'X-Object-Sysmeta-Color': 'blue', @@ -403,6 +404,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(dict(resp.headers), { 'Content-Type': 'text/html; charset=UTF-8', 'Content-Length': str(len(resp.body)), + 'X-Backend-Content-Type': 'application/x-test', 'X-Object-Sysmeta-Color': 'red', }) @@ -436,6 +438,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(dict(resp.headers), { 'Content-Type': 'text/html; charset=UTF-8', 'Content-Length': str(len(resp.body)), + 'X-Backend-Content-Type': 'application/x-test', 'X-Object-Sysmeta-Color': 'red', })
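
Editor's note (illustrative only, not part of the patch): the test hunks above exercise the new client-facing surface of this versioning mode — the `?versions` container-listing parameter, the per-entry `version_id`/`is_latest` fields, the `version-id` query parameter that the bulk middleware forwards per object, and the `X-Versions-Enabled` container header asserted throughout. The sketch below shows how a client might drive that API over plain HTTP. The endpoint URL and token are placeholders for whatever an auth step returns; raw `requests` is used (rather than python-swiftclient) purely to keep the query parameters visible, and the exact enable/disable semantics should be checked against the middleware docs in this patch.

```python
# Minimal usage sketch for the new object-versioning API, assuming a
# SAIO-style endpoint and a token obtained elsewhere (both placeholders).
import requests

STORAGE_URL = 'http://127.0.0.1:8080/v1/AUTH_test'   # placeholder
TOKEN = 'AUTH_tk-placeholder'                        # placeholder
HEADERS = {'X-Auth-Token': TOKEN}

container = 'c'
obj = 'obj'

# Turn versioning on for a container; the listing tests above assert the
# corresponding X-Versions-Enabled response header once this is set.
requests.post('%s/%s' % (STORAGE_URL, container),
              headers=dict(HEADERS, **{'X-Versions-Enabled': 'true'}))

# List every version of every object (newest first), as exercised by the
# test_list_versions_* cases: each entry carries name, version_id, is_latest.
resp = requests.get('%s/%s?versions&format=json' % (STORAGE_URL, container),
                    headers=HEADERS)
for entry in resp.json():
    print(entry['name'], entry.get('version_id'), entry.get('is_latest'))

# Address one specific version with the version-id query parameter -- the
# same parameter the bulk-delete middleware attaches for each listed object.
# The version id value mirrors the '0000000010.00000' style seen in the
# expected listings above.
requests.delete('%s/%s/%s?version-id=0000000010.00000'
                % (STORAGE_URL, container, obj),
                headers=HEADERS)
```

As the unversioned-listing test suggests, objects written before versioning was enabled surface with a `version_id` of `null`; treat that value as an opaque identifier returned by the listing rather than something to construct client-side.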