Support Storage Policy - Rev 3
* Functional tests now run in SAIO+SoF setup.
* Fix and update various constraints.
* Update object-server.conf, swift.conf files. Add test.conf file.
* Added ./.functests script

TODO:
* Reintroduce gluster_swift_tests.py with SoF specific functional tests.

Signed-off-by: Prashanth Pai <ppai@redhat.com>
This commit is contained in:
parent 33e8e2312e
commit abf0641e40
.functests (90 changed lines)
@ -18,6 +18,15 @@
# This program expects to be run by tox in a virtual python environment
# so that it does not pollute the host development system

GREEN='\e[0;32m'
RED='\e[0;31m'
NC='\e[0m' # No Color

print()
{
    echo -e "\n${GREEN}$*${NC}"
}

sudo_env()
{
    sudo bash -c "PATH=$PATH $*"
@ -25,54 +34,73 @@ sudo_env()

cleanup()
{
    sudo service memcached stop
    sudo_env swift-init main stop
    sudo rm -rf /etc/swift > /dev/null 2>&1
    sudo rm -rf /mnt/gluster-object/test{,2}/* > /dev/null 2>&1
    sudo setfattr -x user.swift.metadata /mnt/gluster-object/test{,2} > /dev/null 2>&1
    print "Stopping memcached"
    sudo service memcached stop
    print "Stopping swift services"
    sudo_env swift-init main stop
    print "Cleaning SoF mount point"
    sudo rm -rf /mnt/gluster-object/test/* > /dev/null 2>&1
    sudo setfattr -x user.swift.metadata /mnt/gluster-object/test > /dev/null 2>&1
    print "Invoking SAIO's resetswift script"
    resetswift
}

quit()
{
    echo "$1"
    exit 1
}


fail()
{
    cleanup
    quit "$1"
    cleanup
    echo -e "\n${RED}$1${NC}"
    exit 1
}

### MAIN ###
# This script runs functional tests only with tempauth

# Only run if there is no configuration in the system
if [ -x /etc/swift ] ; then
    quit "/etc/swift exists, cannot run functional tests."
print """
Before proceeding, please make sure you already have:
1. SAIO deployment. (with resetswift and remakerings script)
2. XFS/GlusterFS mounted at /mnt/gluster-object/test
"""

prompt=true
if [ "$1" == "-q" ]; then
    prompt=false
fi

# Check the directories exist
DIRS="/mnt/gluster-object /mnt/gluster-object/test /mnt/gluster-object/test2"
for d in $DIRS ; do
    if [ ! -x $d ] ; then
        quit "$d must exist on an XFS or GlusterFS volume"
    fi
done
if $prompt; then
    read -p "Continue ? " -r
    if [[ $REPLY =~ ^[Nn]$ ]]
    then
        exit 1
    fi
fi

print "Invoking SAIO's resetswift script"
resetswift

print "Invoking SAIO's remakerings script"
remakerings

print "Copying conf files into /etc/swift. This will replace swift.conf and test.conf"
cp etc/object-server.conf-gluster /etc/swift/object-server/5.conf
cp etc/swift.conf-gluster /etc/swift/swift.conf
cp etc/test.conf /etc/swift/test.conf

print "Generating additional object-rings for swiftonfile SP"
cd /etc/swift
swift-ring-builder object-1.builder create 1 1 1
swift-ring-builder object-1.builder add r1z1-127.0.0.1:6050/test 1
swift-ring-builder object-1.builder rebalance
cd -

export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf

# Install the configuration files
sudo mkdir /etc/swift > /dev/null 2>&1
sudo cp -r test/functional_auth/tempauth/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift"
sudo_env gluster-swift-gen-builders test test2 || fail "Unable to create ring files"

# Start the services
print "Starting memcached"
sudo service memcached start || fail "Unable to start memcached"
print "Starting swift services"
sudo_env swift-init main start || fail "Unable to start swift"

mkdir functional_tests_result > /dev/null 2>&1

print "Running functional tests"
nosetests -v --exe \
    --with-xunit \
    --xunit-file functional_tests_result/gluster-swift-generic-functional-TC-report.xml \
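Once the extra object-1 ring and the 'sof' storage policy are in place, a container can be pinned to the swiftonfile policy at creation time with the X-Storage-Policy header. A minimal sketch using python-swiftclient, assuming the SAIO proxy from this setup is listening on 127.0.0.1:8080 with the tempauth account from test.conf (test:tester/testing); the container and object names are illustrative:

# Illustrative only: create a container on the 'sof' policy and store one object in it.
from swiftclient import client as swift_client

auth_url = 'http://127.0.0.1:8080/auth/v1.0'
url, token = swift_client.get_auth(auth_url, 'test:tester', 'testing')

# X-Storage-Policy is honoured only at container creation time.
swift_client.put_container(url, token, 'sof-container',
                           headers={'X-Storage-Policy': 'sof'})
swift_client.put_object(url, token, 'sof-container', 'hello.txt',
                        contents='stored on the GlusterFS volume')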
@ -11,7 +11,7 @@ devices = /mnt/gluster-object
# you can *consider* setting this value to "false" to reduce the per-request
# overhead it can incur.
mount_check = true
bind_port = 6010
bind_port = 6050
#
# Maximum number of clients one worker can process simultaneously (it will
# actually accept N + 1). Setting this to one (1) will only handle one request
@ -34,11 +34,6 @@ use = egg:gluster_swift#object
user = root
log_facility = LOG_LOCAL2
log_level = WARN
# The following parameters are used by the object-expirer and need to be the
# same across all conf files!
auto_create_account_prefix = gs
expiring_objects_account_name = expiring
#
# For performance, after ensuring things are running in a stable manner, you
# can turn off normal request logging for the object server to reduce the
# per-request overhead and unclutter the log files. Warnings and errors will
@ -1,13 +1,42 @@
[DEFAULT]


[swift-hash]
# random unique string that can never change (DO NOT LOSE)
swift_hash_path_suffix = gluster

# swift_hash_path_suffix and swift_hash_path_prefix are used as part of the
# hashing algorithm when determining data placement in the cluster.
# These values should remain secret and MUST NOT change
# once a cluster has been deployed.

swift_hash_path_suffix = changeme
swift_hash_path_prefix = changeme

# storage policies are defined here and determine various characteristics
# about how objects are stored and treated. Policies are specified by name on
# a per container basis. Names are case-insensitive. The policy index is
# specified in the section header and is used internally. The policy with
# index 0 is always used for legacy containers and can be given a name for use
# in metadata however the ring file name will always be 'object.ring.gz' for
# backwards compatibility. If no policies are defined a policy with index 0
# will be automatically created for backwards compatibility and given the name
# Policy-0. A default policy is used when creating new containers when no
# policy is specified in the request. If no other policies are defined the
# policy with index 0 will be declared the default. If multiple policies are
# defined you must define a policy with index 0 and you must specify a
# default. It is recommended you always define a section for
# storage-policy:0.
[storage-policy:0]
name = Policy-0
# default = yes
# Default has been turned off here and set for policy-1 for running SoF
# functional tests.

# The following section defines a policy called 'sof' to be used by the
# swiftonfile object-server implementation.
[storage-policy:1]
name = sof
default = yes

# The swift-constraints section sets the basic constraints on data
# saved in the swift cluster.
# saved in the swift cluster. These constraints are automatically
# published by the proxy server in responses to /info requests.

[swift-constraints]

@ -15,9 +44,10 @@ swift_hash_path_suffix = gluster
# the cluster. This is also the limit on the size of each segment of
# a "large" object when using the large object manifest support.
# This value is set in bytes. Setting it to lower than 1MiB will cause
# some tests to fail.
# Default is 1 TiB = 2**30*1024
max_file_size = 1099511627776
# some tests to fail. It is STRONGLY recommended to leave this value at
# the default (5 * 2**30 + 2).

#max_file_size = 5368709122


# max_meta_name_length is the max number of bytes in the utf8 encoding
@ -43,43 +73,50 @@ max_file_size = 1099511627776

#max_meta_overall_size = 4096

# max_header_size is the max number of bytes in the utf8 encoding of each
# header. Using 8192 as default because eventlet use 8192 as max size of
# header line. This value may need to be increased when using identity
# v3 API tokens including more than 7 catalog entries.
# See also include_service_catalog in proxy-server.conf-sample
# (documented in overview_auth.rst)

# max_object_name_length is the max number of bytes in the utf8 encoding of an
# object name: Gluster FS can handle much longer file names, but the length
# between the slashes of the URL is handled below. Remember that most web
# clients can't handle anything greater than 2048, and those that do are
# rather clumsy.

max_object_name_length = 2048

# max_object_name_component_length (GlusterFS) is the max number of bytes in
# the utf8 encoding of an object name component (the part between the
# slashes); this is a limit imposed by the underlying file system (for XFS it
# is 255 bytes).

max_object_name_component_length = 255
#max_header_size = 8192

# container_listing_limit is the default (and max) number of items
# returned for a container listing request

#container_listing_limit = 10000


# account_listing_limit is the default (and max) number of items returned
# for an account listing request

#account_listing_limit = 10000

# SwiftOnFile constraints - do not exceed the maximum values which are
# set here as default

# max_account_name_length is the max number of bytes in the utf8 encoding of
# an account name: Gluster FS Filename limit (XFS limit?), must be the same
# size as max_object_name_component_length above.
# max_object_name_length is the max number of bytes in the utf8 encoding
# of an object name
max_object_name_length = 221
# Why 221?
# The longest filename supported by XFS is 255.
# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125
# SoF creates a temp file with the following naming convention:
# .OBJECT_NAME.<random-string>
# The random string is 32 characters long and the file name has two dots.
# Hence 255 - 32 - 2 = 221
# NOTE: This limitation can be safely avoided by using slashes in really long
# object names. Each segment between slashes ('/') should not exceed 221.


# max_account_name_length is the max number of bytes in the utf8 encoding
# of an account name
max_account_name_length = 255


# max_container_name_length is the max number of bytes in the utf8 encoding
# of a container name: Gluster FS Filename limit (XFS limit?), must be the same
# size as max_object_name_component_length above.

# of a container name
max_container_name_length = 255

# Why 255?
# The longest filename supported by XFS is 255.
# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125
# SoF creates the following directory hierarchy on the mount point: account/container
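For reference, the policy index in each [storage-policy:N] section also determines which object ring file the servers load; index 0 keeps the legacy object.ring.gz name and every other index gets a numbered suffix, which is why .functests builds object-1.builder for the 'sof' policy. A small sketch of that mapping (the helper name is ours, not Swift's):

# Illustrative helper, not part of Swift or gluster-swift.
def ring_file_for_policy(index):
    # Policy 0 keeps the legacy name for backwards compatibility;
    # the 'sof' policy above (index 1) maps to object-1.ring.gz.
    return 'object.ring.gz' if index == 0 else 'object-%d.ring.gz' % index

assert ring_file_for_policy(0) == 'object.ring.gz'
assert ring_file_for_policy(1) == 'object-1.ring.gz'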
etc/test.conf (new file, 69 lines)
@ -0,0 +1,69 @@
[func_test]
# sample config for Swift with tempauth
auth_host = 127.0.0.1
auth_port = 8080
auth_ssl = no
auth_prefix = /auth/
## sample config for Swift with Keystone
#auth_version = 2
#auth_host = localhost
#auth_port = 5000
#auth_ssl = no
#auth_prefix = /v2.0/

# Primary functional test account (needs admin access to the account)
account = test
username = tester
password = testing

# User on a second account (needs admin access to the account)
account2 = test2
username2 = tester2
password2 = testing2

# User on same account as first, but without admin access
username3 = tester3
password3 = testing3

collate = C

[unit_test]
fake_syslog = False

[probe_test]
# check_server_timeout = 30
# validate_rsync = false

[swift-constraints]
# The functional test runner will try to use the constraint values provided in
# the swift-constraints section of test.conf.
#
# If a constraint value does not exist in that section, or the
# swift-constraints section does not exist, the constraint values found in
# the /info API call (if successful) will be used.
#
# If a constraint value cannot be found in the /info results, either because
# the /info API call failed, or a value is not present, the constraint value
# used will fall back to those loaded by the constraints module at time of
# import (which will attempt to load /etc/swift/swift.conf, see the
# swift.common.constraints module for more information).
#
# Note that the cluster must have "sane" values for the test suite to pass
# (for some definition of sane).
#
#max_file_size = 1099511
#max_meta_name_length = 128
#max_meta_value_length = 256
#max_meta_count = 90
#max_meta_overall_size = 4096
#max_header_size = 8192
max_object_name_length = 221
#container_listing_limit = 10000
#account_listing_limit = 10000
max_account_name_length = 255
max_container_name_length = 255

# Newer swift versions default to strict cors mode, but older ones were the
# opposite.
#strict_cors_mode = true
#
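The precedence described in the comments above can be summarised as: module defaults are overridden by /info values, which in turn are overridden by this [swift-constraints] section. A rough sketch of that merge, with illustrative values only (not the actual test framework code):

# Illustrative precedence: module defaults < /info values < test.conf overrides.
module_defaults = {'max_object_name_length': 1024, 'max_file_size': 5368709122}
info_values = {'max_object_name_length': 1024}          # as returned by GET /info
test_conf_overrides = {'max_object_name_length': 221}   # from this file

effective = dict(module_defaults)
effective.update(info_values)
effective.update(test_conf_overrides)
assert effective['max_object_name_length'] == 221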
@ -14,52 +14,36 @@
# limitations under the License.

import os
try:
    from webob.exc import HTTPBadRequest
except ImportError:
    from swift.common.swob import HTTPBadRequest
from swift.common.swob import HTTPBadRequest
import swift.common.constraints
from gluster.swift.common import Glusterfs

MAX_OBJECT_NAME_COMPONENT_LENGTH = 255


def set_object_name_component_length(len=None):
    global MAX_OBJECT_NAME_COMPONENT_LENGTH

    if len:
        MAX_OBJECT_NAME_COMPONENT_LENGTH = len
    elif hasattr(swift.common.constraints, 'constraints_conf_int'):
        MAX_OBJECT_NAME_COMPONENT_LENGTH = \
            swift.common.constraints.constraints_conf_int(
                'max_object_name_component_length', 255)
    else:
        MAX_OBJECT_NAME_COMPONENT_LENGTH = 255
    return

set_object_name_component_length()


def get_object_name_component_length():
    return MAX_OBJECT_NAME_COMPONENT_LENGTH
SOF_MAX_OBJECT_NAME_LENGTH = 221
# Why 221?
# The longest filename supported by XFS is 255.
# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125
# SoF creates a temp file with the following naming convention:
# .OBJECT_NAME.<random-string>
# The random string is 32 characters long and the file name has two dots.
# Hence 255 - 32 - 2 = 221
# NOTE: This limitation can be safely avoided by using slashes in really long
# object names. Each segment between slashes ('/') should not exceed 221.


def validate_obj_name_component(obj):
    if not obj:
        return 'cannot begin, end, or have contiguous %s\'s' % os.path.sep
    if len(obj) > MAX_OBJECT_NAME_COMPONENT_LENGTH:
    if len(obj) > SOF_MAX_OBJECT_NAME_LENGTH:
        return 'too long (%d)' % len(obj)
    if obj == '.' or obj == '..':
        return 'cannot be . or ..'
    return ''

# Save the original check object creation
__check_object_creation = swift.common.constraints.check_object_creation
__check_metadata = swift.common.constraints.check_metadata
# Store Swift's check_object_creation method to be invoked later
swift_check_object_creation = swift.common.constraints.check_object_creation


# Define our new one which invokes the original
def gluster_check_object_creation(req, object_name):
def sof_check_object_creation(req, object_name):
    """
    Check to ensure that everything is alright about an object to be created.
    Monkey patches swift.common.constraints.check_object_creation, invoking
@ -74,8 +58,10 @@ def gluster_check_object_creation(req, object_name):
    :raises HTTPBadRequest: missing or bad content-type header, or
                            bad metadata
    """
    ret = __check_object_creation(req, object_name)
    # Invoke Swift's method
    ret = swift_check_object_creation(req, object_name)

    # SoF's additional checks
    if ret is None:
        for obj in object_name.split(os.path.sep):
            reason = validate_obj_name_component(obj)
@ -86,9 +72,3 @@ def gluster_check_object_creation(req, object_name):
                                request=req,
                                content_type='text/plain')
    return ret

# Replace the original checks with ours
swift.common.constraints.check_object_creation = gluster_check_object_creation

# Replace the original check mount with ours
swift.common.constraints.check_mount = Glusterfs.mount
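The per-component checks above are easy to exercise directly; a quick sketch of the expected behaviour, assuming the gluster-swift package is installed so the module is importable under the path used by the object server (gluster.swift.common.constraints):

# Illustrative check of validate_obj_name_component as patched in this commit.
from gluster.swift.common.constraints import validate_obj_name_component

assert validate_obj_name_component('a' * 221) == ''          # at the 221-byte limit
assert 'too long' in validate_obj_name_component('a' * 222)  # one byte over
assert validate_obj_name_component('..') == 'cannot be . or ..'
assert 'contiguous' in validate_obj_name_component('')       # empty path component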
@ -179,13 +179,6 @@ def make_directory(full_path, uid, gid, metadata=None):

_fs_conf = ConfigParser()
if _fs_conf.read(os.path.join('/etc/swift', 'fs.conf')):
    try:
        _mkdir_locking = _fs_conf.get('DEFAULT', 'mkdir_locking', "no") \
            in TRUE_VALUES
        logging.warn("The option mkdir_locking has been deprecated and is"
                     " no longer supported")
    except (NoSectionError, NoOptionError):
        pass
    try:
        _use_put_mount = _fs_conf.get('DEFAULT', 'use_put_mount', "no") \
            in TRUE_VALUES
@ -15,8 +15,6 @@

""" Object Server for Gluster for Swift """

# Simply importing this monkey patches the constraint handling to fit our
# needs
import gluster.swift.common.constraints    # noqa
from swift.common.swob import HTTPConflict
from swift.common.utils import public, timing_stats
@ -65,6 +63,8 @@ class ObjectController(server.ObjectController):
    @timing_stats()
    def PUT(self, request):
        try:
            server.check_object_creation = \
                gluster.swift.common.constraints.sof_check_object_creation
            return server.ObjectController.PUT(self, request)
        except (AlreadyExistsAsFile, AlreadyExistsAsDir):
            device = \
@ -53,12 +53,12 @@ def get_config(section_name=None, defaults=None):
    :param section_name: the section to read (all sections if not defined)
    :param defaults: an optional dictionary namespace of defaults
    """
    config_file = os.environ.get('SWIFT_TEST_CONFIG_FILE',
                                 '/etc/swift/test.conf')
    config = {}
    if defaults is not None:
        config.update(defaults)

    config_file = os.environ.get('SWIFT_TEST_CONFIG_FILE',
                                 '/etc/swift/test.conf')
    try:
        config = readconf(config_file, section_name)
    except SystemExit:
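This is the hook that lets .functests point the runner at the installed configuration via SWIFT_TEST_CONFIG_FILE. A minimal sketch of how a caller picks up the [func_test] section, assuming /etc/swift/test.conf is the file installed above:

# Illustrative caller of get_config(); mirrors the export done in .functests.
import os
from test import get_config

os.environ['SWIFT_TEST_CONFIG_FILE'] = '/etc/swift/test.conf'
conf = get_config('func_test')
print(conf['account'] + ':' + conf['username'])   # e.g. test:tester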
@ -0,0 +1,731 @@
|
||||
# Copyright (c) 2014 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import pickle
|
||||
import socket
|
||||
import locale
|
||||
import eventlet
|
||||
import eventlet.debug
|
||||
import functools
|
||||
import random
|
||||
from time import time, sleep
|
||||
from httplib import HTTPException
|
||||
from urlparse import urlparse
|
||||
from nose import SkipTest
|
||||
from contextlib import closing
|
||||
from gzip import GzipFile
|
||||
from shutil import rmtree
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from test import get_config
|
||||
from test.functional.swift_test_client import Connection, ResponseError
|
||||
# This has the side effect of mocking out the xattr module so that unit tests
|
||||
# (and in this case, when in-process functional tests are called for) can run
|
||||
# on file systems that don't support extended attributes.
|
||||
from test.unit import debug_logger, FakeMemcache
|
||||
|
||||
from swift.common import constraints, utils, ring
|
||||
from swift.common.wsgi import monkey_patch_mimetools
|
||||
from swift.common.middleware import catch_errors, gatekeeper, healthcheck, \
|
||||
proxy_logging, container_sync, bulk, tempurl, slo, dlo, ratelimit, \
|
||||
tempauth, container_quotas, account_quotas
|
||||
from swift.common.utils import config_true_value
|
||||
from swift.proxy import server as proxy_server
|
||||
from swift.account import server as account_server
|
||||
from swift.container import server as container_server
|
||||
from swift.obj import server as object_server
|
||||
import swift.proxy.controllers.obj
|
||||
|
||||
# In order to get the proper blocking behavior of sockets without using
|
||||
# threads, where we can set an arbitrary timeout for some piece of code under
|
||||
# test, we use eventlet with the standard socket library patched. We have to
|
||||
# perform this setup at module import time, since all the socket module
|
||||
# bindings in the swiftclient code will have been made by the time nose
|
||||
# invokes the package or class setup methods.
|
||||
eventlet.hubs.use_hub(utils.get_hub())
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
eventlet.debug.hub_exceptions(False)
|
||||
|
||||
from swiftclient import get_auth, http_connection
|
||||
|
||||
has_insecure = False
|
||||
try:
|
||||
from swiftclient import __version__ as client_version
|
||||
# Prevent a ValueError in StrictVersion with '2.0.3.68.ga99c2ff'
|
||||
client_version = '.'.join(client_version.split('.')[:3])
|
||||
except ImportError:
|
||||
# Pre-PBR we had version, not __version__. Anyhow...
|
||||
client_version = '1.2'
|
||||
from distutils.version import StrictVersion
|
||||
if StrictVersion(client_version) >= StrictVersion('2.0'):
|
||||
has_insecure = True
|
||||
|
||||
|
||||
config = {}
|
||||
web_front_end = None
|
||||
normalized_urls = None
|
||||
|
||||
# If no config was read, we will fall back to old school env vars
|
||||
swift_test_auth_version = None
|
||||
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
|
||||
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None]
|
||||
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None]
|
||||
swift_test_tenant = ['', '', '']
|
||||
swift_test_perm = ['', '', '']
|
||||
|
||||
skip, skip2, skip3 = False, False, False
|
||||
|
||||
orig_collate = ''
|
||||
insecure = False
|
||||
|
||||
orig_hash_path_suff_pref = ('', '')
|
||||
orig_swift_conf_name = None
|
||||
|
||||
in_process = False
|
||||
_testdir = _test_servers = _test_sockets = _test_coros = None
|
||||
|
||||
|
||||
class FakeMemcacheMiddleware(object):
|
||||
"""
|
||||
Caching middleware that fakes out caching in swift.
|
||||
"""
|
||||
|
||||
def __init__(self, app, conf):
|
||||
self.app = app
|
||||
self.memcache = FakeMemcache()
|
||||
|
||||
def __call__(self, env, start_response):
|
||||
env['swift.cache'] = self.memcache
|
||||
return self.app(env, start_response)
|
||||
|
||||
|
||||
def fake_memcache_filter_factory(conf):
|
||||
def filter_app(app):
|
||||
return FakeMemcacheMiddleware(app, conf)
|
||||
return filter_app
|
||||
|
||||
|
||||
# swift.conf contents for in-process functional test runs
|
||||
functests_swift_conf = '''
|
||||
[swift-hash]
|
||||
swift_hash_path_suffix = inprocfunctests
|
||||
swift_hash_path_prefix = inprocfunctests
|
||||
|
||||
[swift-constraints]
|
||||
max_file_size = %d
|
||||
''' % ((8 * 1024 * 1024) + 2) # 8 MB + 2
|
||||
|
||||
|
||||
def in_process_setup(the_object_server=object_server):
|
||||
print >>sys.stderr, 'IN-PROCESS SERVERS IN USE FOR FUNCTIONAL TESTS'
|
||||
|
||||
monkey_patch_mimetools()
|
||||
|
||||
global _testdir
|
||||
_testdir = os.path.join(mkdtemp(), 'tmp_functional')
|
||||
utils.mkdirs(_testdir)
|
||||
rmtree(_testdir)
|
||||
utils.mkdirs(os.path.join(_testdir, 'sda1'))
|
||||
utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
|
||||
utils.mkdirs(os.path.join(_testdir, 'sdb1'))
|
||||
utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
|
||||
|
||||
swift_conf = os.path.join(_testdir, "swift.conf")
|
||||
with open(swift_conf, "w") as scfp:
|
||||
scfp.write(functests_swift_conf)
|
||||
|
||||
global orig_swift_conf_name
|
||||
orig_swift_conf_name = utils.SWIFT_CONF_FILE
|
||||
utils.SWIFT_CONF_FILE = swift_conf
|
||||
constraints.reload_constraints()
|
||||
global config
|
||||
if constraints.SWIFT_CONSTRAINTS_LOADED:
|
||||
# Use the swift constraints that are loaded for the test framework
|
||||
# configuration
|
||||
config.update(constraints.EFFECTIVE_CONSTRAINTS)
|
||||
else:
|
||||
# In-process swift constraints were not loaded, somethings wrong
|
||||
raise SkipTest
|
||||
global orig_hash_path_suff_pref
|
||||
orig_hash_path_suff_pref = utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX
|
||||
utils.validate_hash_conf()
|
||||
|
||||
# We create the proxy server listening socket to get its port number so
|
||||
# that we can add it as the "auth_port" value for the functional test
|
||||
# clients.
|
||||
prolis = eventlet.listen(('localhost', 0))
|
||||
|
||||
# The following set of configuration values is used both for the
|
||||
# functional test frame work and for the various proxy, account, container
|
||||
# and object servers.
|
||||
config.update({
|
||||
# Values needed by the various in-process swift servers
|
||||
'devices': _testdir,
|
||||
'swift_dir': _testdir,
|
||||
'mount_check': 'false',
|
||||
'client_timeout': 4,
|
||||
'allow_account_management': 'true',
|
||||
'account_autocreate': 'true',
|
||||
'allowed_headers':
|
||||
'content-disposition, content-encoding, x-delete-at,'
|
||||
' x-object-manifest, x-static-large-object',
|
||||
'allow_versions': 'True',
|
||||
# Below are values used by the functional test framework, as well as
|
||||
# by the various in-process swift servers
|
||||
'auth_host': '127.0.0.1',
|
||||
'auth_port': str(prolis.getsockname()[1]),
|
||||
'auth_ssl': 'no',
|
||||
'auth_prefix': '/auth/',
|
||||
# Primary functional test account (needs admin access to the
|
||||
# account)
|
||||
'account': 'test',
|
||||
'username': 'tester',
|
||||
'password': 'testing',
|
||||
# User on a second account (needs admin access to the account)
|
||||
'account2': 'test2',
|
||||
'username2': 'tester2',
|
||||
'password2': 'testing2',
|
||||
# User on same account as first, but without admin access
|
||||
'username3': 'tester3',
|
||||
'password3': 'testing3',
|
||||
# For tempauth middleware
|
||||
'user_admin_admin': 'admin .admin .reseller_admin',
|
||||
'user_test_tester': 'testing .admin',
|
||||
'user_test2_tester2': 'testing2 .admin',
|
||||
'user_test_tester3': 'testing3'
|
||||
})
|
||||
|
||||
acc1lis = eventlet.listen(('localhost', 0))
|
||||
acc2lis = eventlet.listen(('localhost', 0))
|
||||
con1lis = eventlet.listen(('localhost', 0))
|
||||
con2lis = eventlet.listen(('localhost', 0))
|
||||
obj1lis = eventlet.listen(('localhost', 0))
|
||||
obj2lis = eventlet.listen(('localhost', 0))
|
||||
global _test_sockets
|
||||
_test_sockets = \
|
||||
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis)
|
||||
|
||||
account_ring_path = os.path.join(_testdir, 'account.ring.gz')
|
||||
with closing(GzipFile(account_ring_path, 'wb')) as f:
|
||||
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
|
||||
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
|
||||
'port': acc1lis.getsockname()[1]},
|
||||
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
|
||||
'port': acc2lis.getsockname()[1]}], 30),
|
||||
f)
|
||||
container_ring_path = os.path.join(_testdir, 'container.ring.gz')
|
||||
with closing(GzipFile(container_ring_path, 'wb')) as f:
|
||||
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
|
||||
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
|
||||
'port': con1lis.getsockname()[1]},
|
||||
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
|
||||
'port': con2lis.getsockname()[1]}], 30),
|
||||
f)
|
||||
object_ring_path = os.path.join(_testdir, 'object.ring.gz')
|
||||
with closing(GzipFile(object_ring_path, 'wb')) as f:
|
||||
pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
|
||||
[{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
|
||||
'port': obj1lis.getsockname()[1]},
|
||||
{'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
|
||||
'port': obj2lis.getsockname()[1]}], 30),
|
||||
f)
|
||||
|
||||
eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
|
||||
# Turn off logging requests by the underlying WSGI software.
|
||||
eventlet.wsgi.HttpProtocol.log_request = lambda *a: None
|
||||
logger = utils.get_logger(config, 'wsgi-server', log_route='wsgi')
|
||||
# Redirect logging other messages by the underlying WSGI software.
|
||||
eventlet.wsgi.HttpProtocol.log_message = \
|
||||
lambda s, f, *a: logger.error('ERROR WSGI: ' + f % a)
|
||||
# Default to only 4 seconds for in-process functional test runs
|
||||
eventlet.wsgi.WRITE_TIMEOUT = 4
|
||||
|
||||
prosrv = proxy_server.Application(config, logger=debug_logger('proxy'))
|
||||
acc1srv = account_server.AccountController(
|
||||
config, logger=debug_logger('acct1'))
|
||||
acc2srv = account_server.AccountController(
|
||||
config, logger=debug_logger('acct2'))
|
||||
con1srv = container_server.ContainerController(
|
||||
config, logger=debug_logger('cont1'))
|
||||
con2srv = container_server.ContainerController(
|
||||
config, logger=debug_logger('cont2'))
|
||||
obj1srv = the_object_server.ObjectController(
|
||||
config, logger=debug_logger('obj1'))
|
||||
obj2srv = the_object_server.ObjectController(
|
||||
config, logger=debug_logger('obj2'))
|
||||
global _test_servers
|
||||
_test_servers = \
|
||||
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv)
|
||||
|
||||
pipeline = [
|
||||
catch_errors.filter_factory,
|
||||
gatekeeper.filter_factory,
|
||||
healthcheck.filter_factory,
|
||||
proxy_logging.filter_factory,
|
||||
fake_memcache_filter_factory,
|
||||
container_sync.filter_factory,
|
||||
bulk.filter_factory,
|
||||
tempurl.filter_factory,
|
||||
slo.filter_factory,
|
||||
dlo.filter_factory,
|
||||
ratelimit.filter_factory,
|
||||
tempauth.filter_factory,
|
||||
container_quotas.filter_factory,
|
||||
account_quotas.filter_factory,
|
||||
proxy_logging.filter_factory,
|
||||
]
|
||||
app = prosrv
|
||||
import mock
|
||||
for filter_factory in reversed(pipeline):
|
||||
app_filter = filter_factory(config)
|
||||
with mock.patch('swift.common.utils') as mock_utils:
|
||||
mock_utils.get_logger.return_value = None
|
||||
app = app_filter(app)
|
||||
app.logger = prosrv.logger
|
||||
|
||||
nl = utils.NullLogger()
|
||||
prospa = eventlet.spawn(eventlet.wsgi.server, prolis, app, nl)
|
||||
acc1spa = eventlet.spawn(eventlet.wsgi.server, acc1lis, acc1srv, nl)
|
||||
acc2spa = eventlet.spawn(eventlet.wsgi.server, acc2lis, acc2srv, nl)
|
||||
con1spa = eventlet.spawn(eventlet.wsgi.server, con1lis, con1srv, nl)
|
||||
con2spa = eventlet.spawn(eventlet.wsgi.server, con2lis, con2srv, nl)
|
||||
obj1spa = eventlet.spawn(eventlet.wsgi.server, obj1lis, obj1srv, nl)
|
||||
obj2spa = eventlet.spawn(eventlet.wsgi.server, obj2lis, obj2srv, nl)
|
||||
global _test_coros
|
||||
_test_coros = \
|
||||
(prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa)
|
||||
|
||||
# Create accounts "test" and "test2"
|
||||
def create_account(act):
|
||||
ts = utils.normalize_timestamp(time())
|
||||
partition, nodes = prosrv.account_ring.get_nodes(act)
|
||||
for node in nodes:
|
||||
# Note: we are just using the http_connect method in the object
|
||||
# controller here to talk to the account server nodes.
|
||||
conn = swift.proxy.controllers.obj.http_connect(
|
||||
node['ip'], node['port'], node['device'], partition, 'PUT',
|
||||
'/' + act, {'X-Timestamp': ts, 'x-trans-id': act})
|
||||
resp = conn.getresponse()
|
||||
assert(resp.status == 201)
|
||||
|
||||
create_account('AUTH_test')
|
||||
create_account('AUTH_test2')
|
||||
|
||||
cluster_info = {}
|
||||
|
||||
|
||||
def get_cluster_info():
|
||||
# The fallback constraints used for testing will come from the current
|
||||
# effective constraints.
|
||||
eff_constraints = dict(constraints.EFFECTIVE_CONSTRAINTS)
|
||||
|
||||
# We'll update those constraints based on what the /info API provides, if
|
||||
# anything.
|
||||
global cluster_info
|
||||
try:
|
||||
conn = Connection(config)
|
||||
conn.authenticate()
|
||||
cluster_info.update(conn.cluster_info())
|
||||
except (ResponseError, socket.error):
|
||||
# Failed to get cluster_information via /info API, so fall back on
|
||||
# test.conf data
|
||||
pass
|
||||
else:
|
||||
eff_constraints.update(cluster_info['swift'])
|
||||
|
||||
# Finally, we'll allow any constraint present in the swift-constraints
|
||||
# section of test.conf to override everything. Note that only those
|
||||
# constraints defined in the constraints module are converted to integers.
|
||||
test_constraints = get_config('swift-constraints')
|
||||
for k in constraints.DEFAULT_CONSTRAINTS:
|
||||
try:
|
||||
test_constraints[k] = int(test_constraints[k])
|
||||
except KeyError:
|
||||
pass
|
||||
except ValueError:
|
||||
print >>sys.stderr, "Invalid constraint value: %s = %s" % (
|
||||
k, test_constraints[k])
|
||||
eff_constraints.update(test_constraints)
|
||||
|
||||
# Just make it look like these constraints were loaded from a /info call,
|
||||
# even if the /info call failed, or when they are overridden by values
|
||||
# from the swift-constraints section of test.conf
|
||||
cluster_info['swift'] = eff_constraints
|
||||
|
||||
|
||||
def setup_package():
|
||||
in_process_env = os.environ.get('SWIFT_TEST_IN_PROCESS')
|
||||
if in_process_env is not None:
|
||||
use_in_process = utils.config_true_value(in_process_env)
|
||||
else:
|
||||
use_in_process = None
|
||||
|
||||
global in_process
|
||||
|
||||
if use_in_process:
|
||||
# Explicitly set to True, so barrel on ahead with in-process
|
||||
# functional test setup.
|
||||
in_process = True
|
||||
# NOTE: No attempt is made to read a local test.conf file.
|
||||
else:
|
||||
if use_in_process is None:
|
||||
# Not explicitly set, default to using in-process functional tests
|
||||
# if the test.conf file is not found, or does not provide a usable
|
||||
# configuration.
|
||||
config.update(get_config('func_test'))
|
||||
if config:
|
||||
in_process = False
|
||||
else:
|
||||
in_process = True
|
||||
else:
|
||||
# Explicitly set to False, do not attempt to use in-process
|
||||
# functional tests, be sure we attempt to read from local
|
||||
# test.conf file.
|
||||
in_process = False
|
||||
config.update(get_config('func_test'))
|
||||
|
||||
if in_process:
|
||||
in_process_setup()
|
||||
|
||||
global web_front_end
|
||||
web_front_end = config.get('web_front_end', 'integral')
|
||||
global normalized_urls
|
||||
normalized_urls = config.get('normalized_urls', False)
|
||||
|
||||
global orig_collate
|
||||
orig_collate = locale.setlocale(locale.LC_COLLATE)
|
||||
locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C'))
|
||||
|
||||
global insecure
|
||||
insecure = config_true_value(config.get('insecure', False))
|
||||
|
||||
global swift_test_auth_version
|
||||
global swift_test_auth
|
||||
global swift_test_user
|
||||
global swift_test_key
|
||||
global swift_test_tenant
|
||||
global swift_test_perm
|
||||
|
||||
if config:
|
||||
swift_test_auth_version = str(config.get('auth_version', '1'))
|
||||
|
||||
swift_test_auth = 'http'
|
||||
if config_true_value(config.get('auth_ssl', 'no')):
|
||||
swift_test_auth = 'https'
|
||||
if 'auth_prefix' not in config:
|
||||
config['auth_prefix'] = '/'
|
||||
try:
|
||||
suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % config
|
||||
swift_test_auth += suffix
|
||||
except KeyError:
|
||||
pass # skip
|
||||
|
||||
if swift_test_auth_version == "1":
|
||||
swift_test_auth += 'v1.0'
|
||||
|
||||
try:
|
||||
if 'account' in config:
|
||||
swift_test_user[0] = '%(account)s:%(username)s' % config
|
||||
else:
|
||||
swift_test_user[0] = '%(username)s' % config
|
||||
swift_test_key[0] = config['password']
|
||||
except KeyError:
|
||||
# bad config, no account/username configured, tests cannot be
|
||||
# run
|
||||
pass
|
||||
try:
|
||||
swift_test_user[1] = '%s%s' % (
|
||||
'%s:' % config['account2'] if 'account2' in config else '',
|
||||
config['username2'])
|
||||
swift_test_key[1] = config['password2']
|
||||
except KeyError:
|
||||
pass # old config, no second account tests can be run
|
||||
try:
|
||||
swift_test_user[2] = '%s%s' % (
|
||||
'%s:' % config['account'] if 'account'
|
||||
in config else '', config['username3'])
|
||||
swift_test_key[2] = config['password3']
|
||||
except KeyError:
|
||||
pass # old config, no third account tests can be run
|
||||
|
||||
for _ in range(3):
|
||||
swift_test_perm[_] = swift_test_user[_]
|
||||
|
||||
else:
|
||||
swift_test_user[0] = config['username']
|
||||
swift_test_tenant[0] = config['account']
|
||||
swift_test_key[0] = config['password']
|
||||
swift_test_user[1] = config['username2']
|
||||
swift_test_tenant[1] = config['account2']
|
||||
swift_test_key[1] = config['password2']
|
||||
swift_test_user[2] = config['username3']
|
||||
swift_test_tenant[2] = config['account']
|
||||
swift_test_key[2] = config['password3']
|
||||
|
||||
for _ in range(3):
|
||||
swift_test_perm[_] = swift_test_tenant[_] + ':' \
|
||||
+ swift_test_user[_]
|
||||
|
||||
global skip
|
||||
skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]])
|
||||
if skip:
|
||||
print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG'
|
||||
|
||||
global skip2
|
||||
skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]])
|
||||
if not skip and skip2:
|
||||
print >>sys.stderr, \
|
||||
'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS' \
|
||||
' DUE TO NO CONFIG FOR THEM'
|
||||
|
||||
global skip3
|
||||
skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]])
|
||||
if not skip and skip3:
|
||||
print >>sys.stderr, \
|
||||
'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'
|
||||
|
||||
get_cluster_info()
|
||||
|
||||
|
||||
def teardown_package():
|
||||
global orig_collate
|
||||
locale.setlocale(locale.LC_COLLATE, orig_collate)
|
||||
|
||||
global in_process
|
||||
if in_process:
|
||||
try:
|
||||
for server in _test_coros:
|
||||
server.kill()
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
rmtree(os.path.dirname(_testdir))
|
||||
except Exception:
|
||||
pass
|
||||
utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX = \
|
||||
orig_hash_path_suff_pref
|
||||
utils.SWIFT_CONF_FILE = orig_swift_conf_name
|
||||
constraints.reload_constraints()
|
||||
|
||||
|
||||
class AuthError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InternalServerError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
url = [None, None, None]
|
||||
token = [None, None, None]
|
||||
parsed = [None, None, None]
|
||||
conn = [None, None, None]
|
||||
|
||||
|
||||
def connection(url):
|
||||
if has_insecure:
|
||||
return http_connection(url, insecure=insecure)
|
||||
return http_connection(url)
|
||||
|
||||
|
||||
def retry(func, *args, **kwargs):
|
||||
"""
|
||||
You can use the kwargs to override:
|
||||
'retries' (default: 5)
|
||||
'use_account' (default: 1) - which user's token to pass
|
||||
'url_account' (default: matches 'use_account') - which user's storage URL
|
||||
'resource' (default: url[url_account] - URL to connect to; retry()
|
||||
will interpolate the variable :storage_url: if present
|
||||
"""
|
||||
global url, token, parsed, conn
|
||||
retries = kwargs.get('retries', 5)
|
||||
attempts, backoff = 0, 1
|
||||
|
||||
# use account #1 by default; turn user's 1-indexed account into 0-indexed
|
||||
use_account = kwargs.pop('use_account', 1) - 1
|
||||
|
||||
# access our own account by default
|
||||
url_account = kwargs.pop('url_account', use_account + 1) - 1
|
||||
|
||||
while attempts <= retries:
|
||||
attempts += 1
|
||||
try:
|
||||
if not url[use_account] or not token[use_account]:
|
||||
url[use_account], token[use_account] = \
|
||||
get_auth(swift_test_auth, swift_test_user[use_account],
|
||||
swift_test_key[use_account],
|
||||
snet=False,
|
||||
tenant_name=swift_test_tenant[use_account],
|
||||
auth_version=swift_test_auth_version,
|
||||
os_options={})
|
||||
parsed[use_account] = conn[use_account] = None
|
||||
if not parsed[use_account] or not conn[use_account]:
|
||||
parsed[use_account], conn[use_account] = \
|
||||
connection(url[use_account])
|
||||
|
||||
# default resource is the account url[url_account]
|
||||
resource = kwargs.pop('resource', '%(storage_url)s')
|
||||
template_vars = {'storage_url': url[url_account]}
|
||||
parsed_result = urlparse(resource % template_vars)
|
||||
return func(url[url_account], token[use_account],
|
||||
parsed_result, conn[url_account],
|
||||
*args, **kwargs)
|
||||
except (socket.error, HTTPException):
|
||||
if attempts > retries:
|
||||
raise
|
||||
parsed[use_account] = conn[use_account] = None
|
||||
except AuthError:
|
||||
url[use_account] = token[use_account] = None
|
||||
continue
|
||||
except InternalServerError:
|
||||
pass
|
||||
if attempts <= retries:
|
||||
sleep(backoff)
|
||||
backoff *= 2
|
||||
raise Exception('No result after %s retries.' % retries)
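retry() is used throughout the suite in the same shape as reset_acl() below; a minimal caller looks like this (the head_account helper is illustrative, and use_account=2 assumes the second account from test.conf is configured):

# Illustrative caller: HEAD the second account with automatic re-auth and retries.
def head_account(url, token, parsed, conn):
    conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
    return check_response(conn)

resp = retry(head_account, use_account=2)
resp.read()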
|
||||
|
||||
|
||||
def check_response(conn):
|
||||
resp = conn.getresponse()
|
||||
if resp.status == 401:
|
||||
resp.read()
|
||||
raise AuthError()
|
||||
elif resp.status // 100 == 5:
|
||||
resp.read()
|
||||
raise InternalServerError()
|
||||
return resp
|
||||
|
||||
|
||||
def load_constraint(name):
|
||||
global cluster_info
|
||||
try:
|
||||
c = cluster_info['swift'][name]
|
||||
except KeyError:
|
||||
raise SkipTest("Missing constraint: %s" % name)
|
||||
if not isinstance(c, int):
|
||||
raise SkipTest("Bad value, %r, for constraint: %s" % (c, name))
|
||||
return c
|
||||
|
||||
|
||||
def get_storage_policy_from_cluster_info(info):
|
||||
policies = info['swift'].get('policies', {})
|
||||
default_policy = []
|
||||
non_default_policies = []
|
||||
for p in policies:
|
||||
if p.get('default', {}):
|
||||
default_policy.append(p)
|
||||
else:
|
||||
non_default_policies.append(p)
|
||||
return default_policy, non_default_policies
|
||||
|
||||
|
||||
def reset_acl():
|
||||
def post(url, token, parsed, conn):
|
||||
conn.request('POST', parsed.path, '', {
|
||||
'X-Auth-Token': token,
|
||||
'X-Account-Access-Control': '{}'
|
||||
})
|
||||
return check_response(conn)
|
||||
resp = retry(post, use_account=1)
|
||||
resp.read()
|
||||
|
||||
|
||||
def requires_acls(f):
|
||||
@functools.wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
global skip, cluster_info
|
||||
if skip or not cluster_info:
|
||||
raise SkipTest
|
||||
# Determine whether this cluster has account ACLs; if not, skip test
|
||||
if not cluster_info.get('tempauth', {}).get('account_acls'):
|
||||
raise SkipTest
|
||||
if 'keystoneauth' in cluster_info:
|
||||
# remove when keystoneauth supports account acls
|
||||
raise SkipTest
|
||||
reset_acl()
|
||||
try:
|
||||
rv = f(*args, **kwargs)
|
||||
finally:
|
||||
reset_acl()
|
||||
return rv
|
||||
return wrapper
|
||||
|
||||
|
||||
class FunctionalStoragePolicyCollection(object):
|
||||
|
||||
def __init__(self, policies):
|
||||
self._all = policies
|
||||
self.default = None
|
||||
for p in self:
|
||||
if p.get('default', False):
|
||||
assert self.default is None, 'Found multiple default ' \
|
||||
'policies %r and %r' % (self.default, p)
|
||||
self.default = p
|
||||
|
||||
@classmethod
|
||||
def from_info(cls, info=None):
|
||||
if not (info or cluster_info):
|
||||
get_cluster_info()
|
||||
info = info or cluster_info
|
||||
try:
|
||||
policy_info = info['swift']['policies']
|
||||
except KeyError:
|
||||
raise AssertionError('Did not find any policy info in %r' % info)
|
||||
policies = cls(policy_info)
|
||||
assert policies.default, \
|
||||
'Did not find default policy in %r' % policy_info
|
||||
return policies
|
||||
|
||||
def __len__(self):
|
||||
return len(self._all)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._all)
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self._all[index]
|
||||
|
||||
def filter(self, **kwargs):
|
||||
return self.__class__([p for p in self if all(
|
||||
p.get(k) == v for k, v in kwargs.items())])
|
||||
|
||||
def exclude(self, **kwargs):
|
||||
return self.__class__([p for p in self if all(
|
||||
p.get(k) != v for k, v in kwargs.items())])
|
||||
|
||||
def select(self):
|
||||
return random.choice(self)
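This collection is what the tests use to reason about the cluster's policies; a typical use is to pick some policy other than the default (a sketch using only the names defined in this file):

# Illustrative use of FunctionalStoragePolicyCollection.
policies = FunctionalStoragePolicyCollection.from_info()
other = policies.exclude(name=policies.default.get('name')).select()
# 'other' is a non-default policy dict, e.g. usable as an X-Storage-Policy value.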
|
||||
|
||||
|
||||
def requires_policies(f):
|
||||
@functools.wraps(f)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
rv = None
|
||||
if skip:
|
||||
raise SkipTest
|
||||
try:
|
||||
self.policies = FunctionalStoragePolicyCollection.from_info()
|
||||
assert len(self.policies) > 1
|
||||
except AssertionError:
|
||||
raise SkipTest("Multiple policies not enabled")
|
||||
try:
|
||||
rv = f(self, *args, **kwargs)
|
||||
except:
|
||||
raise
|
||||
return rv
|
||||
|
||||
return wrapper
|
@ -1,385 +0,0 @@
|
||||
# Copyright (c) 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" OpenStack Swift based functional tests for Gluster for Swift"""
|
||||
|
||||
import random
|
||||
import os,sys,re,hashlib
|
||||
from nose import SkipTest
|
||||
|
||||
from test.functional.tests import config, locale, Base, Base2, Utils, \
|
||||
TestFileEnv
|
||||
from test.functional.swift_test_client import Account, Connection, File, \
|
||||
ResponseError
|
||||
|
||||
web_front_end = config.get('web_front_end', 'integral')
|
||||
|
||||
class TestFile(Base):
|
||||
env = TestFileEnv
|
||||
set_up = False
|
||||
|
||||
def testObjectManifest(self):
|
||||
if (web_front_end == 'apache2'):
|
||||
raise SkipTest()
|
||||
data = File.random_data(10000)
|
||||
parts = random.randrange(2,10)
|
||||
charsEachPart = len(data)/parts
|
||||
for i in range(parts+1):
|
||||
if i==0 :
|
||||
file = self.env.container.file('objectmanifest')
|
||||
hdrs={}
|
||||
hdrs['Content-Length']='0'
|
||||
hdrs['X-Object-Manifest']=str(self.env.container.name)+'/objectmanifest'
|
||||
self.assert_(file.write('',hdrs=hdrs))
|
||||
self.assert_(file.name in self.env.container.files())
|
||||
self.assert_(file.read() == '')
|
||||
elif i==parts :
|
||||
file = self.env.container.file('objectmanifest'+'-'+str(i))
|
||||
segment=data[ (i-1)*charsEachPart :]
|
||||
self.assertTrue(file.write(segment))
|
||||
else :
|
||||
file = self.env.container.file('objectmanifest'+'-'+str(i))
|
||||
segment=data[ (i-1)*charsEachPart : i*charsEachPart]
|
||||
self.assertTrue(file.write(segment))
|
||||
# matching the manifest file content with original data, as etag won't match
|
||||
file = self.env.container.file('objectmanifest')
|
||||
data_read = file.read()
|
||||
self.assertEquals(data,data_read)
|
||||
|
||||
def test_PUT_large_object(self):
|
||||
file_item = self.env.container.file(Utils.create_name())
|
||||
data = File.random_data(1024 * 1024 * 2)
|
||||
self.assertTrue(file_item.write(data))
|
||||
self.assert_status(201)
|
||||
self.assertTrue(data == file_item.read())
|
||||
self.assert_status(200)
|
||||
|
||||
def testInvalidHeadersPUT(self):
|
||||
#TODO: Although we now support x-delete-at and x-delete-after,
|
||||
#retained this test case as we may add some other header to
|
||||
#unsupported list in future
|
||||
raise SkipTest()
|
||||
file = self.env.container.file(Utils.create_name())
|
||||
self.assertRaises(ResponseError,
|
||||
file.write_random,
|
||||
self.env.file_size,
|
||||
hdrs={'X-Delete-At': '9876545321'})
|
||||
self.assert_status(400)
|
||||
self.assertRaises(ResponseError,
|
||||
file.write_random,
|
||||
self.env.file_size,
|
||||
hdrs={'X-Delete-After': '60'})
|
||||
self.assert_status(400)
|
||||
|
||||
def testInvalidHeadersPOST(self):
|
||||
#TODO: Although we now support x-delete-at and x-delete-after,
|
||||
#retained this test case as we may add some other header to
|
||||
#unsupported list in future
|
||||
raise SkipTest()
|
||||
file = self.env.container.file(Utils.create_name())
|
||||
file.write_random(self.env.file_size)
|
||||
headers = file.make_headers(cfg={})
|
||||
headers.update({ 'X-Delete-At' : '987654321'})
|
||||
# Need to call conn.make_request instead of file.sync_metadata
|
||||
# because sync_metadata calls make_headers. make_headers()
|
||||
# overwrites any headers in file.metadata as 'user' metadata
|
||||
# by appending 'X-Object-Meta-' to any of the headers
|
||||
# in file.metadata.
|
||||
file.conn.make_request('POST', file.path, hdrs=headers, cfg={})
|
||||
self.assertEqual(400, file.conn.response.status)
|
||||
|
||||
headers = file.make_headers(cfg={})
|
||||
headers.update({ 'X-Delete-After' : '60'})
|
||||
file.conn.make_request('POST', file.path, hdrs=headers, cfg={})
|
||||
self.assertEqual(400, file.conn.response.status)
|
||||
|
||||
|
||||
class TestFileUTF8(Base2, TestFile):
|
||||
set_up = False
|
||||
|
||||
|
||||
class TestContainerPathsEnv:
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.file_size = 8
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
if not cls.container.create():
|
||||
raise ResponseError(cls.conn.response)
|
||||
|
||||
cls.dirs = [
|
||||
'dir1',
|
||||
'dir2',
|
||||
'dir1/subdir1',
|
||||
'dir1/subdir2',
|
||||
'dir1/subdir1/subsubdir1',
|
||||
'dir1/subdir1/subsubdir2',
|
||||
'dir1/subdir with spaces',
|
||||
'dir1/subdir+with{whatever',
|
||||
]
|
||||
|
||||
cls.files = [
|
||||
'file1',
|
||||
'file A',
|
||||
'dir1/file2',
|
||||
'dir1/subdir1/file2',
|
||||
'dir1/subdir1/file3',
|
||||
'dir1/subdir1/file4',
|
||||
'dir1/subdir1/subsubdir1/file5',
|
||||
'dir1/subdir1/subsubdir1/file6',
|
||||
'dir1/subdir1/subsubdir1/file7',
|
||||
'dir1/subdir1/subsubdir1/file8',
|
||||
'dir1/subdir1/subsubdir2/file9',
|
||||
'dir1/subdir1/subsubdir2/file0',
|
||||
'dir1/subdir with spaces/file B',
|
||||
'dir1/subdir+with{whatever/file D',
|
||||
]
|
||||
|
||||
stored_files = set()
|
||||
for d in cls.dirs:
|
||||
file = cls.container.file(d)
|
||||
file.write(hdrs={'Content-Type': 'application/directory'})
|
||||
for f in cls.files:
|
||||
file = cls.container.file(f)
|
||||
file.write_random(cls.file_size, hdrs={'Content-Type':
|
||||
'application/octet-stream'})
|
||||
stored_files.add(f)
|
||||
cls.stored_files = sorted(stored_files)
|
||||
cls.sorted_objects = sorted(set(cls.dirs + cls.files))
|
||||
|
||||
|
||||
class TestContainerPaths(Base):
|
||||
env = TestContainerPathsEnv
|
||||
set_up = False
|
||||
|
||||
def testTraverseContainer(self):
|
||||
found_files = []
|
||||
found_dirs = []
|
||||
|
||||
def recurse_path(path, count=0):
|
||||
if count > 10:
|
||||
raise ValueError('too deep recursion')
|
||||
|
||||
for file in self.env.container.files(parms={'path': path}):
|
||||
self.assert_(file.startswith(path))
|
||||
if file in self.env.dirs:
|
||||
recurse_path(file, count + 1)
|
||||
found_dirs.append(file)
|
||||
else:
|
||||
found_files.append(file)
|
||||
|
||||
recurse_path('')
|
||||
for file in self.env.stored_files:
|
||||
self.assert_(file in found_files)
|
||||
self.assert_(file not in found_dirs)
|
||||
|
||||
|
||||
def testContainerListing(self):
|
||||
for format in (None, 'json', 'xml'):
|
||||
files = self.env.container.files(parms={'format': format})
|
||||
self.assertFalse(len(files) == 0)
|
||||
|
||||
if isinstance(files[0], dict):
|
||||
files = [str(x['name']) for x in files]
|
||||
|
||||
self.assertEquals(files, self.env.sorted_objects)
|
||||
|
||||
for format in ('json', 'xml'):
|
||||
for file in self.env.container.files(parms={'format': format}):
|
||||
self.assert_(int(file['bytes']) >= 0)
|
||||
self.assert_('last_modified' in file)
|
||||
if file['name'] in self.env.dirs:
|
||||
self.assertEquals(file['content_type'],
|
||||
'application/directory')
|
||||
else:
|
||||
self.assertEquals(file['content_type'],
|
||||
'application/octet-stream')
|
||||
|
||||
    def testStructure(self):
        def assert_listing(path, list):
            files = self.env.container.files(parms={'path': path})
            self.assertEquals(sorted(list, cmp=locale.strcoll), files)

        assert_listing('', ['file1', 'dir1', 'dir2', 'file A'])
        assert_listing('dir1', ['dir1/file2', 'dir1/subdir1',
                                'dir1/subdir2', 'dir1/subdir with spaces',
                                'dir1/subdir+with{whatever'])
        assert_listing('dir1/subdir1',
                       ['dir1/subdir1/file4', 'dir1/subdir1/subsubdir2',
                        'dir1/subdir1/file2', 'dir1/subdir1/file3',
                        'dir1/subdir1/subsubdir1'])
        assert_listing('dir1/subdir1/subsubdir1',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir1/subsubdir1',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir with spaces',
                       ['dir1/subdir with spaces/file B'])


class TestObjectVersioningEnv:
    @classmethod
    def setUp(cls):
        cls.conn = Connection(config)
        cls.conn.authenticate()
        cls.account = Account(cls.conn, config.get('account',
                                                   config['username']))
        cls.account.delete_containers()
        cls.containers = {}
        #create two containers one for object other for versions of objects
        for i in range(2):
            hdrs={}
            if i==0:
                hdrs={'X-Versions-Location':'versions'}
                cont = cls.containers['object'] = cls.account.container('object')
            else:
                cont = cls.containers['versions'] = cls.account.container('versions')
            if not cont.create(hdrs=hdrs):
                raise ResponseError(cls.conn.response)
            cls.containers.append(cont)


class TestObjectVersioning(Base):
    env = TestObjectVersioningEnv
    set_up = False

    def testObjectVersioning(self):
        versions = random.randrange(2,10)
        dataArr=[]
        #create versions
        for i in range(versions):
            data = File.random_data(10000*(i+1))
            file = self.env.containers['object'].file('object')
            self.assertTrue(file.write(data))
            dataArr.append(data)
        cont = self.env.containers['versions']
        info = cont.info()
        self.assertEquals(info['object_count'], versions-1)
        #match the current version of object with data in arr and delete it
        for i in range(versions):
            data = dataArr[-(i+1)]
            file = self.env.containers['object'].file('object')
            self.assertEquals(data,file.read())
            self.assert_(file.delete())
            self.assert_status(204)
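# A minimal sketch of the versioning behaviour driven by the test above, using
# python-swiftclient; the auth URL and tempauth credentials are assumptions.
from swiftclient import client

url, token = client.get_auth('http://127.0.0.1:8080/auth/v1.0', 'test:tester', 'testing')
client.put_container(url, token, 'versions')
client.put_container(url, token, 'object',
                     headers={'X-Versions-Location': 'versions'})
client.put_object(url, token, 'object', 'o', contents='v1')
client.put_object(url, token, 'object', 'o', contents='v2')   # 'v1' is archived into 'versions'
hdrs = client.head_container(url, token, 'versions')
# only superseded copies are archived, hence object_count == number of writes - 1
print(hdrs.get('x-container-object-count'))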
class TestMultiProtocolAccessEnv:
    @classmethod
    def setUp(cls):
        cls.conn = Connection(config)
        cls.conn.authenticate()
        cls.account = Account(cls.conn, config.get('account',
                                                   config['username']))
        cls.root_dir = os.path.join('/mnt/gluster-object',cls.account.conn.storage_url.split('/')[2].split('_')[1])
        cls.account.delete_containers()

        cls.file_size = 8
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)

        cls.dirs = [
            'dir1',
            'dir2',
            'dir1/subdir1',
            'dir1/subdir2',
            'dir1/subdir1/subsubdir1',
            'dir1/subdir1/subsubdir2',
            'dir1/subdir with spaces',
            'dir1/subdir+with{whatever',
        ]

        cls.files = [
            'file1',
            'file A',
            'dir1/file2',
            'dir1/subdir1/file2',
            'dir1/subdir1/file3',
            'dir1/subdir1/file4',
            'dir1/subdir1/subsubdir1/file5',
            'dir1/subdir1/subsubdir1/file6',
            'dir1/subdir1/subsubdir1/file7',
            'dir1/subdir1/subsubdir1/file8',
            'dir1/subdir1/subsubdir2/file9',
            'dir1/subdir1/subsubdir2/file0',
            'dir1/subdir with spaces/file B',
            'dir1/subdir+with{whatever/file D',
        ]

        stored_files = set()
        for d in cls.dirs:
            file = cls.container.file(d)
            file.write(hdrs={'Content-Type': 'application/directory'})
        for f in cls.files:
            file = cls.container.file(f)
            file.write_random(cls.file_size, hdrs={'Content-Type':
                              'application/octet-stream'})
            stored_files.add(f)
        cls.stored_files = sorted(stored_files)
        cls.sorted_objects = sorted(set(cls.dirs + cls.files))
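# The root_dir computed in setUp() above maps the account's storage path onto the
# GlusterFS mount point. A sketch of that mapping (the tempauth account name "test"
# is an assumption):
import os

storage_url = '/v1/AUTH_test'                       # path portion kept by the test client
volume = storage_url.split('/')[2].split('_')[1]    # 'AUTH_test' -> 'test'
root_dir = os.path.join('/mnt/gluster-object', volume)
# objects written through Swift should then appear as regular files under
# /mnt/gluster-object/test/<container>/<object name>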
class TestMultiProtocolAccess(Base):
    env = TestMultiProtocolAccessEnv
    set_up = False

    def testObjectsFromMountPoint(self):
        found_files = []
        found_dirs = []

        def recurse_path(path, count=0):
            if count > 10:
                raise ValueError('too deep recursion')
            self.assert_(os.path.exists(path))
            for file in os.listdir(path):
                if os.path.isdir(os.path.join(path,file)):
                    recurse_path(os.path.join(path,file), count + 1)
                    found_dirs.append(file)
                elif os.path.isfile(os.path.join(path,file)):
                    filename=os.path.join(os.path.relpath(path,os.path.join(self.env.root_dir,self.env.container.name)),file)
                    if re.match('^[\.]',filename):
                        filename=filename[2:]
                    found_files.append(filename)
                else:
                    pass #Just a Place holder

        recurse_path(os.path.join(self.env.root_dir,self.env.container.name))
        for file in self.env.stored_files:
            self.assert_(file in found_files)
            self.assert_(file not in found_dirs)

    def testObjectContentFromMountPoint(self):
        file_name = Utils.create_name()
        file_item = self.env.container.file(file_name)
        data = file_item.write_random()
        self.assert_status(201)
        file_info = file_item.info()
        fhOnMountPoint = open(os.path.join(self.env.root_dir,self.env.container.name,file_name),'r')
        data_read_from_mountP = fhOnMountPoint.read()
        md5_returned = hashlib.md5(data_read_from_mountP).hexdigest()
        self.assertEquals(md5_returned,file_info['etag'])
        fhOnMountPoint.close()
@@ -103,7 +103,8 @@ class Connection(object):
    def __init__(self, config):
        for key in 'auth_host auth_port auth_ssl username password'.split():
            if key not in config:
                raise SkipTest
                raise SkipTest(
                    "Missing required configuration parameter: %s" % key)

        self.auth_host = config['auth_host']
        self.auth_port = int(config['auth_port'])
@@ -117,6 +118,7 @@ class Connection(object):

        self.storage_host = None
        self.storage_port = None
        self.storage_url = None

        self.conn_class = None

@@ -184,7 +186,7 @@ class Connection(object):
        """
        status = self.make_request('GET', '/info',
                                   cfg={'absolute_path': True})
        if status == 404:
        if status // 100 == 4:
            return {}
        if not 200 <= status <= 299:
            raise ResponseError(self.response, 'GET', '/info')
@@ -195,7 +197,12 @@ class Connection(object):
                                             port=self.storage_port)
        #self.connection.set_debuglevel(3)

    def make_path(self, path=[], cfg={}):
    def make_path(self, path=None, cfg=None):
        if path is None:
            path = []
        if cfg is None:
            cfg = {}

        if cfg.get('version_only_path'):
            return '/' + self.storage_url.split('/')[1]

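# The recurring edit in this file replaces mutable default arguments ({} and []) with
# a None sentinel. A small standalone illustration of why (not part of the diff):
def add_item(item, bucket=[]):      # the default list is created once, at function definition
    bucket.append(item)
    return bucket

add_item('a')    # ['a']
add_item('b')    # ['a', 'b'] -- state leaks across calls

def add_item_fixed(item, bucket=None):
    if bucket is None:              # a fresh list per call; the idiom applied throughout below
        bucket = []
    bucket.append(item)
    return bucket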
@ -208,7 +215,9 @@ class Connection(object):
|
||||
else:
|
||||
return self.storage_url
|
||||
|
||||
def make_headers(self, hdrs, cfg={}):
|
||||
def make_headers(self, hdrs, cfg=None):
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
headers = {}
|
||||
|
||||
if not cfg.get('no_auth_token'):
|
||||
@ -218,8 +227,16 @@ class Connection(object):
|
||||
headers.update(hdrs)
|
||||
return headers
|
||||
|
||||
def make_request(self, method, path=[], data='', hdrs={}, parms={},
|
||||
cfg={}):
|
||||
def make_request(self, method, path=None, data='', hdrs=None, parms=None,
|
||||
cfg=None):
|
||||
if path is None:
|
||||
path = []
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
if not cfg.get('absolute_path'):
|
||||
# Set absolute_path=True to make a request to exactly the given
|
||||
# path, not storage path + given path. Useful for
|
||||
@ -277,7 +294,14 @@ class Connection(object):
|
||||
'Attempts: %s, Failures: %s' %
|
||||
(request, len(fail_messages), fail_messages))
|
||||
|
||||
def put_start(self, path, hdrs={}, parms={}, cfg={}, chunked=False):
|
||||
def put_start(self, path, hdrs=None, parms=None, cfg=None, chunked=False):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
self.http_connect()
|
||||
|
||||
path = self.make_path(path, cfg)
|
||||
@ -322,7 +346,10 @@ class Base(object):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def header_fields(self, required_fields, optional_fields=()):
|
||||
def header_fields(self, required_fields, optional_fields=None):
|
||||
if optional_fields is None:
|
||||
optional_fields = ()
|
||||
|
||||
headers = dict(self.conn.response.getheaders())
|
||||
ret = {}
|
||||
|
||||
@ -352,7 +379,11 @@ class Account(Base):
|
||||
self.conn = conn
|
||||
self.name = str(name)
|
||||
|
||||
def update_metadata(self, metadata={}, cfg={}):
|
||||
def update_metadata(self, metadata=None, cfg=None):
|
||||
if metadata is None:
|
||||
metadata = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
headers = dict(("X-Account-Meta-%s" % k, v)
|
||||
for k, v in metadata.items())
|
||||
|
||||
@ -365,7 +396,14 @@ class Account(Base):
|
||||
def container(self, container_name):
|
||||
return Container(self.conn, self.name, container_name)
|
||||
|
||||
def containers(self, hdrs={}, parms={}, cfg={}):
|
||||
def containers(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
format_type = parms.get('format', None)
|
||||
if format_type not in [None, 'json', 'xml']:
|
||||
raise RequestError('Invalid format: %s' % format_type)
|
||||
@ -411,7 +449,13 @@ class Account(Base):
|
||||
|
||||
return listing_empty(self.containers)
|
||||
|
||||
def info(self, hdrs={}, parms={}, cfg={}):
|
||||
def info(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
|
||||
parms=parms, cfg=cfg) != 204:
|
||||
|
||||
@ -435,11 +479,21 @@ class Container(Base):
|
||||
self.account = str(account)
|
||||
self.name = str(name)
|
||||
|
||||
def create(self, hdrs={}, parms={}, cfg={}):
|
||||
def create(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
return self.conn.make_request('PUT', self.path, hdrs=hdrs,
|
||||
parms=parms, cfg=cfg) in (201, 202)
|
||||
|
||||
def delete(self, hdrs={}, parms={}):
|
||||
def delete(self, hdrs=None, parms=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
return self.conn.make_request('DELETE', self.path, hdrs=hdrs,
|
||||
parms=parms) == 204
|
||||
|
||||
@ -457,7 +511,13 @@ class Container(Base):
|
||||
def file(self, file_name):
|
||||
return File(self.conn, self.account, self.name, file_name)
|
||||
|
||||
def files(self, hdrs={}, parms={}, cfg={}):
|
||||
def files(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
format_type = parms.get('format', None)
|
||||
if format_type not in [None, 'json', 'xml']:
|
||||
raise RequestError('Invalid format: %s' % format_type)
|
||||
@ -507,7 +567,13 @@ class Container(Base):
|
||||
raise ResponseError(self.conn.response, 'GET',
|
||||
self.conn.make_path(self.path))
|
||||
|
||||
def info(self, hdrs={}, parms={}, cfg={}):
|
||||
def info(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
self.conn.make_request('HEAD', self.path, hdrs=hdrs,
|
||||
parms=parms, cfg=cfg)
|
||||
|
||||
@ -538,7 +604,9 @@ class File(Base):
|
||||
self.size = None
|
||||
self.metadata = {}
|
||||
|
||||
def make_headers(self, cfg={}):
|
||||
def make_headers(self, cfg=None):
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
headers = {}
|
||||
if not cfg.get('no_content_length'):
|
||||
if cfg.get('set_content_length'):
|
||||
@ -575,7 +643,13 @@ class File(Base):
|
||||
data.seek(0)
|
||||
return checksum.hexdigest()
|
||||
|
||||
def copy(self, dest_cont, dest_file, hdrs={}, parms={}, cfg={}):
|
||||
def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
if 'destination' in cfg:
|
||||
headers = {'Destination': cfg['destination']}
|
||||
elif cfg.get('no_destination'):
|
||||
@ -590,7 +664,11 @@ class File(Base):
|
||||
return self.conn.make_request('COPY', self.path, hdrs=headers,
|
||||
parms=parms) == 201
|
||||
|
||||
def delete(self, hdrs={}, parms={}):
|
||||
def delete(self, hdrs=None, parms=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if self.conn.make_request('DELETE', self.path, hdrs=hdrs,
|
||||
parms=parms) != 204:
|
||||
|
||||
@ -599,7 +677,13 @@ class File(Base):
|
||||
|
||||
return True
|
||||
|
||||
def info(self, hdrs={}, parms={}, cfg={}):
|
||||
def info(self, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
|
||||
parms=parms, cfg=cfg) != 200:
|
||||
|
||||
@ -615,7 +699,11 @@ class File(Base):
|
||||
header_fields['etag'] = header_fields['etag'].strip('"')
|
||||
return header_fields
|
||||
|
||||
def initialize(self, hdrs={}, parms={}):
|
||||
def initialize(self, hdrs=None, parms=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if not self.name:
|
||||
return False
|
||||
|
||||
@ -660,7 +748,11 @@ class File(Base):
|
||||
return data
|
||||
|
||||
def read(self, size=-1, offset=0, hdrs=None, buffer=None,
|
||||
callback=None, cfg={}, parms={}):
|
||||
callback=None, cfg=None, parms=None):
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
|
||||
if size > 0:
|
||||
range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1)
|
||||
@ -717,7 +809,12 @@ class File(Base):
|
||||
finally:
|
||||
fobj.close()
|
||||
|
||||
def sync_metadata(self, metadata={}, cfg={}):
|
||||
def sync_metadata(self, metadata=None, cfg=None):
|
||||
if metadata is None:
|
||||
metadata = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
self.metadata.update(metadata)
|
||||
|
||||
if self.metadata:
|
||||
@ -737,7 +834,14 @@ class File(Base):
|
||||
|
||||
return True
|
||||
|
||||
def chunked_write(self, data=None, hdrs={}, parms={}, cfg={}):
|
||||
def chunked_write(self, data=None, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
if data is not None and self.chunked_write_in_progress:
|
||||
self.conn.put_data(data, True)
|
||||
elif data is not None:
|
||||
@ -756,8 +860,15 @@ class File(Base):
|
||||
else:
|
||||
raise RuntimeError
|
||||
|
||||
def write(self, data='', hdrs={}, parms={}, callback=None, cfg={},
|
||||
def write(self, data='', hdrs=None, parms=None, callback=None, cfg=None,
|
||||
return_resp=False):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
block_size = 2 ** 20
|
||||
|
||||
if isinstance(data, file):
|
||||
@ -778,13 +889,15 @@ class File(Base):
|
||||
|
||||
transferred = 0
|
||||
buff = data.read(block_size)
|
||||
buff_len = len(buff)
|
||||
try:
|
||||
while len(buff) > 0:
|
||||
while buff_len > 0:
|
||||
self.conn.put_data(buff)
|
||||
buff = data.read(block_size)
|
||||
transferred += len(buff)
|
||||
transferred += buff_len
|
||||
if callable(callback):
|
||||
callback(transferred, self.size)
|
||||
buff = data.read(block_size)
|
||||
buff_len = len(buff)
|
||||
|
||||
self.conn.put_end()
|
||||
except socket.timeout as err:
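# The hunk above also corrects the upload progress accounting: the old loop added the
# length of the *next* chunk read, the new one counts the chunk that was just sent.
# A compact, self-contained sketch of the corrected pattern (the stream stands in for
# the file being uploaded; the real code sends each chunk over the connection):
import io

block_size = 2 ** 20
stream = io.BytesIO(b'example payload')
transferred = 0
buff = stream.read(block_size)
buff_len = len(buff)
while buff_len > 0:
    # conn.put_data(buff) would go here on a real connection
    transferred += buff_len                  # count the chunk that was just sent
    buff = stream.read(block_size)           # only then fetch the next chunk
    buff_len = len(buff)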
|
||||
@ -806,7 +919,14 @@ class File(Base):
|
||||
|
||||
return True
|
||||
|
||||
def write_random(self, size=None, hdrs={}, parms={}, cfg={}):
|
||||
def write_random(self, size=None, hdrs=None, parms=None, cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
data = self.random_data(size)
|
||||
if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg):
|
||||
raise ResponseError(self.conn.response, 'PUT',
|
||||
@ -814,7 +934,15 @@ class File(Base):
|
||||
self.md5 = self.compute_md5sum(StringIO.StringIO(data))
|
||||
return data
|
||||
|
||||
def write_random_return_resp(self, size=None, hdrs={}, parms={}, cfg={}):
|
||||
def write_random_return_resp(self, size=None, hdrs=None, parms=None,
|
||||
cfg=None):
|
||||
if hdrs is None:
|
||||
hdrs = {}
|
||||
if parms is None:
|
||||
parms = {}
|
||||
if cfg is None:
|
||||
cfg = {}
|
||||
|
||||
data = self.random_data(size)
|
||||
resp = self.write(data, hdrs=hdrs, parms=parms, cfg=cfg,
|
||||
return_resp=True)
|
||||
|
@ -1,231 +0,0 @@
|
||||
# Copyright (c) 2010-2014 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from httplib import HTTPException
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
from time import sleep
|
||||
from urlparse import urlparse
|
||||
import functools
|
||||
from nose import SkipTest
|
||||
|
||||
from test import get_config
|
||||
|
||||
from swiftclient import get_auth, http_connection
|
||||
from test.functional.swift_test_client import Connection
|
||||
|
||||
conf = get_config('func_test')
|
||||
web_front_end = conf.get('web_front_end', 'integral')
|
||||
normalized_urls = conf.get('normalized_urls', False)
|
||||
|
||||
# If no conf was read, we will fall back to old school env vars
|
||||
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
|
||||
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None]
|
||||
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None]
|
||||
swift_test_tenant = ['', '', '']
|
||||
swift_test_perm = ['', '', '']
|
||||
|
||||
if conf:
|
||||
swift_test_auth_version = str(conf.get('auth_version', '1'))
|
||||
|
||||
swift_test_auth = 'http'
|
||||
if conf.get('auth_ssl', 'no').lower() in ('yes', 'true', 'on', '1'):
|
||||
swift_test_auth = 'https'
|
||||
if 'auth_prefix' not in conf:
|
||||
conf['auth_prefix'] = '/'
|
||||
try:
|
||||
suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf
|
||||
swift_test_auth += suffix
|
||||
except KeyError:
|
||||
pass # skip
|
||||
|
||||
if swift_test_auth_version == "1":
|
||||
swift_test_auth += 'v1.0'
|
||||
|
||||
if 'account' in conf:
|
||||
swift_test_user[0] = '%(account)s:%(username)s' % conf
|
||||
else:
|
||||
swift_test_user[0] = '%(username)s' % conf
|
||||
swift_test_key[0] = conf['password']
|
||||
try:
|
||||
swift_test_user[1] = '%s%s' % (
|
||||
'%s:' % conf['account2'] if 'account2' in conf else '',
|
||||
conf['username2'])
|
||||
swift_test_key[1] = conf['password2']
|
||||
except KeyError as err:
|
||||
pass # old conf, no second account tests can be run
|
||||
try:
|
||||
swift_test_user[2] = '%s%s' % ('%s:' % conf['account'] if 'account'
|
||||
in conf else '', conf['username3'])
|
||||
swift_test_key[2] = conf['password3']
|
||||
except KeyError as err:
|
||||
pass # old conf, no third account tests can be run
|
||||
|
||||
for _ in range(3):
|
||||
swift_test_perm[_] = swift_test_user[_]
|
||||
|
||||
else:
|
||||
swift_test_user[0] = conf['username']
|
||||
swift_test_tenant[0] = conf['account']
|
||||
swift_test_key[0] = conf['password']
|
||||
swift_test_user[1] = conf['username2']
|
||||
swift_test_tenant[1] = conf['account2']
|
||||
swift_test_key[1] = conf['password2']
|
||||
swift_test_user[2] = conf['username3']
|
||||
swift_test_tenant[2] = conf['account']
|
||||
swift_test_key[2] = conf['password3']
|
||||
|
||||
for _ in range(3):
|
||||
swift_test_perm[_] = swift_test_tenant[_] + ':' \
|
||||
+ swift_test_user[_]
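# The keys read above come from the [func_test] section of the test configuration.
# A hypothetical minimal file (the values are typical SAIO/tempauth defaults and are
# assumptions, not taken from this commit):
#
#   [func_test]
#   auth_host = 127.0.0.1
#   auth_port = 8080
#   auth_ssl = no
#   auth_prefix = /auth/
#   account = test
#   username = tester
#   password = testing
#   account2 = test2
#   username2 = tester2
#   password2 = testing2
#   username3 = tester3
#   password3 = testing3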
|
||||
|
||||
skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]])
|
||||
if skip:
|
||||
print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG'
|
||||
|
||||
skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]])
|
||||
if not skip and skip2:
|
||||
print >>sys.stderr, \
|
||||
'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'
|
||||
|
||||
skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]])
|
||||
if not skip and skip3:
|
||||
print >>sys.stderr, \
|
||||
'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'
|
||||
|
||||
|
||||
class AuthError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InternalServerError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
url = [None, None, None]
|
||||
token = [None, None, None]
|
||||
parsed = [None, None, None]
|
||||
conn = [None, None, None]
|
||||
|
||||
|
||||
def retry(func, *args, **kwargs):
|
||||
"""
|
||||
You can use the kwargs to override:
|
||||
'retries' (default: 5)
|
||||
'use_account' (default: 1) - which user's token to pass
|
||||
'url_account' (default: matches 'use_account') - which user's storage URL
|
||||
'resource' (default: url[url_account] - URL to connect to; retry()
|
||||
will interpolate the variable :storage_url: if present
|
||||
"""
|
||||
global url, token, parsed, conn
|
||||
retries = kwargs.get('retries', 5)
|
||||
attempts, backoff = 0, 1
|
||||
|
||||
# use account #1 by default; turn user's 1-indexed account into 0-indexed
|
||||
use_account = kwargs.pop('use_account', 1) - 1
|
||||
|
||||
# access our own account by default
|
||||
url_account = kwargs.pop('url_account', use_account + 1) - 1
|
||||
|
||||
while attempts <= retries:
|
||||
attempts += 1
|
||||
try:
|
||||
if not url[use_account] or not token[use_account]:
|
||||
url[use_account], token[use_account] = \
|
||||
get_auth(swift_test_auth, swift_test_user[use_account],
|
||||
swift_test_key[use_account],
|
||||
snet=False,
|
||||
tenant_name=swift_test_tenant[use_account],
|
||||
auth_version=swift_test_auth_version,
|
||||
os_options={})
|
||||
parsed[use_account] = conn[use_account] = None
|
||||
if not parsed[use_account] or not conn[use_account]:
|
||||
parsed[use_account], conn[use_account] = \
|
||||
http_connection(url[use_account])
|
||||
|
||||
# default resource is the account url[url_account]
|
||||
resource = kwargs.pop('resource', '%(storage_url)s')
|
||||
template_vars = {'storage_url': url[url_account]}
|
||||
parsed_result = urlparse(resource % template_vars)
|
||||
return func(url[url_account], token[use_account],
|
||||
parsed_result, conn[url_account],
|
||||
*args, **kwargs)
|
||||
except (socket.error, HTTPException):
|
||||
if attempts > retries:
|
||||
raise
|
||||
parsed[use_account] = conn[use_account] = None
|
||||
except AuthError:
|
||||
url[use_account] = token[use_account] = None
|
||||
continue
|
||||
except InternalServerError:
|
||||
pass
|
||||
if attempts <= retries:
|
||||
sleep(backoff)
|
||||
backoff *= 2
|
||||
raise Exception('No result after %s retries.' % retries)
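# A usage sketch for retry() matching its docstring above; the container name is
# hypothetical, and check_response() is the helper defined just below.
def head(url, token, parsed, conn):
    conn.request('HEAD', parsed.path + '/mycontainer', '', {'X-Auth-Token': token})
    return check_response(conn)

# authenticate as account 2 but operate on account 1's storage URL
resp = retry(head, use_account=2, url_account=1)
resp.read()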
|
||||
|
||||
|
||||
def check_response(conn):
|
||||
resp = conn.getresponse()
|
||||
if resp.status == 401:
|
||||
resp.read()
|
||||
raise AuthError()
|
||||
elif resp.status // 100 == 5:
|
||||
resp.read()
|
||||
raise InternalServerError()
|
||||
return resp
|
||||
|
||||
cluster_info = {}
|
||||
|
||||
|
||||
def get_cluster_info():
|
||||
conn = Connection(conf)
|
||||
conn.authenticate()
|
||||
global cluster_info
|
||||
cluster_info = conn.cluster_info()
|
||||
|
||||
|
||||
def reset_acl():
|
||||
def post(url, token, parsed, conn):
|
||||
conn.request('POST', parsed.path, '', {
|
||||
'X-Auth-Token': token,
|
||||
'X-Account-Access-Control': '{}'
|
||||
})
|
||||
return check_response(conn)
|
||||
resp = retry(post, use_account=1)
|
||||
resp.read()
|
||||
|
||||
|
||||
def requires_acls(f):
|
||||
@functools.wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
if skip:
|
||||
raise SkipTest
|
||||
if not cluster_info:
|
||||
get_cluster_info()
|
||||
# Determine whether this cluster has account ACLs; if not, skip test
|
||||
if not cluster_info.get('tempauth', {}).get('account_acls'):
|
||||
raise SkipTest
|
||||
if 'keystoneauth' in cluster_info:
|
||||
# remove when keystoneauth supports account acls
|
||||
raise SkipTest
|
||||
reset_acl()
|
||||
try:
|
||||
rv = f(*args, **kwargs)
|
||||
finally:
|
||||
reset_acl()
|
||||
return rv
|
||||
return wrapper
|
813	test/functional/test_account.py	Executable file
@@ -0,0 +1,813 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (c) 2010-2012 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest
|
||||
import json
|
||||
from uuid import uuid4
|
||||
from nose import SkipTest
|
||||
from string import letters
|
||||
|
||||
from swift.common.middleware.acl import format_acl
|
||||
|
||||
from test.functional import check_response, retry, requires_acls, \
|
||||
load_constraint
|
||||
import test.functional as tf
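# Several tests below build version-2 account ACL headers with format_acl(); a small
# sketch of the header value it produces (the user names here are assumptions):
acl = {'admin': ['test2:tester2'], 'read-only': ['test3:tester3']}
header_value = format_acl(version=2, acl_dict=acl)
# header_value is a compact JSON string such as
# '{"admin":["test2:tester2"],"read-only":["test3:tester3"]}' and is sent as
# X-Account-Access-Control on a POST to the account.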
|
||||
|
||||
|
||||
class TestAccount(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.max_meta_count = load_constraint('max_meta_count')
|
||||
self.max_meta_name_length = load_constraint('max_meta_name_length')
|
||||
self.max_meta_overall_size = load_constraint('max_meta_overall_size')
|
||||
self.max_meta_value_length = load_constraint('max_meta_value_length')
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(head)
|
||||
self.existing_metadata = set([
|
||||
k for k, v in resp.getheaders() if
|
||||
k.lower().startswith('x-account-meta')])
|
||||
|
||||
def tearDown(self):
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
new_metadata = set(
|
||||
[k for k, v in resp.getheaders() if
|
||||
k.lower().startswith('x-account-meta')])
|
||||
|
||||
def clear_meta(url, token, parsed, conn, remove_metadata_keys):
|
||||
headers = {'X-Auth-Token': token}
|
||||
headers.update((k, '') for k in remove_metadata_keys)
|
||||
conn.request('POST', parsed.path, '', headers)
|
||||
return check_response(conn)
|
||||
extra_metadata = list(self.existing_metadata ^ new_metadata)
|
||||
for i in range(0, len(extra_metadata), 90):
|
||||
batch = extra_metadata[i:i + 90]
|
||||
resp = retry(clear_meta, batch)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
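# tearDown() above removes leftover metadata in slices of 90 keys per POST, which
# presumably keeps each request under Swift's per-request metadata count limit
# (max_meta_count, 90 by default). A sketch of the same batching idea:
keys_to_clear = ['X-Account-Meta-%d' % i for i in range(200)]   # hypothetical leftovers
for i in range(0, len(keys_to_clear), 90):
    batch = dict((k, '') for k in keys_to_clear[i:i + 90])      # '' deletes the key
    # one POST to the account per batch with these headers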
|
||||
|
||||
def test_metadata(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, value):
|
||||
conn.request('POST', parsed.path, '',
|
||||
{'X-Auth-Token': token, 'X-Account-Meta-Test': value})
|
||||
return check_response(conn)
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
resp = retry(post, '')
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-test'), None)
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-test'), None)
|
||||
resp = retry(post, 'Value')
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
|
||||
|
||||
def test_invalid_acls(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# needs to be an acceptable header size
|
||||
num_keys = 8
|
||||
max_key_size = load_constraint('max_header_size') / num_keys
|
||||
acl = {'admin': [c * max_key_size for c in letters[:num_keys]]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
# and again a touch smaller
|
||||
acl = {'admin': [c * max_key_size for c in letters[:num_keys - 1]]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
@requires_acls
|
||||
def test_invalid_acl_keys(self):
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# needs to be json
|
||||
resp = retry(post, headers={'X-Account-Access-Control': 'invalid'},
|
||||
use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
acl_user = tf.swift_test_user[1]
|
||||
acl = {'admin': [acl_user], 'invalid_key': 'invalid_value'}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
|
||||
resp = retry(post, headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
@requires_acls
|
||||
def test_invalid_acl_values(self):
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
acl = {'admin': 'invalid_value'}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
@requires_acls
|
||||
def test_read_only_acl(self):
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# cannot read account
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant read access
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-only': [acl_user]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# read-only can read account headers
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204))
|
||||
# but not acls
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# read-only can not write metadata
|
||||
headers = {'x-account-meta-test': 'value'}
|
||||
resp = retry(post, headers=headers, use_account=3)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 403)
|
||||
|
||||
# but they can read it
|
||||
headers = {'x-account-meta-test': 'value'}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204))
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Test'), 'value')
|
||||
|
||||
@requires_acls
|
||||
def test_read_write_acl(self):
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# cannot read account
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant read-write access
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-write': [acl_user]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# read-write can read account headers
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204))
|
||||
# but not acls
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# read-write can not write account metadata
|
||||
headers = {'x-account-meta-test': 'value'}
|
||||
resp = retry(post, headers=headers, use_account=3)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 403)
|
||||
|
||||
@requires_acls
|
||||
def test_admin_acl(self):
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# cannot read account
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant admin access
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'admin': [acl_user]}
|
||||
acl_json_str = format_acl(version=2, acl_dict=acl)
|
||||
headers = {'x-account-access-control': acl_json_str}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# admin can read account headers
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204))
|
||||
# including acls
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'),
|
||||
acl_json_str)
|
||||
|
||||
# admin can write account metadata
|
||||
value = str(uuid4())
|
||||
headers = {'x-account-meta-test': value}
|
||||
resp = retry(post, headers=headers, use_account=3)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204))
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
|
||||
|
||||
# admin can even revoke their own access
|
||||
headers = {'x-account-access-control': '{}'}
|
||||
resp = retry(post, headers=headers, use_account=3)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# and again, cannot read account
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
@requires_acls
|
||||
def test_protected_tempurl(self):
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
# add an account metadata, and temp-url-key to account
|
||||
value = str(uuid4())
|
||||
headers = {
|
||||
'x-account-meta-temp-url-key': 'secret',
|
||||
'x-account-meta-test': value,
|
||||
}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# grant read-only access to tester3
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-only': [acl_user]}
|
||||
acl_json_str = format_acl(version=2, acl_dict=acl)
|
||||
headers = {'x-account-access-control': acl_json_str}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# read-only tester3 can read account metadata
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204),
|
||||
'Expected status in (200, 204), got %s' % resp.status)
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
|
||||
# but not temp-url-key
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None)
|
||||
|
||||
# grant read-write access to tester3
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-write': [acl_user]}
|
||||
acl_json_str = format_acl(version=2, acl_dict=acl)
|
||||
headers = {'x-account-access-control': acl_json_str}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# read-write tester3 can read account metadata
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204),
|
||||
'Expected status in (200, 204), got %s' % resp.status)
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
|
||||
# but not temp-url-key
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None)
|
||||
|
||||
# grant admin access to tester3
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'admin': [acl_user]}
|
||||
acl_json_str = format_acl(version=2, acl_dict=acl)
|
||||
headers = {'x-account-access-control': acl_json_str}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# admin tester3 can read account metadata
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204),
|
||||
'Expected status in (200, 204), got %s' % resp.status)
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
|
||||
# including temp-url-key
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
|
||||
'secret')
|
||||
|
||||
# admin tester3 can even change temp-url-key
|
||||
secret = str(uuid4())
|
||||
headers = {
|
||||
'x-account-meta-temp-url-key': secret,
|
||||
}
|
||||
resp = retry(post, headers=headers, use_account=3)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(get, use_account=3)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204),
|
||||
'Expected status in (200, 204), got %s' % resp.status)
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
|
||||
secret)
|
||||
|
||||
@requires_acls
|
||||
def test_account_acls(self):
|
||||
if tf.skip2:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
def put(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('PUT', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
def delete(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('DELETE', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
try:
|
||||
# User1 can POST to their own account (and reset the ACLs)
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'},
|
||||
use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User1 can GET their own empty account
|
||||
resp = retry(get, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User2 can't GET User1's account
|
||||
resp = retry(get, use_account=2, url_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 403)
|
||||
|
||||
# User1 is swift_owner of their own account, so they can POST an
|
||||
# ACL -- let's do this and make User2 (test_user[1]) an admin
|
||||
acl_user = tf.swift_test_user[1]
|
||||
acl = {'admin': [acl_user]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# User1 can see the new header
|
||||
resp = retry(get, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
data_from_headers = resp.getheader('x-account-access-control')
|
||||
expected = json.dumps(acl, separators=(',', ':'))
|
||||
self.assertEqual(data_from_headers, expected)
|
||||
|
||||
# Now User2 should be able to GET the account and see the ACL
|
||||
resp = retry(head, use_account=2, url_account=1)
|
||||
resp.read()
|
||||
data_from_headers = resp.getheader('x-account-access-control')
|
||||
self.assertEqual(data_from_headers, expected)
|
||||
|
||||
# Revoke User2's admin access, grant User2 read-write access
|
||||
acl = {'read-write': [acl_user]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# User2 can still GET the account, but not see the ACL
|
||||
# (since it's privileged data)
|
||||
resp = retry(head, use_account=2, url_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
self.assertEqual(resp.getheader('x-account-access-control'), None)
|
||||
|
||||
# User2 can PUT and DELETE a container
|
||||
resp = retry(put, use_account=2, url_account=1,
|
||||
resource='%(storage_url)s/mycontainer', headers={})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
resp = retry(delete, use_account=2, url_account=1,
|
||||
resource='%(storage_url)s/mycontainer', headers={})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# Revoke User2's read-write access, grant User2 read-only access
|
||||
acl = {'read-only': [acl_user]}
|
||||
headers = {'x-account-access-control': format_acl(
|
||||
version=2, acl_dict=acl)}
|
||||
resp = retry(post, headers=headers, use_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# User2 can still GET the account, but not see the ACL
|
||||
# (since it's privileged data)
|
||||
resp = retry(head, use_account=2, url_account=1)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
self.assertEqual(resp.getheader('x-account-access-control'), None)
|
||||
|
||||
# User2 can't PUT a container
|
||||
resp = retry(put, use_account=2, url_account=1,
|
||||
resource='%(storage_url)s/mycontainer', headers={})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 403)
|
||||
|
||||
finally:
|
||||
# Make sure to clean up even if tests fail -- User2 should not
|
||||
# have access to User1's account in other functional tests!
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'},
|
||||
use_account=1)
|
||||
resp.read()
|
||||
|
||||
@requires_acls
|
||||
def test_swift_account_acls(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
try:
|
||||
# User1 can POST to their own account
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User1 can GET their own empty account
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User1 can POST non-empty data
|
||||
acl_json = '{"admin":["bob"]}'
|
||||
resp = retry(post, headers={'X-Account-Access-Control': acl_json})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# User1 can GET the non-empty data
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'),
|
||||
acl_json)
|
||||
|
||||
# POST non-JSON ACL should fail
|
||||
resp = retry(post, headers={'X-Account-Access-Control': 'yuck'})
|
||||
resp.read()
|
||||
# resp.status will be 400 if tempauth or some other ACL-aware
|
||||
# auth middleware rejects it, or 200 (but silently swallowed by
|
||||
# core Swift) if ACL-unaware auth middleware approves it.
|
||||
|
||||
# A subsequent GET should show the old, valid data, not the garbage
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'),
|
||||
acl_json)
|
||||
|
||||
finally:
|
||||
# Make sure to clean up even if tests fail -- User2 should not
|
||||
# have access to User1's account in other functional tests!
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
|
||||
resp.read()
|
||||
|
||||
def test_swift_prohibits_garbage_account_acls(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, headers):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('POST', parsed.path, '', new_headers)
|
||||
return check_response(conn)
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
try:
|
||||
# User1 can POST to their own account
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User1 can GET their own empty account
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertEqual(resp.getheader('X-Account-Access-Control'), None)
|
||||
|
||||
# User1 can POST non-empty data
|
||||
acl_json = '{"admin":["bob"]}'
|
||||
resp = retry(post, headers={'X-Account-Access-Control': acl_json})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
# If this request is handled by ACL-aware auth middleware, then the
|
||||
# ACL will be persisted. If it is handled by ACL-unaware auth
|
||||
# middleware, then the header will be thrown out. But the request
|
||||
# should return successfully in any case.
|
||||
|
||||
# User1 can GET the non-empty data
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
# ACL will be set if some ACL-aware auth middleware (e.g. tempauth)
|
||||
# propagates it to sysmeta; if no ACL-aware auth middleware does,
|
||||
# then X-Account-Access-Control will still be empty.
|
||||
|
||||
# POST non-JSON ACL should fail
|
||||
resp = retry(post, headers={'X-Account-Access-Control': 'yuck'})
|
||||
resp.read()
|
||||
# resp.status will be 400 if tempauth or some other ACL-aware
|
||||
# auth middleware rejects it, or 200 (but silently swallowed by
|
||||
# core Swift) if ACL-unaware auth middleware approves it.
|
||||
|
||||
# A subsequent GET should either show the old, valid data (if
|
||||
# ACL-aware auth middleware is propagating it) or show nothing
|
||||
# (if no auth middleware in the pipeline is ACL-aware), but should
|
||||
# never return the garbage ACL.
|
||||
resp = retry(get)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status // 100, 2)
|
||||
self.assertNotEqual(resp.getheader('X-Account-Access-Control'),
|
||||
'yuck')
|
||||
|
||||
finally:
|
||||
# Make sure to clean up even if tests fail -- User2 should not
|
||||
# have access to User1's account in other functional tests!
|
||||
resp = retry(post, headers={'X-Account-Access-Control': '{}'})
|
||||
resp.read()
|
||||
|
||||
def test_unicode_metadata(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, name, value):
|
||||
conn.request('POST', parsed.path, '',
|
||||
{'X-Auth-Token': token, name: value})
|
||||
return check_response(conn)
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
uni_key = u'X-Account-Meta-uni\u0E12'
|
||||
uni_value = u'uni\u0E12'
|
||||
if (tf.web_front_end == 'integral'):
|
||||
resp = retry(post, uni_key, '1')
|
||||
resp.read()
|
||||
self.assertTrue(resp.status in (201, 204))
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
|
||||
resp = retry(post, 'X-Account-Meta-uni', uni_value)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
|
||||
uni_value.encode('utf-8'))
|
||||
if (tf.web_front_end == 'integral'):
|
||||
resp = retry(post, uni_key, uni_value)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
|
||||
uni_value.encode('utf-8'))
|
||||
|
||||
def test_multi_metadata(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, name, value):
|
||||
conn.request('POST', parsed.path, '',
|
||||
{'X-Auth-Token': token, name: value})
|
||||
return check_response(conn)
|
||||
|
||||
def head(url, token, parsed, conn):
|
||||
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
resp = retry(post, 'X-Account-Meta-One', '1')
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
|
||||
resp = retry(post, 'X-Account-Meta-Two', '2')
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(head)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (200, 204), resp.status)
|
||||
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
|
||||
self.assertEqual(resp.getheader('x-account-meta-two'), '2')
|
||||
|
||||
def test_bad_metadata(self):
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def post(url, token, parsed, conn, extra_headers):
|
||||
headers = {'X-Auth-Token': token}
|
||||
headers.update(extra_headers)
|
||||
conn.request('POST', parsed.path, '', headers)
|
||||
return check_response(conn)
|
||||
|
||||
resp = retry(post,
|
||||
{'X-Account-Meta-' + (
|
||||
'k' * self.max_meta_name_length): 'v'})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(
|
||||
post,
|
||||
{'X-Account-Meta-' + ('k' * (
|
||||
self.max_meta_name_length + 1)): 'v'})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
resp = retry(post,
|
||||
{'X-Account-Meta-Too-Long': (
|
||||
'k' * self.max_meta_value_length)})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
resp = retry(
|
||||
post,
|
||||
{'X-Account-Meta-Too-Long': 'k' * (
|
||||
self.max_meta_value_length + 1)})
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
headers = {}
|
||||
for x in xrange(self.max_meta_count):
|
||||
headers['X-Account-Meta-%d' % x] = 'v'
|
||||
resp = retry(post, headers)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
headers = {}
|
||||
for x in xrange(self.max_meta_count + 1):
|
||||
headers['X-Account-Meta-%d' % x] = 'v'
|
||||
resp = retry(post, headers)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
headers = {}
|
||||
header_value = 'k' * self.max_meta_value_length
|
||||
size = 0
|
||||
x = 0
|
||||
while size < (self.max_meta_overall_size - 4
|
||||
- self.max_meta_value_length):
|
||||
size += 4 + self.max_meta_value_length
|
||||
headers['X-Account-Meta-%04d' % x] = header_value
|
||||
x += 1
|
||||
if self.max_meta_overall_size - size > 1:
|
||||
headers['X-Account-Meta-k'] = \
|
||||
'v' * (self.max_meta_overall_size - size - 1)
|
||||
resp = retry(post, headers)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
headers['X-Account-Meta-k'] = \
|
||||
'v' * (self.max_meta_overall_size - size)
|
||||
resp = retry(post, headers)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
1515	test/functional/test_container.py	Executable file
File diff suppressed because it is too large
@ -21,24 +21,21 @@ from uuid import uuid4
|
||||
|
||||
from swift.common.utils import json
|
||||
|
||||
from swift_testing import check_response, retry, skip, skip3, \
|
||||
swift_test_perm, web_front_end, requires_acls, swift_test_user
|
||||
from test.functional import check_response, retry, requires_acls, \
|
||||
requires_policies
|
||||
import test.functional as tf
|
||||
|
||||
|
||||
class TestObject(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
self.container = uuid4().hex
|
||||
|
||||
def put(url, token, parsed, conn):
|
||||
conn.request('PUT', parsed.path + '/' + self.container, '',
|
||||
{'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(put)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
self.containers = []
|
||||
self._create_container(self.container)
|
||||
|
||||
self.obj = uuid4().hex
|
||||
|
||||
def put(url, token, parsed, conn):
|
||||
@ -50,40 +47,65 @@ class TestObject(unittest.TestCase):
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
|
||||
def _create_container(self, name=None, headers=None):
|
||||
if not name:
|
||||
name = uuid4().hex
|
||||
self.containers.append(name)
|
||||
headers = headers or {}
|
||||
|
||||
def put(url, token, parsed, conn, name):
|
||||
new_headers = dict({'X-Auth-Token': token}, **headers)
|
||||
conn.request('PUT', parsed.path + '/' + name, '',
|
||||
new_headers)
|
||||
return check_response(conn)
|
||||
resp = retry(put, name)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
return name
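# A hypothetical use of the new helper: extra headers are passed straight through to
# the container PUT, so a policy-specific container could be created like this (the
# policy name is made up; tearDown() later empties and deletes everything recorded
# in self.containers):
name = self._create_container(headers={'X-Storage-Policy': 'gold'})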
|
||||
|
||||
def tearDown(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def delete(url, token, parsed, conn, obj):
|
||||
conn.request('DELETE',
|
||||
'%s/%s/%s' % (parsed.path, self.container, obj),
|
||||
'', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
# get list of objects in container
|
||||
def list(url, token, parsed, conn):
|
||||
conn.request('GET',
|
||||
'%s/%s' % (parsed.path, self.container),
|
||||
'', {'X-Auth-Token': token})
|
||||
def get(url, token, parsed, conn, container):
|
||||
conn.request(
|
||||
'GET', parsed.path + '/' + container + '?format=json', '',
|
||||
{'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(list)
|
||||
object_listing = resp.read()
|
||||
self.assertEqual(resp.status, 200)
|
||||
|
||||
# iterate over object listing and delete all objects
|
||||
for obj in object_listing.splitlines():
|
||||
resp = retry(delete, obj)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
# delete an object
|
||||
def delete(url, token, parsed, conn, container, obj):
|
||||
conn.request(
|
||||
'DELETE', '/'.join([parsed.path, container, obj['name']]), '',
|
||||
{'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
for container in self.containers:
|
||||
while True:
|
||||
resp = retry(get, container)
|
||||
body = resp.read()
|
||||
if resp.status == 404:
|
||||
break
|
||||
self.assert_(resp.status // 100 == 2, resp.status)
|
||||
objs = json.loads(body)
|
||||
if not objs:
|
||||
break
|
||||
for obj in objs:
|
||||
resp = retry(delete, container, obj)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
# delete the container
|
||||
def delete(url, token, parsed, conn):
|
||||
conn.request('DELETE', parsed.path + '/' + self.container, '',
|
||||
def delete(url, token, parsed, conn, name):
|
||||
conn.request('DELETE', parsed.path + '/' + name, '',
|
||||
{'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(delete)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
for container in self.containers:
|
||||
resp = retry(delete, container)
|
||||
resp.read()
|
||||
self.assert_(resp.status in (204, 404))
|
||||
|
||||
def test_if_none_match(self):
|
||||
def put(url, token, parsed, conn):
|
||||
@ -112,7 +134,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEquals(resp.status, 400)
|
||||
|
||||
def test_copy_object(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
source = '%s/%s' % (self.container, self.obj)
|
||||
@ -186,7 +208,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
def test_public_object(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def get(url, token, parsed, conn):
|
||||
@ -225,7 +247,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assert_(str(err).startswith('No result after '))
|
||||
|
||||
def test_private_object(self):
|
||||
if skip or skip3:
|
||||
if tf.skip or tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
# Ensure we can't access the object with the third account
|
||||
@ -245,8 +267,8 @@ class TestObject(unittest.TestCase):
|
||||
conn.request('PUT', '%s/%s' % (
|
||||
parsed.path, shared_container), '',
|
||||
{'X-Auth-Token': token,
|
||||
'X-Container-Read': swift_test_perm[2],
|
||||
'X-Container-Write': swift_test_perm[2]})
|
||||
'X-Container-Read': tf.swift_test_perm[2],
|
||||
'X-Container-Write': tf.swift_test_perm[2]})
|
||||
return check_response(conn)
|
||||
resp = retry(put)
|
||||
resp.read()
|
||||
@ -319,8 +341,8 @@ class TestObject(unittest.TestCase):
|
||||
|
||||
@requires_acls
|
||||
def test_read_only(self):
|
||||
if skip3:
|
||||
raise SkipTest
|
||||
if tf.skip3:
|
||||
raise tf.SkipTest
|
||||
|
||||
def get_listing(url, token, parsed, conn):
|
||||
conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
|
||||
@ -361,7 +383,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant read-only access
|
||||
acl_user = swift_test_user[2]
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-only': [acl_user]}
|
||||
headers = {'x-account-access-control': json.dumps(acl)}
|
||||
resp = retry(post_account, headers=headers, use_account=1)
|
||||
@ -400,7 +422,7 @@ class TestObject(unittest.TestCase):
|
||||
|
||||
@requires_acls
|
||||
def test_read_write(self):
|
||||
if skip3:
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get_listing(url, token, parsed, conn):
|
||||
@ -442,7 +464,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant read-write access
|
||||
acl_user = swift_test_user[2]
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'read-write': [acl_user]}
|
||||
headers = {'x-account-access-control': json.dumps(acl)}
|
||||
resp = retry(post_account, headers=headers, use_account=1)
|
||||
@ -481,7 +503,7 @@ class TestObject(unittest.TestCase):
|
||||
|
||||
@requires_acls
|
||||
def test_admin(self):
|
||||
if skip3:
|
||||
if tf.skip3:
|
||||
raise SkipTest
|
||||
|
||||
def get_listing(url, token, parsed, conn):
|
||||
@ -523,7 +545,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEquals(resp.status, 403)
|
||||
|
||||
# grant admin access
|
||||
acl_user = swift_test_user[2]
|
||||
acl_user = tf.swift_test_user[2]
|
||||
acl = {'admin': [acl_user]}
|
||||
headers = {'x-account-access-control': json.dumps(acl)}
|
||||
resp = retry(post_account, headers=headers, use_account=1)
|
||||
@ -561,7 +583,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assert_(self.obj not in listing)
|
||||
|
||||
def test_manifest(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
# Data for the object segments
|
||||
segments1 = ['one', 'two', 'three', 'four', 'five']
|
||||
@ -672,7 +694,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEqual(resp.read(), ''.join(segments2))
|
||||
self.assertEqual(resp.status, 200)
|
||||
|
||||
if not skip3:
|
||||
if not tf.skip3:
|
||||
|
||||
# Ensure we can't access the manifest with the third account
|
||||
def get(url, token, parsed, conn):
|
||||
@ -687,7 +709,7 @@ class TestObject(unittest.TestCase):
|
||||
def post(url, token, parsed, conn):
|
||||
conn.request('POST', '%s/%s' % (parsed.path, self.container),
|
||||
'', {'X-Auth-Token': token,
|
||||
'X-Container-Read': swift_test_perm[2]})
|
||||
'X-Container-Read': tf.swift_test_perm[2]})
|
||||
return check_response(conn)
|
||||
resp = retry(post)
|
||||
resp.read()
|
||||
@ -745,7 +767,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEqual(resp.read(), ''.join(segments3))
|
||||
self.assertEqual(resp.status, 200)
|
||||
|
||||
if not skip3:
|
||||
if not tf.skip3:
|
||||
|
||||
# Ensure we can't access the manifest with the third account
|
||||
# (because the segments are in a protected container even if the
|
||||
@ -763,7 +785,7 @@ class TestObject(unittest.TestCase):
|
||||
def post(url, token, parsed, conn):
|
||||
conn.request('POST', '%s/%s' % (parsed.path, acontainer),
|
||||
'', {'X-Auth-Token': token,
|
||||
'X-Container-Read': swift_test_perm[2]})
|
||||
'X-Container-Read': tf.swift_test_perm[2]})
|
||||
return check_response(conn)
|
||||
resp = retry(post)
|
||||
resp.read()
|
||||
@ -831,7 +853,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEqual(resp.status, 204)
|
||||
|
||||
def test_delete_content_type(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def put(url, token, parsed, conn):
|
||||
@ -853,7 +875,7 @@ class TestObject(unittest.TestCase):
|
||||
'text/html; charset=UTF-8')
|
||||
|
||||
def test_delete_if_delete_at_bad(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def put(url, token, parsed, conn):
|
||||
@ -875,7 +897,7 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEqual(resp.status, 400)
|
||||
|
||||
def test_null_name(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def put(url, token, parsed, conn):
|
||||
@ -884,24 +906,16 @@ class TestObject(unittest.TestCase):
|
||||
self.container), 'test', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
resp = retry(put)
|
||||
if (web_front_end == 'apache2'):
|
||||
if (tf.web_front_end == 'apache2'):
|
||||
self.assertEqual(resp.status, 404)
|
||||
else:
|
||||
self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL')
|
||||
self.assertEqual(resp.status, 412)
|
||||
|
||||
def test_cors(self):
|
||||
if skip:
|
||||
if tf.skip:
|
||||
raise SkipTest
|
||||
|
||||
def is_strict_mode(url, token, parsed, conn):
|
||||
conn.request('GET', '/info')
|
||||
resp = conn.getresponse()
|
||||
if resp.status // 100 == 2:
|
||||
info = json.loads(resp.read())
|
||||
return info.get('swift', {}).get('strict_cors_mode', False)
|
||||
return False
|
||||
|
||||
def put_cors_cont(url, token, parsed, conn, orig):
|
||||
conn.request(
|
||||
'PUT', '%s/%s' % (parsed.path, self.container),
|
||||
@ -924,8 +938,6 @@ class TestObject(unittest.TestCase):
|
||||
'', headers)
|
||||
return conn.getresponse()
|
||||
|
||||
strict_cors = retry(is_strict_mode)
|
||||
|
||||
resp = retry(put_cors_cont, '*')
|
||||
resp.read()
|
||||
self.assertEquals(resp.status // 100, 2)
|
||||
@ -977,6 +989,11 @@ class TestObject(unittest.TestCase):
|
||||
resp.read()
|
||||
self.assertEquals(resp.status, 401)
|
||||
|
||||
try:
|
||||
strict_cors = tf.cluster_info['swift']['strict_cors_mode']
|
||||
except KeyError:
|
||||
strict_cors = False
|
||||
|
||||
if strict_cors:
|
||||
resp = retry(check_cors,
|
||||
'GET', 'cat', {'Origin': 'http://m.com'})
|
||||
@ -1001,6 +1018,64 @@ class TestObject(unittest.TestCase):
|
||||
self.assertEquals(headers.get('access-control-allow-origin'),
|
||||
'http://m.com')
|
||||
|
||||
@requires_policies
|
||||
def test_cross_policy_copy(self):
|
||||
# create container in first policy
|
||||
policy = self.policies.select()
|
||||
container = self._create_container(
|
||||
headers={'X-Storage-Policy': policy['name']})
|
||||
obj = uuid4().hex
|
||||
|
||||
# create a container in second policy
|
||||
other_policy = self.policies.exclude(name=policy['name']).select()
|
||||
other_container = self._create_container(
|
||||
headers={'X-Storage-Policy': other_policy['name']})
|
||||
other_obj = uuid4().hex
|
||||
|
||||
def put_obj(url, token, parsed, conn, container, obj):
|
||||
# to keep track of things, use the original path as the body
|
||||
content = '%s/%s' % (container, obj)
|
||||
path = '%s/%s' % (parsed.path, content)
|
||||
conn.request('PUT', path, content, {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
# create objects
|
||||
for c, o in zip((container, other_container), (obj, other_obj)):
|
||||
resp = retry(put_obj, c, o)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
|
||||
def put_copy_from(url, token, parsed, conn, container, obj, source):
|
||||
dest_path = '%s/%s/%s' % (parsed.path, container, obj)
|
||||
conn.request('PUT', dest_path, '',
|
||||
{'X-Auth-Token': token,
|
||||
'Content-Length': '0',
|
||||
'X-Copy-From': source})
|
||||
return check_response(conn)
|
||||
|
||||
copy_requests = (
|
||||
(container, other_obj, '%s/%s' % (other_container, other_obj)),
|
||||
(other_container, obj, '%s/%s' % (container, obj)),
|
||||
)
|
||||
|
||||
# copy objects
|
||||
for c, o, source in copy_requests:
|
||||
resp = retry(put_copy_from, c, o, source)
|
||||
resp.read()
|
||||
self.assertEqual(resp.status, 201)
|
||||
|
||||
def get_obj(url, token, parsed, conn, container, obj):
|
||||
path = '%s/%s/%s' % (parsed.path, container, obj)
|
||||
conn.request('GET', path, '', {'X-Auth-Token': token})
|
||||
return check_response(conn)
|
||||
|
||||
# validate contents, contents should be source
|
||||
validate_requests = copy_requests
|
||||
for c, o, body in validate_requests:
|
||||
resp = retry(get_obj, c, o)
|
||||
self.assertEqual(resp.status, 200)
|
||||
self.assertEqual(body, resp.read())
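The copy step above is plain server-side copy through the public API: a zero-length PUT to the destination carrying an X-Copy-From header that names the source as container/object, which works even when the two containers live in different storage policies. A rough illustration of the same request with python-requests; the endpoint, token, and container names below are made up for the example:

import requests  # illustration only

storage_url = 'http://127.0.0.1:8080/v1/AUTH_test'   # hypothetical endpoint
token = 'AUTH_tk_example'                            # hypothetical token

# Copy container-a/obj into container-b/obj-copy server-side.
resp = requests.put(
    storage_url + '/container-b/obj-copy',
    headers={'X-Auth-Token': token,
             'Content-Length': '0',
             'X-Copy-From': 'container-a/obj'})
print(resp.status_code)  # expect 201 when the copy succeeds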
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -14,10 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Modifications by Red Hat, Inc.

from datetime import datetime
import os
import hashlib
import hmac
import json
@@ -25,131 +22,18 @@ import locale
import random
import StringIO
import time
import threading
import unittest
import urllib
import uuid
import eventlet
from nose import SkipTest
from ConfigParser import ConfigParser

from test import get_config
from swift.common.storage_policy import POLICY

from test.functional import normalized_urls, load_constraint, cluster_info
import test.functional as tf
from test.functional.swift_test_client import Account, Connection, File, \
    ResponseError
from swift.common.constraints import MAX_FILE_SIZE, MAX_META_NAME_LENGTH, \
    MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
    MAX_OBJECT_NAME_LENGTH, CONTAINER_LISTING_LIMIT, ACCOUNT_LISTING_LIMIT, \
    MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH, MAX_HEADER_SIZE
from gluster.swift.common.constraints import \
    set_object_name_component_length, get_object_name_component_length

default_constraints = dict((
    ('max_file_size', MAX_FILE_SIZE),
    ('max_meta_name_length', MAX_META_NAME_LENGTH),
    ('max_meta_value_length', MAX_META_VALUE_LENGTH),
    ('max_meta_count', MAX_META_COUNT),
    ('max_meta_overall_size', MAX_META_OVERALL_SIZE),
    ('max_object_name_length', MAX_OBJECT_NAME_LENGTH),
    ('container_listing_limit', CONTAINER_LISTING_LIMIT),
    ('account_listing_limit', ACCOUNT_LISTING_LIMIT),
    ('max_account_name_length', MAX_ACCOUNT_NAME_LENGTH),
    ('max_container_name_length', MAX_CONTAINER_NAME_LENGTH),
    ('max_header_size', MAX_HEADER_SIZE)))
constraints_conf = ConfigParser()
conf_exists = constraints_conf.read('/etc/swift/swift.conf')
# Constraints are set first from the test config, then from
# /etc/swift/swift.conf if it exists. If swift.conf doesn't exist,
# then limit test coverage. This allows SAIO tests to work fine but
# requires remote functional testing to know something about the cluster
# that is being tested.
config = get_config('func_test')
for k in default_constraints:
    if k in config:
        # prefer what's in test.conf
        config[k] = int(config[k])
    elif conf_exists:
        # swift.conf exists, so use what's defined there (or swift defaults)
        # This normally happens when the test is running locally to the cluster
        # as in a SAIO.
        config[k] = default_constraints[k]
    else:
        # .functests don't know what the constraints of the tested cluster are,
        # so the tests can't reliably pass or fail. Therefore, skip those
        # tests.
        config[k] = '%s constraint is not defined' % k

web_front_end = config.get('web_front_end', 'integral')
normalized_urls = config.get('normalized_urls', False)
set_object_name_component_length()


def load_constraint(name):
    c = config[name]
    if not isinstance(c, int):
        raise SkipTest(c)
    return c
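The precedence above leaves each entry of config as either an int (a usable limit) or a string explaining why the limit is unknown, and load_constraint turns the string case into a skipped test. A self-contained sketch of that behaviour, using a made-up fake_config instead of the real test.conf/swift.conf lookup:

import unittest

# Hypothetical stand-in for the merged func_test/swift.conf configuration.
fake_config = {
    'max_meta_count': 90,                                        # known limit
    'max_file_size': 'max_file_size constraint is not defined',  # unknown
}


def load_constraint(name):
    # Same shape as the helper above: ints pass through, anything else skips.
    c = fake_config[name]
    if not isinstance(c, int):
        raise unittest.SkipTest(c)
    return c


print(load_constraint('max_meta_count'))      # 90
try:
    load_constraint('max_file_size')
except unittest.SkipTest as e:
    print('skipped: %s' % e)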
|
||||
|
||||
locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C'))
|
||||
|
||||
|
||||
def create_limit_filename(name_limit):
    """
    Split a large object name with slashes so that each path
    component conforms to the GlusterFS file name length
    constraint.

    Example: take an object name of 'a' * 1024 and
    convert it to a*255/a*255/...
    """
    # Get the file name limit from the configuration file
    filename_limit = get_object_name_component_length()

    # Convert string to a list: "abc" -> ['a', 'b', 'c']
    filename_list = list('a' * name_limit)

    # Replace chars at filename limits to '/'
    for index in range(filename_limit, name_limit, filename_limit):
        filename_list[index] = os.path.sep

    # Cannot end in a '/'
    if os.path.sep == filename_list[-1]:
        return "".join(filename_list[:-1])
    else:
        return "".join(filename_list)
|
||||
|
||||
|
||||
def chunks(s, length=3):
    i, j = 0, length
    while i < len(s):
        yield s[i:j]
        i, j = j, j + length
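A quick illustration of what the generator yields (example values of my own, assuming the chunks() definition above is in scope):

print(list(chunks('abcdefgh')))   # ['abc', 'def', 'gh']
print(list(chunks('swift', 2)))   # ['sw', 'if', 't']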
|
||||
|
||||
|
||||
def timeout(seconds, method, *args, **kwargs):
    class TimeoutThread(threading.Thread):
        def __init__(self, method, *args, **kwargs):
            threading.Thread.__init__(self)

            self.method = method
            self.args = args
            self.kwargs = kwargs
            self.exception = None

        def run(self):
            try:
                self.method(*self.args, **self.kwargs)
            except Exception as e:
                self.exception = e

    t = TimeoutThread(method, *args, **kwargs)
    t.start()
    t.join(seconds)

    if t.exception:
        raise t.exception

    if t.isAlive():
        t._Thread__stop()
        return True
    return False
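In other words, the helper returns True when the wrapped call had to be abandoned at the deadline and False when it finished in time, re-raising any exception the call produced. The t._Thread__stop() call is a CPython 2 internal; a minimal runnable sketch of the same contract without it (the worker is simply left running as a daemon thread) could look like this, as an example only, not the code used by the tests:

import threading
import time


def timeout(seconds, method, *args, **kwargs):
    state = {'exc': None}

    def runner():
        try:
            method(*args, **kwargs)
        except Exception as e:
            state['exc'] = e

    t = threading.Thread(target=runner)
    t.daemon = True
    t.start()
    t.join(seconds)
    if state['exc']:
        raise state['exc']
    return t.is_alive()          # True -> timed out, False -> finished


print(timeout(1, time.sleep, 0.1))   # False
print(timeout(0.2, time.sleep, 2))   # True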
|
||||
|
||||
|
||||
class Utils(object):
|
||||
@ -207,10 +91,10 @@ class Base2(object):
|
||||
class TestAccountEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.containers = []
|
||||
@ -394,10 +278,10 @@ class TestAccountUTF8(Base2, TestAccount):
|
||||
class TestAccountNoContainersEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
|
||||
@ -423,10 +307,10 @@ class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
|
||||
class TestContainerEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
@ -715,10 +599,10 @@ class TestContainerPathsEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
raise SkipTest('Objects ending in / are not supported')
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.file_size = 8
|
||||
@ -894,10 +778,10 @@ class TestContainerPaths(Base):
|
||||
class TestFileEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
@ -1079,7 +963,7 @@ class TestFile(Base):
|
||||
limit = load_constraint('max_object_name_length')
|
||||
|
||||
for l in (1, 10, limit / 2, limit - 1, limit, limit + 1, limit * 2):
|
||||
file_item = self.env.container.file(create_limit_filename(l))
|
||||
file_item = self.env.container.file('a' * l)
|
||||
|
||||
if l <= limit:
|
||||
self.assert_(file_item.write())
|
||||
@ -1245,6 +1129,15 @@ class TestFile(Base):
|
||||
limit = load_constraint('max_file_size')
|
||||
tsecs = 3
|
||||
|
||||
def timeout(seconds, method, *args, **kwargs):
|
||||
try:
|
||||
with eventlet.Timeout(seconds):
|
||||
method(*args, **kwargs)
|
||||
except eventlet.Timeout:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1,
|
||||
limit + 10, limit + 100):
|
||||
|
||||
@ -1553,8 +1446,16 @@ class TestFile(Base):
|
||||
self.assertEqual(etag, header_etag)
|
||||
|
||||
def testChunkedPut(self):
|
||||
if (web_front_end == 'apache2'):
|
||||
raise SkipTest()
|
||||
if (tf.web_front_end == 'apache2'):
|
||||
raise SkipTest("Chunked PUT can only be tested with apache2 web"
|
||||
" front end")
|
||||
|
||||
def chunks(s, length=3):
|
||||
i, j = 0, length
|
||||
while i < len(s):
|
||||
yield s[i:j]
|
||||
i, j = j, j + length
|
||||
|
||||
data = File.random_data(10000)
|
||||
etag = File.compute_md5sum(data)
|
||||
|
||||
@ -1578,10 +1479,10 @@ class TestFileUTF8(Base2, TestFile):
|
||||
class TestDloEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
@ -1699,6 +1600,51 @@ class TestDlo(Base):
|
||||
# try not to leave this around for other tests to stumble over
|
||||
self.env.container.file("copied-man1").delete()
|
||||
|
||||
def test_dlo_if_match_get(self):
|
||||
manifest = self.env.container.file("man1")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.read,
|
||||
hdrs={'If-Match': 'not-%s' % etag})
|
||||
self.assert_status(412)
|
||||
|
||||
manifest.read(hdrs={'If-Match': etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_dlo_if_none_match_get(self):
|
||||
manifest = self.env.container.file("man1")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.read,
|
||||
hdrs={'If-None-Match': etag})
|
||||
self.assert_status(304)
|
||||
|
||||
manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_dlo_if_match_head(self):
|
||||
manifest = self.env.container.file("man1")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.info,
|
||||
hdrs={'If-Match': 'not-%s' % etag})
|
||||
self.assert_status(412)
|
||||
|
||||
manifest.info(hdrs={'If-Match': etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_dlo_if_none_match_head(self):
|
||||
manifest = self.env.container.file("man1")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.info,
|
||||
hdrs={'If-None-Match': etag})
|
||||
self.assert_status(304)
|
||||
|
||||
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
|
||||
self.assert_status(200)
|
||||
|
||||
|
||||
class TestDloUTF8(Base2, TestDlo):
|
||||
set_up = False
|
||||
|
||||
@ -1706,10 +1652,10 @@ class TestDloUTF8(Base2, TestDlo):
|
||||
class TestFileComparisonEnv(object):
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
@ -1761,19 +1707,25 @@ class TestFileComparison(Base):
|
||||
for file_item in self.env.files:
|
||||
hdrs = {'If-Modified-Since': self.env.time_old_f1}
|
||||
self.assert_(file_item.read(hdrs=hdrs))
|
||||
self.assert_(file_item.info(hdrs=hdrs))
|
||||
|
||||
hdrs = {'If-Modified-Since': self.env.time_new}
|
||||
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
|
||||
self.assert_status(304)
|
||||
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
|
||||
self.assert_status(304)
|
||||
|
||||
def testIfUnmodifiedSince(self):
|
||||
for file_item in self.env.files:
|
||||
hdrs = {'If-Unmodified-Since': self.env.time_new}
|
||||
self.assert_(file_item.read(hdrs=hdrs))
|
||||
self.assert_(file_item.info(hdrs=hdrs))
|
||||
|
||||
hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
|
||||
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
|
||||
self.assert_status(412)
|
||||
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
|
||||
self.assert_status(412)
|
||||
|
||||
def testIfMatchAndUnmodified(self):
|
||||
for file_item in self.env.files:
|
||||
@ -1823,17 +1775,16 @@ class TestSloEnv(object):
|
||||
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
|
||||
if cls.slo_enabled is None:
|
||||
cluster_info = cls.conn.cluster_info()
|
||||
cls.slo_enabled = 'slo' in cluster_info
|
||||
if not cls.slo_enabled:
|
||||
return
|
||||
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
|
||||
cls.container = cls.account.container(Utils.create_name())
|
||||
@ -1899,7 +1850,6 @@ class TestSlo(Base):
|
||||
set_up = False
|
||||
|
||||
def setUp(self):
|
||||
raise SkipTest("SLO not enabled yet in gluster-swift")
|
||||
super(TestSlo, self).setUp()
|
||||
if self.env.slo_enabled is False:
|
||||
raise SkipTest("SLO not enabled")
|
||||
@ -2039,6 +1989,50 @@ class TestSlo(Base):
|
||||
self.assertEqual('application/json; charset=utf-8',
|
||||
got_info['content_type'])
|
||||
|
||||
def test_slo_if_match_get(self):
|
||||
manifest = self.env.container.file("manifest-abcde")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.read,
|
||||
hdrs={'If-Match': 'not-%s' % etag})
|
||||
self.assert_status(412)
|
||||
|
||||
manifest.read(hdrs={'If-Match': etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_slo_if_none_match_get(self):
|
||||
manifest = self.env.container.file("manifest-abcde")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.read,
|
||||
hdrs={'If-None-Match': etag})
|
||||
self.assert_status(304)
|
||||
|
||||
manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_slo_if_match_head(self):
|
||||
manifest = self.env.container.file("manifest-abcde")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.info,
|
||||
hdrs={'If-Match': 'not-%s' % etag})
|
||||
self.assert_status(412)
|
||||
|
||||
manifest.info(hdrs={'If-Match': etag})
|
||||
self.assert_status(200)
|
||||
|
||||
def test_slo_if_none_match_head(self):
|
||||
manifest = self.env.container.file("manifest-abcde")
|
||||
etag = manifest.info()['etag']
|
||||
|
||||
self.assertRaises(ResponseError, manifest.info,
|
||||
hdrs={'If-None-Match': etag})
|
||||
self.assert_status(304)
|
||||
|
||||
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
|
||||
self.assert_status(200)
|
||||
|
||||
|
||||
class TestSloUTF8(Base2, TestSlo):
|
||||
set_up = False
|
||||
@ -2049,11 +2043,11 @@ class TestObjectVersioningEnv(object):
|
||||
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
|
||||
cls.account = Account(cls.conn, config.get('account',
|
||||
config['username']))
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
|
||||
# avoid getting a prefix that stops halfway through an encoded
|
||||
# character
|
||||
@ -2073,6 +2067,61 @@ class TestObjectVersioningEnv(object):
|
||||
cls.versioning_enabled = 'versions' in container_info
|
||||
|
||||
|
||||
class TestCrossPolicyObjectVersioningEnv(object):
|
||||
# tri-state: None initially, then True/False
|
||||
versioning_enabled = None
|
||||
multiple_policies_enabled = None
|
||||
policies = None
|
||||
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
|
||||
if cls.multiple_policies_enabled is None:
|
||||
try:
|
||||
cls.policies = tf.FunctionalStoragePolicyCollection.from_info()
|
||||
except AssertionError:
|
||||
pass
|
||||
|
||||
if cls.policies and len(cls.policies) > 1:
|
||||
cls.multiple_policies_enabled = True
|
||||
else:
|
||||
cls.multiple_policies_enabled = False
|
||||
# We have to lie here that versioning is enabled. We actually
|
||||
# don't know, but it does not matter. We know these tests cannot
|
||||
# run without multiple policies present. If multiple policies are
|
||||
# present, we won't be setting this field to any value, so it
|
||||
# should all still work.
|
||||
cls.versioning_enabled = True
|
||||
return
|
||||
|
||||
policy = cls.policies.select()
|
||||
version_policy = cls.policies.exclude(name=policy['name']).select()
|
||||
|
||||
cls.account = Account(cls.conn, tf.config.get('account',
|
||||
tf.config['username']))
|
||||
|
||||
# avoid getting a prefix that stops halfway through an encoded
|
||||
# character
|
||||
prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
|
||||
|
||||
cls.versions_container = cls.account.container(prefix + "-versions")
|
||||
if not cls.versions_container.create(
|
||||
{POLICY: policy['name']}):
|
||||
raise ResponseError(cls.conn.response)
|
||||
|
||||
cls.container = cls.account.container(prefix + "-objs")
|
||||
if not cls.container.create(
|
||||
hdrs={'X-Versions-Location': cls.versions_container.name,
|
||||
POLICY: version_policy['name']}):
|
||||
raise ResponseError(cls.conn.response)
|
||||
|
||||
container_info = cls.container.info()
|
||||
# if versioning is off, then X-Versions-Location won't persist
|
||||
cls.versioning_enabled = 'versions' in container_info
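Like the cross-policy copy test earlier, this environment picks two distinct policies via select() and exclude(name=...).select() on the collection built from /info. A tiny self-contained sketch of that selection pattern with a hypothetical stand-in collection (not the real test.functional class):

import random


class PolicyCollection(object):
    # Hypothetical stand-in for FunctionalStoragePolicyCollection.
    def __init__(self, policies):
        self.policies = list(policies)

    def __len__(self):
        return len(self.policies)

    def select(self):
        return random.choice(self.policies)

    def exclude(self, **filters):
        keep = [p for p in self.policies
                if all(p.get(k) != v for k, v in filters.items())]
        return PolicyCollection(keep)


policies = PolicyCollection([{'name': 'gold'}, {'name': 'silver'}])
policy = policies.select()
other = policies.exclude(name=policy['name']).select()
assert policy['name'] != other['name']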
|
||||
|
||||
|
||||
class TestObjectVersioning(Base):
|
||||
env = TestObjectVersioningEnv
|
||||
set_up = False
|
||||
@ -2123,16 +2172,30 @@ class TestObjectVersioningUTF8(Base2, TestObjectVersioning):
|
||||
set_up = False
|
||||
|
||||
|
||||
class TestCrossPolicyObjectVersioning(TestObjectVersioning):
|
||||
env = TestCrossPolicyObjectVersioningEnv
|
||||
set_up = False
|
||||
|
||||
def setUp(self):
|
||||
super(TestCrossPolicyObjectVersioning, self).setUp()
|
||||
if self.env.multiple_policies_enabled is False:
|
||||
raise SkipTest('Cross policy test requires multiple policies')
|
||||
elif self.env.multiple_policies_enabled is not True:
|
||||
# just some sanity checking
|
||||
raise Exception("Expected multiple_policies_enabled "
|
||||
"to be True/False, got %r" % (
|
||||
self.env.versioning_enabled,))
|
||||
|
||||
|
||||
class TestTempurlEnv(object):
|
||||
tempurl_enabled = None # tri-state: None initially, then True/False
|
||||
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
|
||||
if cls.tempurl_enabled is None:
|
||||
cluster_info = cls.conn.cluster_info()
|
||||
cls.tempurl_enabled = 'tempurl' in cluster_info
|
||||
if not cls.tempurl_enabled:
|
||||
return
|
||||
@ -2142,7 +2205,7 @@ class TestTempurlEnv(object):
|
||||
cls.tempurl_key2 = Utils.create_name()
|
||||
|
||||
cls.account = Account(
|
||||
cls.conn, config.get('account', config['username']))
|
||||
cls.conn, tf.config.get('account', tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
cls.account.update_metadata({
|
||||
'temp-url-key': cls.tempurl_key,
|
||||
@ -2303,17 +2366,16 @@ class TestSloTempurlEnv(object):
|
||||
|
||||
@classmethod
|
||||
def setUp(cls):
|
||||
cls.conn = Connection(config)
|
||||
cls.conn = Connection(tf.config)
|
||||
cls.conn.authenticate()
|
||||
|
||||
if cls.enabled is None:
|
||||
cluster_info = cls.conn.cluster_info()
|
||||
cls.enabled = 'tempurl' in cluster_info and 'slo' in cluster_info
|
||||
|
||||
cls.tempurl_key = Utils.create_name()
|
||||
|
||||
cls.account = Account(
|
||||
cls.conn, config.get('account', config['username']))
|
||||
cls.conn, tf.config.get('account', tf.config['username']))
|
||||
cls.account.delete_containers()
|
||||
cls.account.update_metadata({'temp-url-key': cls.tempurl_key})
|
||||
|
||||
|
@ -14,69 +14,35 @@
|
||||
# limitations under the License.
|
||||
|
||||
import unittest
|
||||
import swift.common.constraints
|
||||
from mock import Mock, patch
|
||||
from gluster.swift.common import constraints as cnt
|
||||
|
||||
|
||||
def mock_glusterfs_mount(*args, **kwargs):
|
||||
return True
|
||||
|
||||
|
||||
def mock_constraints_conf_int(*args, **kwargs):
|
||||
return 1000
|
||||
|
||||
|
||||
def mock_check_object_creation(*args, **kwargs):
|
||||
return None
|
||||
|
||||
|
||||
def mock_check_mount(*args, **kwargs):
|
||||
return True
|
||||
|
||||
|
||||
def mock_check_mount_err(*args, **kwargs):
|
||||
return False
|
||||
|
||||
|
||||
class TestConstraints(unittest.TestCase):
|
||||
""" Tests for common.constraints """
|
||||
|
||||
def tearDown(self):
|
||||
cnt.set_object_name_component_length()
|
||||
|
||||
def test_set_object_name_component_length(self):
|
||||
len = cnt.get_object_name_component_length()
|
||||
cnt.set_object_name_component_length(len+1)
|
||||
self.assertEqual(len, cnt.get_object_name_component_length()-1)
|
||||
|
||||
if hasattr(swift.common.constraints, 'constraints_conf_int'):
|
||||
len = swift.common.constraints.constraints_conf_int(
|
||||
'max_object_name_component_length', 255)
|
||||
cnt.set_object_name_component_length()
|
||||
self.assertEqual(len, cnt.get_object_name_component_length())
|
||||
|
||||
with patch('swift.common.constraints.constraints_conf_int',
|
||||
mock_constraints_conf_int):
|
||||
cnt.set_object_name_component_length()
|
||||
self.assertEqual(cnt.get_object_name_component_length(), 1000)
|
||||
|
||||
def test_validate_obj_name_component(self):
|
||||
max_obj_len = cnt.get_object_name_component_length()
|
||||
self.assertFalse(cnt.validate_obj_name_component('tests'*(max_obj_len/5)))
|
||||
cnt.set_object_name_component_length(300)
|
||||
self.assertFalse(cnt.validate_obj_name_component('tests'*60))
|
||||
max_obj_len = cnt.SOF_MAX_OBJECT_NAME_LENGTH
|
||||
self.assertFalse(
|
||||
cnt.validate_obj_name_component('tests' * (max_obj_len / 5)))
|
||||
self.assertEqual(cnt.validate_obj_name_component(
|
||||
'tests' * 60), 'too long (300)')
|
||||
|
||||
def test_validate_obj_name_component_err(self):
|
||||
max_obj_len = cnt.get_object_name_component_length()
|
||||
self.assertTrue(cnt.validate_obj_name_component('tests'*(max_obj_len/5+1)))
|
||||
max_obj_len = cnt.SOF_MAX_OBJECT_NAME_LENGTH
|
||||
self.assertTrue(cnt.validate_obj_name_component(
|
||||
'tests' * (max_obj_len / 5 + 1)))
|
||||
self.assertTrue(cnt.validate_obj_name_component('.'))
|
||||
self.assertTrue(cnt.validate_obj_name_component('..'))
|
||||
self.assertTrue(cnt.validate_obj_name_component(''))
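As the two tests above imply, the validator returns an empty (falsy) string for an acceptable path component and a truthy reason string (for example 'too long (300)') for components that are over the length limit, '.', '..', or empty. A rough self-contained sketch of that contract; the default limit and the wording of the secondary messages are assumptions of mine, not the real swiftonfile values:

def validate_obj_name_component(obj, component_limit=255):
    # Sketch only: '' means the component is acceptable.
    if len(obj) > component_limit:
        return 'too long (%d)' % len(obj)
    if obj in ('.', '..'):
        return 'cannot be . or ..'
    if not obj:
        return 'cannot be empty'
    return ''


assert validate_obj_name_component('tests' * 51) == ''            # 255 chars
assert validate_obj_name_component('tests' * 60) == 'too long (300)'
assert validate_obj_name_component('..')
assert validate_obj_name_component('')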
|
||||
|
||||
def test_gluster_check_object_creation(self):
|
||||
with patch('gluster.swift.common.constraints.__check_object_creation',
|
||||
def test_sof_check_object_creation(self):
|
||||
with patch('gluster.swift.common.constraints.swift_check_object_creation',
|
||||
mock_check_object_creation):
|
||||
req = Mock()
|
||||
req.headers = []
|
||||
self.assertFalse(cnt.gluster_check_object_creation(req, 'dir/z'))
|
||||
self.assertFalse(cnt.sof_check_object_creation(req, 'dir/z'))
|
||||
|
4  tox.ini

@@ -7,6 +7,7 @@ skipsdist = True
usedevelop = True
install_command = pip install --allow-external netifaces --allow-insecure netifaces -U {opts} {packages}
whitelist_externals=bash
                    yes
setenv = VIRTUAL_ENV={envdir}
         NOSE_WITH_OPENSTACK=1
         NOSE_OPENSTACK_COLOR=1
@@ -38,8 +39,7 @@ downloadcache = ~/cache/pip

[testenv:functest]
changedir = {toxinidir}
commands = bash ./.functests
           bash tools/gswauth_functional_tests.sh
commands = bash ./.functests -q

[testenv:pep8]
changedir = {toxinidir}