diff --git a/.functests b/.functests
index 199d97c..1e1b9f5 100755
--- a/.functests
+++ b/.functests
@@ -18,6 +18,15 @@
 # This program expects to be run by tox in a virtual python environment
 # so that it does not pollute the host development system
 
+GREEN='\e[0;32m'
+RED='\e[0;31m'
+NC='\e[0m' # No Color
+
+print()
+{
+    echo -e "\n${GREEN}$*${NC}"
+}
+
 sudo_env()
 {
     sudo bash -c "PATH=$PATH $*"
@@ -25,54 +34,61 @@ sudo_env()
 
 cleanup()
 {
-    sudo service memcached stop
-    sudo_env swift-init main stop
-    sudo rm -rf /etc/swift > /dev/null 2>&1
-    sudo rm -rf /mnt/gluster-object/test{,2}/* > /dev/null 2>&1
-    sudo setfattr -x user.swift.metadata /mnt/gluster-object/test{,2} > /dev/null 2>&1
+    print "Cleaning SoF mount point"
+    sudo rm -rf /mnt/swiftonfile/test/* > /dev/null 2>&1
+    sudo setfattr -x user.swift.metadata /mnt/swiftonfile/test > /dev/null 2>&1
 }
 
-quit()
-{
-    echo "$1"
-    exit 1
-}
-
-
 fail()
 {
-    cleanup
-    quit "$1"
+    cleanup
+    echo -e "\n${RED}$1${NC}"
+    exit 1
 }
 
 ### MAIN ###
 
-# This script runs functional tests only with tempauth
-# Only run if there is no configuration in the system
-if [ -x /etc/swift ] ; then
-    quit "/etc/swift exists, cannot run functional tests."
+print """
+Before proceeding, please make sure you already have:
+1. SAIO deployment.
+2. XFS/GlusterFS mounted at /mnt/swiftonfile/test
+3. Added swiftonfile policy section to swift.conf file.
+   Example:
+
+   [storage-policy:2]
+   name = swiftonfile
+   default = yes
+
+4. Copied etc/object-server.conf-gluster to /etc/swift/object-server/5.conf
+
+5. Generated ring files for swiftonfile policy.
+   Example: for policy with index 2
+
+   swift-ring-builder object-2.builder create 1 1 1
+   swift-ring-builder object-2.builder add r1z1-127.0.0.1:6050/test 1
+   swift-ring-builder object-2.builder rebalance
+
+6. Started memcached and swift services.
+"""
+
+prompt=true
+if [ "$1" == "-q" ]; then
+    prompt=false
 fi
 
-# Check the directories exist
-DIRS="/mnt/gluster-object /mnt/gluster-object/test /mnt/gluster-object/test2"
-for d in $DIRS ; do
-    if [ ! -x $d ] ; then
-        quit "$d must exist on an XFS or GlusterFS volume"
-    fi
-done
+if $prompt; then
+    read -p "Continue ? " -r
+    if [[ $REPLY =~ ^[Nn]$ ]]
+    then
+        exit 1
+    fi
+fi
 
 export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf
 
-# Install the configuration files
-sudo mkdir /etc/swift > /dev/null 2>&1
-sudo cp -r test/functional_auth/tempauth/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift"
-sudo_env gluster-swift-gen-builders test test2 || fail "Unable to create ring files"
-
-# Start the services
-sudo service memcached start || fail "Unable to start memcached"
-sudo_env swift-init main start || fail "Unable to start swift"
-
 mkdir functional_tests_result > /dev/null 2>&1
+
+print "Running functional tests"
 nosetests -v --exe \
     --with-xunit \
     --xunit-file functional_tests_result/gluster-swift-generic-functional-TC-report.xml \
diff --git a/.functests-ci b/.functests-ci
new file mode 100755
index 0000000..93a0d83
--- /dev/null
+++ b/.functests-ci
@@ -0,0 +1,111 @@
+#!/bin/bash
+
+# Copyright (c) 2013 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This program expects to be run by tox in a virtual python environment
+# so that it does not pollute the host development system
+
+GREEN='\e[0;32m'
+RED='\e[0;31m'
+NC='\e[0m' # No Color
+
+print()
+{
+    echo -e "\n${GREEN}$*${NC}"
+}
+
+sudo_env()
+{
+    sudo bash -c "PATH=$PATH $*"
+}
+
+cleanup()
+{
+    print "Stopping memcached"
+    sudo service memcached stop
+    print "Stopping swift services"
+    sudo_env swift-init main stop
+    print "Cleaning SoF mount point"
+    sudo rm -rf /mnt/swiftonfile/test/* > /dev/null 2>&1
+    sudo setfattr -x user.swift.metadata /mnt/swiftonfile/test > /dev/null 2>&1
+    print "Invoking SAIO's resetswift script"
+    resetswift
+}
+
+fail()
+{
+    cleanup
+    echo -e "\n${RED}$1${NC}"
+    exit 1
+}
+
+### MAIN ###
+
+print """
+Before proceeding, please make sure you already have:
+1. SAIO deployment. (with resetswift and remakerings script)
+2. XFS/GlusterFS mounted at /mnt/swiftonfile/test
+"""
+
+prompt=true
+if [ "$1" == "-q" ]; then
+    prompt=false
+fi
+
+if $prompt; then
+    read -p "Continue ? " -r
+    if [[ $REPLY =~ ^[Nn]$ ]]
+    then
+        exit 1
+    fi
+fi
+
+print "Invoking SAIO's resetswift script"
+resetswift
+
+print "Invoking SAIO's remakerings script"
+remakerings
+
+print "Copying conf files into /etc/swift. This will replace swift.conf and test.conf"
+\cp etc/object-server.conf-gluster /etc/swift/object-server/5.conf
+\cp test/functional/conf/swift.conf /etc/swift/swift.conf
+\cp test/functional/conf/test.conf /etc/swift/test.conf
+
+print "Generating additional object-rings for swiftonfile SP"
+cd /etc/swift
+swift-ring-builder object-2.builder create 1 1 1
+swift-ring-builder object-2.builder add r1z1-127.0.0.1:6050/test 1
+swift-ring-builder object-2.builder rebalance
+cd -
+
+export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf
+
+print "Starting memcached"
+sudo service memcached start || fail "Unable to start memcached"
+print "Starting swift services"
+sudo_env swift-init main start || fail "Unable to start swift"
+
+mkdir functional_tests_result > /dev/null 2>&1
+
+print "Running functional tests"
+nosetests -v --exe \
+    --with-xunit \
+    --xunit-file functional_tests_result/gluster-swift-generic-functional-TC-report.xml \
+    --with-html-output \
+    --html-out-file functional_tests_result/gluster-swift-generic-functional-result.html \
+    test/functional || fail "Functional tests failed"
+cleanup
+exit 0
diff --git a/bin/gluster-swift-gen-builders b/bin/gluster-swift-gen-builders
deleted file mode 100755
index 2e5fe1f..0000000
--- a/bin/gluster-swift-gen-builders
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-
-# Note that these port numbers must match the configured values for the
-# various servers in their configuration files.
-declare -A port=(["account.builder"]=6012 ["container.builder"]=6011 \
-    ["object.builder"]=6010)
-
-print_usage() {
-    echo "
-    NAME
-        gluster-swift-gen-builders - Registers GlusterFS volumes to be accessed by
-        object storage.
-    SYNOPSIS
-        gluster-swift-gen-builders [-v] [-h] volumes...
-    DESCRIPTION
-        Register GlusterFS volumes to be accessed over OpenStack Swift object
-        storage.
- OPTIONS - -v or --verbose - Verbose - -h or --help - Prints help screen - EXAMPLES - gluster-swift-gen-builders myvol1 myvol2 - -Creates new ring files with myvol1 and myvol2 - - gluster-swift-gen-builders myvol2 - -Creates new ring files by removing myvol1 - " -} - -builder_files="account.builder container.builder object.builder" - -function create { - swift-ring-builder $1 create 1 1 1 >> /tmp/out -} - -function add { - swift-ring-builder $1 add z$2-127.0.0.1:$3/$4_ 100.0 -} - -function rebalance { - swift-ring-builder $1 rebalance -} - -function build { - swift-ring-builder $1 -} - -verbose=0 -outdev="/dev/null" - -if [ "$1" = "-v" ] || [ "$1" = "--verbose" ]; then - verbose=1 - outdev="/dev/stdout" - shift -fi - -if [ "x$1" = "x" ]; then - echo "Please specify the gluster volume name to use." - print_usage - exit 1 -fi - -if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then - print_usage - exit 0 -fi - - -cd /etc/swift - -for builder_file in $builder_files -do - create $builder_file - - zone=1 - for volname in $@ - do - add $builder_file $zone ${port[$builder_file]} $volname >& $outdev - zone=$(expr $zone + 1) - done - - rebalance $builder_file >& $outdev - build $builder_file >& $outdev - -done - -echo "Ring files are prepared in /etc/swift. Please restart object store services" diff --git a/doc/man/gluster-swift-gen-builders.8 b/doc/man/gluster-swift-gen-builders.8 deleted file mode 100644 index 3fb3b81..0000000 --- a/doc/man/gluster-swift-gen-builders.8 +++ /dev/null @@ -1,69 +0,0 @@ -.TH gluster-swift-gen-builders 8 "gluster-swift helper utility" "18 November 2013" "Red Hat Inc." -.SH NAME -\fBgluster-swift-gen-builders \fP- Registers GlusterFS volumes to be accessed by -\fBOpenStack Swift. -\fB -.SH SYNOPSIS -.nf -.fam C -\fBgluster-swift-gen-builders\fP [\fB-v\fP] [\fB-h\fP] volumes\.\.\. - -.fam T -.fi -.fam T -.fi -.SH DESCRIPTION -Register GlusterFS volumes to be accessed over OpenStack Swift. -.SH OPTIONS -\fB-v\fP or \fB--verbose\fP -.PP -.nf -.fam C - Verbose - -.fam T -.fi -\fB-h\fP or \fB--help\fP -.PP -.nf -.fam C - Prints help screen - -.fam T -.fi -.SH EXAMPLES - -\fBgluster-swift-gen-builders\fP vol1 vol2 -.PP -.nf -.fam C - Creates new ring files with vol1 and vol2 - -.fam T -.fi -\fBgluster-swift-gen-builders\fP vol2 -.PP -.nf -.fam C - Creates new ring files by removing vol1 - -.fam T -.fi -\fBgluster-swift-gen-builders\fP \fB-v\fP vol1 -.PP -.nf -.fam C - Create new ring files with vol1, (Verbose). - -.fam T -.fi -\fBgluster-swift-gen-builders\fP \fB-h\fP -.PP -.nf -.fam C - Displays help screen - -.fam T -.fi -.SH COPYRIGHT -\fBCopyright\fP(c) 2013 RedHat, Inc. 
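Note: with `gluster-swift-gen-builders` and its man page removed, ring files for a swiftonfile storage policy are built directly with `swift-ring-builder`, as the prompt text added to `.functests` describes. A minimal sketch of the manual equivalent, assuming policy index 2, port 6050, and a single volume named `test` under the `/mnt/swiftonfile` mount point; the account and container rings are assumed to come from SAIO's `remakerings`:

~~~
# Build a one-replica object ring for the swiftonfile policy (index 2).
cd /etc/swift
swift-ring-builder object-2.builder create 1 1 1
# The device name ("test") must match the directory under the mount point
# configured in the object-server conf used for this policy.
swift-ring-builder object-2.builder add r1z1-127.0.0.1:6050/test 1
swift-ring-builder object-2.builder rebalance
cd -
~~~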
diff --git a/doc/markdown/auth_guide.md b/doc/markdown/auth_guide.md deleted file mode 100644 index 274dd32..0000000 --- a/doc/markdown/auth_guide.md +++ /dev/null @@ -1,479 +0,0 @@ -# Authentication Services Start Guide - -## Contents -* [Keystone](#keystone) - * [Overview](#keystone_overview) - * [Creation of swift accounts](#keystone_swift_accounts) - * [Configuration](#keystone_configuration) - * [Configuring keystone endpoint](#keystone_endpoint) -* [GSwauth](#gswauth) - * [Overview](#gswauth_overview) - * [Installing GSwauth](#gswauth_install) - * [User roles](#gswauth_user_roles) - * [GSwauth Tools](#gswauth_tools) - * [Authenticating a user](#gswauth_authenticate) -* [Swiftkerbauth](#swiftkerbauth) - * [Architecture](swiftkerbauth/architecture.md) - * [RHEL IPA Server Guide](swiftkerbauth/ipa_server.md) - * [RHEL IPA Client Guide](swiftkerbauth/ipa_client.md) - * [Windows AD Server Guide](swiftkerbauth/AD_server.md) - * [Windows AD Client Guide](swiftkerbauth/AD_client.md) - * [Swiftkerbauth Guide](swiftkerbauth/swiftkerbauth_guide.md) - -## Keystone ## -The Standard Openstack authentication service - -### Overview ### -[Keystone](https://wiki.openstack.org/wiki/Keystone) is the identity -service for OpenStack, used for authentication and authorization when -interacting with OpenStack services. - -Configuring gluster-swift to authenticate against keystone is thus -very useful because allows users to access a gluster-swift storage -using the same credentials used for all other OpenStack services. - -Currently, gluster-swift has a strict mapping of one account to a -GlusterFS volume, and this volume has to be named after the **tenant -id** (aka **project id**) of the user accessing it. - -### Installation ### - -Keystone authentication is performed using the -[swift.common.middleware.keystone](http://docs.openstack.org/developer/swift/middleware.html#module-swift.common.middleware.keystoneauth) -which is part of swift itself. It depends on keystone python APIs, -contained in the package `python-keystoneclient`. - -You can install `python-keystoneclient` from the packages of your -distribution running: - - * on Ubuntu: - - sudo apt-get install python-keystoneclient - - * on Fedora: - - sudo yum install python-keystoneclient - -otherwise you can install it via pip: - - sudo pip install python-keystoneclient - -### Configuration of the proxy-server ### - -You only need to configure the proxy-server in order to enable -keystone authentication. The configuration is no different from what -is done for a standard swift installation (cfr. for instance the -related -[swift documentation](http://docs.openstack.org/developer/swift/overview_auth.html#keystone-auth)), -however we report it for completeness. 
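Note: since the guide above states that each account volume must be named after the Keystone tenant (project) id, that volume has to exist and be registered before the proxy configuration below will serve it. A rough sketch of that step, assuming a tenant named `demo`, an illustrative GlusterFS node and brick path, and the old-style `keystone` CLI used elsewhere in this guide (the output parsing may need adjusting for other client versions):

~~~
# Look up the tenant (project) id and create a GlusterFS volume named after it.
TENANT_ID=$(keystone tenant-list | awk '/ demo / {print $2}')
gluster volume create "$TENANT_ID" rhs-node1:/bricks/"$TENANT_ID"
gluster volume start "$TENANT_ID"
# gluster-swift-gen-builders rebuilds the rings from scratch, so list every
# volume that should remain accessible, not just the new one.
gluster-swift-gen-builders "$TENANT_ID"
swift-init main restart
~~~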
- -In the configuration file of the proxy server (usually -`/etc/swift/proxy-server.conf`) you must modify the main pipeline and -add `authtoken` and `keystoneauth`: - - Was: -~~~ -[pipeline:main] -pipeline = catch_errors healthcheck cache ratelimit tempauth proxy-server -~~~ - Change To: -~~~ -[pipeline:main] -pipeline = catch_errors healthcheck cache ratelimit authtoken keystoneauth proxy-server -~~~ - -(note that we also removed `tempauth`, although this is not necessary) - -Add configuration for the `authtoken` middleware by adding the following section: - - [filter:authtoken] - paste.filter_factory = keystone.middleware.auth_token:filter_factory - auth_host = KEYSTONE_HOSTNAME - auth_port = 35357 - auth_protocol = http - auth_uri = http://KEYSTONE_HOSTNAME:5000/ - admin_tenant_name = TENANT_NAME - admin_user = SWIFT_USERNAME - admin_password = SWIFT_PASSWORD - include_service_catalog = False - -`SWIFT_USERNAME`, `SWIFT_PASSWORD` and `TENANT_NAME` will be used by -swift to get an admin token from `KEYSTONE_HOSTNAME`, used to -authorize user tokens so they must match an user in keystone with -administrative privileges. - -Add configuration for the `keystoneauth` middleware: - - [filter:keystoneauth] - use = egg:swift#keystoneauth - # Operator roles is the role which user would be allowed to manage a - # tenant and be able to create container or give ACL to others. - operator_roles = Member, admin - -Restart the `proxy-server` service. - -### Configuring keystone endpoint ### - -In order to be able to use the `swift` command line you also need to -configure keystone by adding a service and its relative endpoint. Up -to date documentation can be found in the OpenStack documentation, but -we report it here for completeness: - -First of all create the swift service of type `object-store`: - - $ keystone service-create --name=swift \ - --type=object-store --description="Swift Service" - +-------------+---------------------------------+ - | Property | Value | - +-------------+----------------------------------+ - | description | Swift Service | - | id | 272efad2d1234376cbb911c1e5a5a6ed | - | name | swift | - | type | object-store | - +-------------+----------------------------------+ - -and use the `id` of the service you just created to create the -corresponding endpoint: - - $ keystone endpoint-create \ - --region RegionOne \ - --service-id= \ - --publicurl 'http://:8080/v1/AUTH_$(tenant_id)s' \ - --internalurl 'http://:8080/v1/AUTH_$(tenant_id)s' \ - --adminurl 'http://:8080/v1' - -Now you should be able to use the swift command line to list the containers of your account with: - - $ swift --os-auth-url http://:5000/v2.0 \ - -U : -K list - -to create a container - - $ swift --os-auth-url http://:5000/v2.0 \ - -U : -K post mycontainer - -and upload a file - - $ swift --os-auth-url http://:5000/v2.0 \ - -U : -K upload - -## GSwauth ## - -### Overview ### -An easily deployable GlusterFS aware authentication service based on [Swauth](http://gholt.github.com/swauth/). -GSwauth is a WSGI Middleware that uses Swift itself as a backing store to -maintain its metadata. - -This model has the benefit of having the metadata available to all proxy servers -and saving the data to a GlusterFS volume. To protect the metadata, the GlusterFS -volume should only be able to be mounted by the systems running the proxy servers. - -Currently, gluster-swift has a strict mapping of one account to a GlusterFS volume. -Future releases, this will be enhanced to support multiple accounts per GlusterFS -volume. 
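Note: one way to keep the metadata volume mountable only by the proxy nodes, as recommended above, is GlusterFS's `auth.allow` option. A small sketch, assuming the `gsmetadata` volume created in the installation steps below and purely illustrative proxy addresses:

~~~
# Restrict native-protocol mounts of the metadata volume to the proxy nodes.
gluster volume set gsmetadata auth.allow 192.168.56.60,192.168.56.61
# Once auth.allow is set, clients outside the list are rejected.
gluster volume info gsmetadata | grep auth.allow
~~~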
- -See for more information on Swauth. - -### Installing GSwauth ### - -1. GSwauth is installed by default with Gluster-Swift. - -1. Create and start the `gsmetadata` gluster volume -~~~ -gluster volume create gsmetadata : -gluster volume start gsmetadata -~~~ - -1. run `gluster-swift-gen-builders` with all volumes that should be - accessible by gluster-swift, including `gsmetadata` -~~~ -gluster-swift-gen-builders gsmetadata -~~~ - -1. Change your proxy-server.conf pipeline to have gswauth instead of tempauth: - - Was: -~~~ -[pipeline:main] -pipeline = catch_errors cache tempauth proxy-server -~~~ - Change To: -~~~ -[pipeline:main] -pipeline = catch_errors cache gswauth proxy-server -~~~ - -1. Add to your proxy-server.conf the section for the GSwauth WSGI filter: -~~~ -[filter:gswauth] -use = egg:gluster_swift#gswauth -set log_name = gswauth -super_admin_key = gswauthkey -metadata_volume = gsmetadata -auth_type = sha1 -auth_type_salt = swauthsalt -token_life = 86400 -max_token_life = 86400 -~~~ - -1. Restart your proxy server ``swift-init proxy reload`` - -##### Advanced options for GSwauth WSGI filter: - -* `default-swift-cluster` - default storage-URL for newly created accounts. When attempting to authenticate with a user for the first time, the return information is the access token and the storage-URL where data for the given account is stored. - -* `token_life` - set default token life. The default value is 86400 (24hrs). - -* `max_token_life` - The maximum token life. Users can set a token lifetime when requesting a new token with header `x-auth-token-lifetime`. If the passed in value is bigger than the `max_token_life`, then `max_token_life` will be used. - -### User Roles -There are only three user roles in GSwauth: - -* A regular user has basically no rights. He needs to be given both read/write priviliges to any container. -* The `admin` user is a super-user at the account level. This user can create and delete users for the account they are members and have both write and read priviliges to all stored objects in that account. -* The `reseller admin` user is a super-user at the cluster level. This user can create and delete accounts and users and has read/write priviliges to all accounts under that cluster. - - -| Role/Group | get list of accounts | get Acccount Details (users, etc)| Create Account | Delete Account | Get User Details | Create admin user | Create reseller-admin user | Create regular user | Delete admin user | Delete reseller-admin user | Delete regular user | Set Service Endpoints | Get Account Groups | Modify User | -| ----------------------- |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:| -| .super_admin (username) |x|x|x|x|x|x|x|x|x|x|x|x|x|x| -| .reseller_admin (group) |x|x|x|x|x|x| |x|x| |x|x|x|x| -| .admin (group) | |x| | |x|x| |x|x| |x| |x|x| -| regular user (type) | | | | | | | | | | | | | | | - - -### GSwauth Tools -GSwauth provides cli tools to facilitate managing accounts and users. All tools have some options in common: - -#### Common Options: -* -A, --admin-url: The URL to the auth - * Default: `http://127.0.0.1:8080/auth/` -* -U, --admin-user: The user with admin rights to perform action - * Default: `.super_admin` -* -K, --admin-key: The key for the user with admin rights to perform action - * no default value - -#### gswauth-prep: -Prepare the gluster volume where gswauth will save its metadata. 
- -~~~ -gswauth-prep [option] -~~~ - -Example: - -~~~ -gswauth-prep -A http://10.20.30.40:8080/auth/ -K gswauthkey -~~~ - -#### gswauth-add-account: -Create account. Currently there's a requirement that an account must map to a gluster volume. The gluster volume must not exist at the time when the account is being created. - -~~~ -gswauth-add-account [option] -~~~ - -Example: - -~~~ -gswauth-add-account -K gswauthkey -~~~ - -#### gswauth-add-user: -Create user. If the provided account does not exist, it will be automatically created before creating the user. -Use the `-r` flag to create a reseller admin user and the `-a` flag to create an admin user. To change the password or make the user an admin, just run the same command with the new information. - -~~~ -gswauth-add-user [option] -~~~ - -Example: - -~~~ -gswauth-add-user -K gswauthkey -a test ana anapwd -~~~ - -**Change password examples** - -Command to update password/key of regular user: - -~~~ -gswauth-add-user -U account1:user1 -K old_pass account1 user1 new_pass -~~~ - -Command to update password/key of account admin: - -~~~ -gswauth-add-user -U account1:admin -K old_pass -a account1 admin new_pass -~~~ - -Command to update password/key of reseller_admin: - -~~~ -gswauth-add-user -U account1:radmin -K old_pass -r account1 radmin new_pass -~~~ - -#### gswauth-delete-account: -Delete an account. An account cannot be deleted if it still contains users, an error will be returned. - -~~~ -gswauth-delete-account [option] -~~~ - -Example: - -~~~ -gswauth-delete-account -K gswauthkey test -~~~ - -#### gswauth-delete-user: -Delete a user. - -~~~ -gswauth-delete-user [option] -~~~ - -Example: - -~~~ -gswauth-delete-user -K gswauthkey test ana -~~~ - -#### gswauth-set-account-service: -Sets a service URL for an account. Can only be set by a reseller admin. -This command can be used to changed the default storage URL for a given account. -All accounts have the same storage-URL default value, which comes from the `default-swift-cluster` -option. - -~~~ -gswauth-set-account-service [options] -~~~ - -Example: - -~~~ -gswauth-set-account-service -K gswauthkey test storage local http://newhost:8080/v1/AUTH_test -~~~ - -#### gswauth-list: -List information about accounts and users - -* If `[account]` and `[user]` are omitted, a list of accounts will be output. -* If `[account]` is included but not `[user]`, a list of users within the account will be output. -* If `[account]` and `[user]` are included, a list of groups the user belongs to will be ouptput. -* If the `[user]` is `.groups`, the active groups for the account will be listed. - -The default output format is tabular. `-p` changes the output to plain text. `-j` changes the -output to JSON format. This will print all information about given account or user, including -stored password - -~~~ -gswauth-list [options] [account] [user] -~~~ - -Example: - -~~~ -gswauth-list -K gswauthkey test ana -+----------+ -| Groups | -+----------+ -| test:ana | -| test | -| .admin | -+----------+ -~~~ - -#### gswauth-cleanup-tokens: -Delete expired tokens. Users also have the option to provide the expected life of tokens, delete all tokens or all tokens for a given account. - -Options: - -* `-t`, `--token-life`: The expected life of tokens, token objects modified more than this number of -seconds ago will be checked for expiration (default: 86400). -* `--purge`: Purge all tokens for a given account whether the tokens have expired or not. 
-* `--purge-all`: Purges all tokens for all accounts and users whether the tokens have expired or not. - -~~~ -gswauth-cleanup-tokens [options] -~~~ - -Example: - -~~~ -gswauth-cleanup-tokens -K gswauthkey --purge test -~~~ - -### Authenticating a user with swift client -There are two methods of accessing data using the swift client. The first (and most simple one) is by providing the user name and password everytime. The swift client takes care of acquiring the token from gswauth. See example below: - -~~~ -swift -A http://127.0.0.1:8080/auth/v1.0 -U test:ana -K anapwd upload container1 README.md -~~~ - -The second method is a two-step process, but it allows users to only provide their username and password once. First users must authenticate with a username and password to get a token and the storage URL. Then, users can make the object requests to the storage URL with the given token. - -It is important to remember that tokens expires, so the authentication process needs to be repeated every so often. - -Authenticate a user with the curl command - -~~~ -curl -v -H 'X-Storage-User: test:ana' -H 'X-Storage-Pass: anapwd' -k http://localhost:8080/auth/v1.0 -... -< X-Auth-Token: AUTH_tk7e68ef4698f14c7f95af07ab7b298610 -< X-Storage-Url: http://127.0.0.1:8080/v1/AUTH_test -... -~~~ -Now, the user can access the object-storage using the swift client with the given token and storage URL - -~~~ -bash-4.2$ swift --os-auth-token=AUTH_tk7e68ef4698f14c7f95af07ab7b298610 --os-storage-url=http://127.0.0.1:8080/v1/AUTH_test upload container1 README.md -README.md -bash-4.2$ -bash-4.2$ swift --os-auth-token=AUTH_tk7e68ef4698f14c7f95af07ab7b298610 --os-storage-url=http://127.0.0.1:8080/v1/AUTH_test list container1 -README.md -~~~ -**Note:** Reseller admins must always use the second method to acquire a token, in order to be given access to other accounts different than his own. The first method of using the username and password will give them access only to their own accounts. - -## Swiftkerbauth ## -Kerberos authentication filter - -Carsten Clasohm implemented a new authentication filter for swift -that uses Kerberos tickets for single sign on authentication, and -grants administrator permissions based on the users group membership -in a directory service like Red Hat Enterprise Linux Identity Management -or Microsoft Active Directory. diff --git a/doc/markdown/concepts.md b/doc/markdown/concepts.md deleted file mode 100644 index 2ad3f25..0000000 --- a/doc/markdown/concepts.md +++ /dev/null @@ -1,2 +0,0 @@ -# Overview and Concepts -TBD diff --git a/doc/markdown/object-expiration.md b/doc/markdown/object-expiration.md deleted file mode 100644 index a61818a..0000000 --- a/doc/markdown/object-expiration.md +++ /dev/null @@ -1,75 +0,0 @@ -# Object Expiration - -## Contents -* [Overview](#overview) -* [Setup](#setup) -* [Using object expiration](#using) -* [Running object-expirer daemon](#running-daemon) - - -## Overview -The Object Expiration feature offers **scheduled deletion of objects**. The client would use the *X-Delete-At* or *X-Delete-After* headers during an object PUT or POST and the cluster would automatically quit serving that object at the specified time and would shortly thereafter remove the object from the GlusterFS volume. - -Expired objects however do appear in container listings until they are deleted by object-expirer daemon. 
This behaviour is expected: https://bugs.launchpad.net/swift/+bug/1069849 - - -## Setup -Object expirer uses a seprate account (a GlusterFS volume, for now, until multiple accounts per volume is implemented) named *gsexpiring*. You will have to [create a GlusterFS volume](quick_start_guide.md#gluster-volume-setup) by that name. - -Object-expirer uses the */etc/swift/object-expirer.conf* configuration file. Make sure that it exists. If not, you can copy it from */etc* directory of gluster-swift source repo. - - -## Using object expiration - -**PUT an object with X-Delete-At header using curl** - -~~~ -curl -v -X PUT -H 'X-Delete-At: 1392013619' http://127.0.0.1:8080/v1/AUTH_test/container1/object1 -T ./localfile -~~~ - -**PUT an object with X-Delete-At header using swift client** - -~~~ -swift --os-auth-token=AUTH_tk99a39aecc3dd4f80b2b1e801d00df846 --os-storage-url=http://127.0.0.1:8080/v1/AUTH_test upload container1 ./localfile --header 'X-Delete-At: 1392013619' -~~~ - -where *X-Delete-At* header takes a Unix Epoch timestamp in integer. For example, the current time in Epoch notation can be found by running this command: - -~~~ -date +%s -~~~ - - -**PUT an object with X-Delete-After header using curl** - -~~~ -curl -v -X PUT -H 'X-Delete-After: 3600' http://127.0.0.1:8080/v1/AUTH_test/container1/object1 -T ./localfile -~~~ - -**PUT an object with X-Delete-At header using swift client** - -~~~ -swift --os-auth-token=AUTH_tk99a39aecc3dd4f80b2b1e801d00df846 --os-storage-url=http://127.0.0.1:8080/v1/AUTH_test upload container1 ./localfile --header 'X-Delete-After: 3600' -~~~ - -where *X-Delete-After* header takes a integer number of seconds, after which the object expires. The proxy server that receives the request will convert this header into an X-Delete-At header using its current time plus the value given. - - -## Running object-expirer daemon -The object-expirer daemon runs a pass once every X seconds (configurable using *interval* option in config file). For every pass it makes, it queries the *gsexpiring* account for "tracker objects". Based on (timestamp, path) present in name of "tracker objects", object-expirer then deletes the actual object and the corresponding tracker object. - - -To run object-expirer forever as a daemon: -~~~ -swift-init object-expirer start -~~~ - -To run just once: -~~~ -swift-object-expirer -o -v /etc/swift/object-expirer.conf -~~~ - -**For more information, visit:** -http://docs.openstack.org/developer/swift/overview_expiring_objects.html - - diff --git a/doc/markdown/openstack_swift_sync.md b/doc/markdown/openstack_swift_sync.md deleted file mode 100644 index ba2fadb..0000000 --- a/doc/markdown/openstack_swift_sync.md +++ /dev/null @@ -1,67 +0,0 @@ -# Syncing Gluster-Swift with Swift - -## Create a release -Create a release in launchpad.net so that we can place the latest swift source for download. We'll place the source here, and it will allow tox in gluster-swift to download the latest code. - -## Upload swift release - -* Clone the git swift repo -* Go to the release tag or just use the latest -* Type the following to package the swift code: - -``` -$ python setup.py sdist -$ ls dist -``` - -* Take the file in the `dist` directory and upload it to the new release we created it on launchpad.net. -* Alternatively, if we are syncing with a Swift version which is already released, we can get the tar.gz file from Swift launchpad page and upload the same to gluster-swift launchpad. 
- -## Setup Tox -Now that the swift source is availabe on launchpad.net, copy its link location and update tox.ini in gluster-swift with the new link. - -## Update tests -This part is a little more complicated and now we need to *merge* the latest tests with ours. - -[meld](http://meldmerge.org/) is a great tool to make this work easier. The 3-way comparison feature of meld comes handy to compare 3 version of same file from: - -* Latest swift (say v1.13) -* Previous swift (say v1.12) -* gluster-swift (v1.12) - -Files that need to be merged: - -* Update unit tests - -``` -$ export SWIFTDIR=../swift -$ meld $SWIFTDIR/tox.ini tox.ini -$ meld $SWIFTDIR/test-requirements.txt tools/test-requires -$ meld $SWIFTDIR/requirements.txt tools/requirements.txt -$ meld $SWIFTDIR/test/unit/proxy/test_servers.py test/unit/proxy/test_server.py -$ cp $SWIFTDIR/test/unit/proxy/controllers/*.py test/unit/proxy/controllers -$ meld $SWIFTDIR/test/unit/__init__.py test/unit/__init__.py -``` - -* Update all the functional tests -First check if there are any new files in the swift functional test directory. If there are, copy them over. - -* Remember to `git add` any new files - -* Now merge the existing ones: - -``` -for i in $SWIFTDIR/test/functional/*.py ; do - meld $i test/functional/`basename $i` -done -``` - -## Update the version -If needed, update the version now in `gluster/swift/__init__.py`. - -## Upload the patch -Upload the patch to Gerrit. - -## Update the release in launchpad.net -Upload the gluster-swift*.tar.gz built by Jenkins to launchpad.net once the fix has been commited to the main branch. - diff --git a/doc/markdown/swiftkerbauth/AD_client.md b/doc/markdown/swiftkerbauth/AD_client.md deleted file mode 100644 index 0947a1e..0000000 --- a/doc/markdown/swiftkerbauth/AD_client.md +++ /dev/null @@ -1,206 +0,0 @@ -#AD client setup guide - -###Contents -* [Setup Overview] (#setup) -* [Configure Network] (#network) -* [Installing AD Client] (#AD-client) - - -###Setup Overview - -This guide talks about adding fedora linux client to windows domain. -The test setup included a client machine with Fedora 19 installed -on it with all the latest packages updated. The crux is to add this linux -machine to Windows Domain. This linux box is expected to act as RHS node and on which swiftkerbauth, -apachekerbauth code would run. - -Set hostname (FQDN) to fcclient.winad.com - - # hostnamectl set-hostname "fcclient.winad.com" - - # hostname "fcclient.winad.com" - - - -### Configure client - -* Deploy Fedora linux 19. - -* Update the system with latest packages. - -* Configure SELinux security parameters. - -* Install & configure samba - -* Configure DNS - -* Synchronize the time services - -* Join Domain - -* Install / Configure Kerberos Client - - -The document assumes the installing Fedora Linux and configuring SELinux -parameters to 'permissive' is known already. - -###Install & Configure Samba: - # yum -y install samba samba-client samba-common samba-winbind - samba-winbind-clients - - # service start smb - - # ps -aef | grep smb - # chkconfig smb on - -###Synchronize time services -The kerberos authentication and most of the DNS functionality could fail with -clock skew if times are not synchronized. - - # cat /etc/ntp.conf - server ns1.bos.redhat.com - server 10.5.26.10 - - # service ntpd stop - - # ntpdate 10.16.255.2 - - # service ntpd start - - #chkconfig ntpd on - -Check if Windows server in the whole environment is also time synchronized with -same source. 
- - # C:\Users\Administrator>w32tm /query /status | find "Source" - - Source: ns1.xxx.xxx.com - -###Configure DNS on client -Improperly resolved hostname is the leading cause in authentication failures. -Best practice is to configure fedora client to use Windows DNS. -'nameserver' below is the IP address of the windows server. - # cat /etc/resolve.conf - domain server.winad.com - search server.winad.com - nameserver 10.nn.nnn.3 - -###Set the hostname of the client properly (FQDN) - # cat /etc/sysconfig/network - HOSTNAME=fcclient.winad.com - - -###Install & Configure kerberos client - - # yum -y install krb5-workstation - -Edit the /etc/krb5.conf as follows: - - # cat /etc/krb5.conf - [logging] - default = FILE:/var/log/krb5libs.log - kdc = FILE:/var/log/krb5kdc.log - admin_server = FILE:/var/log/kadmind.log - - [libdefaults] - default_realm = WINAD.COM - dns_lookup_realm = false - dns_lookup_kdc = false - ticket_lifetime = 24h - renew_lifetime = 7d - forwardable = true - - [realms] - WINAD.COM = { - kdc = server.winad.com - admin_server = server.winad.com - } - [domain_realm] - .demo = server.winad.com - demo = server.winad.com - -###Join Domain -Fire command 'system-config-authentication' on client. This should display a -graphical wizard. Below inputs would help configure this wizard. - - - User account data base = winbind - - winbind domain = winad - - security model = ads - - winbind ads realm = winad.com - - winbind controller = server.winad.com - - template shell = /bin/bash - - let the other options be as is to default. - - Perform Join domain and appy settings and quit. Please note this join should - not see any errors. This makes the client fedora box to join the windows - domain. - -###Configure the kerberos client -This would bring the users/groups from Windows Active directory to this -fedora client. - -Edit /etc/samba/smb.conf file to have below parameters in the global section. - - # cat /etc/samba/smb.conf - [global] - workgroup = winad - realm = winad.com - server string = Samba Server Version %v - security = ADS - allow trusted domains = No - password server = server.winad.com - log file = /var/log/samba/log.%m - max log size = 50 - idmap uid = 10000­19999 - idmap gid = 10000­19999 - template shell = /bin/bash - winbind separator = + - winbind use default domain = Yes - idmap config REFARCH­AD:range = 10000000­19999999 - idmap config REFARCH­AD:backend = rid - cups options = raw - - - # service smb stop - - # service winbind stop - - # tar -cvf /var/tmp/samba-cache-backup.tar /var/lib/samba - - # ls -la /var/tmp/samba-cache-backup.tar - - # rm ­-f /var/lib/samba/* - - -Verify that no kerberos ticket available and cached. - - # kdestroy - - # klist - -Rejoin the domain. - - # net join -S server -U Administrstor - -Test that client rejoined the domain. - - # net ads info - -Restart smb and winbind service. - - # wbinfo --domain-users - -Perform kinit for the domain users prepared on active directory. 
This is obtain -the kerberos ticket for user 'auth_admin' - - # kinit auth_admin - - # id -Gn auth_admin - -###Notes -Obtaining the HTTP service principal & keytab file and installing it with -swiftkerbauth is added to swiftkerbauth_guide - -###References -Reference Document for adding Linux box to windows domain : -Integrating Red Hat Enterprise Linux 6 -with Active Directory diff --git a/doc/markdown/swiftkerbauth/AD_server.md b/doc/markdown/swiftkerbauth/AD_server.md deleted file mode 100644 index 66d90f2..0000000 --- a/doc/markdown/swiftkerbauth/AD_server.md +++ /dev/null @@ -1,119 +0,0 @@ -#Windows Active Directory & Domain Controller Server Guide - -###Contents -* [Setup Overview] (#Setup) -* [Installing Active Directory Services] (#AD-server) -* [Configuring DNS] (#DNS) -* [Adding Users and Groups] (#users-groups) - - - -###Setup Overview - -The setup includes a server machine installed with Windows 2008 R2 Server, with -Domain Controller, Active Directory services & DNS server installed alongwith. -The steps to install windows operating system and above servers can be found -on MicroSoft Documentation. This windows Active Directory server would act as an -authentication server in the whole setup. This would provide the access control -and permissions for users on certain data objects. - - -Windows 2008 R2 deployment: - -http://technet.microsoft.com/en-us/library/dd283085.aspx - - -Configuring Active Directory, Domain Services, DNS server: - -http://technet.microsoft.com/en-us/library/cc770946.aspx - - - -###Installing AD Server - -Administrators need to follow simple instructions in Server Manager on Windows -2008, and should add Active Directory Domain Services & DNS server. It is -recommended to use static IP for DNS server. Preferred Hostname(FQDN) for -Windows server could be of format hostname 'server.winad.com' where -'winad.com' is a domain name. - -Following tips would help prepare a test setup neatly. - - - Select Active Directory Domain services wizard in Server Manager - - Move on to install it with all the pre-requisits, e.g. .NET framework etc. - - Configure Active directory after installtion via exapanding the 'Roles' - section in the server manager. - - Create a new Domain in the New Forest. - - Type the FQDN, winad.com - - Set Forest functional level Windows 2008 R2. - - Selct additional options for this domain controller as DNS server. - - Leave the log locations to default provided by wizard. - - Set the Administrator Password carefully. - - Thats it. You are done configuring active directory. - - - -###Configuring DNS - -This section explains configuring the DNS server installed on Windows 2008 R2 -server. You must know know about - - - Forward lookup zone - - - Reverse lookup zone - - - Zone type - -A forward lookup zone is simply a way to resolve hostnames to IP address. -A reverse lookup zone is to lookup DNS hostname of the host IP. - -Following tips would help configure the Zones on DNS server. - - - Create a Forward lookup zone. - - Create it a primary zone. - - Add the Clients using their ip addresses and FQDN to this forward lookup - zones. - - This would add type 'A' record for that host on DNS server. - - Similarly create a Reverser lookup zone. - - Add clients 'PTR' record to this zone via browsing through the forward - zones clients. - -The above setup can be tested on client once it joins the domain using 'dig' -command as mentioned below. - - -On client: - - # dig fcclient.winad.com - This should yield you a Answer section mentioning its IP address. 
- - Reverse lookup can be tested using - - # 'dig -t ptr 101.56.168.192.in-addr.arpa.' - The answer section should state the FQDN of the client. - - Repeat the above steps on client for Windows AD server as well. - - - -###Adding users and groups - -The following convention is to be followed in creating group names: - - \_ - - \_ - -As of now, account=volume=group - -For example: - - AUTH\_test - -Adding groups and users to the Windows domain is easy task. - - - Start -> Administrative Tools -> Active Directory Users & Computers - - Expand the domain name which was prepared earlier. e.g winad.com - - Add groups with appropreate access rights. - - Add users to the group with appropreate permissions. - - Make sure you set password for users prepared on AD server. diff --git a/doc/markdown/swiftkerbauth/architecture.md b/doc/markdown/swiftkerbauth/architecture.md deleted file mode 100644 index fc6d764..0000000 --- a/doc/markdown/swiftkerbauth/architecture.md +++ /dev/null @@ -1,105 +0,0 @@ -# Architecture - -The Swift API is HTTP-based. As described in the Swift documentation -[1], clients first make a request to an authentication URL, providing -a username and password. The reply contains a token which is used in -all subsequent requests. - -Swift has a chain of filters through which all client requests go. The -filters to use are configured with the pipeline parameter in -/etc/swift/proxy-server.conf: - - [pipeline:main] - pipeline = healthcheck cache tempauth proxy-server - -For the single sign authentication, we added a new filter called -"kerbauth" and put it into the filter pipeline in place of tempauth. - -The filter checks the URL for each client request. If it matches the -authentication URL, the client is redirected to a URL on a different -server (on the same machine). The URL is handled by a CGI script, which -is set up to authenticate the client with Kerberos negotiation, retrieve -the user's system groups [2], store them in a memcache ring shared with -the Swift server, and return the authentication token to the client. - -When the client provides the token as part of a resource request, the -kerbauth filter checks it against its memcache, grants administrator -rights based on the group membership retrieved from memcache, and -either grants or denies the resource access. - -[1] http://docs.openstack.org/api/openstack-object-storage/1.0/content/authentication-object-dev-guide.html - -[2] The user data and system groups are usually provided by Red Hat - Enterprise Linux identity Management or Microsoft Active - Directory. The script relies on the system configuration to be set - accordingly (/etc/nsswitch.conf). - -***** - -## kerbauth.py - -The script kerbauth.py began as a copy of the tempauth.py script from -from tempauth middleware. It contains the following modifications, among -others: - -In the __init__ method, we read the ext_authentication_url parameter -from /etc/swift/proxy-server.conf. This is the URL that clients are -redirected to when they access either the Swift authentication URL, or -when they request a resource without a valid authentication token. - -The configuration in proxy-server.conf looks like this: - - [filter:kerbauth] - use = egg:swiftkerbauth#kerbauth - ext_authentication_url = http://client.rhelbox.com/cgi-bin/swift-auth - -The authorize method was changed so that global administrator rights -are granted if the user is a member of the auth_reseller_admin -group. 
Administrator rights for a specific account like vol1 are -granted if the user is a member of the auth_vol1 group. [3] - -The denied_response method was changed to return a HTTP redirect to -the external authentication URL if no valid token was provided by the -client. - -Most of the handle_get_token method was moved to the external -authentication script. This method now returns a HTTP redirect. - -In the __call__ and get_groups method, we removed support for the -HTTP_AUTHORIZATION header, which is only needed when Amazon S3 is -used. - -Like tempauth.py, kerbauth.py uses a Swift wrapper to access -memcache. This wrapper converts the key to an MD5 hash and uses the -hash value to determine on which of a pre-defined list of servers to -store the data. - -[3] "auth" is the default reseller prefix, and would be different if - the reseller_prefix parameter in proxy-server.conf was set. - -## swift-auth CGI script - -swift-auth resides on an Apache server and assumes that Apache is -configured to authenticate the user before this script is -executed. The script retrieves the username from the REMOTE_USER -environment variable, and checks if there already is a token for this -user in the memcache ring. If not, it generates a new one, retrieves -the user's system groups with "id -Gn USERNAME", stores this -information in the memcache ring, and returns the token to the client. - -To allow the CGI script to connect to memcache, the SELinux booleans -httpd_can_network_connect and httpd_can_network_memcache had to be -set. - -The tempauth filter uses the uuid module to generate token -strings. This module creates and runs temporary files, which leads to -AVC denial messages in /var/log/audit/audit.log when used from an -Apache CGI script. While the module still works, the audit log would -grow quickly. Instead of writing an SELinux policy module to allow or -to silently ignore these accesses, the swift-auth script uses the -"random" module for generating token strings. - -Red Hat Enterprise Linux 6 comes with Python 2.6 which only provides -method to list the locally defined user groups. To include groups from -Red Hat Enterprise Linux Identity Management and in the future from -Active Directory, the "id" command is run in a subprocess. diff --git a/doc/markdown/swiftkerbauth/ipa_client.md b/doc/markdown/swiftkerbauth/ipa_client.md deleted file mode 100644 index f6afc42..0000000 --- a/doc/markdown/swiftkerbauth/ipa_client.md +++ /dev/null @@ -1,80 +0,0 @@ -#IPA Client Guide - -##Contents -* [Setup Overview] (#setup) -* [Configure Network] (#network) -* [Installing IPA Client] (#ipa-client) - - -##Setup Overview -We have used a F18 box as IPA client machine and used FreeIPA client. -This document borrows instructions from the following more detailed guide. 
-[RHEL 6 Identity Management Guide][] - - - -## Configure network - -Set hostname (FQDN) to client.rhelbox.com -> hostnamectl set-hostname "client.rhelbox.com" -> -> hostname "client.rhelbox.com" - -Add following to /etc/sysconfig/network: - - HOSTNAME=client.rhelbox.com - -Add the following to /etc/hostname - - client.rhelbox.com - -Add the following to /etc/hosts - - 192.168.56.110 server.rhelbox.com server - 192.168.56.101 client.rhelbox.com client - -Logout and login again and verify hostname : -> hostname --fqdn - -Edit */etc/resolv.conf* to add this at beginning of file - - nameserver 192.168.56.110 - -Warning: NetworkManager changes resolv.conf on restart - -Turn off firewall -> service iptables stop -> -> chkconfig iptables off - - -## Installing IPA Client - -Install IPA client packages: - -For RHEL: -> yum install ipa-client ipa-admintools - -For Fedora: -> yum install freeipa-client freeipa-admintools - -Install IPA client and add to domain: ->ipa-client-install --enable-dns-updates - - Discovery was successful! - Hostname: client.rhelbox.com - Realm: RHELBOX.COM - DNS Domain: rhelbox.com - IPA Server: server.rhelbox.com - BaseDN: dc=rhelbox,dc=com - - Continue to configure the system with these values? [no]: yes - User authorized to enroll computers: admin - -Check if client is configured correctly: -> kinit admin -> -> getent passwd admin - - -[RHEL 6 Identity Management Guide]: https://access.redhat.com/site/documentation/en-US/Red_Hat_Enterprise_Linux/6/html/Identity_Management_Guide/ diff --git a/doc/markdown/swiftkerbauth/ipa_server.md b/doc/markdown/swiftkerbauth/ipa_server.md deleted file mode 100644 index 55e654e..0000000 --- a/doc/markdown/swiftkerbauth/ipa_server.md +++ /dev/null @@ -1,146 +0,0 @@ -#IPA Server Guide - -##Contents -* [Setup Overview] (#setup) -* [Configure Network] (#network) -* [Installing IPA Server] (#ipa-server) -* [Configuring DNS] (#dns) -* [Adding Users and Groups] (#users-groups) - - - -##Setup Overview -We have used a RHEL 6.4 box as IPA and DNS server. This document borrows -instructions from the following more detailed guide. -[RHEL 6 Identity Management Guide][] - - - -## Configure network - -Change hostname (FQDN) to server.rhelbox.com -> hostname "server.rhelbox.com" - -Add following to */etc/sysconfig/network* file - - HOSTNAME=server.rhelbox.com - -Add the following to */etc/hosts* file - - 192.168.56.110 server.rhelbox.com server - 192.168.56.101 client.rhelbox.com client - -Logout and login again and verify new hostname -> hostname --fqdn - -Turn off firewall -> service iptables stop -> -> chkconfig iptables off - - - -## Installing IPA Server - -Install IPA server packages and DNS dependencies -> yum install ipa-server bind bind-dyndb-ldap - -Run the following interactive setup to install IPA server with DNS -> ipa-server-install --setup-dns - - The IPA Master Server will be configured with: - Hostname: server.rhelbox.com - IP address: 192.168.56.110 - Domain name: rhelbox.com - Realm name: RHELBOX.COM - - BIND DNS server will be configured to serve IPA domain with: - Forwarders: No forwarders - Reverse zone: 56.168.192.in-addr.arpa. - -The installation may take some time. 
- -Check if IPA is installed correctly : -> kinit admin -> -> ipa user-find admin - - - -## Configuring DNS - -Edit */etc/resolv.conf* to add this at beginning of file : - - nameserver 192.168.56.110 - -Warning: NetworkManager changes resolv.conf on restart - -Add a DNS A record and PTR record for the client under rhelbox.com zone -> ipa dnsrecord-add rhelbox.com client --a-rec=192.168.56.101 --a-create-reverse - -Check if DNS resolution is working by running : - -> dig server.rhelbox.com - - ;; ANSWER SECTION: - server.rhelbox.com. 1200 IN A 192.168.56.110 - -> dig client.rhelbox.com - - ;; ANSWER SECTION: - client.rhelbox.com. 86400 IN A 192.168.56.101 - -Check if reverse resolution works : - -> dig -t ptr 101.56.168.192.in-addr.arpa. - - ;; ANSWER SECTION: - 101.56.168.192.in-addr.arpa. 86400 IN PTR client.rhelbox.com. - - -> dig -t ptr 110.56.168.192.in-addr.arpa. - - ;; ANSWER SECTION: - 110.56.168.192.in-addr.arpa. 86400 IN PTR server.rhelbox.com. - - - -## Adding users and groups - -The following convention is to be followed in creating group names: - - \_ - - \_ - -As of now, account=volume=group - -For example: - - AUTH\_test - -Create *auth_reseller_admin* user group -> ipa group-add auth_reseller_admin --desc="Full access to all Swift accounts" - -Create *auth_rhs_test* user group -> ipa group-add auth_rhs_test --desc="Full access to rhs_test account" - -Create user *auth_admin* user as member of *auth_reseller_admin* user group -> ipa user-add auth_admin --first=Auth --last=Admin --password -> -> ipa group-add-member auth_reseller_admin --users=auth_admin - -Create user *rhs_test_admin* as member of *auth_rhs_test* user group -> ipa user-add rhs_test_admin --first=RHS --last=Admin --password -> -> ipa group-add-member auth_rhs_test --users=rhs_test_admin - -Create user *jsmith* with no relevant group membership -> ipa user-add rhs_test_admin --first=RHS --last=Admin --password - -You can verify users have been added by running ->ipa user-find admin - -NOTE: Every user has to change password on first login. 
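Note: a quick way to complete that first-login password change and confirm the memberships that swiftkerbauth later reads is sketched below; the exact prompts may differ, and the `id` check is meant to be run on an enrolled client:

~~~
# The first kinit prompts for the initial password and forces a new one.
kinit rhs_test_admin
# Confirm the group membership that grants account-admin rights.
ipa group-show auth_rhs_test
# Groups as resolved through nsswitch/SSSD (run on the IPA client).
id -Gn rhs_test_admin
~~~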
- -[RHEL 6 Identity Management Guide]: https://access.redhat.com/site/documentation/en-US/Red_Hat_Enterprise_Linux/6/html/Identity_Management_Guide/ diff --git a/doc/markdown/swiftkerbauth/swiftkerbauth_guide.md b/doc/markdown/swiftkerbauth/swiftkerbauth_guide.md deleted file mode 100644 index 5da1827..0000000 --- a/doc/markdown/swiftkerbauth/swiftkerbauth_guide.md +++ /dev/null @@ -1,517 +0,0 @@ -#swiftkerbauth - -* [Installing Kerberos module for Apache] (#httpd-kerb-install) -* [Creating HTTP Service Principal] (#http-principal) -* [Installing and configuring swiftkerbauth] (#install-swiftkerbauth) -* [Using swiftkerbauth] (#use-swiftkerbauth) -* [Configurable Parameters] (#config-swiftkerbauth) -* [Functional tests] (#swfunctest) - - -## Installing Kerberos module for Apache on IPA client - -Install httpd server with kerberos module: -> yum install httpd mod_auth_kerb -> -> service httpd restart - -Check if auth_kerb_module is loaded : -> httpd -M | grep kerb - -Change httpd log level to debug by adding/changing the following in -*/etc/httpd/conf/httpd.conf* file - - LogLevel debug - -httpd logs are at */var/log/httpd/error_log* for troubleshooting - -If SELinux is enabled, allow Apache to connect to memcache and -activate the changes by running ->setsebool -P httpd_can_network_connect 1 -> ->setsebool -P httpd_can_network_memcache 1 - -***** - - -## Creating HTTP Service Principal on IPA server - -Add a HTTP Kerberos service principal : -> ipa service-add HTTP/client.rhelbox.com@RHELBOX.COM - -Retrieve the HTTP service principal to a keytab file: -> ipa-getkeytab -s server.rhelbox.com -p HTTP/client.rhelbox.com@RHELBOX.COM -k /tmp/http.keytab - -Copy keytab file to client: -> scp /tmp/http.keytab root@192.168.56.101:/etc/httpd/conf/http.keytab - -## Creating HTTP Service Principal on Windows AD server - -Add a HTTP Kerberos service principal: -> c:\>ktpass.exe -princ HTTP/fcclient.winad.com@WINAD.COM -mapuser -> auth_admin@WINAD.COM -pass Redhat*123 -out c:\HTTP.keytab -crypto DES-CBC-CRC -> -kvno 0 - -Use winscp to copy HTTP.ketab file to /etc/httpd/conf/http.keytab - -***** - - -##Installing and configuring swiftkerbauth on IPA client - -Prerequisites for installing swiftkerbauth -* swift (havana) -* gluster-swift (optional) - -You can install swiftkerbauth using one of these three ways: - -Installing swiftkerbauth from source: -> python setup.py install - -Installing swiftkerbauth using pip: -> pip install swiftkerbauth - -Installing swiftkerbauth from RPMs: -> ./makerpm.sh -> -> rpm -ivh dist/swiftkerbauth-1.0.0-1.noarch.rpm - -Edit */etc/httpd/conf.d/swift-auth.conf* and change KrbServiceName, KrbAuthRealms and Krb5KeyTab parameters accordingly. 
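Note: those directives are standard mod_auth_kerb settings protecting the swift-auth CGI location. A sketch of how the edited section might look with the principal, realm and keytab used in this guide; the file shipped with swiftkerbauth may differ in detail:

~~~
<Location /cgi-bin/swift-auth>
    AuthType Kerberos
    AuthName "Swift Authentication"
    KrbMethodNegotiate On
    KrbMethodK5Passwd On
    KrbServiceName HTTP
    KrbAuthRealms RHELBOX.COM
    Krb5KeyTab /etc/httpd/conf/http.keytab
    Require valid-user
</Location>
~~~

The `WWW-Authenticate: Negotiate` and `Basic realm="Swift Authentication"` headers in the curl traces later in this guide correspond to the KrbMethodNegotiate, KrbMethodK5Passwd and AuthName settings above.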
-More detail on configuring kerberos for apache can be found at: -[auth_kerb_module Configuration][] - -Make /etc/httpd/conf/http.keytab readable by any user : -> chmod 644 /etc/httpd/conf/http.keytab - -And preferably change owner of keytab file to apache : -> chown apache:apache /etc/httpd/conf/http.keytab - -Reload httpd -> service httpd reload - -Make authentication script executable: -> chmod +x /var/www/cgi-bin/swift-auth - -***** - - -##Using swiftkerbauth - -### Adding kerbauth filter in swift pipeline - -Edit */etc/swift/proxy-server.conf* and add a new filter section as follows: - - [filter:kerbauth] - use = egg:swiftkerbauth#kerbauth - ext_authentication_url = http://client.rhelbox.com/cgi-bin/swift-auth - auth_mode=passive - -Add kerbauth to pipeline - - [pipeline:main] - pipeline = catch_errors healthcheck proxy-logging cache proxy-logging kerbauth proxy-server - -If the Swift server is not one of your Gluster nodes, edit -*/etc/swift/fs.conf* and change the following lines in the DEFAULT -section: - - mount_ip = RHS_NODE_HOSTNAME - remote_cluster = yes - -Restart swift to activate kerbauth filer -> swift-init main restart - - -###Examples - -####Authenticate user and get Kerberos ticket - -> kinit auth_admin - -NOTE: curl ignores user specified in -u option. All further curl commands -will use the currently authenticated auth_admin user. - -####Get an authentication token: -> curl -v -u : --negotiate --location-trusted http://client.rhelbox.com:8080/auth/v1.0 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > GET /auth/v1.0 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > - < HTTP/1.1 303 See Other - < Content-Type: text/html; charset=UTF-8 - < Location: http://client.rhelbox.com/cgi-bin/swift-auth - < Content-Length: 0 - < X-Trans-Id: txecd415aae89b4320b6145-0052417ea5 - < Date: Tue, 24 Sep 2013 11:59:33 GMT - < - * Connection #0 to host client.rhelbox.com left intact - * Issue another request to this URL: 'http://client.rhelbox.com/cgi-bin/swift-auth' - * About to connect() to client.rhelbox.com port 80 (#1) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 80 (#1) - > GET /cgi-bin/swift-auth HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com - > Accept: */* - > - < HTTP/1.1 401 Unauthorized - < Date: Tue, 24 Sep 2013 11:59:33 GMT - < Server: Apache/2.4.6 (Fedora) mod_auth_kerb/5.4 - < WWW-Authenticate: Negotiate - < WWW-Authenticate: Basic realm="Swift Authentication" - < Content-Length: 381 - < Content-Type: text/html; charset=iso-8859-1 - < - * Ignoring the response-body - * Connection #1 to host client.rhelbox.com left intact - * Issue another request to this URL: 'http://client.rhelbox.com/cgi-bin/swift-auth' - * Re-using existing connection! 
(#1) with host (nil) - * Connected to (nil) (192.168.56.101) port 80 (#1) - * Server auth using GSS-Negotiate with user '' - > GET /cgi-bin/swift-auth HTTP/1.1 - > Authorization: Negotiate YIICYgYJKoZIhvcSAQICAQBuggJRMIICTaADAgEFoQMCAQ6iBwMFACAAAACjggFgYYIBXDCCAVigAwIBBaENGwtSSEVMQk9YLkNPTaIlMCOgAwIBA6EcMBobBEhUVFAbEmNsaWVudC5yaGVsYm94LmNvbaOCARkwggEVoAMCARKhAwIBAaKCAQcEggEDx9SH2R90RO4eAkhsNKow/DYfjv1rWhgxNRqj/My3yslASSgefls48VdDNHVVWqr1Kd6mB/9BIoumpA+of+KSAg2QfPtcWiVFj5n5Fa8fyCHyQPvV8c92KzUdrBPc8OVn0aldFp0I4P1MsYZbnddDRSH3kjVA5oSucHF59DhZWiGJV/F6sVimBSeoTBHQD38Cs5RhyDHNyUad9v3gZERVGCJXC76i7+yyaoIDA+N9s0hasHajhTnjs3XQBYfZFwp8lWl3Ub+sOtPO1Ng7mFlSAYXCM6ljlKTEaxRwaYoXUC1EoIqEOG/8pC9SJThS2M1G7MW1c5xm4lksNss72OH4gtPns6SB0zCB0KADAgESooHIBIHFrLtai5U8ajEWo1J9B26PnIUqLd+uA0KPd2Y2FjrH6rx4xT8qG2p8i36SVGubvwBVmfQ7lSJcXt6wUvb43qyPs/fMiSY7QxHxt7/btMgxQl6JWMagvXMhCNXnhEHNNaTdBcG5KFERDGeo0txaAD1bzZ4mnxCQmoqusGzZ6wdDw6+5wq1tK/hQTQUgk2NwxfXAg2J5K02/3fKjFR2h7zewI1pEyhhpeONRkkRETcyojkK2EbVzZ8kc3RsuwzFYsJ+9u5Qj3E4= - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com - > Accept: */* - > - < HTTP/1.1 200 OK - < Date: Tue, 24 Sep 2013 11:59:33 GMT - < Server: Apache/2.4.6 (Fedora) mod_auth_kerb/5.4 - < WWW-Authenticate: Negotiate YIGZBgkqhkiG9xIBAgICAG+BiTCBhqADAgEFoQMCAQ+iejB4oAMCARKicQRveeZTV/QRJSIOoOWPbZkEmtdug9V5ZcMGXWqAJvCAnrvw9gHbklMyLl8f8jU2e0wU3ehtchLEL4dVeAYgKsnUgw4wGhHu59AZBwSbHRKSpv3I6gWEZqC4NAEuZJFW9ipdUHOiclBQniVXXCsRF/5Y - < X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a - < X-Debug-Remote-User: auth_admin - < X-Debug-Groups: auth_admin,auth_reseller_admin - < X-Debug-Token-Life: 86400s - < X-Debug-Token-Expires: Wed Sep 25 17:29:33 2013 - < Content-Length: 0 - < Content-Type: text/html; charset=UTF-8 - < - * Connection #1 to host (nil) left intact - * Closing connection #0 - * Closing connection #1 - -The header *X-Auth-Token* in response contains the token *AUTH_tk083b8abc92f4a514f34224a181ed568a*. - -####PUT a container ->curl -v -X PUT -H 'X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a' http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > PUT /v1/AUTH_myvolume/c1 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a - > - < HTTP/1.1 201 Created - < Content-Length: 0 - < Content-Type: text/html; charset=UTF-8 - < X-Trans-Id: txc420b0ebf9714445900e8-0052418863 - < Date: Tue, 24 Sep 2013 12:41:07 GMT - < - * Connection #0 to host client.rhelbox.com left intact - * Closing connection #0 - -####GET a container listing -> curl -v -X GET -H 'X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a' http://client.rhelbox.com:8080/v1/AUTH_myvolume - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... 
- * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > GET /v1/AUTH_myvolume HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a - > - < HTTP/1.1 200 OK - < Content-Length: 3 - < X-Account-Container-Count: 0 - < Accept-Ranges: bytes - < X-Account-Object-Count: 0 - < X-Bytes-Used: 0 - < X-Timestamp: 1379997117.09468 - < X-Object-Count: 0 - < X-Account-Bytes-Used: 0 - < X-Type: Account - < Content-Type: text/plain; charset=utf-8 - < X-Container-Count: 0 - < X-Trans-Id: tx89826736a1ab4d6aae6e3-00524188dc - < Date: Tue, 24 Sep 2013 12:43:08 GMT - < - c1 - * Connection #0 to host client.rhelbox.com left intact - * Closing connection #0 - -####PUT an object in container -> curl -v -X PUT -H 'X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a' http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1/object1 -d'Hello world' - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > PUT /v1/AUTH_myvolume/c1/object1 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a - > Content-Length: 11 - > Content-Type: application/x-www-form-urlencoded - > - * upload completely sent off: 11 out of 11 bytes - < HTTP/1.1 201 Created - < Last-Modified: Wed, 25 Sep 2013 06:08:00 GMT - < Content-Length: 0 - < Etag: 3e25960a79dbc69b674cd4ec67a72c62 - < Content-Type: text/html; charset=UTF-8 - < X-Trans-Id: tx01f1b5a430cf4af3897be-0052427dc0 - < Date: Wed, 25 Sep 2013 06:08:01 GMT - < - * Connection #0 to host client.rhelbox.com left intact - * Closing connection #0 - -####Give permission to jsmith to list and download objects from c1 container -> curl -v -X POST -H 'X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a' -H 'X-Container-Read: jsmith' http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > POST /v1/AUTH_myvolume/c1 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tk083b8abc92f4a514f34224a181ed568a - > X-Container-Read: jsmith - > - < HTTP/1.1 204 No Content - < Content-Length: 0 - < Content-Type: text/html; charset=UTF-8 - < X-Trans-Id: txcedea3e2557d463eb591d-0052427f60 - < Date: Wed, 25 Sep 2013 06:14:56 GMT - < - * Connection #0 to host client.rhelbox.com left intact - * Closing connection #0 - -####Access container as jsmith - -> kinit jsmith - -Get token for jsmith -> curl -v -u : --negotiate --location-trusted http://client.rhelbox.com:8080/auth/v1.0 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... 
- * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > GET /auth/v1.0 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > - < HTTP/1.1 303 See Other - < Content-Type: text/html; charset=UTF-8 - < Location: http://client.rhelbox.com/cgi-bin/swift-auth - < Content-Length: 0 - < X-Trans-Id: txf51e1bf7f8c5496f8cc93-005242800b - < Date: Wed, 25 Sep 2013 06:17:47 GMT - < - * Connection #0 to host client.rhelbox.com left intact - * Issue another request to this URL: 'http://client.rhelbox.com/cgi-bin/swift-auth' - * About to connect() to client.rhelbox.com port 80 (#1) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 80 (#1) - > GET /cgi-bin/swift-auth HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com - > Accept: */* - > - < HTTP/1.1 401 Unauthorized - < Date: Wed, 25 Sep 2013 06:17:47 GMT - < Server: Apache/2.4.6 (Fedora) mod_auth_kerb/5.4 - < WWW-Authenticate: Negotiate - < WWW-Authenticate: Basic realm="Swift Authentication" - < Content-Length: 381 - < Content-Type: text/html; charset=iso-8859-1 - < - * Ignoring the response-body - * Connection #1 to host client.rhelbox.com left intact - * Issue another request to this URL: 'http://client.rhelbox.com/cgi-bin/swift-auth' - * Re-using existing connection! (#1) with host (nil) - * Connected to (nil) (192.168.56.101) port 80 (#1) - * Server auth using GSS-Negotiate with user '' - > GET /cgi-bin/swift-auth HTTP/1.1 - > Authorization: Negotiate YIICWAYJKoZIhvcSAQICAQBuggJHMIICQ6ADAgEFoQMCAQ6iBwMFACAAAACjggFbYYIBVzCCAVOgAwIBBaENGwtSSEVMQk9YLkNPTaIlMCOgAwIBA6EcMBobBEhUVFAbEmNsaWVudC5yaGVsYm94LmNvbaOCARQwggEQoAMCARKhAwIBAaKCAQIEgf/+3OaXYCSEjcsjU3t3lOLcYG84GBP9Kj9YTHc7yVMlcam4ivCwMqCkzxgvNo2E3a5KSWyFwngeX4b/QFbCKPXA4sfBibZRkeMk5gr2f0MLI3gWEAIYq7bJLre04bnkD2F0MzijPJrOLIx1KmFe08UGWCEmnG2uj07lvIR1RwV/7dMM4J1B+KKvDVKA0LxahwPIpx8oOON2yMGcstrBAHBBk5pmpt1Gg9Lh7xdNPsjP0IfI5Q0zkGCRBKpvpXymP1lQpQXlHbqkdBYOmG4+p/R+vIosO4ui1G6GWE9t71h3AqW61CcCj3/oOjZsG56k8HMSNk/+3mfUTP86nzLRGkekgc4wgcugAwIBEqKBwwSBwPsG9nGloEnOsA1abP4R1/yUDcikjjwKiacvZ+cu7bWEzu3L376k08U8C2YIClyUJy3Grt68LxhnfZ65VCZ5J5IOLiXOJnHBIoJ1L4GMYp4EgZzHvI7R3U3DApMzNWZwc1MsSF5UGhmLwxSevDLetJHjgKzKNteRyVN/8CFgjSBEjGSN1Qgy1RZHuQR9d3JHPczONZ4+ZgStfy+I1m2IUIgW3+4JGFVafHiBQVwSWRNfdXFgI3wBz7slntd7r3qMWA== - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com - > Accept: */* - > - < HTTP/1.1 200 OK - < Date: Wed, 25 Sep 2013 06:17:47 GMT - < Server: Apache/2.4.6 (Fedora) mod_auth_kerb/5.4 - < WWW-Authenticate: Negotiate YIGYBgkqhkiG9xIBAgICAG+BiDCBhaADAgEFoQMCAQ+ieTB3oAMCARKicARuH2YpjFrtgIhGr5nO7gh/21EvGH9tayRo5A3pw5pxD1B1036ePLG/x98OdMrSflse5s8ttz8FmvRphCFJa8kfYtnWULgoFLF2F2a1zBdSo2oCA0R05YFwArNhkg6ou5o7wWZkERHK33CKlhudSj8= - < X-Auth-Token: AUTH_tkb5a20eb8207a819e76619431c8410447 - < X-Debug-Remote-User: jsmith - < X-Debug-Groups: jsmith - < X-Debug-Token-Life: 86400s - < X-Debug-Token-Expires: Thu Sep 26 11:47:47 2013 - < Content-Length: 0 - < Content-Type: text/html; charset=UTF-8 - < - * Connection #1 to host (nil) left intact - * Closing connection #0 - * Closing connection #1 - -List the container using authentication token for jsmith: -> curl -v -X GET -H 'X-Auth-Token: AUTH_tkb5a20eb8207a819e76619431c8410447' http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... 
- * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > GET /v1/AUTH_myvolume/c1 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tkb5a20eb8207a819e76619431c8410447 - > - < HTTP/1.1 200 OK - < Content-Length: 8 - < X-Container-Object-Count: 0 - < Accept-Ranges: bytes - < X-Timestamp: 1 - < X-Container-Bytes-Used: 0 - < Content-Type: text/plain; charset=utf-8 - < X-Trans-Id: tx575215929c654d9f9f284-00524280a4 - < Date: Wed, 25 Sep 2013 06:20:20 GMT - < - object1 - * Connection #0 to host client.rhelbox.com left intact - * Closing connection #0 - -Downloading the object as jsmith: -> curl -v -X GET -H 'X-Auth-Token: AUTH_tkb5a20eb8207a819e76619431c8410447' http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1/object1 - - * About to connect() to client.rhelbox.com port 8080 (#0) - * Trying 192.168.56.101... - * connected - * Connected to client.rhelbox.com (192.168.56.101) port 8080 (#0) - > GET /v1/AUTH_myvolume/c1/object1 HTTP/1.1 - > User-Agent: curl/7.27.0 - > Host: client.rhelbox.com:8080 - > Accept: */* - > X-Auth-Token: AUTH_tkb5a20eb8207a819e76619431c8410447 - > - < HTTP/1.1 200 OK - < Content-Length: 11 - < Accept-Ranges: bytes - < Last-Modified: Wed, 25 Sep 2013 06:08:00 GMT - < Etag: 3e25960a79dbc69b674cd4ec67a72c62 - < X-Timestamp: 1380089280.98829 - < Content-Type: application/x-www-form-urlencoded - < X-Trans-Id: tx19b5cc3847854f40a6ca8-00524281aa - < Date: Wed, 25 Sep 2013 06:24:42 GMT - < - * Connection #0 to host client.rhelbox.com left intact - Hello world* Closing connection #0 - -For curl to follow the redirect, you need to specify additional -options. With these, and with a current Kerberos ticket, you should -get the Kerberos user's cached authentication token, or a new one if -the previous token has expired. - -> curl -v -u : --negotiate --location-trusted -X GET http://client.rhelbox.com:8080/v1/AUTH_myvolume/c1/object1 - -The --negotiate option is for curl to perform Kerberos authentication and ---location-trusted is for curl to follow the redirect. - -[auth_kerb_module Configuration]: http://modauthkerb.sourceforge.net/configure.html - - -#### Get an authentication token when auth_mode=passive: -> curl -v -H 'X-Auth-User: test:auth_admin' -H 'X-Auth-Key: Redhat*123' http://127.0.0.1:8080/auth/v1.0 - -**NOTE**: X-Storage-Url response header can be returned only in passive mode. - - -##Configurable Parameters - -The kerbauth filter section in **/etc/swift/proxy-server.conf** looks something -like this: - - [filter:kerbauth] - use = egg:swiftkerbauth#kerbauth - ext_authentication_url = http://client.rhelbox.com/cgi-bin/swift-auth - auth_method = active - token_life = 86400 - debug_headers = yes - realm_name = RHELBOX.COM - -Of all the options listed above, specifying **ext\_authentication\_url** is -mandatory. The rest of the options are optional and have default values. - -#### ext\_authentication\_url -A URL specifying location of the swift-auth CGI script. Avoid using IP address. -Default value: None - -#### token_life -After how many seconds the cached information about an authentication token is -discarded. -Default value: 86400 - -#### debug_headers -When turned on, the response headers sent to the user will contain additional -debug information apart from the auth token. -Default value: yes - -#### auth_method -Set this to **"active"** when you want to allow access **only to clients -residing inside the domain**. 
In this mode, authentication is performed by -mod\_auth\_kerb using the Kerberos ticket bundled with the client request. -No username or password has to be specified to get a token. -Set this to **"passive"** when you want to allow access to clients residing -outside the domain. In this mode, authentication is performed by gleaning the -username and password from the request headers (X-Auth-User and X-Auth-Key) and -running the kinit command against them. -Default value: passive - -#### realm_name -This is applicable only when auth_method=passive. This option specifies the -realm name to use when the storage server belongs to more than one realm and the -realm name is not part of the username specified in the X-Auth-User header. - - -##Functional tests for SwiftKerbAuth - -The functional tests are run on the storage node after SwiftKerbAuth is set up using -either an IPA server or Windows AD. The gluster-swift/doc/markdown/swiftkerbauth -directory contains the SwiftKerbAuth setup documents. There are two modes of -working with SwiftKerbAuth. 'PASSIVE' mode indicates the client is outside the -domain configured using SwiftKerbAuth. The client provides a username and -password when invoking a command, and the SwiftKerbAuth auth filter then -obtains the ticket-granting ticket from the AD or IPA server. -In 'ACTIVE' mode, the user is already logged in to the storage node with -Kerberos credentials and is authenticated against the AD/IPA server. - -In PASSIVE mode, all the generic functional tests are run. ACTIVE mode acquires the -ticket-granting ticket differently and therefore uses a separate functional test -framework. - -The accounts, users, and passwords must be prepared on the AD/IPA server as -described in test/functional_auth/swiftkerbauth/conf/test.conf - -The command to invoke the SwiftKerbAuth functional tests is -> $tox -e swfunctest - -This runs both the ACTIVE and PASSIVE mode functional test cases. diff --git a/doc/markdown/user_guide.md b/doc/markdown/user_guide.md deleted file mode 100644 index 6108832..0000000 --- a/doc/markdown/user_guide.md +++ /dev/null @@ -1,66 +0,0 @@ -# User Guide - -## Installation - -### GlusterFS Installation -First, we need to install GlusterFS on the system by following the -instructions on [GlusterFS QuickStart Guide][]. - -### Fedora/RHEL/CentOS -Gluster for Swift depends on OpenStack Swift Grizzly, which can be -obtained by using [RedHat's RDO][] packages as follows: - -~~~ -yum install -y http://rdo.fedorapeople.org/openstack/openstack-grizzly/rdo-release-grizzly.rpm -~~~ - -### Download -Gluster for Swift uses [Jenkins][] for continuous integration and -creation of distribution builds. Download the latest RPM builds -from one of the links below: - -* RHEL/CentOS 6: [Download](http://build.gluster.org/job/gluster-swift-builds-cent6/lastSuccessfulBuild/artifact/build/) -* Fedora 18+: [Download](http://build.gluster.org/job/gluster-swift-builds-f18/lastSuccessfulBuild/artifact/build/) - -Install the downloaded RPM using the following command: - -~~~ -yum install -y RPMFILE -~~~ - -where *RPMFILE* is the RPM file downloaded from Jenkins.
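As a quick illustration of the passive-mode flow documented in the swiftkerbauth section earlier (a token request carrying X-Auth-User/X-Auth-Key, followed by normal requests carrying X-Auth-Token), here is a minimal client sketch. It assumes the third-party `requests` library and reuses the example endpoint, account, and credentials from the curl examples above; it is an illustration only, not part of swiftkerbauth.

~~~
# Minimal sketch of the passive-mode flow shown above, assuming the
# `requests` library and the example endpoint/credentials from this doc.
import requests

AUTH_URL = 'http://127.0.0.1:8080/auth/v1.0'

# 1. Exchange X-Auth-User/X-Auth-Key for a token (in passive mode the
#    filter runs kinit with these credentials on the server side).
resp = requests.get(AUTH_URL, headers={'X-Auth-User': 'test:auth_admin',
                                       'X-Auth-Key': 'Redhat*123'})
resp.raise_for_status()
token = resp.headers['X-Auth-Token']
# X-Storage-Url is returned in passive mode; fall back to the account
# URL used in the examples above.
storage_url = resp.headers.get('X-Storage-Url',
                               'http://127.0.0.1:8080/v1/AUTH_myvolume')

# 2. Use the token for regular Swift requests.
requests.put('%s/c1' % storage_url,
             headers={'X-Auth-Token': token})
requests.put('%s/c1/object1' % storage_url,
             headers={'X-Auth-Token': token},
             data='Hello world')
~~~

In active mode the token request would instead carry a Kerberos Negotiate header, which is what the curl examples do with `--negotiate --location-trusted`.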
- -## Configuration -TBD - -## Server Control -Command to start the servers (TBD) - -~~~ -swift-init main start -~~~ - -Command to stop the servers (TBD) - -~~~ -swift-init main stop -~~~ - -Command to gracefully reload the servers - -~~~ -swift-init main reload -~~~ - -### Mounting your volumes -TBD - -Once this is done, you can access GlusterFS volumes via the Swift API where -accounts are mounted volumes, containers are top-level directories, -and objects are files and sub-directories of container directories. - - - -[GlusterFS QuickStart Guide]: http://www.gluster.org/community/documentation/index.php/QuickStart -[RedHat's RDO]: http://openstack.redhat.com/Quickstart -[Jenkins]: http://jenkins-ci.org diff --git a/etc/account-server.conf-gluster b/etc/account-server.conf-gluster deleted file mode 100644 index 53cae14..0000000 --- a/etc/account-server.conf-gluster +++ /dev/null @@ -1,39 +0,0 @@ -[DEFAULT] -# -# Default gluster mount point to be used for object store,can be changed by -# setting the following value in {account,container,object}-server.conf files. -# It is recommended to keep this value same for all the three services but can -# be kept different if environment demands. -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the account-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -mount_check = true -bind_port = 6012 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = account-server - -[app:account-server] -use = egg:gluster_swift#account -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# The following parameter is used by object-expirer and needs to be same -# across all conf files! -auto_create_account_prefix = gs -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the account server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - diff --git a/etc/container-server.conf-gluster b/etc/container-server.conf-gluster deleted file mode 100644 index 3136bd9..0000000 --- a/etc/container-server.conf-gluster +++ /dev/null @@ -1,39 +0,0 @@ -[DEFAULT] -# -# Default gluster mount point to be used for object store,can be changed by -# setting the following value in {account,container,object}-server.conf files. -# It is recommended to keep this value same for all the three services but can -# be kept different if environment demands. -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the container-server workers -# start, you can *consider* setting this value to "false" to reduce the -# per-request overhead it can incur. -mount_check = true -bind_port = 6011 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = container-server - -[app:container-server] -use = egg:gluster_swift#container -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# The following parameters is used by object-expirer and needs to be same -# across all conf files! 
-auto_create_account_prefix = gs -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the container server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - diff --git a/etc/object-expirer.conf-gluster b/etc/object-expirer.conf-gluster deleted file mode 100644 index 4449ee2..0000000 --- a/etc/object-expirer.conf-gluster +++ /dev/null @@ -1,27 +0,0 @@ -#TODO: Add documentation to explain various options -#For now, refer: https://github.com/openstack/swift/blob/master/etc/object-expirer.conf-sample - -[DEFAULT] - -[object-expirer] -user = root -log_facility = LOG_LOCAL2 -log_level = DEBUG -# The following parameters are used by object-expirer and needs to be same -# across all conf files! -auto_create_account_prefix = gs -expiring_objects_account_name = expiring - -interval = 30 - -[pipeline:main] -pipeline = catch_errors cache proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy - -[filter:cache] -use = egg:swift#memcache - -[filter:catch_errors] -use = egg:swift#catch_errors diff --git a/etc/object-server.conf-gluster b/etc/object-server.conf-gluster index d8d06c7..033a2fb 100644 --- a/etc/object-server.conf-gluster +++ b/etc/object-server.conf-gluster @@ -4,14 +4,14 @@ # setting the following value in {account,container,object}-server.conf files. # It is recommended to keep this value same for all the three services but can # be kept different if environment demands. -devices = /mnt/gluster-object +devices = /mnt/swiftonfile # # Once you are confident that your startup processes will always have your # gluster volumes properly mounted *before* the object-server workers start, # you can *consider* setting this value to "false" to reduce the per-request # overhead it can incur. mount_check = true -bind_port = 6010 +bind_port = 6050 # # Maximum number of clients one worker can process simultaneously (it will # actually accept N + 1). Setting this to one (1) will only handle one request @@ -34,11 +34,6 @@ use = egg:gluster_swift#object user = root log_facility = LOG_LOCAL2 log_level = WARN -# The following parameters are used by object-expirer and needs to be same -# across all conf files! -auto_create_account_prefix = gs -expiring_objects_account_name = expiring -# # For performance, after ensuring things are running in a stable manner, you # can turn off normal request logging for the object server to reduce the # per-request overhead and unclutter the log files. Warnings and errors will diff --git a/etc/proxy-server.conf-gluster b/etc/proxy-server.conf-gluster deleted file mode 100644 index 7996a5d..0000000 --- a/etc/proxy-server.conf-gluster +++ /dev/null @@ -1,70 +0,0 @@ -[DEFAULT] -bind_port = 8080 -user = root -# Consider using 1 worker per CPU -workers = 1 - -[pipeline:main] -pipeline = catch_errors healthcheck proxy-logging cache proxy-logging proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy -log_facility = LOG_LOCAL1 -log_level = WARN -# The API allows for account creation and deletion, but since Gluster/Swift -# automounts a Gluster volume for a given account, there is no way to create -# or delete an account. So leave this off. -allow_account_management = false -account_autocreate = true -# The following parameters are used by object-expirer and needs to be same -# across all conf files! 
-auto_create_account_prefix = gs -expiring_objects_account_name = expiring -# Ensure the proxy server uses fast-POSTs since we don't need to make a copy -# of the entire object given that all metadata is stored in the object -# extended attributes (no .meta file used after creation) and no container -# sync feature to present. -object_post_as_copy = false -# Only need to recheck the account exists once a day -recheck_account_existence = 86400 -# May want to consider bumping this up if containers are created and destroyed -# infrequently. -recheck_container_existence = 60 -# Timeout clients that don't read or write to the proxy server after 5 -# seconds. -client_timeout = 5 -# Give more time to connect to the object, container or account servers in -# cases of high load. -conn_timeout = 5 -# For high load situations, once connected to an object, container or account -# server, allow for delays communicating with them. -node_timeout = 60 -# May want to consider bumping up this value to 1 - 4 MB depending on how much -# traffic is for multi-megabyte or gigabyte requests; perhaps matching the -# stripe width (not stripe element size) of your storage volume is a good -# starting point. See below for sizing information. -object_chunk_size = 65536 -# If you do decide to increase the object_chunk_size, then consider lowering -# this value to one. Up to "put_queue_length" object_chunk_size'd buffers can -# be queued to the object server for processing. Given one proxy server worker -# can handle up to 1,024 connections, by default, it will consume 10 * 65,536 -# * 1,024 bytes of memory in the worse case (default values). Be sure the -# amount of memory available on the system can accommodate increased values -# for object_chunk_size. -put_queue_depth = 10 - -[filter:catch_errors] -use = egg:swift#catch_errors - -[filter:proxy-logging] -use = egg:swift#proxy_logging -access_log_level = WARN - -[filter:healthcheck] -use = egg:swift#healthcheck - -[filter:cache] -use = egg:swift#memcache -# Update this line to contain a comma separated list of memcache servers -# shared by all nodes running the proxy-server service. -memcache_servers = localhost:11211 diff --git a/etc/swift.conf-gluster b/etc/swift.conf-gluster index ce9a4d0..4bbec24 100644 --- a/etc/swift.conf-gluster +++ b/etc/swift.conf-gluster @@ -1,13 +1,53 @@ -[DEFAULT] - - [swift-hash] -# random unique string that can never change (DO NOT LOSE) -swift_hash_path_suffix = gluster +# swift_hash_path_suffix and swift_hash_path_prefix are used as part of the +# the hashing algorithm when determining data placement in the cluster. +# These values should remain secret and MUST NOT change +# once a cluster has been deployed. + +swift_hash_path_suffix = changeme +swift_hash_path_prefix = changeme + +# storage policies are defined here and determine various characteristics +# about how objects are stored and treated. Policies are specified by name on +# a per container basis. Names are case-insensitive. The policy index is +# specified in the section header and is used internally. The policy with +# index 0 is always used for legacy containers and can be given a name for use +# in metadata however the ring file name will always be 'object.ring.gz' for +# backwards compatibility. If no policies are defined a policy with index 0 +# will be automatically created for backwards compatibility and given the name +# Policy-0. A default policy is used when creating new containers when no +# policy is specified in the request. 
If no other policies are defined the +# policy with index 0 will be declared the default. If multiple policies are +# defined you must define a policy with index 0 and you must specify a +# default. It is recommended you always define a section for +# storage-policy:0. +[storage-policy:0] +name = Policy-0 +default = yes + +# the following section would declare a policy called 'silver', the number of +# replicas will be determined by how the ring is built. In this example the +# 'silver' policy could have a lower or higher # of replicas than the +# 'Policy-0' policy above. The ring filename will be 'object-1.ring.gz'. You +# may only specify one storage policy section as the default. If you changed +# this section to specify 'silver' as the default, when a client created a new +# container w/o a policy specified, it will get the 'silver' policy because +# this config has specified it as the default. However if a legacy container +# (one created with a pre-policy version of swift) is accessed, it is known +# implicitly to be assigned to the policy with index 0 as opposed to the +# current default. +#[storage-policy:1] +#name = silver + +# The following section defines a policy called 'swiftonfile' to be used by +# swiftonfile object-server implementation. +[storage-policy:2] +name = swiftonfile # The swift-constraints section sets the basic constraints on data -# saved in the swift cluster. +# saved in the swift cluster. These constraints are automatically +# published by the proxy server in responses to /info requests. [swift-constraints] @@ -15,9 +55,10 @@ swift_hash_path_suffix = gluster # the cluster. This is also the limit on the size of each segment of # a "large" object when using the large object manifest support. # This value is set in bytes. Setting it to lower than 1MiB will cause -# some tests to fail. -# Default is 1 TiB = 2**30*1024 -max_file_size = 1099511627776 +# some tests to fail. It is STRONGLY recommended to leave this value at +# the default (5 * 2**30 + 2). + +#max_file_size = 5368709122 # max_meta_name_length is the max number of bytes in the utf8 encoding @@ -43,43 +84,50 @@ max_file_size = 1099511627776 #max_meta_overall_size = 4096 +# max_header_size is the max number of bytes in the utf8 encoding of each +# header. Using 8192 as default because eventlet use 8192 as max size of +# header line. This value may need to be increased when using identity +# v3 API tokens including more than 7 catalog entries. +# See also include_service_catalog in proxy-server.conf-sample +# (documented in overview_auth.rst) -# max_object_name_length is the max number of bytes in the utf8 encoding of an -# object name: Gluster FS can handle much longer file names, but the length -# between the slashes of the URL is handled below. Remember that most web -# clients can't handle anything greater than 2048, and those that do are -# rather clumsy. - -max_object_name_length = 2048 - -# max_object_name_component_length (GlusterFS) is the max number of bytes in -# the utf8 encoding of an object name component (the part between the -# slashes); this is a limit imposed by the underlying file system (for XFS it -# is 255 bytes). 
- -max_object_name_component_length = 255 +#max_header_size = 8192 # container_listing_limit is the default (and max) number of items # returned for a container listing request #container_listing_limit = 10000 - # account_listing_limit is the default (and max) number of items returned # for an account listing request - #account_listing_limit = 10000 +# SwiftOnFile constraints - do not exceed the maximum values which are +# set here as default -# max_account_name_length is the max number of bytes in the utf8 encoding of -# an account name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. +# max_object_name_length is the max number of bytes in the utf8 encoding +# of an object name +max_object_name_length = 221 +# Why 221 ? +# The longest filename supported by XFS is 255. +# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125 +# SoF creates a temp file with the following naming convention: +# .OBJECT_NAME. +# The random string is 32 characters long and the file name has two dots. +# Hence 255 - 32 - 2 = 221 +# NOTE: This limitation can be safely raised by having slashes in really long +# object names. Each segment between slashes ('/') should not exceed 221. + +# max_account_name_length is the max number of bytes in the utf8 encoding +# of an account name max_account_name_length = 255 - # max_container_name_length is the max number of bytes in the utf8 encoding -# of a container name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - +# of a container name max_container_name_length = 255 + +# Why 255 ? +# The longest filename supported by XFS is 255. +# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125 +# SoF creates the following directory hierarchy on the mount point: account/container diff --git a/gluster/swift/account/__init__.py b/gluster/swift/account/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/account/server.py b/gluster/swift/account/server.py deleted file mode 100644 index a2a20af..0000000 --- a/gluster/swift/account/server.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" Account Server for Gluster Swift UFO """ - -# Simply importing this monkey patches the constraint handling to fit our -# needs -import gluster.swift.common.constraints # noqa - -from swift.account import server -from gluster.swift.common.DiskDir import DiskAccount - - -class AccountController(server.AccountController): - - def _get_account_broker(self, drive, part, account, **kwargs): - """ - Overridden to provide the GlusterFS specific broker that talks to - Gluster for the information related to servicing a given request - instead of talking to a database.
- - :param drive: drive that holds the container - :param part: partition the container is in - :param account: account name - :returns: DiskDir object - """ - return DiskAccount(self.root, drive, account, self.logger, **kwargs) - - -def app_factory(global_conf, **local_conf): - """paste.deploy app factory for creating WSGI account server apps.""" - conf = global_conf.copy() - conf.update(local_conf) - return AccountController(conf) diff --git a/gluster/swift/common/DiskDir.py b/gluster/swift/common/DiskDir.py deleted file mode 100644 index 0a91009..0000000 --- a/gluster/swift/common/DiskDir.py +++ /dev/null @@ -1,705 +0,0 @@ -# Copyright (c) 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import errno - -from gluster.swift.common.fs_utils import dir_empty, mkdirs, do_chown, \ - do_exists, do_touch -from gluster.swift.common.utils import validate_account, validate_container, \ - get_container_details, get_account_details, create_container_metadata, \ - create_account_metadata, DEFAULT_GID, get_container_metadata, \ - get_account_metadata, DEFAULT_UID, validate_object, \ - create_object_metadata, read_metadata, write_metadata, X_CONTENT_TYPE, \ - X_CONTENT_LENGTH, X_TIMESTAMP, X_PUT_TIMESTAMP, X_ETAG, X_OBJECTS_COUNT, \ - X_BYTES_USED, X_CONTAINER_COUNT, DIR_TYPE, rmobjdir, dir_is_object -from gluster.swift.common import Glusterfs -from gluster.swift.common.exceptions import FileOrDirNotFoundError - - -DATADIR = 'containers' - -# Create a dummy db_file in Glusterfs.RUN_DIR -_db_file = "" - - -def _read_metadata(dd): - """ Filter read metadata so that it always returns a tuple that includes - some kind of timestamp. With 1.4.8 of the Swift integration the - timestamps were not stored. Here we fabricate timestamps for volumes - where the existing data has no timestamp (that is, stored data is not - a tuple), allowing us a measure of backward compatibility. - - FIXME: At this time it does not appear that the timestamps on each - metadata are used for much, so this should not hurt anything. - """ - metadata_i = read_metadata(dd) - metadata = {} - timestamp = 0 - for key, value in metadata_i.iteritems(): - if not isinstance(value, tuple): - value = (value, timestamp) - metadata[key] = value - return metadata - - -def filter_prefix(objects, prefix): - """ - Accept a sorted list of strings, returning all strings starting with the - given prefix. - """ - found = False - for object_name in objects: - if object_name.startswith(prefix): - yield object_name - found = True - else: - # Since the list is assumed to be sorted, once we find an object - # name that does not start with the prefix we know we won't find - # any others, so we exit early. - if found: - break - - -def filter_delimiter(objects, delimiter, prefix, marker, path=None): - """ - Accept a sorted list of strings, returning strings that: - 1. begin with "prefix" (empty string matches all) - 2. does not match the "path" argument - 3. 
does not contain the delimiter in the given prefix length - """ - assert delimiter - assert prefix is not None - skip_name = None - for object_name in objects: - if prefix and not object_name.startswith(prefix): - break - if path is not None: - if object_name == path: - continue - if skip_name: - if object_name < skip_name: - continue - else: - skip_name = None - end = object_name.find(delimiter, len(prefix)) - if end >= 0 and (len(object_name) > (end + 1)): - skip_name = object_name[:end] + chr(ord(delimiter) + 1) - continue - else: - if skip_name: - if object_name < skip_name: - continue - else: - skip_name = None - end = object_name.find(delimiter, len(prefix)) - if end > 0: - dir_name = object_name[:end + 1] - if dir_name != marker: - yield dir_name - skip_name = object_name[:end] + chr(ord(delimiter) + 1) - continue - yield object_name - - -def filter_marker(objects, marker): - """ - Accept sorted list of strings, return all strings whose value is strictly - greater than the given marker value. - """ - for object_name in objects: - if object_name > marker: - yield object_name - - -def filter_prefix_as_marker(objects, prefix): - """ - Accept sorted list of strings, return all strings whose value is greater - than or equal to the given prefix value. - """ - for object_name in objects: - if object_name >= prefix: - yield object_name - - -def filter_end_marker(objects, end_marker): - """ - Accept a list of strings, sorted, and return all the strings that are - strictly less than the given end_marker string. We perform this as a - generator to avoid creating potentially large intermediate object lists. - """ - for object_name in objects: - if object_name < end_marker: - yield object_name - else: - break - - -class DiskCommon(object): - """ - Common fields and methods shared between DiskDir and DiskAccount classes. - """ - def __init__(self, root, drive, account, logger, pending_timeout=None, - stale_reads_ok=False): - # WARNING: The following four fields are referenced as fields by our - # callers outside of this module, do not remove. - # Create a dummy db_file in Glusterfs.RUN_DIR - global _db_file - if not _db_file: - _db_file = os.path.join(Glusterfs.RUN_DIR, 'db_file.db') - if not do_exists(_db_file): - do_touch(_db_file) - self.db_file = _db_file - self.metadata = {} - self.pending_timeout = pending_timeout or 10 - self.stale_reads_ok = stale_reads_ok - # The following fields are common - self.root = root - assert logger is not None - self.logger = logger - self.account = account - self.datadir = os.path.join(root, drive) - self._dir_exists = None - - def _dir_exists_read_metadata(self): - self._dir_exists = do_exists(self.datadir) - if self._dir_exists: - self.metadata = _read_metadata(self.datadir) - return self._dir_exists - - def is_deleted(self): - # The intention of this method is to check the file system to see if - # the directory actually exists. - return not do_exists(self.datadir) - - def empty(self): - # If it does not exist, then it is empty. A value of True is - # what is expected by OpenStack Swift when the directory does - # not exist. 
Check swift/common/db.py:ContainerBroker.empty() - # and swift/container/server.py:ContainerController.DELETE() - # for more information - try: - return dir_empty(self.datadir) - except FileOrDirNotFoundError: - return True - - def update_metadata(self, metadata): - assert self.metadata, "Valid container/account metadata should have " \ - "been created by now" - if metadata: - new_metadata = self.metadata.copy() - new_metadata.update(metadata) - if new_metadata != self.metadata: - write_metadata(self.datadir, new_metadata) - self.metadata = new_metadata - - -class DiskDir(DiskCommon): - """ - Manage object files on disk. - - :param path: path to devices on the node - :param drive: gluster volume drive name - :param account: account name for the object - :param container: container name for the object - :param logger: account or container server logging object - :param uid: user ID container object should assume - :param gid: group ID container object should assume - - Usage pattern from container/server.py (Havana, 1.8.0+): - DELETE: - if auto-create and obj and not .db_file: - # Creates container - .initialize() - if not .db_file: - # Container does not exist - return 404 - if obj: - # Should be a NOOP - .delete_object() - else: - if not .empty() - # Gluster's definition of empty should mean only - # sub-directories exist in Object-Only mode - return conflict - .get_info()['put_timestamp'] and not .is_deleted() - # Deletes container - .delete_db() - if not .is_deleted(): - return conflict - account_update(): - .get_info() - PUT: - if obj: - if auto-create cont and not .db_file - # Creates container - .initialize() - if not .db_file - return 404 - .put_object() - else: - if not .db_file: - # Creates container - .initialize() - else: - # Update container timestamp - .is_deleted() - .update_put_timestamp() - if .is_deleted() - return conflict - if metadata: - if .metadata - .set_x_container_sync_points() - .update_metadata() - account_update(): - .get_info() - HEAD: - .pending_timeout - .stale_reads_ok - if .is_deleted(): - return 404 - .get_info() - .metadata - GET: - .pending_timeout - .stale_reads_ok - if .is_deleted(): - return 404 - .get_info() - .metadata - .list_objects_iter() - POST: - if .is_deleted(): - return 404 - .metadata - .set_x_container_sync_points() - .update_metadata() - """ - - def __init__(self, path, drive, account, container, logger, - uid=DEFAULT_UID, gid=DEFAULT_GID, **kwargs): - super(DiskDir, self).__init__(path, drive, account, logger, **kwargs) - - self.uid = int(uid) - self.gid = int(gid) - - self.container = container - self.datadir = os.path.join(self.datadir, self.container) - - if not self._dir_exists_read_metadata(): - return - - if not self.metadata: - create_container_metadata(self.datadir) - self.metadata = _read_metadata(self.datadir) - else: - if not validate_container(self.metadata): - create_container_metadata(self.datadir) - self.metadata = _read_metadata(self.datadir) - - def list_objects_iter(self, limit, marker, end_marker, - prefix, delimiter, path=None): - """ - Returns tuple of name, created_at, size, content_type, etag. 
- """ - assert limit >= 0 - assert not delimiter or (len(delimiter) == 1 and ord(delimiter) <= 254) - - if path is not None: - if path: - prefix = path = path.rstrip('/') + '/' - else: - prefix = path - delimiter = '/' - elif delimiter and not prefix: - prefix = '' - - container_list = [] - - objects = self._update_object_count() - if objects: - objects.sort() - else: - return container_list - - if end_marker: - objects = filter_end_marker(objects, end_marker) - - if marker and marker >= prefix: - objects = filter_marker(objects, marker) - elif prefix: - objects = filter_prefix_as_marker(objects, prefix) - - if prefix is None: - # No prefix, we don't need to apply the other arguments, we just - # return what we have. - pass - else: - # We have a non-None (for all intents and purposes it is a string) - # prefix. - if not delimiter: - if not prefix: - # We have nothing more to do - pass - else: - objects = filter_prefix(objects, prefix) - else: - objects = filter_delimiter(objects, delimiter, prefix, marker, - path) - - count = 0 - for obj in objects: - obj_path = os.path.join(self.datadir, obj) - metadata = read_metadata(obj_path) - if not metadata or not validate_object(metadata): - if delimiter == '/' and obj_path[-1] == delimiter: - clean_obj_path = obj_path[:-1] - else: - clean_obj_path = obj_path - try: - metadata = create_object_metadata(clean_obj_path) - except OSError as e: - # FIXME - total hack to get upstream swift ported unit - # test cases working for now. - if e.errno != errno.ENOENT: - raise - if not Glusterfs._implicit_dir_objects and metadata \ - and metadata[X_CONTENT_TYPE] == DIR_TYPE \ - and not dir_is_object(metadata): - continue - list_item = [] - list_item.append(obj) - if metadata: - list_item.append(metadata[X_TIMESTAMP]) - list_item.append(int(metadata[X_CONTENT_LENGTH])) - list_item.append(metadata[X_CONTENT_TYPE]) - list_item.append(metadata[X_ETAG]) - container_list.append(list_item) - count += 1 - if count >= limit: - break - - return container_list - - def _update_object_count(self): - objects, object_count, bytes_used = get_container_details(self.datadir) - - if X_OBJECTS_COUNT not in self.metadata \ - or int(self.metadata[X_OBJECTS_COUNT][0]) != object_count \ - or X_BYTES_USED not in self.metadata \ - or int(self.metadata[X_BYTES_USED][0]) != bytes_used: - self.metadata[X_OBJECTS_COUNT] = (object_count, 0) - self.metadata[X_BYTES_USED] = (bytes_used, 0) - write_metadata(self.datadir, self.metadata) - - return objects - - def get_info(self): - """ - Get global data for the container. - :returns: dict with keys: account, container, object_count, bytes_used, - hash, id, created_at, put_timestamp, delete_timestamp, - reported_put_timestamp, reported_delete_timestamp, - reported_object_count, and reported_bytes_used. 
- """ - if self._dir_exists and Glusterfs._container_update_object_count: - self._update_object_count() - - data = {'account': self.account, 'container': self.container, - 'object_count': self.metadata.get( - X_OBJECTS_COUNT, ('0', 0))[0], - 'bytes_used': self.metadata.get(X_BYTES_USED, ('0', 0))[0], - 'hash': '', 'id': '', 'created_at': '1', - 'put_timestamp': self.metadata.get( - X_PUT_TIMESTAMP, ('0', 0))[0], - 'delete_timestamp': '1', - 'reported_put_timestamp': '1', - 'reported_delete_timestamp': '1', - 'reported_object_count': '1', 'reported_bytes_used': '1', - 'x_container_sync_point1': self.metadata.get( - 'x_container_sync_point1', -1), - 'x_container_sync_point2': self.metadata.get( - 'x_container_sync_point2', -1), - } - return data - - def put_object(self, name, timestamp, size, content_type, etag, deleted=0): - # NOOP - should never be called since object file creation occurs - # within a directory implicitly. - pass - - def initialize(self, timestamp): - """ - Create and write metatdata to directory/container. - :param metadata: Metadata to write. - """ - if not self._dir_exists: - mkdirs(self.datadir) - # If we create it, ensure we own it. - do_chown(self.datadir, self.uid, self.gid) - metadata = get_container_metadata(self.datadir) - metadata[X_TIMESTAMP] = timestamp - write_metadata(self.datadir, metadata) - self.metadata = metadata - self._dir_exists = True - - def update_put_timestamp(self, timestamp): - """ - Update the PUT timestamp for the container. - - If the container does not exist, create it using a PUT timestamp of - the given value. - - If the container does exist, update the PUT timestamp only if it is - later than the existing value. - """ - if not do_exists(self.datadir): - self.initialize(timestamp) - else: - if timestamp > self.metadata[X_PUT_TIMESTAMP]: - self.metadata[X_PUT_TIMESTAMP] = (timestamp, 0) - write_metadata(self.datadir, self.metadata) - - def delete_object(self, name, timestamp): - # NOOP - should never be called since object file removal occurs - # within a directory implicitly. - return - - def delete_db(self, timestamp): - """ - Delete the container (directory) if empty. - - :param timestamp: delete timestamp - """ - # Let's check and see if it has directories that - # where created by the code, but not by the - # caller as objects - rmobjdir(self.datadir) - - def set_x_container_sync_points(self, sync_point1, sync_point2): - self.metadata['x_container_sync_point1'] = sync_point1 - self.metadata['x_container_sync_point2'] = sync_point2 - - -class DiskAccount(DiskCommon): - """ - Usage pattern from account/server.py (Havana, 1.8.0+): - DELETE: - .is_deleted() - .delete_db() - PUT: - container: - .pending_timeout - .db_file - .initialize() - .is_deleted() - .put_container() - account: - .db_file - .initialize() - .is_status_deleted() - .is_deleted() - .update_put_timestamp() - .is_deleted() ??? - .update_metadata() - HEAD: - .pending_timeout - .stale_reads_ok - .is_deleted() - .get_info() - .metadata - GET: - .pending_timeout - .stale_reads_ok - .is_deleted() - .get_info() - .metadata - .list_containers_iter() - POST: - .is_deleted() - .update_metadata() - """ - - def __init__(self, root, drive, account, logger, **kwargs): - super(DiskAccount, self).__init__(root, drive, account, logger, - **kwargs) - - # Since accounts should always exist (given an account maps to a - # gluster volume directly, and the mount has already been checked at - # the beginning of the REST API handling), just assert that that - # assumption still holds. 
- assert self._dir_exists_read_metadata() - assert self._dir_exists - - if not self.metadata or not validate_account(self.metadata): - create_account_metadata(self.datadir) - self.metadata = _read_metadata(self.datadir) - - def is_status_deleted(self): - """ - Only returns true if the status field is set to DELETED. - """ - # This function should always return False. Accounts are not created - # and deleted, they exist if a Gluster volume can be mounted. There is - # no way to delete accounts, so this could never return True. - return False - - def initialize(self, timestamp): - """ - Create and write metatdata to directory/account. - :param metadata: Metadata to write. - """ - metadata = get_account_metadata(self.datadir) - metadata[X_TIMESTAMP] = timestamp - write_metadata(self.datadir, metadata) - self.metadata = metadata - - def update_put_timestamp(self, timestamp): - # Since accounts always exists at this point, just update the account - # PUT timestamp if this given timestamp is later than what we already - # know. - assert self._dir_exists - - if timestamp > self.metadata[X_PUT_TIMESTAMP][0]: - self.metadata[X_PUT_TIMESTAMP] = (timestamp, 0) - write_metadata(self.datadir, self.metadata) - - def delete_db(self, timestamp): - """ - Mark the account as deleted - - :param timestamp: delete timestamp - """ - # Deleting an account is a no-op, since accounts are one-to-one - # mappings to gluster volumes. - # - # FIXME: This means the caller will end up returning a success status - # code for an operation that really should not be allowed. Instead, we - # should modify the account server to not allow the DELETE method, and - # should probably modify the proxy account controller to not allow the - # DELETE method as well. - return - - def put_container(self, container, put_timestamp, del_timestamp, - object_count, bytes_used): - """ - Create a container with the given attributes. - - :param name: name of the container to create - :param put_timestamp: put_timestamp of the container to create - :param delete_timestamp: delete_timestamp of the container to create - :param object_count: number of objects in the container - :param bytes_used: number of bytes used by the container - """ - # NOOP - should never be called since container directory creation - # occurs from within the account directory implicitly. - return - - def _update_container_count(self): - containers, container_count = get_account_details(self.datadir) - - if X_CONTAINER_COUNT not in self.metadata \ - or int(self.metadata[X_CONTAINER_COUNT][0]) != container_count: - self.metadata[X_CONTAINER_COUNT] = (container_count, 0) - write_metadata(self.datadir, self.metadata) - - return containers - - def list_containers_iter(self, limit, marker, end_marker, - prefix, delimiter): - """ - Return tuple of name, object_count, bytes_used, 0(is_subdir). - Used by account server. - """ - if delimiter and not prefix: - prefix = '' - - account_list = [] - containers = self._update_container_count() - if containers: - containers.sort() - else: - return account_list - - if containers and end_marker: - containers = filter_end_marker(containers, end_marker) - - if containers: - if marker and marker >= prefix: - containers = filter_marker(containers, marker) - elif prefix: - containers = filter_prefix_as_marker(containers, prefix) - - if prefix is None: - # No prefix, we don't need to apply the other arguments, we just - # return what we have. - pass - else: - # We have a non-None (for all intents and purposes it is a string) - # prefix. 
- if not delimiter: - if not prefix: - # We have nothing more to do - pass - else: - containers = filter_prefix(containers, prefix) - else: - containers = filter_delimiter(containers, delimiter, prefix, - marker) - - count = 0 - for cont in containers: - list_item = [] - metadata = None - list_item.append(cont) - cont_path = os.path.join(self.datadir, cont) - metadata = _read_metadata(cont_path) - if not metadata or not validate_container(metadata): - try: - metadata = create_container_metadata(cont_path) - except OSError as e: - # FIXME - total hack to get upstream swift ported unit - # test cases working for now. - if e.errno != errno.ENOENT: - raise - if metadata: - list_item.append(metadata[X_OBJECTS_COUNT][0]) - list_item.append(metadata[X_BYTES_USED][0]) - list_item.append(0) - account_list.append(list_item) - count += 1 - if count >= limit: - break - - return account_list - - def get_info(self): - """ - Get global data for the account. - :returns: dict with keys: account, created_at, put_timestamp, - delete_timestamp, container_count, object_count, - bytes_used, hash, id - """ - if Glusterfs._account_update_container_count: - self._update_container_count() - - data = {'account': self.account, 'created_at': '1', - 'put_timestamp': '1', 'delete_timestamp': '1', - 'container_count': self.metadata.get( - X_CONTAINER_COUNT, (0, 0))[0], - 'object_count': self.metadata.get(X_OBJECTS_COUNT, (0, 0))[0], - 'bytes_used': self.metadata.get(X_BYTES_USED, (0, 0))[0], - 'hash': '', 'id': ''} - return data diff --git a/gluster/swift/common/Glusterfs.py b/gluster/swift/common/Glusterfs.py index 5d2cab1..5545ded 100644 --- a/gluster/swift/common/Glusterfs.py +++ b/gluster/swift/common/Glusterfs.py @@ -32,12 +32,7 @@ _fs_conf = ConfigParser() MOUNT_IP = 'localhost' RUN_DIR = '/var/run/swift' SWIFT_DIR = '/etc/swift' -_do_getsize = False _allow_mount_per_server = False -_implicit_dir_objects = False -_container_update_object_count = False -_account_update_container_count = False -_ignore_unsupported_headers = False if _fs_conf.read(os.path.join(SWIFT_DIR, 'fs.conf')): try: @@ -49,13 +44,6 @@ if _fs_conf.read(os.path.join(SWIFT_DIR, 'fs.conf')): except (NoSectionError, NoOptionError): pass - try: - _do_getsize = _fs_conf.get('DEFAULT', - 'accurate_size_in_listing', - "no") in TRUE_VALUES - except (NoSectionError, NoOptionError): - pass - try: _allow_mount_per_server = _fs_conf.get('DEFAULT', 'allow_mount_per_server', @@ -64,55 +52,6 @@ if _fs_conf.read(os.path.join(SWIFT_DIR, 'fs.conf')): except (NoSectionError, NoOptionError): pass - # -- Hidden configuration option -- - # Report gratuitously created directories as objects - # Directories can be gratuitously created on the path to a given - # object. This option turn on or off the reporting of those directories. 
- # It defaults to False so that only those directories explicitly - # created by the object server PUT REST API are reported - try: - _implicit_dir_objects = \ - _fs_conf.get('DEFAULT', - 'implicit_dir_objects', - "no") in TRUE_VALUES - except (NoSectionError, NoOptionError): - pass - - # -- Hidden configuration option -- - # Due to the impact on performance, this option is disabled by default - try: - _container_update_object_count = \ - _fs_conf.get('DEFAULT', - 'container_update_object_count', - "no") in TRUE_VALUES - except (NoSectionError, NoOptionError): - pass - - # -- Hidden configuration option -- - # Due to the impact on performance, this option is disabled by default - try: - _account_update_container_count = \ - _fs_conf.get('DEFAULT', - 'account_update_container_count', - "no") in TRUE_VALUES - except (NoSectionError, NoOptionError): - pass - - # -- Hidden configuration option -- - # Ignore unsupported headers and allow them in a request without - # returning a 400-BadRequest. This setting can be set to - # allow unsupported headers such as X-Delete-At and - # X-Delete-After even though they will not be used. - try: - _ignore_unsupported_headers = \ - _fs_conf.get('DEFAULT', - 'ignore_unsupported_headers', - "no") in TRUE_VALUES - except (NoSectionError, NoOptionError): - pass - -NAME = 'glusterfs' - def _busy_wait(full_mount_path): # Iterate for definite number of time over a given diff --git a/gluster/swift/common/constraints.py b/gluster/swift/common/constraints.py index 80616f2..7834fc7 100644 --- a/gluster/swift/common/constraints.py +++ b/gluster/swift/common/constraints.py @@ -14,86 +14,36 @@ # limitations under the License. import os -try: - from webob.exc import HTTPBadRequest -except ImportError: - from swift.common.swob import HTTPBadRequest +from swift.common.swob import HTTPBadRequest import swift.common.constraints -import swift.common.ring as _ring -from gluster.swift.common import Glusterfs, ring -MAX_OBJECT_NAME_COMPONENT_LENGTH = 255 -UNSUPPORTED_HEADERS = [] - - -def set_object_name_component_length(len=None): - global MAX_OBJECT_NAME_COMPONENT_LENGTH - - if len: - MAX_OBJECT_NAME_COMPONENT_LENGTH = len - elif hasattr(swift.common.constraints, 'constraints_conf_int'): - MAX_OBJECT_NAME_COMPONENT_LENGTH = \ - swift.common.constraints.constraints_conf_int( - 'max_object_name_component_length', 255) - else: - MAX_OBJECT_NAME_COMPONENT_LENGTH = 255 - return - -set_object_name_component_length() - - -def get_object_name_component_length(): - return MAX_OBJECT_NAME_COMPONENT_LENGTH +SOF_MAX_OBJECT_NAME_LENGTH = 221 +# Why 221 ? +# The longest filename supported by XFS in 255. +# http://lxr.free-electrons.com/source/fs/xfs/xfs_types.h#L125 +# SoF creates a temp file with following naming convention: +# .OBJECT_NAME. +# The random string is 32 character long and and file name has two dots. +# Hence 255 - 32 - 2 = 221 +# NOTE: This limitation can be sefely raised by having slashes in really long +# object name. Each segment between slashes ('/') should not exceed 221. def validate_obj_name_component(obj): if not obj: return 'cannot begin, end, or have contiguous %s\'s' % os.path.sep - if len(obj) > MAX_OBJECT_NAME_COMPONENT_LENGTH: + if len(obj) > SOF_MAX_OBJECT_NAME_LENGTH: return 'too long (%d)' % len(obj) if obj == '.' or obj == '..': return 'cannot be . or ..' 
return '' - -def validate_headers(req): - """ - Validate client header requests - :param req: Http request - """ - if not Glusterfs._ignore_unsupported_headers: - for unsupported_header in UNSUPPORTED_HEADERS: - if unsupported_header in req.headers: - return '%s headers are not supported' \ - % ','.join(UNSUPPORTED_HEADERS) - return '' - -# Save the original check object creation -__check_object_creation = swift.common.constraints.check_object_creation -__check_metadata = swift.common.constraints.check_metadata - - -def gluster_check_metadata(req, target_type, POST=True): - """ - :param req: HTTP request object - :param target_type: Value from POST passed to __check_metadata - :param POST: Only call __check_metadata on POST since Swift only - calls check_metadata on POSTs. - """ - ret = None - if POST: - ret = __check_metadata(req, target_type) - if ret is None: - bdy = validate_headers(req) - if bdy: - ret = HTTPBadRequest(body=bdy, - request=req, - content_type='text/plain') - return ret +# Store Swift's check_object_creation method to be invoked later +swift_check_object_creation = swift.common.constraints.check_object_creation # Define our new one which invokes the original -def gluster_check_object_creation(req, object_name): +def sof_check_object_creation(req, object_name): """ Check to ensure that everything is alright about an object to be created. Monkey patches swift.common.constraints.check_object_creation, invoking @@ -108,8 +58,10 @@ def gluster_check_object_creation(req, object_name): :raises HTTPBadRequest: missing or bad content-type header, or bad metadata """ - ret = __check_object_creation(req, object_name) + # Invoke Swift's method + ret = swift_check_object_creation(req, object_name) + # SoF's additional checks if ret is None: for obj in object_name.split(os.path.sep): reason = validate_obj_name_component(obj) @@ -119,20 +71,4 @@ def gluster_check_object_creation(req, object_name): ret = HTTPBadRequest(body=bdy, request=req, content_type='text/plain') - if ret is None: - ret = gluster_check_metadata(req, 'object', POST=False) - return ret - -# Replace the original checks with ours -swift.common.constraints.check_object_creation = gluster_check_object_creation -swift.common.constraints.check_metadata = gluster_check_metadata - -# Replace the original check mount with ours -swift.common.constraints.check_mount = Glusterfs.mount - -# Save the original Ring class -__Ring = _ring.Ring - -# Replace the original Ring class -_ring.Ring = ring.Ring diff --git a/gluster/swift/common/middleware/__init__.py b/gluster/swift/common/middleware/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/common/middleware/gswauth/.gitignore b/gluster/swift/common/middleware/gswauth/.gitignore deleted file mode 100644 index 0558c26..0000000 --- a/gluster/swift/common/middleware/gswauth/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.egg-info -*.py[co] -.DS_Store diff --git a/gluster/swift/common/middleware/gswauth/.unittests b/gluster/swift/common/middleware/gswauth/.unittests deleted file mode 100755 index 281ac03..0000000 --- a/gluster/swift/common/middleware/gswauth/.unittests +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -nosetests test_swauth/unit --exe --with-coverage --cover-package swauth --cover-erase -rm -f .coverage diff --git a/gluster/swift/common/middleware/gswauth/AUTHORS b/gluster/swift/common/middleware/gswauth/AUTHORS deleted file mode 100644 index 3687625..0000000 --- a/gluster/swift/common/middleware/gswauth/AUTHORS +++ /dev/null @@ -1,39 +0,0 
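The 221-byte per-component limit introduced in the constraints.py and swift.conf changes above comes from the arithmetic stated in their comments: XFS allows file names of at most 255 bytes, and swiftonfile's temp-file naming adds a 32-character random string plus two dots, leaving 255 - 32 - 2 = 221. The following standalone sketch illustrates that check under those stated assumptions; it is illustrative only, not the swiftonfile implementation.

~~~
# Rough standalone sketch of the per-component name check described
# above; illustrative only, not the swiftonfile module itself.
import os

XFS_MAX_FILENAME = 255          # longest file name XFS allows
TMP_SUFFIX_LEN = 32 + 2         # 32-char random string plus two dots
MAX_COMPONENT_LEN = XFS_MAX_FILENAME - TMP_SUFFIX_LEN   # = 221


def component_error(component):
    """Return an error string for a bad path component, else ''."""
    if not component:
        return "cannot begin, end, or have contiguous %s's" % os.path.sep
    if len(component) > MAX_COMPONENT_LEN:
        return 'too long (%d)' % len(component)
    if component in ('.', '..'):
        return 'cannot be . or ..'
    return ''


# A long object name is acceptable as long as every segment between
# slashes stays within the limit.
for part in 'a/very/deep/object/name'.split(os.path.sep):
    assert component_error(part) == ''
~~~

A component that fails this check is what causes the object-creation hook above to return a 400 Bad Request.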
@@ -Maintainer ----------- -Greg Holt - -Original Authors ----------------- -Chuck Thier -Greg Holt -Greg Lange -Jay Payne -John Dickinson -Michael Barton -Will Reese - -Contributors ------------- -Andrew Clay Shafer -Anne Gentle -Brian K. Jones -Caleb Tennis -Chmouel Boudjnah -Christian Schwede -Chris Wedgwood -Clay Gerrard -Colin Nicholson -Conrad Weidenkeller -Cory Wright -David Goetz -Ed Leafe -Fujita Tomonori -Kapil Thangavelu -Monty Taylor -Pablo Llopis -Paul Jimenez -Pete Zaitcev -Russ Nelson -Scott Simpson -Soren Hansen -Stephen Milton diff --git a/gluster/swift/common/middleware/gswauth/CHANGELOG b/gluster/swift/common/middleware/gswauth/CHANGELOG deleted file mode 100644 index 4b85c11..0000000 --- a/gluster/swift/common/middleware/gswauth/CHANGELOG +++ /dev/null @@ -1,62 +0,0 @@ -swauth (1.0.8) - - Added request.environ[reseller_request] = True if request is coming from an - user in .reseller_admin group - - Fixed to work with newer Swift versions whose memcache clients require a - time keyword argument when the older versions required a timeout keyword - argument. - -swauth (1.0.7) - - New X-Auth-Token-Lifetime header a user can set to how long they'd like - their token to be good for. - - New max_token_life config value for capping the above. - - New X-Auth-Token-Expires header returned with the get token request. - - Switchover to swift.common.swob instead of WebOb; requires Swift >= 1.7.6 - now. - -swauth (1.0.6) - - Apparently I haven't been keeping up with this CHANGELOG. I'll try to be - better onward. - - This release added passing OPTIONS requests through untouched, needed for - CORS support in Swift. - - Also, Swauth is a bit more restrictive in deciding when it's the definitive - auth for a request. - -swauth (1.0.3-dev) - - This release is still under development. A full change log will be made at - release. Until then, you can see what has changed with: - - git log 1.0.2..HEAD - -swauth (1.0.2) - - Fixed bug rejecting requests when using multiple instances of Swauth or - Swauth with other auth services. - - Fixed bug interpreting URL-encoded user names and keys. - - Added support for the Swift container sync feature. - - Allowed /not/ setting super_admin_key to disable Swauth administration - features. - - Added swauth_remote mode so the Swauth middleware for one Swift cluster - could be pointing to the Swauth service on another Swift cluster, sharing - account/user data sets. - - Added ability to purge stored tokens. - - Added API documentation for internal Swauth API. - -swauth (1.0.1) - - Initial release after separation from Swift. diff --git a/gluster/swift/common/middleware/gswauth/LICENSE b/gluster/swift/common/middleware/gswauth/LICENSE deleted file mode 100644 index 75b5248..0000000 --- a/gluster/swift/common/middleware/gswauth/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/gluster/swift/common/middleware/gswauth/MANIFEST.in b/gluster/swift/common/middleware/gswauth/MANIFEST.in deleted file mode 100644 index c73869e..0000000 --- a/gluster/swift/common/middleware/gswauth/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include AUTHORS LICENSE README.md .unittests test_swauth/__init__.py -include CHANGELOG -graft doc -graft etc diff --git a/gluster/swift/common/middleware/gswauth/README.md b/gluster/swift/common/middleware/gswauth/README.md deleted file mode 100644 index 2ccd199..0000000 --- a/gluster/swift/common/middleware/gswauth/README.md +++ /dev/null @@ -1,71 +0,0 @@ -Swauth ------- - -An Auth Service for Swift as WSGI Middleware that uses Swift itself as a -backing store. Sphinx-built docs at: - -See also for the standard OpenStack -auth service. - - -NOTE ----- - -**Be sure to review the Sphinx-built docs at: -** - - -Quick Install -------------- - -1) Install Swauth with ``sudo python setup.py install`` or ``sudo python - setup.py develop`` or via whatever packaging system you may be using. - -2) Alter your proxy-server.conf pipeline to have swauth instead of tempauth: - - Was: - - [pipeline:main] - pipeline = catch_errors cache tempauth proxy-server - - Change To: - - [pipeline:main] - pipeline = catch_errors cache swauth proxy-server - -3) Add to your proxy-server.conf the section for the Swauth WSGI filter: - - [filter:swauth] - use = egg:swauth#swauth - set log_name = swauth - super_admin_key = swauthkey - -4) Be sure your proxy server allows account management: - - [app:proxy-server] - ... - allow_account_management = true - -5) Restart your proxy server ``swift-init proxy reload`` - -6) Initialize the Swauth backing store in Swift ``swauth-prep -K swauthkey`` - -7) Add an account/user ``swauth-add-user -A http://127.0.0.1:8080/auth/ -K - swauthkey -a test tester testing`` - -8) Ensure it works ``swift -A http://127.0.0.1:8080/auth/v1.0 -U test:tester -K - testing stat -v`` - - -Web Admin Install ------------------ - -1) If you installed from packages, you'll need to cd to the webadmin directory - the package installed. This is ``/usr/share/doc/python-swauth/webadmin`` - with the Lucid packages. If you installed from source, you'll need to cd to - the webadmin directory in the source directory. - -2) Upload the Web Admin files with ``swift -A http://127.0.0.1:8080/auth/v1.0 - -U .super_admin:.super_admin -K swauthkey upload .webadmin .`` - -3) Open ``http://127.0.0.1:8080/auth/`` in your browser. diff --git a/gluster/swift/common/middleware/gswauth/__init__.py b/gluster/swift/common/middleware/gswauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/common/middleware/gswauth/babel.cfg b/gluster/swift/common/middleware/gswauth/babel.cfg deleted file mode 100644 index 15cd6cb..0000000 --- a/gluster/swift/common/middleware/gswauth/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-add-account b/gluster/swift/common/middleware/gswauth/bin/gswauth-add-account deleted file mode 100755 index d3fd243..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-add-account +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import gettext -import socket - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage='Usage: %prog [options] ') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/)') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', help='The user with admin rights to add ' - 'accounts (default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 1: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - account = args[0] - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/%s' % (parsed_path, account) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key, - 'Content-Length': '0'} - try: - conn = http_connect(parsed.hostname, parsed.port, 'PUT', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('Account creation failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('Account creation failed: %s. ' \ - 'Check that the admin_url is valid' % msg) - - if resp.status // 100 != 2: - if resp.status == 401: - exit('Account creation failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('Account creation failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - else: - exit('Account creation failed: %s %s' % - (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-add-user b/gluster/swift/common/middleware/gswauth/bin/gswauth-add-user deleted file mode 100755 index 0eb2e9c..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-add-user +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import gettext -import socket - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser( - usage='Usage: %prog [options] ') - parser.add_option('-a', '--admin', dest='admin', action='store_true', - default=False, help='Give the user administrator access; otherwise ' - 'the user will only have access to containers specifically allowed ' - 'with ACLs.') - parser.add_option('-r', '--reseller-admin', dest='reseller_admin', - action='store_true', default=False, help='Give the user full reseller ' - 'administrator access, giving them full access to all accounts within ' - 'the reseller, including the ability to create new accounts. Creating ' - 'a new reseller admin requires super_admin rights.') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', help='The user with admin rights to add users ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights to add users is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 3: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - account, user, password = args - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - - # Check if user is changing his own password. This is carried out by - # making sure that the user changing the password and the user whose - # password is being changed are the same. - # If not, ensure that the account exists before creating new user. - if not options.admin_user == (account + ':' + user): - # GET the account - path = '%sv2/%s' % (parsed_path, account) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'GET', path, - headers, ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - if resp.status // 100 != 2: - # If the GET operation fails, it means the account does not - # exist. Now we create the account by sending a PUT request. - headers['Content-Length'] = '0' - conn = http_connect(parsed.hostname, parsed.port, 'PUT', path, - headers, ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - if resp.status // 100 != 2: - print 'Account creation failed: %s %s' % \ - (resp.status, resp.reason) - except socket.gaierror, err: - exit('User creation failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('User creation failed: %s. 
' \ - 'Check that the admin_url is valid' % msg) - - # Add the user - path = '%sv2/%s/%s' % (parsed_path, account, user) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key, - 'X-Auth-User-Key': password, - 'Content-Length': '0'} - if options.admin: - headers['X-Auth-User-Admin'] = 'true' - if options.reseller_admin: - headers['X-Auth-User-Reseller-Admin'] = 'true' - try: - conn = http_connect(parsed.hostname, parsed.port, 'PUT', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('User creation failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('User creation failed: %s. ' \ - 'Check that the admin_url is valid' % msg) - - if resp.status // 100 != 2: - if resp.status == 401: - exit('User creation failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('User creation failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - else: - exit('User creation failed: %s %s' % - (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-cleanup-tokens b/gluster/swift/common/middleware/gswauth/bin/gswauth-cleanup-tokens deleted file mode 100755 index f73c8b1..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-cleanup-tokens +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -import gettext -import re -import socket - -from datetime import datetime, timedelta -from optparse import OptionParser -from sys import argv, exit -from time import sleep, time - -from swiftclient.client import Connection, ClientException - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage='Usage: %prog [options]') - parser.add_option('-t', '--token-life', dest='token_life', - default='86400', help='The expected life of tokens; token objects ' - 'modified more than this number of seconds ago will be checked for ' - 'expiration (default: 86400).') - parser.add_option('-s', '--sleep', dest='sleep', - default='0.1', help='The number of seconds to sleep between token ' - 'checks (default: 0.1)') - parser.add_option('-v', '--verbose', dest='verbose', action='store_true', - default=False, help='Outputs everything done instead of just the ' - 'deletions.') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/)') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for .super_admin is required.') - parser.add_option('', '--purge', dest='purge_account', help='Purges all ' - 'tokens for a given account whether the tokens have expired or not.' - ' Memcached restart is recommended. 
Old tokens may still persist in' - ' memcached.') - parser.add_option('', '--purge-all', dest='purge_all', action='store_true', - default=False, help='Purges all tokens for all accounts and users ' - 'whether the tokens have expired or not.' - ' Memcached restart is recommended. Old tokens may still persist in' - ' memcached.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 0: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - - options.admin_url = options.admin_url.rstrip('/') - if not options.admin_url.endswith('/v1.0'): - options.admin_url += '/v1.0' - options.admin_user = '.super_admin:.super_admin' - - try: - options.token_life = timedelta(0, float(options.token_life)) - options.sleep = float(options.sleep) - except ValueError: - parser.parse_args(['-h']) - - conn = Connection(options.admin_url, options.admin_user, options.admin_key) - if options.purge_account: - marker = None - while True: - if options.verbose: - print 'GET %s?marker=%s' % (options.purge_account, marker) - try: - objs = conn.get_container(options.purge_account, - marker=marker)[1] - except ClientException, e: - if e.http_status == 404: - exit('Account %s not found.' % (options.purge_account)) - elif e.http_status == 401: - exit('Cleanup tokens failed: 401 Unauthorized: ' \ - 'Invalid user/key provided') - else: - exit('Purging %s failed with status ' - 'code %d' % (options.purge_account, e.http_status)) - except socket.error, (errno, msg): - exit('Token clean-up failed: %s. ' \ - 'Check that the admin_url is valid' % msg) - if objs: - marker = objs[-1]['name'] - else: - if options.verbose: - print 'No more objects in %s' % options.purge_account - break - for obj in objs: - if options.verbose: - print 'HEAD %s/%s' % (options.purge_account, obj['name']) - headers = conn.head_object(options.purge_account, obj['name']) - if 'x-object-meta-auth-token' in headers: - token = headers['x-object-meta-auth-token'] - container = '.token_%s' % token[-1] - if options.verbose: - print '%s/%s purge account %r; deleting' % \ - (container, token, options.purge_account) - print 'DELETE %s/%s' % (container, token) - try: - conn.delete_object(container, token) - except ClientException, err: - if err.http_status != 404: - raise - continue - if options.verbose: - print 'Done.' - exit(0) - for x in xrange(16): - container = '.token_%x' % x - marker = None - while True: - if options.verbose: - print 'GET %s?marker=%s' % (container, marker) - try: - objs = conn.get_container(container, marker=marker)[1] - except ClientException, e: - if e.http_status == 404: - exit('Container %s not found. gswauth-prep needs to be ' - 'rerun' % (container)) - elif e.http_status == 401: - exit('Cleanup tokens failed: 401 Unauthorized: ' \ - 'Invalid user/key provided') - else: - exit('Object listing on container %s failed with status ' - 'code %d' % (container, e.http_status)) - except socket.error, (errno, msg): - exit('Token clean-up failed: %s. 
' \ - 'Check that the admin_url is valid' % msg) - - if objs: - marker = objs[-1]['name'] - else: - if options.verbose: - print 'No more objects in %s' % container - break - for obj in objs: - if options.purge_all: - if options.verbose: - print '%s/%s purge all; deleting' % \ - (container, obj['name']) - print 'DELETE %s/%s' % (container, obj['name']) - try: - conn.delete_object(container, obj['name']) - except ClientException, err: - if err.http_status != 404: - raise - continue - last_modified = datetime(*map(int, re.split('[^\d]', - obj['last_modified'])[:-1])) - ago = datetime.utcnow() - last_modified - if ago > options.token_life: - if options.verbose: - print '%s/%s last modified %ss ago; investigating' % \ - (container, obj['name'], - ago.days * 86400 + ago.seconds) - print 'GET %s/%s' % (container, obj['name']) - detail = conn.get_object(container, obj['name'])[1] - detail = json.loads(detail) - if detail['expires'] < time(): - if options.verbose: - print '%s/%s expired %ds ago; deleting' % \ - (container, obj['name'], - time() - detail['expires']) - print 'DELETE %s/%s' % (container, obj['name']) - try: - conn.delete_object(container, obj['name']) - except ClientException, e: - if e.http_status != 404: - print 'DELETE of %s/%s failed with status ' \ - 'code %d' % (container, obj['name'], - e.http_status) - elif options.verbose: - print "%s/%s won't expire for %ds; skipping" % \ - (container, obj['name'], - detail['expires'] - time()) - elif options.verbose: - print '%s/%s last modified %ss ago; skipping' % \ - (container, obj['name'], - ago.days * 86400 + ago.seconds) - sleep(options.sleep) - if options.verbose: - print 'Done.' - print 'Recommended to restart memcached as old invalid tokens may' \ - ' still persist in memcached.' diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-account b/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-account deleted file mode 100755 index 204bb95..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-account +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import gettext -import socket - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage='Usage: %prog [options] ') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', - help='The user with admin rights to delete accounts ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights to delete accounts ' - 'is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 1: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - account = args[0] - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/%s' % (parsed_path, account) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'DELETE', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('Account deletion failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('Account deletion failed: %s. ' \ - 'Check that the admin_url is valid' % msg) - - if resp.status // 100 != 2: - if resp.status == 401: - exit('Delete account failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('Delete account failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - elif resp.status == 404: - exit('Delete account failed: %s %s: Account %s does not exist' % - (resp.status, resp.reason, account)) - elif resp.status == 409: - exit('Delete account failed: %s %s: Account %s contains active users. ' - 'Delete all users first.' % (resp.status, resp.reason, account)) - else: - exit('Delete account failed: %s %s' % (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-user b/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-user deleted file mode 100755 index 5e56a79..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-delete-user +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import gettext -import socket - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage='Usage: %prog [options] ') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', - help='The user with admin rights to delete users ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights to delete ' - 'users is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 2: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - account, user = args - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/%s/%s' % (parsed_path, account, user) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'DELETE', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('User deletion failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('User deletion failed: %s. ' \ - 'Check that the admin_url is valid' % msg) - - if resp.status // 100 != 2: - if resp.status == 401: - exit('Delete user failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('Delete user failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - elif resp.status == 404: - exit('Delete user failed: %s %s: User %s does not exist' % - (resp.status, resp.reason, user)) - else: - exit('Delete user failed: %s %s' % (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-list b/gluster/swift/common/middleware/gswauth/bin/gswauth-list deleted file mode 100755 index 50b7936..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-list +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -try: - import simplejson as json -except ImportError: - import json -import gettext -import socket -import types - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - -from prettytable import PrettyTable - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage=''' -Usage: %prog [options] [account] [user] - -If [account] and [user] are omitted, a list of accounts will be output. - -If [account] is included but not [user], a list of users within the account -will be output. - -If [account] and [user] are included, a list of groups the user belongs to -will be ouptput. - -If the [user] is '.groups', the active groups for the account will be listed. -'''.strip()) - parser.add_option('-p', '--plain-text', dest='plain_text', - action='store_true', default=False, help='Changes the output from ' - 'JSON to plain text. This will cause an account to list only the ' - 'users and a user to list only the groups.') - parser.add_option('-j', '--json', dest='json_format', - action='store_true', default=False, help='Output in JSON format. ' - 'This will print all information about given account or user, ' - 'including stored password.') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', help='The user with admin rights ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) > 2: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/%s' % (parsed_path, '/'.join(args)) - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'GET', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('List failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('List failed: %s. 
' \ - 'Check that the admin_url is valid' % msg) - - body = resp.read() - if resp.status // 100 != 2: - if resp.status == 401: - exit('List failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('List failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - else: - exit('List failed: %s %s' % (resp.status, resp.reason)) - if options.plain_text: - info = json.loads(body) - for group in info[['accounts', 'users', 'groups'][len(args)]]: - print group['name'] - elif options.json_format: - print body - else: - info = json.loads(body) - h = ['accounts', 'users', 'groups'][len(args)] - table = PrettyTable([h.title()]) - for group in info[h]: - table.add_row([group['name']]) - print table diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-prep b/gluster/swift/common/middleware/gswauth/bin/gswauth-prep deleted file mode 100755 index a73aa1d..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-prep +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import gettext -import socket - -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage='Usage: %prog [options]') - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', help='The user with admin rights ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if args: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/.prep' % parsed_path - headers = {'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'POST', path, headers, - ssl=(parsed.scheme == 'https')) - resp = conn.getresponse() - except socket.gaierror, err: - exit('gswauth preparation failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('gswauth preparation failed: %s. 
' \ - 'Check that the admin_url is valid' % msg) - - if resp.status // 100 != 2: - if resp.status == 401: - exit('gswauth preparation failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - else: - exit('gswauth preparation failed: %s %s' % - (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/bin/gswauth-set-account-service b/gluster/swift/common/middleware/gswauth/bin/gswauth-set-account-service deleted file mode 100755 index 426ce3c..0000000 --- a/gluster/swift/common/middleware/gswauth/bin/gswauth-set-account-service +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -import gettext -import socket -from optparse import OptionParser -from sys import argv, exit - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.utils import urlparse - - -if __name__ == '__main__': - gettext.install('gswauth', unicode=1) - parser = OptionParser(usage=''' -Usage: %prog [options] - -Sets a service URL for an account. Can only be set by a reseller admin. - -Example: %prog -K gswauthkey test storage local http://127.0.0.1:8080/v1/AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162 -'''.strip()) - parser.add_option('-A', '--admin-url', dest='admin_url', - default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' - 'subsystem (default: http://127.0.0.1:8080/auth/)') - parser.add_option('-U', '--admin-user', dest='admin_user', - default='.super_admin', help='The user with admin rights ' - '(default: .super_admin).') - parser.add_option('-K', '--admin-key', dest='admin_key', - help='The key for the user with admin rights is required.') - args = argv[1:] - if not args: - args.append('-h') - (options, args) = parser.parse_args(args) - if len(args) != 4: - parser.parse_args(['-h']) - if options.admin_key is None: - parser.parse_args(['-h']) - account, service, name, url = args - parsed = urlparse(options.admin_url) - if parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (parsed.scheme, repr(options.admin_url))) - parsed_path = parsed.path - if not parsed_path: - parsed_path = '/' - elif parsed_path[-1] != '/': - parsed_path += '/' - path = '%sv2/%s/.services' % (parsed_path, account) - body = json.dumps({service: {name: url}}) - headers = {'Content-Length': str(len(body)), - 'X-Auth-Admin-User': options.admin_user, - 'X-Auth-Admin-Key': options.admin_key} - try: - conn = http_connect(parsed.hostname, parsed.port, 'POST', path, headers, - ssl=(parsed.scheme == 'https')) - conn.send(body) - resp = conn.getresponse() - except socket.gaierror, err: - exit('Service set failed: %s. ' \ - 'Check that the admin_url is valid' % err) - except socket.error, (errno, msg): - exit('Service set failed: %s. 
' \ - 'Check that the admin_url is valid' % msg) - if resp.status // 100 != 2: - if resp.status == 401: - exit('Service set failed: %s %s: Invalid user/key provided' % - (resp.status, resp.reason)) - elif resp.status == 403: - exit('Service set failed: %s %s: Insufficient privileges' % - (resp.status, resp.reason)) - else: - exit('Service set failed: %s %s' % (resp.status, resp.reason)) diff --git a/gluster/swift/common/middleware/gswauth/doc/source/_static/.empty b/gluster/swift/common/middleware/gswauth/doc/source/_static/.empty deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/common/middleware/gswauth/doc/source/_templates/.empty b/gluster/swift/common/middleware/gswauth/doc/source/_templates/.empty deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/common/middleware/gswauth/doc/source/api.rst b/gluster/swift/common/middleware/gswauth/doc/source/api.rst deleted file mode 100644 index d2efa0f..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/api.rst +++ /dev/null @@ -1,466 +0,0 @@ -.. _api_top: - ----------- -Swauth API ----------- - -Overview -======== - -Swauth has its own internal versioned REST API for adding, removing, -and editing accounts. This document explains the v2 API. - -Authentication --------------- - -Each REST request against the swauth API requires the inclusion of a -specific authorization user and key to be passed in a specific HTTP -header. These headers are defined as ``X-Auth-Admin-User`` and -``X-Auth-Admin-Key``. - -Typically, these values are ``.super_admin`` (the site super admin -user) with the key being specified in the swauth middleware -configuration as ``super_admin_key``. - -This could also be a reseller admin with the appropriate rights to -perform actions on reseller accounts. - -Endpoints ---------- - -The swauth API endpoint is presented on the proxy servers, in the -"/auth" namespace. In addition, the API is versioned, and the version -documented is version 2. API versions subdivide the auth namespace by -version, specified as a version identifier like "v2". - -The auth endpoint described herein is therefore located at "/auth/v2/" -as presented by the proxy servers. - -Bear in mind that in order for the auth management API to be -presented, it must be enabled in the proxy server config by setting -``allow_account_managment`` to ``true`` in the ``[app:proxy-server]`` -stanza of your proxy-server.conf. - -Responses ---------- - -Responses from the auth APIs are returned as a JSON structure. -Example return values in this document are edited for readability. - - -Reseller/Admin Services -======================= - -Operations can be performed against the endpoint itself to perform -general administrative operations. Currently, the only operations -that can be performed is a GET operation to get reseller or site admin -information. - -Get Admin Info --------------- - -A GET request at the swauth endpoint will return reseller information -for the account specified in the ``X-Auth-Admin-User`` header. -Currently, the information returned is limited to a list of accounts -for the reseller or site admin. 
- -Valid return codes: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 5xx: Internal error - -Example Request:: - - GET /auth// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -D - https:///auth/v2/ \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Result:: - - HTTP/1.1 200 OK - - { "accounts": - [ - { "name": "account1" }, - { "name": "account2" }, - { "name": "account3" } - ] - } - - -Account Services -================ - -There are API request to get account details, create, and delete -accounts, mapping logically to the REST verbs GET, PUT, and DELETE. -These actions are performed against an account URI, in the following -general request structure:: - - METHOD /auth// HTTP/1.1 - -The methods that can be used are detailed below. - -Get Account Details -------------------- - -Account details can be retrieved by performing a GET request against -an account URI. On success, a JSON dictionary will be returned -containing the keys `account_id`, `services`, and `users`. The -`account_id` is the value used when creating service accounts. The -`services` value is a dict that represents valid storage cluster -endpoints, and which endpoint is the default. The 'users' value is a -list of dicts, each dict representing a user and currently only -containing the single key 'name'. - -Valid Responses: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 5xx: Internal error - -Example Request:: - - GET /auth// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -D - https:///auth/v2/ \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 200 OK - - { "services": - { "storage": - { "default": "local", - "local": "https:///v1/" }, - }, - "account_id": "", - "users": [ { "name": "user1" }, - { "name": "user2" } ] - } - -Create Account --------------- - -An account can be created with a PUT request against a non-existent -account. By default, a newly created UUID4 will be used with the -reseller prefix as the account ID used when creating corresponding -service accounts. However, you can provide an X-Account-Suffix header -to replace the UUDI4 part. - -Valid return codes: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 5xx: Internal error - -Example Request:: - - GET /auth// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -D - https:///auth/v2/ \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 201 Created - - -Delete Account --------------- - -An account can be deleted with a DELETE request against an existing -account. - -Valid Responses: - * 204: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 404: Account not found - * 5xx: Internal error - -Example Request:: - - DELETE /auth// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -XDELETE -D - https:///auth/v2/ \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 204 No Content - - -User Services -============= - -Each account in swauth contains zero or more users. These users can -be determined with the 'Get Account Details' API request against an -account. 
- -Users in an account can be created, modified, and detailed as -described below by apply the appropriate REST verbs to a user URI, in -the following general request structure:: - - METHOD /auth/// HTTP/1.1 - -The methods that can be used are detailed below. - -Get User Details ----------------- - -User details can be retrieved by performing a GET request against -a user URI. On success, a JSON dictionary will be returned as -described:: - - {"groups": [ # List of groups the user is a member of - {"name": ":"}, - # The first group is a unique user identifier - {"name": ""}, - # The second group is the auth account name - {"name": ""} - # There may be additional groups, .admin being a - # special group indicating an account admin and - # .reseller_admin indicating a reseller admin. - ], - "auth": ":" - # The auth-type and key for the user; currently only - # plaintext and sha1 are implemented as auth types. - } - -For example:: - - {"groups": [{"name": "test:tester"}, {"name": "test"}, - {"name": ".admin"}], - "auth": "plaintext:testing"} - -Valid Responses: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 404: Unknown account - * 5xx: Internal error - -Example Request:: - - GET /auth/// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -D - https:///auth/v2// \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 200 Ok - - { "groups": [ { "name": ":" }, - { "name": "" }, - { "name": ".admin" } ], - "auth" : "plaintext:password" } - - -Create User ------------ - -A user can be created with a PUT request against a non-existent -user URI. The new user's password must be set using the -``X-Auth-User-Key`` header. The user name MUST NOT start with a -period ('.'). This requirement is enforced by the API, and will -result in a 400 error. - -Optional Headers: - - * ``X-Auth-User-Admin: true``: create the user as an account admin - * ``X-Auth-User-Reseller-Admin: true``: create the user as a reseller - admin - -Reseller admin accounts can only be created by the site admin, while -regular accounts (or account admin accounts) can be created by an -account admin, an appropriate reseller admin, or the site admin. - -Note that PUT requests are idempotent, and the PUT request serves as -both a request and modify action. - -Valid Responses: - * 200: Success - * 400: Invalid request (missing required headers) - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key, or insufficient priv - * 404: Unknown account - * 5xx: Internal error - -Example Request:: - - PUT /auth/// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - X-Auth-User-Admin: true - X-Auth-User-Key: secret - -Example Curl Request:: - - curl -XPUT -D - https:///auth/v2// \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" \ - -H "X-Auth-User-Admin: true" \ - -H "X-Auth-User-Key: secret" - -Example Response:: - - HTTP/1.1 201 Created - -Delete User ------------ - -A user can be deleted by performing a DELETE request against a user -URI. This action can only be performed by an account admin, -appropriate reseller admin, or site admin. 
- -Valid Responses: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key, or insufficient priv - * 404: Unknown account or user - * 5xx: Internal error - -Example Request:: - - DELETE /auth/// HTTP/1.1 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -XDELETE -D - https:///auth/v2// \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 204 No Content - - -Other Services -============== - -There are several other swauth functions that can be performed, mostly -done via "pseudo-user" accounts. These are well-known user names that -cannot actually be provisioned. These pseudo-users are -described below. - -.. _api_set_service_endpoints: - -Set Service Endpoints ---------------------- - -Service endpoint information can be retrieved using the _`Get Account -Details` API method. - -This function allows setting values within this section for -the , allowing the addition of new service end points -or updating existing ones by performing a POST to the URI -corresponding to the pseudo-user ".services". - -The body of the POST request should contain a JSON dict with -the following format:: - - {"service_name": {"end_point_name": "end_point_value"}} - -There can be multiple services and multiple end points in the -same call. - -Any new services or end points will be added to the existing -set of services and end points. Any existing services with the -same service name will be merged with the new end points. Any -existing end points with the same end point name will have -their values updated. - -The updated services dictionary will be returned on success. - -Valid Responses: - - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 404: Account not found - * 5xx: Internal error - -Example Request:: - - POST /auth///.services HTTP/1.0 - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - - {"storage": { "local": "" }} - -Example Curl Request:: - - curl -XPOST -D - https:///auth/v2//.services \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" --data-binary \ - '{ "storage": { "local": "" }}' - -Example Response:: - - HTTP/1.1 200 OK - - {"storage": {"default": "local", "local": "" }} - -Get Account Groups ------------------- - -Individual user group information can be retrieved using the `Get User Details`_ API method. - -This function allows retrieving all group information for all users in -an existing account. This can be achieved using a GET action against -a user URI with the pseudo-user ".groups". - -The JSON dictionary returned will be a "groups" dictionary similar to -that documented in the `Get User Details`_ method, but representing -the summary of all groups utilized by all active users in the account.
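The ``.groups`` listing just described is an ordinary GET as well. A minimal Python 2 sketch follows, with the account name and endpoint as illustrative placeholders::

    import httplib
    import json

    ADMIN = {'X-Auth-Admin-User': '.super_admin',
             'X-Auth-Admin-Key': 'swauthkey'}

    # List every group in use across all users of the hypothetical
    # account "myaccount" via the ".groups" pseudo-user.
    conn = httplib.HTTPConnection('127.0.0.1', 8080)
    conn.request('GET', '/auth/v2/myaccount/.groups', '', ADMIN)
    resp = conn.getresponse()
    print resp.status
    for group in json.loads(resp.read())['groups']:
        print group['name']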
- -Valid Responses: - * 200: Success - * 403: Invalid X-Auth-Admin-User/X-Auth-Admin-Key - * 404: Account not found - * 5xx: Internal error - -Example Request:: - - GET /auth///.groups - X-Auth-Admin-User: .super_admin - X-Auth-Admin-Key: swauthkey - -Example Curl Request:: - - curl -D - https:///auth/v2//.groups \ - -H "X-Auth-Admin-User: .super_admin" \ - -H "X-Auth-Admin-Key: swauthkey" - -Example Response:: - - HTTP/1.1 200 OK - - { "groups": [ { "name": ".admin" }, - { "name": "" }, - { "name": ":user1" }, - { "name": ":user2" } ] } - diff --git a/gluster/swift/common/middleware/gswauth/doc/source/authtypes.rst b/gluster/swift/common/middleware/gswauth/doc/source/authtypes.rst deleted file mode 100644 index a19ee22..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/authtypes.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _swauth_authtypes_module: - -swauth.authtypes -================= - -.. automodule:: swauth.authtypes - :members: - :undoc-members: - :show-inheritance: - :noindex: diff --git a/gluster/swift/common/middleware/gswauth/doc/source/conf.py b/gluster/swift/common/middleware/gswauth/doc/source/conf.py deleted file mode 100644 index ab0645a..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/conf.py +++ /dev/null @@ -1,233 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# Swauth documentation build configuration file, created by -# sphinx-quickstart on Mon Feb 14 19:34:51 2011. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -import swauth - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'Swauth' -copyright = u'2010-2011, OpenStack, LLC' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '.'.join(str(v) for v in swauth.version_info[:2]) -# The full version, including alpha/beta/rc tags. -release = swauth.version - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. 
-#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'Swauthdoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'Swauth.tex', u'Swauth Documentation', - u'OpenStack, LLC', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'swauth', u'Swauth Documentation', - [u'OpenStack, LLC'], 1) -] diff --git a/gluster/swift/common/middleware/gswauth/doc/source/details.rst b/gluster/swift/common/middleware/gswauth/doc/source/details.rst deleted file mode 100644 index 3b14ad8..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/details.rst +++ /dev/null @@ -1,159 +0,0 @@ ----------------------- -Implementation Details ----------------------- - -The Swauth system is a scalable authentication and authorization system that -uses Swift itself as its backing store. This section will describe how it -stores its data. - -.. note:: - - You can access Swauth's internal .auth account by using the account:user of - .super_admin:.super_admin and the super admin key you have set in your - configuration. Here's an example using `st` on a standard SAIO: ``st -A - http://127.0.0.1:8080/auth/v1.0 -U .super_admin:.super_admin -K swauthkey - stat`` - -At the topmost level, the auth system has its own Swift account it stores its -own account information within. This Swift account is known as -self.auth_account in the code and its name is in the format -self.reseller_prefix + ".auth". In this text, we'll refer to this account as -. - -The containers whose names do not begin with a period represent the accounts -within the auth service. 
For example, the /test container would -represent the "test" account. - -The objects within each container represent the users for that auth service -account. For example, the /test/bob object would represent the -user "bob" within the auth service account of "test". Each of these user -objects contain a JSON dictionary of the format:: - - {"auth": ":", "groups": } - -The `` specifies how the user key is encoded. The default is `plaintext`, -which saves the user's key in plaintext in the `` field. -The value `sha1` is supported as well, which stores the user's key as a salted -SHA1 hash. Note that using a one-way hash like SHA1 will likely inhibit future use of key-signing request types, assuming such support is added. The `` can be specified in the swauth section of the proxy server's -config file, along with the salt value in the following way:: - - auth_type = - auth_type_salt = - -Both fields are optional. auth_type defaults to `plaintext` and auth_type_salt defaults to "swauthsalt". Additional auth types can be implemented along with existing ones in the authtypes.py module. - -The `` contains at least two groups. The first is a unique group -identifying that user and it's name is of the format `:`. The -second group is the `` itself. Additional groups of `.admin` for -account administrators and `.reseller_admin` for reseller administrators may -exist. Here's an example user JSON dictionary:: - - {"auth": "plaintext:testing", - "groups": ["name": "test:tester", "name": "test", "name": ".admin"]} - -To map an auth service account to a Swift storage account, the Service Account -Id string is stored in the `X-Container-Meta-Account-Id` header for the -/ container. To map back the other way, an -/.account_id/ object is created with the contents of -the corresponding auth service's account name. - -Also, to support a future where the auth service will support multiple Swift -clusters or even multiple services for the same auth service account, an -//.services object is created with its contents having a -JSON dictionary of the format:: - - {"storage": {"default": "local", "local": }} - -The "default" is always "local" right now, and "local" is always the single -Swift cluster URL; but in the future there can be more than one cluster with -various names instead of just "local", and the "default" key's value will -contain the primary cluster to use for that account. Also, there may be more -services in addition to the current "storage" service right now. - -Here's an example .services dictionary at the moment:: - - {"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}} - -But, here's an example of what the dictionary may look like in the future:: - - {"storage": - {"default": "dfw", - "dfw": "http://dfw.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", - "ord": "http://ord.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", - "sat": "http://ord.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}, - "servers": - {"default": "dfw", - "dfw": "http://dfw.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", - "ord": "http://ord.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", - "sat": "http://ord.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}} - -Lastly, the tokens themselves are stored as objects in the -`/.token_[0-f]` containers. The names of the objects are the -token strings themselves, such as `AUTH_tked86bbd01864458aa2bd746879438d5a`. 
-The exact `.token_[0-f]` container chosen is based on the final digit of the -token name, such as `.token_a` for the token -`AUTH_tked86bbd01864458aa2bd746879438d5a`. The contents of the token objects -are JSON dictionaries of the format:: - - {"account": , - "user": , - "account_id": , - "groups": , - "expires": } - -The `` is the auth service account's name for that token. The `` -is the user within the account for that token. The `` is the -same as the `X-Container-Meta-Account-Id` for the auth service's account, -as described above. The `` is the user's groups, as described -above with the user object. The "expires" value indicates when the token is no -longer valid, as compared to Python's time.time() value. - -Here's an example token object's JSON dictionary:: - - {"account": "test", - "user": "tester", - "account_id": "AUTH_8980f74b1cda41e483cbe0a925f448a9", - "groups": ["name": "test:tester", "name": "test", "name": ".admin"], - "expires": 1291273147.1624689} - -To easily map a user to an already issued token, the token name is stored in -the user object's `X-Object-Meta-Auth-Token` header. - -Here is an example full listing of an :: - - .account_id - AUTH_2282f516-559f-4966-b239-b5c88829e927 - AUTH_f6f57a3c-33b5-4e85-95a5-a801e67505c8 - AUTH_fea96a36-c177-4ca4-8c7e-b8c715d9d37b - .token_0 - .token_1 - .token_2 - .token_3 - .token_4 - .token_5 - .token_6 - AUTH_tk9d2941b13d524b268367116ef956dee6 - .token_7 - .token_8 - AUTH_tk93627c6324c64f78be746f1e6a4e3f98 - .token_9 - .token_a - .token_b - .token_c - .token_d - .token_e - AUTH_tk0d37d286af2c43ffad06e99112b3ec4e - .token_f - AUTH_tk766bbde93771489982d8dc76979d11cf - reseller - .services - reseller - test - .services - tester - tester3 - test2 - .services - tester2 diff --git a/gluster/swift/common/middleware/gswauth/doc/source/index.rst b/gluster/swift/common/middleware/gswauth/doc/source/index.rst deleted file mode 100644 index 87b22d5..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/index.rst +++ /dev/null @@ -1,142 +0,0 @@ -.. Swauth documentation master file, created by - sphinx-quickstart on Mon Feb 14 19:34:51 2011. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Swauth -====== - - Copyright (c) 2010-2012 OpenStack, LLC - - An Auth Service for Swift as WSGI Middleware that uses Swift itself as a - backing store. Sphinx-built docs at: http://gholt.github.com/swauth/ - Source available at: https://github.com/gholt/swauth - - See also https://github.com/openstack/keystone for the standard OpenStack - auth service. - -Overview --------- - -Before discussing how to install Swauth within a Swift system, it might help to understand how Swauth does it work first. - -1. Swauth is middleware installed in the Swift Proxy's WSGI pipeline. - -2. It intercepts requests to ``/auth/`` (by default). - -3. It also uses Swift's `authorize callback `_ and `acl callback `_ features to authorize Swift requests. - -4. Swauth will also make various internal calls to the Swift WSGI pipeline it's installed in to manipulate containers and objects within an ``AUTH_.auth`` (by default) Swift account. These containers and objects are what store account and user information. - -5. Instead of #4, Swauth can be configured to call out to another remote Swauth to perform #4 on its behalf (using the swauth_remote config value). - -6. 
When managing accounts and users with the various ``swauth-`` command line tools, these tools are actually just performing HTTP requests against the ``/auth/`` end point referenced in #2. You can make your own tools that use the same :ref:`API `. - -7. In the special case of creating a new account, Swauth will do its usual WSGI-internal requests as per #4 but will also call out to the Swift cluster to create the actual Swift account. - - a. This Swift cluster callout is an account PUT request to the URL defined by the ``swift_default_cluster`` config value. - - b. This callout end point is also saved when the account is created so that it can be given to the users of that account in the future. - - c. Sometimes, due to public/private network routing or firewalling, the URL Swauth should use should be different than the URL Swauth should give the users later. That is why the ``default_swift_cluster`` config value can accept two URLs (first is the one for users, second is the one for Swauth). - - d. Once an account is created, the URL given to users for that account will not change, even if the ``default_swift_cluster`` config value changes. This is so that you can use multiple clusters with the same Swauth system; ``default_swift_cluster`` just points to the one where you want new users to go. - - f. You can change the stored URL for an account if need be with the ``swauth-set-account-service`` command line tool or a POST request (see :ref:`API `). - - -Install -------- - -1) Install Swauth with ``sudo python setup.py install`` or ``sudo python - setup.py develop`` or via whatever packaging system you may be using. - -2) Alter your ``proxy-server.conf`` pipeline to have ``swauth`` instead of ``tempauth``: - - Was:: - - [pipeline:main] - pipeline = catch_errors cache tempauth proxy-server - - Change To:: - - [pipeline:main] - pipeline = catch_errors cache swauth proxy-server - -3) Add to your ``proxy-server.conf`` the section for the Swauth WSGI filter:: - - [filter:swauth] - use = egg:swauth#swauth - set log_name = swauth - super_admin_key = swauthkey - default_swift_cluster = - - The ``default_swift_cluster`` setting can be confusing. - - a. If you're using an all-in-one type configuration where everything will be run on the local host on port 8080, you can omit the ``default_swift_cluster`` completely and it will default to ``local#http://127.0.0.1:8080/v1``. - - b. If you're using a single Swift proxy you can just set the ``default_swift_cluster = cluster_name#https://:/v1`` and that URL will be given to users as well as used by Swauth internally. (Quick note: be sure the ``http`` vs. ``https`` is set right depending on if you're using SSL.) - - c. If you're using multiple Swift proxies behind a load balancer, you'll probably want ``default_swift_cluster = cluster_name#https://:/v1#http://127.0.0.1:/v1`` so that Swauth gives out the first URL but uses the second URL internally. Remember to double-check the ``http`` vs. ``https`` settings for each of the URLs; they might be different if you're terminating SSL at the load balancer. - - Also see the ``proxy-server.conf-sample`` for more config options, such as the ability to have a remote Swauth in a multiple Swift cluster configuration. - -4) Be sure your Swift proxy allows account management in the ``proxy-server.conf``:: - - [app:proxy-server] - ... - allow_account_management = true - - For greater security, you can leave this off any public proxies and just have one or two private proxies with it turned on. 
- -5) Restart your proxy server ``swift-init proxy reload`` - -6) Initialize the Swauth backing store in Swift ``swauth-prep -K swauthkey`` - -7) Add an account/user ``swauth-add-user -A http[s]://:/auth/ -K - swauthkey -a test tester testing`` - -8) Ensure it works ``swift -A http[s]://:/auth/v1.0 -U test:tester -K testing stat -v`` - - -If anything goes wrong, it's best to start checking the proxy server logs. The client command line utilities often don't get enough information to help. I will often just ``tail -F`` the appropriate proxy log (``/var/log/syslog`` or however you have it configured) and then run the Swauth command to see exactly what requests are happening to try to determine where things fail. - -General note, I find I occasionally just forget to reload the proxies after a config change; so that's the first thing you might try. Or, if you suspect the proxies aren't reloading properly, you might try ``swift-init proxy stop``, ensure all the processes died, then ``swift-init proxy start``. - -Also, it's quite common to get the ``/auth/v1.0`` vs. just ``/auth/`` URL paths confused. Usual rule is: Swauth tools use just ``/auth/`` and Swift tools use ``/auth/v1.0``. - - -Web Admin Install ------------------ - -1) If you installed from packages, you'll need to cd to the webadmin directory - the package installed. This is ``/usr/share/doc/python-swauth/webadmin`` - with the Lucid packages. If you installed from source, you'll need to cd to - the webadmin directory in the source directory. - -2) Upload the Web Admin files with ``swift -A http[s]://:/auth/v1.0 - -U .super_admin:.super_admin -K swauthkey upload .webadmin .`` - -3) Open ``http[s]://:/auth/`` in your browser. - - -Contents --------- - -.. toctree:: - :maxdepth: 2 - - license - details - swauth - middleware - api - authtypes - - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/gluster/swift/common/middleware/gswauth/doc/source/license.rst b/gluster/swift/common/middleware/gswauth/doc/source/license.rst deleted file mode 100644 index 590a9b4..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/license.rst +++ /dev/null @@ -1,225 +0,0 @@ -.. _license: - -******* -LICENSE -******* - -:: - - Copyright (c) 2010-2011 OpenStack, LLC - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied. - See the License for the specific language governing permissions and - limitations under the License. - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/gluster/swift/common/middleware/gswauth/doc/source/middleware.rst b/gluster/swift/common/middleware/gswauth/doc/source/middleware.rst deleted file mode 100644 index a25acd4..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/middleware.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. _swauth_middleware_module: - -swauth.middleware -================= - -.. automodule:: swauth.middleware - :members: - :undoc-members: - :show-inheritance: diff --git a/gluster/swift/common/middleware/gswauth/doc/source/swauth.rst b/gluster/swift/common/middleware/gswauth/doc/source/swauth.rst deleted file mode 100644 index c50c350..0000000 --- a/gluster/swift/common/middleware/gswauth/doc/source/swauth.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. _swauth_module: - -swauth -====== - -.. automodule:: swauth - :members: - :undoc-members: - :show-inheritance: diff --git a/gluster/swift/common/middleware/gswauth/etc/proxy-server.conf-sample b/gluster/swift/common/middleware/gswauth/etc/proxy-server.conf-sample deleted file mode 100644 index a5f4ea1..0000000 --- a/gluster/swift/common/middleware/gswauth/etc/proxy-server.conf-sample +++ /dev/null @@ -1,78 +0,0 @@ -[DEFAULT] -# Standard from Swift - -[pipeline:main] -# Standard from Swift, this is just an example of where to put swauth -pipeline = catch_errors healthcheck cache ratelimit swauth proxy-server - -[app:proxy-server] -# Standard from Swift, main point to note is the inclusion of -# allow_account_management = true (only for the proxy servers where you want to -# be able to create/delete accounts). -use = egg:swift#proxy -allow_account_management = true - -[filter:swauth] -use = egg:swauth#swauth -# You can override the default log routing for this filter here: -# set log_name = swauth -# set log_facility = LOG_LOCAL0 -# set log_level = INFO -# set log_headers = False -# The reseller prefix will verify a token begins with this prefix before even -# attempting to validate it. Also, with authorization, only Swift storage -# accounts with this prefix will be authorized by this middleware. Useful if -# multiple auth systems are in use for one Swift cluster. -# reseller_prefix = AUTH -# If you wish to use a Swauth service on a remote cluster with this cluster: -# swauth_remote = http://remotehost:port/auth -# swauth_remote_timeout = 10 -# When using swauth_remote, the rest of these settings have no effect. -# -# The auth prefix will cause requests beginning with this prefix to be routed -# to the auth subsystem, for granting tokens, creating accounts, users, etc. -# auth_prefix = /auth/ -# Cluster strings are of the format name#url where name is a short name for the -# Swift cluster and url is the url to the proxy server(s) for the cluster. -# default_swift_cluster = local#http://127.0.0.1:8080/v1 -# You may also use the format name#url#url where the first url is the one -# given to users to access their account (public url) and the second is the one -# used by swauth itself to create and delete accounts (private url). This is -# useful when a load balancer url should be used by users, but swauth itself is -# behind the load balancer. Example: -# default_swift_cluster = local#https://public.com:8080/v1#http://private.com:8080/v1 -# Number of seconds a newly issued token should be valid for, by default. -# token_life = 86400 -# Maximum number of seconds a newly issued token can be valid for. -# max_token_life = -# Specifies how the user key is stored. 
The default is 'plaintext', leaving the -# key unsecured but available for key-signing features if such are ever added. -# An alternative is 'sha1' which stores only a one-way hash of the key leaving -# it secure but unavailable for key-signing. -# auth_type = plaintext -# Used if the auth_type is sha1 or another method that can make use of a salt. -# auth_type_salt = swauthsalt -# This allows middleware higher in the WSGI pipeline to override auth -# processing, useful for middleware such as tempurl and formpost. If you know -# you're not going to use such middleware and you want a bit of extra security, -# you can set this to false. -# allow_overrides = true -# Highly recommended to change this. If you comment this out, the Swauth -# administration features will be disabled for this proxy. -super_admin_key = swauthkey - -[filter:ratelimit] -# Standard from Swift -use = egg:swift#ratelimit - -[filter:cache] -# Standard from Swift -use = egg:swift#memcache - -[filter:healthcheck] -# Standard from Swift -use = egg:swift#healthcheck - -[filter:catch_errors] -# Standard from Swift -use = egg:swift#catch_errors diff --git a/gluster/swift/common/middleware/gswauth/locale/swauth.pot b/gluster/swift/common/middleware/gswauth/locale/swauth.pot deleted file mode 100644 index 86bcbec..0000000 --- a/gluster/swift/common/middleware/gswauth/locale/swauth.pot +++ /dev/null @@ -1,30 +0,0 @@ -# Translations template for swauth. -# Copyright (C) 2011 ORGANIZATION -# This file is distributed under the same license as the swauth project. -# FIRST AUTHOR , 2011. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: swauth 1.0.1.dev\n" -"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2011-05-26 10:35+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 0.9.4\n" - -#: swauth/middleware.py:94 -msgid "No super_admin_key set in conf file! Exiting." -msgstr "" - -#: swauth/middleware.py:637 -#, python-format -msgid "" -"ERROR: Exception while trying to communicate with " -"%(scheme)s://%(host)s:%(port)s/%(path)s" -msgstr "" - diff --git a/gluster/swift/common/middleware/gswauth/setup.cfg b/gluster/swift/common/middleware/gswauth/setup.cfg deleted file mode 100644 index a0122b1..0000000 --- a/gluster/swift/common/middleware/gswauth/setup.cfg +++ /dev/null @@ -1,23 +0,0 @@ -[build_sphinx] -all_files = 1 -build-dir = doc/build -source-dir = doc/source - -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - -[compile_catalog] -directory = locale -domain = swauth - -[update_catalog] -domain = swauth -output_dir = locale -input_file = locale/swauth.pot - -[extract_messages] -keywords = _ l_ lazy_gettext -mapping_file = babel.cfg -output_file = locale/swauth.pot diff --git a/gluster/swift/common/middleware/gswauth/setup.py b/gluster/swift/common/middleware/gswauth/setup.py deleted file mode 100644 index f85718c..0000000 --- a/gluster/swift/common/middleware/gswauth/setup.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/python -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from setuptools import setup, find_packages -from setuptools.command.sdist import sdist -import os -import subprocess -try: - from babel.messages import frontend -except ImportError: - frontend = None - -from swauth import __version__ as version - - -class local_sdist(sdist): - """Customized sdist hook - builds the ChangeLog file from VC first""" - - def run(self): - if os.path.isdir('.bzr'): - # We're in a bzr branch - - log_cmd = subprocess.Popen(["bzr", "log", "--gnu"], - stdout=subprocess.PIPE) - changelog = log_cmd.communicate()[0] - with open("ChangeLog", "w") as changelog_file: - changelog_file.write(changelog) - sdist.run(self) - - -name = 'swauth' - - -cmdclass = {'sdist': local_sdist} - - -if frontend: - cmdclass.update({ - 'compile_catalog': frontend.compile_catalog, - 'extract_messages': frontend.extract_messages, - 'init_catalog': frontend.init_catalog, - 'update_catalog': frontend.update_catalog, - }) - - -setup( - name=name, - version=version, - description='Swauth', - license='Apache License (2.0)', - author='OpenStack, LLC.', - author_email='swauth@brim.net', - url='https://github.com/gholt/swauth', - packages=find_packages(exclude=['test_swauth', 'bin']), - test_suite='nose.collector', - cmdclass=cmdclass, - classifiers=[ - 'Development Status :: 4 - Beta', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 2.6', - 'Environment :: No Input/Output (Daemon)', - ], - install_requires=[], # removed for better compat - scripts=[ - 'bin/swauth-add-account', 'bin/swauth-add-user', - 'bin/swauth-cleanup-tokens', 'bin/swauth-delete-account', - 'bin/swauth-delete-user', 'bin/swauth-list', 'bin/swauth-prep', - 'bin/swauth-set-account-service', - ], - entry_points={ - 'paste.filter_factory': [ - 'swauth=swauth.middleware:filter_factory', - ], - }, -) diff --git a/gluster/swift/common/middleware/gswauth/swauth/__init__.py b/gluster/swift/common/middleware/gswauth/swauth/__init__.py deleted file mode 100644 index 67f86b6..0000000 --- a/gluster/swift/common/middleware/gswauth/swauth/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import gettext - - -#: Version information (major, minor, revision[, 'dev']). -version_info = (1, 0, 9, 'dev') -#: Version string 'major.minor.revision'. 
-version = __version__ = ".".join(map(str, version_info)) -gettext.install('swauth') diff --git a/gluster/swift/common/middleware/gswauth/swauth/authtypes.py b/gluster/swift/common/middleware/gswauth/swauth/authtypes.py deleted file mode 100644 index 90aad72..0000000 --- a/gluster/swift/common/middleware/gswauth/swauth/authtypes.py +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Pablo Llopis 2011 - - -""" -This module hosts available auth types for encoding and matching user keys. -For adding a new auth type, simply write a class that satisfies the following -conditions: - -- For the class name, capitalize first letter only. This makes sure the user - can specify an all-lowercase config option such as "plaintext" or "sha1". - Swauth takes care of capitalizing the first letter before instantiating it. -- Write an encode(key) method that will take a single argument, the user's key, - and returns the encoded string. For plaintext, this would be - "plaintext:" -- Write a match(key, creds) method that will take two arguments: the user's - key, and the user's retrieved credentials. Return a boolean value that - indicates whether the match is True or False. - -Note that, since some of the encodings will be hashes, swauth supports the -notion of salts. Thus, self.salt will be set to either a user-specified salt -value or to a default value. -""" - -import hashlib - - -#: Maximum length any valid token should ever be. -MAX_TOKEN_LENGTH = 5000 - - -class Plaintext(object): - """ - Provides a particular auth type for encoding format for encoding and - matching user keys. - - This class must be all lowercase except for the first character, which - must be capitalized. encode and match methods must be provided and are - the only ones that will be used by swauth. - """ - def encode(self, key): - """ - Encodes a user key into a particular format. The result of this method - will be used by swauth for storing user credentials. - - :param key: User's secret key - :returns: A string representing user credentials - """ - return "plaintext:%s" % key - - def match(self, key, creds): - """ - Checks whether the user-provided key matches the user's credentials - - :param key: User-supplied key - :param creds: User's stored credentials - :returns: True if the supplied key is valid, False otherwise - """ - return self.encode(key) == creds - - -class Sha1(object): - """ - Provides a particular auth type for encoding format for encoding and - matching user keys. - - This class must be all lowercase except for the first character, which - must be capitalized. encode and match methods must be provided and are - the only ones that will be used by swauth. - """ - def encode(self, key): - """ - Encodes a user key into a particular format. The result of this method - will be used by swauth for storing user credentials. 
- - :param key: User's secret key - :returns: A string representing user credentials - """ - enc_key = '%s%s' % (self.salt, key) - enc_val = hashlib.sha1(enc_key).hexdigest() - return "sha1:%s$%s" % (self.salt, enc_val) - - def match(self, key, creds): - """ - Checks whether the user-provided key matches the user's credentials - - :param key: User-supplied key - :param creds: User's stored credentials - :returns: True if the supplied key is valid, False otherwise - """ - return self.encode(key) == creds diff --git a/gluster/swift/common/middleware/gswauth/swauth/middleware.py b/gluster/swift/common/middleware/gswauth/swauth/middleware.py deleted file mode 100644 index 314eedb..0000000 --- a/gluster/swift/common/middleware/gswauth/swauth/middleware.py +++ /dev/null @@ -1,1614 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -from httplib import HTTPConnection, HTTPSConnection -from time import gmtime, strftime, time -from traceback import format_exc -from urllib import quote, unquote -from uuid import uuid4 -from hashlib import sha1 -import hmac -import base64 - -from eventlet.timeout import Timeout -from eventlet import TimeoutError -from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ - HTTPCreated, HTTPForbidden, HTTPMethodNotAllowed, HTTPMovedPermanently, \ - HTTPNoContent, HTTPNotFound, HTTPUnauthorized, \ - Request, Response - -from swift.common.bufferedhttp import http_connect_raw as http_connect -from swift.common.middleware.acl import clean_acl, parse_acl, referrer_allowed -from swift.common.utils import cache_from_env, get_logger, get_remote_client, \ - split_path, TRUE_VALUES, urlparse -import swift.common.wsgi - - -from gluster.swift.common.middleware.gswauth.swauth import authtypes - - -class Swauth(object): - """ - Scalable authentication and authorization system that uses Swift as its - backing store. - - :param app: The next WSGI app in the pipeline - :param conf: The dict of configuration values - """ - - def __init__(self, app, conf): - self.app = app - self.conf = conf - self.logger = get_logger(conf, log_route='gswauth') - self.log_headers = conf.get('log_headers', 'no').lower() in TRUE_VALUES - self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() - if self.reseller_prefix and self.reseller_prefix[-1] != '_': - self.reseller_prefix += '_' - self.auth_prefix = conf.get('auth_prefix', '/auth/') - if not self.auth_prefix: - self.auth_prefix = '/auth/' - if self.auth_prefix[0] != '/': - self.auth_prefix = '/' + self.auth_prefix - if self.auth_prefix[-1] != '/': - self.auth_prefix += '/' - self.swauth_remote = conf.get('swauth_remote') - if self.swauth_remote: - self.swauth_remote = self.swauth_remote.rstrip('/') - if not self.swauth_remote: - msg = _('Invalid swauth_remote set in conf file! 
Exiting.') - try: - self.logger.critical(msg) - except Exception: - pass - raise ValueError(msg) - self.swauth_remote_parsed = urlparse(self.swauth_remote) - if self.swauth_remote_parsed.scheme not in ('http', 'https'): - msg = _('Cannot handle protocol scheme %s for url %s!') % ( - self.swauth_remote_parsed.scheme, - repr(self.swauth_remote)) - try: - self.logger.critical(msg) - except Exception: - pass - raise ValueError(msg) - self.swauth_remote_timeout = int(conf.get('swauth_remote_timeout', 10)) - - self.metadata_volume = conf.get('metadata_volume', 'gsmetadata') - self.auth_account = '%s%s' % (self.reseller_prefix, - self.metadata_volume) - self.default_swift_cluster = conf.get( - 'default_swift_cluster', - 'local#http://127.0.0.1:8080/v1') - # This setting is a little messy because of the options it has to - # provide. The basic format is cluster_name#url, such as the default - # value of local#http://127.0.0.1:8080/v1. - # If the URL given to the user needs to differ from the url used by - # Swauth to create/delete accounts, there's a more complex format: - # cluster_name#url#url, such as - # local#https://public.com:8080/v1#http://private.com:8080/v1. - cluster_parts = self.default_swift_cluster.split('#', 2) - self.dsc_name = cluster_parts[0] - if len(cluster_parts) == 3: - self.dsc_url = cluster_parts[1].rstrip('/') - self.dsc_url2 = cluster_parts[2].rstrip('/') - elif len(cluster_parts) == 2: - self.dsc_url = self.dsc_url2 = cluster_parts[1].rstrip('/') - else: - raise Exception('Invalid cluster format') - self.dsc_parsed = urlparse(self.dsc_url) - if self.dsc_parsed.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (self.dsc_parsed.scheme, repr(self.dsc_url))) - self.dsc_parsed2 = urlparse(self.dsc_url2) - if self.dsc_parsed2.scheme not in ('http', 'https'): - raise Exception('Cannot handle protocol scheme %s for url %s' % - (self.dsc_parsed2.scheme, repr(self.dsc_url2))) - self.super_admin_key = conf.get('super_admin_key') - if not self.super_admin_key and not self.swauth_remote: - msg = _('No super_admin_key set in conf file; Swauth ' - 'administration features will be disabled.') - try: - self.logger.warn(msg) - except Exception: - pass - self.token_life = int(conf.get('token_life', 86400)) - self.max_token_life = int(conf.get('max_token_life', self.token_life)) - self.timeout = int(conf.get('node_timeout', 10)) - self.itoken = None - self.itoken_expires = None - self.allowed_sync_hosts = [ - h.strip() - for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',') - if h.strip()] - # Get an instance of our auth_type encoder for saving and checking the - # user's key - self.auth_type = conf.get('auth_type', 'Plaintext').title() - self.auth_encoder = getattr(authtypes, self.auth_type, None) - if self.auth_encoder is None: - raise Exception( - 'Invalid auth_type in config file: %s' - % self.auth_type) - self.auth_encoder.salt = conf.get('auth_type_salt', 'gswauthsalt') - self.allow_overrides = \ - conf.get('allow_overrides', 't').lower() in TRUE_VALUES - self.agent = '%(orig)s Swauth' - self.swift_source = 'SWTH' - - def make_pre_authed_request(self, env, method=None, path=None, body=None, - headers=None): - """ - Nearly the same as swift.common.wsgi.make_pre_authed_request - except that this also always sets the 'swift.source' and user - agent. - - Newer Swift code will support swift_source as a kwarg, but we - do it this way so we don't have to have a newer Swift. 
- - Since we're doing this anyway, we may as well set the user - agent too since we always do that. - """ - subreq = swift.common.wsgi.make_pre_authed_request( - env, method=method, path=path, body=body, headers=headers, - agent=self.agent) - subreq.environ['swift.source'] = self.swift_source - return subreq - - def __call__(self, env, start_response): - """ - Accepts a standard WSGI application call, authenticating the request - and installing callback hooks for authorization and ACL header - validation. For an authenticated request, REMOTE_USER will be set to a - comma separated list of the user's groups. - - With a non-empty reseller prefix, acts as the definitive auth service - for just tokens and accounts that begin with that prefix, but will deny - requests outside this prefix if no other auth middleware overrides it. - - With an empty reseller prefix, acts as the definitive auth service only - for tokens that validate to a non-empty set of groups. For all other - requests, acts as the fallback auth service when no other auth - middleware overrides it. - - Alternatively, if the request matches the self.auth_prefix, the request - will be routed through the internal auth request handler (self.handle). - This is to handle creating users, accounts, granting tokens, etc. - """ - # We're going to consider OPTIONS requests harmless and the CORS - # support in the Swift proxy needs to get them. - if env.get('REQUEST_METHOD') == 'OPTIONS': - return self.app(env, start_response) - if self.allow_overrides and env.get('swift.authorize_override', False): - return self.app(env, start_response) - if not self.swauth_remote: - if env.get('PATH_INFO', '') == self.auth_prefix[:-1]: - return HTTPMovedPermanently(add_slash=True)(env, - start_response) - elif env.get('PATH_INFO', '').startswith(self.auth_prefix): - return self.handle(env, start_response) - s3 = env.get('HTTP_AUTHORIZATION') - token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) - if token and len(token) > authtypes.MAX_TOKEN_LENGTH: - return HTTPBadRequest(body='Token exceeds maximum length.')( - env, start_response) - if s3 or (token and token.startswith(self.reseller_prefix)): - # Note: Empty reseller_prefix will match all tokens. - groups = self.get_groups(env, token) - if groups: - env['REMOTE_USER'] = groups - user = groups and groups.split(',', 1)[0] or '' - # We know the proxy logs the token, so we augment it just a bit - # to also log the authenticated user. - env['HTTP_X_AUTH_TOKEN'] = \ - '%s,%s' % (user, 's3' if s3 else token) - env['swift.authorize'] = self.authorize - env['swift.clean_acl'] = clean_acl - if '.reseller_admin' in groups: - env['reseller_request'] = True - else: - # Unauthorized token - if self.reseller_prefix and token and \ - token.startswith(self.reseller_prefix): - # Because I know I'm the definitive auth for this token, I - # can deny it outright. - return HTTPUnauthorized()(env, start_response) - # Because I'm not certain if I'm the definitive auth, I won't - # overwrite swift.authorize and I'll just set a delayed denial - # if nothing else overrides me. - elif 'swift.authorize' not in env: - env['swift.authorize'] = self.denied_response - else: - if self.reseller_prefix: - # With a non-empty reseller_prefix, I would like to be called - # back for anonymous access to accounts I know I'm the - # definitive auth for. 
- try: - version, rest = split_path(env.get('PATH_INFO', ''), - 1, 2, True) - except ValueError: - version, rest = None, None - if rest and rest.startswith(self.reseller_prefix): - # Handle anonymous access to accounts I'm the definitive - # auth for. - env['swift.authorize'] = self.authorize - env['swift.clean_acl'] = clean_acl - # Not my token, not my account, I can't authorize this request, - # deny all is a good idea if not already set... - elif 'swift.authorize' not in env: - env['swift.authorize'] = self.denied_response - # Because I'm not certain if I'm the definitive auth for empty - # reseller_prefixed accounts, I won't overwrite swift.authorize. - elif 'swift.authorize' not in env: - env['swift.authorize'] = self.authorize - env['swift.clean_acl'] = clean_acl - return self.app(env, start_response) - - def get_groups(self, env, token): - """ - Get groups for the given token. - - :param env: The current WSGI environment dictionary. - :param token: Token to validate and return a group string for. - - :returns: None if the token is invalid or a string containing a comma - separated list of groups the authenticated user is a member - of. The first group in the list is also considered a unique - identifier for that user. - """ - groups = None - memcache_client = cache_from_env(env) - if memcache_client: - memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) - cached_auth_data = memcache_client.get(memcache_key) - if cached_auth_data: - expires, groups = cached_auth_data - if expires < time(): - groups = None - - if env.get('HTTP_AUTHORIZATION'): - if self.swauth_remote: - # TODO: Support S3-style authorization with swauth_remote mode - self.logger.warn('S3-style authorization not supported yet ' - 'with swauth_remote mode.') - return None - try: - account = env['HTTP_AUTHORIZATION'].split(' ')[1] - account, user, sign = account.split(':') - except Exception: - self.logger.debug( - 'Swauth cannot parse Authorization header value %r' % - env['HTTP_AUTHORIZATION']) - return None - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - env, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2: - return None - - if 'x-object-meta-account-id' in resp.headers: - account_id = resp.headers['x-object-meta-account-id'] - else: - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp2 = self.make_pre_authed_request( - env, 'HEAD', path).get_response(self.app) - if resp2.status_int // 100 != 2: - return None - account_id = resp2.headers['x-container-meta-account-id'] - - path = env['PATH_INFO'] - env['PATH_INFO'] = path.replace("%s:%s" % (account, user), - account_id, 1) - detail = json.loads(resp.body) - - password = detail['auth'].split(':')[-1] - msg = base64.urlsafe_b64decode(unquote(token)) - s = base64.encodestring(hmac.new(password, - msg, sha1).digest()).strip() - if s != sign: - return None - groups = [g['name'] for g in detail['groups']] - if '.admin' in groups: - groups.remove('.admin') - groups.append(account_id) - groups = ','.join(groups) - return groups - - if not groups: - if self.swauth_remote: - with Timeout(self.swauth_remote_timeout): - conn = http_connect( - self.swauth_remote_parsed.hostname, - self.swauth_remote_parsed.port, 'GET', - '%s/v2/.token/%s' % (self.swauth_remote_parsed.path, - quote(token)), - ssl=(self.swauth_remote_parsed.scheme == 'https')) - resp = conn.getresponse() - resp.read() - conn.close() - if resp.status // 100 != 2: - return None - expires_from_now = 
float(resp.getheader('x-auth-ttl')) - groups = resp.getheader('x-auth-groups') - if memcache_client: - memcache_client.set( - memcache_key, (time() + expires_from_now, groups), - timeout=expires_from_now) - else: - path = quote('/v1/%s/.token_%s/%s' % - (self.auth_account, token[-1], token)) - resp = self.make_pre_authed_request( - env, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2: - return None - detail = json.loads(resp.body) - if detail['expires'] < time(): - self.make_pre_authed_request( - env, 'DELETE', path).get_response(self.app) - return None - groups = [g['name'] for g in detail['groups']] - if '.admin' in groups: - groups.remove('.admin') - groups.append(detail['account_id']) - groups = ','.join(groups) - if memcache_client: - memcache_client.set( - memcache_key, - (detail['expires'], groups), - timeout=float(detail['expires'] - time())) - return groups - - def authorize(self, req): - """ - Returns None if the request is authorized to continue or a standard - WSGI response callable if not. - """ - try: - version, account, container, obj = split_path(req.path, 1, 4, True) - except ValueError: - return HTTPNotFound(request=req) - if not account or not account.startswith(self.reseller_prefix): - return self.denied_response(req) - user_groups = (req.remote_user or '').split(',') - if '.reseller_admin' in user_groups and \ - account != self.reseller_prefix and \ - account[len(self.reseller_prefix):] != self.metadata_volume: - req.environ['swift_owner'] = True - return None - if account in user_groups and \ - (req.method not in ('DELETE', 'PUT') or container): - # If the user is admin for the account and is not trying to do an - # account DELETE or PUT... - req.environ['swift_owner'] = True - return None - if (req.environ.get('swift_sync_key') and - req.environ['swift_sync_key'] == - req.headers.get('x-container-sync-key', None) and - 'x-timestamp' in req.headers and - (req.remote_addr in self.allowed_sync_hosts or - get_remote_client(req) in self.allowed_sync_hosts)): - return None - referrers, groups = parse_acl(getattr(req, 'acl', None)) - if referrer_allowed(req.referer, referrers): - if obj or '.rlistings' in groups: - return None - return self.denied_response(req) - if not req.remote_user: - return self.denied_response(req) - for user_group in user_groups: - if user_group in groups: - return None - return self.denied_response(req) - - def denied_response(self, req): - """ - Returns a standard WSGI response callable with the status of 403 or 401 - depending on whether the REMOTE_USER is set or not. - """ - if not hasattr(req, 'credentials_valid'): - req.credentials_valid = None - if req.remote_user or req.credentials_valid: - return HTTPForbidden(request=req) - else: - return HTTPUnauthorized(request=req) - - def handle(self, env, start_response): - """ - WSGI entry point for auth requests (ones that match the - self.auth_prefix). - Wraps env in swob.Request object and passes it down. 
- - :param env: WSGI environment dictionary - :param start_response: WSGI callable - """ - try: - req = Request(env) - if self.auth_prefix: - req.path_info_pop() - req.bytes_transferred = '-' - req.client_disconnect = False - if 'x-storage-token' in req.headers and \ - 'x-auth-token' not in req.headers: - req.headers['x-auth-token'] = req.headers['x-storage-token'] - if 'eventlet.posthooks' in env: - env['eventlet.posthooks'].append( - (self.posthooklogger, (req,), {})) - return self.handle_request(req)(env, start_response) - else: - # Lack of posthook support means that we have to log on the - # start of the response, rather than after all the data has - # been sent. This prevents logging client disconnects - # differently than full transmissions. - response = self.handle_request(req)(env, start_response) - self.posthooklogger(env, req) - return response - except (Exception, TimeoutError): - print "EXCEPTION IN handle: %s: %s" % (format_exc(), env) - start_response('500 Server Error', - [('Content-Type', 'text/plain')]) - return ['Internal server error.\n'] - - def handle_request(self, req): - """ - Entry point for auth requests (ones that match the self.auth_prefix). - Should return a WSGI-style callable (such as swob.Response). - - :param req: swob.Request object - """ - req.start_time = time() - handler = None - try: - version, account, user, _junk = split_path( - req.path_info, minsegs=0, maxsegs=4, rest_with_last=True) - except ValueError: - return HTTPNotFound(request=req) - if version in ('v1', 'v1.0', 'auth'): - if req.method == 'GET': - handler = self.handle_get_token - elif version == 'v2': - if not self.super_admin_key: - return HTTPNotFound(request=req) - req.path_info_pop() - if req.method == 'GET': - if not account and not user: - handler = self.handle_get_reseller - elif account: - if not user: - handler = self.handle_get_account - elif account == '.token': - req.path_info_pop() - handler = self.handle_validate_token - else: - handler = self.handle_get_user - elif req.method == 'PUT': - if not user: - handler = self.handle_put_account - else: - handler = self.handle_put_user - elif req.method == 'DELETE': - if not user: - handler = self.handle_delete_account - else: - handler = self.handle_delete_user - elif req.method == 'POST': - if account == '.prep': - handler = self.handle_prep - elif user == '.services': - handler = self.handle_set_services - else: - handler = self.handle_webadmin - if not handler: - req.response = HTTPBadRequest(request=req) - else: - req.response = handler(req) - return req.response - - def handle_webadmin(self, req): - if req.method not in ('GET', 'HEAD'): - return HTTPMethodNotAllowed(request=req) - subpath = req.path[len(self.auth_prefix):] or 'index.html' - path = quote('/v1/%s/.webadmin/%s' % (self.auth_account, subpath)) - req.response = self.make_pre_authed_request( - req.environ, req.method, path).get_response(self.app) - return req.response - - def handle_prep(self, req): - """ - Handles the POST v2/.prep call for preparing the backing store Swift - cluster for use with the auth subsystem. Can only be called by - .super_admin. - - :param req: The swob.Request to process. 
- :returns: swob.Response, 204 on success - """ - if not self.is_super_admin(req): - return HTTPUnauthorized(request=req) - path = quote('/v1/%s/.account_id' % self.auth_account) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not create container: %s %s' % - (path, resp.status)) - for container in xrange(16): - path = quote('/v1/%s/.token_%x' % (self.auth_account, container)) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not create container: %s %s' % - (path, resp.status)) - return HTTPNoContent(request=req) - - def handle_get_reseller(self, req): - """ - Handles the GET v2 call for getting general reseller information - (currently just a list of accounts). Can only be called by a - .reseller_admin. - - On success, a JSON dictionary will be returned with a single `accounts` - key whose value is list of dicts. Each dict represents an account and - currently only contains the single key `name`. For example:: - - {"accounts": [{"name": "reseller"}, {"name": "test"}, - {"name": "test2"}]} - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with a JSON dictionary as - explained above. - """ - if not self.is_reseller_admin(req): - return self.denied_response(req) - listing = [] - marker = '' - while True: - path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account), - quote(marker)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not list main auth account: %s %s' % - (path, resp.status)) - sublisting = json.loads(resp.body) - if not sublisting: - break - for container in sublisting: - if container['name'][0] != '.': - listing.append({'name': container['name']}) - marker = sublisting[-1]['name'].encode('utf-8') - return Response(body=json.dumps({'accounts': listing})) - - def handle_get_account(self, req): - """ - Handles the GET v2/ call for getting account information. - Can only be called by an account .admin. - - On success, a JSON dictionary will be returned containing the keys - `account_id`, `services`, and `users`. The `account_id` is the value - used when creating service accounts. The `services` value is a dict as - described in the :func:`handle_get_token` call. The `users` value is a - list of dicts, each dict representing a user and currently only - containing the single key `name`. For example:: - - {"account_id": "AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162", - "services": {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}}, - "users": [{"name": "tester"}, {"name": "tester3"}]} - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with a JSON dictionary as - explained above. 
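# Illustrative sketch (not part of the deleted module): issuing the
# GET v2/<account> call documented above with httplib.  It assumes a proxy
# at the default cluster address (127.0.0.1:8080) with the default /auth/
# prefix; the account name and admin credentials are placeholders.  Admin
# calls authenticate with the X-Auth-Admin-User/X-Auth-Admin-Key headers.
from httplib import HTTPConnection
import json

conn = HTTPConnection('127.0.0.1', 8080)
conn.request('GET', '/auth/v2/test',
             headers={'X-Auth-Admin-User': '.super_admin',
                      'X-Auth-Admin-Key': 'supertest'})
resp = conn.getresponse()
if resp.status // 100 == 2:
    detail = json.loads(resp.read())
    print detail['account_id'], detail['users']
conn.close()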
- """ - account = req.path_info_pop() - if req.path_info or not account or account[0] == '.': - return HTTPBadRequest(request=req) - if not self.is_account_admin(req, account): - return self.denied_response(req) - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not obtain the .services object: %s %s' % - (path, resp.status)) - services = json.loads(resp.body) - listing = [] - marker = '' - while True: - path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % ( - self.auth_account, account)), quote(marker)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not list in main auth account: %s %s' % - (path, resp.status)) - account_id = resp.headers['X-Container-Meta-Account-Id'] - sublisting = json.loads(resp.body) - if not sublisting: - break - for obj in sublisting: - if obj['name'][0] != '.': - listing.append({'name': obj['name']}) - marker = sublisting[-1]['name'].encode('utf-8') - return Response(body=json.dumps( - {'account_id': account_id, - 'services': services, 'users': listing})) - - def handle_set_services(self, req): - """ - Handles the POST v2//.services call for setting services - information. Can only be called by a reseller .admin. - - In the :func:`handle_get_account` (GET v2/) call, a section of - the returned JSON dict is `services`. This section looks something like - this:: - - "services": {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_018c3946"}} - - Making use of this section is described in :func:`handle_get_token`. - - This function allows setting values within this section for the - , allowing the addition of new service end points or updating - existing ones. - - The body of the POST request should contain a JSON dict with the - following format:: - - {"service_name": {"end_point_name": "end_point_value"}} - - There can be multiple services and multiple end points in the same - call. - - Any new services or end points will be added to the existing set of - services and end points. Any existing services with the same service - name will be merged with the new end points. Any existing end points - with the same end point name will have their values updated. - - The updated services dictionary will be returned on success. - - :param req: The swob.Request to process. 
- :returns: swob.Response, 2xx on success with the udpated services JSON - dict as described above - """ - if not self.is_reseller_admin(req): - return self.denied_response(req) - account = req.path_info_pop() - if req.path_info != '/.services' or not account or account[0] == '.': - return HTTPBadRequest(request=req) - try: - new_services = json.loads(req.body) - except ValueError, err: - return HTTPBadRequest(body=str(err)) - # Get the current services information - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not obtain services info: %s %s' % - (path, resp.status)) - services = json.loads(resp.body) - for new_service, value in new_services.iteritems(): - if new_service in services: - services[new_service].update(value) - else: - services[new_service] = value - # Save the new services information - services = json.dumps(services) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path, services).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not save .services object: %s %s' % - (path, resp.status)) - return Response(request=req, body=services) - - def handle_put_account(self, req): - """ - Handles the PUT v2/ call for adding an account to the auth - system. Can only be called by a .reseller_admin. - - By default, a newly created UUID4 will be used with the reseller prefix - as the account id used when creating corresponding service accounts. - However, you can provide an X-Account-Suffix header to replace the - UUID4 part. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success. 
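# Illustrative sketch (not part of the deleted module): the PUT v2/<account>
# call described above, issued as .super_admin (any .reseller_admin works).
# Host, account name and key are placeholders.
from httplib import HTTPConnection

conn = HTTPConnection('127.0.0.1', 8080)
conn.request('PUT', '/auth/v2/newaccount',
             headers={'X-Auth-Admin-User': '.super_admin',
                      'X-Auth-Admin-Key': 'supertest'})
resp = conn.getresponse()
resp.read()
print resp.status   # 201 when created, 202 if the account already existed
conn.close()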
- """ - if not self.is_reseller_admin(req): - return self.denied_response(req) - account = req.path_info_pop() - if req.path_info or not account or account[0] == '.': - return HTTPBadRequest(request=req) - - # Ensure the container in the main auth account exists (this - # container represents the new account) - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'HEAD', path).get_response(self.app) - if resp.status_int == 404: - resp = self.make_pre_authed_request( - req.environ, 'PUT', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception( - 'Could not create account within main auth ' - 'account: %s %s' % (path, resp.status)) - elif resp.status_int // 100 == 2: - if 'x-container-meta-account-id' in resp.headers: - # Account was already created - return HTTPAccepted(request=req) - else: - raise Exception( - 'Could not verify account within main auth ' - 'account: %s %s' % (path, resp.status)) - - # Record the mapping from account id back to account name - path = quote('/v1/%s/.account_id/%s%s' % - (self.auth_account, self.reseller_prefix, account)) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path, account).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not create account id mapping: %s %s' % - (path, resp.status)) - # Record the cluster url(s) for the account - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - services = {'storage': {}} - services['storage'][self.dsc_name] = '%s/%s%s' % ( - self.dsc_url, - self.reseller_prefix, account) - services['storage']['default'] = self.dsc_name - resp = self.make_pre_authed_request( - req.environ, 'PUT', path, - json.dumps(services)).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not create .services object: %s %s' % - (path, resp.status)) - # Record the mapping from account name to the account id - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'POST', path, - headers={'X-Container-Meta-Account-Id': '%s%s' % ( - self.reseller_prefix, account)}).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not record the account id on the account: ' - '%s %s' % (path, resp.status)) - return HTTPCreated(request=req) - - def handle_delete_account(self, req): - """ - Handles the DELETE v2/ call for removing an account from the - auth system. Can only be called by a .reseller_admin. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success. 
- """ - if not self.is_reseller_admin(req): - return self.denied_response(req) - account = req.path_info_pop() - if req.path_info or not account or account[0] == '.': - return HTTPBadRequest(request=req) - - # Make sure the account has no users and get the account_id - marker = '' - while True: - path = '/v1/%s?format=json&marker=%s' % ( - quote('%s/%s' % ( - self.auth_account, account)), quote(marker)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not list in main auth account: %s %s' % - (path, resp.status)) - account_id = resp.headers['x-container-meta-account-id'] - sublisting = json.loads(resp.body) - if not sublisting: - break - for obj in sublisting: - if obj['name'][0] != '.': - return HTTPConflict(request=req) - marker = sublisting[-1]['name'].encode('utf-8') - - # Obtain the listing of services the account is on. - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not obtain .services object: %s %s' % - (path, resp.status)) - if resp.status_int // 100 == 2: - # Delete .services - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not delete .services object: %s %s' % - (path, resp.status)) - - # Delete the account id mapping for the account. - path = quote('/v1/%s/.account_id/%s' % - (self.auth_account, account_id)) - resp = self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not delete account id mapping: %s %s' % - (path, resp.status)) - - # Delete the account marker itself. - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not delete account marked: %s %s' % - (path, resp.status)) - return HTTPNoContent(request=req) - - def handle_get_user(self, req): - """ - Handles the GET v2// call for getting user information. - Can only be called by an account .admin. - - On success, a JSON dict will be returned as described:: - - {"groups": [ # List of groups the user is a member of - {"name": ":"}, - # The first group is a unique user identifier - {"name": ""}, - # The second group is the auth account name - {"name": ""} - # There may be additional groups, .admin being a special - # group indicating an account admin and .reseller_admin - # indicating a reseller admin. - ], - "auth": "plaintext:" - # The auth-type and key for the user; currently only plaintext is - # implemented. - } - - For example:: - - {"groups": [{"name": "test:tester"}, {"name": "test"}, - {"name": ".admin"}], - "auth": "plaintext:testing"} - - If the in the request is the special user `.groups`, the JSON - dict will contain a single key of `groups` whose value is a list of - dicts representing the active groups within the account. Each dict - currently has the single key `name`. 
For example:: - - {"groups": [{"name": ".admin"}, {"name": "test"}, - {"name": "test:tester"}, {"name": "test:tester3"}]} - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with a JSON dictionary as - explained above. - """ - account = req.path_info_pop() - user = req.path_info_pop() - if req.path_info or not account or account[0] == '.' or not user or \ - (user[0] == '.' and user != '.groups'): - return HTTPBadRequest(request=req) - if not self.is_account_admin(req, account): - return self.denied_response(req) - - # get information for each user for the specified - # account and create a list of all groups that the users - # are part of - if user == '.groups': - # TODO: This could be very slow for accounts with a really large - # number of users. Speed could be improved by concurrently - # requesting user group information. Then again, I don't *know* - # it's slow for `normal` use cases, so testing should be done. - groups = set() - marker = '' - while True: - path = '/v1/%s?format=json&marker=%s' % ( - quote( - '%s/%s' % - (self.auth_account, account)), quote(marker)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not list in main auth account: ' - '%s %s' % (path, resp.status)) - sublisting = json.loads(resp.body) - if not sublisting: - break - for obj in sublisting: - if obj['name'][0] != '.': - - # get list of groups for each user - user_json = self.get_user_detail(req, account, - obj['name']) - if user_json is None: - raise Exception('Could not retrieve user object: ' - '%s:%s %s' % (account, user, 404)) - groups.update( - g['name'] for g in json.loads(user_json)['groups']) - marker = sublisting[-1]['name'].encode('utf-8') - body = json.dumps( - {'groups': [{'name': g} for g in sorted(groups)]}) - else: - # get information for specific user, - # if user doesn't exist, return HTTPNotFound - body = self.get_user_detail(req, account, user) - if body is None: - return HTTPNotFound(request=req) - - display_groups = [g['name'] for g in json.loads(body)['groups']] - if ('.admin' in display_groups and - not self.is_reseller_admin(req)) or \ - ('.reseller_admin' in display_groups and - not self.is_super_admin(req)): - return HTTPForbidden(request=req) - return Response(body=body) - - def handle_put_user(self, req): - """ - Handles the PUT v2// call for adding a user to an - account. - - X-Auth-User-Key represents the user's key (url encoded), - X-Auth-User-Admin may be set to `true` to create an account .admin, and - X-Auth-User-Reseller-Admin may be set to `true` to create a - .reseller_admin. - - Creating users - ************** - Can only be called by an account .admin unless the user is to be a - .reseller_admin, in which case the request must be by .super_admin. - - Changing password/key - ********************* - 1) reseller_admin key can be changed by super_admin and by himself. - 2) admin key can be changed by any admin in same account, - reseller_admin, super_admin and himself. - 3) Regular user key can be changed by any admin in his account, - reseller_admin, super_admin and himself. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success. 
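# Illustrative sketch (not part of the deleted module): the
# PUT v2/<account>/<user> call documented above.  The new user's key goes
# in X-Auth-User-Key, and the optional X-Auth-User-Admin /
# X-Auth-User-Reseller-Admin headers grant the corresponding admin groups.
# All names and keys here are placeholders.
from httplib import HTTPConnection

conn = HTTPConnection('127.0.0.1', 8080)
conn.request('PUT', '/auth/v2/test/tester',
             headers={'X-Auth-Admin-User': '.super_admin',
                      'X-Auth-Admin-Key': 'supertest',
                      'X-Auth-User-Key': 'testing',
                      'X-Auth-User-Admin': 'true'})
resp = conn.getresponse()
resp.read()
print resp.status   # 201 on success
conn.close()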
- """ - # Validate path info - account = req.path_info_pop() - user = req.path_info_pop() - key = unquote(req.headers.get('x-auth-user-key', '')) - admin = req.headers.get('x-auth-user-admin') == 'true' - reseller_admin = \ - req.headers.get('x-auth-user-reseller-admin') == 'true' - if reseller_admin: - admin = True - if req.path_info or not account or account[0] == '.' or not user or \ - user[0] == '.' or not key: - return HTTPBadRequest(request=req) - user_arg = account + ':' + user - if reseller_admin: - if not self.is_super_admin(req) and\ - not self.is_user_changing_own_key(req, user_arg): - return HTTPUnauthorized(request=req) - elif not self.is_account_admin(req, account) and\ - not self.is_user_changing_own_key(req, user_arg): - return self.denied_response(req) - - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'HEAD', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not retrieve account id value: %s %s' % - (path, resp.status)) - headers = {'X-Object-Meta-Account-Id': - resp.headers['x-container-meta-account-id']} - # Create the object in the main auth account (this object represents - # the user) - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - groups = ['%s:%s' % (account, user), account] - if admin: - groups.append('.admin') - if reseller_admin: - groups.append('.reseller_admin') - auth_value = self.auth_encoder().encode(key) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path, - json.dumps({'auth': auth_value, - 'groups': [{'name': g} for g in groups]}), - headers=headers).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not create user object: %s %s' % - (path, resp.status)) - return HTTPCreated(request=req) - - def handle_delete_user(self, req): - """ - Handles the DELETE v2// call for deleting a user from an - account. - - Can only be called by an account .admin. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success. - """ - # Validate path info - account = req.path_info_pop() - user = req.path_info_pop() - if req.path_info or not account or account[0] == '.' or not user or \ - user[0] == '.': - return HTTPBadRequest(request=req) - - # if user to be deleted is reseller_admin, then requesting - # user must also be a reseller_admin - is_reseller_admin = self.is_user_reseller_admin(req, account, user) - if not is_reseller_admin and not req.credentials_valid: - # if user to be deleted can't be found, return 404 - return HTTPNotFound(request=req) - elif is_reseller_admin and not self.is_super_admin(req): - return HTTPForbidden(request=req) - - if not self.is_account_admin(req, account): - return self.denied_response(req) - - # Delete the user's existing token, if any. 
- path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - req.environ, 'HEAD', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) - elif resp.status_int // 100 != 2: - raise Exception('Could not obtain user details: %s %s' % - (path, resp.status)) - candidate_token = resp.headers.get('x-object-meta-auth-token') - if candidate_token: - path = quote( - '/v1/%s/.token_%s/%s' % - (self.auth_account, candidate_token[-1], candidate_token)) - resp = self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not delete possibly existing token: ' - '%s %s' % (path, resp.status)) - # Delete the user entry itself. - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - if resp.status_int // 100 != 2 and resp.status_int != 404: - raise Exception('Could not delete the user object: %s %s' % - (path, resp.status)) - return HTTPNoContent(request=req) - - def is_user_reseller_admin(self, req, account, user): - """ - Returns True if the user is a .reseller_admin. - - :param account: account user is part of - :param user: the user - :returns: True if user .reseller_admin, False - if user is not a reseller_admin and None if the user - doesn't exist. - """ - req.credentials_valid = True - user_json = self.get_user_detail(req, account, user) - if user_json is None: - req.credentials_valid = False - return False - - user_detail = json.loads(user_json) - - return '.reseller_admin' in (g['name'] for g in user_detail['groups']) - - def handle_get_token(self, req): - """ - Handles the various `request for token and service end point(s)` calls. - There are various formats to support the various auth servers in the - past. Examples:: - - GET /v1//auth - X-Auth-User: : or X-Storage-User: - X-Auth-Key: or X-Storage-Pass: - GET /auth - X-Auth-User: : or X-Storage-User: : - X-Auth-Key: or X-Storage-Pass: - GET /v1.0 - X-Auth-User: : or X-Storage-User: : - X-Auth-Key: or X-Storage-Pass: - - Values should be url encoded, "act%3Ausr" instead of "act:usr" for - example; however, for backwards compatibility the colon may be included - unencoded. - - On successful authentication, the response will have X-Auth-Token and - X-Storage-Token set to the token to use with Swift and X-Storage-URL - set to the URL to the default Swift cluster to use. - - The response body will be set to the account's services JSON object as - described here:: - - {"storage": { # Represents the Swift storage service end points - "default": "cluster1", # Indicates which cluster is the default - "cluster1": "", - # A Swift cluster that can be used with this account, - # "cluster1" is the name of the cluster which is usually a - # location indicator (like "dfw" for a datacenter region). - "cluster2": "" - # Another Swift cluster that can be used with this account, - # there will always be at least one Swift cluster to use or - # this whole "storage" dict won't be included at all. - }, - "servers": { # Represents the Nova server service end points - # Expected to be similar to the "storage" dict, but not - # implemented yet. - }, - # Possibly other service dicts, not implemented yet. - } - - One can also include an "X-Auth-New-Token: true" header to - force issuing a new token and revoking any old token, even if - it hasn't expired yet. 
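# Illustrative sketch (not part of the deleted module): requesting a token
# in the GET /auth/v1.0 form described above.  It assumes the default
# cluster address (127.0.0.1:8080) and the default /auth/ prefix; the
# account:user pair and key are placeholders.
from httplib import HTTPConnection

conn = HTTPConnection('127.0.0.1', 8080)
conn.request('GET', '/auth/v1.0',
             headers={'X-Auth-User': 'test:tester',   # <account>:<user>
                      'X-Auth-Key': 'testing'})       # the user's key
resp = conn.getresponse()
services = resp.read()                         # account's services JSON
token = resp.getheader('x-auth-token')         # use as X-Auth-Token later
storage_url = resp.getheader('x-storage-url')  # default Swift cluster URL
conn.close()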
- - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with data set as explained - above. - """ - # Validate the request info - try: - pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, - rest_with_last=True) - except ValueError: - return HTTPNotFound(request=req) - if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': - account = pathsegs[1] - user = req.headers.get('x-storage-user') - if not user: - user = unquote(req.headers.get('x-auth-user', '')) - if not user or ':' not in user: - return HTTPUnauthorized(request=req) - account2, user = user.split(':', 1) - if account != account2: - return HTTPUnauthorized(request=req) - key = req.headers.get('x-storage-pass') - if not key: - key = unquote(req.headers.get('x-auth-key', '')) - elif pathsegs[0] in ('auth', 'v1.0'): - user = unquote(req.headers.get('x-auth-user', '')) - if not user: - user = req.headers.get('x-storage-user') - if not user or ':' not in user: - return HTTPUnauthorized(request=req) - account, user = user.split(':', 1) - key = unquote(req.headers.get('x-auth-key', '')) - if not key: - key = req.headers.get('x-storage-pass') - else: - return HTTPBadRequest(request=req) - if not all((account, user, key)): - return HTTPUnauthorized(request=req) - if user == '.super_admin' and self.super_admin_key and \ - key == self.super_admin_key: - token = self.get_itoken(req.environ) - url = '%s/%s' % (self.dsc_url, self.auth_account) - return Response( - request=req, - body=json.dumps( - {'storage': {'default': 'local', - 'local': url}}), - headers={'x-auth-token': token, 'x-storage-token': token, - 'x-storage-url': url}) - # Authenticate user - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPUnauthorized(request=req) - if resp.status_int // 100 != 2: - raise Exception('Could not obtain user details: %s %s' % - (path, resp.status)) - user_detail = json.loads(resp.body) - if not self.credentials_match(user_detail, key): - return HTTPUnauthorized(request=req) - # See if a token already exists and hasn't expired - token = None - expires = None - candidate_token = resp.headers.get('x-object-meta-auth-token') - if candidate_token: - path = quote( - '/v1/%s/.token_%s/%s' % - (self.auth_account, candidate_token[-1], candidate_token)) - delete_token = False - try: - if req.headers.get('x-auth-new-token', 'false').lower() in \ - TRUE_VALUES: - delete_token = True - else: - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int // 100 == 2: - token_detail = json.loads(resp.body) - if token_detail['expires'] > time(): - token = candidate_token - expires = token_detail['expires'] - else: - delete_token = True - elif resp.status_int != 404: - raise Exception( - 'Could not detect whether a token already exists: ' - '%s %s' % (path, resp.status)) - finally: - if delete_token: - self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - # Create a new token if one didn't exist - if not token: - # Retrieve account id, we'll save this in the token - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'HEAD', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not retrieve account id value: ' - '%s %s' % (path, resp.status)) - account_id = \ - resp.headers['x-container-meta-account-id'] - # Generate 
new token - token = '%stk%s' % (self.reseller_prefix, uuid4().hex) - # Save token info - path = quote('/v1/%s/.token_%s/%s' % - (self.auth_account, token[-1], token)) - try: - token_life = min( - int(req.headers.get('x-auth-token-lifetime', - self.token_life)), - self.max_token_life) - except ValueError: - token_life = self.token_life - expires = int(time() + token_life) - resp = self.make_pre_authed_request( - req.environ, 'PUT', path, - json.dumps( - {'account': account, 'user': user, - 'account_id': account_id, - 'groups': user_detail['groups'], - 'expires': expires})).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not create new token: %s %s' % - (path, resp.status)) - # Record the token with the user info for future use. - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - req.environ, 'POST', path, - headers={'X-Object-Meta-Auth-Token': token} - ).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not save new token: %s %s' % - (path, resp.status)) - # Get the services information - path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2: - raise Exception('Could not obtain services info: %s %s' % - (path, resp.status)) - detail = json.loads(resp.body) - url = detail['storage'][detail['storage']['default']] - return Response( - request=req, body=resp.body, - headers={'x-auth-token': token, 'x-storage-token': token, - 'x-auth-token-expires': str(int(expires - time())), - 'x-storage-url': url}) - - def handle_validate_token(self, req): - """ - Handles the GET v2/.token/ call for validating a token, usually - called by a service like Swift. - - On a successful validation, X-Auth-TTL will be set for how much longer - this token is valid and X-Auth-Groups will contain a comma separated - list of groups the user belongs to. - - The first group listed will be a unique identifier for the user the - token represents. - - .reseller_admin is a special group that indicates the user should be - allowed to do anything on any account. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with data set as explained - above. 
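# Illustrative sketch (not part of the deleted module): validating a token
# via GET v2/.token/<token>, as a consuming service would.  The token value
# is a placeholder of the usual <reseller_prefix>tk<uuid hex> form.
from httplib import HTTPConnection

conn = HTTPConnection('127.0.0.1', 8080)
conn.request('GET',
             '/auth/v2/.token/AUTH_tk0123456789abcdef0123456789abcdef')
resp = conn.getresponse()
resp.read()
if resp.status == 204:
    print resp.getheader('x-auth-ttl')     # seconds the token remains valid
    print resp.getheader('x-auth-groups')  # comma separated group list
conn.close()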
- """ - token = req.path_info_pop() - if req.path_info or not token.startswith(self.reseller_prefix): - return HTTPBadRequest(request=req) - expires = groups = None - memcache_client = cache_from_env(req.environ) - if memcache_client: - memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) - cached_auth_data = memcache_client.get(memcache_key) - if cached_auth_data: - expires, groups = cached_auth_data - if expires < time(): - groups = None - if not groups: - path = quote('/v1/%s/.token_%s/%s' % - (self.auth_account, token[-1], token)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int // 100 != 2: - return HTTPNotFound(request=req) - detail = json.loads(resp.body) - expires = detail['expires'] - if expires < time(): - self.make_pre_authed_request( - req.environ, 'DELETE', path).get_response(self.app) - return HTTPNotFound(request=req) - groups = [g['name'] for g in detail['groups']] - if '.admin' in groups: - groups.remove('.admin') - groups.append(detail['account_id']) - groups = ','.join(groups) - return HTTPNoContent(headers={'X-Auth-TTL': expires - time(), - 'X-Auth-Groups': groups}) - - def get_conn(self, urlparsed=None): - """ - Returns an HTTPConnection based on the urlparse result given or the - default Swift cluster (internal url) urlparse result. - - :param urlparsed: The result from urlparse.urlparse or None to use the - default Swift cluster's value - """ - if not urlparsed: - urlparsed = self.dsc_parsed2 - if urlparsed.scheme == 'http': - return HTTPConnection(urlparsed.netloc) - else: - return HTTPSConnection(urlparsed.netloc) - - def get_itoken(self, env): - """ - Returns the current internal token to use for the auth system's own - actions with other services. Each process will create its own - itoken and the token will be deleted and recreated based on the - token_life configuration value. The itoken information is stored in - memcache because the auth process that is asked by Swift to validate - the token may not be the same as the auth process that created the - token. - """ - if not self.itoken or self.itoken_expires < time() or \ - env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \ - TRUE_VALUES: - self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) - memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) - self.itoken_expires = time() + self.token_life - 60 - memcache_client = cache_from_env(env) - if not memcache_client: - raise Exception( - 'No memcache set up; required for Swauth middleware') - memcache_client.set( - memcache_key, - (self.itoken_expires, - '%s,.reseller_admin,%s' % (self.metadata_volume, - self.auth_account)), - timeout=self.token_life) - return self.itoken - - def get_admin_detail(self, req): - """ - Returns the dict for the user specified as the admin in the request - with the addition of an `account` key set to the admin user's account. - - :param req: The swob request to retrieve X-Auth-Admin-User and - X-Auth-Admin-Key from. - :returns: The dict for the admin user with the addition of the - `account` key. 
- """ - if ':' not in req.headers.get('x-auth-admin-user', ''): - return None - admin_account, admin_user = \ - req.headers.get('x-auth-admin-user').split(':', 1) - user_json = self.get_user_detail(req, admin_account, admin_user) - if user_json is None: - return None - admin_detail = json.loads(user_json) - admin_detail['account'] = admin_account - return admin_detail - - def get_user_detail(self, req, account, user): - """ - Returns the response body of a GET request for the specified user - The body is in JSON format and contains all user information. - - :param req: The swob request - :param account: the account the user is a member of - :param user: the user - - :returns: A JSON response with the user detail information, None - if the user doesn't exist - """ - path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) - resp = self.make_pre_authed_request( - req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return None - if resp.status_int // 100 != 2: - raise Exception('Could not get user object: %s %s' % - (path, resp.status)) - return resp.body - - def credentials_match(self, user_detail, key): - """ - Returns True if the key is valid for the user_detail. - It will use self.auth_encoder to check for a key match. - - :param user_detail: The dict for the user. - :param key: The key to validate for the user. - :returns: True if the key is valid for the user, False if not. - """ - return user_detail and self.auth_encoder().match( - key, user_detail.get('auth')) - - def is_user_changing_own_key(self, req, user): - """ - Check if the user is changing his own key. - :param req: The swob.Request to check. This contains x-auth-admin-user - and x-auth-admin-key headers which are credentials of the - user sending the request. - :param user: User whose password is to be changed. - :returns True if user is changing his own key, False if not. - """ - admin_detail = self.get_admin_detail(req) - if not admin_detail: - # The user does not exist - return False - - # If user is not admin/reseller_admin and x-auth-user-admin or - # x-auth-user-reseller-admin headers are present in request, he may be - # attempting to escalate himself as admin/reseller_admin! - if '.admin' not in (g['name'] for g in admin_detail['groups']): - if req.headers.get('x-auth-user-admin') == 'true' or \ - req.headers.get('x-auth-user-reseller-admin') == 'true': - return False - if '.reseller_admin' not in \ - (g['name'] for g in admin_detail['groups']) and \ - req.headers.get('x-auth-user-reseller-admin') == 'true': - return False - - return req.headers.get('x-auth-admin-user') == user and \ - self.credentials_match(admin_detail, - req.headers.get('x-auth-admin-key')) - - def is_super_admin(self, req): - """ - Returns True if the admin specified in the request represents the - .super_admin. - - :param req: The swob.Request to check. - :param returns: True if .super_admin. - """ - return req.headers.get('x-auth-admin-user') == '.super_admin' and \ - self.super_admin_key and \ - req.headers.get('x-auth-admin-key') == self.super_admin_key - - def is_reseller_admin(self, req, admin_detail=None): - """ - Returns True if the admin specified in the request represents a - .reseller_admin. - - The variable req.credentials_valid is set to True if the credentials - match. This is used to distinguish between HTTPUnauthorized and - HTTPForbidden cases in denied_response method. HTTPUnauthorized is - returned when the credentials(username and key) do not match. 
A - HTTPForbidden is returned when the credentials match, but the user does - not have necessary permission to perform the requested action. - - :param req: The swob.Request to check. - :param admin_detail: The previously retrieved dict from - :func:`get_admin_detail` or None for this function - to retrieve the admin_detail itself. - :param returns: True if .reseller_admin. - """ - req.credentials_valid = False - if self.is_super_admin(req): - return True - if not admin_detail: - admin_detail = self.get_admin_detail(req) - if not self.credentials_match(admin_detail, - req.headers.get('x-auth-admin-key')): - return False - req.credentials_valid = True - return '.reseller_admin' in (g['name'] for g in admin_detail['groups']) - - def is_account_admin(self, req, account): - """ - Returns True if the admin specified in the request represents a .admin - for the account specified. - - The variable req.credentials_valid is set to True if the credentials - match. This is used to distinguish between HTTPUnauthorized and - HTTPForbidden cases in denied_response method. HTTPUnauthorized is - returned when the credentials(username and key) do not match. A - HTTPForbidden is returned when the credentials match, but the user does - not have necessary permission to perform the requested action. - - :param req: The swob.Request to check. - :param account: The account to check for .admin against. - :param returns: True if .admin. - """ - req.credentials_valid = False - if self.is_super_admin(req): - return True - admin_detail = self.get_admin_detail(req) - if admin_detail: - if self.is_reseller_admin(req, admin_detail=admin_detail): - return True - if not self.credentials_match(admin_detail, - req.headers.get('x-auth-admin-key')): - return False - req.credentials_valid = True - return admin_detail and admin_detail['account'] == account and \ - '.admin' in (g['name'] for g in admin_detail['groups']) - return False - - def posthooklogger(self, env, req): - if not req.path.startswith(self.auth_prefix): - return - response = getattr(req, 'response', None) - if not response: - return - trans_time = '%.4f' % (time() - req.start_time) - the_request = quote(unquote(req.path)) - if req.query_string: - the_request = the_request + '?' 
+ req.query_string - # remote user for zeus - client = req.headers.get('x-cluster-client-ip') - if not client and 'x-forwarded-for' in req.headers: - # remote user for other lbs - client = req.headers['x-forwarded-for'].split(',')[0].strip() - logged_headers = None - if self.log_headers: - logged_headers = '\n'.join('%s: %s' % (k, v) - for k, v in req.headers.items()) - status_int = response.status_int - if getattr(req, 'client_disconnect', False) or \ - getattr(response, 'client_disconnect', False): - status_int = 499 - self.logger.info( - ' '.join(quote(str(x)) for x in (client or '-', - req.remote_addr or '-', strftime('%d/%b/%Y/%H/%M/%S', gmtime()), - req.method, the_request, req.environ['SERVER_PROTOCOL'], - status_int, req.referer or '-', req.user_agent or '-', - req.headers.get( - 'x-auth-token', - req.headers.get('x-auth-admin-user', '-')), - getattr(req, 'bytes_transferred', 0) or '-', - getattr(response, 'bytes_transferred', 0) or '-', - req.headers.get('etag', '-'), - req.headers.get('x-trans-id', '-'), logged_headers or '-', - trans_time))) - - -def filter_factory(global_conf, **local_conf): - """Returns a WSGI filter app for use with paste.deploy.""" - conf = global_conf.copy() - conf.update(local_conf) - - def auth_filter(app): - return Swauth(app, conf) - return auth_filter diff --git a/gluster/swift/common/middleware/gswauth/swauth/swift_version.py b/gluster/swift/common/middleware/gswauth/swauth/swift_version.py deleted file mode 100644 index cabe284..0000000 --- a/gluster/swift/common/middleware/gswauth/swauth/swift_version.py +++ /dev/null @@ -1,71 +0,0 @@ -import swift - - -MAJOR = None -MINOR = None -REVISION = None -FINAL = None - - -def parse(value): - parts = value.split('.') - if parts[-1].endswith('-dev'): - final = False - parts[-1] = parts[-1][:-4] - else: - final = True - major = int(parts.pop(0)) - minor = int(parts.pop(0)) - if parts: - revision = int(parts.pop(0)) - else: - revision = 0 - return major, minor, revision, final - - -def newer_than(value): - global MAJOR, MINOR, REVISION, FINAL - major, minor, revision, final = parse(value) - if MAJOR is None: - MAJOR, MINOR, REVISION, FINAL = parse(swift.__version__) - if MAJOR < major: - return False - elif MAJOR == major: - if MINOR < minor: - return False - elif MINOR == minor: - if REVISION < revision: - return False - elif REVISION == revision: - if not FINAL or final: - return False - return True - - -def run_tests(): - global MAJOR, MINOR, REVISION, FINAL - MAJOR, MINOR, REVISION, FINAL = parse('1.3') - assert(newer_than('1.2')) - assert(newer_than('1.2.9')) - assert(newer_than('1.3-dev')) - assert(newer_than('1.3.0-dev')) - assert(not newer_than('1.3')) - assert(not newer_than('1.3.0')) - assert(not newer_than('1.3.1-dev')) - assert(not newer_than('1.3.1')) - assert(not newer_than('1.4')) - assert(not newer_than('2.0')) - MAJOR, MINOR, REVISION, FINAL = parse('1.7.7-dev') - assert(newer_than('1.6')) - assert(newer_than('1.7')) - assert(newer_than('1.7.6-dev')) - assert(newer_than('1.7.6')) - assert(not newer_than('1.7.7')) - assert(not newer_than('1.7.8-dev')) - assert(not newer_than('1.7.8')) - assert(not newer_than('1.8.0')) - assert(not newer_than('2.0')) - - -if __name__ == '__main__': - run_tests() diff --git a/gluster/swift/common/middleware/gswauth/test_swauth/__init__.py b/gluster/swift/common/middleware/gswauth/test_swauth/__init__.py deleted file mode 100644 index f53bc5a..0000000 --- a/gluster/swift/common/middleware/gswauth/test_swauth/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# See 
http://code.google.com/p/python-nose/issues/detail?id=373 -# The code below enables nosetests to work with i18n _() blocks - -import __builtin__ - -setattr(__builtin__, '_', lambda x: x) diff --git a/gluster/swift/common/middleware/gswauth/test_swauth/unit/__init__.py b/gluster/swift/common/middleware/gswauth/test_swauth/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_authtypes.py b/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_authtypes.py deleted file mode 100644 index d9b7b55..0000000 --- a/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_authtypes.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Pablo Llopis 2011 - -import unittest -from swauth import authtypes - - -class TestPlaintext(unittest.TestCase): - - def setUp(self): - self.auth_encoder = authtypes.Plaintext() - - def test_plaintext_encode(self): - enc_key = self.auth_encoder.encode('keystring') - self.assertEquals('plaintext:keystring', enc_key) - - def test_plaintext_valid_match(self): - creds = 'plaintext:keystring' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, True) - - def test_plaintext_invalid_match(self): - creds = 'plaintext:other-keystring' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, False) - - -class TestSha1(unittest.TestCase): - - def setUp(self): - self.auth_encoder = authtypes.Sha1() - self.auth_encoder.salt = 'salt' - - def test_sha1_encode(self): - enc_key = self.auth_encoder.encode('keystring') - self.assertEquals('sha1:salt$d50dc700c296e23ce5b41f7431a0e01f69010f06', - enc_key) - - def test_sha1_valid_match(self): - creds = 'sha1:salt$d50dc700c296e23ce5b41f7431a0e01f69010f06' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, True) - - def test_sha1_invalid_match(self): - creds = 'sha1:salt$deadbabedeadbabedeadbabec0ffeebadc0ffeee' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, False) - - -if __name__ == '__main__': - unittest.main() diff --git a/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_middleware.py b/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_middleware.py deleted file mode 100644 index 62259ff..0000000 --- a/gluster/swift/common/middleware/gswauth/test_swauth/unit/test_middleware.py +++ /dev/null @@ -1,4519 +0,0 @@ -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -import unittest -from contextlib import contextmanager -from time import time - -from swift.common.swob import Request, Response - -from swauth import middleware as auth -from swauth.authtypes import MAX_TOKEN_LENGTH - - -DEFAULT_TOKEN_LIFE = 86400 -MAX_TOKEN_LIFE = 100000 - - -class FakeMemcache(object): - - def __init__(self): - self.store = {} - - def get(self, key): - return self.store.get(key) - - def set(self, key, value, timeout=0, time=0): - self.store[key] = value - return True - - def incr(self, key, timeout=0, time=0): - self.store[key] = self.store.setdefault(key, 0) + 1 - return self.store[key] - - @contextmanager - def soft_lock(self, key, timeout=0, retries=5, time=0): - yield True - - def delete(self, key): - try: - del self.store[key] - except Exception: - pass - return True - - -class FakeApp(object): - - def __init__( - self, status_headers_body_iter=None, acl=None, sync_key=None): - self.calls = 0 - self.status_headers_body_iter = status_headers_body_iter - if not self.status_headers_body_iter: - self.status_headers_body_iter = iter( - [('404 Not Found', {}, '')]) - self.acl = acl - self.sync_key = sync_key - - def __call__(self, env, start_response): - self.calls += 1 - self.request = Request.blank('', environ=env) - if self.acl: - self.request.acl = self.acl - if self.sync_key: - self.request.environ[ - 'swift_sync_key'] = self.sync_key - if 'swift.authorize' in env: - resp = env['swift.authorize'](self.request) - if resp: - return resp(env, start_response) - status, headers, body = self.status_headers_body_iter.next( - ) - return Response(status=status, headers=headers, - body=body)(env, start_response) - - -class FakeConn(object): - - def __init__(self, status_headers_body_iter=None): - self.calls = 0 - self.status_headers_body_iter = status_headers_body_iter - if not self.status_headers_body_iter: - self.status_headers_body_iter = iter( - [('404 Not Found', {}, '')]) - - def request(self, method, path, headers): - self.calls += 1 - self.request_path = path - self.status, self.headers, self.body = \ - self.status_headers_body_iter.next() - self.status, self.reason = self.status.split(' ', 1) - self.status = int(self.status) - - def getresponse(self): - return self - - def read(self): - body = self.body - self.body = '' - return body - - -class TestAuth(unittest.TestCase): - - def setUp(self): - self.test_auth = \ - auth.filter_factory({ - 'super_admin_key': 'supertest', - 'token_life': str(DEFAULT_TOKEN_LIFE), - 'max_token_life': str(MAX_TOKEN_LIFE)})(FakeApp()) - - def test_super_admin_key_not_required(self): - auth.filter_factory({})(FakeApp()) - - def test_reseller_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.reseller_prefix, 'AUTH_') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': 'TEST'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': 'TEST_'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - - def test_auth_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': ''})(app) - 
self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': '/test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': '/test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': 'test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': 'test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - - def test_no_auth_type_init(self): - app = FakeApp() - ath = auth.filter_factory({})(app) - self.assertEquals(ath.auth_type, 'Plaintext') - - def test_valid_auth_type_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'auth_type': 'sha1'})(app) - self.assertEquals(ath.auth_type, 'Sha1') - ath = auth.filter_factory( - {'auth_type': 'plaintext'})(app) - self.assertEquals(ath.auth_type, 'Plaintext') - - def test_invalid_auth_type_init(self): - app = FakeApp() - exc = None - try: - auth.filter_factory( - {'auth_type': 'NONEXISTANT'})(app) - except Exception as err: - exc = err - self.assertEquals(str(exc), - 'Invalid auth_type in config file: %s' % - 'Nonexistant') - - def test_default_swift_cluster_init(self): - app = FakeApp() - self.assertRaises(Exception, auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#badscheme://host/path'}), app) - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.default_swift_cluster, - 'local#http://127.0.0.1:8080/v1') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#http://host/path'})(app) - self.assertEquals(ath.default_swift_cluster, - 'local#http://host/path') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://host/path/'})(app) - self.assertEquals(ath.dsc_url, 'https://host/path') - self.assertEquals(ath.dsc_url2, 'https://host/path') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': - 'local#https://host/path/#http://host2/path2/'})(app) - self.assertEquals(ath.dsc_url, 'https://host/path') - self.assertEquals( - ath.dsc_url2, - 'http://host2/path2') - - def test_top_level_denied(self): - resp = Request.blank( - '/').get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_anon(self): - resp = Request.blank( - '/v1/AUTH_account').get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.authorize) - - def test_auth_deny_non_reseller_prefix(self): - resp = Request.blank( - '/v1/BLAH_account', - headers={'X-Auth-Token': 'BLAH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.denied_response) - - def test_auth_deny_non_reseller_prefix_no_override( - self): - fake_authorize = lambda x: Response( - status='500 Fake') - resp = Request.blank( - '/v1/BLAH_account', - headers={'X-Auth-Token': 'BLAH_t'}, - environ={'swift.authorize': fake_authorize}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(resp.environ['swift.authorize'], fake_authorize) - - def test_auth_no_reseller_prefix_deny(self): - # Ensures that when we have no reseller prefix, we don't deny a request - # outright 
but set up a denial swift.authorize and pass the request on - # down the chain. - local_app = FakeApp() - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(local_app) - resp = Request.blank( - '/v1/account', - headers={'X-Auth-Token': 't'}).get_response(local_auth) - self.assertEquals(resp.status_int, 401) - # one for checking auth, two for request passed - # along - self.assertEquals(local_app.calls, 2) - self.assertEquals(resp.environ['swift.authorize'], - local_auth.denied_response) - - def test_auth_no_reseller_prefix_allow(self): - # Ensures that when we have no reseller prefix, we can still allow - # access if our auth server accepts requests - local_app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(local_app) - resp = Request.blank( - '/v1/act', - headers={'X-Auth-Token': 't'}).get_response(local_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(local_app.calls, 2) - self.assertEquals(resp.environ['swift.authorize'], - local_auth.authorize) - - def test_auth_no_reseller_prefix_no_token(self): - # Check that normally we set up a call back to our - # authorize. - local_auth = \ - auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(FakeApp(iter([]))) - resp = Request.blank( - '/v1/account').get_response( - local_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals( - resp.environ['swift.authorize'], local_auth.authorize) - # Now make sure we don't override an existing swift.authorize when we - # have no reseller prefix. - local_auth = \ - auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(FakeApp()) - local_authorize = lambda req: Response('test') - resp = Request.blank( - '/v1/account', environ={'swift.authorize': - local_authorize}).get_response(local_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - resp.environ['swift.authorize'], - local_authorize) - - def test_auth_fail(self): - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_auth_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_auth_memcache(self): - # First run our test without memcache, showing we need to return the - # token contents twice. 
- self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, ''), - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 4) - # Now run our test with memcache, showing we no longer need to return - # the token contents twice. - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, ''), - # Don't need a second token object returned if memcache is - # used - ('204 No Content', {}, '')])) - fake_memcache = FakeMemcache() - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}, - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}, - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_auth_just_expired(self): - self.test_auth.app = FakeApp(iter([ - # Request for token (which will have expired) - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() - 1})), - # Request to delete token - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_middleware_storage_token(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Storage-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_authorize_bad_path(self): - req = Request.blank('/badpath') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/badpath') - req.remote_user = 'act:usr,act,AUTH_cfa' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_account_access(self): - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act,AUTH_cfa' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - 
self.assertEquals(resp.status_int, 403) - - def test_authorize_acl_group_access(self): - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act:usr' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act2' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act:usr2' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_deny_cross_reseller(self): - # Tests that cross-reseller is denied, even if ACLs/group - # names match - req = Request.blank('/v1/OTHER_cfa') - req.remote_user = 'act:usr,act,AUTH_cfa' - req.acl = 'act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_acl_referrer_access(self): - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:*,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - - def test_detect_reseller_request(self): - req = self._make_request('/v1/AUTH_admin', - headers={'X-Auth-Token': 'AUTH_t'}) - cache_key = 'AUTH_/auth/AUTH_t' - cache_entry = (time() + 3600, '.reseller_admin') - req.environ['swift.cache'].set( - cache_key, cache_entry) - self.assertTrue(req.environ.get('reseller_request')) - - def test_account_put_permissions(self): - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank( - 
'/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,AUTH_other' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - # Even PUTs to your own account as account admin - # should fail - req = Request.blank( - '/v1/AUTH_old', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,AUTH_old' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp, None) - - # .super_admin is not something the middleware should ever see or care - # about - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,.super_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_account_delete_permissions(self): - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,AUTH_other' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - # Even DELETEs to your own account as account admin should - # fail - req = Request.blank('/v1/AUTH_old', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,AUTH_old' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp, None) - - # .super_admin is not something the middleware should ever see or care - # about - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,.super_admin' - resp = self.test_auth.authorize(req) - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_get_token_fail(self): - resp = Request.blank( - '/auth/v1.0').get_response( - self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_invalid_key(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'invalid'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_token_fail_invalid_x_auth_user_format( - self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_non_matching_account_in_request( - self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act2:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_bad_path(self): - resp = Request.blank( - 
'/auth/v1/act/auth/invalid', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_token_fail_missing_key(self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act:usr'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_get_user_details(self): - self.test_auth.app = FakeApp(iter([ - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_token_fail_get_account(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_token_fail_put_new_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_token_fail_post_to_user(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_token_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_fail_get_existing_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - 
('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_token_success_v1_0(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_with_user_token_life( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', - 'X-Auth-Token-Lifetime': 10}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - left = int(resp.headers['x-auth-token-expires']) - self.assertTrue(left > 0, '%d > 0' % left) - self.assertTrue(left <= 10, '%d <= 10' % left) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_with_user_token_life_past_max( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - req = Request.blank( - 
'/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', - 'X-Auth-Token-Lifetime': MAX_TOKEN_LIFE * 10}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - left = int(resp.headers['x-auth-token-expires']) - self.assertTrue(left > DEFAULT_TOKEN_LIFE, - '%d > %d' % (left, DEFAULT_TOKEN_LIFE)) - self.assertTrue(left <= MAX_TOKEN_LIFE, - '%d <= %d' % (left, MAX_TOKEN_LIFE)) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_act_auth(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Storage-User': 'usr', - 'X-Storage-Pass': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_storage_instead_of_auth( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Storage-User': 'act:usr', - 'X-Storage-Pass': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_( - resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_act_auth_auth_instead_of_storage( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - 
('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_existing_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps( - {"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 9999999999.9999999})), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_token_success_existing_token_but_request_new_one( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # DELETE of expired token - ('204 No Content', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', - 'X-Auth-New-Token': 'true'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 
'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_get_token_success_existing_token_expired(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps( - {"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 0.0})), - # DELETE of expired token - ('204 No Content', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 7) - - def test_get_token_success_existing_token_expired_fail_deleting_old( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 0.0})), - # DELETE of expired token - ('503 Service Unavailable', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 7) - - def test_prep_success(self): - list_to_iter = [ - # PUT of .auth account - ('201 Created', {}, ''), - # PUT of .account_id container - ('201 Created', {}, '')] - # PUT of .token* containers - for x in xrange(16): - list_to_iter.append(('201 Created', {}, '')) - self.test_auth.app = 
FakeApp(iter(list_to_iter)) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 18) - - def test_prep_bad_method(self): - resp = Request.blank('/auth/v2/.prep', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'HEAD'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_prep_bad_creds(self): - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'upertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': '.super_admin'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - resp = Request.blank( - '/auth/v2/.prep', - environ={'REQUEST_METHOD': 'POST'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - - def test_prep_fail_account_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of .auth account - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_prep_fail_token_container_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of .auth account - ('201 Created', {}, ''), - # PUT of .token container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_prep_fail_account_id_container_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of .auth account - ('201 Created', {}, ''), - # PUT of .token container - ('201 Created', {}, ''), - # PUT of .account_id container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - 
).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_reseller_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of .auth account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": ".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of .auth account (list containers - # continuation) - ('200 Ok', {}, '[]')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {"accounts": [{"name": "act"}]}) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"})), - # GET of .auth account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": ".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of .auth account (list containers - # continuation) - ('200 Ok', {}, '[]')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {"accounts": [{"name": "act"}]}) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_reseller_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_reseller_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of .auth account (list containers) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of .auth account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": 
".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of .auth account (list containers - # continuation) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_account_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, - json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'account_id': 'AUTH_cfa', - 'services': {'storage': - {'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, - 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) - self.assertEquals(self.test_auth.app.calls, 3) - - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of .services object - ('200 Ok', {}, - json.dumps({"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'account_id': 'AUTH_cfa', - 'services': {'storage': - {'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, - 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_account_fail_bad_account_name(self): - resp = Request.blank('/auth/v2/.token', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - 
).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.anything', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_account_fail_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_account_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_account_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": 
"http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_set_services_new_service(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, - json.dumps({"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'new_service': - {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'storage': {'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}, - 'new_service': {'new_endpoint': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_new_endpoint(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'storage': {'default': 'local', - 'local': - 'http://127.0.0.1:8080/v1/AUTH_cfa', - 'new_endpoint': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_update_endpoint(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {'storage': {'default': 'local', - 'local': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = 
Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_set_services_fail_bad_account_name(self): - resp = Request.blank('/auth/v2/.act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_set_services_fail_bad_json(self): - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body='garbage' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body='' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_set_services_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('503 Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps({ - 'new_service': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps({ - 'new_service': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_set_services_fail_put_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', 
{}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('503 Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'new_service': - {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_account_success(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 5) - self.assertEquals(conn.calls, 1) - - def test_put_account_success_preexist_but_not_completed( - self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for pre-existence - # We're going to show it as existing this time, but with no - # X-Container-Meta-Account-Id, indicating a failed - # previous attempt - ('200 Ok', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 4) - self.assertEquals(conn.calls, 1) - - def test_put_account_success_preexist_and_completed( - self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for pre-existence - # We're going to show it as existing this time, and with an - # X-Container-Meta-Account-Id, indicating it already - # exists - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 202) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_success_with_given_suffix(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of 
account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest', - 'X-Account-Suffix': 'test-suffix'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals( - conn.request_path, - '/v1/AUTH_test-suffix') - self.assertEquals(self.test_auth.app.calls, 5) - self.assertEquals(conn.calls, 1) - - def test_put_account_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'super:admin', - 'X-Auth-Admin-Key': 'supertest'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'act:adm', - 'X-Auth-Admin-Key': 'key'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_fail_invalid_account_name(self): - resp = Request.blank( - '/auth/v2/.act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_account_fail_on_initial_account_head(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_fail_on_account_marker_put(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 
'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_account_fail_on_storage_account_put(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('503 Service Unavailable', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(conn.calls, 1) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_account_fail_on_account_id_mapping(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(conn.calls, 1) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_put_account_fail_on_services_object(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(conn.calls, 1) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_put_account_fail_on_post_mapping(self): - conn = FakeConn(iter([ - # PUT of storage account itself - ('201 Created', {}, '')])) - self.test_auth.get_conn = lambda: conn - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(conn.calls, 1) - self.assertEquals(self.test_auth.app.calls, 5) - - def 
test_delete_account_success(self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - self.assertEquals(conn.calls, 1) - - def test_delete_account_success_missing_services(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('404 Not Found', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_delete_account_success_missing_storage_account( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('404 Not Found', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 
'.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - self.assertEquals(conn.calls, 1) - - def test_delete_account_success_missing_account_id_mapping( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('404 Not Found', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - self.assertEquals(conn.calls, 1) - - def test_delete_account_success_missing_account_container_at_end( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - self.assertEquals(conn.calls, 1) - - def test_delete_account_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', 
{}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_invalid_account_name(self): - resp = Request.blank('/auth/v2/.act', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_account_fail_not_found(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_not_found_concurrency( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_list_account(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_list_account_concurrency( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('503 
Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_has_users(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}]))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 409) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_has_users2(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}]))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 409) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_account_fail_delete_storage_account( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('409 Conflict', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - 
"content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 409) - self.assertEquals(self.test_auth.app.calls, 3) - self.assertEquals(conn.calls, 1) - - def test_delete_account_fail_delete_storage_account2( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, ''), - # DELETE of storage account itself - ('409 Conflict', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa", - "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - self.assertEquals(conn.calls, 2) - - def test_delete_account_fail_delete_storage_account3( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('503 Service Unavailable', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - self.assertEquals(conn.calls, 1) - - def test_delete_account_fail_delete_storage_account4( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, ''), - # DELETE of storage account itself - ('503 Service Unavailable', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's 
container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa", - "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - self.assertEquals(conn.calls, 2) - - def test_delete_account_fail_delete_services(self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - self.assertEquals(conn.calls, 1) - - def test_delete_account_fail_delete_account_id_mapping( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 5) - self.assertEquals(conn.calls, 1) - - def 
test_delete_account_fail_delete_account_container( - self): - conn = FakeConn(iter([ - # DELETE of storage account itself - ('204 No Content', {}, '')])) - self.test_auth.get_conn = lambda x: conn - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 6) - self.assertEquals(conn.calls, 1) - - def test_get_user_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_fail_no_super_admin_key(self): - local_auth = auth.filter_factory({})(FakeApp(iter([ - # GET of user object (but we should never get - # here) - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}))]))) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(local_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(local_auth.app.calls, 0) - - def test_get_user_groups_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester3"}, {"name": "act"}], - "auth": "plaintext:key3"})), - # GET of account container (list objects - 
# continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": ".admin"}, {"name": "act"}, - {"name": "act:tester"}, {"name": "act:tester3"}]})) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_user_groups_success2(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester3"}, {"name": "act"}], - "auth": "plaintext:key3"})), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": ".admin"}, {"name": "act"}, - {"name": "act:tester"}, {"name": "act:tester3"}]})) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_user_fail_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_user_fail_invalid_user(self): - resp = Request.blank('/auth/v2/act/.invalid', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_user_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - # 
GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_account_admin_fail_getting_account_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who is an .admin - # as well] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object (reseller admin check [and fail - # here]) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_user_account_admin_fail_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who is a - # .reseller_admin] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_reseller_admin_fail_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".reseller_admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who also is a - # .reseller_admin] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_super_admin_succeed_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of requested user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 
'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_not_found(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_fail_get_user(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_not_found(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_fail(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_user_fail_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_user_fail_invalid_user(self): - resp = Request.blank('/auth/v2/act/.usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - 
self.assertEquals(resp.status_int, 400) - - def test_put_user_fail_no_user_key(self): - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_user_reseller_admin_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (reseller admin) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:rdm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 0) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller admin) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 0) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 0) - - def test_put_user_account_admin_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_user_regular_fail_bad_creds(self): - self.test_auth.app = 
FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_user_regular_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"}) - - def test_put_user_special_chars_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/u_s-r', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:u_s-r"}, {"name": "act"}], - "auth": "plaintext:key"}) - - def test_put_user_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}) - - def test_put_user_reseller_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key', - 
'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}, {"name": ".reseller_admin"}], - "auth": "plaintext:key"}) - - def test_put_user_fail_not_found(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_user_fail(self): - self.test_auth.app = FakeApp(iter([ - # PUT of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_user_invalid_user(self): - resp = Request.blank('/auth/v2/act/.invalid', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_user_not_found(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_fail_head_user(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('503 Service 
Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_fail_delete_token(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_user_fail_delete_user(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_user_success(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_user_success_missing_user_at_end(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_user_success_missing_token(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('404 Not Found', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_user_success_no_token(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('200 Ok', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - 
).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_validate_token_bad_prefix(self): - resp = Request.blank('/auth/v2/.token/BAD_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_validate_token_tmi(self): - resp = Request.blank( - '/auth/v2/.token/AUTH_token/tmi').get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_validate_token_bad_memcache(self): - fake_memcache = FakeMemcache() - fake_memcache.set('AUTH_/auth/AUTH_token', 'bogus') - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - - def test_validate_token_from_memcache(self): - fake_memcache = FakeMemcache() - fake_memcache.set( - 'AUTH_/auth/AUTH_token', - (time() + 1, - 'act:usr,act')) - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals( - resp.headers.get('x-auth-groups'), - 'act:usr,act') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_validate_token_from_memcache_expired(self): - fake_memcache = FakeMemcache() - fake_memcache.set( - 'AUTH_/auth/AUTH_token', - (time() - 1, - 'act:usr,act')) - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assert_('x-auth-groups' not in resp.headers) - self.assert_('x-auth-ttl' not in resp.headers) - - def test_validate_token_from_object(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'groups': [{'name': 'act:usr'}, - {'name': 'act'}], 'expires': time() + 1}))])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals( - resp.headers.get('x-auth-groups'), - 'act:usr,act') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_validate_token_from_object_expired(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'groups': 'act:usr,act', - 'expires': time() - 1})), - # DELETE of expired token object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_validate_token_from_object_with_admin(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'account_id': 'AUTH_cfa', 'groups': - [{'name': 'act:usr'}, - {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 1}))])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals(resp.headers.get('x-auth-groups'), - 'act:usr,act,AUTH_cfa') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_get_conn_default(self): - conn = self.test_auth.get_conn() - self.assertEquals( - conn.__class__, - auth.HTTPConnection) - self.assertEquals(conn.host, '127.0.0.1') - self.assertEquals(conn.port, 8080) - - def 
test_get_conn_default_https(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) - conn = local_auth.get_conn() - self.assertEquals( - conn.__class__, - auth.HTTPSConnection) - self.assertEquals(conn.host, '1.2.3.4') - self.assertEquals(conn.port, 443) - - def test_get_conn_overridden(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) - conn = \ - local_auth.get_conn( - urlparsed=auth.urlparse('http://5.6.7.8/v1')) - self.assertEquals( - conn.__class__, - auth.HTTPConnection) - self.assertEquals(conn.host, '5.6.7.8') - self.assertEquals(conn.port, 80) - - def test_get_conn_overridden_https(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#http://1.2.3.4/v1'})(FakeApp()) - conn = \ - local_auth.get_conn( - urlparsed=auth.urlparse( - 'https://5.6.7.8/v1')) - self.assertEquals( - conn.__class__, - auth.HTTPSConnection) - self.assertEquals(conn.host, '5.6.7.8') - self.assertEquals(conn.port, 443) - - def test_get_itoken_fail_no_memcache(self): - exc = None - try: - self.test_auth.get_itoken({}) - except Exception as err: - exc = err - self.assertEquals(str(exc), - 'No memcache set up; required for Swauth middleware') - - def test_get_itoken_success(self): - fmc = FakeMemcache() - itk = self.test_auth.get_itoken( - {'swift.cache': fmc}) - self.assert_(itk.startswith('AUTH_itk'), itk) - expires, groups = fmc.get('AUTH_/auth/%s' % itk) - self.assert_(expires > time(), expires) - self.assertEquals( - groups, - '.auth,.reseller_admin,AUTH_.auth') - - def test_get_admin_detail_fail_no_colon(self): - self.test_auth.app = FakeApp(iter([])) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/')), - None) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'usr'})), None) - self.assertRaises( - StopIteration, self.test_auth.get_admin_detail, - Request.blank('/', headers={'X-Auth-Admin-User': 'act:usr'})) - - def test_get_admin_detail_fail_user_not_found(self): - self.test_auth.app = FakeApp( - iter([('404 Not Found', {}, '')])) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})), None) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_admin_detail_fail_get_user_error(self): - self.test_auth.app = FakeApp(iter([ - ('503 Service Unavailable', {}, '')])) - exc = None - try: - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})) - except Exception as err: - exc = err - self.assertEquals(str(exc), 'Could not get admin user object: ' - '/v1/AUTH_.auth/act/usr 503 Service Unavailable') - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_admin_detail_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - detail = self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals( - detail, {'account': 'act', - 'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}]}) - - def test_credentials_match_success(self): - self.assert_(self.test_auth.credentials_match( - {'auth': 
'plaintext:key'}, 'key')) - - def test_credentials_match_fail_no_details(self): - self.assert_( - not self.test_auth.credentials_match(None, 'notkey')) - - def test_credentials_match_fail_plaintext(self): - self.assert_(not self.test_auth.credentials_match( - {'auth': 'plaintext:key'}, 'notkey')) - - def test_is_super_admin_success(self): - self.assert_( - self.test_auth.is_super_admin( - Request.blank( - '/', - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}))) - - def test_is_super_admin_fail_bad_key(self): - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'bad'}))) - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={'X-Auth-Admin-User': '.super_admin'}))) - self.assert_( - not self.test_auth.is_super_admin(Request.blank('/'))) - - def test_is_super_admin_fail_bad_user(self): - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'bad', - 'X-Auth-Admin-Key': 'supertest'}))) - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={'X-Auth-Admin-Key': 'supertest'}))) - self.assert_( - not self.test_auth.is_super_admin(Request.blank('/'))) - - def test_is_reseller_admin_success_is_super_admin(self): - self.assert_( - self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}))) - - def test_is_reseller_admin_success_called_get_admin_detail( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_only_account_admin( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:adm'}, {'name': 'act'}, - {'name': '.admin'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_regular_user(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_bad_key(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'bad'}))) - - def test_is_account_admin_success_is_super_admin(self): - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}), 'act')) - - def test_is_account_admin_success_is_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, 
{'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:adm'}, {'name': 'act'}, - {'name': '.admin'}]}))])) - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_account_admin_different_account( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act2:adm'}, {'name': 'act2'}, - {'name': '.admin'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_regular_user(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_bad_key(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'bad'}), 'act')) - - def test_reseller_admin_but_account_is_internal_use_only( - self): - req = Request.blank('/v1/AUTH_.auth', - environ={'REQUEST_METHOD': 'GET'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_reseller_admin_but_account_is_exactly_reseller_prefix( - self): - req = Request.blank( - '/v1/AUTH_', - environ={'REQUEST_METHOD': 'GET'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def _get_token_success_v1_0_encoded( - self, saved_user, saved_key, sent_user, - sent_key): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:%s" % saved_key, - "groups": [{'name': saved_user}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': sent_user, - 'X-Auth-Key': sent_key}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_( - resp.headers.get('x-auth-token', - '').startswith('AUTH_tk'), - resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": 
{"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_encoded1(self): - self._get_token_success_v1_0_encoded( - 'act:usr', 'key', 'act%3ausr', 'key') - - def test_get_token_success_v1_0_encoded2(self): - self._get_token_success_v1_0_encoded( - 'act:u s r', 'key', 'act%3au%20s%20r', 'key') - - def test_get_token_success_v1_0_encoded3(self): - self._get_token_success_v1_0_encoded( - 'act:u s r', 'k:e:y', 'act%3au%20s%20r', 'k%3Ae%3ay') - - def test_allowed_sync_hosts(self): - a = auth.filter_factory( - {'super_admin_key': 'supertest'})(FakeApp()) - self.assertEquals( - a.allowed_sync_hosts, - ['127.0.0.1']) - a = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'allowed_sync_hosts': - '1.1.1.1,2.1.1.1, 3.1.1.1 , 4.1.1.1,, , 5.1.1.1'})(FakeApp()) - self.assertEquals( - a.allowed_sync_hosts, - ['1.1.1.1', '2.1.1.1', '3.1.1.1', '4.1.1.1', '5.1.1.1']) - - def test_reseller_admin_is_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'other', 'user': 'other:usr', - 'account_id': 'AUTH_other', - 'groups': [{'name': 'other:usr'}, {'name': 'other'}, - {'name': '.reseller_admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - req = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [True]) - - def test_admin_is_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - req = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [True]) - - def test_regular_is_not_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': - [{'name': 'act:usr'}, { - 'name': 'act'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')]), acl='act:usr') - req = Request.blank('/v1/AUTH_cfa/c', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [False]) - - def test_sync_request_success(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': 
'123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - def test_sync_request_fail_key(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'wrongsecret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='othersecret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key=None) - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_fail_no_timestamp(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={'x-container-sync-key': 'secret'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_fail_sync_host(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_success_lb_sync_host(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': - '123.456', - 'x-forwarded-for': '127.0.0.1'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': - '123.456', - 'x-cluster-client-ip': '127.0.0.1'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - def _make_request(self, path, **kwargs): - req = Request.blank(path, **kwargs) - req.environ['swift.cache'] = FakeMemcache() - return req - - def test_override_asked_for_but_not_allowed(self): - self.test_auth = \ - auth.filter_factory( - {'allow_overrides': 'false'})(FakeApp()) - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.authorize) - - def 
test_override_asked_for_and_allowed(self): - self.test_auth = \ - auth.filter_factory( - {'allow_overrides': 'true'})(FakeApp()) - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue( - 'swift.authorize' not in resp.environ) - - def test_override_default_allowed(self): - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue( - 'swift.authorize' not in resp.environ) - - def test_token_too_long(self): - req = self._make_request('/v1/AUTH_account', headers={ - 'x-auth-token': 'a' * MAX_TOKEN_LENGTH}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertNotEquals( - resp.body, - 'Token exceeds maximum length.') - req = self._make_request('/v1/AUTH_account', headers={ - 'x-auth-token': 'a' * (MAX_TOKEN_LENGTH + 1)}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - self.assertEquals( - resp.body, - 'Token exceeds maximum length.') - - def test_crazy_authorization(self): - req = self._make_request('/v1/AUTH_account', headers={ - 'authorization': 'somebody elses header value'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.denied_response) - - -if __name__ == '__main__': - unittest.main() diff --git a/gluster/swift/common/middleware/gswauth/webadmin/index.html b/gluster/swift/common/middleware/gswauth/webadmin/index.html deleted file mode 100644 index cbc7c8a..0000000 --- a/gluster/swift/common/middleware/gswauth/webadmin/index.html +++ /dev/null @@ -1,552 +0,0 @@ - - - - - - -
- Swauth web admin page (552 lines of deleted HTML; markup omitted)
- - diff --git a/gluster/swift/common/middleware/swiftkerbauth/__init__.py b/gluster/swift/common/middleware/swiftkerbauth/__init__.py deleted file mode 100644 index c752df7..0000000 --- a/gluster/swift/common/middleware/swiftkerbauth/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from swift.common.utils import readconf, config_true_value - -config_file = {} -try: - config_file = readconf("/etc/swift/proxy-server.conf", - section_name="filter:cache") -except SystemExit: - pass - -MEMCACHE_SERVERS = config_file.get('memcache_servers', None) - -config_file = {} - -try: - config_file = readconf("/etc/swift/proxy-server.conf", - section_name="filter:kerbauth") -except SystemExit: - pass - -TOKEN_LIFE = int(config_file.get('token_life', 86400)) -RESELLER_PREFIX = config_file.get('reseller_prefix', "AUTH_") -DEBUG_HEADERS = config_true_value(config_file.get('debug_headers', 'yes')) diff --git a/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/etc/httpd/conf.d/swift-auth.conf b/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/etc/httpd/conf.d/swift-auth.conf deleted file mode 100644 index 68472d8..0000000 --- a/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/etc/httpd/conf.d/swift-auth.conf +++ /dev/null @@ -1,12 +0,0 @@ - - AuthType Kerberos - AuthName "Swift Authentication" - KrbMethodNegotiate On - KrbMethodK5Passwd On - KrbSaveCredentials On - KrbServiceName HTTP/client.example.com - KrbAuthRealms EXAMPLE.COM - Krb5KeyTab /etc/httpd/conf/http.keytab - KrbVerifyKDC Off - Require valid-user - diff --git a/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/var/www/cgi-bin/swift-auth b/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/var/www/cgi-bin/swift-auth deleted file mode 100755 index 11fe0e2..0000000 --- a/gluster/swift/common/middleware/swiftkerbauth/apachekerbauth/var/www/cgi-bin/swift-auth +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
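# A minimal sketch of the /etc/swift/proxy-server.conf sections that the
# deleted swiftkerbauth/__init__.py above reads at import time; the
# memcached address is a placeholder, the other values mirror the code's
# own defaults:
#
#   [filter:cache]
#   memcache_servers = 127.0.0.1:11211
#
#   [filter:kerbauth]
#   use = egg:swiftkerbauth#kerbauth
#   token_life = 86400
#   reseller_prefix = AUTH_
#   debug_headers = yes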
- -# Requires the following command to be run: -# setsebool -P httpd_can_network_connect 1 -# setsebool -P httpd_can_network_memcache 1 - -import os -import cgi -from swift.common.memcached import MemcacheRing -from time import time, ctime -from swiftkerbauth import MEMCACHE_SERVERS, TOKEN_LIFE, DEBUG_HEADERS -from swiftkerbauth.kerbauth_utils import get_remote_user, get_auth_data, \ - generate_token, set_auth_data, get_groups_from_username - - -def main(): - try: - username = get_remote_user(os.environ) - except RuntimeError: - print "Status: 401 Unauthorized\n" - print "Malformed REMOTE_USER" - return - - if not MEMCACHE_SERVERS: - print "Status: 500 Internal Server Error\n" - print "Memcache not configured in /etc/swift/proxy-server.conf" - return - - mc_servers = [s.strip() for s in MEMCACHE_SERVERS.split(',') if s.strip()] - mc = MemcacheRing(mc_servers) - - token, expires, groups = get_auth_data(mc, username) - - if not token: - token = generate_token() - expires = time() + TOKEN_LIFE - groups = get_groups_from_username(username) - set_auth_data(mc, username, token, expires, groups) - - print "X-Auth-Token: %s" % token - print "X-Storage-Token: %s" % token - - # For debugging. - if DEBUG_HEADERS: - print "X-Debug-Remote-User: %s" % username - print "X-Debug-Groups: %s" % groups - print "X-Debug-Token-Life: %ss" % TOKEN_LIFE - print "X-Debug-Token-Expires: %s" % ctime(expires) - - print "" - -try: - print("Content-Type: text/html") - main() -except: - cgi.print_exception() diff --git a/gluster/swift/common/middleware/swiftkerbauth/kerbauth.py b/gluster/swift/common/middleware/swiftkerbauth/kerbauth.py deleted file mode 100644 index 1a63a40..0000000 --- a/gluster/swift/common/middleware/swiftkerbauth/kerbauth.py +++ /dev/null @@ -1,463 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import errno -from time import time, ctime -from traceback import format_exc -from eventlet import Timeout -from urllib import unquote - -from swift.common.swob import Request, Response -from swift.common.swob import HTTPBadRequest, HTTPForbidden, HTTPNotFound, \ - HTTPSeeOther, HTTPUnauthorized, HTTPServerError - -from swift.common.middleware.acl import clean_acl, parse_acl, referrer_allowed -from swift.common.utils import cache_from_env, get_logger, \ - split_path, config_true_value -from gluster.swift.common.middleware.swiftkerbauth.kerbauth_utils import \ - get_auth_data, generate_token, \ - set_auth_data, run_kinit, get_groups_from_username - - -class KerbAuth(object): - """ - Test authentication and authorization system. - - Add to your pipeline in proxy-server.conf, such as:: - - [pipeline:main] - pipeline = catch_errors cache kerbauth proxy-server - - Set account auto creation to true in proxy-server.conf:: - - [app:proxy-server] - account_autocreate = true - - And add a kerbauth filter section, such as:: - - [filter:kerbauth] - use = egg:swiftkerbauth#kerbauth - - See the proxy-server.conf-sample for more information. 
- - :param app: The next WSGI app in the pipeline - :param conf: The dict of configuration values - """ - - def __init__(self, app, conf): - self.app = app - self.conf = conf - self.logger = get_logger(conf, log_route='kerbauth') - self.log_headers = config_true_value(conf.get('log_headers', 'f')) - self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() - if self.reseller_prefix and self.reseller_prefix[-1] != '_': - self.reseller_prefix += '_' - self.logger.set_statsd_prefix('kerbauth.%s' % ( - self.reseller_prefix if self.reseller_prefix else 'NONE',)) - self.auth_prefix = conf.get('auth_prefix', '/auth/') - if not self.auth_prefix or not self.auth_prefix.strip('/'): - self.logger.warning('Rewriting invalid auth prefix "%s" to ' - '"/auth/" (Non-empty auth prefix path ' - 'is required)' % self.auth_prefix) - self.auth_prefix = '/auth/' - if self.auth_prefix[0] != '/': - self.auth_prefix = '/' + self.auth_prefix - if self.auth_prefix[-1] != '/': - self.auth_prefix += '/' - self.token_life = int(conf.get('token_life', 86400)) - self.auth_method = conf.get('auth_method', 'passive') - self.debug_headers = config_true_value( - conf.get('debug_headers', 'yes')) - self.realm_name = conf.get('realm_name', None) - self.allow_overrides = config_true_value( - conf.get('allow_overrides', 't')) - self.storage_url_scheme = conf.get('storage_url_scheme', 'default') - self.ext_authentication_url = conf.get('ext_authentication_url') - if not self.ext_authentication_url: - raise RuntimeError("Missing filter parameter ext_authentication_" - "url in /etc/swift/proxy-server.conf") - - def __call__(self, env, start_response): - """ - Accepts a standard WSGI application call, authenticating the request - and installing callback hooks for authorization and ACL header - validation. For an authenticated request, REMOTE_USER will be set to a - comma separated list of the user's groups. - - If the request matches the self.auth_prefix, the request will be - routed through the internal auth request handler (self.handle). - This is to handle granting tokens, etc. - """ - if self.allow_overrides and env.get('swift.authorize_override', False): - return self.app(env, start_response) - if env.get('PATH_INFO', '').startswith(self.auth_prefix): - return self.handle(env, start_response) - token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) - if token and token.startswith(self.reseller_prefix): - groups = self.get_groups(env, token) - if groups: - user = groups and groups.split(',', 1)[0] or '' - trans_id = env.get('swift.trans_id') - self.logger.debug('User: %s uses token %s (trans_id %s)' % - (user, token, trans_id)) - env['REMOTE_USER'] = groups - env['swift.authorize'] = self.authorize - env['swift.clean_acl'] = clean_acl - if '.reseller_admin' in groups: - env['reseller_request'] = True - else: - # Invalid token (may be expired) - if self.auth_method == "active": - return HTTPSeeOther( - location=self.ext_authentication_url)(env, - start_response) - elif self.auth_method == "passive": - self.logger.increment('unauthorized') - return HTTPUnauthorized()(env, start_response) - else: - # With a non-empty reseller_prefix, I would like to be called - # back for anonymous access to accounts I know I'm the - # definitive auth for. 
- try: - version, rest = split_path(env.get('PATH_INFO', ''), - 1, 2, True) - except ValueError: - version, rest = None, None - self.logger.increment('errors') - # Not my token, not my account, I can't authorize this request, - # deny all is a good idea if not already set... - if 'swift.authorize' not in env: - env['swift.authorize'] = self.denied_response - - return self.app(env, start_response) - - def get_groups(self, env, token): - """ - Get groups for the given token. - - :param env: The current WSGI environment dictionary. - :param token: Token to validate and return a group string for. - - :returns: None if the token is invalid or a string containing a comma - separated list of groups the authenticated user is a member - of. The first group in the list is also considered a unique - identifier for that user. - """ - groups = None - memcache_client = cache_from_env(env) - if not memcache_client: - raise Exception('Memcache required') - memcache_token_key = '%s/token/%s' % (self.reseller_prefix, token) - cached_auth_data = memcache_client.get(memcache_token_key) - if cached_auth_data: - expires, groups = cached_auth_data - if expires < time(): - groups = None - - return groups - - def authorize(self, req): - """ - Returns None if the request is authorized to continue or a standard - WSGI response callable if not. - - Assumes that user groups are all lower case, which is true when Red Hat - Enterprise Linux Identity Management is used. - """ - try: - version, account, container, obj = req.split_path(1, 4, True) - except ValueError: - self.logger.increment('errors') - return HTTPNotFound(request=req) - - if not account or not account.startswith(self.reseller_prefix): - self.logger.debug("Account name: %s doesn't start with " - "reseller_prefix: %s." - % (account, self.reseller_prefix)) - return self.denied_response(req) - - user_groups = (req.remote_user or '').split(',') - account_user = user_groups[1] if len(user_groups) > 1 else None - # If the user is in the reseller_admin group for our prefix, he gets - # full access to all accounts we manage. For the default reseller - # prefix, the group name is auth_reseller_admin. - admin_group = ("%sreseller_admin" % self.reseller_prefix).lower() - if admin_group in user_groups and \ - account != self.reseller_prefix and \ - account[len(self.reseller_prefix)] != '.': - req.environ['swift_owner'] = True - return None - - # The "account" is part of the request URL, and already contains the - # reseller prefix, like in "/v1/AUTH_vol1/pictures/pic1.png". - if account.lower() in user_groups and \ - (req.method not in ('DELETE', 'PUT') or container): - # If the user is admin for the account and is not trying to do an - # account DELETE or PUT... - req.environ['swift_owner'] = True - self.logger.debug("User %s has admin authorizing." - % account_user) - return None - - if (req.environ.get('swift_sync_key') - and (req.environ['swift_sync_key'] == - req.headers.get('x-container-sync-key', None)) - and 'x-timestamp' in req.headers): - self.logger.debug("Allow request with container sync-key: %s." - % req.environ['swift_sync_key']) - return None - - if req.method == 'OPTIONS': - #allow OPTIONS requests to proceed as normal - self.logger.debug("Allow OPTIONS request.") - return None - - referrers, groups = parse_acl(getattr(req, 'acl', None)) - - if referrer_allowed(req.referer, referrers): - if obj or '.rlistings' in groups: - self.logger.debug("Allow authorizing %s via referer ACL." 
- % req.referer) - return None - - for user_group in user_groups: - if user_group in groups: - self.logger.debug("User %s allowed in ACL: %s authorizing." - % (account_user, user_group)) - return None - - return self.denied_response(req) - - def denied_response(self, req): - """ - Returns a standard WSGI response callable with the status of 403 or 401 - depending on whether the REMOTE_USER is set or not. - """ - if req.remote_user: - self.logger.increment('forbidden') - return HTTPForbidden(request=req) - else: - if self.auth_method == "active": - return HTTPSeeOther(location=self.ext_authentication_url) - elif self.auth_method == "passive": - self.logger.increment('unauthorized') - return HTTPUnauthorized(request=req) - - def handle(self, env, start_response): - """ - WSGI entry point for auth requests (ones that match the - self.auth_prefix). - Wraps env in swob.Request object and passes it down. - - :param env: WSGI environment dictionary - :param start_response: WSGI callable - """ - try: - req = Request(env) - if self.auth_prefix: - req.path_info_pop() - req.bytes_transferred = '-' - req.client_disconnect = False - if 'x-storage-token' in req.headers and \ - 'x-auth-token' not in req.headers: - req.headers['x-auth-token'] = req.headers['x-storage-token'] - return self.handle_request(req)(env, start_response) - except (Exception, Timeout): - print "EXCEPTION IN handle: %s: %s" % (format_exc(), env) - self.logger.increment('errors') - start_response('500 Server Error', - [('Content-Type', 'text/plain')]) - return ['Internal server error.\n'] - - def handle_request(self, req): - """ - Entry point for auth requests (ones that match the self.auth_prefix). - Should return a WSGI-style callable (such as webob.Response). - - :param req: swob.Request object - """ - req.start_time = time() - handler = None - try: - version, account, user, _junk = req.split_path(1, 4, True) - except ValueError: - self.logger.increment('errors') - return HTTPNotFound(request=req) - if version in ('v1', 'v1.0', 'auth'): - if req.method == 'GET': - handler = self.handle_get_token - if not handler: - self.logger.increment('errors') - req.response = HTTPBadRequest(request=req) - else: - req.response = handler(req) - return req.response - - def handle_get_token(self, req): - """ - Handles the various `request for token and service end point(s)` calls. - There are various formats to support the various auth servers in the - past. - - "Active Mode" usage: - All formats require GSS (Kerberos) authentication. - - GET /v1//auth - GET /auth - GET /v1.0 - - On successful authentication, the response will have X-Auth-Token - and X-Storage-Token set to the token to use with Swift. - - "Passive Mode" usage:: - - GET /v1//auth - X-Auth-User: : or X-Storage-User: - X-Auth-Key: or X-Storage-Pass: - GET /auth - X-Auth-User: : or X-Storage-User: : - X-Auth-Key: or X-Storage-Pass: - GET /v1.0 - X-Auth-User: : or X-Storage-User: : - X-Auth-Key: or X-Storage-Pass: - - Values should be url encoded, "act%3Ausr" instead of "act:usr" for - example; however, for backwards compatibility the colon may be - included unencoded. - - On successful authentication, the response will have X-Auth-Token - and X-Storage-Token set to the token to use with Swift and - X-Storage-URL set to the URL to the default Swift cluster to use. - - :param req: The swob.Request to process. - :returns: swob.Response, 2xx on success with data set as explained - above. 
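        Example "Passive Mode" exchange (account, user and key below are
        placeholders)::

            GET /auth/v1.0
            X-Auth-User: test:tester
            X-Auth-Key: testing

        On success the 2xx response carries X-Auth-Token, X-Storage-Token
        and X-Storage-Url as described above.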
- """ - # Validate the request info - try: - pathsegs = split_path(req.path_info, 1, 3, True) - except ValueError: - self.logger.increment('errors') - return HTTPNotFound(request=req) - if not ((pathsegs[0] == 'v1' and pathsegs[2] == 'auth') - or pathsegs[0] in ('auth', 'v1.0')): - return HTTPBadRequest(request=req) - - # Client is inside the domain - if self.auth_method == "active": - return HTTPSeeOther(location=self.ext_authentication_url) - - # Client is outside the domain - elif self.auth_method == "passive": - account, user, key = None, None, None - # Extract user, account and key from request - if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': - account = pathsegs[1] - user = req.headers.get('x-storage-user') - if not user: - user = unquote(req.headers.get('x-auth-user', '')) - if user: - if ':' not in user: - return HTTPUnauthorized(request=req) - else: - account2, user = user.split(':', 1) - if account != account2: - return HTTPUnauthorized(request=req) - key = req.headers.get('x-storage-pass') - if not key: - key = unquote(req.headers.get('x-auth-key', '')) - elif pathsegs[0] in ('auth', 'v1.0'): - user = unquote(req.headers.get('x-auth-user', '')) - if not user: - user = req.headers.get('x-storage-user') - if user: - if ':' not in user: - return HTTPUnauthorized(request=req) - else: - account, user = user.split(':', 1) - key = unquote(req.headers.get('x-auth-key', '')) - if not key: - key = req.headers.get('x-storage-pass') - - if not (account or user or key): - # If all are not given, client may be part of the domain - return HTTPSeeOther(location=self.ext_authentication_url) - elif None in (key, user, account): - # If only one or two of them is given, but not all - return HTTPUnauthorized(request=req) - - # Run kinit on the user - if self.realm_name and "@" not in user: - user = user + "@" + self.realm_name - try: - ret = run_kinit(user, key) - except OSError as e: - if e.errno == errno.ENOENT: - return HTTPServerError("kinit command not found\n") - if ret != 0: - self.logger.warning("Failed: kinit %s", user) - if ret == -1: - self.logger.warning("Failed: kinit: Password has probably " - "expired.") - return HTTPServerError("Kinit is taking too long.\n") - return HTTPUnauthorized(request=req) - self.logger.debug("kinit succeeded") - - if "@" in user: - user = user.split("@")[0] - - # Check if user really belongs to the account - groups_list = get_groups_from_username(user).strip().split(",") - user_group = ("%s%s" % (self.reseller_prefix, account)).lower() - reseller_admin_group = \ - ("%sreseller_admin" % self.reseller_prefix).lower() - if user_group not in groups_list: - # Check if user is reseller_admin. If not, return Unauthorized. 
- # On AD/IdM server, auth_reseller_admin is a separate group - if reseller_admin_group not in groups_list: - return HTTPUnauthorized(request=req) - - mc = cache_from_env(req.environ) - if not mc: - raise Exception('Memcache required') - token, expires, groups = get_auth_data(mc, user) - if not token: - token = generate_token() - expires = time() + self.token_life - groups = get_groups_from_username(user) - set_auth_data(mc, user, token, expires, groups) - - headers = {'X-Auth-Token': token, - 'X-Storage-Token': token} - - if self.debug_headers: - headers.update({'X-Debug-Remote-User': user, - 'X-Debug-Groups:': groups, - 'X-Debug-Token-Life': self.token_life, - 'X-Debug-Token-Expires': ctime(expires)}) - - resp = Response(request=req, headers=headers) - resp.headers['X-Storage-Url'] = \ - '%s/v1/%s%s' % (resp.host_url, self.reseller_prefix, account) - return resp - - -def filter_factory(global_conf, **local_conf): - """Returns a WSGI filter app for use with paste.deploy.""" - conf = global_conf.copy() - conf.update(local_conf) - - def auth_filter(app): - return KerbAuth(app, conf) - return auth_filter diff --git a/gluster/swift/common/middleware/swiftkerbauth/kerbauth_utils.py b/gluster/swift/common/middleware/swiftkerbauth/kerbauth_utils.py deleted file mode 100644 index 599ef99..0000000 --- a/gluster/swift/common/middleware/swiftkerbauth/kerbauth_utils.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import random -import grp -import signal -from subprocess import Popen, PIPE -from time import time -from gluster.swift.common.middleware.swiftkerbauth \ - import TOKEN_LIFE, RESELLER_PREFIX - - -def get_remote_user(env): - """Retrieve REMOTE_USER set by Apache from environment.""" - remote_user = env.get('REMOTE_USER', "") - matches = re.match('([^@]+)@.*', remote_user) - if not matches: - raise RuntimeError("Malformed REMOTE_USER \"%s\"" % remote_user) - return matches.group(1) - - -def get_auth_data(mc, username): - """ - Returns the token, expiry time and groups for the user if it already exists - on memcache. Returns None otherwise. 
- - :param mc: MemcacheRing object - :param username: swift user - """ - token, expires, groups = None, None, None - memcache_user_key = '%s/user/%s' % (RESELLER_PREFIX, username) - candidate_token = mc.get(memcache_user_key) - if candidate_token: - memcache_token_key = '%s/token/%s' % (RESELLER_PREFIX, candidate_token) - cached_auth_data = mc.get(memcache_token_key) - if cached_auth_data: - expires, groups = cached_auth_data - if expires > time(): - token = candidate_token - else: - expires, groups = None, None - return (token, expires, groups) - - -def set_auth_data(mc, username, token, expires, groups): - """ - Stores the following key value pairs on Memcache: - (token, expires+groups) - (user, token) - """ - auth_data = (expires, groups) - memcache_token_key = "%s/token/%s" % (RESELLER_PREFIX, token) - mc.set(memcache_token_key, auth_data, time=TOKEN_LIFE) - - # Record the token with the user info for future use. - memcache_user_key = '%s/user/%s' % (RESELLER_PREFIX, username) - mc.set(memcache_user_key, token, time=TOKEN_LIFE) - - -def generate_token(): - """Generates a random token.""" - # We don't use uuid.uuid4() here because importing the uuid module - # causes (harmless) SELinux denials in the audit log on RHEL 6. If this - # is a security concern, a custom SELinux policy module could be - # written to not log those denials. - r = random.SystemRandom() - token = '%stk%s' % \ - (RESELLER_PREFIX, - ''.join(r.choice('abcdef0123456789') for x in range(32))) - return token - - -def get_groups_from_username(username): - """Return a set of groups to which the user belongs to.""" - # Retrieve the numerical group IDs. We cannot list the group names - # because group names from Active Directory may contain spaces, and - # we wouldn't be able to split the list of group names into its - # elements. - p = Popen(['id', '-G', username], stdout=PIPE) - if p.wait() != 0: - raise RuntimeError("Failure running id -G for %s" % username) - (p_stdout, p_stderr) = p.communicate() - - # Convert the group numbers into group names. - groups = [] - for gid in p_stdout.strip().split(" "): - groups.append(grp.getgrgid(int(gid))[0]) - - # The first element of the list is considered a unique identifier - # for the user. We add the username to accomplish this. - if username in groups: - groups.remove(username) - groups = [username] + groups - groups = ','.join(groups) - return groups - - -def run_kinit(username, password): - """Runs kinit command as a child process and returns the status code.""" - kinit = Popen(['kinit', username], - stdin=PIPE, stdout=PIPE, stderr=PIPE) - kinit.stdin.write('%s\n' % password) - - # The following code handles a corner case where the Kerberos password - # has expired and a prompt is displayed to enter new password. Ideally, - # we would want to read from stdout but these are blocked reads. This is - # a hack to kill the process if it's taking too long! 
- - class Alarm(Exception): - pass - - def signal_handler(signum, frame): - raise Alarm - # Set the signal handler and a 1-second alarm - signal.signal(signal.SIGALRM, signal_handler) - signal.alarm(1) - try: - kinit.wait() # Wait for the child to exit - signal.alarm(0) # Reset the alarm - return kinit.returncode # Exit status of child on graceful exit - except Alarm: - # Taking too long, kill and return error - kinit.kill() - return -1 diff --git a/gluster/swift/common/ring.py b/gluster/swift/common/ring.py deleted file mode 100644 index f8c268a..0000000 --- a/gluster/swift/common/ring.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import errno -from ConfigParser import ConfigParser -from swift.common.ring import ring -from swift.common.utils import search_tree -from gluster.swift.common.Glusterfs import SWIFT_DIR - -reseller_prefix = "AUTH_" -conf_files = search_tree(SWIFT_DIR, "proxy-server*", 'conf') -if conf_files: - conf_file = conf_files[0] - -_conf = ConfigParser() -if conf_files and _conf.read(conf_file): - if _conf.defaults().get("reseller_prefix", None): - reseller_prefix = _conf.defaults().get("reseller_prefix") - else: - for key, value in _conf._sections.items(): - if value.get("reseller_prefix", None): - reseller_prefix = value["reseller_prefix"] - break - -if not reseller_prefix.endswith('_'): - reseller_prefix = reseller_prefix + '_' - - -class Ring(ring.Ring): - - def __init__(self, serialized_path, reload_time=15, ring_name=None): - self.false_node = {'zone': 1, 'weight': 100.0, 'ip': '127.0.0.1', - 'id': 0, 'meta': '', 'device': 'volume_not_in_ring', - 'port': 6012} - self.account_list = [] - - if ring_name: - _serialized_path = os.path.join(serialized_path, - ring_name + '.ring.gz') - else: - _serialized_path = os.path.join(serialized_path) - - if not os.path.exists(_serialized_path): - raise OSError(errno.ENOENT, 'No such file or directory', - '%s ring file does not exists, aborting ' - 'proxy-server start.' % _serialized_path) - - ring.Ring.__init__(self, serialized_path, reload_time, ring_name) - - def _get_part_nodes(self, part): - seen_ids = set() - - try: - account = self.account_list[part] - except IndexError: - return [self.false_node] - else: - nodes = [] - for dev in self._devs: - if dev['device'] == account: - if dev['id'] not in seen_ids: - seen_ids.add(dev['id']) - nodes.append(dev) - if not nodes: - nodes = [self.false_node] - return nodes - - def get_part_nodes(self, part): - """ - Get the nodes that are responsible for the partition. If one - node is responsible for more than one replica of the same - partition, it will only appear in the output once. - - :param part: partition to get nodes for - :returns: list of node dicts - - See :func:`get_nodes` for a description of the node dicts. - """ - return self._get_part_nodes(part) - - def get_part(self, account, container=None, obj=None): - """ - Get the partition for an account/container/object. 
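        Example (volume/account names are illustrative)::

            ring.get_part('AUTH_test')    # -> 0, first account seen
            ring.get_part('AUTH_test2')   # -> 1, appended to account_list
            ring.get_part('AUTH_test')    # -> 0 again, already indexed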
- - :param account: account name - :param container: container name - :param obj: object name - :returns: the partition number - """ - if account.startswith(reseller_prefix): - account = account.replace(reseller_prefix, '', 1) - - # Save the account name in the table - # This makes part be the index of the location of the account - # in the list - try: - part = self.account_list.index(account) - except ValueError: - self.account_list.append(account) - part = self.account_list.index(account) - - return part - - def get_nodes(self, account, container=None, obj=None): - """ - Get the partition and nodes for an account/container/object. - If a node is responsible for more than one replica, it will - only appear in the output once. - :param account: account name - :param container: container name - :param obj: object name - :returns: a tuple of (partition, list of node dicts) - - Each node dict will have at least the following keys: - ====== =============================================================== - id unique integer identifier amongst devices - weight a float of the relative weight of this device as compared to - others; this indicates how many partitions the builder will try - to assign to this device - zone integer indicating which zone the device is in; a given - partition will not be assigned to multiple devices within the - same zone - ip the ip address of the device - port the tcp port of the device - device the device's name on disk (sdb1, for example) - meta general use 'extra' field; for example: the online date, the - hardware description - ====== =============================================================== - """ - part = self.get_part(account, container, obj) - return part, self._get_part_nodes(part) - - def get_more_nodes(self, part): - """ - Generator to get extra nodes for a partition for hinted handoff. - - :param part: partition to get handoff nodes for - :returns: generator of node dicts - - See :func:`get_nodes` for a description of the node dicts. 
- Should never be called in the swift UFO environment, so yield nothing - """ - return [] diff --git a/gluster/swift/common/utils.py b/gluster/swift/common/utils.py index 145add3..c58b870 100644 --- a/gluster/swift/common/utils.py +++ b/gluster/swift/common/utils.py @@ -16,37 +16,29 @@ import os import stat import errno +import random import logging from hashlib import md5 from eventlet import sleep import cPickle as pickle from gluster.swift.common.exceptions import GlusterFileSystemIOError from swift.common.exceptions import DiskFileNoSpace -from gluster.swift.common.fs_utils import do_getctime, do_getmtime, do_stat, \ - do_listdir, do_walk, do_rmdir, do_log_rl, get_filename_from_fd, do_open, \ - do_isdir, do_getsize, do_getxattr, do_setxattr, do_removexattr, do_read, \ - do_close, do_dup, do_lseek, do_fstat -from gluster.swift.common import Glusterfs +from gluster.swift.common.fs_utils import do_stat, \ + do_walk, do_rmdir, do_log_rl, get_filename_from_fd, do_open, \ + do_getxattr, do_setxattr, do_removexattr, do_read, \ + do_close, do_dup, do_lseek, do_fstat, do_fsync, do_rename X_CONTENT_TYPE = 'Content-Type' X_CONTENT_LENGTH = 'Content-Length' X_TIMESTAMP = 'X-Timestamp' -X_PUT_TIMESTAMP = 'X-PUT-Timestamp' X_TYPE = 'X-Type' X_ETAG = 'ETag' -X_OBJECTS_COUNT = 'X-Object-Count' -X_BYTES_USED = 'X-Bytes-Used' -X_CONTAINER_COUNT = 'X-Container-Count' X_OBJECT_TYPE = 'X-Object-Type' DIR_TYPE = 'application/directory' -ACCOUNT = 'Account' METADATA_KEY = 'user.swift.metadata' MAX_XATTR_SIZE = 65536 -CONTAINER = 'container' DIR_NON_OBJECT = 'dir' DIR_OBJECT = 'marker_dir' -TEMP_DIR = 'tmp' -ASYNCDIR = 'async_pending' # Keep in sync with swift.obj.server.ASYNCDIR FILE = 'file' FILE_TYPE = 'application/octet-stream' OBJECT = 'Object' @@ -169,49 +161,6 @@ def clean_metadata(path_or_fd): key += 1 -def validate_container(metadata): - if not metadata: - logging.warn('validate_container: No metadata') - return False - - if X_TYPE not in metadata.keys() or \ - X_TIMESTAMP not in metadata.keys() or \ - X_PUT_TIMESTAMP not in metadata.keys() or \ - X_OBJECTS_COUNT not in metadata.keys() or \ - X_BYTES_USED not in metadata.keys(): - return False - - (value, timestamp) = metadata[X_TYPE] - if value == CONTAINER: - return True - - logging.warn('validate_container: metadata type is not CONTAINER (%r)', - value) - return False - - -def validate_account(metadata): - if not metadata: - logging.warn('validate_account: No metadata') - return False - - if X_TYPE not in metadata.keys() or \ - X_TIMESTAMP not in metadata.keys() or \ - X_PUT_TIMESTAMP not in metadata.keys() or \ - X_OBJECTS_COUNT not in metadata.keys() or \ - X_BYTES_USED not in metadata.keys() or \ - X_CONTAINER_COUNT not in metadata.keys(): - return False - - (value, timestamp) = metadata[X_TYPE] - if value == ACCOUNT: - return True - - logging.warn('validate_account: metadata type is not ACCOUNT (%r)', - value) - return False - - def validate_object(metadata): if not metadata: return False @@ -232,86 +181,6 @@ def validate_object(metadata): return False -def _update_list(path, cont_path, src_list, reg_file=True, object_count=0, - bytes_used=0, obj_list=[]): - # strip the prefix off, also stripping the leading and trailing slashes - obj_path = path.replace(cont_path, '').strip(os.path.sep) - - for obj_name in src_list: - # If it is not a reg_file then it is a directory. 
- if not reg_file and not Glusterfs._implicit_dir_objects: - # Now check if this is a dir object or a gratuiously crated - # directory - metadata = \ - read_metadata(os.path.join(cont_path, obj_path, obj_name)) - if not dir_is_object(metadata): - continue - - if obj_path: - obj_list.append(os.path.join(obj_path, obj_name)) - else: - obj_list.append(obj_name) - - object_count += 1 - - if reg_file and Glusterfs._do_getsize: - bytes_used += do_getsize(os.path.join(path, obj_name)) - sleep() - - return object_count, bytes_used - - -def update_list(path, cont_path, dirs=[], files=[], object_count=0, - bytes_used=0, obj_list=[]): - if files: - object_count, bytes_used = _update_list(path, cont_path, files, True, - object_count, bytes_used, - obj_list) - if dirs: - object_count, bytes_used = _update_list(path, cont_path, dirs, False, - object_count, bytes_used, - obj_list) - return object_count, bytes_used - - -def get_container_details(cont_path): - """ - get container details by traversing the filesystem - """ - bytes_used = 0 - object_count = 0 - obj_list = [] - - if do_isdir(cont_path): - for (path, dirs, files) in do_walk(cont_path): - object_count, bytes_used = update_list(path, cont_path, dirs, - files, object_count, - bytes_used, obj_list) - - sleep() - - return obj_list, object_count, bytes_used - - -def get_account_details(acc_path): - """ - Return container_list and container_count. - """ - container_list = [] - container_count = 0 - - if do_isdir(acc_path): - for name in do_listdir(acc_path): - if name.lower() == TEMP_DIR \ - or name.lower() == ASYNCDIR \ - or not do_isdir(os.path.join(acc_path, name)): - continue - container_count += 1 - container_list.append(name) - - return container_list, container_count - - def _read_for_etag(fp): etag = md5() while True: @@ -381,49 +250,6 @@ def get_object_metadata(obj_path_or_fd): return metadata -def _add_timestamp(metadata_i): - # At this point we have a simple key/value dictionary, turn it into - # key/(value,timestamp) pairs. 
- timestamp = 0 - metadata = {} - for key, value_i in metadata_i.iteritems(): - if not isinstance(value_i, tuple): - metadata[key] = (value_i, timestamp) - else: - metadata[key] = value_i - return metadata - - -def get_container_metadata(cont_path): - objects = [] - object_count = 0 - bytes_used = 0 - objects, object_count, bytes_used = get_container_details(cont_path) - metadata = {X_TYPE: CONTAINER, - X_TIMESTAMP: normalize_timestamp( - do_getctime(cont_path)), - X_PUT_TIMESTAMP: normalize_timestamp( - do_getmtime(cont_path)), - X_OBJECTS_COUNT: object_count, - X_BYTES_USED: bytes_used} - return _add_timestamp(metadata) - - -def get_account_metadata(acc_path): - containers = [] - container_count = 0 - containers, container_count = get_account_details(acc_path) - metadata = {X_TYPE: ACCOUNT, - X_TIMESTAMP: normalize_timestamp( - do_getctime(acc_path)), - X_PUT_TIMESTAMP: normalize_timestamp( - do_getmtime(acc_path)), - X_OBJECTS_COUNT: 0, - X_BYTES_USED: 0, - X_CONTAINER_COUNT: container_count} - return _add_timestamp(metadata) - - def restore_metadata(path, metadata): meta_orig = read_metadata(path) if meta_orig: @@ -444,18 +270,6 @@ def create_object_metadata(obj_path_or_fd): return restore_metadata(obj_path_or_fd, metadata) -def create_container_metadata(cont_path): - metadata = get_container_metadata(cont_path) - rmd = restore_metadata(cont_path, metadata) - return rmd - - -def create_account_metadata(acc_path): - metadata = get_account_metadata(acc_path) - rmd = restore_metadata(acc_path, metadata) - return rmd - - # The following dir_xxx calls should definitely be replaced # with a Metadata class to encapsulate their implementation. # :FIXME: For now we have them as functions, but we should @@ -530,3 +344,41 @@ def rmobjdir(dir_path): raise else: return True + + +def write_pickle(obj, dest, tmp=None, pickle_protocol=0): + """ + Ensure that a pickle file gets written to disk. The file is first written + to a tmp file location in the destination directory path, ensured it is + synced to disk, then moved to its final destination name. + + This version takes advantage of Gluster's dot-prefix-dot-suffix naming + where the a file named ".thefile.name.9a7aasv" is hashed to the same + Gluster node as "thefile.name". This ensures the renaming of a temp file + once written does not move it to another Gluster node. + + :param obj: python object to be pickled + :param dest: path of final destination file + :param tmp: path to tmp to use, defaults to None (ignored) + :param pickle_protocol: protocol to pickle the obj with, defaults to 0 + """ + dirname = os.path.dirname(dest) + # Create destination directory + try: + os.makedirs(dirname) + except OSError as err: + if err.errno != errno.EEXIST: + raise + basename = os.path.basename(dest) + tmpname = '.' + basename + '.' + \ + md5(basename + str(random.random())).hexdigest() + tmppath = os.path.join(dirname, tmpname) + with open(tmppath, 'wb') as fo: + pickle.dump(obj, fo, pickle_protocol) + # TODO: This flush() method call turns into a flush() system call + # We'll need to wrap this as well, but we would do this by writing + # a context manager for our own open() method which returns an object + # in fo which makes the gluster API call. 
+ fo.flush() + do_fsync(fo) + do_rename(tmppath, dest) diff --git a/gluster/swift/container/__init__.py b/gluster/swift/container/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/container/server.py b/gluster/swift/container/server.py deleted file mode 100644 index 173976a..0000000 --- a/gluster/swift/container/server.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (c) 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" Container Server for Gluster Swift UFO """ - -# Simply importing this monkey patches the constraint handling to fit our -# needs -import gluster.swift.common.constraints # noqa - -from swift.container import server -from gluster.swift.common.DiskDir import DiskDir -from swift.common.utils import public, timing_stats -from swift.common.exceptions import DiskFileNoSpace -from swift.common.swob import HTTPInsufficientStorage - - -class ContainerController(server.ContainerController): - """ - Subclass of the container server's ContainerController which replaces the - _get_container_broker() method so that we can use Gluster's DiskDir - duck-type of the container DatabaseBroker object, and make the - account_update() method a no-op (information is simply stored on disk and - already updated by virtue of performaing the file system operations - directly). - """ - - def _get_container_broker(self, drive, part, account, container, **kwargs): - """ - Overriden to provide the GlusterFS specific broker that talks to - Gluster for the information related to servicing a given request - instead of talking to a database. - - :param drive: drive that holds the container - :param part: partition the container is in - :param account: account name - :param container: container name - :returns: DiskDir object, a duck-type of DatabaseBroker - """ - return DiskDir(self.root, drive, account, container, self.logger, - **kwargs) - - def account_update(self, req, account, container, broker): - """ - Update the account server(s) with latest container info. - - For Gluster, this is just a no-op, since an account is just the - directory holding all the container directories. - - :param req: swob.Request object - :param account: account name - :param container: container name - :param broker: container DB broker object - :returns: None. - """ - return None - - @public - @timing_stats() - def PUT(self, req): - try: - return server.ContainerController.PUT(self, req) - except DiskFileNoSpace: - # As container=directory in gluster-swift, we might run out of - # space or exceed quota when creating containers. 
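
Stepping back to the write_pickle() helper added in utils.py above, a minimal usage sketch; the update dict and destination path are illustrative, modelled on the async container updates the object server queues:

    from gluster.swift.common.utils import write_pickle

    update = {'op': 'PUT',
              'account': 'AUTH_test', 'container': 'cont', 'obj': 'obj',
              'headers': {'X-Size': '0',
                          'X-Content-Type': 'text/plain',
                          'X-Timestamp': '1400000000.00000',
                          'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}}

    # Parent directories are created on demand; the pickle is staged as a
    # dot-prefixed sibling of dest, fsync'd, then renamed into place, so the
    # rename never crosses Gluster nodes and a crash never leaves a partial file.
    write_pickle(update,
                 '/mnt/swiftonfile/test/async_pending-2/abc/update-1400000000.00000')
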
- drive = req.split_path(1, 1, True) - return HTTPInsufficientStorage(drive=drive, request=req) - - -def app_factory(global_conf, **local_conf): - """paste.deploy app factory for creating WSGI container server apps.""" - conf = global_conf.copy() - conf.update(local_conf) - return ContainerController(conf) diff --git a/gluster/swift/obj/diskfile.py b/gluster/swift/obj/diskfile.py index 3223dcf..3b9d040 100644 --- a/gluster/swift/obj/diskfile.py +++ b/gluster/swift/obj/diskfile.py @@ -23,7 +23,6 @@ except ImportError: import random import logging import time -from collections import defaultdict from socket import gethostname from hashlib import md5 from eventlet import sleep @@ -31,36 +30,32 @@ from greenlet import getcurrent from contextlib import contextmanager from gluster.swift.common.exceptions import AlreadyExistsAsFile, \ AlreadyExistsAsDir -from swift.common.utils import TRUE_VALUES, ThreadPool, config_true_value +from swift.common.utils import TRUE_VALUES, ThreadPool, hash_path, \ + normalize_timestamp from swift.common.exceptions import DiskFileNotExist, DiskFileError, \ DiskFileNoSpace, DiskFileDeviceUnavailable, DiskFileNotOpen, \ DiskFileExpired from swift.common.swob import multi_range_iterator from gluster.swift.common.exceptions import GlusterFileSystemOSError -from gluster.swift.common.Glusterfs import mount from gluster.swift.common.fs_utils import do_fstat, do_open, do_close, \ do_unlink, do_chown, do_fsync, do_fchown, do_stat, do_write, do_read, \ do_fadvise64, do_rename, do_fdatasync, do_lseek, do_mkdir from gluster.swift.common.utils import read_metadata, write_metadata, \ validate_object, create_object_metadata, rmobjdir, dir_is_object, \ - get_object_metadata + get_object_metadata, write_pickle from gluster.swift.common.utils import X_CONTENT_TYPE, \ X_TIMESTAMP, X_TYPE, X_OBJECT_TYPE, FILE, OBJECT, DIR_TYPE, \ FILE_TYPE, DEFAULT_UID, DEFAULT_GID, DIR_NON_OBJECT, DIR_OBJECT, \ X_ETAG, X_CONTENT_LENGTH from ConfigParser import ConfigParser, NoSectionError, NoOptionError +from swift.obj.diskfile import DiskFileManager as SwiftDiskFileManager +from swift.obj.diskfile import get_async_dir # FIXME: Hopefully we'll be able to move to Python 2.7+ where O_CLOEXEC will # be back ported. 
See http://www.python.org/dev/peps/pep-0433/ O_CLOEXEC = 02000000 -DEFAULT_DISK_CHUNK_SIZE = 65536 -DEFAULT_KEEP_CACHE_SIZE = (5 * 1024 * 1024) -DEFAULT_MB_PER_SYNC = 512 -# keep these lower-case -DISALLOWED_HEADERS = set('content-length content-type deleted etag'.split()) - MAX_RENAME_ATTEMPTS = 10 MAX_OPEN_ATTEMPTS = 10 @@ -183,13 +178,6 @@ def make_directory(full_path, uid, gid, metadata=None): _fs_conf = ConfigParser() if _fs_conf.read(os.path.join('/etc/swift', 'fs.conf')): - try: - _mkdir_locking = _fs_conf.get('DEFAULT', 'mkdir_locking', "no") \ - in TRUE_VALUES - logging.warn("The option mkdir_locking has been deprecated and is" - " no longer supported") - except (NoSectionError, NoOptionError): - pass try: _use_put_mount = _fs_conf.get('DEFAULT', 'use_put_mount', "no") \ in TRUE_VALUES @@ -223,7 +211,7 @@ def _adjust_metadata(metadata): return metadata -class OnDiskManager(object): +class DiskFileManager(SwiftDiskFileManager): """ Management class for devices, providing common place for shared parameters and methods not provided by the DiskFile class (which primarily services @@ -241,41 +229,33 @@ class OnDiskManager(object): :param logger: caller provided logger """ def __init__(self, conf, logger): - self.logger = logger - self.disk_chunk_size = int(conf.get('disk_chunk_size', - DEFAULT_DISK_CHUNK_SIZE)) - self.keep_cache_size = int(conf.get('keep_cache_size', - DEFAULT_KEEP_CACHE_SIZE)) - self.bytes_per_sync = int(conf.get('mb_per_sync', - DEFAULT_MB_PER_SYNC)) * 1024 * 1024 - self.devices = conf.get('devices', '/srv/node/') - self.mount_check = config_true_value(conf.get('mount_check', 'true')) - threads_per_disk = int(conf.get('threads_per_disk', '0')) - self.threadpools = defaultdict( - lambda: ThreadPool(nthreads=threads_per_disk)) + super(DiskFileManager, self).__init__(conf, logger) + self.reseller_prefix = \ + conf.get('reseller_prefix', 'AUTH_').strip() # Not used, currently - def _get_dev_path(self, device): - """ - Return the path to a device, checking to see that it is a proper mount - point based on a configuration parameter. - - :param device: name of target device - :returns: full path to the device, None if the path to the device is - not a proper mount point. - """ - if self.mount_check and not mount(self.devices, device): - dev_path = None - else: - dev_path = os.path.join(self.devices, device) - return dev_path - - def get_diskfile(self, device, account, container, obj, - **kwargs): - dev_path = self._get_dev_path(device) + def get_diskfile(self, device, partition, account, container, obj, + policy_idx=0, **kwargs): + dev_path = self.get_dev_path(device) if not dev_path: raise DiskFileDeviceUnavailable() return DiskFile(self, dev_path, self.threadpools[device], - account, container, obj, **kwargs) + partition, account, container, obj, + policy_idx=policy_idx, **kwargs) + + def pickle_async_update(self, device, account, container, obj, data, + timestamp, policy_idx): + # This method invokes swiftonfile's writepickle method. + # Is patching just write_pickle and calling parent method better ? 
+ device_path = self.construct_dev_path(device) + async_dir = os.path.join(device_path, get_async_dir(policy_idx)) + ohash = hash_path(account, container, obj) + self.threadpools[device].run_in_thread( + write_pickle, + data, + os.path.join(async_dir, ohash[-3:], ohash + '-' + + normalize_timestamp(timestamp)), + os.path.join(device_path, 'tmp')) + self.logger.increment('async_pendings') class DiskFileWriter(object): @@ -593,8 +573,10 @@ class DiskFile(object): :param uid: user ID disk object should assume (file or directory) :param gid: group ID disk object should assume (file or directory) """ - def __init__(self, mgr, dev_path, threadpool, account, container, obj, - uid=DEFAULT_UID, gid=DEFAULT_GID): + def __init__(self, mgr, dev_path, threadpool, partition, + account=None, container=None, obj=None, + policy_idx=0, uid=DEFAULT_UID, gid=DEFAULT_GID): + # Variables partition and policy_idx is currently unused. self._mgr = mgr self._device_path = dev_path self._threadpool = threadpool or ThreadPool(nthreads=0) @@ -607,7 +589,14 @@ class DiskFile(object): # Don't store a value for data_file until we know it exists. self._data_file = None - self._container_path = os.path.join(self._device_path, container) + # Account name contains resller_prefix which is retained and not + # stripped. This to conform to Swift's behavior where account name + # entry in Account DBs contain resller_prefix. + self._account = account + self._container = container + + self._container_path = \ + os.path.join(self._device_path, self._account, self._container) obj = obj.strip(os.path.sep) obj_path, self._obj = os.path.split(obj) if obj_path: @@ -862,6 +851,13 @@ class DiskFile(object): :raises AlreadyExistsAsFile: if path or part of a path is not a \ directory """ + # Create /account/container directory structure on mount point root + try: + os.makedirs(self._container_path) + except OSError as err: + if err.errno != errno.EEXIST: + raise + data_file = os.path.join(self._put_datadir, self._obj) # Assume the full directory path exists to the file already, and diff --git a/gluster/swift/obj/server.py b/gluster/swift/obj/server.py index 8bc080a..0ecfc27 100644 --- a/gluster/swift/obj/server.py +++ b/gluster/swift/obj/server.py @@ -15,8 +15,6 @@ """ Object Server for Gluster for Swift """ -# Simply importing this monkey patches the constraint handling to fit our -# needs import gluster.swift.common.constraints # noqa from swift.common.swob import HTTPConflict from swift.common.utils import public, timing_stats @@ -26,15 +24,7 @@ from swift.common.request_helpers import split_and_validate_path from swift.obj import server -from gluster.swift.obj.diskfile import OnDiskManager - -import os -from swift.common.exceptions import ConnectionTimeout -from swift.common.bufferedhttp import http_connect -from eventlet import Timeout -from swift.common.http import is_success -from gluster.swift.common.ring import Ring -from swift import gettext_ as _ +from gluster.swift.obj.diskfile import DiskFileManager class ObjectController(server.ObjectController): @@ -54,11 +44,10 @@ class ObjectController(server.ObjectController): """ # Common on-disk hierarchy shared across account, container and object # servers. 
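
Putting the diskfile changes above together: with the reseller prefix retained, both the data file and its queued container update live directly under the SoF mount point. A rough sketch of the resulting paths; the account, container and object names, the timestamp and the policy index are illustrative:

    import os
    from swift.common.utils import hash_path, normalize_timestamp
    from swift.obj.diskfile import get_async_dir

    device_path = '/mnt/swiftonfile/test'   # construct_dev_path(device); illustrative
    policy_idx = 2                          # whatever index the SoF policy is given

    # DiskFile: the /account/container directories are plain directories,
    # created on PUT if missing.
    data_file = os.path.join(device_path, 'AUTH_test', 'cont', 'obj')

    # pickle_async_update(): same layout vanilla Swift's object updater expects,
    # but rooted on the mount point and policy-aware.
    ohash = hash_path('AUTH_test', 'cont', 'obj')
    async_file = os.path.join(device_path,
                              get_async_dir(policy_idx),   # e.g. 'async_pending-2'
                              ohash[-3:],
                              ohash + '-' + normalize_timestamp(1400000000.0))
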
- self._ondisk_mgr = OnDiskManager(conf, self.logger) - self.swift_dir = conf.get('swift_dir', '/etc/swift') + self._diskfile_mgr = DiskFileManager(conf, self.logger) def get_diskfile(self, device, partition, account, container, obj, - **kwargs): + policy_idx, **kwargs): """ Utility method for instantiating a DiskFile object supporting a given REST API. @@ -67,108 +56,15 @@ class ObjectController(server.ObjectController): DiskFile class would simply over-ride this method to provide that behavior. """ - return self._ondisk_mgr.get_diskfile(device, account, container, obj, - **kwargs) - - def container_update(self, *args, **kwargs): - """ - Update the container when objects are updated. - - For Gluster, this is just a no-op, since a container is just the - directory holding all the objects (sub-directory hierarchy of files). - """ - return - - def get_object_ring(self): - if hasattr(self, 'object_ring'): - if not self.object_ring: - self.object_ring = Ring(self.swift_dir, ring_name='object') - else: - self.object_ring = Ring(self.swift_dir, ring_name='object') - return self.object_ring - - def async_update(self, op, account, container, obj, host, partition, - contdevice, headers_out, objdevice): - """ - In Openstack Swift, this method is called by: - * container_update (a no-op in gluster-swift) - * delete_at_update (to PUT objects into .expiring_objects account) - - The Swift's version of async_update only sends the request to - container-server to PUT the object. The container-server calls - container_update method which makes an entry for the object in it's - database. No actual object is created on disk. - - But in gluster-swift container_update is a no-op, so we'll - have to PUT an actual object. We override async_update to create a - container first and then the corresponding "tracker object" which - tracks expired objects scheduled for deletion. - """ - - headers_out['user-agent'] = 'obj-server %s' % os.getpid() - if all([host, partition, contdevice]): - # PUT the container. Send request directly to container-server - container_path = '/%s/%s' % (account, container) - try: - with ConnectionTimeout(self.conn_timeout): - ip, port = host.rsplit(':', 1) - conn = http_connect(ip, port, contdevice, partition, op, - container_path, headers_out) - with Timeout(self.node_timeout): - response = conn.getresponse() - response.read() - if not is_success(response.status): - self.logger.error(_( - 'async_update : ' - 'ERROR Container update failed :%(status)d ' - 'response from %(ip)s:%(port)s/%(dev)s'), - {'status': response.status, 'ip': ip, 'port': port, - 'dev': contdevice}) - return - except (Exception, Timeout): - self.logger.exception(_( - 'async_update : ' - 'ERROR Container update failed :%(ip)s:%(port)s/%(dev)s'), - {'ip': ip, 'port': port, 'dev': contdevice}) - - # PUT the tracker object. 
Send request directly to object-server - object_path = '/%s/%s/%s' % (account, container, obj) - headers_out['Content-Length'] = 0 - headers_out['Content-Type'] = 'text/plain' - try: - with ConnectionTimeout(self.conn_timeout): - # FIXME: Assuming that get_nodes returns single node - part, nodes = self.get_object_ring().get_nodes(account, - container, - obj) - ip = nodes[0]['ip'] - port = nodes[0]['port'] - objdevice = nodes[0]['device'] - conn = http_connect(ip, port, objdevice, partition, op, - object_path, headers_out) - with Timeout(self.node_timeout): - response = conn.getresponse() - response.read() - if is_success(response.status): - return - else: - self.logger.error(_( - 'async_update : ' - 'ERROR Object PUT failed : %(status)d ' - 'response from %(ip)s:%(port)s/%(dev)s'), - {'status': response.status, 'ip': ip, 'port': port, - 'dev': objdevice}) - except (Exception, Timeout): - self.logger.exception(_( - 'async_update : ' - 'ERROR Object PUT failed :%(ip)s:%(port)s/%(dev)s'), - {'ip': ip, 'port': port, 'dev': objdevice}) - return + return self._diskfile_mgr.get_diskfile( + device, partition, account, container, obj, policy_idx, **kwargs) @public @timing_stats() def PUT(self, request): try: + server.check_object_creation = \ + gluster.swift.common.constraints.sof_check_object_creation return server.ObjectController.PUT(self, request) except (AlreadyExistsAsFile, AlreadyExistsAsDir): device = \ diff --git a/gluster/swift/proxy/__init__.py b/gluster/swift/proxy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/gluster/swift/proxy/server.py b/gluster/swift/proxy/server.py deleted file mode 100644 index 7b2f58e..0000000 --- a/gluster/swift/proxy/server.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -# Simply importing this monkey patches the constraint handling to fit our -# needs -import gluster.swift.common.constraints # noqa - -from swift.proxy.server import Application, mimetypes # noqa -from swift.proxy.controllers import AccountController # noqa -from swift.proxy.controllers import ObjectController # noqa -from swift.proxy.controllers import ContainerController # noqa - - -def app_factory(global_conf, **local_conf): # noqa - """paste.deploy app factory for creating WSGI proxy apps.""" - conf = global_conf.copy() - conf.update(local_conf) - return Application(conf) diff --git a/modules/swift b/modules/swift deleted file mode 160000 index f310006..0000000 --- a/modules/swift +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f310006fae1af991097eee5929a1c73051eb1e00 diff --git a/setup.py b/setup.py index 0b52115..f928c5c 100644 --- a/setup.py +++ b/setup.py @@ -41,31 +41,11 @@ setup( ], install_requires=[], scripts=[ - 'bin/gluster-swift-gen-builders', 'bin/gluster-swift-print-metadata', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-add-account', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-add-user', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-cleanup-tokens', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-delete-account', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-delete-user', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-list', - 'gluster/swift/common/middleware/gswauth/bin/gswauth-prep', - 'gluster/swift/common/middleware/gswauth/bin/' - 'gswauth-set-account-service', - ], entry_points={ 'paste.app_factory': [ - 'proxy=gluster.swift.proxy.server:app_factory', 'object=gluster.swift.obj.server:app_factory', - 'container=gluster.swift.container.server:app_factory', - 'account=gluster.swift.account.server:app_factory', - ], - 'paste.filter_factory': [ - 'gswauth=gluster.swift.common.middleware.gswauth.swauth.' - 'middleware:filter_factory', - 'kerbauth=gluster.swift.common.middleware.' - 'swiftkerbauth.kerbauth:filter_factory', ], }, ) diff --git a/test/__init__.py b/test/__init__.py index 7eb5f47..3bd25b1 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -53,12 +53,12 @@ def get_config(section_name=None, defaults=None): :param section_name: the section to read (all sections if not defined) :param defaults: an optional dictionary namespace of defaults """ - config_file = os.environ.get('SWIFT_TEST_CONFIG_FILE', - '/etc/swift/test.conf') config = {} if defaults is not None: config.update(defaults) + config_file = os.environ.get('SWIFT_TEST_CONFIG_FILE', + '/etc/swift/test.conf') try: config = readconf(config_file, section_name) except SystemExit: diff --git a/test/deploy/glusterfs/conf/account-server.conf b/test/deploy/glusterfs/conf/account-server.conf deleted file mode 100644 index ca7f40f..0000000 --- a/test/deploy/glusterfs/conf/account-server.conf +++ /dev/null @@ -1,32 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the account-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = true -bind_port = 6012 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. 
-workers = 1 - -[pipeline:main] -pipeline = account-server - -[app:account-server] -use = egg:gluster_swift#account -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the account server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off diff --git a/test/deploy/glusterfs/conf/container-server.conf b/test/deploy/glusterfs/conf/container-server.conf deleted file mode 100644 index 2c6cbe4..0000000 --- a/test/deploy/glusterfs/conf/container-server.conf +++ /dev/null @@ -1,35 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the container-server workers -# start, you can *consider* setting this value to "false" to reduce the -# per-request overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = true -bind_port = 6011 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = container-server - -[app:container-server] -use = egg:gluster_swift#container -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the container server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - -#enable object versioning for functional test -allow_versions = on diff --git a/test/deploy/glusterfs/conf/fs.conf b/test/deploy/glusterfs/conf/fs.conf deleted file mode 100644 index b06a854..0000000 --- a/test/deploy/glusterfs/conf/fs.conf +++ /dev/null @@ -1,19 +0,0 @@ -[DEFAULT] -# -# IP address of a node in the GlusterFS server cluster hosting the -# volumes to be served via Swift API. -mount_ip = localhost - -# Performance optimization parameter. When turned off, the filesystem will -# see a reduced number of stat calls, resulting in substantially faster -# response time for GET and HEAD container requests on containers with large -# numbers of objects, at the expense of an accurate count of combined bytes -# used by all objects in the container. For most installations "off" works -# fine. -# -# *** Keep on for Functional Tests *** -accurate_size_in_listing = on - -# *** Keep on for Functional Tests *** -container_update_object_count = on -account_update_container_count = on diff --git a/test/deploy/glusterfs/conf/object-expirer.conf b/test/deploy/glusterfs/conf/object-expirer.conf deleted file mode 100644 index b75963c..0000000 --- a/test/deploy/glusterfs/conf/object-expirer.conf +++ /dev/null @@ -1,17 +0,0 @@ -[DEFAULT] - -[object-expirer] -# auto_create_account_prefix = . 
- -[pipeline:main] -pipeline = catch_errors cache proxy-server - -[app:proxy-server] -use = egg:swift#proxy - -[filter:cache] -use = egg:swift#memcache -memcache_servers = 127.0.0.1:11211 - -[filter:catch_errors] -use = egg:swift#catch_errors diff --git a/test/deploy/glusterfs/conf/object-server.conf b/test/deploy/glusterfs/conf/object-server.conf deleted file mode 100644 index c219c14..0000000 --- a/test/deploy/glusterfs/conf/object-server.conf +++ /dev/null @@ -1,48 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the object-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = true -bind_port = 6010 -# -# Maximum number of clients one worker can process simultaneously (it will -# actually accept N + 1). Setting this to one (1) will only handle one request -# at a time, without accepting another request concurrently. By increasing the -# number of workers to a much higher value, one can prevent slow file system -# operations for one request from starving other requests. -max_clients = 1024 -# -# If not doing the above, setting this value initially to match the number of -# CPUs is a good starting point for determining the right value. -workers = 1 -# Override swift's default behaviour for fallocate. -disable_fallocate = true - -[pipeline:main] -pipeline = object-server - -[app:object-server] -use = egg:gluster_swift#object -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# For performance, after ensuring things are running in a stable manner, you -# can turn off normal request logging for the object server to reduce the -# per-request overhead and unclutter the log files. Warnings and errors will -# still be logged. -log_requests = off -# -# Adjust this value to match the stripe width of the underlying storage array -# (not the stripe element size). This will provide a reasonable starting point -# for tuning this value. -disk_chunk_size = 65536 -# -# Adjust this value match whatever is set for the disk_chunk_size initially. -# This will provide a reasonable starting point for tuning this value. -network_chunk_size = 65556 diff --git a/test/deploy/glusterfs/conf/proxy-server.conf b/test/deploy/glusterfs/conf/proxy-server.conf deleted file mode 100644 index 165cb0c..0000000 --- a/test/deploy/glusterfs/conf/proxy-server.conf +++ /dev/null @@ -1,78 +0,0 @@ -[DEFAULT] -bind_port = 8080 -user = root -# Consider using 1 worker per CPU -workers = 1 - -[pipeline:main] -pipeline = catch_errors healthcheck proxy-logging cache gswauth proxy-logging proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy -log_facility = LOG_LOCAL1 -log_level = WARN -# The API allows for account creation and deletion, but since Gluster/Swift -# automounts a Gluster volume for a given account, there is no way to create -# or delete an account. So leave this off. -allow_account_management = false -account_autocreate = true -# Ensure the proxy server uses fast-POSTs since we don't need to make a copy -# of the entire object given that all metadata is stored in the object -# extended attributes (no .meta file used after creation) and no container -# sync feature to present. 
-object_post_as_copy = false -# Only need to recheck the account exists once a day -recheck_account_existence = 86400 -# May want to consider bumping this up if containers are created and destroyed -# infrequently. -recheck_container_existence = 60 -# Timeout clients that don't read or write to the proxy server after 5 -# seconds. -client_timeout = 5 -# Give more time to connect to the object, container or account servers in -# cases of high load. -conn_timeout = 5 -# For high load situations, once connected to an object, container or account -# server, allow for delays communicating with them. -node_timeout = 60 -# May want to consider bumping up this value to 1 - 4 MB depending on how much -# traffic is for multi-megabyte or gigabyte requests; perhaps matching the -# stripe width (not stripe element size) of your storage volume is a good -# starting point. See below for sizing information. -object_chunk_size = 65536 -# If you do decide to increase the object_chunk_size, then consider lowering -# this value to one. Up to "put_queue_length" object_chunk_size'd buffers can -# be queued to the object server for processing. Given one proxy server worker -# can handle up to 1,024 connections, by default, it will consume 10 * 65,536 -# * 1,024 bytes of memory in the worse case (default values). Be sure the -# amount of memory available on the system can accommodate increased values -# for object_chunk_size. -put_queue_depth = 10 - -[filter:catch_errors] -use = egg:swift#catch_errors - -[filter:proxy-logging] -use = egg:swift#proxy_logging - -[filter:healthcheck] -use = egg:swift#healthcheck - -[filter:tempauth] -use = egg:swift#tempauth -user_admin_admin = admin .admin .reseller_admin -user_test_tester = testing .admin -user_test2_tester2 = testing2 .admin -user_test_tester3 = testing3 - -[filter:gswauth] -use = egg:gluster_swift#gswauth -set log_name = gswauth -super_admin_key = gswauthkey -metadata_volume = gsmetadata - -[filter:cache] -use = egg:swift#memcache -# Update this line to contain a comma separated list of memcache servers -# shared by all nodes running the proxy-server service. -memcache_servers = localhost:11211 diff --git a/test/deploy/glusterfs/conf/swift.conf b/test/deploy/glusterfs/conf/swift.conf deleted file mode 100644 index f64ba5a..0000000 --- a/test/deploy/glusterfs/conf/swift.conf +++ /dev/null @@ -1,85 +0,0 @@ -[DEFAULT] - - -[swift-hash] -# random unique string that can never change (DO NOT LOSE) -swift_hash_path_suffix = gluster - - -# The swift-constraints section sets the basic constraints on data -# saved in the swift cluster. - -[swift-constraints] - -# max_file_size is the largest "normal" object that can be saved in -# the cluster. This is also the limit on the size of each segment of -# a "large" object when using the large object manifest support. -# This value is set in bytes. Setting it to lower than 1MiB will cause -# some tests to fail. -# Default is 1 TiB = 2**30*1024 -max_file_size = 1099511627776 - - -# max_meta_name_length is the max number of bytes in the utf8 encoding -# of the name portion of a metadata header. 
- -#max_meta_name_length = 128 - - -# max_meta_value_length is the max number of bytes in the utf8 encoding -# of a metadata value - -#max_meta_value_length = 256 - - -# max_meta_count is the max number of metadata keys that can be stored -# on a single account, container, or object - -#max_meta_count = 90 - - -# max_meta_overall_size is the max number of bytes in the utf8 encoding -# of the metadata (keys + values) - -#max_meta_overall_size = 4096 - - -# max_object_name_length is the max number of bytes in the utf8 encoding of an -# object name: Gluster FS can handle much longer file names, but the length -# between the slashes of the URL is handled below. Remember that most web -# clients can't handle anything greater than 2048, and those that do are -# rather clumsy. - -max_object_name_length = 2048 - -# max_object_name_component_length (GlusterFS) is the max number of bytes in -# the utf8 encoding of an object name component (the part between the -# slashes); this is a limit imposed by the underlying file system (for XFS it -# is 255 bytes). - -max_object_name_component_length = 255 - -# container_listing_limit is the default (and max) number of items -# returned for a container listing request - -#container_listing_limit = 10000 - - -# account_listing_limit is the default (and max) number of items returned -# for an account listing request - -#account_listing_limit = 10000 - - -# max_account_name_length is the max number of bytes in the utf8 encoding of -# an account name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_account_name_length = 255 - - -# max_container_name_length is the max number of bytes in the utf8 encoding -# of a container name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_container_name_length = 255 diff --git a/test/deploy/glusterfs/conf/test.conf b/test/deploy/glusterfs/conf/test.conf deleted file mode 100644 index 15c9aea..0000000 --- a/test/deploy/glusterfs/conf/test.conf +++ /dev/null @@ -1,58 +0,0 @@ -[func_test] -# sample config -auth_host = 127.0.0.1 -auth_port = 8080 -auth_ssl = no -auth_prefix = /auth/ -## sample config for Swift with Keystone -#auth_version = 2 -#auth_host = localhost -#auth_port = 5000 -#auth_ssl = no -#auth_prefix = /v2.0/ - -# GSWauth internal admin user configuration information -admin_key = gswauthkey -admin_user = .super_admin - -# Gluster setup information -devices = /mnt/gluster-object -gsmetadata_volume = gsmetadata - -# Primary functional test account (needs admin access to the account) -account = test -username = tester -password = testing - -# User on a second account (needs admin access to the account) -account2 = test2 -username2 = tester2 -password2 = testing2 - -# User on same account as first, but without admin access -username3 = tester3 -password3 = testing3 - -# Default constraints if not defined here, the test runner will try -# to set them from /etc/swift/swift.conf. If that file isn't found, -# the test runner will skip tests that depend on these values. -# Note that the cluster must have "sane" values for the test suite to pass. 
-#max_file_size = 5368709122 -#max_meta_name_length = 128 -#max_meta_value_length = 256 -#max_meta_count = 90 -#max_meta_overall_size = 4096 -#max_object_name_length = 1024 -#container_listing_limit = 10000 -#account_listing_limit = 10000 -#max_account_name_length = 256 -#max_container_name_length = 256 -normalized_urls = True - -collate = C - -[unit_test] -fake_syslog = False - -[probe_test] -# check_server_timeout = 30 diff --git a/test/deploy/glusterfs/tests.sh b/test/deploy/glusterfs/tests.sh deleted file mode 100755 index ee1b683..0000000 --- a/test/deploy/glusterfs/tests.sh +++ /dev/null @@ -1,106 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2014 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This program expects to be run against a locally deployed swiftonfile -# applicatoin. This tests also expects three glusterfs volumes to have -# been created: 'test', 'test2', and 'gsmetadata'. - -cleanup() -{ - service memcached stop - swift-init main stop - if [ -x /etc/swift.bak ] ; then - rm -rf /etc/swift > /dev/null 2>&1 - mv /etc/swift.bak /etc/swift > /dev/null 2>&1 - fi - rm -rf /mnt/gluster-object/test{,2}/* > /dev/null 2>&1 - setfattr -x user.swift.metadata /mnt/gluster-object/test{,2} > /dev/null 2>&1 - gswauth_cleanup -} - -gswauth_cleanup() -{ - rm -rf /mnt/gluster-object/gsmetadata/.* > /dev/null 2>&1 - rm -rf /mnt/gluster-object/gsmetadata/* > /dev/null 2>&1 - setfattr -x user.swift.metadata /mnt/gluster-object/gsmetadata > /dev/null 2>&1 -} - -quit() -{ - echo "$1" - exit 1 -} - - -fail() -{ - cleanup - quit "$1" -} - -run_generic_tests() -{ - # clean up gsmetadata dir - gswauth_cleanup - - #swauth-prep - gswauth-prep -K gswauthkey || fail "Unable to prep gswauth" - gswauth-add-user -K gswauthkey -a test tester testing || fail "Unable to add user test" - gswauth-add-user -K gswauthkey -a test2 tester2 testing2 || fail "Unable to add user test2" - gswauth-add-user -K gswauthkey test tester3 testing3 || fail "Unable to add user test3" - - nosetests -v --exe \ - --with-xunit \ - --xunit-file functional_tests/gluster-swift-gswauth-generic-functional-TC-report.xml \ - test/functional || fail "Functional tests failed" - nosetests -v --exe \ - --with-xunit \ - --xunit-file functional_tests/gluster-swift-gswauth-functionalnosetests-TC-report.xml \ - test/functionalnosetests || fail "Functional-nose tests failed" -} - -### MAIN ### - -# Backup the swift directory if it already exists -if [ -x /etc/swift ] ; then - mv /etc/swift /etc/swift.bak -fi - -export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf - -# Install the configuration files -mkdir /etc/swift > /dev/null 2>&1 -cp -r test/deploy/glusterfs/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift" -gluster-swift-gen-builders test test2 gsmetadata || fail "Unable to create ring files" - -# Start the services -service memcached start || fail "Unable to start memcached" -swift-init main start || fail "Unable to start swift" - -#swauth-prep -gswauth-prep -K gswauthkey || fail "Unable to prep gswauth" 
- -mkdir functional_tests > /dev/null 2>&1 -nosetests -v --exe \ - --with-xunit \ - --xunit-file functional_tests/gluster-swift-gswauth-functional-TC-report.xml \ - test/functional_auth/gswauth || fail "Functional gswauth test failed" - -run_generic_tests - -cleanup -exit 0 diff --git a/test/functional/__init__.py b/test/functional/__init__.py index e69de29..fb9f421 100644 --- a/test/functional/__init__.py +++ b/test/functional/__init__.py @@ -0,0 +1,729 @@ +# Copyright (c) 2014 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import pickle +import socket +import locale +import eventlet +import eventlet.debug +import functools +import random +from time import time, sleep +from httplib import HTTPException +from urlparse import urlparse +from nose import SkipTest +from contextlib import closing +from gzip import GzipFile +from shutil import rmtree +from tempfile import mkdtemp + +from test import get_config +from test.functional.swift_test_client import Connection, ResponseError +# This has the side effect of mocking out the xattr module so that unit tests +# (and in this case, when in-process functional tests are called for) can run +# on file systems that don't support extended attributes. +from test.unit import debug_logger, FakeMemcache + +from swift.common import constraints, utils, ring, storage_policy +from swift.common.wsgi import monkey_patch_mimetools +from swift.common.middleware import catch_errors, gatekeeper, healthcheck, \ + proxy_logging, container_sync, bulk, tempurl, slo, dlo, ratelimit, \ + tempauth, container_quotas, account_quotas +from swift.common.utils import config_true_value +from swift.proxy import server as proxy_server +from swift.account import server as account_server +from swift.container import server as container_server +from swift.obj import server as object_server +import swift.proxy.controllers.obj + +# In order to get the proper blocking behavior of sockets without using +# threads, where we can set an arbitrary timeout for some piece of code under +# test, we use eventlet with the standard socket library patched. We have to +# perform this setup at module import time, since all the socket module +# bindings in the swiftclient code will have been made by the time nose +# invokes the package or class setup methods. +eventlet.hubs.use_hub(utils.get_hub()) +eventlet.patcher.monkey_patch(all=False, socket=True) +eventlet.debug.hub_exceptions(False) + +from swiftclient import get_auth, http_connection + +has_insecure = False +try: + from swiftclient import __version__ as client_version + # Prevent a ValueError in StrictVersion with '2.0.3.68.ga99c2ff' + client_version = '.'.join(client_version.split('.')[:3]) +except ImportError: + # Pre-PBR we had version, not __version__. Anyhow... 
+ client_version = '1.2' +from distutils.version import StrictVersion +if StrictVersion(client_version) >= StrictVersion('2.0'): + has_insecure = True + + +config = {} +web_front_end = None +normalized_urls = None + +# If no config was read, we will fall back to old school env vars +swift_test_auth_version = None +swift_test_auth = os.environ.get('SWIFT_TEST_AUTH') +swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None] +swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None] +swift_test_tenant = ['', '', ''] +swift_test_perm = ['', '', ''] + +skip, skip2, skip3 = False, False, False + +orig_collate = '' +insecure = False + +orig_hash_path_suff_pref = ('', '') +orig_swift_conf_name = None + +in_process = False +_testdir = _test_servers = _test_sockets = _test_coros = None + + +class FakeMemcacheMiddleware(object): + """ + Caching middleware that fakes out caching in swift. + """ + + def __init__(self, app, conf): + self.app = app + self.memcache = FakeMemcache() + + def __call__(self, env, start_response): + env['swift.cache'] = self.memcache + return self.app(env, start_response) + + +def fake_memcache_filter_factory(conf): + def filter_app(app): + return FakeMemcacheMiddleware(app, conf) + return filter_app + + +# swift.conf contents for in-process functional test runs +functests_swift_conf = ''' +[swift-hash] +swift_hash_path_suffix = inprocfunctests +swift_hash_path_prefix = inprocfunctests + +[swift-constraints] +max_file_size = %d +''' % ((8 * 1024 * 1024) + 2) # 8 MB + 2 + + +def in_process_setup(the_object_server=object_server): + print >>sys.stderr, 'IN-PROCESS SERVERS IN USE FOR FUNCTIONAL TESTS' + + monkey_patch_mimetools() + + global _testdir + _testdir = os.path.join(mkdtemp(), 'tmp_functional') + utils.mkdirs(_testdir) + rmtree(_testdir) + utils.mkdirs(os.path.join(_testdir, 'sda1')) + utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp')) + utils.mkdirs(os.path.join(_testdir, 'sdb1')) + utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp')) + + swift_conf = os.path.join(_testdir, "swift.conf") + with open(swift_conf, "w") as scfp: + scfp.write(functests_swift_conf) + + global orig_swift_conf_name + orig_swift_conf_name = utils.SWIFT_CONF_FILE + utils.SWIFT_CONF_FILE = swift_conf + constraints.reload_constraints() + storage_policy.SWIFT_CONF_FILE = swift_conf + storage_policy.reload_storage_policies() + global config + if constraints.SWIFT_CONSTRAINTS_LOADED: + # Use the swift constraints that are loaded for the test framework + # configuration + config.update(constraints.EFFECTIVE_CONSTRAINTS) + else: + # In-process swift constraints were not loaded, somethings wrong + raise SkipTest + global orig_hash_path_suff_pref + orig_hash_path_suff_pref = utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX + utils.validate_hash_conf() + + # We create the proxy server listening socket to get its port number so + # that we can add it as the "auth_port" value for the functional test + # clients. + prolis = eventlet.listen(('localhost', 0)) + + # The following set of configuration values is used both for the + # functional test frame work and for the various proxy, account, container + # and object servers. 
+ config.update({ + # Values needed by the various in-process swift servers + 'devices': _testdir, + 'swift_dir': _testdir, + 'mount_check': 'false', + 'client_timeout': 4, + 'allow_account_management': 'true', + 'account_autocreate': 'true', + 'allowed_headers': + 'content-disposition, content-encoding, x-delete-at,' + ' x-object-manifest, x-static-large-object', + 'allow_versions': 'True', + # Below are values used by the functional test framework, as well as + # by the various in-process swift servers + 'auth_host': '127.0.0.1', + 'auth_port': str(prolis.getsockname()[1]), + 'auth_ssl': 'no', + 'auth_prefix': '/auth/', + # Primary functional test account (needs admin access to the + # account) + 'account': 'test', + 'username': 'tester', + 'password': 'testing', + # User on a second account (needs admin access to the account) + 'account2': 'test2', + 'username2': 'tester2', + 'password2': 'testing2', + # User on same account as first, but without admin access + 'username3': 'tester3', + 'password3': 'testing3', + # For tempauth middleware + 'user_admin_admin': 'admin .admin .reseller_admin', + 'user_test_tester': 'testing .admin', + 'user_test2_tester2': 'testing2 .admin', + 'user_test_tester3': 'testing3' + }) + + acc1lis = eventlet.listen(('localhost', 0)) + acc2lis = eventlet.listen(('localhost', 0)) + con1lis = eventlet.listen(('localhost', 0)) + con2lis = eventlet.listen(('localhost', 0)) + obj1lis = eventlet.listen(('localhost', 0)) + obj2lis = eventlet.listen(('localhost', 0)) + global _test_sockets + _test_sockets = \ + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) + + account_ring_path = os.path.join(_testdir, 'account.ring.gz') + with closing(GzipFile(account_ring_path, 'wb')) as f: + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': acc1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': acc2lis.getsockname()[1]}], 30), + f) + container_ring_path = os.path.join(_testdir, 'container.ring.gz') + with closing(GzipFile(container_ring_path, 'wb')) as f: + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': con1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': con2lis.getsockname()[1]}], 30), + f) + object_ring_path = os.path.join(_testdir, 'object.ring.gz') + with closing(GzipFile(object_ring_path, 'wb')) as f: + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': obj1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': obj2lis.getsockname()[1]}], 30), + f) + + eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0" + # Turn off logging requests by the underlying WSGI software. + eventlet.wsgi.HttpProtocol.log_request = lambda *a: None + logger = utils.get_logger(config, 'wsgi-server', log_route='wsgi') + # Redirect logging other messages by the underlying WSGI software. 
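
The *.ring.gz files written above are just pickled RingData blobs, a legacy format Swift's Ring class still accepts. A condensed sketch with made-up ports, showing what the arguments mean and how such a ring resolves nodes:

    import pickle
    from contextlib import closing
    from gzip import GzipFile

    from swift.common import ring

    # Two replicas, four partitions: replica r of partition p is served by
    # devs[replica2part2dev[r][p]].  part_shift=30 leaves 32 - 30 = 2 bits of
    # the path hash for the partition number, i.e. 4 partitions.
    replica2part2dev = [[0, 1, 0, 1], [1, 0, 1, 0]]
    devs = [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', 'port': 6010},
            {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', 'port': 6020}]

    with closing(GzipFile('/tmp/object.ring.gz', 'wb')) as f:
        pickle.dump(ring.RingData(replica2part2dev, devs, 30), f)

    part, nodes = ring.Ring('/tmp', ring_name='object').get_nodes(
        'AUTH_test', 'cont', 'obj')
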
+ eventlet.wsgi.HttpProtocol.log_message = \ + lambda s, f, *a: logger.error('ERROR WSGI: ' + f % a) + # Default to only 4 seconds for in-process functional test runs + eventlet.wsgi.WRITE_TIMEOUT = 4 + + prosrv = proxy_server.Application(config, logger=debug_logger('proxy')) + acc1srv = account_server.AccountController( + config, logger=debug_logger('acct1')) + acc2srv = account_server.AccountController( + config, logger=debug_logger('acct2')) + con1srv = container_server.ContainerController( + config, logger=debug_logger('cont1')) + con2srv = container_server.ContainerController( + config, logger=debug_logger('cont2')) + obj1srv = the_object_server.ObjectController( + config, logger=debug_logger('obj1')) + obj2srv = the_object_server.ObjectController( + config, logger=debug_logger('obj2')) + global _test_servers + _test_servers = \ + (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv) + + pipeline = [ + catch_errors.filter_factory, + gatekeeper.filter_factory, + healthcheck.filter_factory, + proxy_logging.filter_factory, + fake_memcache_filter_factory, + container_sync.filter_factory, + bulk.filter_factory, + tempurl.filter_factory, + slo.filter_factory, + dlo.filter_factory, + ratelimit.filter_factory, + tempauth.filter_factory, + container_quotas.filter_factory, + account_quotas.filter_factory, + proxy_logging.filter_factory, + ] + app = prosrv + import mock + for filter_factory in reversed(pipeline): + app_filter = filter_factory(config) + with mock.patch('swift.common.utils') as mock_utils: + mock_utils.get_logger.return_value = None + app = app_filter(app) + app.logger = prosrv.logger + + nl = utils.NullLogger() + prospa = eventlet.spawn(eventlet.wsgi.server, prolis, app, nl) + acc1spa = eventlet.spawn(eventlet.wsgi.server, acc1lis, acc1srv, nl) + acc2spa = eventlet.spawn(eventlet.wsgi.server, acc2lis, acc2srv, nl) + con1spa = eventlet.spawn(eventlet.wsgi.server, con1lis, con1srv, nl) + con2spa = eventlet.spawn(eventlet.wsgi.server, con2lis, con2srv, nl) + obj1spa = eventlet.spawn(eventlet.wsgi.server, obj1lis, obj1srv, nl) + obj2spa = eventlet.spawn(eventlet.wsgi.server, obj2lis, obj2srv, nl) + global _test_coros + _test_coros = \ + (prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa) + + # Create accounts "test" and "test2" + def create_account(act): + ts = utils.normalize_timestamp(time()) + partition, nodes = prosrv.account_ring.get_nodes(act) + for node in nodes: + # Note: we are just using the http_connect method in the object + # controller here to talk to the account server nodes. + conn = swift.proxy.controllers.obj.http_connect( + node['ip'], node['port'], node['device'], partition, 'PUT', + '/' + act, {'X-Timestamp': ts, 'x-trans-id': act}) + resp = conn.getresponse() + assert(resp.status == 201) + + create_account('AUTH_test') + create_account('AUTH_test2') + +cluster_info = {} + + +def get_cluster_info(): + # The fallback constraints used for testing will come from the current + # effective constraints. + eff_constraints = dict(constraints.EFFECTIVE_CONSTRAINTS) + + # We'll update those constraints based on what the /info API provides, if + # anything. 
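
To make the constraint precedence concrete, the merging done by get_cluster_info() behaves roughly like the following; the numbers are illustrative only, taken from defaults and conf files appearing elsewhere in this patch:

    # Lowest to highest precedence, mirroring the update() calls below:
    defaults  = {'max_file_size': 5368709122}      # constraints.EFFECTIVE_CONSTRAINTS
    from_info = {'max_file_size': 1099511627776}   # proxy /info, when reachable
    from_conf = {'max_file_size': 8388610}         # [swift-constraints] in test.conf

    eff = dict(defaults)
    eff.update(from_info)
    eff.update(from_conf)
    assert eff['max_file_size'] == 8388610         # test.conf wins when it sets a value
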
+ global cluster_info + try: + conn = Connection(config) + conn.authenticate() + cluster_info.update(conn.cluster_info()) + except (ResponseError, socket.error): + # Failed to get cluster_information via /info API, so fall back on + # test.conf data + pass + else: + eff_constraints.update(cluster_info.get('swift', {})) + + # Finally, we'll allow any constraint present in the swift-constraints + # section of test.conf to override everything. Note that only those + # constraints defined in the constraints module are converted to integers. + test_constraints = get_config('swift-constraints') + for k in constraints.DEFAULT_CONSTRAINTS: + try: + test_constraints[k] = int(test_constraints[k]) + except KeyError: + pass + except ValueError: + print >>sys.stderr, "Invalid constraint value: %s = %s" % ( + k, test_constraints[k]) + eff_constraints.update(test_constraints) + + # Just make it look like these constraints were loaded from a /info call, + # even if the /info call failed, or when they are overridden by values + # from the swift-constraints section of test.conf + cluster_info['swift'] = eff_constraints + + +def setup_package(): + in_process_env = os.environ.get('SWIFT_TEST_IN_PROCESS') + if in_process_env is not None: + use_in_process = utils.config_true_value(in_process_env) + else: + use_in_process = None + + global in_process + + if use_in_process: + # Explicitly set to True, so barrel on ahead with in-process + # functional test setup. + in_process = True + # NOTE: No attempt is made to a read local test.conf file. + else: + if use_in_process is None: + # Not explicitly set, default to using in-process functional tests + # if the test.conf file is not found, or does not provide a usable + # configuration. + config.update(get_config('func_test')) + if config: + in_process = False + else: + in_process = True + else: + # Explicitly set to False, do not attempt to use in-process + # functional tests, be sure we attempt to read from local + # test.conf file. 
+ in_process = False + config.update(get_config('func_test')) + + if in_process: + in_process_setup() + + global web_front_end + web_front_end = config.get('web_front_end', 'integral') + global normalized_urls + normalized_urls = config.get('normalized_urls', False) + + global orig_collate + orig_collate = locale.setlocale(locale.LC_COLLATE) + locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C')) + + global insecure + insecure = config_true_value(config.get('insecure', False)) + + global swift_test_auth_version + global swift_test_auth + global swift_test_user + global swift_test_key + global swift_test_tenant + global swift_test_perm + + if config: + swift_test_auth_version = str(config.get('auth_version', '1')) + + swift_test_auth = 'http' + if config_true_value(config.get('auth_ssl', 'no')): + swift_test_auth = 'https' + if 'auth_prefix' not in config: + config['auth_prefix'] = '/' + try: + suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % config + swift_test_auth += suffix + except KeyError: + pass # skip + + if swift_test_auth_version == "1": + swift_test_auth += 'v1.0' + + try: + if 'account' in config: + swift_test_user[0] = '%(account)s:%(username)s' % config + else: + swift_test_user[0] = '%(username)s' % config + swift_test_key[0] = config['password'] + except KeyError: + # bad config, no account/username configured, tests cannot be + # run + pass + try: + swift_test_user[1] = '%s%s' % ( + '%s:' % config['account2'] if 'account2' in config else '', + config['username2']) + swift_test_key[1] = config['password2'] + except KeyError: + pass # old config, no second account tests can be run + try: + swift_test_user[2] = '%s%s' % ( + '%s:' % config['account'] if 'account' + in config else '', config['username3']) + swift_test_key[2] = config['password3'] + except KeyError: + pass # old config, no third account tests can be run + + for _ in range(3): + swift_test_perm[_] = swift_test_user[_] + + else: + swift_test_user[0] = config['username'] + swift_test_tenant[0] = config['account'] + swift_test_key[0] = config['password'] + swift_test_user[1] = config['username2'] + swift_test_tenant[1] = config['account2'] + swift_test_key[1] = config['password2'] + swift_test_user[2] = config['username3'] + swift_test_tenant[2] = config['account'] + swift_test_key[2] = config['password3'] + + for _ in range(3): + swift_test_perm[_] = swift_test_tenant[_] + ':' \ + + swift_test_user[_] + + global skip + skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]]) + if skip: + print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG' + + global skip2 + skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]]) + if not skip and skip2: + print >>sys.stderr, \ + 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS' \ + ' DUE TO NO CONFIG FOR THEM' + + global skip3 + skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]]) + if not skip and skip3: + print >>sys.stderr, \ + 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' + + get_cluster_info() + + +def teardown_package(): + global orig_collate + locale.setlocale(locale.LC_COLLATE, orig_collate) + + global in_process + if in_process: + try: + for server in _test_coros: + server.kill() + except Exception: + pass + try: + rmtree(os.path.dirname(_testdir)) + except Exception: + pass + utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX = \ + orig_hash_path_suff_pref + utils.SWIFT_CONF_FILE = orig_swift_conf_name + constraints.reload_constraints() + + +class AuthError(Exception): + pass + + +class 
InternalServerError(Exception): + pass + + +url = [None, None, None] +token = [None, None, None] +parsed = [None, None, None] +conn = [None, None, None] + + +def connection(url): + if has_insecure: + return http_connection(url, insecure=insecure) + return http_connection(url) + + +def retry(func, *args, **kwargs): + """ + You can use the kwargs to override: + 'retries' (default: 5) + 'use_account' (default: 1) - which user's token to pass + 'url_account' (default: matches 'use_account') - which user's storage URL + 'resource' (default: url[url_account] - URL to connect to; retry() + will interpolate the variable :storage_url: if present + """ + global url, token, parsed, conn + retries = kwargs.get('retries', 5) + attempts, backoff = 0, 1 + + # use account #1 by default; turn user's 1-indexed account into 0-indexed + use_account = kwargs.pop('use_account', 1) - 1 + + # access our own account by default + url_account = kwargs.pop('url_account', use_account + 1) - 1 + + while attempts <= retries: + attempts += 1 + try: + if not url[use_account] or not token[use_account]: + url[use_account], token[use_account] = \ + get_auth(swift_test_auth, swift_test_user[use_account], + swift_test_key[use_account], + snet=False, + tenant_name=swift_test_tenant[use_account], + auth_version=swift_test_auth_version, + os_options={}) + parsed[use_account] = conn[use_account] = None + if not parsed[use_account] or not conn[use_account]: + parsed[use_account], conn[use_account] = \ + connection(url[use_account]) + + # default resource is the account url[url_account] + resource = kwargs.pop('resource', '%(storage_url)s') + template_vars = {'storage_url': url[url_account]} + parsed_result = urlparse(resource % template_vars) + return func(url[url_account], token[use_account], + parsed_result, conn[url_account], + *args, **kwargs) + except (socket.error, HTTPException): + if attempts > retries: + raise + parsed[use_account] = conn[use_account] = None + except AuthError: + url[use_account] = token[use_account] = None + continue + except InternalServerError: + pass + if attempts <= retries: + sleep(backoff) + backoff *= 2 + raise Exception('No result after %s retries.' 
% retries) + + +def check_response(conn): + resp = conn.getresponse() + if resp.status == 401: + resp.read() + raise AuthError() + elif resp.status // 100 == 5: + resp.read() + raise InternalServerError() + return resp + + +def load_constraint(name): + global cluster_info + try: + c = cluster_info['swift'][name] + except KeyError: + raise SkipTest("Missing constraint: %s" % name) + if not isinstance(c, int): + raise SkipTest("Bad value, %r, for constraint: %s" % (c, name)) + return c + + +def get_storage_policy_from_cluster_info(info): + policies = info['swift'].get('policies', {}) + default_policy = [] + non_default_policies = [] + for p in policies: + if p.get('default', {}): + default_policy.append(p) + else: + non_default_policies.append(p) + return default_policy, non_default_policies + + +def reset_acl(): + def post(url, token, parsed, conn): + conn.request('POST', parsed.path, '', { + 'X-Auth-Token': token, + 'X-Account-Access-Control': '{}' + }) + return check_response(conn) + resp = retry(post, use_account=1) + resp.read() + + +def requires_acls(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + global skip, cluster_info + if skip or not cluster_info: + raise SkipTest + # Determine whether this cluster has account ACLs; if not, skip test + if not cluster_info.get('tempauth', {}).get('account_acls'): + raise SkipTest + if 'keystoneauth' in cluster_info: + # remove when keystoneauth supports account acls + raise SkipTest + reset_acl() + try: + rv = f(*args, **kwargs) + finally: + reset_acl() + return rv + return wrapper + + +class FunctionalStoragePolicyCollection(object): + + def __init__(self, policies): + self._all = policies + self.default = None + for p in self: + if p.get('default', False): + assert self.default is None, 'Found multiple default ' \ + 'policies %r and %r' % (self.default, p) + self.default = p + + @classmethod + def from_info(cls, info=None): + if not (info or cluster_info): + get_cluster_info() + info = info or cluster_info + try: + policy_info = info['swift']['policies'] + except KeyError: + raise AssertionError('Did not find any policy info in %r' % info) + policies = cls(policy_info) + assert policies.default, \ + 'Did not find default policy in %r' % policy_info + return policies + + def __len__(self): + return len(self._all) + + def __iter__(self): + return iter(self._all) + + def __getitem__(self, index): + return self._all[index] + + def filter(self, **kwargs): + return self.__class__([p for p in self if all( + p.get(k) == v for k, v in kwargs.items())]) + + def exclude(self, **kwargs): + return self.__class__([p for p in self if all( + p.get(k) != v for k, v in kwargs.items())]) + + def select(self): + return random.choice(self) + + +def requires_policies(f): + @functools.wraps(f) + def wrapper(self, *args, **kwargs): + if skip: + raise SkipTest + try: + self.policies = FunctionalStoragePolicyCollection.from_info() + except AssertionError: + raise SkipTest("Unable to determine available policies") + if len(self.policies) < 2: + raise SkipTest("Multiple policies not enabled") + return f(self, *args, **kwargs) + + return wrapper diff --git a/test/functional/conf/swift.conf b/test/functional/conf/swift.conf new file mode 100644 index 0000000..31226c1 --- /dev/null +++ b/test/functional/conf/swift.conf @@ -0,0 +1,21 @@ +[swift-hash] +# random unique strings that can never change (DO NOT LOSE) +swift_hash_path_prefix = changeme +swift_hash_path_suffix = changeme + +[storage-policy:0] +name = gold + +[storage-policy:1] +name = silver + +# 
SwiftOnFile +[storage-policy:2] +name = swiftonfile +default = yes + +[swift-constraints] +max_object_name_length = 221 +max_account_name_length = 255 +max_container_name_length = 255 + diff --git a/test/functional/conf/test.conf b/test/functional/conf/test.conf new file mode 100644 index 0000000..a8ca16d --- /dev/null +++ b/test/functional/conf/test.conf @@ -0,0 +1,69 @@ +[func_test] +# sample config for Swift with tempauth +auth_host = 127.0.0.1 +auth_port = 8080 +auth_ssl = no +auth_prefix = /auth/ +## sample config for Swift with Keystone +#auth_version = 2 +#auth_host = localhost +#auth_port = 5000 +#auth_ssl = no +#auth_prefix = /v2.0/ + +# Primary functional test account (needs admin access to the account) +account = test +username = tester +password = testing + +# User on a second account (needs admin access to the account) +account2 = test2 +username2 = tester2 +password2 = testing2 + +# User on same account as first, but without admin access +username3 = tester3 +password3 = testing3 + +collate = C + +[unit_test] +fake_syslog = False + +[probe_test] +# check_server_timeout = 30 +# validate_rsync = false + +[swift-constraints] +# The functional test runner will try to use the constraint values provided in +# the swift-constraints section of test.conf. +# +# If a constraint value does not exist in that section, or because the +# swift-constraints section does not exist, the constraints values found in +# the /info API call (if successful) will be used. +# +# If a constraint value cannot be found in the /info results, either because +# the /info API call failed, or a value is not present, the constraint value +# used will fall back to those loaded by the constraints module at time of +# import (which will attempt to load /etc/swift/swift.conf, see the +# swift.common.constraints module for more information). +# +# Note that the cluster must have "sane" values for the test suite to pass +# (for some definition of sane). +# +#max_file_size = 1099511 +#max_meta_name_length = 128 +#max_meta_value_length = 256 +#max_meta_count = 90 +#max_meta_overall_size = 4096 +#max_header_size = 8192 +max_object_name_length = 221 +#container_listing_limit = 10000 +#account_listing_limit = 10000 +max_account_name_length = 255 +max_container_name_length = 255 + +# Newer swift versions default to strict cors mode, but older ones were the +# opposite. +#strict_cors_mode = true +# diff --git a/test/functional/gluster_swift_tests.py b/test/functional/gluster_swift_tests.py deleted file mode 100644 index 7ba27de..0000000 --- a/test/functional/gluster_swift_tests.py +++ /dev/null @@ -1,385 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
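Taken together, the swift-constraints fallback spelled out above and the test.functional helpers retry(), check_response() and load_constraint() are what the individual tests build on: a test looks up an effective limit (test.conf first, then /info, then the constraints module) and issues authenticated requests through retry(). A minimal sketch of that pattern, assuming the tempauth accounts from the sample test.conf are reachable; put_container() and example() are illustrative names, not part of the patch:

    import test.functional as tf
    from test.functional import check_response, load_constraint, retry


    def put_container(url, token, parsed, conn, name):
        # retry() supplies the storage URL pieces, a valid token and an open
        # connection; the callback only describes the request itself.
        conn.request('PUT', parsed.path + '/' + name, '',
                     {'X-Auth-Token': token})
        return check_response(conn)


    def example():
        if tf.skip:
            return  # no usable [func_test] config, nothing to talk to
        max_len = load_constraint('max_container_name_length')  # e.g. 255
        name = 'c' * min(16, max_len)
        resp = retry(put_container, name, use_account=1)
        resp.read()
        assert resp.status in (201, 202)

Tests that need more than one storage policy (the point of adding the swiftonfile policy) can lean on the FunctionalStoragePolicyCollection and @requires_policies machinery above in the same way, for instance self.policies.exclude(default=True).select() to pick a non-default policy.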
- -""" OpenStack Swift based functional tests for Gluster for Swift""" - -import random -import os,sys,re,hashlib -from nose import SkipTest - -from test.functional.tests import config, locale, Base, Base2, Utils, \ - TestFileEnv -from test.functional.swift_test_client import Account, Connection, File, \ - ResponseError - -web_front_end = config.get('web_front_end', 'integral') - -class TestFile(Base): - env = TestFileEnv - set_up = False - - def testObjectManifest(self): - if (web_front_end == 'apache2'): - raise SkipTest() - data = File.random_data(10000) - parts = random.randrange(2,10) - charsEachPart = len(data)/parts - for i in range(parts+1): - if i==0 : - file = self.env.container.file('objectmanifest') - hdrs={} - hdrs['Content-Length']='0' - hdrs['X-Object-Manifest']=str(self.env.container.name)+'/objectmanifest' - self.assert_(file.write('',hdrs=hdrs)) - self.assert_(file.name in self.env.container.files()) - self.assert_(file.read() == '') - elif i==parts : - file = self.env.container.file('objectmanifest'+'-'+str(i)) - segment=data[ (i-1)*charsEachPart :] - self.assertTrue(file.write(segment)) - else : - file = self.env.container.file('objectmanifest'+'-'+str(i)) - segment=data[ (i-1)*charsEachPart : i*charsEachPart] - self.assertTrue(file.write(segment)) - #matching the manifest file content with orignal data, as etag won't match - file = self.env.container.file('objectmanifest') - data_read = file.read() - self.assertEquals(data,data_read) - - def test_PUT_large_object(self): - file_item = self.env.container.file(Utils.create_name()) - data = File.random_data(1024 * 1024 * 2) - self.assertTrue(file_item.write(data)) - self.assert_status(201) - self.assertTrue(data == file_item.read()) - self.assert_status(200) - - def testInvalidHeadersPUT(self): - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest() - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, - file.write_random, - self.env.file_size, - hdrs={'X-Delete-At': '9876545321'}) - self.assert_status(400) - self.assertRaises(ResponseError, - file.write_random, - self.env.file_size, - hdrs={'X-Delete-After': '60'}) - self.assert_status(400) - - def testInvalidHeadersPOST(self): - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest() - file = self.env.container.file(Utils.create_name()) - file.write_random(self.env.file_size) - headers = file.make_headers(cfg={}) - headers.update({ 'X-Delete-At' : '987654321'}) - # Need to call conn.make_request instead of file.sync_metadata - # because sync_metadata calls make_headers. make_headers() - # overwrites any headers in file.metadata as 'user' metadata - # by appending 'X-Object-Meta-' to any of the headers - # in file.metadata. 
- file.conn.make_request('POST', file.path, hdrs=headers, cfg={}) - self.assertEqual(400, file.conn.response.status) - - headers = file.make_headers(cfg={}) - headers.update({ 'X-Delete-After' : '60'}) - file.conn.make_request('POST', file.path, hdrs=headers, cfg={}) - self.assertEqual(400, file.conn.response.status) - - -class TestFileUTF8(Base2, TestFile): - set_up = False - - -class TestContainerPathsEnv: - @classmethod - def setUp(cls): - cls.conn = Connection(config) - cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) - cls.account.delete_containers() - - cls.file_size = 8 - - cls.container = cls.account.container(Utils.create_name()) - if not cls.container.create(): - raise ResponseError(cls.conn.response) - - cls.dirs = [ - 'dir1', - 'dir2', - 'dir1/subdir1', - 'dir1/subdir2', - 'dir1/subdir1/subsubdir1', - 'dir1/subdir1/subsubdir2', - 'dir1/subdir with spaces', - 'dir1/subdir+with{whatever', - ] - - cls.files = [ - 'file1', - 'file A', - 'dir1/file2', - 'dir1/subdir1/file2', - 'dir1/subdir1/file3', - 'dir1/subdir1/file4', - 'dir1/subdir1/subsubdir1/file5', - 'dir1/subdir1/subsubdir1/file6', - 'dir1/subdir1/subsubdir1/file7', - 'dir1/subdir1/subsubdir1/file8', - 'dir1/subdir1/subsubdir2/file9', - 'dir1/subdir1/subsubdir2/file0', - 'dir1/subdir with spaces/file B', - 'dir1/subdir+with{whatever/file D', - ] - - stored_files = set() - for d in cls.dirs: - file = cls.container.file(d) - file.write(hdrs={'Content-Type': 'application/directory'}) - for f in cls.files: - file = cls.container.file(f) - file.write_random(cls.file_size, hdrs={'Content-Type': - 'application/octet-stream'}) - stored_files.add(f) - cls.stored_files = sorted(stored_files) - cls.sorted_objects = sorted(set(cls.dirs + cls.files)) - - -class TestContainerPaths(Base): - env = TestContainerPathsEnv - set_up = False - - def testTraverseContainer(self): - found_files = [] - found_dirs = [] - - def recurse_path(path, count=0): - if count > 10: - raise ValueError('too deep recursion') - - for file in self.env.container.files(parms={'path': path}): - self.assert_(file.startswith(path)) - if file in self.env.dirs: - recurse_path(file, count + 1) - found_dirs.append(file) - else: - found_files.append(file) - - recurse_path('') - for file in self.env.stored_files: - self.assert_(file in found_files) - self.assert_(file not in found_dirs) - - - def testContainerListing(self): - for format in (None, 'json', 'xml'): - files = self.env.container.files(parms={'format': format}) - self.assertFalse(len(files) == 0) - - if isinstance(files[0], dict): - files = [str(x['name']) for x in files] - - self.assertEquals(files, self.env.sorted_objects) - - for format in ('json', 'xml'): - for file in self.env.container.files(parms={'format': format}): - self.assert_(int(file['bytes']) >= 0) - self.assert_('last_modified' in file) - if file['name'] in self.env.dirs: - self.assertEquals(file['content_type'], - 'application/directory') - else: - self.assertEquals(file['content_type'], - 'application/octet-stream') - - def testStructure(self): - def assert_listing(path, list): - files = self.env.container.files(parms={'path': path}) - self.assertEquals(sorted(list, cmp=locale.strcoll), files) - - assert_listing('', ['file1', 'dir1', 'dir2', 'file A']) - assert_listing('dir1', ['dir1/file2', 'dir1/subdir1', - 'dir1/subdir2', 'dir1/subdir with spaces', - 'dir1/subdir+with{whatever']) - assert_listing('dir1/subdir1', - ['dir1/subdir1/file4', 'dir1/subdir1/subsubdir2', - 'dir1/subdir1/file2', 
'dir1/subdir1/file3', - 'dir1/subdir1/subsubdir1']) - assert_listing('dir1/subdir1/subsubdir1', - ['dir1/subdir1/subsubdir1/file7', - 'dir1/subdir1/subsubdir1/file5', - 'dir1/subdir1/subsubdir1/file8', - 'dir1/subdir1/subsubdir1/file6']) - assert_listing('dir1/subdir1/subsubdir1', - ['dir1/subdir1/subsubdir1/file7', - 'dir1/subdir1/subsubdir1/file5', - 'dir1/subdir1/subsubdir1/file8', - 'dir1/subdir1/subsubdir1/file6']) - assert_listing('dir1/subdir with spaces', - ['dir1/subdir with spaces/file B']) - - -class TestObjectVersioningEnv: - @classmethod - def setUp(cls): - cls.conn = Connection(config) - cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) - cls.account.delete_containers() - cls.containers = {} - #create two containers one for object other for versions of objects - for i in range(2): - hdrs={} - if i==0: - hdrs={'X-Versions-Location':'versions'} - cont = cls.containers['object'] = cls.account.container('object') - else: - cont = cls.containers['versions'] = cls.account.container('versions') - if not cont.create(hdrs=hdrs): - raise ResponseError(cls.conn.response) - cls.containers.append(cont) - - -class TestObjectVersioning(Base): - env = TestObjectVersioningEnv - set_up = False - - def testObjectVersioning(self): - versions = random.randrange(2,10) - dataArr=[] - #create versions - for i in range(versions): - data = File.random_data(10000*(i+1)) - file = self.env.containers['object'].file('object') - self.assertTrue(file.write(data)) - dataArr.append(data) - cont = self.env.containers['versions'] - info = cont.info() - self.assertEquals(info['object_count'], versions-1) - #match the current version of object with data in arr and delete it - for i in range(versions): - data = dataArr[-(i+1)] - file = self.env.containers['object'].file('object') - self.assertEquals(data,file.read()) - self.assert_(file.delete()) - self.assert_status(204) - - -class TestMultiProtocolAccessEnv: - @classmethod - def setUp(cls): - cls.conn = Connection(config) - cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) - cls.root_dir = os.path.join('/mnt/gluster-object',cls.account.conn.storage_url.split('/')[2].split('_')[1]) - cls.account.delete_containers() - - cls.file_size = 8 - cls.container = cls.account.container(Utils.create_name()) - if not cls.container.create(): - raise ResponseError(cls.conn.response) - - cls.dirs = [ - 'dir1', - 'dir2', - 'dir1/subdir1', - 'dir1/subdir2', - 'dir1/subdir1/subsubdir1', - 'dir1/subdir1/subsubdir2', - 'dir1/subdir with spaces', - 'dir1/subdir+with{whatever', - ] - - cls.files = [ - 'file1', - 'file A', - 'dir1/file2', - 'dir1/subdir1/file2', - 'dir1/subdir1/file3', - 'dir1/subdir1/file4', - 'dir1/subdir1/subsubdir1/file5', - 'dir1/subdir1/subsubdir1/file6', - 'dir1/subdir1/subsubdir1/file7', - 'dir1/subdir1/subsubdir1/file8', - 'dir1/subdir1/subsubdir2/file9', - 'dir1/subdir1/subsubdir2/file0', - 'dir1/subdir with spaces/file B', - 'dir1/subdir+with{whatever/file D', - ] - - stored_files = set() - for d in cls.dirs: - file = cls.container.file(d) - file.write(hdrs={'Content-Type': 'application/directory'}) - for f in cls.files: - file = cls.container.file(f) - file.write_random(cls.file_size, hdrs={'Content-Type': - 'application/octet-stream'}) - stored_files.add(f) - cls.stored_files = sorted(stored_files) - cls.sorted_objects = sorted(set(cls.dirs + cls.files)) - - -class TestMultiProtocolAccess(Base): - env = TestMultiProtocolAccessEnv - set_up = False - - 
def testObjectsFromMountPoint(self): - found_files = [] - found_dirs = [] - - def recurse_path(path, count=0): - if count > 10: - raise ValueError('too deep recursion') - self.assert_(os.path.exists(path)) - for file in os.listdir(path): - if os.path.isdir(os.path.join(path,file)): - recurse_path(os.path.join(path,file), count + 1) - found_dirs.append(file) - elif os.path.isfile(os.path.join(path,file)): - filename=os.path.join(os.path.relpath(path,os.path.join(self.env.root_dir,self.env.container.name)),file) - if re.match('^[\.]',filename): - filename=filename[2:] - found_files.append(filename) - else: - pass #Just a Place holder - - recurse_path(os.path.join(self.env.root_dir,self.env.container.name)) - for file in self.env.stored_files: - self.assert_(file in found_files) - self.assert_(file not in found_dirs) - - def testObjectContentFromMountPoint(self): - file_name = Utils.create_name() - file_item = self.env.container.file(file_name) - data = file_item.write_random() - self.assert_status(201) - file_info = file_item.info() - fhOnMountPoint = open(os.path.join(self.env.root_dir,self.env.container.name,file_name),'r') - data_read_from_mountP = fhOnMountPoint.read() - md5_returned = hashlib.md5(data_read_from_mountP).hexdigest() - self.assertEquals(md5_returned,file_info['etag']) - fhOnMountPoint.close() diff --git a/test/functional/swift_on_file_tests.py b/test/functional/swift_on_file_tests.py new file mode 100644 index 0000000..c864a6a --- /dev/null +++ b/test/functional/swift_on_file_tests.py @@ -0,0 +1,137 @@ +# Copyright (c) 2013 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
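The module introduced by this hunk checks the property that gives SwiftOnFile its name: an object PUT through the proxy under the swiftonfile policy shows up on the mounted filesystem as an ordinary file at <mount>/AUTH_<account>/<container>/<object>, and the on-disk content hashes to the object's ETag. The core of that check, reduced to a sketch (the account, container and object names here are illustrative; the tests below derive them from the authenticated connection):

    import hashlib
    import os

    MOUNT = '/mnt/swiftonfile/test'        # XFS/GlusterFS mount used in setup
    account = 'AUTH_test'                  # illustrative account directory
    container, obj = 'mycontainer', 'dir1/file2'

    path = os.path.join(MOUNT, account, container, obj)
    with open(path, 'rb') as f:
        md5_on_disk = hashlib.md5(f.read()).hexdigest()

    # testObjectContentFromMountPoint asserts this equals the ETag reported
    # by a HEAD on the same object through the Swift API.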
+ +""" OpenStack Swift based functional tests for Swift on File""" + +import os +import re +import hashlib +from shutil import rmtree + +from test.functional.tests import Base, Utils +from test.functional.swift_test_client import Account, Connection, \ + ResponseError +import test.functional as tf + + +class TestSwiftOnFileEnv: + @classmethod + def setUp(cls): + cls.conn = Connection(tf.config) + cls.conn.authenticate() + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) + cls.root_dir = os.path.join('/mnt/swiftonfile/test') + cls.account.delete_containers() + + cls.file_size = 8 + cls.container = cls.account.container(Utils.create_name()) + if not cls.container.create(): + raise ResponseError(cls.conn.response) + + cls.dirs = [ + 'dir1', + 'dir2', + 'dir1/subdir1', + 'dir1/subdir2', + 'dir1/subdir1/subsubdir1', + 'dir1/subdir1/subsubdir2', + 'dir1/subdir with spaces', + 'dir1/subdir+with{whatever', + ] + + cls.files = [ + 'file1', + 'file A', + 'dir1/file2', + 'dir1/subdir1/file2', + 'dir1/subdir1/file3', + 'dir1/subdir1/file4', + 'dir1/subdir1/subsubdir1/file5', + 'dir1/subdir1/subsubdir1/file6', + 'dir1/subdir1/subsubdir1/file7', + 'dir1/subdir1/subsubdir1/file8', + 'dir1/subdir1/subsubdir2/file9', + 'dir1/subdir1/subsubdir2/file0', + 'dir1/subdir with spaces/file B', + 'dir1/subdir+with{whatever/file D', + ] + + stored_files = set() + for d in cls.dirs: + file = cls.container.file(d) + file.write(hdrs={'Content-Type': 'application/directory'}) + for f in cls.files: + file = cls.container.file(f) + file.write_random(cls.file_size, hdrs={'Content-Type': + 'application/octet-stream'}) + stored_files.add(f) + cls.stored_files = sorted(stored_files) + cls.sorted_objects = sorted(set(cls.dirs + cls.files)) + + +class TestSwiftOnFile(Base): + env = TestSwiftOnFileEnv + set_up = False + + @classmethod + def tearDownClass(self): + self.env.account.delete_containers() + for account_dir in os.listdir(self.env.root_dir): + rmtree(os.path.join(self.env.root_dir, account_dir)) + + def testObjectsFromMountPoint(self): + found_files = [] + found_dirs = [] + + def recurse_path(path, count=0): + if count > 10: + raise ValueError('too deep recursion') + self.assert_(os.path.exists(path)) + for file in os.listdir(path): + if os.path.isdir(os.path.join(path, file)): + recurse_path(os.path.join(path, file), count + 1) + found_dirs.append(file) + elif os.path.isfile(os.path.join(path, file)): + filename = os.path.join(os.path.relpath(path, os.path.join( + self.env.root_dir, 'AUTH_' + self.env.account.name, + self.env.container.name)), file) + if re.match('^[\.]', filename): + filename = filename[2:] + found_files.append(filename) + else: + pass # Just a Place holder + + recurse_path(os.path.join(self.env.root_dir, + 'AUTH_' + self.env.account.name, + self.env.container.name)) + for file in self.env.stored_files: + self.assert_(file in found_files) + self.assert_(file not in found_dirs) + + def testObjectContentFromMountPoint(self): + file_name = Utils.create_name() + file_item = self.env.container.file(file_name) + file_item.write_random() + self.assert_status(201) + file_info = file_item.info() + fhOnMountPoint = open(os.path.join(self.env.root_dir, + 'AUTH_' + self.env.account.name, + self.env.container.name, + file_name), 'r') + data_read_from_mountP = fhOnMountPoint.read() + md5_returned = hashlib.md5(data_read_from_mountP).hexdigest() + self.assertEquals(md5_returned, file_info['etag']) + fhOnMountPoint.close() diff --git a/test/functional/swift_test_client.py 
b/test/functional/swift_test_client.py index b4dcb56..2c35520 100644 --- a/test/functional/swift_test_client.py +++ b/test/functional/swift_test_client.py @@ -103,7 +103,8 @@ class Connection(object): def __init__(self, config): for key in 'auth_host auth_port auth_ssl username password'.split(): if key not in config: - raise SkipTest + raise SkipTest( + "Missing required configuration parameter: %s" % key) self.auth_host = config['auth_host'] self.auth_port = int(config['auth_port']) @@ -117,6 +118,7 @@ class Connection(object): self.storage_host = None self.storage_port = None + self.storage_url = None self.conn_class = None @@ -184,7 +186,7 @@ class Connection(object): """ status = self.make_request('GET', '/info', cfg={'absolute_path': True}) - if status == 404: + if status // 100 == 4: return {} if not 200 <= status <= 299: raise ResponseError(self.response, 'GET', '/info') @@ -195,7 +197,12 @@ class Connection(object): port=self.storage_port) #self.connection.set_debuglevel(3) - def make_path(self, path=[], cfg={}): + def make_path(self, path=None, cfg=None): + if path is None: + path = [] + if cfg is None: + cfg = {} + if cfg.get('version_only_path'): return '/' + self.storage_url.split('/')[1] @@ -208,7 +215,9 @@ class Connection(object): else: return self.storage_url - def make_headers(self, hdrs, cfg={}): + def make_headers(self, hdrs, cfg=None): + if cfg is None: + cfg = {} headers = {} if not cfg.get('no_auth_token'): @@ -218,8 +227,16 @@ class Connection(object): headers.update(hdrs) return headers - def make_request(self, method, path=[], data='', hdrs={}, parms={}, - cfg={}): + def make_request(self, method, path=None, data='', hdrs=None, parms=None, + cfg=None): + if path is None: + path = [] + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} if not cfg.get('absolute_path'): # Set absolute_path=True to make a request to exactly the given # path, not storage path + given path. 
Useful for @@ -277,7 +294,14 @@ class Connection(object): 'Attempts: %s, Failures: %s' % (request, len(fail_messages), fail_messages)) - def put_start(self, path, hdrs={}, parms={}, cfg={}, chunked=False): + def put_start(self, path, hdrs=None, parms=None, cfg=None, chunked=False): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + self.http_connect() path = self.make_path(path, cfg) @@ -322,7 +346,10 @@ class Base(object): def __str__(self): return self.name - def header_fields(self, required_fields, optional_fields=()): + def header_fields(self, required_fields, optional_fields=None): + if optional_fields is None: + optional_fields = () + headers = dict(self.conn.response.getheaders()) ret = {} @@ -352,7 +379,11 @@ class Account(Base): self.conn = conn self.name = str(name) - def update_metadata(self, metadata={}, cfg={}): + def update_metadata(self, metadata=None, cfg=None): + if metadata is None: + metadata = {} + if cfg is None: + cfg = {} headers = dict(("X-Account-Meta-%s" % k, v) for k, v in metadata.items()) @@ -365,7 +396,14 @@ class Account(Base): def container(self, container_name): return Container(self.conn, self.name, container_name) - def containers(self, hdrs={}, parms={}, cfg={}): + def containers(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + format_type = parms.get('format', None) if format_type not in [None, 'json', 'xml']: raise RequestError('Invalid format: %s' % format_type) @@ -411,7 +449,13 @@ class Account(Base): return listing_empty(self.containers) - def info(self, hdrs={}, parms={}, cfg={}): + def info(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} if self.conn.make_request('HEAD', self.path, hdrs=hdrs, parms=parms, cfg=cfg) != 204: @@ -435,11 +479,21 @@ class Container(Base): self.account = str(account) self.name = str(name) - def create(self, hdrs={}, parms={}, cfg={}): + def create(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} return self.conn.make_request('PUT', self.path, hdrs=hdrs, parms=parms, cfg=cfg) in (201, 202) - def delete(self, hdrs={}, parms={}): + def delete(self, hdrs=None, parms=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} return self.conn.make_request('DELETE', self.path, hdrs=hdrs, parms=parms) == 204 @@ -457,7 +511,13 @@ class Container(Base): def file(self, file_name): return File(self.conn, self.account, self.name, file_name) - def files(self, hdrs={}, parms={}, cfg={}): + def files(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} format_type = parms.get('format', None) if format_type not in [None, 'json', 'xml']: raise RequestError('Invalid format: %s' % format_type) @@ -507,7 +567,13 @@ class Container(Base): raise ResponseError(self.conn.response, 'GET', self.conn.make_path(self.path)) - def info(self, hdrs={}, parms={}, cfg={}): + def info(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} self.conn.make_request('HEAD', self.path, hdrs=hdrs, parms=parms, cfg=cfg) @@ -538,7 +604,9 @@ class File(Base): self.size = None self.metadata = {} - def make_headers(self, cfg={}): + def make_headers(self, cfg=None): + if cfg is None: + cfg = {} headers = {} 
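The recurring change in this file, replacing signatures like def f(..., hdrs={}, parms={}, cfg={}) with None defaults that are filled in inside the function, removes Python's shared-mutable-default pitfall: a default dict is created once at function definition time, so anything one call stores in it leaks into every later call. A small standalone illustration of the hazard being removed (not part of the patch):

    def bad(hdrs={}):
        hdrs.setdefault('X-Auth-Token', 'stale')   # mutates the shared default
        return hdrs

    def good(hdrs=None):
        if hdrs is None:
            hdrs = {}                              # fresh dict on every call
        hdrs.setdefault('X-Auth-Token', 'stale')
        return hdrs

    assert bad() is bad()        # same dict object returned each call
    assert good() is not good()  # independent dicts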
if not cfg.get('no_content_length'): if cfg.get('set_content_length'): @@ -575,7 +643,13 @@ class File(Base): data.seek(0) return checksum.hexdigest() - def copy(self, dest_cont, dest_file, hdrs={}, parms={}, cfg={}): + def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} if 'destination' in cfg: headers = {'Destination': cfg['destination']} elif cfg.get('no_destination'): @@ -590,7 +664,11 @@ class File(Base): return self.conn.make_request('COPY', self.path, hdrs=headers, parms=parms) == 201 - def delete(self, hdrs={}, parms={}): + def delete(self, hdrs=None, parms=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} if self.conn.make_request('DELETE', self.path, hdrs=hdrs, parms=parms) != 204: @@ -599,7 +677,13 @@ class File(Base): return True - def info(self, hdrs={}, parms={}, cfg={}): + def info(self, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} if self.conn.make_request('HEAD', self.path, hdrs=hdrs, parms=parms, cfg=cfg) != 200: @@ -615,7 +699,11 @@ class File(Base): header_fields['etag'] = header_fields['etag'].strip('"') return header_fields - def initialize(self, hdrs={}, parms={}): + def initialize(self, hdrs=None, parms=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} if not self.name: return False @@ -660,7 +748,11 @@ class File(Base): return data def read(self, size=-1, offset=0, hdrs=None, buffer=None, - callback=None, cfg={}, parms={}): + callback=None, cfg=None, parms=None): + if cfg is None: + cfg = {} + if parms is None: + parms = {} if size > 0: range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1) @@ -717,7 +809,12 @@ class File(Base): finally: fobj.close() - def sync_metadata(self, metadata={}, cfg={}): + def sync_metadata(self, metadata=None, cfg=None): + if metadata is None: + metadata = {} + if cfg is None: + cfg = {} + self.metadata.update(metadata) if self.metadata: @@ -737,7 +834,14 @@ class File(Base): return True - def chunked_write(self, data=None, hdrs={}, parms={}, cfg={}): + def chunked_write(self, data=None, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + if data is not None and self.chunked_write_in_progress: self.conn.put_data(data, True) elif data is not None: @@ -756,8 +860,15 @@ class File(Base): else: raise RuntimeError - def write(self, data='', hdrs={}, parms={}, callback=None, cfg={}, + def write(self, data='', hdrs=None, parms=None, callback=None, cfg=None, return_resp=False): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + block_size = 2 ** 20 if isinstance(data, file): @@ -778,13 +889,15 @@ class File(Base): transferred = 0 buff = data.read(block_size) + buff_len = len(buff) try: - while len(buff) > 0: + while buff_len > 0: self.conn.put_data(buff) - buff = data.read(block_size) - transferred += len(buff) + transferred += buff_len if callable(callback): callback(transferred, self.size) + buff = data.read(block_size) + buff_len = len(buff) self.conn.put_end() except socket.timeout as err: @@ -806,7 +919,14 @@ class File(Base): return True - def write_random(self, size=None, hdrs={}, parms={}, cfg={}): + def write_random(self, size=None, hdrs=None, parms=None, cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + data = 
self.random_data(size) if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg): raise ResponseError(self.conn.response, 'PUT', @@ -814,7 +934,15 @@ class File(Base): self.md5 = self.compute_md5sum(StringIO.StringIO(data)) return data - def write_random_return_resp(self, size=None, hdrs={}, parms={}, cfg={}): + def write_random_return_resp(self, size=None, hdrs=None, parms=None, + cfg=None): + if hdrs is None: + hdrs = {} + if parms is None: + parms = {} + if cfg is None: + cfg = {} + data = self.random_data(size) resp = self.write(data, hdrs=hdrs, parms=parms, cfg=cfg, return_resp=True) diff --git a/test/functional/swift_testing.py b/test/functional/swift_testing.py deleted file mode 100644 index 2a1e1fa..0000000 --- a/test/functional/swift_testing.py +++ /dev/null @@ -1,231 +0,0 @@ -# Copyright (c) 2010-2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from httplib import HTTPException -import os -import socket -import sys -from time import sleep -from urlparse import urlparse -import functools -from nose import SkipTest - -from test import get_config - -from swiftclient import get_auth, http_connection -from test.functional.swift_test_client import Connection - -conf = get_config('func_test') -web_front_end = conf.get('web_front_end', 'integral') -normalized_urls = conf.get('normalized_urls', False) - -# If no conf was read, we will fall back to old school env vars -swift_test_auth = os.environ.get('SWIFT_TEST_AUTH') -swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None] -swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None] -swift_test_tenant = ['', '', ''] -swift_test_perm = ['', '', ''] - -if conf: - swift_test_auth_version = str(conf.get('auth_version', '1')) - - swift_test_auth = 'http' - if conf.get('auth_ssl', 'no').lower() in ('yes', 'true', 'on', '1'): - swift_test_auth = 'https' - if 'auth_prefix' not in conf: - conf['auth_prefix'] = '/' - try: - suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf - swift_test_auth += suffix - except KeyError: - pass # skip - - if swift_test_auth_version == "1": - swift_test_auth += 'v1.0' - - if 'account' in conf: - swift_test_user[0] = '%(account)s:%(username)s' % conf - else: - swift_test_user[0] = '%(username)s' % conf - swift_test_key[0] = conf['password'] - try: - swift_test_user[1] = '%s%s' % ( - '%s:' % conf['account2'] if 'account2' in conf else '', - conf['username2']) - swift_test_key[1] = conf['password2'] - except KeyError as err: - pass # old conf, no second account tests can be run - try: - swift_test_user[2] = '%s%s' % ('%s:' % conf['account'] if 'account' - in conf else '', conf['username3']) - swift_test_key[2] = conf['password3'] - except KeyError as err: - pass # old conf, no third account tests can be run - - for _ in range(3): - swift_test_perm[_] = swift_test_user[_] - - else: - swift_test_user[0] = conf['username'] - swift_test_tenant[0] = conf['account'] - swift_test_key[0] = conf['password'] - swift_test_user[1] = conf['username2'] - swift_test_tenant[1] = 
conf['account2'] - swift_test_key[1] = conf['password2'] - swift_test_user[2] = conf['username3'] - swift_test_tenant[2] = conf['account'] - swift_test_key[2] = conf['password3'] - - for _ in range(3): - swift_test_perm[_] = swift_test_tenant[_] + ':' \ - + swift_test_user[_] - -skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]]) -if skip: - print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG' - -skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]]) -if not skip and skip2: - print >>sys.stderr, \ - 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' - -skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]]) -if not skip and skip3: - print >>sys.stderr, \ - 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' - - -class AuthError(Exception): - pass - - -class InternalServerError(Exception): - pass - - -url = [None, None, None] -token = [None, None, None] -parsed = [None, None, None] -conn = [None, None, None] - - -def retry(func, *args, **kwargs): - """ - You can use the kwargs to override: - 'retries' (default: 5) - 'use_account' (default: 1) - which user's token to pass - 'url_account' (default: matches 'use_account') - which user's storage URL - 'resource' (default: url[url_account] - URL to connect to; retry() - will interpolate the variable :storage_url: if present - """ - global url, token, parsed, conn - retries = kwargs.get('retries', 5) - attempts, backoff = 0, 1 - - # use account #1 by default; turn user's 1-indexed account into 0-indexed - use_account = kwargs.pop('use_account', 1) - 1 - - # access our own account by default - url_account = kwargs.pop('url_account', use_account + 1) - 1 - - while attempts <= retries: - attempts += 1 - try: - if not url[use_account] or not token[use_account]: - url[use_account], token[use_account] = \ - get_auth(swift_test_auth, swift_test_user[use_account], - swift_test_key[use_account], - snet=False, - tenant_name=swift_test_tenant[use_account], - auth_version=swift_test_auth_version, - os_options={}) - parsed[use_account] = conn[use_account] = None - if not parsed[use_account] or not conn[use_account]: - parsed[use_account], conn[use_account] = \ - http_connection(url[use_account]) - - # default resource is the account url[url_account] - resource = kwargs.pop('resource', '%(storage_url)s') - template_vars = {'storage_url': url[url_account]} - parsed_result = urlparse(resource % template_vars) - return func(url[url_account], token[use_account], - parsed_result, conn[url_account], - *args, **kwargs) - except (socket.error, HTTPException): - if attempts > retries: - raise - parsed[use_account] = conn[use_account] = None - except AuthError: - url[use_account] = token[use_account] = None - continue - except InternalServerError: - pass - if attempts <= retries: - sleep(backoff) - backoff *= 2 - raise Exception('No result after %s retries.' 
% retries) - - -def check_response(conn): - resp = conn.getresponse() - if resp.status == 401: - resp.read() - raise AuthError() - elif resp.status // 100 == 5: - resp.read() - raise InternalServerError() - return resp - -cluster_info = {} - - -def get_cluster_info(): - conn = Connection(conf) - conn.authenticate() - global cluster_info - cluster_info = conn.cluster_info() - - -def reset_acl(): - def post(url, token, parsed, conn): - conn.request('POST', parsed.path, '', { - 'X-Auth-Token': token, - 'X-Account-Access-Control': '{}' - }) - return check_response(conn) - resp = retry(post, use_account=1) - resp.read() - - -def requires_acls(f): - @functools.wraps(f) - def wrapper(*args, **kwargs): - if skip: - raise SkipTest - if not cluster_info: - get_cluster_info() - # Determine whether this cluster has account ACLs; if not, skip test - if not cluster_info.get('tempauth', {}).get('account_acls'): - raise SkipTest - if 'keystoneauth' in cluster_info: - # remove when keystoneauth supports account acls - raise SkipTest - reset_acl() - try: - rv = f(*args, **kwargs) - finally: - reset_acl() - return rv - return wrapper diff --git a/test/functional/test_account.py b/test/functional/test_account.py index 30cef31..b6b279d 100755 --- a/test/functional/test_account.py +++ b/test/functional/test_account.py @@ -21,19 +21,53 @@ from uuid import uuid4 from nose import SkipTest from string import letters -from swift.common.constraints import MAX_META_COUNT, MAX_META_NAME_LENGTH, \ - MAX_META_OVERALL_SIZE, MAX_META_VALUE_LENGTH from swift.common.middleware.acl import format_acl -from swift_testing import (check_response, retry, skip, skip2, skip3, - web_front_end, requires_acls) -import swift_testing -from test.functional.tests import load_constraint + +from test.functional import check_response, retry, requires_acls, \ + load_constraint +import test.functional as tf class TestAccount(unittest.TestCase): + def setUp(self): + self.max_meta_count = load_constraint('max_meta_count') + self.max_meta_name_length = load_constraint('max_meta_name_length') + self.max_meta_overall_size = load_constraint('max_meta_overall_size') + self.max_meta_value_length = load_constraint('max_meta_value_length') + + def head(url, token, parsed, conn): + conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(head) + self.existing_metadata = set([ + k for k, v in resp.getheaders() if + k.lower().startswith('x-account-meta')]) + + def tearDown(self): + def head(url, token, parsed, conn): + conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(head) + resp.read() + new_metadata = set( + [k for k, v in resp.getheaders() if + k.lower().startswith('x-account-meta')]) + + def clear_meta(url, token, parsed, conn, remove_metadata_keys): + headers = {'X-Auth-Token': token} + headers.update((k, '') for k in remove_metadata_keys) + conn.request('POST', parsed.path, '', headers) + return check_response(conn) + extra_metadata = list(self.existing_metadata ^ new_metadata) + for i in range(0, len(extra_metadata), 90): + batch = extra_metadata[i:i + 90] + resp = retry(clear_meta, batch) + resp.read() + self.assertEqual(resp.status // 100, 2) + def test_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, value): @@ -73,6 +107,9 @@ class TestAccount(unittest.TestCase): self.assertEqual(resp.getheader('x-account-meta-test'), 'Value') def test_invalid_acls(self): + if tf.skip: + raise SkipTest + def 
post(url, token, parsed, conn, headers): new_headers = dict({'X-Auth-Token': token}, **headers) conn.request('POST', parsed.path, '', new_headers) @@ -109,7 +146,7 @@ class TestAccount(unittest.TestCase): resp.read() self.assertEqual(resp.status, 400) - acl_user = swift_testing.swift_test_user[1] + acl_user = tf.swift_test_user[1] acl = {'admin': [acl_user], 'invalid_key': 'invalid_value'} headers = {'x-account-access-control': format_acl( version=2, acl_dict=acl)} @@ -137,7 +174,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_read_only_acl(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -155,7 +192,7 @@ class TestAccount(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read access - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': format_acl( version=2, acl_dict=acl)} @@ -188,7 +225,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_read_write_acl(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -206,7 +243,7 @@ class TestAccount(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-write access - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': format_acl( version=2, acl_dict=acl)} @@ -229,7 +266,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_admin_acl(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -247,7 +284,7 @@ class TestAccount(unittest.TestCase): self.assertEquals(resp.status, 403) # grant admin access - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} acl_json_str = format_acl(version=2, acl_dict=acl) headers = {'x-account-access-control': acl_json_str} @@ -287,7 +324,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_protected_tempurl(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -299,7 +336,7 @@ class TestAccount(unittest.TestCase): conn.request('POST', parsed.path, '', new_headers) return check_response(conn) - # add a account metadata, and temp-url-key to account + # add an account metadata, and temp-url-key to account value = str(uuid4()) headers = { 'x-account-meta-temp-url-key': 'secret', @@ -310,7 +347,7 @@ class TestAccount(unittest.TestCase): self.assertEqual(resp.status, 204) # grant read-only access to tester3 - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} acl_json_str = format_acl(version=2, acl_dict=acl) headers = {'x-account-access-control': acl_json_str} @@ -328,7 +365,7 @@ class TestAccount(unittest.TestCase): self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None) # grant read-write access to tester3 - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} acl_json_str = format_acl(version=2, acl_dict=acl) headers = {'x-account-access-control': acl_json_str} @@ -346,7 +383,7 @@ class TestAccount(unittest.TestCase): self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'), None) # grant admin access to tester3 - acl_user = swift_testing.swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} acl_json_str = format_acl(version=2, acl_dict=acl) headers = {'x-account-access-control': 
acl_json_str} @@ -381,7 +418,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_account_acls(self): - if skip2: + if tf.skip2: raise SkipTest def post(url, token, parsed, conn, headers): @@ -428,7 +465,7 @@ class TestAccount(unittest.TestCase): # User1 is swift_owner of their own account, so they can POST an # ACL -- let's do this and make User2 (test_user[1]) an admin - acl_user = swift_testing.swift_test_user[1] + acl_user = tf.swift_test_user[1] acl = {'admin': [acl_user]} headers = {'x-account-access-control': format_acl( version=2, acl_dict=acl)} @@ -505,7 +542,7 @@ class TestAccount(unittest.TestCase): @requires_acls def test_swift_account_acls(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, headers): @@ -568,7 +605,7 @@ class TestAccount(unittest.TestCase): resp.read() def test_swift_prohibits_garbage_account_acls(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, headers): @@ -635,7 +672,7 @@ class TestAccount(unittest.TestCase): resp.read() def test_unicode_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, name, value): @@ -648,7 +685,7 @@ class TestAccount(unittest.TestCase): return check_response(conn) uni_key = u'X-Account-Meta-uni\u0E12' uni_value = u'uni\u0E12' - if (web_front_end == 'integral'): + if (tf.web_front_end == 'integral'): resp = retry(post, uni_key, '1') resp.read() self.assertTrue(resp.status in (201, 204)) @@ -664,7 +701,7 @@ class TestAccount(unittest.TestCase): self.assert_(resp.status in (200, 204), resp.status) self.assertEqual(resp.getheader('X-Account-Meta-uni'), uni_value.encode('utf-8')) - if (web_front_end == 'integral'): + if (tf.web_front_end == 'integral'): resp = retry(post, uni_key, uni_value) resp.read() self.assertEqual(resp.status, 204) @@ -675,7 +712,7 @@ class TestAccount(unittest.TestCase): uni_value.encode('utf-8')) def test_multi_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, name, value): @@ -704,7 +741,7 @@ class TestAccount(unittest.TestCase): self.assertEqual(resp.getheader('x-account-meta-two'), '2') def test_bad_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, extra_headers): @@ -714,54 +751,59 @@ class TestAccount(unittest.TestCase): return check_response(conn) resp = retry(post, - {'X-Account-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + {'X-Account-Meta-' + ( + 'k' * self.max_meta_name_length): 'v'}) resp.read() self.assertEqual(resp.status, 204) resp = retry( post, - {'X-Account-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + {'X-Account-Meta-' + ('k' * ( + self.max_meta_name_length + 1)): 'v'}) resp.read() self.assertEqual(resp.status, 400) resp = retry(post, - {'X-Account-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + {'X-Account-Meta-Too-Long': ( + 'k' * self.max_meta_value_length)}) resp.read() self.assertEqual(resp.status, 204) resp = retry( post, - {'X-Account-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + {'X-Account-Meta-Too-Long': 'k' * ( + self.max_meta_value_length + 1)}) resp.read() self.assertEqual(resp.status, 400) headers = {} - for x in xrange(MAX_META_COUNT): + for x in xrange(self.max_meta_count): headers['X-Account-Meta-%d' % x] = 'v' resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 204) headers = {} - for x in xrange(MAX_META_COUNT + 1): + for x in xrange(self.max_meta_count + 1): headers['X-Account-Meta-%d' % x] = 'v' resp = retry(post, headers) resp.read() 
self.assertEqual(resp.status, 400) headers = {} - header_value = 'k' * MAX_META_VALUE_LENGTH + header_value = 'k' * self.max_meta_value_length size = 0 x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH + while size < (self.max_meta_overall_size - 4 + - self.max_meta_value_length): + size += 4 + self.max_meta_value_length headers['X-Account-Meta-%04d' % x] = header_value x += 1 - if MAX_META_OVERALL_SIZE - size > 1: + if self.max_meta_overall_size - size > 1: headers['X-Account-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size - 1) + 'v' * (self.max_meta_overall_size - size - 1) resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 204) headers['X-Account-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size) + 'v' * (self.max_meta_overall_size - size) resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 400) diff --git a/test/functional/test_container.py b/test/functional/test_container.py index 7c0fd3e..3a6e1b9 100755 --- a/test/functional/test_container.py +++ b/test/functional/test_container.py @@ -20,19 +20,19 @@ import unittest from nose import SkipTest from uuid import uuid4 -from swift.common.constraints import MAX_META_COUNT, MAX_META_NAME_LENGTH, \ - MAX_META_OVERALL_SIZE, MAX_META_VALUE_LENGTH - -from swift_testing import check_response, retry, skip, skip2, skip3, \ - swift_test_perm, web_front_end, requires_acls, swift_test_user +from test.functional import check_response, retry, requires_acls, \ + load_constraint, requires_policies +import test.functional as tf class TestContainer(unittest.TestCase): def setUp(self): - if skip: + if tf.skip: raise SkipTest self.name = uuid4().hex + # this container isn't created by default, but will be cleaned up + self.container = uuid4().hex def put(url, token, parsed, conn): conn.request('PUT', parsed.path + '/' + self.name, '', @@ -43,44 +43,58 @@ class TestContainer(unittest.TestCase): resp.read() self.assertEqual(resp.status, 201) + self.max_meta_count = load_constraint('max_meta_count') + self.max_meta_name_length = load_constraint('max_meta_name_length') + self.max_meta_overall_size = load_constraint('max_meta_overall_size') + self.max_meta_value_length = load_constraint('max_meta_value_length') + def tearDown(self): - if skip: + if tf.skip: raise SkipTest - def get(url, token, parsed, conn): - conn.request('GET', parsed.path + '/' + self.name + '?format=json', - '', {'X-Auth-Token': token}) + def get(url, token, parsed, conn, container): + conn.request( + 'GET', parsed.path + '/' + container + '?format=json', '', + {'X-Auth-Token': token}) return check_response(conn) - def delete(url, token, parsed, conn, obj): - conn.request('DELETE', - '/'.join([parsed.path, self.name, obj['name']]), '', + def delete(url, token, parsed, conn, container, obj): + conn.request( + 'DELETE', '/'.join([parsed.path, container, obj['name']]), '', + {'X-Auth-Token': token}) + return check_response(conn) + + for container in (self.name, self.container): + while True: + resp = retry(get, container) + body = resp.read() + if resp.status == 404: + break + self.assert_(resp.status // 100 == 2, resp.status) + objs = json.loads(body) + if not objs: + break + for obj in objs: + resp = retry(delete, container, obj) + resp.read() + self.assertEqual(resp.status, 204) + + def delete(url, token, parsed, conn, container): + conn.request('DELETE', parsed.path + '/' + container, '', {'X-Auth-Token': token}) return check_response(conn) - while True: - resp = retry(get) - body = 
resp.read() - self.assert_(resp.status // 100 == 2, resp.status) - objs = json.loads(body) - if not objs: - break - for obj in objs: - resp = retry(delete, obj) - resp.read() - self.assertEqual(resp.status, 204) - - def delete(url, token, parsed, conn): - conn.request('DELETE', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token}) - return check_response(conn) - - resp = retry(delete) + resp = retry(delete, self.name) resp.read() self.assertEqual(resp.status, 204) + # container may have not been created + resp = retry(delete, self.container) + resp.read() + self.assert_(resp.status in (204, 404)) + def test_multi_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, name, value): @@ -110,7 +124,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('x-container-meta-two'), '2') def test_unicode_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, name, value): @@ -125,7 +139,7 @@ class TestContainer(unittest.TestCase): uni_key = u'X-Container-Meta-uni\u0E12' uni_value = u'uni\u0E12' - if (web_front_end == 'integral'): + if (tf.web_front_end == 'integral'): resp = retry(post, uni_key, '1') resp.read() self.assertEqual(resp.status, 204) @@ -141,7 +155,7 @@ class TestContainer(unittest.TestCase): self.assert_(resp.status in (200, 204), resp.status) self.assertEqual(resp.getheader('X-Container-Meta-uni'), uni_value.encode('utf-8')) - if (web_front_end == 'integral'): + if (tf.web_front_end == 'integral'): resp = retry(post, uni_key, uni_value) resp.read() self.assertEqual(resp.status, 204) @@ -152,7 +166,7 @@ class TestContainer(unittest.TestCase): uni_value.encode('utf-8')) def test_PUT_metadata(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn, name, value): @@ -209,7 +223,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) def test_POST_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, value): @@ -249,7 +263,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('x-container-meta-test'), 'Value') def test_PUT_bad_metadata(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn, name, extra_headers): @@ -266,7 +280,7 @@ class TestContainer(unittest.TestCase): name = uuid4().hex resp = retry( put, name, - {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + {'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'}) resp.read() self.assertEqual(resp.status, 201) resp = retry(delete, name) @@ -275,7 +289,8 @@ class TestContainer(unittest.TestCase): name = uuid4().hex resp = retry( put, name, - {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + {'X-Container-Meta-' + ( + 'k' * (self.max_meta_name_length + 1)): 'v'}) resp.read() self.assertEqual(resp.status, 400) resp = retry(delete, name) @@ -285,7 +300,7 @@ class TestContainer(unittest.TestCase): name = uuid4().hex resp = retry( put, name, - {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + {'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length}) resp.read() self.assertEqual(resp.status, 201) resp = retry(delete, name) @@ -294,7 +309,8 @@ class TestContainer(unittest.TestCase): name = uuid4().hex resp = retry( put, name, - {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + {'X-Container-Meta-Too-Long': 'k' * ( + self.max_meta_value_length + 1)}) resp.read() self.assertEqual(resp.status, 400) resp = retry(delete, name) @@ -303,7 +319,7 
@@ class TestContainer(unittest.TestCase): name = uuid4().hex headers = {} - for x in xrange(MAX_META_COUNT): + for x in xrange(self.max_meta_count): headers['X-Container-Meta-%d' % x] = 'v' resp = retry(put, name, headers) resp.read() @@ -313,7 +329,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) name = uuid4().hex headers = {} - for x in xrange(MAX_META_COUNT + 1): + for x in xrange(self.max_meta_count + 1): headers['X-Container-Meta-%d' % x] = 'v' resp = retry(put, name, headers) resp.read() @@ -324,16 +340,17 @@ class TestContainer(unittest.TestCase): name = uuid4().hex headers = {} - header_value = 'k' * MAX_META_VALUE_LENGTH + header_value = 'k' * self.max_meta_value_length size = 0 x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH + while size < (self.max_meta_overall_size - 4 + - self.max_meta_value_length): + size += 4 + self.max_meta_value_length headers['X-Container-Meta-%04d' % x] = header_value x += 1 - if MAX_META_OVERALL_SIZE - size > 1: + if self.max_meta_overall_size - size > 1: headers['X-Container-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size - 1) + 'v' * (self.max_meta_overall_size - size - 1) resp = retry(put, name, headers) resp.read() self.assertEqual(resp.status, 201) @@ -342,7 +359,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) name = uuid4().hex headers['X-Container-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size) + 'v' * (self.max_meta_overall_size - size) resp = retry(put, name, headers) resp.read() self.assertEqual(resp.status, 400) @@ -351,7 +368,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 404) def test_POST_bad_metadata(self): - if skip: + if tf.skip: raise SkipTest def post(url, token, parsed, conn, extra_headers): @@ -362,61 +379,64 @@ class TestContainer(unittest.TestCase): resp = retry( post, - {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + {'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'}) resp.read() self.assertEqual(resp.status, 204) resp = retry( post, - {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + {'X-Container-Meta-' + ( + 'k' * (self.max_meta_name_length + 1)): 'v'}) resp.read() self.assertEqual(resp.status, 400) resp = retry( post, - {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + {'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length}) resp.read() self.assertEqual(resp.status, 204) resp = retry( post, - {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + {'X-Container-Meta-Too-Long': 'k' * ( + self.max_meta_value_length + 1)}) resp.read() self.assertEqual(resp.status, 400) headers = {} - for x in xrange(MAX_META_COUNT): + for x in xrange(self.max_meta_count): headers['X-Container-Meta-%d' % x] = 'v' resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 204) headers = {} - for x in xrange(MAX_META_COUNT + 1): + for x in xrange(self.max_meta_count + 1): headers['X-Container-Meta-%d' % x] = 'v' resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 400) headers = {} - header_value = 'k' * MAX_META_VALUE_LENGTH + header_value = 'k' * self.max_meta_value_length size = 0 x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH + while size < (self.max_meta_overall_size - 4 + - self.max_meta_value_length): + size += 4 + self.max_meta_value_length headers['X-Container-Meta-%04d' % x] = header_value x += 1 - if 
MAX_META_OVERALL_SIZE - size > 1: + if self.max_meta_overall_size - size > 1: headers['X-Container-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size - 1) + 'v' * (self.max_meta_overall_size - size - 1) resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 204) headers['X-Container-Meta-k'] = \ - 'v' * (MAX_META_OVERALL_SIZE - size) + 'v' * (self.max_meta_overall_size - size) resp = retry(post, headers) resp.read() self.assertEqual(resp.status, 400) def test_public_container(self): - if skip: + if tf.skip: raise SkipTest def get(url, token, parsed, conn): @@ -457,7 +477,7 @@ class TestContainer(unittest.TestCase): self.assert_(str(err).startswith('No result after '), err) def test_cross_account_container(self): - if skip or skip2: + if tf.skip or tf.skip2: raise SkipTest # Obtain the first account's string first_account = ['unknown'] @@ -485,8 +505,8 @@ class TestContainer(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[1], - 'X-Container-Write': swift_test_perm[1]}) + 'X-Container-Read': tf.swift_test_perm[1], + 'X-Container-Write': tf.swift_test_perm[1]}) return check_response(conn) resp = retry(post) @@ -513,7 +533,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 403) def test_cross_account_public_container(self): - if skip or skip2: + if tf.skip or tf.skip2: raise SkipTest # Obtain the first account's string first_account = ['unknown'] @@ -566,7 +586,7 @@ class TestContainer(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, - 'X-Container-Write': swift_test_perm[1]}) + 'X-Container-Write': tf.swift_test_perm[1]}) return check_response(conn) resp = retry(post) @@ -582,7 +602,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 201) def test_nonadmin_user(self): - if skip or skip3: + if tf.skip or tf.skip3: raise SkipTest # Obtain the first account's string first_account = ['unknown'] @@ -610,7 +630,7 @@ class TestContainer(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2]}) + 'X-Container-Read': tf.swift_test_perm[2]}) return check_response(conn) resp = retry(post) @@ -635,7 +655,7 @@ class TestContainer(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, - 'X-Container-Write': swift_test_perm[2]}) + 'X-Container-Write': tf.swift_test_perm[2]}) return check_response(conn) resp = retry(post) @@ -652,7 +672,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_read_only_acl_listings(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -675,7 +695,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-only access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -705,7 +725,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_read_only_acl_metadata(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn, name): @@ -740,7 +760,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant 
read-only access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -762,7 +782,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_read_write_acl_listings(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -790,7 +810,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-write access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post, headers=headers, use_account=1) @@ -833,7 +853,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_read_write_acl_metadata(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn, name): @@ -868,7 +888,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-write access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -904,7 +924,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_admin_acl_listing(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn): @@ -932,7 +952,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant admin access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post, headers=headers, use_account=1) @@ -975,7 +995,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_admin_acl_metadata(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn, name): @@ -1010,7 +1030,7 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 403) # grant access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1046,7 +1066,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_protected_container_sync(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn, name): @@ -1080,7 +1100,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # grant read-only access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1102,7 +1122,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 403) # grant read-write access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1140,7 +1160,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret') # grant admin access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, 
headers=headers, use_account=1) @@ -1168,7 +1188,7 @@ class TestContainer(unittest.TestCase): @requires_acls def test_protected_container_acl(self): - if skip3: + if tf.skip3: raise SkipTest def get(url, token, parsed, conn, name): @@ -1204,7 +1224,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # grant read-only access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1230,7 +1250,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 403) # grant read-write access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1272,7 +1292,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe') # grant admin access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -1302,7 +1322,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.getheader('X-Container-Read'), '.r:*') def test_long_name_content_type(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn): @@ -1318,7 +1338,7 @@ class TestContainer(unittest.TestCase): 'text/html; charset=UTF-8') def test_null_name(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn): @@ -1327,12 +1347,169 @@ class TestContainer(unittest.TestCase): return check_response(conn) resp = retry(put) - if (web_front_end == 'apache2'): + if (tf.web_front_end == 'apache2'): self.assertEqual(resp.status, 404) else: self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL') self.assertEqual(resp.status, 412) + def test_create_container_gets_default_policy_by_default(self): + try: + default_policy = \ + tf.FunctionalStoragePolicyCollection.from_info().default + except AssertionError: + raise SkipTest() + + def put(url, token, parsed, conn): + conn.request('PUT', parsed.path + '/' + self.container, '', + {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(put) + resp.read() + self.assertEqual(resp.status // 100, 2) + + def head(url, token, parsed, conn): + conn.request('HEAD', parsed.path + '/' + self.container, '', + {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(head) + resp.read() + headers = dict((k.lower(), v) for k, v in resp.getheaders()) + self.assertEquals(headers.get('x-storage-policy'), + default_policy['name']) + + def test_error_invalid_storage_policy_name(self): + def put(url, token, parsed, conn, headers): + new_headers = dict({'X-Auth-Token': token}, **headers) + conn.request('PUT', parsed.path + '/' + self.container, '', + new_headers) + return check_response(conn) + + # create + resp = retry(put, {'X-Storage-Policy': uuid4().hex}) + resp.read() + self.assertEqual(resp.status, 400) + + @requires_policies + def test_create_non_default_storage_policy_container(self): + policy = self.policies.exclude(default=True).select() + + def put(url, token, parsed, conn, headers=None): + base_headers = {'X-Auth-Token': token} + if headers: + base_headers.update(headers) + conn.request('PUT', parsed.path + '/' + self.container, '', + base_headers) + 
return check_response(conn)
+        headers = {'X-Storage-Policy': policy['name']}
+        resp = retry(put, headers=headers)
+        resp.read()
+        self.assertEqual(resp.status, 201)
+
+        def head(url, token, parsed, conn):
+            conn.request('HEAD', parsed.path + '/' + self.container, '',
+                         {'X-Auth-Token': token})
+            return check_response(conn)
+        resp = retry(head)
+        resp.read()
+        headers = dict((k.lower(), v) for k, v in resp.getheaders())
+        self.assertEquals(headers.get('x-storage-policy'),
+                          policy['name'])
+
+        # and test recreate without specifying a Storage Policy
+        resp = retry(put)
+        resp.read()
+        self.assertEqual(resp.status, 202)
+        # should still be original storage policy
+        resp = retry(head)
+        resp.read()
+        headers = dict((k.lower(), v) for k, v in resp.getheaders())
+        self.assertEquals(headers.get('x-storage-policy'),
+                          policy['name'])
+
+        # delete it
+        def delete(url, token, parsed, conn):
+            conn.request('DELETE', parsed.path + '/' + self.container, '',
+                         {'X-Auth-Token': token})
+            return check_response(conn)
+        resp = retry(delete)
+        resp.read()
+        self.assertEqual(resp.status, 204)
+
+        # verify no policy header
+        resp = retry(head)
+        resp.read()
+        headers = dict((k.lower(), v) for k, v in resp.getheaders())
+        self.assertEquals(headers.get('x-storage-policy'), None)
+
+    @requires_policies
+    def test_conflict_change_storage_policy_with_put(self):
+        def put(url, token, parsed, conn, headers):
+            new_headers = dict({'X-Auth-Token': token}, **headers)
+            conn.request('PUT', parsed.path + '/' + self.container, '',
+                         new_headers)
+            return check_response(conn)
+
+        # create
+        policy = self.policies.select()
+        resp = retry(put, {'X-Storage-Policy': policy['name']})
+        resp.read()
+        self.assertEqual(resp.status, 201)
+
+        # can't change it
+        other_policy = self.policies.exclude(name=policy['name']).select()
+        resp = retry(put, {'X-Storage-Policy': other_policy['name']})
+        resp.read()
+        self.assertEqual(resp.status, 409)
+
+        def head(url, token, parsed, conn):
+            conn.request('HEAD', parsed.path + '/' + self.container, '',
+                         {'X-Auth-Token': token})
+            return check_response(conn)
+        # still original policy
+        resp = retry(head)
+        resp.read()
+        headers = dict((k.lower(), v) for k, v in resp.getheaders())
+        self.assertEquals(headers.get('x-storage-policy'),
+                          policy['name'])
+
+    @requires_policies
+    def test_noop_change_storage_policy_with_post(self):
+        def put(url, token, parsed, conn, headers):
+            new_headers = dict({'X-Auth-Token': token}, **headers)
+            conn.request('PUT', parsed.path + '/' + self.container, '',
+                         new_headers)
+            return check_response(conn)
+
+        # create
+        policy = self.policies.select()
+        resp = retry(put, {'X-Storage-Policy': policy['name']})
+        resp.read()
+        self.assertEqual(resp.status, 201)
+
+        def post(url, token, parsed, conn, headers):
+            new_headers = dict({'X-Auth-Token': token}, **headers)
+            conn.request('POST', parsed.path + '/' + self.container, '',
+                         new_headers)
+            return check_response(conn)
+        # attempt update
+        for header in ('X-Storage-Policy', 'X-Storage-Policy-Index'):
+            other_policy = self.policies.exclude(name=policy['name']).select()
+            resp = retry(post, {header: other_policy['name']})
+            resp.read()
+            self.assertEqual(resp.status, 204)
+
+        def head(url, token, parsed, conn):
+            conn.request('HEAD', parsed.path + '/' + self.container, '',
+                         {'X-Auth-Token': token})
+            return check_response(conn)
+        # still original policy
+        resp = retry(head)
+        resp.read()
+        headers = dict((k.lower(), v) for k, v in resp.getheaders())
+        self.assertEquals(headers.get('x-storage-policy'),
+                          policy['name'])
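The new tests above pin a container to a storage policy at creation time and then verify the policy cannot be changed afterwards. As a standalone illustration of the same API behaviour (not part of the patch; the endpoint, token and policy name are placeholders for a SAIO-style deployment), a client interaction might look roughly like this:

import httplib

# Assumptions for illustration only: account path, token and policy name
# must match whatever the local test cluster actually provides.
STORAGE_PATH = '/v1/AUTH_test'
TOKEN = 'AUTH_tk_example'
POLICY = 'swiftonfile'

conn = httplib.HTTPConnection('127.0.0.1', 8080)

# Create a container pinned to a specific storage policy.
conn.request('PUT', STORAGE_PATH + '/policy-demo', '',
             {'X-Auth-Token': TOKEN, 'X-Storage-Policy': POLICY})
resp = conn.getresponse()
resp.read()
print resp.status                         # 201 on first creation

# HEAD reports the policy the container was created with.
conn.request('HEAD', STORAGE_PATH + '/policy-demo', '',
             {'X-Auth-Token': TOKEN})
resp = conn.getresponse()
resp.read()
print resp.getheader('X-Storage-Policy')  # e.g. 'swiftonfile'

# Per the tests above: a second PUT naming a different policy is refused
# with 409, and a POST carrying X-Storage-Policy returns 204 but leaves
# the original policy in place.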
+ if __name__ == '__main__': unittest.main() diff --git a/test/functional/test_object.py b/test/functional/test_object.py index 675de30..cbdca86 100755 --- a/test/functional/test_object.py +++ b/test/functional/test_object.py @@ -21,24 +21,21 @@ from uuid import uuid4 from swift.common.utils import json -from swift_testing import check_response, retry, skip, skip3, \ - swift_test_perm, web_front_end, requires_acls, swift_test_user +from test.functional import check_response, retry, requires_acls, \ + requires_policies +import test.functional as tf class TestObject(unittest.TestCase): def setUp(self): - if skip: + if tf.skip: raise SkipTest self.container = uuid4().hex - def put(url, token, parsed, conn): - conn.request('PUT', parsed.path + '/' + self.container, '', - {'X-Auth-Token': token}) - return check_response(conn) - resp = retry(put) - resp.read() - self.assertEqual(resp.status, 201) + self.containers = [] + self._create_container(self.container) + self.obj = uuid4().hex def put(url, token, parsed, conn): @@ -50,40 +47,65 @@ class TestObject(unittest.TestCase): resp.read() self.assertEqual(resp.status, 201) + def _create_container(self, name=None, headers=None): + if not name: + name = uuid4().hex + self.containers.append(name) + headers = headers or {} + + def put(url, token, parsed, conn, name): + new_headers = dict({'X-Auth-Token': token}, **headers) + conn.request('PUT', parsed.path + '/' + name, '', + new_headers) + return check_response(conn) + resp = retry(put, name) + resp.read() + self.assertEqual(resp.status, 201) + return name + def tearDown(self): - if skip: + if tf.skip: raise SkipTest - def delete(url, token, parsed, conn, obj): - conn.request('DELETE', - '%s/%s/%s' % (parsed.path, self.container, obj), - '', {'X-Auth-Token': token}) - return check_response(conn) - # get list of objects in container - def list(url, token, parsed, conn): - conn.request('GET', - '%s/%s' % (parsed.path, self.container), - '', {'X-Auth-Token': token}) + def get(url, token, parsed, conn, container): + conn.request( + 'GET', parsed.path + '/' + container + '?format=json', '', + {'X-Auth-Token': token}) return check_response(conn) - resp = retry(list) - object_listing = resp.read() - self.assertEqual(resp.status, 200) - # iterate over object listing and delete all objects - for obj in object_listing.splitlines(): - resp = retry(delete, obj) - resp.read() - self.assertEqual(resp.status, 204) + # delete an object + def delete(url, token, parsed, conn, container, obj): + conn.request( + 'DELETE', '/'.join([parsed.path, container, obj['name']]), '', + {'X-Auth-Token': token}) + return check_response(conn) + + for container in self.containers: + while True: + resp = retry(get, container) + body = resp.read() + if resp.status == 404: + break + self.assert_(resp.status // 100 == 2, resp.status) + objs = json.loads(body) + if not objs: + break + for obj in objs: + resp = retry(delete, container, obj) + resp.read() + self.assertEqual(resp.status, 204) # delete the container - def delete(url, token, parsed, conn): - conn.request('DELETE', parsed.path + '/' + self.container, '', + def delete(url, token, parsed, conn, name): + conn.request('DELETE', parsed.path + '/' + name, '', {'X-Auth-Token': token}) return check_response(conn) - resp = retry(delete) - resp.read() - self.assertEqual(resp.status, 204) + + for container in self.containers: + resp = retry(delete, container) + resp.read() + self.assert_(resp.status in (204, 404)) def test_if_none_match(self): def put(url, token, parsed, conn): @@ -112,7 
+134,7 @@ class TestObject(unittest.TestCase): self.assertEquals(resp.status, 400) def test_copy_object(self): - if skip: + if tf.skip: raise SkipTest source = '%s/%s' % (self.container, self.obj) @@ -186,7 +208,7 @@ class TestObject(unittest.TestCase): self.assertEqual(resp.status, 204) def test_public_object(self): - if skip: + if tf.skip: raise SkipTest def get(url, token, parsed, conn): @@ -225,7 +247,7 @@ class TestObject(unittest.TestCase): self.assert_(str(err).startswith('No result after ')) def test_private_object(self): - if skip or skip3: + if tf.skip or tf.skip3: raise SkipTest # Ensure we can't access the object with the third account @@ -245,8 +267,8 @@ class TestObject(unittest.TestCase): conn.request('PUT', '%s/%s' % ( parsed.path, shared_container), '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2], - 'X-Container-Write': swift_test_perm[2]}) + 'X-Container-Read': tf.swift_test_perm[2], + 'X-Container-Write': tf.swift_test_perm[2]}) return check_response(conn) resp = retry(put) resp.read() @@ -319,8 +341,8 @@ class TestObject(unittest.TestCase): @requires_acls def test_read_only(self): - if skip3: - raise SkipTest + if tf.skip3: + raise tf.SkipTest def get_listing(url, token, parsed, conn): conn.request('GET', '%s/%s' % (parsed.path, self.container), '', @@ -361,7 +383,7 @@ class TestObject(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-only access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-only': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -400,7 +422,7 @@ class TestObject(unittest.TestCase): @requires_acls def test_read_write(self): - if skip3: + if tf.skip3: raise SkipTest def get_listing(url, token, parsed, conn): @@ -442,7 +464,7 @@ class TestObject(unittest.TestCase): self.assertEquals(resp.status, 403) # grant read-write access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'read-write': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -481,7 +503,7 @@ class TestObject(unittest.TestCase): @requires_acls def test_admin(self): - if skip3: + if tf.skip3: raise SkipTest def get_listing(url, token, parsed, conn): @@ -523,7 +545,7 @@ class TestObject(unittest.TestCase): self.assertEquals(resp.status, 403) # grant admin access - acl_user = swift_test_user[2] + acl_user = tf.swift_test_user[2] acl = {'admin': [acl_user]} headers = {'x-account-access-control': json.dumps(acl)} resp = retry(post_account, headers=headers, use_account=1) @@ -561,7 +583,7 @@ class TestObject(unittest.TestCase): self.assert_(self.obj not in listing) def test_manifest(self): - if skip: + if tf.skip: raise SkipTest # Data for the object segments segments1 = ['one', 'two', 'three', 'four', 'five'] @@ -672,7 +694,7 @@ class TestObject(unittest.TestCase): self.assertEqual(resp.read(), ''.join(segments2)) self.assertEqual(resp.status, 200) - if not skip3: + if not tf.skip3: # Ensure we can't access the manifest with the third account def get(url, token, parsed, conn): @@ -687,7 +709,7 @@ class TestObject(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', '%s/%s' % (parsed.path, self.container), '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2]}) + 'X-Container-Read': tf.swift_test_perm[2]}) return check_response(conn) resp = retry(post) resp.read() @@ -745,7 +767,7 @@ class 
TestObject(unittest.TestCase): self.assertEqual(resp.read(), ''.join(segments3)) self.assertEqual(resp.status, 200) - if not skip3: + if not tf.skip3: # Ensure we can't access the manifest with the third account # (because the segments are in a protected container even if the @@ -763,7 +785,7 @@ class TestObject(unittest.TestCase): def post(url, token, parsed, conn): conn.request('POST', '%s/%s' % (parsed.path, acontainer), '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2]}) + 'X-Container-Read': tf.swift_test_perm[2]}) return check_response(conn) resp = retry(post) resp.read() @@ -831,7 +853,7 @@ class TestObject(unittest.TestCase): self.assertEqual(resp.status, 204) def test_delete_content_type(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn): @@ -853,7 +875,7 @@ class TestObject(unittest.TestCase): 'text/html; charset=UTF-8') def test_delete_if_delete_at_bad(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn): @@ -875,7 +897,7 @@ class TestObject(unittest.TestCase): self.assertEqual(resp.status, 400) def test_null_name(self): - if skip: + if tf.skip: raise SkipTest def put(url, token, parsed, conn): @@ -884,24 +906,16 @@ class TestObject(unittest.TestCase): self.container), 'test', {'X-Auth-Token': token}) return check_response(conn) resp = retry(put) - if (web_front_end == 'apache2'): + if (tf.web_front_end == 'apache2'): self.assertEqual(resp.status, 404) else: self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL') self.assertEqual(resp.status, 412) def test_cors(self): - if skip: + if tf.skip: raise SkipTest - def is_strict_mode(url, token, parsed, conn): - conn.request('GET', '/info') - resp = conn.getresponse() - if resp.status // 100 == 2: - info = json.loads(resp.read()) - return info.get('swift', {}).get('strict_cors_mode', False) - return False - def put_cors_cont(url, token, parsed, conn, orig): conn.request( 'PUT', '%s/%s' % (parsed.path, self.container), @@ -924,8 +938,6 @@ class TestObject(unittest.TestCase): '', headers) return conn.getresponse() - strict_cors = retry(is_strict_mode) - resp = retry(put_cors_cont, '*') resp.read() self.assertEquals(resp.status // 100, 2) @@ -977,6 +989,11 @@ class TestObject(unittest.TestCase): resp.read() self.assertEquals(resp.status, 401) + try: + strict_cors = tf.cluster_info['swift']['strict_cors_mode'] + except KeyError: + strict_cors = False + if strict_cors: resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://m.com'}) @@ -1001,6 +1018,64 @@ class TestObject(unittest.TestCase): self.assertEquals(headers.get('access-control-allow-origin'), 'http://m.com') + @requires_policies + def test_cross_policy_copy(self): + # create container in first policy + policy = self.policies.select() + container = self._create_container( + headers={'X-Storage-Policy': policy['name']}) + obj = uuid4().hex + + # create a container in second policy + other_policy = self.policies.exclude(name=policy['name']).select() + other_container = self._create_container( + headers={'X-Storage-Policy': other_policy['name']}) + other_obj = uuid4().hex + + def put_obj(url, token, parsed, conn, container, obj): + # to keep track of things, use the original path as the body + content = '%s/%s' % (container, obj) + path = '%s/%s' % (parsed.path, content) + conn.request('PUT', path, content, {'X-Auth-Token': token}) + return check_response(conn) + + # create objects + for c, o in zip((container, other_container), (obj, other_obj)): + resp = retry(put_obj, c, o) + resp.read() + 
self.assertEqual(resp.status, 201) + + def put_copy_from(url, token, parsed, conn, container, obj, source): + dest_path = '%s/%s/%s' % (parsed.path, container, obj) + conn.request('PUT', dest_path, '', + {'X-Auth-Token': token, + 'Content-Length': '0', + 'X-Copy-From': source}) + return check_response(conn) + + copy_requests = ( + (container, other_obj, '%s/%s' % (other_container, other_obj)), + (other_container, obj, '%s/%s' % (container, obj)), + ) + + # copy objects + for c, o, source in copy_requests: + resp = retry(put_copy_from, c, o, source) + resp.read() + self.assertEqual(resp.status, 201) + + def get_obj(url, token, parsed, conn, container, obj): + path = '%s/%s/%s' % (parsed.path, container, obj) + conn.request('GET', path, '', {'X-Auth-Token': token}) + return check_response(conn) + + # validate contents, contents should be source + validate_requests = copy_requests + for c, o, body in validate_requests: + resp = retry(get_obj, c, o) + self.assertEqual(resp.status, 200) + self.assertEqual(body, resp.read()) + if __name__ == '__main__': unittest.main() diff --git a/test/functional/tests.py b/test/functional/tests.py index ad87d7e..4d9179e 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -14,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Modifications by Red Hat, Inc. - from datetime import datetime -import os import hashlib import hmac import json @@ -25,131 +22,18 @@ import locale import random import StringIO import time -import threading import unittest import urllib import uuid +import eventlet from nose import SkipTest -from ConfigParser import ConfigParser -from test import get_config +from swift.common.storage_policy import POLICY + +from test.functional import normalized_urls, load_constraint, cluster_info +import test.functional as tf from test.functional.swift_test_client import Account, Connection, File, \ ResponseError -from swift.common.constraints import MAX_FILE_SIZE, MAX_META_NAME_LENGTH, \ - MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \ - MAX_OBJECT_NAME_LENGTH, CONTAINER_LISTING_LIMIT, ACCOUNT_LISTING_LIMIT, \ - MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH, MAX_HEADER_SIZE -from gluster.swift.common.constraints import \ - set_object_name_component_length, get_object_name_component_length - -default_constraints = dict(( - ('max_file_size', MAX_FILE_SIZE), - ('max_meta_name_length', MAX_META_NAME_LENGTH), - ('max_meta_value_length', MAX_META_VALUE_LENGTH), - ('max_meta_count', MAX_META_COUNT), - ('max_meta_overall_size', MAX_META_OVERALL_SIZE), - ('max_object_name_length', MAX_OBJECT_NAME_LENGTH), - ('container_listing_limit', CONTAINER_LISTING_LIMIT), - ('account_listing_limit', ACCOUNT_LISTING_LIMIT), - ('max_account_name_length', MAX_ACCOUNT_NAME_LENGTH), - ('max_container_name_length', MAX_CONTAINER_NAME_LENGTH), - ('max_header_size', MAX_HEADER_SIZE))) -constraints_conf = ConfigParser() -conf_exists = constraints_conf.read('/etc/swift/swift.conf') -# Constraints are set first from the test config, then from -# /etc/swift/swift.conf if it exists. If swift.conf doesn't exist, -# then limit test coverage. This allows SAIO tests to work fine but -# requires remote functional testing to know something about the cluster -# that is being tested. 
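# (Editor's sketch, not part of the patch.) The block removed below sourced
# constraint values from test.conf or /etc/swift/swift.conf. The upstream
# framework this patch adopts exposes them through test.functional instead,
# driven by the proxy's /info response; a test now does roughly the
# following, where load_constraint presumably skips the test when the
# cluster does not report a value, as the old helper did:
import test.functional as tf
from test.functional import load_constraint

name_limit = load_constraint('max_object_name_length')
strict_cors = tf.cluster_info.get('swift', {}).get('strict_cors_mode', False)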
-config = get_config('func_test') -for k in default_constraints: - if k in config: - # prefer what's in test.conf - config[k] = int(config[k]) - elif conf_exists: - # swift.conf exists, so use what's defined there (or swift defaults) - # This normally happens when the test is running locally to the cluster - # as in a SAIO. - config[k] = default_constraints[k] - else: - # .functests don't know what the constraints of the tested cluster are, - # so the tests can't reliably pass or fail. Therefore, skip those - # tests. - config[k] = '%s constraint is not defined' % k - -web_front_end = config.get('web_front_end', 'integral') -normalized_urls = config.get('normalized_urls', False) -set_object_name_component_length() - - -def load_constraint(name): - c = config[name] - if not isinstance(c, int): - raise SkipTest(c) - return c - -locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C')) - - -def create_limit_filename(name_limit): - """ - Convert a split a large object name with - slashes so as to conform the GlusterFS file name - constraints. - Example: Take a object name: 'a'*1024, and - convert it to a*255/a*255/... - """ - # Get the file name limit from the configuration file - filename_limit = get_object_name_component_length() - - # Convert string to a list: "abc" -> ['a', 'b', 'c'] - filename_list = list('a' * name_limit) - - # Replace chars at filename limits to '/' - for index in range(filename_limit, name_limit, filename_limit): - filename_list[index] = os.path.sep - - # Cannot end in a '/' - if os.path.sep == filename_list[-1]: - return "".join(filename_list[:-1]) - else: - return "".join(filename_list) - - -def chunks(s, length=3): - i, j = 0, length - while i < len(s): - yield s[i:j] - i, j = j, j + length - - -def timeout(seconds, method, *args, **kwargs): - class TimeoutThread(threading.Thread): - def __init__(self, method, *args, **kwargs): - threading.Thread.__init__(self) - - self.method = method - self.args = args - self.kwargs = kwargs - self.exception = None - - def run(self): - try: - self.method(*self.args, **self.kwargs) - except Exception as e: - self.exception = e - - t = TimeoutThread(method, *args, **kwargs) - t.start() - t.join(seconds) - - if t.exception: - raise t.exception - - if t.isAlive(): - t._Thread__stop() - return True - return False class Utils(object): @@ -207,10 +91,10 @@ class Base2(object): class TestAccountEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.containers = [] @@ -386,6 +270,19 @@ class TestAccount(Base): self.assertEqual(sorted(containers, cmp=locale.strcoll), containers) + def testQuotedWWWAuthenticateHeader(self): + conn = Connection(tf.config) + conn.authenticate() + inserted_html = 'Hello World' + hax = 'AUTH_haxx"\nContent-Length: %d\n\n%s' % (len(inserted_html), + inserted_html) + quoted_hax = urllib.quote(hax) + conn.connection.request('GET', '/v1/' + quoted_hax, None, {}) + resp = conn.connection.getresponse() + resp_headers = resp.getheaders() + expected = ('www-authenticate', 'Swift realm="%s"' % quoted_hax) + self.assert_(expected in resp_headers) + class TestAccountUTF8(Base2, TestAccount): set_up = False @@ -394,10 +291,10 @@ class TestAccountUTF8(Base2, TestAccount): class TestAccountNoContainersEnv(object): @classmethod def setUp(cls): - 
cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() @@ -423,10 +320,10 @@ class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers): class TestContainerEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.container = cls.account.container(Utils.create_name()) @@ -715,10 +612,10 @@ class TestContainerPathsEnv(object): @classmethod def setUp(cls): raise SkipTest('Objects ending in / are not supported') - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.file_size = 8 @@ -894,10 +791,10 @@ class TestContainerPaths(Base): class TestFileEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.container = cls.account.container(Utils.create_name()) @@ -1079,7 +976,7 @@ class TestFile(Base): limit = load_constraint('max_object_name_length') for l in (1, 10, limit / 2, limit - 1, limit, limit + 1, limit * 2): - file_item = self.env.container.file(create_limit_filename(l)) + file_item = self.env.container.file('a' * l) if l <= limit: self.assert_(file_item.write()) @@ -1245,6 +1142,15 @@ class TestFile(Base): limit = load_constraint('max_file_size') tsecs = 3 + def timeout(seconds, method, *args, **kwargs): + try: + with eventlet.Timeout(seconds): + method(*args, **kwargs) + except eventlet.Timeout: + return True + else: + return False + for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1, limit + 10, limit + 100): @@ -1553,8 +1459,16 @@ class TestFile(Base): self.assertEqual(etag, header_etag) def testChunkedPut(self): - if (web_front_end == 'apache2'): - raise SkipTest() + if (tf.web_front_end == 'apache2'): + raise SkipTest("Chunked PUT can only be tested with apache2 web" + " front end") + + def chunks(s, length=3): + i, j = 0, length + while i < len(s): + yield s[i:j] + i, j = j, j + length + data = File.random_data(10000) etag = File.compute_md5sum(data) @@ -1578,10 +1492,10 @@ class TestFileUTF8(Base2, TestFile): class TestDloEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.container = cls.account.container(Utils.create_name()) @@ -1699,6 +1613,51 @@ class TestDlo(Base): # try not to leave this around for other tests to stumble over self.env.container.file("copied-man1").delete() + def test_dlo_if_match_get(self): + manifest = self.env.container.file("man1") + etag = 
manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.read, + hdrs={'If-Match': 'not-%s' % etag}) + self.assert_status(412) + + manifest.read(hdrs={'If-Match': etag}) + self.assert_status(200) + + def test_dlo_if_none_match_get(self): + manifest = self.env.container.file("man1") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.read, + hdrs={'If-None-Match': etag}) + self.assert_status(304) + + manifest.read(hdrs={'If-None-Match': "not-%s" % etag}) + self.assert_status(200) + + def test_dlo_if_match_head(self): + manifest = self.env.container.file("man1") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.info, + hdrs={'If-Match': 'not-%s' % etag}) + self.assert_status(412) + + manifest.info(hdrs={'If-Match': etag}) + self.assert_status(200) + + def test_dlo_if_none_match_head(self): + manifest = self.env.container.file("man1") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.info, + hdrs={'If-None-Match': etag}) + self.assert_status(304) + + manifest.info(hdrs={'If-None-Match': "not-%s" % etag}) + self.assert_status(200) + + class TestDloUTF8(Base2, TestDlo): set_up = False @@ -1706,10 +1665,10 @@ class TestDloUTF8(Base2, TestDlo): class TestFileComparisonEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.container = cls.account.container(Utils.create_name()) @@ -1761,19 +1720,25 @@ class TestFileComparison(Base): for file_item in self.env.files: hdrs = {'If-Modified-Since': self.env.time_old_f1} self.assert_(file_item.read(hdrs=hdrs)) + self.assert_(file_item.info(hdrs=hdrs)) hdrs = {'If-Modified-Since': self.env.time_new} self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(304) + self.assertRaises(ResponseError, file_item.info, hdrs=hdrs) + self.assert_status(304) def testIfUnmodifiedSince(self): for file_item in self.env.files: hdrs = {'If-Unmodified-Since': self.env.time_new} self.assert_(file_item.read(hdrs=hdrs)) + self.assert_(file_item.info(hdrs=hdrs)) hdrs = {'If-Unmodified-Since': self.env.time_old_f2} self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(412) + self.assertRaises(ResponseError, file_item.info, hdrs=hdrs) + self.assert_status(412) def testIfMatchAndUnmodified(self): for file_item in self.env.files: @@ -1823,17 +1788,16 @@ class TestSloEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() if cls.slo_enabled is None: - cluster_info = cls.conn.cluster_info() cls.slo_enabled = 'slo' in cluster_info if not cls.slo_enabled: return - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) cls.account.delete_containers() cls.container = cls.account.container(Utils.create_name()) @@ -1899,7 +1863,6 @@ class TestSlo(Base): set_up = False def setUp(self): - raise SkipTest("SLO not enabled yet in gluster-swift") super(TestSlo, self).setUp() if self.env.slo_enabled is False: raise SkipTest("SLO not enabled") @@ -2039,6 +2002,50 @@ class TestSlo(Base): self.assertEqual('application/json; charset=utf-8', got_info['content_type']) + def test_slo_if_match_get(self): + 
manifest = self.env.container.file("manifest-abcde") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.read, + hdrs={'If-Match': 'not-%s' % etag}) + self.assert_status(412) + + manifest.read(hdrs={'If-Match': etag}) + self.assert_status(200) + + def test_slo_if_none_match_get(self): + manifest = self.env.container.file("manifest-abcde") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.read, + hdrs={'If-None-Match': etag}) + self.assert_status(304) + + manifest.read(hdrs={'If-None-Match': "not-%s" % etag}) + self.assert_status(200) + + def test_slo_if_match_head(self): + manifest = self.env.container.file("manifest-abcde") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.info, + hdrs={'If-Match': 'not-%s' % etag}) + self.assert_status(412) + + manifest.info(hdrs={'If-Match': etag}) + self.assert_status(200) + + def test_slo_if_none_match_head(self): + manifest = self.env.container.file("manifest-abcde") + etag = manifest.info()['etag'] + + self.assertRaises(ResponseError, manifest.info, + hdrs={'If-None-Match': etag}) + self.assert_status(304) + + manifest.info(hdrs={'If-None-Match': "not-%s" % etag}) + self.assert_status(200) + class TestSloUTF8(Base2, TestSlo): set_up = False @@ -2049,11 +2056,11 @@ class TestObjectVersioningEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() - cls.account = Account(cls.conn, config.get('account', - config['username'])) + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) # avoid getting a prefix that stops halfway through an encoded # character @@ -2073,6 +2080,61 @@ class TestObjectVersioningEnv(object): cls.versioning_enabled = 'versions' in container_info +class TestCrossPolicyObjectVersioningEnv(object): + # tri-state: None initially, then True/False + versioning_enabled = None + multiple_policies_enabled = None + policies = None + + @classmethod + def setUp(cls): + cls.conn = Connection(tf.config) + cls.conn.authenticate() + + if cls.multiple_policies_enabled is None: + try: + cls.policies = tf.FunctionalStoragePolicyCollection.from_info() + except AssertionError: + pass + + if cls.policies and len(cls.policies) > 1: + cls.multiple_policies_enabled = True + else: + cls.multiple_policies_enabled = False + # We have to lie here that versioning is enabled. We actually + # don't know, but it does not matter. We know these tests cannot + # run without multiple policies present. If multiple policies are + # present, we won't be setting this field to any value, so it + # should all still work. 
+ cls.versioning_enabled = True + return + + policy = cls.policies.select() + version_policy = cls.policies.exclude(name=policy['name']).select() + + cls.account = Account(cls.conn, tf.config.get('account', + tf.config['username'])) + + # avoid getting a prefix that stops halfway through an encoded + # character + prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8") + + cls.versions_container = cls.account.container(prefix + "-versions") + if not cls.versions_container.create( + {POLICY: policy['name']}): + raise ResponseError(cls.conn.response) + + cls.container = cls.account.container(prefix + "-objs") + if not cls.container.create( + hdrs={'X-Versions-Location': cls.versions_container.name, + POLICY: version_policy['name']}): + raise ResponseError(cls.conn.response) + + container_info = cls.container.info() + # if versioning is off, then X-Versions-Location won't persist + cls.versioning_enabled = 'versions' in container_info + + class TestObjectVersioning(Base): env = TestObjectVersioningEnv set_up = False @@ -2123,26 +2185,39 @@ class TestObjectVersioningUTF8(Base2, TestObjectVersioning): set_up = False +class TestCrossPolicyObjectVersioning(TestObjectVersioning): + env = TestCrossPolicyObjectVersioningEnv + set_up = False + + def setUp(self): + super(TestCrossPolicyObjectVersioning, self).setUp() + if self.env.multiple_policies_enabled is False: + raise SkipTest('Cross policy test requires multiple policies') + elif self.env.multiple_policies_enabled is not True: + # just some sanity checking + raise Exception("Expected multiple_policies_enabled " + "to be True/False, got %r" % ( + self.env.versioning_enabled,)) + + class TestTempurlEnv(object): tempurl_enabled = None # tri-state: None initially, then True/False @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() if cls.tempurl_enabled is None: - cluster_info = cls.conn.cluster_info() cls.tempurl_enabled = 'tempurl' in cluster_info if not cls.tempurl_enabled: return - cls.tempurl_methods = cluster_info['tempurl']['methods'] cls.tempurl_key = Utils.create_name() cls.tempurl_key2 = Utils.create_name() cls.account = Account( - cls.conn, config.get('account', config['username'])) + cls.conn, tf.config.get('account', tf.config['username'])) cls.account.delete_containers() cls.account.update_metadata({ 'temp-url-key': cls.tempurl_key, @@ -2303,17 +2378,16 @@ class TestSloTempurlEnv(object): @classmethod def setUp(cls): - cls.conn = Connection(config) + cls.conn = Connection(tf.config) cls.conn.authenticate() if cls.enabled is None: - cluster_info = cls.conn.cluster_info() cls.enabled = 'tempurl' in cluster_info and 'slo' in cluster_info cls.tempurl_key = Utils.create_name() cls.account = Account( - cls.conn, config.get('account', config['username'])) + cls.conn, tf.config.get('account', tf.config['username'])) cls.account.delete_containers() cls.account.update_metadata({'temp-url-key': cls.tempurl_key}) diff --git a/test/functional_auth/gswauth/__init__.py b/test/functional_auth/gswauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/functional_auth/gswauth/conf/account-server.conf b/test/functional_auth/gswauth/conf/account-server.conf deleted file mode 100644 index 4996367..0000000 --- a/test/functional_auth/gswauth/conf/account-server.conf +++ /dev/null @@ -1,32 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted 
*before* the account-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6012 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = account-server - -[app:account-server] -use = egg:gluster_swift#account -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the account server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off diff --git a/test/functional_auth/gswauth/conf/container-server.conf b/test/functional_auth/gswauth/conf/container-server.conf deleted file mode 100644 index 122d97e..0000000 --- a/test/functional_auth/gswauth/conf/container-server.conf +++ /dev/null @@ -1,35 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the container-server workers -# start, you can *consider* setting this value to "false" to reduce the -# per-request overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6011 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = container-server - -[app:container-server] -use = egg:gluster_swift#container -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the container server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - -#enable object versioning for functional test -allow_versions = on diff --git a/test/functional_auth/gswauth/conf/fs.conf b/test/functional_auth/gswauth/conf/fs.conf deleted file mode 100644 index b06a854..0000000 --- a/test/functional_auth/gswauth/conf/fs.conf +++ /dev/null @@ -1,19 +0,0 @@ -[DEFAULT] -# -# IP address of a node in the GlusterFS server cluster hosting the -# volumes to be served via Swift API. -mount_ip = localhost - -# Performance optimization parameter. When turned off, the filesystem will -# see a reduced number of stat calls, resulting in substantially faster -# response time for GET and HEAD container requests on containers with large -# numbers of objects, at the expense of an accurate count of combined bytes -# used by all objects in the container. For most installations "off" works -# fine. -# -# *** Keep on for Functional Tests *** -accurate_size_in_listing = on - -# *** Keep on for Functional Tests *** -container_update_object_count = on -account_update_container_count = on diff --git a/test/functional_auth/gswauth/conf/object-expirer.conf b/test/functional_auth/gswauth/conf/object-expirer.conf deleted file mode 100644 index b75963c..0000000 --- a/test/functional_auth/gswauth/conf/object-expirer.conf +++ /dev/null @@ -1,17 +0,0 @@ -[DEFAULT] - -[object-expirer] -# auto_create_account_prefix = . 
- -[pipeline:main] -pipeline = catch_errors cache proxy-server - -[app:proxy-server] -use = egg:swift#proxy - -[filter:cache] -use = egg:swift#memcache -memcache_servers = 127.0.0.1:11211 - -[filter:catch_errors] -use = egg:swift#catch_errors diff --git a/test/functional_auth/gswauth/conf/object-server.conf b/test/functional_auth/gswauth/conf/object-server.conf deleted file mode 100644 index 3cb9ead..0000000 --- a/test/functional_auth/gswauth/conf/object-server.conf +++ /dev/null @@ -1,48 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the object-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6010 -# -# Maximum number of clients one worker can process simultaneously (it will -# actually accept N + 1). Setting this to one (1) will only handle one request -# at a time, without accepting another request concurrently. By increasing the -# number of workers to a much higher value, one can prevent slow file system -# operations for one request from starving other requests. -max_clients = 1024 -# -# If not doing the above, setting this value initially to match the number of -# CPUs is a good starting point for determining the right value. -workers = 1 -# Override swift's default behaviour for fallocate. -disable_fallocate = true - -[pipeline:main] -pipeline = object-server - -[app:object-server] -use = egg:gluster_swift#object -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# For performance, after ensuring things are running in a stable manner, you -# can turn off normal request logging for the object server to reduce the -# per-request overhead and unclutter the log files. Warnings and errors will -# still be logged. -log_requests = off -# -# Adjust this value to match the stripe width of the underlying storage array -# (not the stripe element size). This will provide a reasonable starting point -# for tuning this value. -disk_chunk_size = 65536 -# -# Adjust this value match whatever is set for the disk_chunk_size initially. -# This will provide a reasonable starting point for tuning this value. -network_chunk_size = 65556 diff --git a/test/functional_auth/gswauth/conf/proxy-server.conf b/test/functional_auth/gswauth/conf/proxy-server.conf deleted file mode 100644 index 60661e8..0000000 --- a/test/functional_auth/gswauth/conf/proxy-server.conf +++ /dev/null @@ -1,81 +0,0 @@ -[DEFAULT] -bind_port = 8080 -user = root -# Consider using 1 worker per CPU -workers = 1 - -[pipeline:main] -pipeline = catch_errors healthcheck proxy-logging cache tempurl gswauth proxy-logging proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy -log_facility = LOG_LOCAL1 -log_level = WARN -# The API allows for account creation and deletion, but since Gluster/Swift -# automounts a Gluster volume for a given account, there is no way to create -# or delete an account. So leave this off. -allow_account_management = false -account_autocreate = true -# Ensure the proxy server uses fast-POSTs since we don't need to make a copy -# of the entire object given that all metadata is stored in the object -# extended attributes (no .meta file used after creation) and no container -# sync feature to present. 
-object_post_as_copy = false -# Only need to recheck the account exists once a day -recheck_account_existence = 86400 -# May want to consider bumping this up if containers are created and destroyed -# infrequently. -recheck_container_existence = 60 -# Timeout clients that don't read or write to the proxy server after 5 -# seconds. -client_timeout = 5 -# Give more time to connect to the object, container or account servers in -# cases of high load. -conn_timeout = 5 -# For high load situations, once connected to an object, container or account -# server, allow for delays communicating with them. -node_timeout = 60 -# May want to consider bumping up this value to 1 - 4 MB depending on how much -# traffic is for multi-megabyte or gigabyte requests; perhaps matching the -# stripe width (not stripe element size) of your storage volume is a good -# starting point. See below for sizing information. -object_chunk_size = 65536 -# If you do decide to increase the object_chunk_size, then consider lowering -# this value to one. Up to "put_queue_length" object_chunk_size'd buffers can -# be queued to the object server for processing. Given one proxy server worker -# can handle up to 1,024 connections, by default, it will consume 10 * 65,536 -# * 1,024 bytes of memory in the worse case (default values). Be sure the -# amount of memory available on the system can accommodate increased values -# for object_chunk_size. -put_queue_depth = 10 - -[filter:catch_errors] -use = egg:swift#catch_errors - -[filter:proxy-logging] -use = egg:swift#proxy_logging - -[filter:healthcheck] -use = egg:swift#healthcheck - -[filter:tempauth] -use = egg:swift#tempauth -user_admin_admin = admin .admin .reseller_admin -user_test_tester = testing .admin -user_test2_tester2 = testing2 .admin -user_test_tester3 = testing3 - -[filter:gswauth] -use = egg:gluster_swift#gswauth -set log_name = gswauth -super_admin_key = gswauthkey -metadata_volume = gsmetadata - -[filter:cache] -use = egg:swift#memcache -# Update this line to contain a comma separated list of memcache servers -# shared by all nodes running the proxy-server service. -memcache_servers = localhost:11211 - -[filter:tempurl] -use = egg:swift#tempurl diff --git a/test/functional_auth/gswauth/conf/swift.conf b/test/functional_auth/gswauth/conf/swift.conf deleted file mode 100644 index f64ba5a..0000000 --- a/test/functional_auth/gswauth/conf/swift.conf +++ /dev/null @@ -1,85 +0,0 @@ -[DEFAULT] - - -[swift-hash] -# random unique string that can never change (DO NOT LOSE) -swift_hash_path_suffix = gluster - - -# The swift-constraints section sets the basic constraints on data -# saved in the swift cluster. - -[swift-constraints] - -# max_file_size is the largest "normal" object that can be saved in -# the cluster. This is also the limit on the size of each segment of -# a "large" object when using the large object manifest support. -# This value is set in bytes. Setting it to lower than 1MiB will cause -# some tests to fail. -# Default is 1 TiB = 2**30*1024 -max_file_size = 1099511627776 - - -# max_meta_name_length is the max number of bytes in the utf8 encoding -# of the name portion of a metadata header. 
- -#max_meta_name_length = 128 - - -# max_meta_value_length is the max number of bytes in the utf8 encoding -# of a metadata value - -#max_meta_value_length = 256 - - -# max_meta_count is the max number of metadata keys that can be stored -# on a single account, container, or object - -#max_meta_count = 90 - - -# max_meta_overall_size is the max number of bytes in the utf8 encoding -# of the metadata (keys + values) - -#max_meta_overall_size = 4096 - - -# max_object_name_length is the max number of bytes in the utf8 encoding of an -# object name: Gluster FS can handle much longer file names, but the length -# between the slashes of the URL is handled below. Remember that most web -# clients can't handle anything greater than 2048, and those that do are -# rather clumsy. - -max_object_name_length = 2048 - -# max_object_name_component_length (GlusterFS) is the max number of bytes in -# the utf8 encoding of an object name component (the part between the -# slashes); this is a limit imposed by the underlying file system (for XFS it -# is 255 bytes). - -max_object_name_component_length = 255 - -# container_listing_limit is the default (and max) number of items -# returned for a container listing request - -#container_listing_limit = 10000 - - -# account_listing_limit is the default (and max) number of items returned -# for an account listing request - -#account_listing_limit = 10000 - - -# max_account_name_length is the max number of bytes in the utf8 encoding of -# an account name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_account_name_length = 255 - - -# max_container_name_length is the max number of bytes in the utf8 encoding -# of a container name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_container_name_length = 255 diff --git a/test/functional_auth/gswauth/conf/test.conf b/test/functional_auth/gswauth/conf/test.conf deleted file mode 100644 index 15c9aea..0000000 --- a/test/functional_auth/gswauth/conf/test.conf +++ /dev/null @@ -1,58 +0,0 @@ -[func_test] -# sample config -auth_host = 127.0.0.1 -auth_port = 8080 -auth_ssl = no -auth_prefix = /auth/ -## sample config for Swift with Keystone -#auth_version = 2 -#auth_host = localhost -#auth_port = 5000 -#auth_ssl = no -#auth_prefix = /v2.0/ - -# GSWauth internal admin user configuration information -admin_key = gswauthkey -admin_user = .super_admin - -# Gluster setup information -devices = /mnt/gluster-object -gsmetadata_volume = gsmetadata - -# Primary functional test account (needs admin access to the account) -account = test -username = tester -password = testing - -# User on a second account (needs admin access to the account) -account2 = test2 -username2 = tester2 -password2 = testing2 - -# User on same account as first, but without admin access -username3 = tester3 -password3 = testing3 - -# Default constraints if not defined here, the test runner will try -# to set them from /etc/swift/swift.conf. If that file isn't found, -# the test runner will skip tests that depend on these values. -# Note that the cluster must have "sane" values for the test suite to pass. 
-#max_file_size = 5368709122 -#max_meta_name_length = 128 -#max_meta_value_length = 256 -#max_meta_count = 90 -#max_meta_overall_size = 4096 -#max_object_name_length = 1024 -#container_listing_limit = 10000 -#account_listing_limit = 10000 -#max_account_name_length = 256 -#max_container_name_length = 256 -normalized_urls = True - -collate = C - -[unit_test] -fake_syslog = False - -[probe_test] -# check_server_timeout = 30 diff --git a/test/functional_auth/gswauth/test_gswauth.py b/test/functional_auth/gswauth/test_gswauth.py deleted file mode 100644 index 5219f13..0000000 --- a/test/functional_auth/gswauth/test_gswauth.py +++ /dev/null @@ -1,252 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -import unittest -from nose import SkipTest -from swift.common.bufferedhttp import http_connect_raw as http_connect -from test import get_config - -config = get_config('func_test') - -class TestGSWauth(unittest.TestCase): - - def _get_admin_headers(self): - return {'X-Auth-Admin-User': config['admin_user'], - 'X-Auth-Admin-Key': config['admin_key']} - - def _check_test_account_is_not_registered(self): - # check account exists - path = '%sv2/%s' % (config['auth_prefix'], config['account']) - - headers = self._get_admin_headers() - headers.update({'Content-Length': '0'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'GET', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 404) - - def _register_test_account(self): - # create account in swauth (not a swift account) - # This current version only supports one account per volume - # and the account name is the same as the volume name - # still an account must be created with swauth to map - # swauth accounts with swift accounts - path = '%sv2/%s' % (config['auth_prefix'], config['account']) - headers = self._get_admin_headers() - headers.update({'Content-Length': '0'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 201) - - def _deregister_test_account(self): - # delete account in swauth (not a swift account) - # @see _register_test_account - path = '%sv2/%s' % (config['auth_prefix'], config['account']) - headers = self._get_admin_headers() - headers.update({'Content-Length': '0'}) - conn = http_connect(config['auth_host'], config['auth_port'], - 'DELETE', path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 204) - - def test_register_account(self): - # check and register account - self._check_test_account_is_not_registered() - self._register_test_account() - - try: - # list account - path = '%sv2/%s' % (config['auth_prefix'], config['account']) - headers = self._get_admin_headers() - conn = http_connect(config['auth_host'], config['auth_port'], - 'GET', path, headers) - resp = conn.getresponse() - body = resp.read() - info = json.loads(body) - 
self.assertEqual(info['account_id'], 'AUTH_test') - self.assertTrue(resp.status == 200) - - finally: - # de-register account - self._deregister_test_account() - - def test_add_user(self): - # check and register account - self._check_test_account_is_not_registered() - self._register_test_account() - - # create user - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - config['username']) - headers = self._get_admin_headers() - headers.update({'X-Auth-User-Key': config['password'], - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 201) - - try: - # list user - headers = self._get_admin_headers() - conn = http_connect(config['auth_host'], config['auth_port'], - 'GET', path, headers) - resp = conn.getresponse() - body = resp.read() - self.assertEqual(body, '{"groups": [{"name": "test:tester"}, {"name":' - ' "test"}, {"name": ".admin"}], "auth": "plaintext:testing"}') - self.assertTrue(resp.status == 200) - - finally: - try: - # delete user - headers = self._get_admin_headers() - conn = http_connect(config['auth_host'], config['auth_port'], - 'DELETE', path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 204) - - finally: - # de-register account - self._deregister_test_account() - - def test_register_invalid_account(self): - # invalid account - path = '%sv2/%s' % (config['auth_prefix'], '.test') - headers = self._get_admin_headers() - headers.update({'Content-Length': '0'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 400) - - def test_add_invalid_user(self): - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - '.invaliduser') - headers = self._get_admin_headers() - headers.update({'X-Auth-User-Key': config['password'], - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 400) - - def test_register_account_without_admin_rights(self): - path = '%sv2/%s' % (config['auth_prefix'], config['account']) - headers = {'X-Auth-Admin-User': config['admin_user']} - headers.update({'Content-Length': '0'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 401) - - def test_change_user_password(self): - # check and register account - self._check_test_account_is_not_registered() - self._register_test_account() - - try: - # create user - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - config['username']) - headers = self._get_admin_headers() - headers.update({'X-Auth-User-Key': config['password'], - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - print "resp creating user %s" % resp.status - self.assertTrue(resp.status == 201) - - # change password - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - config['username']) - headers = self._get_admin_headers() - headers.update({'X-Auth-User-Key': 'newpassword', - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - print "resp 
changing password %s" % resp.status - self.assertTrue(resp.status == 201) - finally: - try: - # delete user - headers = self._get_admin_headers() - conn = http_connect(config['auth_host'], config['auth_port'], - 'DELETE', path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 204) - - finally: - # de-register account - self._deregister_test_account() - - def test_change_user_password_without_admin_rights(self): - # check and register account - self._check_test_account_is_not_registered() - self._register_test_account() - - try: - # create user - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - config['username']) - headers = self._get_admin_headers() - headers.update({'X-Auth-User-Key': config['password'], - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - print "resp creating user %s" % resp.status - self.assertTrue(resp.status == 201) - - # attempt to change password - path = '%sv2/%s/%s' % (config['auth_prefix'], config['account'], - config['username']) - headers = {'X-Auth-Admin-User': - config['account'] + ':' + config['username'], - 'X-Auth-Admin-Key': config['password']} - headers.update({'X-Auth-User-Key': 'newpassword', - 'Content-Length': '0', - 'X-Auth-User-Admin': 'true'}) - conn = http_connect(config['auth_host'], config['auth_port'], 'PUT', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 201) - - finally: - try: - # delete user - headers = self._get_admin_headers() - conn = http_connect(config['auth_host'], config['auth_port'], - 'DELETE', path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 204) - - finally: - # de-register account - self._deregister_test_account() diff --git a/test/functional_auth/gswauth/test_gswauth_cli.py b/test/functional_auth/gswauth/test_gswauth_cli.py deleted file mode 100644 index bc55ed9..0000000 --- a/test/functional_auth/gswauth/test_gswauth_cli.py +++ /dev/null @@ -1,826 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
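For reference, the admin API pattern that the deleted test_gswauth.py above exercises boils down to a handful of PUT/GET/DELETE requests against /auth/v2/... paths. The sketch below mirrors those tests (same helper import; host, port and keys taken from the deleted test.conf) and is illustrative rather than authoritative:

    # Sketch of the gswauth admin calls exercised by test_gswauth.py above.
    from swift.common.bufferedhttp import http_connect_raw as http_connect

    auth_host, auth_port, auth_prefix = '127.0.0.1', 8080, '/auth/'
    admin_headers = {'X-Auth-Admin-User': '.super_admin',
                     'X-Auth-Admin-Key': 'gswauthkey',
                     'Content-Length': '0'}

    # Register the account backing the "test" volume (expects 201 Created).
    conn = http_connect(auth_host, auth_port, 'PUT',
                        '%sv2/%s' % (auth_prefix, 'test'), admin_headers)
    assert conn.getresponse().status == 201

    # Add an account admin user test:tester (expects 201 Created).
    user_headers = dict(admin_headers)
    user_headers.update({'X-Auth-User-Key': 'testing',
                         'X-Auth-User-Admin': 'true'})
    conn = http_connect(auth_host, auth_port, 'PUT',
                        '%sv2/%s/%s' % (auth_prefix, 'test', 'tester'),
                        user_headers)
    assert conn.getresponse().status == 201

    # Clean up: delete the user, then de-register the account (expects 204).
    for path in ('%sv2/%s/%s' % (auth_prefix, 'test', 'tester'),
                 '%sv2/%s' % (auth_prefix, 'test')):
        conn = http_connect(auth_host, auth_port, 'DELETE', path, admin_headers)
        assert conn.getresponse().status == 204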
- -import unittest -from nose import SkipTest -import commands -import os -from test import get_config - -config = get_config('func_test') - -class Utils: - - @classmethod - def swauthPrep(self,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - return commands.getstatusoutput('gswauth-prep -A %s -U %s -K %s' % (authurl, user, key)) - - @classmethod - def addAccount(self,account_name,suffix=None, authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if suffix is not None: - return commands.getstatusoutput('gswauth-add-account %s -s %s -A %s -U %s -K %s' % (account_name, suffix, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-add-account %s -A %s -U %s -K %s' % (account_name, authurl, user, key)) - - @classmethod - def deleteAccount(self,account_name,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - return commands.getstatusoutput('gswauth-delete-account %s -A %s -U %s -K %s' % (account_name, authurl, user, key)) - - @classmethod - def listAccounts(self,listtype=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if listtype is not None: - return commands.getstatusoutput('gswauth-list %s -A %s -U %s -K %s' % (listtype, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-list -A %s -U %s -K %s' % (authurl, user, key)) - - @classmethod - def listUsers(self,account_name,listtype=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if listtype is not None: - return commands.getstatusoutput('gswauth-list %s %s -A %s -U %s -K %s'% (account_name, listtype, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-list %s -A %s -U %s -K %s'% (account_name, authurl, user, key)) - - @classmethod - def addAdminUser(self,account_name,username,password,suffix=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if suffix is not None: - return commands.getstatusoutput('gswauth-add-user -a %s %s %s -s %s -A %s -U %s -K %s'% (account_name, username, password, suffix, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-add-user -a %s %s %s -A %s -U %s -K %s'% (account_name, username, password, authurl, user, key)) - - @classmethod - def addUser(self,account_name,username,password,suffix=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if suffix is not None: - return commands.getstatusoutput('gswauth-add-user %s %s %s -s %s -A %s -U %s -K %s'% (account_name, username, password, suffix, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-add-user %s %s %s -A %s -U %s -K %s'% (account_name, username, password, authurl, user, key)) - - @classmethod - def addResellerAdminUser(self,account_name,username,password,suffix=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if suffix is not None: - return commands.getstatusoutput('gswauth-add-user -r %s %s %s -s %s -A %s -U %s -K %s'% (account_name, username, password, suffix, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-add-user -r %s %s %s -A %s -U %s -K %s'% (account_name, username, password, authurl, user, key)) - - @classmethod - def deleteUser(self,account_name,username,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - return 
commands.getstatusoutput('gswauth-delete-user %s %s -A %s -U %s -K %s'% (account_name, username, authurl, user, key)) - - @classmethod - def listUserGroups(self,account_name,username,listtype=None,authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - if listtype is not None: - return commands.getstatusoutput('gswauth-list %s %s %s -A %s -U %s -K %s'% (account_name, username, listtype, authurl, user, key)) - else: - return commands.getstatusoutput('gswauth-list %s %s %s -A %s -U %s -K %s'% (account_name, username, listtype, authurl, user, key)) - - @classmethod - def cleanToken(self,option=None,value=None,authurl='http://127.0.0.1:8080/auth/', key=config['admin_key']): - if option is None and value is None: - return commands.getstatusoutput('gswauth-cleanup-tokens -A %s -K %s'% (authurl, key)) - elif option is not None and value is None: - return commands.getstatusoutput('gswauth-cleanup-tokens --%s -A %s -K %s'% (option, authurl, key)) - else: - return commands.getstatusoutput('gswauth-cleanup-tokens --%s %s -A %s -K %s'% (option, value, authurl, key)) - - @classmethod - def setAccountService(self, account, service, name, value, authurl='http://127.0.0.1:8080/auth/',user=config['admin_user'],key=config['admin_key']): - return commands.getstatusoutput('gswauth-set-account-service %s %s %s %s -A %s -U %s -K %s'% (account, service, name, value, authurl, user, key)) - - @classmethod - def cleanAll(self): - commands.getstatusoutput('sudo rm -rf '+os.path.join(config['devices'], config['gsmetadata_volume'], '*')) - return commands.getstatusoutput('sudo rm -rf '+os.path.join(config['devices'], config['gsmetadata_volume'], '.*')) - - -class TestSwauthPrep(unittest.TestCase): - - def setUp(self): - pass - - def tearDown(self): - Utils.cleanAll() - - def testSwauthPrep(self): - (status,output)=Utils.swauthPrep() - self.assertEqual(status, 0, 'swauth prep failed with valid credentials'+output) - - (status,output)=Utils.swauthPrep(key='') - self.assertEqual('Usage' in output,True, 'Invalid swauth-prep request accepted(no key provided): '+output) - - (status,output)=Utils.swauthPrep(key='notavalidkey') - self.assertNotEqual(status, 0, 'Invalid swauth-prep request accepted(wrong key provided):'+output) - self.assertEqual('gswauth preparation failed: 401 Unauthorized: Invalid user/key provided' \ - in output,True, 'Invalid swauth-prep request accepted: '+output) - - (status,output)=Utils.swauthPrep(authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid swauth-prep request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.swauthPrep(authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid swauth-prep request accepted(wrong admin-url provided): %s' % output) - #TODO:More cases for invalid url and admin user - - def testAddAccountWithoutSwauthPrep(self): - #Try to add account without running gswauth-prep - Utils.cleanAll() - (status,output)=Utils.addAccount('test') - self.assertNotEqual(status, 0, 'account added without running gswauth-prep '+output) - self.assertEqual('Account creation failed: 500 Server Error' \ - in output,True, 'account added without running gswauth-prep '+output) - - -class TestAccount(unittest.TestCase): - - def setUp(self): - (status,output)=Utils.swauthPrep() - self.assertEqual(status, 0, 'setup swauth-prep failed'+output) - - def tearDown(self): - Utils.cleanAll() - - def 
setTestAccUserEnv(self): - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test','re_admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test','admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addUser('test','tester','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - - def setTest2AccUserEnv(self): - (status,output)=Utils.addAccount('test2') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test2','re_admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test2','admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addUser('test2','tester','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - - def testAddAccount(self): - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'account creation failed'+output) - - (status,output)=Utils.addAccount('accountvolumedoesnotexist') - self.assertEqual(status, 0, 'account creation failed std err was: '+output) - - (status,output)=Utils.addAccount('testnokey',key='') - self.assertEqual('Usage:' in output, True, 'Invalid account creation request accepted : '+output) - - (status,output)=Utils.addAccount('testinvalidkey',key='invalidkey') - self.assertEqual('Account creation failed: 401 Unauthorized: Invalid user/key provided' \ - in output,True, 'Invalid account creation request accepted: '+output) - - (status,output)=Utils.addAccount('test2', authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid account creation request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.addAccount('test2', authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid account creation request accepted(wrong admin-url provided): %s' % output) - - def testAddAccountNonSuperAdminUsers(self): - #set test account with all types of user - self.setTestAccUserEnv() - #try to add another account with all type of users - (status,output)=Utils.addAccount('accbyreselleradmin',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'account creation failed with re_admin user: '+output) - - (status,output)=Utils.addAccount('accbyadmin',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 'account creation success with admin user: '+output) - self.assertEqual('Account creation failed: 403 Forbidden: Insufficient privileges' in output,True, 'account creation success with admin user: '+output) - - (status,output)=Utils.addAccount('accbyuser',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'account creation success with regular user: '+output) - self.assertEqual('Account creation failed: 403 Forbidden: Insufficient privileges' \ - in output,True, 'account creation success with regular user: '+output) - - def testDeleteAccount(self): - #add test account with no users - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'account creation failed for test account'+output) - - #set test2 account with all type of users - self.setTest2AccUserEnv() - - 
#valid request to delete an account with no users - (status,output)=Utils.deleteAccount('test') - self.assertEqual(status, 0, 'account deletion failed for test account'+output) - - #Invalid request to delete an account with users - (status,output)=Utils.deleteAccount('test2') - self.assertNotEqual(status, 0, 'account deletion succeeded for acc with active users'+output) - self.assertEqual('Delete account failed: 409 Conflict: Account test2 contains active users. Delete all users first.' \ - in output,True, 'account deletion failed for test account'+output) - - #delete all users in above account and then try again - (status,output) = Utils.deleteUser('test2','tester') - self.assertEqual(status, 0, 'setTestDeleteAccountEnv'+output) - - (status,output) = Utils.deleteUser('test2','admin') - self.assertEqual(status, 0, 'setTestDeleteAccountEnv'+output) - - (status,output) = Utils.deleteUser('test2','re_admin') - self.assertEqual(status, 0, 'setTestDeleteAccountEnv'+output) - - (status,output)=Utils.deleteAccount('test2') - self.assertEqual(status, 0, 'account deletion failed for test2 account'+output) - - (status,output)=Utils.deleteAccount('accountdoesnotexist') - self.assertNotEqual(status, 0, 'account deletion failed for accountdoesnotexist'+output) - self.assertEqual('Delete account failed: 404 Not Found: Account accountdoesnotexist does not exist' in output,True, 'account deletion failed for test account'+output) - - (status,output)=Utils.deleteAccount('test3', authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid deletion request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.deleteAccount('test3', authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid deletion request accepted(wrong admin-url provided): %s' % output) - - def testDeleteAccountNonSuperAdminUsers(self): - #set test account with all types of user - self.setTestAccUserEnv() - - #try to add another account with all type of users - Utils.addAccount('accbysuperadminforreadmin') - (status,output)=Utils.deleteAccount('accbysuperadminforreadmin',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'account deletion failed with re_admin user: '+output) - - Utils.addAccount('accbysuperadminforadmin') - (status,output)=Utils.deleteAccount('accbysuperadminforadmin',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 'account deletion success with admin user: '+output) - self.assertEqual('Delete account failed: 403 Forbidden: Insufficient privileges' \ - in output,True, 'account deletion success with admin user: '+output) - - Utils.addAccount('accbysuperadminforuser') - (status,output)=Utils.deleteAccount('accbysuperadminforuser',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'account creation success with regular user: '+output) - self.assertEqual('Delete account failed: 403 Forbidden: Insufficient privileges' \ - in output,True, 'account deletion success with regular user: '+output) - - def testListAcounts(self): - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'account creation failed'+output) - - (status,output)=Utils.listAccounts() - self.assertEqual(output, - '+----------+\n| Accounts |\n+----------+\n| test |\n+----------+', - 'swauth-list failed:\n%s' % output) - - (status,output)=Utils.listAccounts(authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, 
True, - 'Invalid list request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.listAccounts(authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid list request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.listAccounts('-j') - self.assertEqual(output, - '{"accounts": [{"name": "test"}]}', - 'swauth-list failed for json option:\n%s' % output) - - (status,output)=Utils.listAccounts('-p') - self.assertEqual(output, - 'test', - 'swauth-list failed for plain-text option:\n%s' % output) - - def testListAcountsNonSuperAdminUsers(self): - #set test acc with all type of users - self.setTestAccUserEnv() - - (status,output)=Utils.listAccounts(user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'account listing failed with re_admin user: '+output) - self.assertEqual(output, - '+----------+\n| Accounts |\n+----------+\n| test |\n+----------+', - 'swauth-list failed:\n%s' % output) - - (status,output)=Utils.listAccounts(user='test:admin',key='testing') - self.assertNotEqual(status, 0, 'account listing success with admin user: '+output) - self.assertEqual('List failed: 403 Forbidden: Insufficient privileges' \ - in output,True, 'account listing success with admin user: '+output) - - (status,output)=Utils.listAccounts(user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'account listing success with regular user: '+output) - self.assertEqual('403 Forbidden' in output,True, 'account listing success with regular user: '+output) - -class TestUser(unittest.TestCase): - - def setUp(self): - (status,output)=Utils.swauthPrep() - self.assertEqual(status, 0, 'setup swauth-prep failed'+output) - - def tearDown(self): - Utils.cleanAll() - - def setTestAccUserEnv(self): - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test','re_admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test','admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addUser('test','tester','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - - def setTest2AccUserEnv(self): - (status,output)=Utils.addAccount('test2') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test2','re_admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test2','admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addUser('test2','tester','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - - def testaddUser(self): - #add test acc - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'setTestaddAdminUserEnv (add test account) failed'+output) - - (status,output) = Utils.addAdminUser('test','testadminuser','testadminuser') - self.assertEqual(status, 0, 'user addition failed'+output) - - (status,output) = Utils.addUser('test','testuser','testuser') - self.assertEqual(status, 0, 'user addition failed'+output) - - (status,output) = Utils.addResellerAdminUser('test','testreselleradminuser','testreselleradminuser') - self.assertEqual(status, 0, 'user addition failed'+output) - - (status,output) = 
Utils.addAdminUser('test', '', '') - self.assertEqual('Usage:' in output, True, 'Invalid user creation request accepted: '+output) - - (status,output) = Utils.addAdminUser('test', 'testcli', '') - self.assertEqual('Usage:' in output, True, 'Invalid user creation request accepted'+output) - - (status,output) = Utils.addAdminUser('test', '', 'testcli') - self.assertEqual('Usage:' in output, True, 'Invalid user creation request accepted'+output) - - (status,output) = Utils.addAdminUser('accountdoesnotexist', 'testcli', 'testcli') - self.assertEqual(status, 0, 'User creation request failed, where accountdoesnotexist: '+output) - - (status,output)=Utils.addAdminUser('test', 'admin2', 'adminpwd', authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid add user request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.addAdminUser('test', 'admin2', 'adminpwd', authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid add user request accepted(wrong admin-url provided): %s' % output) - - def testAddUserNonSuperAdminUsers (self): - #setup test,testr accounts with all user types - self.setTestAccUserEnv() - self.setTest2AccUserEnv() - - #try to add another reseller_admin users with all type of users - #decision has been made to return 401 in place of 403 due to - #performance related reasons, in scenarios tested below - - (status,output)=Utils.addResellerAdminUser('test', 're_adminwithreadmin', 'testing', user='test:re_admin', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with re_admin user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with re_admin user: '+output) - - (status,output)=Utils.addResellerAdminUser('test', 're_adminwithadmin', 'testing', user='test:admin', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with admin user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with admin user: '+output) - - (status,output)=Utils.addResellerAdminUser('test', 're_adminwithuser', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with regular user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with regular user: '+output) - - (status,output)=Utils.addResellerAdminUser('test2', 're_adminwithreadmin', 'testing', user='test:re_admin', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with re_admin user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with re_admin user: '+output) - - (status,output)=Utils.addResellerAdminUser('test2', 're_adminwithadmin', 'testing', user='test:admin', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with admin user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with admin user: '+output) - - (status,output)=Utils.addResellerAdminUser('test2', 're_adminwithuser', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 're_admin creation succeeded with regular user: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin creation succeeded with regular user: '+output) - - #update the password with own credential - (status,output)=Utils.addResellerAdminUser('test', 're_adminwithreadmin', 
'testingupdated', user='test:re_admin', key='testing') - self.assertNotEqual(status, 0, 're_admin update password succeeded with own credentials: '+output) - self.assertEqual('401 Unauthorized' in output,True, 're_admin update password succeeded with own credentials: '+output) - - #try to add another admin users with all type of users - (status,output)=Utils.addAdminUser('test', 'adminwithreadmin', 'testing', user='test:re_admin', key='testing') - self.assertEqual(status, 0, 'admin creation failed with re_admin user: '+output) - - (status,output)=Utils.addAdminUser('test', 'adminwithreadmin', 'testing', user='test:admin', key='testing') - self.assertEqual(status, 0, 'admin creation failed with admin user: '+output) - - (status,output)=Utils.addAdminUser('test', 'adminwithuser', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 'admin creation succeeded with regular user: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin creation succeeded with regular user: '+output) - - (status,output)=Utils.addAdminUser('test2', 'adminwithreadminofotheraccount', 'testing', user='test:re_admin', key='testing') - self.assertEqual(status, 0, 'admin creation failed with re_admin user of other account: '+output) - - (status,output)=Utils.addAdminUser('test2', 'adminwithadminofotheraccount', 'testing', user='test:admin', key='testing') - self.assertNotEqual(status, 0, 'admin creation succeeded with admin user of other acc: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin creation succeeded with admin user of other acc: '+output) - - (status,output)=Utils.addAdminUser('test2', 'adminwithuserfotheraccount', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 'admin creation succeeded with user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin creation succeeded with user of other account: '+output) - - #update password of own admin account - (status,output)=Utils.addAdminUser('test', 'admin', 'testingupdated', user='test:admin', key='testing') - self.assertEqual(status, 0, 'admin password update failed with own credentials: '+output) - #undo above password change - (status,output)=Utils.addAdminUser('test', 'admin', 'testing', user='test:admin', key='testingupdated') - self.assertEqual(status, 0, 'admin password update failed with own credentials: '+output) - - #try to add another regular users with all type of users - (status,output)=Utils.addUser('test', 'adduserwithre_admin', 'testing', user='test:re_admin', key='testing') - self.assertEqual(status, 0, 'regular user creation with re_admin credentials failed: '+output) - - (status,output)=Utils.addUser('test', 'adduserwithadmin', 'testing', user='test:admin', key='testing') - self.assertEqual(status, 0, 'regular user creation with admin credentials failed: '+output) - - (status,output)=Utils.addUser('test', 'adduserwithuser', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 'regular user creation with regular user credentials succeded: '+output) - self.assertEqual('403 Forbidden' in output,True, 'regular user creation with regular user credentials succeded: '+output) - - (status,output)=Utils.addUser('test2', 'adduserwithreadminofotheraccount', 'testing', user='test:re_admin', key='testing') - self.assertEqual(status, 0, 'user creation failed with re_admin user of other account: '+output) - - (status,output)=Utils.addUser('test2', 'adduserwithadminofotheraccount', 'testing', user='test:admin', 
key='testing') - self.assertNotEqual(status, 0, 'user creation succeeded with admin user of other acc: '+output) - self.assertEqual('403 Forbidden' in output,True, 'user creation succeeded with admin user of other acc: '+output) - - (status,output)=Utils.addUser('test2', 'adminwithuserfotheraccount', 'testing', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 'user creation succeeded with user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'user creation succeeded with user of other account: '+output) - - def testDeleteUser(self): - #set test acc - self.setTestAccUserEnv() - - (status,output) = Utils.deleteUser('test','admin') - self.assertEqual(status, 0, 'valid user deletion failed:'+output) - - (status,output) = Utils.deleteUser('test','tester') - self.assertEqual(status, 0, 'valid user deletion failed:'+output) - - (status,output) = Utils.deleteUser('test','re_admin') - self.assertEqual(status, 0, 'valid user deletion failed:'+output) - - (status,output) = Utils.deleteUser('test', '') - self.assertEqual('Usage:' in output, True, 'Invalid user deletion request accepted : '+output) - - (status,output) = Utils.deleteUser('','testcli') - self.assertEqual('Usage:' in output, True, 'Invalid user deletion request accepted : '+output) - - (status,output) = Utils.deleteUser('test', 'userdoesnotexist') - self.assertNotEqual(status, 0, 'Invalid user deletion request accepted,userdoesnotexist:'+output) - - (status,output) = Utils.deleteUser('accountisnothere', 'testcli') - self.assertNotEqual(status, 0, 'Invalid user deletion request accepted, accountdoesnotexist:'+output) - #TODO:more testcases? - (status,output)=Utils.deleteUser('test', 'admin2', authurl='http://127.0.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid delete user request accepted(wrong admin-url provided): %s' % output) - - (status,output)=Utils.deleteUser('test', 'admin2', authurl='http://127.0.1:80/auth/') - self.assertEqual('Check that the admin_url is valid' in output, True, - 'Invalid delete user request accepted(wrong admin-url provided): %s' % output) - - def testDeleteUserNonSuperAdminUsers(self): - #set test, test2 acc with all type of users - self.setTestAccUserEnv() - self.setTest2AccUserEnv() - #try to delete reseller_admin users with all type of users - Utils.addResellerAdminUser('test', 're_admintobedeletedbyotherusers1', 'testing') - (status,output) = Utils.deleteUser('test', 're_admintobedeletedbyotherusers1',user='test:re_admin',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with re_admin user: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with re_admin user: '+output) - - Utils.addResellerAdminUser('test', 're_admintobedeletedbyotherusers2', 'testing') - (status,output) = Utils.deleteUser('test', 're_admintobedeletedbyotherusers2',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with admin user: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with admin user: '+output) - - Utils.addResellerAdminUser('test', 're_admintobedeletedbyotherusers3', 'testing') - (status,output) = Utils.deleteUser('test', 're_admintobedeletedbyotherusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with regular user: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with user: '+output) - 
- Utils.addResellerAdminUser('test2', 're_admintobedeletedbyotheraccountusers1', 'testing') - (status,output) = Utils.deleteUser('test2', 're_admintobedeletedbyotheraccountusers1',user='test:re_admin',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with re_admin user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with re_admin user of other account: '+output) - - Utils.addResellerAdminUser('test2', 're_admintobedeletedbyotheraccountusers2', 'testing') - (status,output) = Utils.deleteUser('test2', 're_admintobedeletedbyotheraccountusers2',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with admin user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with admin user of other account: '+output) - - Utils.addResellerAdminUser('test2', 're_admintobedeletedbyotheraccountusers3', 'testing') - (status,output) = Utils.deleteUser('test2', 're_admintobedeletedbyotheraccountusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 're_admin deletion succeeded with regular user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 're_admin deletion succeeded with user of other account: '+output) - - #delete/de-active own re_admin account - Utils.addAdminUser('test', 're_admintobedeletedbyitself', 'testing') - (status,output) = Utils.deleteUser('test', 're_admintobedeletedbyitself',user='test:re_admintobedeletedbyitself',key='testing') - self.assertEqual(status, 0, 're_admin deletion failed with own credentials : '+output) - - #try to delete admin users with all type of users - Utils.addAdminUser('test', 'admintobedeletedbyotherusers1', 'testing') - (status,output) = Utils.deleteUser('test', 'admintobedeletedbyotherusers1',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'admin deletion failed with re_admin user: '+output) - - Utils.addAdminUser('test', 'admintobedeletedbyotherusers2', 'testing') - (status,output) = Utils.deleteUser('test', 'admintobedeletedbyotherusers2',user='test:admin',key='testing') - self.assertEqual(status, 0, 'admin deletion failed with admin user: '+output) - - Utils.addAdminUser('test', 'admintobedeletedbyotherusers3', 'testing') - (status,output) = Utils.deleteUser('test', 'admintobedeletedbyotherusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'admin deletion succeeded with regular user: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin deletion succeeded with regular user: '+output) - - Utils.addAdminUser('test2', 'admintobedeletedbyotheraccountusers1', 'testing') - (status,output) = Utils.deleteUser('test2', 'admintobedeletedbyotheraccountusers1',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'admin deletion failed with re_admin user of other account: '+output) - - Utils.addAdminUser('test2', 'admintobedeletedbyotheraccountusers2', 'testing') - (status,output) = Utils.deleteUser('test2', 'admintobedeletedbyotheraccountusers2',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 'admin deletion succeeded with admin user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin deletion succeeded with admin user of other account: '+output) - - Utils.addAdminUser('test2', 'admintobedeletedbyotheraccountusers3', 'testing') - (status,output) = Utils.deleteUser('test2', 
'admintobedeletedbyotheraccountusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'admin deletion succeeded with regular user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'admin deletion succeeded with regular user of other account: '+output) - - #delete/de-active own admin account - Utils.addAdminUser('test', 'admintobedeletedbyitself', 'testing') - (status,output) = Utils.deleteUser('test', 'admintobedeletedbyitself',user='test:admintobedeletedbyitself',key='testing') - self.assertEqual(status, 0, 'admin deletion failed with own credentials : '+output) - - #try to delete another regular users with all type of users - Utils.addUser('test', 'usertobedeletedbyotherusers1', 'testing') - (status,output) = Utils.deleteUser('test', 'usertobedeletedbyotherusers1',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'user deletion failed with re_admin user: '+output) - - Utils.addUser('test', 'usertobedeletedbyotherusers2', 'testing') - (status,output) = Utils.deleteUser('test', 'usertobedeletedbyotherusers2',user='test:admin',key='testing') - self.assertEqual(status, 0, 'user deletion failed with admin user: '+output) - - Utils.addUser('test', 'usertobedeletedbyotherusers3', 'testing') - (status,output) = Utils.deleteUser('test', 'usertobedeletedbyotherusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'user deletion succeeded with regular user: '+output) - self.assertEqual('403 Forbidden' in output,True, 'user deletion succeeded with regular user: '+output) - - Utils.addUser('test2', 'usertobedeletedbyotheraccountusers1', 'testing') - (status,output) = Utils.deleteUser('test2', 'usertobedeletedbyotheraccountusers1',user='test:re_admin',key='testing') - self.assertEqual(status, 0, 'user deletion failed with re_admin user of other account: '+output) - - Utils.addUser('test2', 'usertobedeletedbyotheraccountusers2', 'testing') - (status,output) = Utils.deleteUser('test2', 'usertobedeletedbyotheraccountusers2',user='test:admin',key='testing') - self.assertNotEqual(status, 0, 'user deletion succeeded with admin user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'user deletion succeeded with admin user of other account: '+output) - - Utils.addUser('test2', 'usertobedeletedbyotheraccountusers3', 'testing') - (status,output) = Utils.deleteUser('test2', 'usertobedeletedbyotheraccountusers3',user='test:tester',key='testing') - self.assertNotEqual(status, 0, 'user deletion succeeded with regular user of other account: '+output) - self.assertEqual('403 Forbidden' in output,True, 'user deletion succeeded with regular user of other account: '+output) - - #delete/de-active own admin account - Utils.addAdminUser('test', 'usertobedeletedbyitself', 'testing') - (status,output) = Utils.deleteUser('test', 'usertobedeletedbyitself',user='test:usertobedeletedbyitself',key='testing') - self.assertEqual(status, 0, 'user deletion failed with own credentials : '+output) - - def testChangeKey(self): - # Create account and users - (status, output) = Utils.addAccount('test') - self.assertEqual(status, 0, 'Account creation failed: ' + output) - - (status, output) = Utils.addAdminUser('test', 'admin', 'password') - self.assertEqual(status, 0, 'User addition failed: ' + output) - - (status, output) = Utils.addUser('test', 'user', 'password') - self.assertEqual(status, 0, 'User addition failed: ' + output) - - (status, output) = Utils.addResellerAdminUser('test', 'radmin', 'password') - 
self.assertEqual(status, 0, 'User addition failed: ' + output) - - # Change acccount admin password/key - (status, output) = Utils.addAdminUser('test', 'admin', 'new_password', user='test:admin', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Change regular user password/key - (status, output) = Utils.addUser('test', 'user', 'new_password', user='test:user', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Change reseller admin password/key - (status, output) = Utils.addResellerAdminUser('test', 'radmin', 'new_password', user='test:radmin', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # To verify that password was changed for real, re-run the above commands, but with the new password - # Change acccount admin password/key using the new password - (status, output) = Utils.addAdminUser('test', 'admin', 'password', user='test:admin', key='new_password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Change regular user password/key using the new password - (status, output) = Utils.addUser('test', 'user', 'password', user='test:user', key='new_password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Change reseller admin password/key using the new password - (status, output) = Utils.addResellerAdminUser('test', 'radmin', 'password', user='test:radmin', key='new_password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Make sure that regular user cannot upgrade to admin - (status, output) = Utils.addAdminUser('test', 'user', 'password', user='test:user', key='password') - self.assertEqual('User creation failed' in output, True, 'Update key failed: ' + output) - - # Make sure that regular user cannot upgrade to reseller_admin - (status, output) = Utils.addResellerAdminUser('test', 'user', 'password', user='test:user', key='password') - self.assertEqual('User creation failed' in output, True, 'Update key failed: ' + output) - - # Make sure admin cannot update himself to reseller_admin - (status, output) = Utils.addResellerAdminUser('test', 'admin', 'password', user='test:admin', key='password') - self.assertEqual('User creation failed' in output, True, 'Update key failed: ' + output) - - # Account admin changing regular user password/key - (status, output) = Utils.addUser('test', 'user', 'new_password', user='test:admin', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - # Verify by running the command with new password - (status, output) = Utils.addUser('test', 'user', 'password', user='test:user', key='new_password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Reseller admin changing regular user password/key - (status, output) = Utils.addUser('test', 'user', 'new_password', user='test:radmin', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - # Verify by running the command with new password - (status, output) = Utils.addUser('test', 'user', 'password', user='test:user', key='new_password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - - # Reseller admin changing account admin password/key - (status, output) = Utils.addAdminUser('test', 'admin', 'new_password', user='test:radmin', key='password') - self.assertEqual(status, 0, 'Update key failed: ' + output) - # Verify by running the command with new password - (status, output) = Utils.addAdminUser('test', 'admin', 'password', user='test:admin', key='new_password') - 
self.assertEqual(status, 0, 'Update key failed: ' + output) - - -class TestCleanUPToken(unittest.TestCase): - - def setUp(self): - (status,output)=Utils.swauthPrep() - self.assertEqual(status, 0, 'setup swauth-prep failed'+output) - - def tearDown(self): - Utils.cleanAll() - - def setTestAccUserEnv(self): - (status,output)=Utils.addAccount('test') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test','re_admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test','admin','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - (status,output)=Utils.addUser('test','tester','testing') - self.assertEqual(status, 0, 'test accUser creation failed env'+output) - - def setTest2AccUserEnv(self): - (status,output)=Utils.addAccount('test2') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addResellerAdminUser('test2','re_admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addAdminUser('test2','admin','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - (status,output)=Utils.addUser('test2','tester','testing') - self.assertEqual(status, 0, 'test2 accUser creation failed env'+output) - - def testCleanUPToken(self): - self.setTestAccUserEnv() - self.setTest2AccUserEnv() - - #cleanup various validation - (status,output)=Utils.cleanToken(key='') - self.assertNotEqual(status, 0, 'clean up success without key'+output) - self.assertEqual('Usage:' in output,True, 'clean up success without key: '+output) - - #validate the admin-user option is not working here - (status,output)=Utils.cleanToken(option='admin-user', value='.super_admin') - self.assertNotEqual(status, 0, 'clean up success with a username'+output) - self.assertEqual('Usage:' in output,True, 'clean up success with a username: '+output) - - (status,output)=Utils.cleanToken(key='noavalidsuperadminkey') - self.assertNotEqual(status, 0, 'clean up success with wrong super_admin key'+output) - self.assertEqual('401 Unauthorized' in output,True, 'clean up success with wrong super_admin key: '+output) - - #cleanup token with no options - (status,output)=Utils.cleanToken() - self.assertEqual(status, 0, 'clean up failed with no option'+output) - - #cleanup token with purge option - (status,output)=Utils.cleanToken(option='purge', value='test') - self.assertEqual(status, 0, 'clean up failed with purge option'+output) - - #cleanup token with purge option no valid account name - #TODO:review following https://bugs.launchpad.net/gluster-swift/+bug/1271555 - (status,output)=Utils.cleanToken(option='purge', value='accountnotvalid') - self.assertNotEqual(status, 0, 'clean up failed with purge option'+output) - - #cleanup token with purge-all option - (status,output)=Utils.cleanToken(option='purge-all') - self.assertEqual(status, 0, 'clean up failed with purge-all option'+output) - - #cleanup token with -v option - (status,output)=Utils.cleanToken(option='verbose') - self.assertEqual(status, 0, 'clean up failed with verbose option'+output) - self.assertEqual('GET .token_0' in output and 'GET .token_f' in output,True,\ - 'clean up success without key: '+output) - - #cleanup token with token-life option - (status,output)=Utils.cleanToken(option='token-life', value='500') - self.assertEqual(status, 0, 'clean up failed with token-life option'+output) - 
- #cleanup token with sleep option - (status,output)=Utils.cleanToken(option='sleep', value='500') - self.assertEqual(status, 0, 'clean up failed with sleep option'+output) - - #TODO:revisit below two cases after fix for - #https://bugs.launchpad.net/gluster-swift/+bug/1271550 - #cleanup token with token-life option non numeric value - (status,output)=Utils.cleanToken(option='token-life', value='notanumaric') - self.assertEqual('Usage:' in output, True, 'clean up success with token-life option non numeric value'+output) - - #cleanup token with sleep option non numeric value - (status,output)=Utils.cleanToken(option='sleep', value='notanumeric') - self.assertEqual('Usage:' in output, True, 'clean up success with sleep option non numeric value'+output) - - def testSetAccountService(self): - self.setTestAccUserEnv() - self.setTest2AccUserEnv() - - #set-account-service asset all valid value - (status,output)=Utils.setAccountService('test', 'storage', 'local', 'http://localhost:8080/v1/AUTH_test') - self.assertEqual(status, 0, 'set account service fails with valid input'+output) - (status,output)=Utils.listUsers('test', listtype='--json') - self.assertEqual('{"services": {"storage": {"default": "local", "local": "http://localhost:8080/v1/AUTH_test"}}' in output,True, \ - 'set account service success with valid input'+output) - - #invalid account - (status,output)=Utils.setAccountService('accountdoesnotexist', 'storage', 'local', 'http://localhost:8080/v1/AUTH_test') - self.assertNotEqual(status, 0, 'set account service success with invalid accountname'+output) - self.assertEqual('Service set failed: 404 Not Found' in output,True, 'set account service success with invalid accountname'+output) - - #service name other than storage - (status,output)=Utils.setAccountService('test', 'st', 'local', 'http://localhost:8080/v1/AUTH_test') - self.assertEqual(status, 0, 'set account service success with service name other than storage'+output) - (status,output)=Utils.listUsers('test', listtype='--json') - self.assertEqual('"st": {"local": "http://localhost:8080/v1/AUTH_test"}}' in output,True, \ - 'set account service success with service name other than storage'+output) - - #name other than local - (status,output)=Utils.setAccountService('test', 'storage', 'notlocal', 'http://localhost:8080/v1/AUTH_test') - self.assertEqual(status, 0, 'set account service with name other than local failed'+output) - (status,output)=Utils.listUsers('test', listtype='--json') - self.assertEqual(' "notlocal": "http://localhost:8080/v1/AUTH_test"}' in output,True, \ - 'set account service with name other than local failed'+output) - - #set default to point notlocal - (status,output)=Utils.setAccountService('test', 'storage', 'default', 'notlocal') - self.assertEqual(status, 0, 'set account service set default to local failed'+output) - (status,output)=Utils.listUsers('test', listtype='--json') - self.assertEqual(' {"default": "notlocal", "notlocal": "http://localhost:8080/v1/AUTH_test"' in output,True, \ - 'set account service set default to local failed'+output) - - #try to set account service with users other than .super_admin - #reseller_admin - (status,output)=Utils.setAccountService('test', 'storage', 'local', 'http://localhost:8080/v1/AUTH_test', user='test:re_admin', key='testing') - self.assertEqual(status, 0, 'set account service fails re_admin user cred'+output) - - #admin user - (status,output)=Utils.setAccountService('test', 'storage', 'local', 'http://localhost:8080/v1/AUTH_test', user='test:admin', 
key='testing') - self.assertNotEqual(status, 0, 'set account service success with admin user cred'+output) - self.assertEqual('403 Forbidden' in output,True, 'set account service success with admin user cred'+output) - - #regular user - (status,output)=Utils.setAccountService('test', 'storage', 'local', 'http://localhost:8080/v1/AUTH_test', user='test:tester', key='testing') - self.assertNotEqual(status, 0, 'set account service success with regular user cred'+output) - self.assertEqual('403 Forbidden' in output,True, 'set account service success with admin user cred'+output) - diff --git a/test/functional_auth/keystone/conf/account-server.conf b/test/functional_auth/keystone/conf/account-server.conf deleted file mode 100644 index 4996367..0000000 --- a/test/functional_auth/keystone/conf/account-server.conf +++ /dev/null @@ -1,32 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the account-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6012 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = account-server - -[app:account-server] -use = egg:gluster_swift#account -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the account server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off diff --git a/test/functional_auth/keystone/conf/container-server.conf b/test/functional_auth/keystone/conf/container-server.conf deleted file mode 100644 index 122d97e..0000000 --- a/test/functional_auth/keystone/conf/container-server.conf +++ /dev/null @@ -1,35 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the container-server workers -# start, you can *consider* setting this value to "false" to reduce the -# per-request overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6011 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = container-server - -[app:container-server] -use = egg:gluster_swift#container -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the container server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - -#enable object versioning for functional test -allow_versions = on diff --git a/test/functional_auth/keystone/conf/fs.conf b/test/functional_auth/keystone/conf/fs.conf deleted file mode 100644 index b06a854..0000000 --- a/test/functional_auth/keystone/conf/fs.conf +++ /dev/null @@ -1,19 +0,0 @@ -[DEFAULT] -# -# IP address of a node in the GlusterFS server cluster hosting the -# volumes to be served via Swift API. -mount_ip = localhost - -# Performance optimization parameter. 
When turned off, the filesystem will -# see a reduced number of stat calls, resulting in substantially faster -# response time for GET and HEAD container requests on containers with large -# numbers of objects, at the expense of an accurate count of combined bytes -# used by all objects in the container. For most installations "off" works -# fine. -# -# *** Keep on for Functional Tests *** -accurate_size_in_listing = on - -# *** Keep on for Functional Tests *** -container_update_object_count = on -account_update_container_count = on diff --git a/test/functional_auth/keystone/conf/object-server.conf b/test/functional_auth/keystone/conf/object-server.conf deleted file mode 100644 index 3cb9ead..0000000 --- a/test/functional_auth/keystone/conf/object-server.conf +++ /dev/null @@ -1,48 +0,0 @@ -[DEFAULT] -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the object-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -# -# *** Keep false for Functional Tests *** -mount_check = false -bind_port = 6010 -# -# Maximum number of clients one worker can process simultaneously (it will -# actually accept N + 1). Setting this to one (1) will only handle one request -# at a time, without accepting another request concurrently. By increasing the -# number of workers to a much higher value, one can prevent slow file system -# operations for one request from starving other requests. -max_clients = 1024 -# -# If not doing the above, setting this value initially to match the number of -# CPUs is a good starting point for determining the right value. -workers = 1 -# Override swift's default behaviour for fallocate. -disable_fallocate = true - -[pipeline:main] -pipeline = object-server - -[app:object-server] -use = egg:gluster_swift#object -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# For performance, after ensuring things are running in a stable manner, you -# can turn off normal request logging for the object server to reduce the -# per-request overhead and unclutter the log files. Warnings and errors will -# still be logged. -log_requests = off -# -# Adjust this value to match the stripe width of the underlying storage array -# (not the stripe element size). This will provide a reasonable starting point -# for tuning this value. -disk_chunk_size = 65536 -# -# Adjust this value match whatever is set for the disk_chunk_size initially. -# This will provide a reasonable starting point for tuning this value. -network_chunk_size = 65556 diff --git a/test/functional_auth/keystone/conf/proxy-server.conf b/test/functional_auth/keystone/conf/proxy-server.conf deleted file mode 100644 index 4838c46..0000000 --- a/test/functional_auth/keystone/conf/proxy-server.conf +++ /dev/null @@ -1,97 +0,0 @@ -[DEFAULT] -bind_port = 8080 -user = root -# Consider using 1 worker per CPU -workers = 1 - -[pipeline:main] -#pipeline = catch_errors healthcheck proxy-logging cache tempauth proxy-logging proxy-server -pipeline = catch_errors healthcheck proxy-logging cache tempurl authtoken keystoneauth proxy-logging proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy -log_facility = LOG_LOCAL1 -log_level = WARN -# The API allows for account creation and deletion, but since Gluster/Swift -# automounts a Gluster volume for a given account, there is no way to create -# or delete an account. So leave this off. 
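The keystone flavour of proxy-server.conf being removed here routes requests through the authtoken and keystoneauth filters rather than tempauth, so a functional-test client first obtains a token from keystone and only then talks to the proxy on port 8080. Below is a minimal client-side sketch of that handshake, assuming the requests package and the keystone v2.0 endpoint and test credentials configured elsewhere in these conf files; get_scoped_token is an illustrative helper, not code from this repository.

import requests  # assumed to be available in the test virtualenv


def get_scoped_token(auth_url='http://localhost:5000/v2.0',
                     tenant='test', user='tester', password='testing'):
    """Fetch a keystone v2.0 token plus the object-store endpoint."""
    body = {'auth': {'tenantName': tenant,
                     'passwordCredentials': {'username': user,
                                             'password': password}}}
    access = requests.post(auth_url + '/tokens', json=body).json()['access']
    token = access['token']['id']
    storage_url = next(svc['endpoints'][0]['publicURL']
                       for svc in access['serviceCatalog']
                       if svc['type'] == 'object-store')
    return token, storage_url

The returned token is then sent as X-Auth-Token against the storage URL; the authtoken filter validates it with keystone before keystoneauth applies the operator_roles check configured below.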
-allow_account_management = false -account_autocreate = true -# Ensure the proxy server uses fast-POSTs since we don't need to make a copy -# of the entire object given that all metadata is stored in the object -# extended attributes (no .meta file used after creation) and no container -# sync feature to present. -object_post_as_copy = false -# Only need to recheck the account exists once a day -recheck_account_existence = 86400 -# May want to consider bumping this up if containers are created and destroyed -# infrequently. -recheck_container_existence = 60 -# Timeout clients that don't read or write to the proxy server after 5 -# seconds. -client_timeout = 5 -# Give more time to connect to the object, container or account servers in -# cases of high load. -conn_timeout = 5 -# For high load situations, once connected to an object, container or account -# server, allow for delays communicating with them. -node_timeout = 60 -# May want to consider bumping up this value to 1 - 4 MB depending on how much -# traffic is for multi-megabyte or gigabyte requests; perhaps matching the -# stripe width (not stripe element size) of your storage volume is a good -# starting point. See below for sizing information. -object_chunk_size = 65536 -# If you do decide to increase the object_chunk_size, then consider lowering -# this value to one. Up to "put_queue_length" object_chunk_size'd buffers can -# be queued to the object server for processing. Given one proxy server worker -# can handle up to 1,024 connections, by default, it will consume 10 * 65,536 -# * 1,024 bytes of memory in the worse case (default values). Be sure the -# amount of memory available on the system can accommodate increased values -# for object_chunk_size. -put_queue_depth = 10 - -[filter:catch_errors] -use = egg:swift#catch_errors - -[filter:proxy-logging] -use = egg:swift#proxy_logging - -[filter:healthcheck] -use = egg:swift#healthcheck - -[filter:tempauth] -use = egg:swift#tempauth -user_admin_admin = admin .admin .reseller_admin -user_d4dde08c621a4f0fb4cde0ac6a62aa0c_tester = testing .admin -user_test_tester = testing .admin -user_test2_tester2 = testing2 .admin -user_test_tester3 = testing3 - -[filter:authtoken] -paste.filter_factory = keystoneclient.middleware.auth_token:filter_factory -signing_dir = /etc/swift -# Keystone server info -auth_host = 127.0.0.1 -auth_port = 35357 -auth_protocol = http -# Swift server info -service_host = 127.0.0.1 -service_port = 8080 -admin_token = ADMIN -# Needed to support Swift container ACL -delay_auth_decision = true - -[filter:keystoneauth] -use = egg:swift#keystoneauth -operator_roles = admin -is_admin = true -cache = swift.cache - -[filter:cache] -use = egg:swift#memcache -# Update this line to contain a comma separated list of memcache servers -# shared by all nodes running the proxy-server service. -memcache_servers = localhost:11211 - -[filter:tempurl] -use = egg:swift#tempurl diff --git a/test/functional_auth/keystone/conf/swift.conf b/test/functional_auth/keystone/conf/swift.conf deleted file mode 100644 index ce9a4d0..0000000 --- a/test/functional_auth/keystone/conf/swift.conf +++ /dev/null @@ -1,85 +0,0 @@ -[DEFAULT] - - -[swift-hash] -# random unique string that can never change (DO NOT LOSE) -swift_hash_path_suffix = gluster - - -# The swift-constraints section sets the basic constraints on data -# saved in the swift cluster. - -[swift-constraints] - -# max_file_size is the largest "normal" object that can be saved in -# the cluster. 
This is also the limit on the size of each segment of -# a "large" object when using the large object manifest support. -# This value is set in bytes. Setting it to lower than 1MiB will cause -# some tests to fail. -# Default is 1 TiB = 2**30*1024 -max_file_size = 1099511627776 - - -# max_meta_name_length is the max number of bytes in the utf8 encoding -# of the name portion of a metadata header. - -#max_meta_name_length = 128 - - -# max_meta_value_length is the max number of bytes in the utf8 encoding -# of a metadata value - -#max_meta_value_length = 256 - - -# max_meta_count is the max number of metadata keys that can be stored -# on a single account, container, or object - -#max_meta_count = 90 - - -# max_meta_overall_size is the max number of bytes in the utf8 encoding -# of the metadata (keys + values) - -#max_meta_overall_size = 4096 - - -# max_object_name_length is the max number of bytes in the utf8 encoding of an -# object name: Gluster FS can handle much longer file names, but the length -# between the slashes of the URL is handled below. Remember that most web -# clients can't handle anything greater than 2048, and those that do are -# rather clumsy. - -max_object_name_length = 2048 - -# max_object_name_component_length (GlusterFS) is the max number of bytes in -# the utf8 encoding of an object name component (the part between the -# slashes); this is a limit imposed by the underlying file system (for XFS it -# is 255 bytes). - -max_object_name_component_length = 255 - -# container_listing_limit is the default (and max) number of items -# returned for a container listing request - -#container_listing_limit = 10000 - - -# account_listing_limit is the default (and max) number of items returned -# for an account listing request - -#account_listing_limit = 10000 - - -# max_account_name_length is the max number of bytes in the utf8 encoding of -# an account name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_account_name_length = 255 - - -# max_container_name_length is the max number of bytes in the utf8 encoding -# of a container name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_container_name_length = 255 diff --git a/test/functional_auth/keystone/conf/test.conf b/test/functional_auth/keystone/conf/test.conf deleted file mode 100644 index 91be015..0000000 --- a/test/functional_auth/keystone/conf/test.conf +++ /dev/null @@ -1,50 +0,0 @@ -[func_test] -# sample config -#auth_host = 127.0.0.1 -#auth_port = 8080 -#auth_ssl = no -#auth_prefix = /auth/ -## sample config for Swift with Keystone -auth_version = 2 -auth_host = localhost -auth_port = 5000 -auth_ssl = no -auth_prefix = /v2.0/ - -# Primary functional test account (needs admin access to the account) -account = test -username = tester -password = testing - -# User on a second account (needs admin access to the account) -account2 = test2 -username2 = tester2 -password2 = testing2 - -# User on same account as first, but without admin access -username3 = tester3 -password3 = testing3 - -# Default constraints if not defined here, the test runner will try -# to set them from /etc/swift/swift.conf. If that file isn't found, -# the test runner will skip tests that depend on these values. -# Note that the cluster must have "sane" values for the test suite to pass. 
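The comment above notes that any constraint left unset in test.conf is picked up from /etc/swift/swift.conf, and that tests depending on missing values are skipped. A minimal illustration of that fallback order, using Python 3's configparser; load_constraint is a hypothetical helper for clarity, not the actual test-runner code.

import configparser


def load_constraint(name, default, test_conf='/etc/swift/test.conf',
                    swift_conf='/etc/swift/swift.conf'):
    """Return a constraint from test.conf, else swift.conf, else a default."""
    for path, section in ((test_conf, 'func_test'),
                          (swift_conf, 'swift-constraints')):
        parser = configparser.ConfigParser()
        if parser.read(path) and parser.has_option(section, name):
            return parser.getint(section, name)
    return default

# e.g. the commented-out values below stay commented, so this would fall
# through to the swift-constraints section of swift.conf:
# print(load_constraint('max_file_size', 5 * 1024 ** 3))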
-#max_file_size = 5368709122 -#max_meta_name_length = 128 -#max_meta_value_length = 256 -#max_meta_count = 90 -#max_meta_overall_size = 4096 -#max_object_name_length = 1024 -#container_listing_limit = 10000 -#account_listing_limit = 10000 -#max_account_name_length = 256 -#max_container_name_length = 256 -normalized_urls = True - -collate = C - -[unit_test] -fake_syslog = False - -[probe_test] -# check_server_timeout = 30 diff --git a/test/functional_auth/swiftkerbauth/__init__.py b/test/functional_auth/swiftkerbauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/functional_auth/swiftkerbauth/conf/account-server.conf b/test/functional_auth/swiftkerbauth/conf/account-server.conf deleted file mode 100644 index 9ad458a..0000000 --- a/test/functional_auth/swiftkerbauth/conf/account-server.conf +++ /dev/null @@ -1,36 +0,0 @@ -[DEFAULT] -# -# Default gluster mount point to be used for object store,can be changed by -# setting the following value in {account,container,object}-server.conf files. -# It is recommended to keep this value same for all the three services but can -# be kept different if environment demands. -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the account-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -mount_check = true -bind_port = 6012 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = account-server - -[app:account-server] -use = egg:gluster_swift#account -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the account server to unclutter the log -# files. Warnings and errors will still be logged. -log_requests = off - diff --git a/test/functional_auth/swiftkerbauth/conf/container-server.conf b/test/functional_auth/swiftkerbauth/conf/container-server.conf deleted file mode 100644 index a406b4d..0000000 --- a/test/functional_auth/swiftkerbauth/conf/container-server.conf +++ /dev/null @@ -1,36 +0,0 @@ -[DEFAULT] -# -# Default gluster mount point to be used for object store,can be changed by -# setting the following value in {account,container,object}-server.conf files. -# It is recommended to keep this value same for all the three services but can -# be kept different if environment demands. -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the container-server workers -# start, you can *consider* setting this value to "false" to reduce the -# per-request overhead it can incur. -mount_check = true -bind_port = 6011 -# -# Override swift's default behaviour for fallocate. -disable_fallocate = true -# -# One or two workers should be sufficient for almost any installation of -# Gluster. -workers = 1 - -[pipeline:main] -pipeline = container-server - -[app:container-server] -use = egg:gluster_swift#container -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# After ensuring things are running in a stable manner, you can turn off -# normal request logging for the container server to unclutter the log -# files. Warnings and errors will still be logged. 
-log_requests = off - diff --git a/test/functional_auth/swiftkerbauth/conf/fs.conf b/test/functional_auth/swiftkerbauth/conf/fs.conf deleted file mode 100644 index 6d2a791..0000000 --- a/test/functional_auth/swiftkerbauth/conf/fs.conf +++ /dev/null @@ -1,13 +0,0 @@ -[DEFAULT] -# -# IP address of a node in the GlusterFS server cluster hosting the -# volumes to be served via Swift API. -mount_ip = localhost - -# Performance optimization parameter. When turned off, the filesystem will -# see a reduced number of stat calls, resulting in substantially faster -# response time for GET and HEAD container requests on containers with large -# numbers of objects, at the expense of an accurate count of combined bytes -# used by all objects in the container. For most installations "off" works -# fine. -accurate_size_in_listing = off \ No newline at end of file diff --git a/test/functional_auth/swiftkerbauth/conf/object-server.conf b/test/functional_auth/swiftkerbauth/conf/object-server.conf deleted file mode 100644 index d10d282..0000000 --- a/test/functional_auth/swiftkerbauth/conf/object-server.conf +++ /dev/null @@ -1,51 +0,0 @@ -[DEFAULT] -# -# Default gluster mount point to be used for object store,can be changed by -# setting the following value in {account,container,object}-server.conf files. -# It is recommended to keep this value same for all the three services but can -# be kept different if environment demands. -devices = /mnt/gluster-object -# -# Once you are confident that your startup processes will always have your -# gluster volumes properly mounted *before* the object-server workers start, -# you can *consider* setting this value to "false" to reduce the per-request -# overhead it can incur. -mount_check = true -bind_port = 6010 -# -# Maximum number of clients one worker can process simultaneously (it will -# actually accept N + 1). Setting this to one (1) will only handle one request -# at a time, without accepting another request concurrently. By increasing the -# number of workers to a much higher value, one can prevent slow file system -# operations for one request from starving other requests. -max_clients = 1024 -# -# If not doing the above, setting this value initially to match the number of -# CPUs is a good starting point for determining the right value. -workers = 1 -# Override swift's default behaviour for fallocate. -disable_fallocate = true - -[pipeline:main] -pipeline = object-server - -[app:object-server] -use = egg:gluster_swift#object -user = root -log_facility = LOG_LOCAL2 -log_level = WARN -# -# For performance, after ensuring things are running in a stable manner, you -# can turn off normal request logging for the object server to reduce the -# per-request overhead and unclutter the log files. Warnings and errors will -# still be logged. -log_requests = off -# -# Adjust this value to match the stripe width of the underlying storage array -# (not the stripe element size). This will provide a reasonable starting point -# for tuning this value. -disk_chunk_size = 65536 -# -# Adjust this value match whatever is set for the disk_chunk_size initially. -# This will provide a reasonable starting point for tuning this value. 
-network_chunk_size = 65536 diff --git a/test/functional_auth/swiftkerbauth/conf/proxy-server.conf b/test/functional_auth/swiftkerbauth/conf/proxy-server.conf deleted file mode 100644 index 855499c..0000000 --- a/test/functional_auth/swiftkerbauth/conf/proxy-server.conf +++ /dev/null @@ -1,73 +0,0 @@ -[DEFAULT] -bind_port = 8080 -user = root -# Consider using 1 worker per CPU -workers = 1 - -[pipeline:main] -pipeline = catch_errors healthcheck proxy-logging cache tempurl proxy-logging kerbauth proxy-server - -[app:proxy-server] -use = egg:gluster_swift#proxy -log_facility = LOG_LOCAL1 -log_level = WARN -# The API allows for account creation and deletion, but since Gluster/Swift -# automounts a Gluster volume for a given account, there is no way to create -# or delete an account. So leave this off. -allow_account_management = false -account_autocreate = true -# Ensure the proxy server uses fast-POSTs since we don't need to make a copy -# of the entire object given that all metadata is stored in the object -# extended attributes (no .meta file used after creation) and no container -# sync feature to present. -object_post_as_copy = false -# Only need to recheck the account exists once a day -recheck_account_existence = 86400 -# May want to consider bumping this up if containers are created and destroyed -# infrequently. -recheck_container_existence = 60 -# Timeout clients that don't read or write to the proxy server after 5 -# seconds. -client_timeout = 5 -# Give more time to connect to the object, container or account servers in -# cases of high load. -conn_timeout = 5 -# For high load situations, once connected to an object, container or account -# server, allow for delays communicating with them. -node_timeout = 60 -# May want to consider bumping up this value to 1 - 4 MB depending on how much -# traffic is for multi-megabyte or gigabyte requests; perhaps matching the -# stripe width (not stripe element size) of your storage volume is a good -# starting point. See below for sizing information. -object_chunk_size = 65536 -# If you do decide to increase the object_chunk_size, then consider lowering -# this value to one. Up to "put_queue_length" object_chunk_size'd buffers can -# be queued to the object server for processing. Given one proxy server worker -# can handle up to 1,024 connections, by default, it will consume 10 * 65,536 -# * 1,024 bytes of memory in the worse case (default values). Be sure the -# amount of memory available on the system can accommodate increased values -# for object_chunk_size. -put_queue_depth = 10 - -[filter:catch_errors] -use = egg:swift#catch_errors - -[filter:proxy-logging] -use = egg:swift#proxy_logging -access_log_level = WARN - -[filter:healthcheck] -use = egg:swift#healthcheck - -[filter:kerbauth] -use = egg:gluster_swift#kerbauth -ext_authentication_url = http://client.rhelbox.com/cgi-bin/swift-auth - -[filter:cache] -use = egg:swift#memcache -# Update this line to contain a comma separated list of memcache servers -# shared by all nodes running the proxy-server service. 
-memcache_servers = localhost:11211 - -[filter:tempurl] -use = egg:swift#tempurl diff --git a/test/functional_auth/swiftkerbauth/conf/swift.conf b/test/functional_auth/swiftkerbauth/conf/swift.conf deleted file mode 100644 index 0d25209..0000000 --- a/test/functional_auth/swiftkerbauth/conf/swift.conf +++ /dev/null @@ -1,84 +0,0 @@ -[DEFAULT] - - -[swift-hash] -# random unique string that can never change (DO NOT LOSE) -swift_hash_path_suffix = gluster - - -# The swift-constraints section sets the basic constraints on data -# saved in the swift cluster. - -[swift-constraints] -# max_file_size is the largest "normal" object that can be saved in -# the cluster. This is also the limit on the size of each segment of -# a "large" object when using the large object manifest support. -# This value is set in bytes. Setting it to lower than 1MiB will cause -# some tests to fail. -# Default is 1 TiB = 2**30*1024 - -max_file_size = 1099511627776 - -# max_meta_name_length is the max number of bytes in the utf8 encoding -# of the name portion of a metadata header. - -#max_meta_name_length = 128 - - -# max_meta_value_length is the max number of bytes in the utf8 encoding -# of a metadata value - -#max_meta_value_length = 256 - - -# max_meta_count is the max number of metadata keys that can be stored -# on a single account, container, or object - -#max_meta_count = 90 - - -# max_meta_overall_size is the max number of bytes in the utf8 encoding -# of the metadata (keys + values) - -#max_meta_overall_size = 4096 - - -# max_object_name_length is the max number of bytes in the utf8 encoding of an -# object name: Gluster FS can handle much longer file names, but the length -# between the slashes of the URL is handled below. Remember that most web -# clients can't handle anything greater than 2048, and those that do are -# rather clumsy. - -max_object_name_length = 2048 - -# max_object_name_component_length (GlusterFS) is the max number of bytes in -# the utf8 encoding of an object name component (the part between the -# slashes); this is a limit imposed by the underlying file system (for XFS it -# is 255 bytes). - -max_object_name_component_length = 255 - -# container_listing_limit is the default (and max) number of items -# returned for a container listing request - -#container_listing_limit = 10000 - - -# account_listing_limit is the default (and max) number of items returned -# for an account listing request - -#account_listing_limit = 10000 - - -# max_account_name_length is the max number of bytes in the utf8 encoding of -# an account name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_account_name_length = 255 - - -# max_container_name_length is the max number of bytes in the utf8 encoding -# of a container name: Gluster FS Filename limit (XFS limit?), must be the same -# size as max_object_name_component_length above. - -max_container_name_length = 255 diff --git a/test/functional_auth/swiftkerbauth/conf/test.conf b/test/functional_auth/swiftkerbauth/conf/test.conf deleted file mode 100644 index 643f2d1..0000000 --- a/test/functional_auth/swiftkerbauth/conf/test.conf +++ /dev/null @@ -1,49 +0,0 @@ -[func_test] -# Swiftkerbauth configuration -auth_host = 127.0.0.1 -auth_port = 8080 -auth_prefix = /auth/ -auth_scheme = http:// -auth_mode = passive -auth_version = 1 -domain_name = RHELBOX.COM - -#All the accounts, users & passwords to be prepared on kerberos server. 
-# Primary functional test account (needs admin access to the account) -# Note: Account name to be prepared on kerberos server 'AUTH_accoun' -account = test -username = tester -password = testing - -# User on a second account (needs admin access to the account) -account2 = test2 -username2 = tester2 -password2 = testing2 - -# User on same account as first, but without admin access -username3 = tester3 -password3 = testing3 - -# Default constraints if not defined here, the test runner will try -# to set them from /etc/swift/swift.conf. If that file isn't found, -# the test runner will skip tests that depend on these values. -# Note that the cluster must have "sane" values for the test suite to pass. -#max_file_size = 5368709122 -#max_meta_name_length = 128 -#max_meta_value_length = 256 -#max_meta_count = 90 -#max_meta_overall_size = 4096 -#max_object_name_length = 1024 -#container_listing_limit = 10000 -#account_listing_limit = 10000 -#max_account_name_length = 256 -#max_container_name_length = 256 -normalized_urls = True - -collate = C - -[unit_test] -fake_syslog = False - -[probe_test] -# check_server_timeout = 30 diff --git a/test/functional_auth/swiftkerbauth/test_swkrbath_active.py b/test/functional_auth/swiftkerbauth/test_swkrbath_active.py deleted file mode 100644 index 86c79ef..0000000 --- a/test/functional_auth/swiftkerbauth/test_swkrbath_active.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2010-2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import unittest -from nose import SkipTest -import commands -import os -from test import get_config -from swift.common.bufferedhttp import http_connect_raw as http_connect - -config = get_config('func_test') - -class Utils: - @classmethod - def SwiftKerbAuthPrep(self, - user=config['username'],domain=config['domain_name'],\ - passwd=config['password']): - username = '%s@%s' % (user, domain) - return commands.getstatusoutput('kinit %s <<< %s' % (username, passwd)) - - @classmethod - def SwiftKerbAuthCleanAll(self): - return commands.getstatusoutput('kdestroy') - - -class TestSwKrbAthActive(unittest.TestCase): - def setUp(self): - #Perform kinit in active mode. - (status, output) = Utils.SwiftKerbAuthPrep() - self.assertEqual(status, 0, \ - 'swkrbauth prep failed with valid credentials'+output) - self.auth_host = config['auth_host'] - self.auth_port = int(config['auth_port']) - self.auth_prefix = config.get('auth_prefix', '/auth/') - self.auth_version = str(config.get('auth_version', '1')) - self.account_name = config['account'] - self.username = config['username'] - self.password = config['password'] - self.auth_scheme = config['auth_scheme'] - - #Prepare auth_url. e.g. 
http://client.rhelbox.com:8080/auth/v1.0 - if self.auth_version == "1": - self.auth_path = '%sv1.0' % (self.auth_prefix) - else: - self.auth_path = self.auth_prefix - self.auth_netloc = "%s:%d" % (self.auth_host, self.auth_port) - auth_url = self.auth_scheme + self.auth_netloc + self.auth_path - - #Obtain the X-Auth-Token from kerberos server to use it in furhter - #testing - self.auth_token = None - (status, output) = commands.getstatusoutput('curl -v -u : --negotiate\ - --location-trusted %s' % (auth_url)) - self.assertEqual(status, 0, 'Token negotiation failed:' +output) - match = re.search('X-Auth-Token: AUTH.*', output) - if match: - self.auth_token = match.group(0).split(':')[1].strip() - else: - self.fail('No X-Auth-Token found, failed') - - def tearDown(self): - Utils.SwiftKerbAuthCleanAll() - - - def _get_auth_token(self): - return {'X-Auth-Token' : self.auth_token} - - def testGetAccounts(self): - #TODO: The test case is to perform GET on the account mentioned via - #configuration file. This is a sample test case. The whole test - #suite can be enhanced further to have further complicated test cases. - path = '/v1/AUTH_%s' % (config['account']) - - headers = self._get_auth_token() - conn = http_connect(config['auth_host'], config['auth_port'], 'GET', - path, headers) - resp = conn.getresponse() - self.assertTrue(resp.status == 204) diff --git a/test/unit/account/__init__.py b/test/unit/account/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/account/test_server.py b/test/unit/account/test_server.py deleted file mode 100644 index 7696fb1..0000000 --- a/test/unit/account/test_server.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" Tests for gluster.swift.account.server subclass """ - -import os -import errno -import unittest -from nose import SkipTest - -import gluster.swift.common.Glusterfs - -gluster.swift.common.Glusterfs.RUN_DIR = '/tmp/gluster_unit_tests/run' -try: - os.makedirs(gluster.swift.common.Glusterfs.RUN_DIR) -except OSError as e: - if e.errno != errno.EEXIST: - raise - -import gluster.swift.account.server as server - - -class TestAccountServer(unittest.TestCase): - """ - Tests for account server subclass. 
- """ - - def test_constructor(self): - raise SkipTest diff --git a/test/unit/common/middleware/__init__.py b/test/unit/common/middleware/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/common/middleware/gswauth/__init__.py b/test/unit/common/middleware/gswauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/common/middleware/gswauth/swauth/__init__.py b/test/unit/common/middleware/gswauth/swauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/common/middleware/gswauth/swauth/test_authtypes.py b/test/unit/common/middleware/gswauth/swauth/test_authtypes.py deleted file mode 100644 index aba9ad7..0000000 --- a/test/unit/common/middleware/gswauth/swauth/test_authtypes.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Pablo Llopis 2011 - -import unittest -import gluster.swift.common.middleware.gswauth.swauth.authtypes as authtypes - - -class TestPlaintext(unittest.TestCase): - - def setUp(self): - self.auth_encoder = authtypes.Plaintext() - - def test_plaintext_encode(self): - enc_key = self.auth_encoder.encode('keystring') - self.assertEquals('plaintext:keystring', enc_key) - - def test_plaintext_valid_match(self): - creds = 'plaintext:keystring' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, True) - - def test_plaintext_invalid_match(self): - creds = 'plaintext:other-keystring' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, False) - - -class TestSha1(unittest.TestCase): - - def setUp(self): - self.auth_encoder = authtypes.Sha1() - self.auth_encoder.salt = 'salt' - - def test_sha1_encode(self): - enc_key = self.auth_encoder.encode('keystring') - self.assertEquals('sha1:salt$d50dc700c296e23ce5b41f7431a0e01f69010f06', - enc_key) - - def test_sha1_valid_match(self): - creds = 'sha1:salt$d50dc700c296e23ce5b41f7431a0e01f69010f06' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, True) - - def test_sha1_invalid_match(self): - creds = 'sha1:salt$deadbabedeadbabedeadbabec0ffeebadc0ffeee' - match = self.auth_encoder.match('keystring', creds) - self.assertEquals(match, False) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/common/middleware/gswauth/swauth/test_middleware.py b/test/unit/common/middleware/gswauth/swauth/test_middleware.py deleted file mode 100644 index e0d4ce8..0000000 --- a/test/unit/common/middleware/gswauth/swauth/test_middleware.py +++ /dev/null @@ -1,4773 +0,0 @@ -# Copyright (c) 2010-2011 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import simplejson as json -except ImportError: - import json -import unittest -from nose import SkipTest -from contextlib import contextmanager -from time import time - -from swift.common.swob import Request, Response - -from gluster.swift.common.middleware.gswauth.swauth import middleware as auth -from gluster.swift.common.middleware.gswauth.swauth.authtypes import MAX_TOKEN_LENGTH - - -DEFAULT_TOKEN_LIFE = 86400 -MAX_TOKEN_LIFE = 100000 - - -class FakeMemcache(object): - - def __init__(self): - self.store = {} - - def get(self, key): - return self.store.get(key) - - def set(self, key, value, timeout=0, time=0): - self.store[key] = value - return True - - def incr(self, key, timeout=0, time=0): - self.store[key] = self.store.setdefault(key, 0) + 1 - return self.store[key] - - @contextmanager - def soft_lock(self, key, timeout=0, retries=5, time=0): - yield True - - def delete(self, key): - try: - del self.store[key] - except Exception: - pass - return True - - -class FakeApp(object): - - def __init__( - self, status_headers_body_iter=None, acl=None, sync_key=None): - self.calls = 0 - self.status_headers_body_iter = status_headers_body_iter - if not self.status_headers_body_iter: - self.status_headers_body_iter = iter( - [('404 Not Found', {}, '')]) - self.acl = acl - self.sync_key = sync_key - - def __call__(self, env, start_response): - self.calls += 1 - self.request = Request.blank('', environ=env) - if self.acl: - self.request.acl = self.acl - if self.sync_key: - self.request.environ[ - 'swift_sync_key'] = self.sync_key - if 'swift.authorize' in env: - resp = env['swift.authorize'](self.request) - if resp: - return resp(env, start_response) - status, headers, body = self.status_headers_body_iter.next( - ) - return Response(status=status, headers=headers, - body=body)(env, start_response) - - -class FakeConn(object): - - def __init__(self, status_headers_body_iter=None): - self.calls = 0 - self.status_headers_body_iter = status_headers_body_iter - if not self.status_headers_body_iter: - self.status_headers_body_iter = iter( - [('404 Not Found', {}, '')]) - - def request(self, method, path, headers): - self.calls += 1 - self.request_path = path - self.status, self.headers, self.body = \ - self.status_headers_body_iter.next() - self.status, self.reason = self.status.split(' ', 1) - self.status = int(self.status) - - def getresponse(self): - return self - - def read(self): - body = self.body - self.body = '' - return body - - -class TestAuth(unittest.TestCase): - - def setUp(self): - self.test_auth = \ - auth.filter_factory({ - 'super_admin_key': 'supertest', - 'token_life': str(DEFAULT_TOKEN_LIFE), - 'max_token_life': str(MAX_TOKEN_LIFE)})(FakeApp()) - - def test_super_admin_key_not_required(self): - auth.filter_factory({})(FakeApp()) - - def test_reseller_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.reseller_prefix, 'AUTH_') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': 'TEST'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - ath = 
auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': 'TEST_'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - - def test_auth_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': ''})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': '/test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': '/test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': 'test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory( - {'super_admin_key': 'supertest', - 'auth_prefix': 'test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - - def test_no_auth_type_init(self): - app = FakeApp() - ath = auth.filter_factory({})(app) - self.assertEquals(ath.auth_type, 'Plaintext') - - def test_valid_auth_type_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'auth_type': 'sha1'})(app) - self.assertEquals(ath.auth_type, 'Sha1') - ath = auth.filter_factory( - {'auth_type': 'plaintext'})(app) - self.assertEquals(ath.auth_type, 'Plaintext') - - def test_invalid_auth_type_init(self): - app = FakeApp() - exc = None - try: - auth.filter_factory( - {'auth_type': 'NONEXISTANT'})(app) - except Exception as err: - exc = err - self.assertEquals(str(exc), - 'Invalid auth_type in config file: %s' % - 'Nonexistant') - - def test_default_metadata_volume_init(self): - app = FakeApp() - ath = auth.filter_factory({})(app) - self.assertEquals(ath.metadata_volume, 'gsmetadata') - - def test_conf_metadata_volume_init(self): - app = FakeApp() - ath = auth.filter_factory( - {'metadata_volume': 'meta_test'})(app) - self.assertEquals(ath.metadata_volume, 'meta_test') - ath = auth.filter_factory( - {'metadata_volume': 'new_meta_volume'})(app) - self.assertEquals(ath.metadata_volume, 'new_meta_volume') - - def test_default_swift_cluster_init(self): - app = FakeApp() - self.assertRaises(Exception, auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#badscheme://host/path'}), app) - ath = auth.filter_factory( - {'super_admin_key': 'supertest'})(app) - self.assertEquals(ath.default_swift_cluster, - 'local#http://127.0.0.1:8080/v1') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#http://host/path'})(app) - self.assertEquals(ath.default_swift_cluster, - 'local#http://host/path') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://host/path/'})(app) - self.assertEquals(ath.dsc_url, 'https://host/path') - self.assertEquals(ath.dsc_url2, 'https://host/path') - ath = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'default_swift_cluster': - 'local#https://host/path/#http://host2/path2/'})(app) - self.assertEquals(ath.dsc_url, 'https://host/path') - self.assertEquals( - ath.dsc_url2, - 'http://host2/path2') - - def test_top_level_denied(self): - resp = Request.blank( - '/').get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_anon(self): - resp = Request.blank( - '/v1/AUTH_account').get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - 
self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.authorize) - - def test_auth_deny_non_reseller_prefix(self): - resp = Request.blank( - '/v1/BLAH_account', - headers={'X-Auth-Token': 'BLAH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.denied_response) - - def test_auth_deny_non_reseller_prefix_no_override( - self): - fake_authorize = lambda x: Response( - status='500 Fake') - resp = Request.blank( - '/v1/BLAH_account', - headers={'X-Auth-Token': 'BLAH_t'}, - environ={'swift.authorize': fake_authorize}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(resp.environ['swift.authorize'], fake_authorize) - - def test_auth_no_reseller_prefix_deny(self): - # Ensures that when we have no reseller prefix, we don't deny a request - # outright but set up a denial swift.authorize and pass the request on - # down the chain. - local_app = FakeApp() - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(local_app) - resp = Request.blank( - '/v1/account', - headers={'X-Auth-Token': 't'}).get_response(local_auth) - self.assertEquals(resp.status_int, 401) - # one for checking auth, two for request passed - # along - self.assertEquals(local_app.calls, 2) - self.assertEquals(resp.environ['swift.authorize'], - local_auth.denied_response) - - def test_auth_no_reseller_prefix_allow(self): - # Ensures that when we have no reseller prefix, we can still allow - # access if our auth server accepts requests - local_app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(local_app) - resp = Request.blank( - '/v1/act', - headers={'X-Auth-Token': 't'}).get_response(local_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(local_app.calls, 2) - self.assertEquals(resp.environ['swift.authorize'], - local_auth.authorize) - - def test_auth_no_reseller_prefix_no_token(self): - # Check that normally we set up a call back to our - # authorize. - local_auth = \ - auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(FakeApp(iter([]))) - resp = Request.blank( - '/v1/account').get_response( - local_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals( - resp.environ['swift.authorize'], local_auth.authorize) - # Now make sure we don't override an existing swift.authorize when we - # have no reseller prefix. 
- local_auth = \ - auth.filter_factory( - {'super_admin_key': 'supertest', - 'reseller_prefix': ''})(FakeApp()) - local_authorize = lambda req: Response('test') - resp = Request.blank( - '/v1/account', environ={'swift.authorize': - local_authorize}).get_response(local_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - resp.environ['swift.authorize'], - local_authorize) - - def test_auth_fail(self): - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_auth_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_auth_memcache(self): - # First run our test without memcache, showing we need to return the - # token contents twice. - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, ''), - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 4) - # Now run our test with memcache, showing we no longer need to return - # the token contents twice. 
- self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, ''), - # Don't need a second token object returned if memcache is - # used - ('204 No Content', {}, '')])) - fake_memcache = FakeMemcache() - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}, - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}, - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_auth_just_expired(self): - self.test_auth.app = FakeApp(iter([ - # Request for token (which will have expired) - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() - 1})), - # Request to delete token - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_middleware_storage_token(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - resp = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Storage-Token': 'AUTH_t'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_authorize_bad_path(self): - req = Request.blank('/badpath') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/badpath') - req.remote_user = 'act:usr,act,AUTH_cfa' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_account_access(self): - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act,AUTH_cfa' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_acl_group_access(self): - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act:usr' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act2' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act:usr2' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_deny_cross_reseller(self): - # Tests that cross-reseller is denied, even if 
ACLs/group - # names match - req = Request.blank('/v1/OTHER_cfa') - req.remote_user = 'act:usr,act,AUTH_cfa' - req.acl = 'act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_acl_referrer_access(self): - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = Request.blank('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:*,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 401) - req = Request.blank('/v1/AUTH_cfa/c') - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals( - self.test_auth.authorize(req), - None) - - def test_detect_reseller_request(self): - req = self._make_request('/v1/AUTH_admin', - headers={'X-Auth-Token': 'AUTH_t'}) - cache_key = 'AUTH_/auth/AUTH_t' - cache_entry = (time() + 3600, '.reseller_admin') - req.environ['swift.cache'].set( - cache_key, cache_entry) - resp = req.get_response(self.test_auth) - self.assertTrue(req.environ.get('reseller_request')) - - def test_account_put_permissions(self): - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,AUTH_other' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - # Even PUTs to your own account as account admin - # should fail - req = Request.blank( - '/v1/AUTH_old', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,AUTH_old' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp, None) - - # .super_admin is not something the middleware should ever see or care - # about - req = Request.blank( - '/v1/AUTH_new', - environ={'REQUEST_METHOD': 'PUT'}) - req.remote_user = 'act:usr,act,.super_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def 
test_account_delete_permissions(self): - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,AUTH_other' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - # Even DELETEs to your own account as account admin should - # fail - req = Request.blank('/v1/AUTH_old', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,AUTH_old' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp, None) - - # .super_admin is not something the middleware should ever see or care - # about - req = Request.blank('/v1/AUTH_new', - environ={'REQUEST_METHOD': 'DELETE'}) - req.remote_user = 'act:usr,act,.super_admin' - resp = self.test_auth.authorize(req) - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_get_token_fail(self): - resp = Request.blank( - '/auth/v1.0').get_response( - self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_invalid_key(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'invalid'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_token_fail_invalid_x_auth_user_format( - self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_non_matching_account_in_request( - self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act2:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_bad_path(self): - resp = Request.blank( - '/auth/v1/act/auth/invalid', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_token_fail_missing_key(self): - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act:usr'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_fail_get_user_details(self): - self.test_auth.app = FakeApp(iter([ - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_token_fail_get_account(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # 
GET of account - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_token_fail_put_new_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_token_fail_post_to_user(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_token_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_fail_get_existing_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_token_for_auth_acct_success(self): - fmc = FakeMemcache() - local_auth = \ - auth.filter_factory({ - 'super_admin_key': 'supertest', - 'metadata_volume': 'gsmd', - 'token_life': str(DEFAULT_TOKEN_LIFE), - 'max_token_life': str(MAX_TOKEN_LIFE)})(FakeApp()) - resp = Request.blank( - '/auth/v1.0', - environ={'REQUEST_METHOD': 'GET', - 'swift.cache': fmc}, - headers={'X-Auth-User': 'act:.super_admin', - 'X-Auth-Key': 'supertest'}).get_response(local_auth) - self.assertEquals(resp.status_int, 200) - itk = resp.headers.get('x-auth-token') - self.assertTrue(itk.startswith('AUTH_itk'), itk) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_gsmd') - 
expires, groups = fmc.get('AUTH_/auth/%s' % itk) - self.assertEquals(groups, - 'gsmd,.reseller_admin,AUTH_gsmd') - - def test_get_token_for_auth_acct_fail_passwd(self): - local_auth = \ - auth.filter_factory({ - 'super_admin_key': 'supertest', - 'metadata_volume': 'gsmd', - 'token_life': str(DEFAULT_TOKEN_LIFE), - 'max_token_life': str(MAX_TOKEN_LIFE)})(FakeApp()) - resp = Request.blank( - '/auth/v1.0', - environ={'REQUEST_METHOD': 'GET', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-User': 'act:.super_admin', - 'X-Auth-Key': 'invalidpasswd'}).get_response(local_auth) - self.assertEquals(resp.status_int, 401) - - def test_get_token_success_v1_0(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_with_user_token_life( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', - 'X-Auth-Token-Lifetime': 10}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - left = int(resp.headers['x-auth-token-expires']) - self.assertTrue(left > 0, '%d > 0' % left) - self.assertTrue(left <= 10, '%d <= 10' % left) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_with_user_token_life_past_max( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # 
GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - req = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', - 'X-Auth-Token-Lifetime': MAX_TOKEN_LIFE * 10}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - left = int(resp.headers['x-auth-token-expires']) - self.assertTrue(left > DEFAULT_TOKEN_LIFE, - '%d > %d' % (left, DEFAULT_TOKEN_LIFE)) - self.assertTrue(left <= MAX_TOKEN_LIFE, - '%d <= %d' % (left, MAX_TOKEN_LIFE)) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_act_auth(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Storage-User': 'usr', - 'X-Storage-Pass': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_storage_instead_of_auth( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Storage-User': 'act:usr', - 'X-Storage-Pass': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_( - resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 
'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_act_auth_auth_instead_of_storage( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1/act/auth', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_(resp.headers.get( - 'x-auth-token', - '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_existing_token(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps( - {"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 9999999999.9999999})), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_token_success_existing_token_but_request_new_one( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # DELETE of expired token - ('204 No Content', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key', 
- 'X-Auth-New-Token': 'true'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_get_token_success_existing_token_expired(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps( - {"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 0.0})), - # DELETE of expired token - ('204 No Content', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 7) - - def test_get_token_success_existing_token_expired_fail_deleting_old( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of token - ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", - "account_id": "AUTH_cfa", - "groups": [{'name': "act:usr"}, - {'name': "key"}, {'name': ".admin"}], - "expires": 0.0})), - # DELETE of expired token - ('503 Service Unavailable', {}, ''), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': 'act:usr', - 'X-Auth-Key': 'key'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertNotEquals( - resp.headers.get('x-auth-token'), - 'AUTH_tktest') - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - 
self.assertEquals(self.test_auth.app.calls, 7) - - def test_prep_success(self): - list_to_iter = [ - # PUT of gsmetadata account - ('201 Created', {}, ''), - # PUT of .account_id container - ('201 Created', {}, '')] - # PUT of .token* containers - for x in xrange(16): - list_to_iter.append(('201 Created', {}, '')) - self.test_auth.app = FakeApp(iter(list_to_iter)) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 17) - - def test_prep_bad_method(self): - resp = Request.blank('/auth/v2/.prep', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'HEAD'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_prep_bad_creds(self): - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'upertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': '.super_admin'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - resp = Request.blank( - '/auth/v2/.prep', - environ={'REQUEST_METHOD': 'POST'}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_prep_fail_account_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of gsmetadata account - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_prep_fail_token_container_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of gsmetadata account - ('201 Created', {}, ''), - # PUT of .token container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_prep_fail_account_id_container_create(self): - self.test_auth.app = FakeApp(iter([ - # PUT of 
gsmetadata account - ('201 Created', {}, ''), - # PUT of .token container - ('201 Created', {}, ''), - # PUT of .account_id container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/.prep', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_reseller_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of gsmetadata account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": ".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of gsmetadata account (list containers - # continuation) - ('200 Ok', {}, '[]')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {"accounts": [{"name": "act"}]}) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"})), - # GET of gsmetadata account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": ".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of gsmetadata account (list containers - # continuation) - ('200 Ok', {}, '[]')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {"accounts": [{"name": "act"}]}) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_reseller_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_reseller_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of gsmetadata account (list containers) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 
'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of gsmetadata account (list containers) - ('200 Ok', {}, json.dumps([ - {"name": ".token", "count": 0, "bytes": 0}, - {"name": ".account_id", - "count": 0, "bytes": 0}, - {"name": "act", "count": 0, "bytes": 0}])), - # GET of gsmetadata account (list containers - # continuation) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_account_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, - json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'account_id': 'AUTH_cfa', - 'services': {'storage': - {'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, - 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) - self.assertEquals(self.test_auth.app.calls, 3) - - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of .services object - ('200 Ok', {}, - json.dumps({"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'account_id': 'AUTH_cfa', - 'services': {'storage': - 
{'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, - 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_account_fail_bad_account_name(self): - resp = Request.blank('/auth/v2/.token', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/.anything', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_account_fail_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_account_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_account_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', 
- headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of account container (list objects - # continuation) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_set_services_new_service(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, - json.dumps({"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'new_service': - {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'storage': {'default': 'local', - 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}, - 'new_service': {'new_endpoint': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_new_endpoint(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": - {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals( - json.loads(resp.body), - {'storage': {'default': 'local', - 'local': - 'http://127.0.0.1:8080/v1/AUTH_cfa', - 'new_endpoint': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_update_endpoint(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) 
- self.assertEquals(resp.status_int, 200) - self.assertEquals(json.loads(resp.body), - {'storage': {'default': 'local', - 'local': 'new_value'}}) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_set_services_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_set_services_fail_bad_account_name(self): - resp = Request.blank('/auth/v2/.act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'storage': {'local': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_set_services_fail_bad_json(self): - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body='garbage' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body='' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_set_services_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('503 Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps({ - 'new_service': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 
'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps({ - 'new_service': {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_set_services_fail_put_services(self): - self.test_auth.app = FakeApp(iter([ - # GET of .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # PUT of new .services object - ('503 Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.services', - environ={ - 'REQUEST_METHOD': 'POST'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}, - body=json.dumps( - {'new_service': - {'new_endpoint': 'new_value'}}) - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_account_success(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_put_account_success_preexist_but_not_completed( - self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for pre-existence - # We're going to show it as existing this time, but with no - # X-Container-Meta-Account-Id, indicating a failed - # previous attempt - ('200 Ok', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_put_account_success_preexist_and_completed( - self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for pre-existence - # We're going to show it as existing this time, and with an - # X-Container-Meta-Account-Id, indicating it already - # exists - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 202) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_success_with_given_suffix(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No 
Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest', - 'X-Account-Suffix': 'test-suffix'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_put_account_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'super:admin', - 'X-Auth-Admin-Key': 'supertest'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'act:adm', - 'X-Auth-Admin-Key': 'key'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_fail_invalid_account_name(self): - resp = Request.blank( - '/auth/v2/.act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'},).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_account_fail_on_initial_account_head(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_account_fail_on_account_marker_put(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_account_fail_on_storage_account_put(self): - 
self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_put_account_fail_on_account_id_mapping(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_put_account_fail_on_services_object(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', - 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_put_account_fail_on_post_mapping(self): - self.test_auth.app = FakeApp(iter([ - # Initial HEAD of account container to check for - # pre-existence - ('404 Not Found', {}, ''), - # PUT of account container - ('204 No Content', {}, ''), - # PUT of .account_id mapping object - ('204 No Content', {}, ''), - # PUT of .services object - ('204 No Content', {}, ''), - # POST to account container updating - # X-Container-Meta-Account-Id - ('503 Service Unavailable', {}, '')])) - resp = Request.blank( - '/auth/v2/act', - environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_delete_account_success(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = 
Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_delete_account_success_missing_services(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('404 Not Found', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_delete_account_success_missing_storage_account( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_delete_account_success_missing_account_id_mapping( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('404 Not Found', {}, ''), - # DELETE the account container - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - 
headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_delete_account_success_missing_account_container_at_end( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_delete_account_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_invalid_account_name(self): - resp = Request.blank('/auth/v2/.act', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_account_fail_not_found(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - 
headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_not_found_concurrency( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_list_account(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_list_account_concurrency( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_has_users(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}]))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 409) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_account_fail_has_users2(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - 
"last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}]))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 409) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_account_fail_get_services(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_account_fail_delete_storage_account( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_account_fail_delete_storage_account2( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa", - "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - 
self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_account_fail_delete_storage_account3( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_account_fail_delete_storage_account4( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa", - "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_account_fail_delete_services(self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_account_fail_delete_account_id_mapping( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - 
"last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_delete_account_fail_delete_account_container( - self): - self.test_auth.app = FakeApp(iter([ - # Account's container listing, checking for - # users - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}])), - # Account's container listing, checking for users - # (continuation) - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), - # GET the .services object - ('200 Ok', {}, json.dumps( - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), - # DELETE the .services object - ('204 No Content', {}, ''), - # DELETE the .account_id mapping object - ('204 No Content', {}, ''), - # DELETE the account container - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act', - environ={ - 'REQUEST_METHOD': 'DELETE', - 'swift.cache': FakeMemcache()}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 6) - - def test_get_user_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_fail_no_super_admin_key(self): - local_auth = auth.filter_factory({})(FakeApp(iter([ - # GET of user object (but we should never get - # here) - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}))]))) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(local_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(local_auth.app.calls, 0) - - def test_get_user_groups_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - 
{"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester3"}, {"name": "act"}], - "auth": "plaintext:key3"})), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": ".admin"}, {"name": "act"}, - {"name": "act:tester"}, {"name": "act:tester3"}]})) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_get_user_groups_success2(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:tester3"}, {"name": "act"}], - "auth": "plaintext:key3"})), - # GET of account container (list objects - # continuation) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": ".admin"}, {"name": "act"}, - {"name": "act:tester"}, {"name": "act:tester3"}]})) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_user_fail_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_user_fail_invalid_user(self): - resp = Request.blank('/auth/v2/act/.invalid', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_get_user_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'super:admin', - 'X-Auth-Admin-Key': 'supertest'}, - 
).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}, - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but not reseller - # admin) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_account_admin_fail_getting_account_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who is an .admin - # as well] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object (reseller admin check [and fail - # here]) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_get_user_account_admin_fail_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who is a - # .reseller_admin] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_reseller_admin_fail_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin check) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".reseller_admin"}], - "auth": "plaintext:key"})), - # GET of requested user object [who also is a - # .reseller_admin] - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = 
Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_super_admin_succeed_getting_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - # GET of requested user object - ('200 Ok', {}, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, json.dumps( - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".reseller_admin"}], - "auth": "plaintext:key"})) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_not_found(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_fail_listing(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_groups_fail_get_user(self): - self.test_auth.app = FakeApp(iter([ - # GET of account container (list objects) - ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, - json.dumps([ - {"name": ".services", "hash": "etag", "bytes": 112, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.618110"}, - {"name": "tester", "hash": "etag", "bytes": 104, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:27.736680"}, - {"name": "tester3", "hash": "etag", "bytes": 86, - "content_type": - "application/octet-stream", - "last_modified": "2010-12-03T17:16:28.135530"}])), - # GET of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/.groups', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_get_user_not_found(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_user_fail(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - 
self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_user_fail_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_user_fail_invalid_user(self): - resp = Request.blank('/auth/v2/act/.usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_user_fail_no_user_key(self): - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_put_user_reseller_admin_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # Checking if user is changing his own key. This is called. - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:rdm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"})), - # GET of user object (reseller admin) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:rdm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # Checking if user is changing his own key. This is called. - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object (account admin, but not reseller admin) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - self.test_auth.app = FakeApp(iter([ - # Checking if user is changing his own key. This is called. 
- ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # GET of user object (regular user) - # This shouldn't actually get called, checked - # below - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_put_user_account_admin_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # Checking if user is changing his own key. - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # Checking if user is changing his own key. - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': - 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_user_regular_fail_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # Checking if user is changing his own key. - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # Checking if user is changing his own key. 
- ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act2/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': - 'key', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_user_regular_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}], - "auth": "plaintext:key"}) - - def test_put_user_special_chars_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/u_s-r', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:u_s-r"}, {"name": "act"}], - "auth": "plaintext:key"}) - - def test_put_user_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key', - 'X-Auth-User-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}], - "auth": "plaintext:key"}) - - def test_put_user_reseller_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('201 Created', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key', - 'X-Auth-User-Reseller-Admin': 'true'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 2) - self.assertEquals( - json.loads(self.test_auth.app.request.body), - {"groups": [{"name": "act:usr"}, {"name": "act"}, - {"name": ".admin"}, {"name": ".reseller_admin"}], - "auth": "plaintext:key"}) - - def test_put_user_fail_not_found(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 
'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_put_user_fail(self): - self.test_auth.app = FakeApp(iter([ - # PUT of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': - 'supertest', - 'X-Auth-User-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_bad_creds(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"})), - # GET of user object (account admin, but wrong - # account) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # GET of user object (regular user) - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_delete_reseller_admin_user_fail(self): - self.test_auth.app = FakeApp(iter([ - # is user being deleted a reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:re_adm"}, - {"name": "act2"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"})), - # GET of user object - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, - {"name": "act2"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - - resp = Request.blank('/auth/v2/act2/re_adm', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 403) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_reseller_admin_user_success(self): - self.test_auth.app = FakeApp(iter([ - # is user being deleted a reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:re_adm"}, - {"name": "act2"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - - resp = Request.blank('/auth/v2/act2/re_adm', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - 
self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_user_invalid_account(self): - resp = Request.blank('/auth/v2/.invalid/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_user_invalid_user(self): - resp = Request.blank('/auth/v2/act/.invalid', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_delete_user_not_found(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_fail_head_user(self): - self.test_auth.app = FakeApp(iter([ - # HEAD of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_delete_user_fail_delete_token(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_delete_user_fail_delete_user(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('503 Service Unavailable', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_user_success(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - 
self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_user_success_missing_user_at_end(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('204 No Content', {}, ''), - # DELETE of user object - ('404 Not Found', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_user_success_missing_token(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', - {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), - # DELETE of token - ('404 Not Found', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 4) - - def test_delete_user_success_no_token(self): - self.test_auth.app = FakeApp(iter([ - # is user reseller_admin - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"})), - # HEAD of user object - ('200 Ok', {}, ''), - # DELETE of user object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'} - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) - - def test_validate_token_bad_prefix(self): - resp = Request.blank('/auth/v2/.token/BAD_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_validate_token_tmi(self): - resp = Request.blank( - '/auth/v2/.token/AUTH_token/tmi').get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - - def test_validate_token_bad_memcache(self): - fake_memcache = FakeMemcache() - fake_memcache.set('AUTH_/auth/AUTH_token', 'bogus') - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 500) - - def test_validate_token_from_memcache(self): - fake_memcache = FakeMemcache() - fake_memcache.set( - 'AUTH_/auth/AUTH_token', - (time() + 1, - 'act:usr,act')) - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals( - resp.headers.get('x-auth-groups'), - 'act:usr,act') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_validate_token_from_memcache_expired(self): - fake_memcache = FakeMemcache() - fake_memcache.set( - 'AUTH_/auth/AUTH_token', - (time() - 1, - 'act:usr,act')) - resp = Request.blank( - '/auth/v2/.token/AUTH_token', - environ={'swift.cache': 
fake_memcache}).get_response( - self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assert_('x-auth-groups' not in resp.headers) - self.assert_('x-auth-ttl' not in resp.headers) - - def test_validate_token_from_object(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'groups': [{'name': 'act:usr'}, - {'name': 'act'}], 'expires': time() + 1}))])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals( - resp.headers.get('x-auth-groups'), - 'act:usr,act') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_validate_token_from_object_expired(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'groups': 'act:usr,act', - 'expires': time() - 1})), - # DELETE of expired token object - ('204 No Content', {}, '')])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 2) - - def test_validate_token_from_object_with_admin(self): - self.test_auth.app = FakeApp(iter([ - # GET of token object - ('200 Ok', {}, json.dumps({'account_id': 'AUTH_cfa', 'groups': - [{'name': 'act:usr'}, - {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 1}))])) - resp = Request.blank('/auth/v2/.token/AUTH_token' - ).get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals(resp.headers.get('x-auth-groups'), - 'act:usr,act,AUTH_cfa') - self.assert_(float(resp.headers['x-auth-ttl']) < 1, - resp.headers['x-auth-ttl']) - - def test_get_conn_default(self): - conn = self.test_auth.get_conn() - self.assertEquals( - conn.__class__, - auth.HTTPConnection) - self.assertEquals(conn.host, '127.0.0.1') - self.assertEquals(conn.port, 8080) - - def test_get_conn_default_https(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) - conn = local_auth.get_conn() - self.assertEquals( - conn.__class__, - auth.HTTPSConnection) - self.assertEquals(conn.host, '1.2.3.4') - self.assertEquals(conn.port, 443) - - def test_get_conn_overridden(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) - conn = \ - local_auth.get_conn( - urlparsed=auth.urlparse('http://5.6.7.8/v1')) - self.assertEquals( - conn.__class__, - auth.HTTPConnection) - self.assertEquals(conn.host, '5.6.7.8') - self.assertEquals(conn.port, 80) - - def test_get_conn_overridden_https(self): - local_auth = auth.filter_factory( - {'super_admin_key': 'supertest', - 'default_swift_cluster': 'local#http://1.2.3.4/v1'})(FakeApp()) - conn = \ - local_auth.get_conn( - urlparsed=auth.urlparse( - 'https://5.6.7.8/v1')) - self.assertEquals( - conn.__class__, - auth.HTTPSConnection) - self.assertEquals(conn.host, '5.6.7.8') - self.assertEquals(conn.port, 443) - - def test_get_itoken_fail_no_memcache(self): - exc = None - try: - self.test_auth.get_itoken({}) - except Exception as err: - exc = err - self.assertEquals(str(exc), - 'No memcache set up; required for Swauth middleware') - - def test_get_itoken_success(self): - fmc = FakeMemcache() - itk = self.test_auth.get_itoken( - {'swift.cache': fmc}) - 
self.assert_(itk.startswith('AUTH_itk'), itk) - expires, groups = fmc.get('AUTH_/auth/%s' % itk) - self.assert_(expires > time(), expires) - self.assertEquals( - groups, - 'gsmetadata,.reseller_admin,AUTH_gsmetadata') - - def test_get_admin_detail_fail_no_colon(self): - self.test_auth.app = FakeApp(iter([])) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/')), - None) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'usr'})), None) - self.assertRaises( - StopIteration, self.test_auth.get_admin_detail, - Request.blank('/', headers={'X-Auth-Admin-User': 'act:usr'})) - - def test_get_admin_detail_fail_user_not_found(self): - self.test_auth.app = FakeApp( - iter([('404 Not Found', {}, '')])) - self.assertEquals( - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})), None) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_admin_detail_fail_get_user_error(self): - self.test_auth.app = FakeApp(iter([ - ('503 Service Unavailable', {}, '')])) - exc = None - try: - self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})) - except Exception as err: - exc = err - self.assertEquals(str(exc), 'Could not get user object: ' - '/v1/AUTH_gsmetadata/act/usr 503 Service Unavailable') - self.assertEquals(self.test_auth.app.calls, 1) - - def test_get_admin_detail_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - detail = self.test_auth.get_admin_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'})) - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals( - detail, {'account': 'act', - 'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}]}) - - def test_get_user_detail_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - detail = self.test_auth.get_user_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'}), - 'act', 'usr') - self.assertEquals(self.test_auth.app.calls, 1) - detail_json = json.loads(detail) - self.assertEquals("plaintext:key", detail_json['auth']) - - def test_get_user_detail_fail_user_doesnt_exist(self): - self.test_auth.app = FakeApp( - iter([('404 Not Found', {}, '')])) - detail = self.test_auth.get_user_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'}), - 'act', 'usr') - self.assertEquals(self.test_auth.app.calls, 1) - self.assertEquals(detail, None) - - def test_get_user_detail_fail_exception(self): - self.test_auth.app = FakeApp(iter([ - ('503 Service Unavailable', {}, '')])) - exc = None - try: - detail = self.test_auth.get_user_detail( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'}), - 'act', 'usr') - except Exception as err: - exc = err - self.assertEquals(str(exc), 'Could not get user object: ' - '/v1/AUTH_gsmetadata/act/usr 503 Service Unavailable') - self.assertEquals(self.test_auth.app.calls, 1) - - def test_is_user_reseller_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".reseller_admin"}]}))])) - result = self.test_auth.is_user_reseller_admin( - 
Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'}), - 'act', 'usr') - self.assertEquals(self.test_auth.app.calls, 1) - self.assertTrue(result) - - def test_is_user_reseller_admin_fail(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({"auth": "plaintext:key", - "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]}))])) - result = self.test_auth.is_user_reseller_admin( - Request.blank('/', - headers={'X-Auth-Admin-User': 'act:usr'}), - 'act', 'usr') - self.assertEquals(self.test_auth.app.calls, 1) - self.assertFalse(result) - - def test_is_user_reseller_admin_fail_user_doesnt_exist(self): - self.test_auth.app = FakeApp( - iter([('404 Not Found', {}, '')])) - req = Request.blank('/', headers={'X-Auth-Admin-User': 'act:usr'}) - result = self.test_auth.is_user_reseller_admin(req, 'act', 'usr') - self.assertEquals(self.test_auth.app.calls, 1) - self.assertFalse(result) - self.assertFalse(req.credentials_valid) - - def test_credentials_match_success(self): - self.assert_(self.test_auth.credentials_match( - {'auth': 'plaintext:key'}, 'key')) - - def test_credentials_match_fail_no_details(self): - self.assert_( - not self.test_auth.credentials_match(None, 'notkey')) - - def test_credentials_match_fail_plaintext(self): - self.assert_(not self.test_auth.credentials_match( - {'auth': 'plaintext:key'}, 'notkey')) - - def test_is_user_changing_own_key_err(self): - # User does not exist - self.test_auth.app = FakeApp( - iter([('404 Not Found', {}, '')])) - req = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'key'}) - self.assert_( - not self.test_auth.is_user_changing_own_key(req, 'act:usr')) - self.assertEquals(self.test_auth.app.calls, 1) - - # user attempting to escalate himself as admin - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'key', - 'X-Auth-User-Admin': 'true'}) - self.assert_( - not self.test_auth.is_user_changing_own_key(req, 'act:usr')) - self.assertEquals(self.test_auth.app.calls, 1) - - # admin attempting to escalate himself as reseller_admin - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/adm', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:adm', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'key', - 'X-Auth-User-Reseller-Admin': 'true'}) - self.assert_( - not self.test_auth.is_user_changing_own_key(req, 'act:adm')) - self.assertEquals(self.test_auth.app.calls, 1) - - # different user - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/usr2', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'key'}) - self.assert_( - not self.test_auth.is_user_changing_own_key(req, 'act:usr2')) - self.assertEquals(self.test_auth.app.calls, 1) - - # wrong key - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": 
[{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'wrongkey', - 'X-Auth-User-Key': 'newkey'}) - self.assert_( - not self.test_auth.is_user_changing_own_key(req, 'act:usr')) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_is_user_changing_own_key_sucess(self): - # regular user - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, - {"name": "test"}], "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/usr', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:usr', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'newkey'}) - self.assert_( - self.test_auth.is_user_changing_own_key(req, 'act:usr')) - self.assertEquals(self.test_auth.app.calls, 1) - - # account admin - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}], - "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/adm', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:adm', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'newkey', - 'X-Auth-User-Admin': 'true'}) - self.assert_( - self.test_auth.is_user_changing_own_key(req, 'act:adm')) - self.assertEquals(self.test_auth.app.calls, 1) - - # reseller admin - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, - {"name": "test"}, {"name": ".admin"}, - {"name": ".reseller_admin"}], "auth": "plaintext:key"}))])) - req = Request.blank('/auth/v2/act/adm', - environ={ - 'REQUEST_METHOD': 'PUT'}, - headers={ - 'X-Auth-Admin-User': 'act:adm', - 'X-Auth-Admin-Key': 'key', - 'X-Auth-User-Key': 'newkey', - 'X-Auth-User-Reseller-Admin': 'true'}) - self.assert_( - self.test_auth.is_user_changing_own_key(req, 'act:adm')) - self.assertEquals(self.test_auth.app.calls, 1) - - def test_is_super_admin_success(self): - self.assert_( - self.test_auth.is_super_admin( - Request.blank( - '/', - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}))) - - def test_is_super_admin_fail_bad_key(self): - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'bad'}))) - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={'X-Auth-Admin-User': '.super_admin'}))) - self.assert_( - not self.test_auth.is_super_admin(Request.blank('/'))) - - def test_is_super_admin_fail_bad_user(self): - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'bad', - 'X-Auth-Admin-Key': 'supertest'}))) - self.assert_( - not self.test_auth.is_super_admin( - Request.blank('/', - headers={'X-Auth-Admin-Key': 'supertest'}))) - self.assert_( - not self.test_auth.is_super_admin(Request.blank('/'))) - - def test_is_reseller_admin_success_is_super_admin(self): - self.assert_( - self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}))) - - def test_is_reseller_admin_success_called_get_admin_detail( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - 
self.assert_( - self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_only_account_admin( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:adm'}, {'name': 'act'}, - {'name': '.admin'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_regular_user(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}))) - - def test_is_reseller_admin_fail_bad_key(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - not self.test_auth.is_reseller_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'bad'}))) - - def test_is_account_admin_success_is_super_admin(self): - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - '.super_admin', - 'X-Auth-Admin-Key': 'supertest'}), 'act')) - - def test_is_account_admin_success_is_reseller_admin( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_success(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:adm'}, {'name': 'act'}, - {'name': '.admin'}]}))])) - self.assert_( - self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:adm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_account_admin_different_account( - self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act2:adm'}, {'name': 'act2'}, - {'name': '.admin'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act2:adm', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_regular_user(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:usr', - 'X-Auth-Admin-Key': 'key'}), 'act')) - - def test_is_account_admin_fail_bad_key(self): - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps({'auth': 'plaintext:key', - 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, - {'name': '.admin'}, - {'name': '.reseller_admin'}]}))])) - self.assert_( - not self.test_auth.is_account_admin( - Request.blank('/', - headers={ - 'X-Auth-Admin-User': - 'act:rdm', - 'X-Auth-Admin-Key': 'bad'}), 
'act')) - - def test_reseller_admin_but_account_is_internal_use_only( - self): - req = Request.blank('/v1/AUTH_gsmetadata', - environ={'REQUEST_METHOD': 'GET'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_reseller_admin_but_account_is_exactly_reseller_prefix( - self): - req = Request.blank( - '/v1/AUTH_', - environ={'REQUEST_METHOD': 'GET'}) - req.remote_user = 'act:usr,act,.reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def _get_token_success_v1_0_encoded( - self, saved_user, saved_key, sent_user, - sent_key): - self.test_auth.app = FakeApp(iter([ - # GET of user object - ('200 Ok', {}, - json.dumps({"auth": "plaintext:%s" % saved_key, - "groups": [{'name': saved_user}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', - {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) - resp = Request.blank( - '/auth/v1.0', - headers={'X-Auth-User': sent_user, - 'X-Auth-Key': sent_key}).get_response(self.test_auth) - self.assertEquals(resp.status_int, 200) - self.assert_( - resp.headers.get('x-auth-token', - '').startswith('AUTH_tk'), - resp.headers.get('x-auth-token')) - self.assertEquals(resp.headers.get('x-auth-token'), - resp.headers.get('x-storage-token')) - self.assertEquals(resp.headers.get('x-storage-url'), - 'http://127.0.0.1:8080/v1/AUTH_cfa') - self.assertEquals( - json.loads(resp.body), - {"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) - self.assertEquals(self.test_auth.app.calls, 5) - - def test_get_token_success_v1_0_encoded1(self): - self._get_token_success_v1_0_encoded( - 'act:usr', 'key', 'act%3ausr', 'key') - - def test_get_token_success_v1_0_encoded2(self): - self._get_token_success_v1_0_encoded( - 'act:u s r', 'key', 'act%3au%20s%20r', 'key') - - def test_get_token_success_v1_0_encoded3(self): - self._get_token_success_v1_0_encoded( - 'act:u s r', 'k:e:y', 'act%3au%20s%20r', 'k%3Ae%3ay') - - def test_allowed_sync_hosts(self): - a = auth.filter_factory( - {'super_admin_key': 'supertest'})(FakeApp()) - self.assertEquals( - a.allowed_sync_hosts, - ['127.0.0.1']) - a = auth.filter_factory({ - 'super_admin_key': 'supertest', - 'allowed_sync_hosts': - '1.1.1.1,2.1.1.1, 3.1.1.1 , 4.1.1.1,, , 5.1.1.1'})(FakeApp()) - self.assertEquals( - a.allowed_sync_hosts, - ['1.1.1.1', '2.1.1.1', '3.1.1.1', '4.1.1.1', '5.1.1.1']) - - def test_reseller_admin_is_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'other', 'user': 'other:usr', - 'account_id': 'AUTH_other', - 'groups': [{'name': 'other:usr'}, {'name': 'other'}, - {'name': '.reseller_admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - req = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [True]) - - def test_admin_is_owner(self): - 
orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': [{'name': 'act:usr'}, {'name': 'act'}, - {'name': '.admin'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')])) - req = Request.blank( - '/v1/AUTH_cfa', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [True]) - - def test_regular_is_not_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append( - req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - self.test_auth.app = FakeApp(iter([ - ('200 Ok', {}, - json.dumps( - {'account': 'act', 'user': 'act:usr', - 'account_id': 'AUTH_cfa', - 'groups': - [{'name': 'act:usr'}, { - 'name': 'act'}], - 'expires': time() + 60})), - ('204 No Content', {}, '')]), acl='act:usr') - req = Request.blank('/v1/AUTH_cfa/c', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - self.assertEquals(owner_values, [False]) - - def test_sync_request_success(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - def test_sync_request_fail_key(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'wrongsecret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='othersecret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key=None) - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_fail_no_timestamp(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={'x-container-sync-key': 'secret'}) - req.remote_addr = '127.0.0.1' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_fail_sync_host(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', 
{}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': '123.456'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - - def test_sync_request_success_lb_sync_host(self): - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': - '123.456', - 'x-forwarded-for': '127.0.0.1'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - self.test_auth.app = FakeApp( - iter([('204 No Content', {}, '')]), - sync_key='secret') - req = Request.blank('/v1/AUTH_cfa/c/o', - environ={ - 'REQUEST_METHOD': 'DELETE'}, - headers={ - 'x-container-sync-key': - 'secret', - 'x-timestamp': - '123.456', - 'x-cluster-client-ip': '127.0.0.1'}) - req.remote_addr = '127.0.0.2' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 204) - - def _make_request(self, path, **kwargs): - req = Request.blank(path, **kwargs) - req.environ['swift.cache'] = FakeMemcache() - return req - - def test_override_asked_for_but_not_allowed(self): - self.test_auth = \ - auth.filter_factory( - {'allow_overrides': 'false'})(FakeApp()) - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.authorize) - - def test_override_asked_for_and_allowed(self): - self.test_auth = \ - auth.filter_factory( - {'allow_overrides': 'true'})(FakeApp()) - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue( - 'swift.authorize' not in resp.environ) - - def test_override_default_allowed(self): - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue( - 'swift.authorize' not in resp.environ) - - def test_token_too_long(self): - req = self._make_request('/v1/AUTH_account', headers={ - 'x-auth-token': 'a' * MAX_TOKEN_LENGTH}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertNotEquals( - resp.body, - 'Token exceeds maximum length.') - req = self._make_request('/v1/AUTH_account', headers={ - 'x-auth-token': 'a' * (MAX_TOKEN_LENGTH + 1)}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 400) - self.assertEquals( - resp.body, - 'Token exceeds maximum length.') - - def test_crazy_authorization(self): - req = self._make_request('/v1/AUTH_account', headers={ - 'authorization': 'somebody elses header value'}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 401) - self.assertEquals(resp.environ['swift.authorize'], - self.test_auth.denied_response) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/common/middleware/swiftkerbauth/__init__.py b/test/unit/common/middleware/swiftkerbauth/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/common/middleware/swiftkerbauth/test_kerbauth.py 
b/test/unit/common/middleware/swiftkerbauth/test_kerbauth.py deleted file mode 100644 index 537b8d3..0000000 --- a/test/unit/common/middleware/swiftkerbauth/test_kerbauth.py +++ /dev/null @@ -1,478 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import errno -import unittest -from time import time -from mock import patch, Mock -from test.unit import FakeMemcache -from swift.common.swob import Request, Response -from gluster.swift.common.middleware.swiftkerbauth import kerbauth as auth - -EXT_AUTHENTICATION_URL = "127.0.0.1" -REDIRECT_STATUS = 303 # HTTPSeeOther - - -def my_filter_factory(global_conf, **local_conf): - if 'ext_authentication_url' not in global_conf: - global_conf['ext_authentication_url'] = EXT_AUTHENTICATION_URL - conf = global_conf.copy() - conf.update(local_conf) - - def auth_filter(app): - return auth.KerbAuth(app, conf) - return auth_filter - -# Monkey patching filter_factory to always pass ext_authentication_url -# as a parameter. Absence of ext_authentication_url raises a RuntimeError - - -def patch_filter_factory(): - auth.filter_factory = my_filter_factory - - -def unpatch_filter_factory(): - reload(auth) - - -class FakeApp(object): - - def __init__(self, status_headers_body_iter=None, acl=None, sync_key=None): - self.calls = 0 - self.status_headers_body_iter = status_headers_body_iter - if not self.status_headers_body_iter: - self.status_headers_body_iter = iter([('404 Not Found', {}, '')]) - self.acl = acl - self.sync_key = sync_key - - def __call__(self, env, start_response): - self.calls += 1 - self.request = Request.blank('', environ=env) - if self.acl: - self.request.acl = self.acl - if self.sync_key: - self.request.environ['swift_sync_key'] = self.sync_key - if 'swift.authorize' in env: - resp = env['swift.authorize'](self.request) - if resp: - return resp(env, start_response) - status, headers, body = self.status_headers_body_iter.next() - return Response(status=status, headers=headers, - body=body)(env, start_response) - - -class TestKerbAuth(unittest.TestCase): - - # Patch auth.filter_factory() - patch_filter_factory() - - def setUp(self): - self.test_auth = \ - auth.filter_factory({'auth_method': 'active'})(FakeApp()) - self.test_auth_passive = \ - auth.filter_factory({'auth_method': 'passive'})(FakeApp()) - - def _make_request(self, path, **kwargs): - req = Request.blank(path, **kwargs) - req.environ['swift.cache'] = FakeMemcache() - return req - - def test_no_ext_authentication_url(self): - app = FakeApp() - try: - # Use original auth.filter_factory and NOT monkey patched version - unpatch_filter_factory() - auth.filter_factory({})(app) - except RuntimeError as e: - # Restore monkey patched version - patch_filter_factory() - self.assertTrue(e.args[0].startswith("Missing filter parameter " - "ext_authentication_url")) - - def test_reseller_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory({})(app) - self.assertEquals(ath.reseller_prefix, 'AUTH_') - ath = 
auth.filter_factory({'reseller_prefix': 'TEST'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - ath = auth.filter_factory({'reseller_prefix': 'TEST_'})(app) - self.assertEquals(ath.reseller_prefix, 'TEST_') - - def test_auth_prefix_init(self): - app = FakeApp() - ath = auth.filter_factory({})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory({'auth_prefix': ''})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory({'auth_prefix': '/'})(app) - self.assertEquals(ath.auth_prefix, '/auth/') - ath = auth.filter_factory({'auth_prefix': '/test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory({'auth_prefix': '/test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory({'auth_prefix': 'test/'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - ath = auth.filter_factory({'auth_prefix': 'test'})(app) - self.assertEquals(ath.auth_prefix, '/test/') - - def test_top_level_redirect(self): - req = self._make_request('/') - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - self.assertEquals(req.environ['swift.authorize'], - self.test_auth.denied_response) - - def test_passive_top_level_deny(self): - req = self._make_request('/') - resp = req.get_response(self.test_auth_passive) - self.assertEquals(resp.status_int, 401) - self.assertEquals(req.environ['swift.authorize'], - self.test_auth_passive.denied_response) - - def test_passive_deny_invalid_token(self): - req = self._make_request('/v1/AUTH_account', - headers={'X-Auth-Token': 'AUTH_t'}) - resp = req.get_response(self.test_auth_passive) - self.assertEquals(resp.status_int, 401) - - def test_override_asked_for_and_allowed(self): - self.test_auth = \ - auth.filter_factory({'allow_overrides': 'true'})(FakeApp()) - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue('swift.authorize' not in req.environ) - - def test_override_default_allowed(self): - req = self._make_request('/v1/AUTH_account', - environ={'swift.authorize_override': True}) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 404) - self.assertTrue('swift.authorize' not in req.environ) - - def test_options_call(self): - req = self._make_request('/v1/AUTH_cfa/c/o', - environ={'REQUEST_METHOD': 'OPTIONS'}) - resp = self.test_auth.authorize(req) - self.assertEquals(resp, None) - - def test_auth_deny_non_reseller_prefix_no_override(self): - fake_authorize = lambda x: Response(status='500 Fake') - req = self._make_request('/v1/BLAH_account', - headers={'X-Auth-Token': 'BLAH_t'}, - environ={'swift.authorize': fake_authorize} - ) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, 500) - self.assertEquals(req.environ['swift.authorize'], fake_authorize) - - def test_authorize_acl_group_access(self): - req = self._make_request('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = self._make_request('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act' - self.assertEquals(self.test_auth.authorize(req), None) - req = self._make_request('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' - req.acl = 'act:usr' - self.assertEquals(self.test_auth.authorize(req), None) - req = self._make_request('/v1/AUTH_cfa') - req.remote_user = 'act:usr,act' 
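The deleted kerbauth tests above exercise Swift-style group ACL checks: a request whose remote_user groups share no entry with the container ACL is denied with 403, while any overlap (the bare account group 'act' or the full 'act:usr' entry) is allowed. A minimal sketch of that matching rule, assuming a hypothetical acl_allows() helper rather than the middleware's actual authorize() implementation:

# Illustrative sketch only: shows the group/ACL overlap rule the tests
# above assert. acl_allows() is a hypothetical helper, not part of
# swiftkerbauth; the real middleware performs this check inside authorize().
def acl_allows(remote_user, acl):
    if not acl:
        return False  # no ACL set: plain group access is denied (403 in the tests)
    user_groups = set(g.strip() for g in remote_user.split(','))
    acl_entries = set(e.strip() for e in acl.split(','))
    return bool(user_groups & acl_entries)  # any shared entry grants access

assert acl_allows('act:usr,act', 'act')          # account group match -> allowed
assert acl_allows('act:usr,act', 'act:usr')      # exact user match -> allowed
assert not acl_allows('act:usr,act', '')         # no ACL -> denied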
- - def test_deny_cross_reseller(self): - # Tests that cross-reseller is denied, even if ACLs/group names match - req = self._make_request('/v1/OTHER_cfa') - req.remote_user = 'act:usr,act,AUTH_cfa' - req.acl = 'act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - - def test_authorize_acl_referer_after_user_groups(self): - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr' - req.acl = '.r:*,act:usr' - self.assertEquals(self.test_auth.authorize(req), None) - - def test_detect_reseller_request(self): - req = self._make_request('/v1/AUTH_admin', - headers={'X-Auth-Token': 'AUTH_t'}) - cache_key = 'AUTH_/token/AUTH_t' - cache_entry = (time() + 3600, '.reseller_admin') - req.environ['swift.cache'].set(cache_key, cache_entry) - req.get_response(self.test_auth) - self.assertTrue(req.environ.get('reseller_request', False)) - - def test_regular_is_not_owner(self): - orig_authorize = self.test_auth.authorize - owner_values = [] - - def mitm_authorize(req): - rv = orig_authorize(req) - owner_values.append(req.environ.get('swift_owner', False)) - return rv - - self.test_auth.authorize = mitm_authorize - - req = self._make_request( - '/v1/AUTH_cfa/c', - headers={'X-Auth-Token': 'AUTH_t'}) - req.remote_user = 'act:usr' - self.test_auth.authorize(req) - self.assertEquals(owner_values, [False]) - - def test_no_memcache(self): - env = {'swift.cache': None} - try: - self.test_auth.get_groups(env, None) - except Exception as e: - self.assertTrue(e.args[0].startswith("Memcache required")) - - def test_handle_request(self): - req = self._make_request('/auth/v1.0') - resp = self.test_auth.handle_request(req) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - - def test_handle_request_bad_request(self): - req = self._make_request('////') - resp = self.test_auth.handle_request(req) - self.assertEquals(resp.status_int, 404) - - def test_handle_request_no_handler(self): - req = self._make_request('/blah/blah/blah/blah') - resp = self.test_auth.handle_request(req) - self.assertEquals(resp.status_int, 400) - - def test_handle_get_token_bad_request(self): - req = self._make_request('/blah/blah') - resp = self.test_auth.handle_get_token(req) - self.assertEquals(resp.status_int, 400) - req = self._make_request('/////') - resp = self.test_auth.handle_get_token(req) - self.assertEquals(resp.status_int, 404) - - def test_passive_handle_get_token_no_user_or_key(self): - #No user and key - req = self._make_request('/auth/v1.0') - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - #User given but no key - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user'}) - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - - def test_passive_handle_get_token_account_in_req_path(self): - req = self._make_request('/v1/test/auth', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - _mock_run_kinit = Mock(return_value=0) - _mock_get_groups = Mock(return_value="user,auth_test") - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.get_groups_from_username', - _mock_get_groups): - resp = self.test_auth_passive.handle_get_token(req) - _mock_run_kinit.assert_called_once_with('user', 'password') - self.assertEquals(_mock_get_groups.call_count, 2) - self.assertEquals(resp.status_int, 200) - 
self.assertTrue(resp.headers['X-Auth-Token'] is not None) - self.assertTrue(resp.headers['X-Storage-Token'] is not None) - self.assertTrue(resp.headers['X-Storage-Url'] is not None) - - def test_passive_handle_get_token_user_invalid_or_no__account(self): - #X-Auth-User not in acc:user format - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'user'}) - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - req = self._make_request('/v1/test/auth', - headers={'X-Auth-User': 'user'}) - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - # Account name mismatch - req = self._make_request('/v1/test/auth', - headers={'X-Auth-User': 'wrongacc:user'}) - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - - def test_passive_handle_get_token_no_kinit(self): - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - _mock_run_kinit = Mock(side_effect=OSError(errno.ENOENT, - os.strerror(errno.ENOENT))) - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 500) - self.assertTrue("kinit command not found" in resp.body) - _mock_run_kinit.assert_called_once_with('user', 'password') - - def test_passive_handle_get_token_kinit_fail(self): - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - _mock_run_kinit = Mock(return_value=1) - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - _mock_run_kinit.assert_called_once_with('user', 'password') - - def test_passive_handle_get_token_kinit_success_token_not_present(self): - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - _mock_run_kinit = Mock(return_value=0) - _mock_get_groups = Mock(return_value="user,auth_test") - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.get_groups_from_username', - _mock_get_groups): - resp = self.test_auth_passive.handle_get_token(req) - _mock_run_kinit.assert_called_once_with('user', 'password') - self.assertEquals(_mock_get_groups.call_count, 2) - self.assertEquals(resp.status_int, 200) - self.assertTrue(resp.headers['X-Auth-Token'] is not None) - self.assertTrue(resp.headers['X-Storage-Token'] is not None) - self.assertTrue(resp.headers['X-Storage-Url'] is not None) - - def test_passive_handle_get_token_kinit_realm_and_memcache(self): - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - req.environ['swift.cache'] = None - _auth_passive = \ - auth.filter_factory({'auth_method': 'passive', - 'realm_name': 'EXAMPLE.COM'})(FakeApp()) - _mock_run_kinit = Mock(return_value=0) - _mock_get_groups = Mock(return_value="user,auth_test") - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.get_groups_from_username', - _mock_get_groups): - try: - _auth_passive.handle_get_token(req) - except Exception as e: - self.assertTrue(e.args[0].startswith("Memcache " - "required")) - 
else: - self.fail("Expected Exception - Memcache required") - _mock_run_kinit.assert_called_once_with('user@EXAMPLE.COM', 'password') - _mock_get_groups.assert_called_once_with('user') - - def test_passive_handle_get_token_user_in_any__account(self): - req = self._make_request('/auth/v1.0', - headers={'X-Auth-User': 'test:user', - 'X-Auth-Key': 'password'}) - _mock_run_kinit = Mock(return_value=0) - _mock_get_groups = Mock(return_value="user,auth_blah") - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.run_kinit', _mock_run_kinit): - with patch('gluster.swift.common.middleware.swiftkerbauth.kerbauth.get_groups_from_username', - _mock_get_groups): - resp = self.test_auth_passive.handle_get_token(req) - self.assertEquals(resp.status_int, 401) - _mock_run_kinit.assert_called_once_with('user', 'password') - _mock_get_groups.assert_called_once_with('user') - - def test_handle(self): - req = self._make_request('/auth/v1.0') - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - - def test_authorize_invalid_req(self): - req = self._make_request('/') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 404) - - def test_authorize_set_swift_owner(self): - req = self._make_request('/v1/AUTH_test/c1/o1') - req.remote_user = 'test,auth_reseller_admin' - resp = self.test_auth.authorize(req) - self.assertEquals(req.environ['swift_owner'], True) - self.assertTrue(resp is None) - req = self._make_request('/v1/AUTH_test/c1/o1') - req.remote_user = 'test,auth_test' - resp = self.test_auth.authorize(req) - self.assertEquals(req.environ['swift_owner'], True) - self.assertTrue(resp is None) - - def test_authorize_swift_sync_key(self): - req = self._make_request( - '/v1/AUTH_cfa/c/o', - environ={'swift_sync_key': 'secret'}, - headers={'x-container-sync-key': 'secret', - 'x-timestamp': '123.456'}) - resp = self.test_auth.authorize(req) - self.assertTrue(resp is None) - - def test_authorize_acl_referrer_access(self): - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*,.rlistings' - self.assertEquals(self.test_auth.authorize(req), None) - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, 403) - req = self._make_request('/v1/AUTH_cfa/c') - req.remote_user = 'act:usr,act' - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals(self.test_auth.authorize(req), None) - req = self._make_request('/v1/AUTH_cfa/c') - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - req = self._make_request('/v1/AUTH_cfa/c') - req.acl = '.r:*,.rlistings' - self.assertEquals(self.test_auth.authorize(req), None) - req = self._make_request('/v1/AUTH_cfa/c') - req.acl = '.r:*' # No listings allowed - resp = self.test_auth.authorize(req) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - req = self._make_request('/v1/AUTH_cfa/c') - req.acl = '.r:.example.com,.rlistings' - resp = self.test_auth.authorize(req) - 
self.assertEquals(resp.status_int, REDIRECT_STATUS) - req = self._make_request('/v1/AUTH_cfa/c') - req.referer = 'http://www.example.com/index.html' - req.acl = '.r:.example.com,.rlistings' - self.assertEquals(self.test_auth.authorize(req), None) - - def test_handle_x_storage_token(self): - req = self._make_request( - '/auth/v1.0', - headers={'x-storage-token': 'blahblah', }) - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - - def test_invalid_token(self): - req = self._make_request('/k1/test') - req.environ['HTTP_X_AUTH_TOKEN'] = 'AUTH_blahblahblah' - resp = req.get_response(self.test_auth) - self.assertEquals(resp.status_int, REDIRECT_STATUS) - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/common/middleware/swiftkerbauth/test_kerbauth_utils.py b/test/unit/common/middleware/swiftkerbauth/test_kerbauth_utils.py deleted file mode 100644 index 2a4e90b..0000000 --- a/test/unit/common/middleware/swiftkerbauth/test_kerbauth_utils.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest -import re -from time import time -from test.unit import FakeMemcache -from gluster.swift.common.middleware.swiftkerbauth import kerbauth_utils as ku - - -class TestKerbUtils(unittest.TestCase): - - def test_get_remote_user(self): - env = {'REMOTE_USER': "auth_admin@EXAMPLE.COM"} - result = ku.get_remote_user(env) - self.assertEqual(result, "auth_admin") - - def test_get_remote_user_err(self): - env = {'REMOTE_USER': "auth_admin"} - try: - ku.get_remote_user(env) - except RuntimeError as err: - self.assertTrue(err.args[0].startswith("Malformed REMOTE_USER")) - else: - self.fail("Expected RuntimeError") - - def test_get_auth_data(self): - mc = FakeMemcache() - expiry = time() + 100 - ku.set_auth_data(mc, "root", "AUTH_tk", expiry, "root,admin") - (token, expires, groups) = ku.get_auth_data(mc, "root") - self.assertEqual(("AUTH_tk", expiry, "root,admin"), - (token, expires, groups)) - - def test_get_auth_data_err(self): - mc = FakeMemcache() - (token, expires, groups) = ku.get_auth_data(mc, "root") - self.assertEqual((token, expires, groups), (None, None, None)) - - expiry = time() - 1 - ku.set_auth_data(mc, "root", "AUTH_tk", expiry, "root,admin") - (token, expires, groups) = ku.get_auth_data(mc, "root") - self.assertEqual((token, expires, groups), (None, None, None)) - - def test_set_auth_data(self): - mc = FakeMemcache() - expiry = time() + 100 - ku.set_auth_data(mc, "root", "AUTH_tk", expiry, "root,admin") - - def test_generate_token(self): - token = ku.generate_token() - matches = re.match('AUTH_tk[a-f0-9]{32}', token) - self.assertTrue(matches is not None) - - def test_get_groups_from_username(self): - groups = ku.get_groups_from_username("root") - self.assertTrue("root" in groups) - - def test_get_groups_from_username_err(self): - try: - ku.get_groups_from_username("Zroot") - except RuntimeError as err: - self.assertTrue(err.args[0].startswith("Failure running 
id -G")) - else: - self.fail("Expected RuntimeError") diff --git a/test/unit/common/test_constraints.py b/test/unit/common/test_constraints.py index 2e4d55a..952e1e6 100644 --- a/test/unit/common/test_constraints.py +++ b/test/unit/common/test_constraints.py @@ -14,142 +14,35 @@ # limitations under the License. import unittest -import swift.common.constraints -from nose import SkipTest from mock import Mock, patch from gluster.swift.common import constraints as cnt -def mock_glusterfs_mount(*args, **kwargs): - return True - - -def mock_constraints_conf_int(*args, **kwargs): - return 1000 - - def mock_check_object_creation(*args, **kwargs): return None -def mock_check_mount(*args, **kwargs): - return True - - -def mock_check_mount_err(*args, **kwargs): - return False - - class TestConstraints(unittest.TestCase): """ Tests for common.constraints """ - def tearDown(self): - cnt.set_object_name_component_length() - - def test_set_object_name_component_length(self): - len = cnt.get_object_name_component_length() - cnt.set_object_name_component_length(len+1) - self.assertEqual(len, cnt.get_object_name_component_length()-1) - - if hasattr(swift.common.constraints, 'constraints_conf_int'): - len = swift.common.constraints.constraints_conf_int( - 'max_object_name_component_length', 255) - cnt.set_object_name_component_length() - self.assertEqual(len, cnt.get_object_name_component_length()) - - with patch('swift.common.constraints.constraints_conf_int', - mock_constraints_conf_int): - cnt.set_object_name_component_length() - self.assertEqual(cnt.get_object_name_component_length(), 1000) - def test_validate_obj_name_component(self): - max_obj_len = cnt.get_object_name_component_length() - self.assertFalse(cnt.validate_obj_name_component('tests'*(max_obj_len/5))) - cnt.set_object_name_component_length(300) - self.assertFalse(cnt.validate_obj_name_component('tests'*60)) + max_obj_len = cnt.SOF_MAX_OBJECT_NAME_LENGTH + self.assertFalse( + cnt.validate_obj_name_component('tests' * (max_obj_len / 5))) + self.assertEqual(cnt.validate_obj_name_component( + 'tests' * 60), 'too long (300)') def test_validate_obj_name_component_err(self): - max_obj_len = cnt.get_object_name_component_length() - self.assertTrue(cnt.validate_obj_name_component('tests'*(max_obj_len/5+1))) + max_obj_len = cnt.SOF_MAX_OBJECT_NAME_LENGTH + self.assertTrue(cnt.validate_obj_name_component( + 'tests' * (max_obj_len / 5 + 1))) self.assertTrue(cnt.validate_obj_name_component('.')) self.assertTrue(cnt.validate_obj_name_component('..')) self.assertTrue(cnt.validate_obj_name_component('')) - def test_validate_headers(self): - req = Mock() - req.headers = [] - self.assertEqual(cnt.validate_headers(req), '') - req.headers = ['x-some-header'] - self.assertEqual(cnt.validate_headers(req), '') - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest - req.headers = ['x-delete-at', 'x-some-header'] - self.assertNotEqual(cnt.validate_headers(req), '') - req.headers = ['x-delete-after', 'x-some-header'] - self.assertNotEqual(cnt.validate_headers(req), '') - req.headers = ['x-delete-at', 'x-delete-after', 'x-some-header'] - self.assertNotEqual(cnt.validate_headers(req), '') - - def test_validate_headers_ignoring_config_set(self): - with patch('gluster.swift.common.constraints.' 
- 'Glusterfs._ignore_unsupported_headers', True): - req = Mock() - req.headers = [] - self.assertEqual(cnt.validate_headers(req), '') - req.headers = ['x-some-header'] - self.assertEqual(cnt.validate_headers(req), '') - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest - req.headers = ['x-delete-at', 'x-some-header'] - self.assertEqual(cnt.validate_headers(req), '') - req.headers = ['x-delete-after', 'x-some-header'] - self.assertEqual(cnt.validate_headers(req), '') - req.headers = ['x-delete-at', 'x-delete-after', 'x-some-header'] - self.assertEqual(cnt.validate_headers(req), '') - - def test_gluster_check_metadata(self): - mock_check_metadata = Mock() - with patch('gluster.swift.common.constraints.__check_metadata', - mock_check_metadata): - req = Mock() - req.headers = [] - cnt.gluster_check_metadata(req, 'object') - self.assertTrue(1, mock_check_metadata.call_count) - cnt.gluster_check_metadata(req, 'object', POST=False) - self.assertTrue(1, mock_check_metadata.call_count) - req.headers = ['x-some-header'] - self.assertEqual(cnt.gluster_check_metadata(req, 'object', POST=False), None) - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest - req.headers = ['x-delete-at', 'x-some-header'] - self.assertNotEqual(cnt.gluster_check_metadata(req, 'object', POST=False), None) - req.headers = ['x-delete-after', 'x-some-header'] - self.assertNotEqual(cnt.gluster_check_metadata(req, 'object', POST=False), None) - req.headers = ['x-delete-at', 'x-delete-after', 'x-some-header'] - self.assertNotEqual(cnt.gluster_check_metadata(req, 'object', POST=False), None) - - def test_gluster_check_object_creation(self): - with patch('gluster.swift.common.constraints.__check_object_creation', + def test_sof_check_object_creation(self): + with patch('gluster.swift.common.constraints.swift_check_object_creation', mock_check_object_creation): req = Mock() req.headers = [] - self.assertFalse(cnt.gluster_check_object_creation(req, 'dir/z')) - - def test_gluster_check_object_creation_err(self): - with patch('gluster.swift.common.constraints.__check_object_creation', - mock_check_object_creation): - req = Mock() - req.headers = [] - self.assertTrue(cnt.gluster_check_object_creation(req, 'dir/.')) - #TODO: Although we now support x-delete-at and x-delete-after, - #retained this test case as we may add some other header to - #unsupported list in future - raise SkipTest - req.headers = ['x-delete-at'] - self.assertTrue(cnt.gluster_check_object_creation(req, 'dir/z')) + self.assertFalse(cnt.sof_check_object_creation(req, 'dir/z')) diff --git a/test/unit/common/test_diskdir.py b/test/unit/common/test_diskdir.py deleted file mode 100644 index f32c3ad..0000000 --- a/test/unit/common/test_diskdir.py +++ /dev/null @@ -1,1371 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -""" Tests for gluster.swift.common.DiskDir """ - -import os -import errno -import tempfile -import cPickle as pickle -import unittest -import shutil -import tarfile -import hashlib -from time import time -from swift.common.utils import normalize_timestamp -from gluster.swift.common import utils -import gluster.swift.common.Glusterfs -from test_utils import _initxattr, _destroyxattr, _setxattr, _getxattr -from test.unit import FakeLogger - -def setup(): - global _saved_RUN_DIR, _saved_do_getsize - _saved_do_getsize = gluster.swift.common.Glusterfs._do_getsize - gluster.swift.common.Glusterfs._do_getsize = True - _saved_RUN_DIR = gluster.swift.common.Glusterfs.RUN_DIR - gluster.swift.common.Glusterfs.RUN_DIR = '/tmp/gluster_unit_tests/run' - try: - os.makedirs(gluster.swift.common.Glusterfs.RUN_DIR) - except OSError as e: - if e.errno != errno.EEXIST: - raise - - -import gluster.swift.common.DiskDir as dd - - -def teardown(): - dd._db_file = "" - shutil.rmtree(gluster.swift.common.Glusterfs.RUN_DIR) - gluster.swift.common.Glusterfs.RUN_DIR = _saved_RUN_DIR - gluster.swift.common.Glusterfs._do_getsize = _saved_do_getsize - - -def timestamp_in_range(ts, base): - low = normalize_timestamp(base - 5) - high = normalize_timestamp(base + 5) - assert low <= ts, "timestamp %s is less than %s" % (ts, low) - assert high >= ts, "timestamp %s is greater than %s" % (ts, high) - - -class TestDiskDirModuleFunctions(unittest.TestCase): - """ Tests for gluster.swift.common.DiskDir module functions """ - - def test__read_metadata(self): - def fake_read_metadata(p): - return { 'a': 1, 'b': ('c', 5) } - orig_rm = dd.read_metadata - dd.read_metadata = fake_read_metadata - try: - md = dd._read_metadata("/tmp/foo") - finally: - dd.read_metadata = orig_rm - assert md['a'] == (1, 0) - assert md['b'] == ('c', 5) - - def test_filter_end_marker(self): - in_objs, end_marker = [], '' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == [] - - in_objs, end_marker = [], 'abc' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == [] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], '' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == [] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'ABC' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == [] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'efg' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == ['abc_123', 'abc_456', 'abc_789', 'def_101'] - - # Input not sorted, so we should only expect one name - in_objs, end_marker = ['abc_123', 'def_101', 'abc_456', 'abc_789'], 'abc_789' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == ['abc_123',] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_789' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == ['abc_123', 'abc_456'] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_5' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == ['abc_123', 'abc_456'] - - in_objs, end_marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_123' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == [] - - in_objs, end_marker = ['abc_123', 'abc_456', 
'abc_789', 'def_101'], 'def_101' - out_objs = dd.filter_end_marker(in_objs, end_marker) - assert list(out_objs) == ['abc_123', 'abc_456', 'abc_789'] - - def test_filter_marker(self): - in_objs, marker = [], '' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == [] - - in_objs, marker = [], 'abc' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == [] - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], '' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == in_objs - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'ABC' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == in_objs - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'efg' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == [] - - # Input not sorted, so we should expect the names as listed - in_objs, marker = ['abc_123', 'def_101', 'abc_456', 'abc_789'], 'abc_456' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == ['def_101', 'abc_789'] - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_456' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == ['abc_789', 'def_101'] - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_5' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == ['abc_789', 'def_101'] - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc_123' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == ['abc_456', 'abc_789', 'def_101'] - - in_objs, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'def_101' - out_objs = dd.filter_marker(in_objs, marker) - assert list(out_objs) == [] - - def test_filter_prefix(self): - in_objs, prefix = [], '' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == [] - - in_objs, prefix = [], 'abc' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == [] - - in_objs, prefix = ['abc_123', 'abc_456', 'abc_789', 'def_101'], '' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == in_objs - - in_objs, prefix = ['abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == ['abc_123', 'abc_456', 'abc_789'] - - in_objs, prefix = ['ABC_123', 'ABC_456', 'abc_123', 'abc_456', 'abc_789', 'def_101'], 'abc' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == ['abc_123', 'abc_456', 'abc_789'] - - in_objs, prefix = ['abc_123', 'def_101', 'abc_456', 'abc_789'], 'abc' - out_objs = dd.filter_prefix(in_objs, prefix) - assert list(out_objs) == ['abc_123',] - - def test_filter_delimiter(self): - in_objs, delimiter, prefix, marker = [], None, '', '' - try: - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - except AssertionError: - pass - except Exception: - self.fail("Failed to raise assertion") - - in_objs, delimiter, prefix, marker = [], '', '', '' - try: - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - except AssertionError: - pass - except Exception: - self.fail("Failed to raise assertion") - - in_objs, delimiter, prefix, marker = [], str(255), '', '' - try: - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - except AssertionError: - pass - except Exception: - self.fail("Failed to raise assertion") - - in_objs, delimiter, prefix, marker = [], '_', '', '' - out_objs = dd.filter_delimiter(in_objs, 
delimiter, prefix, marker) - assert list(out_objs) == [] - - in_objs, delimiter, prefix, marker = ['abc_'], '_', '', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - assert list(out_objs) == in_objs - - in_objs, delimiter, prefix, marker = ['abc_123', 'abc_456'], '_', '', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - assert list(out_objs) == ['abc_'] - - in_objs, delimiter, prefix, marker = ['abc_123', 'abc_456', 'def_123', 'def_456'], '_', '', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - assert list(out_objs) == ['abc_', 'def_'] - - in_objs, delimiter, prefix, marker = ['abc_123', 'abc_456', 'abc_789', 'def_101'], '_', 'abc_', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - l = list(out_objs) - assert l == ['abc_123', 'abc_456', 'abc_789'], repr(l) - - in_objs, delimiter, prefix, marker = ['abc_123_a', 'abc_456', 'abc_789_', 'def_101'], '_', 'abc_', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker) - l = list(out_objs) - assert l == ['abc_123_', 'abc_456', 'abc_789_'], repr(l) - - in_objs, delimiter, prefix, marker, path = ['abc_123_a', 'abc_456', 'abc_789_', 'def_101'], '_', 'abc_', '', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker, path) - l = list(out_objs) - # FIXME: This appears to be a bug due to this upstream swift reference - # implementation of list_objects_iter, where the presence of a path - # forces a code path that does not add the match on a delimiter - assert l == ['abc_456', 'abc_789_'], repr(l) - - in_objs, delimiter, prefix, marker, path = ['abc/123', 'abc/456', 'def/123', 'def/456'], '/', 'abc/', '', '' - out_objs = dd.filter_delimiter(in_objs, delimiter, prefix, marker, path) - l = list(out_objs) - assert l == ['abc/123', 'abc/456'], repr(l) - - -class TestDiskCommon(unittest.TestCase): - """ Tests for gluster.swift.common.DiskDir.DiskCommon """ - - def setUp(self): - _initxattr() - self.fake_logger = FakeLogger() - self.td = tempfile.mkdtemp() - self.fake_drives = [] - self.fake_accounts = [] - for i in range(0,3): - self.fake_drives.append("drv%d" % i) - os.makedirs(os.path.join(self.td, self.fake_drives[i])) - self.fake_accounts.append(self.fake_drives[i]) - - def tearDown(self): - _destroyxattr() - shutil.rmtree(self.td) - - def test_constructor(self): - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - assert dc.metadata == {} - assert dc.db_file == dd._db_file - assert dc.pending_timeout == 10 - assert dc.stale_reads_ok is False - assert dc.root == self.td - assert dc.logger == self.fake_logger - assert dc.account == self.fake_accounts[0] - assert dc.datadir == os.path.join(self.td, self.fake_drives[0]) - assert dc._dir_exists is None - - def test__dir_exists_read_metadata_exists(self): - datadir = os.path.join(self.td, self.fake_drives[0]) - fake_md = { "fake": (True,0) } - fake_md_p = pickle.dumps(fake_md, utils.PICKLE_PROTOCOL) - _setxattr(datadir, utils.METADATA_KEY, fake_md_p) - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - dc._dir_exists_read_metadata() - assert dc.metadata == fake_md, repr(dc.metadata) - assert dc.db_file == dd._db_file - assert dc.pending_timeout == 10 - assert dc.stale_reads_ok is False - assert dc.root == self.td - assert dc.logger == self.fake_logger - assert dc.account == self.fake_accounts[0] - assert dc.datadir == datadir - assert dc._dir_exists is True - - def 
test__dir_exists_read_metadata_does_not_exist(self): - dc = dd.DiskCommon(self.td, "dne0", "dne0", self.fake_logger) - dc._dir_exists_read_metadata() - assert dc.metadata == {} - assert dc.db_file == dd._db_file - assert dc.pending_timeout == 10 - assert dc.stale_reads_ok is False - assert dc.root == self.td - assert dc.logger == self.fake_logger - assert dc.account == "dne0" - assert dc.datadir == os.path.join(self.td, "dne0") - assert dc._dir_exists is False - - def test_is_deleted(self): - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - assert dc.is_deleted() == False - - def test_update_metadata(self): - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - utils.create_container_metadata(dc.datadir) - dc.metadata = dd._read_metadata(dc.datadir) - md_copy = dc.metadata.copy() - - def _mock_write_metadata(path, md): - self.fail("write_metadata should not have been called") - - orig_wm = dd.write_metadata - dd.write_metadata = _mock_write_metadata - try: - dc.update_metadata({}) - assert dc.metadata == md_copy - dc.update_metadata(md_copy) - assert dc.metadata == md_copy - finally: - dd.write_metadata = orig_wm - - dc.update_metadata({'X-Container-Meta-foo': '42'}) - assert 'X-Container-Meta-foo' in dc.metadata - assert dc.metadata['X-Container-Meta-foo'] == '42' - md = pickle.loads(_getxattr(dc.datadir, utils.METADATA_KEY)) - assert dc.metadata == md, "%r != %r" % (dc.metadata, md) - del dc.metadata['X-Container-Meta-foo'] - assert dc.metadata == md_copy - - def test_empty_dir_is_not_empty(self): - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - os.makedirs(os.path.join(self.td, self.fake_drives[0], 'aaabbbccc')) - self.assertFalse(dc.empty()) - - def test_empty_dir_is_empty(self): - dc = dd.DiskCommon(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - self.assertTrue(dc.empty()) - - def test_empty_dir_does_not_exist(self): - dc = dd.DiskCommon(self.td, 'non_existent_drive', - self.fake_accounts[0], self.fake_logger) - self.assertTrue(dc.empty()) - - -class TestContainerBroker(unittest.TestCase): - """ - Tests for DiskDir.DiskDir class (duck-typed - swift.common.db.ContainerBroker). 
- """ - - def __init__(self, *args, **kwargs): - super(TestContainerBroker, self).__init__(*args, **kwargs) - self.initial_ts = normalize_timestamp('1') - - def setUp(self): - _initxattr() - self.path = tempfile.mkdtemp() - self.drive = 'drv' - self.container = None - - def tearDown(self): - self.container = None - _destroyxattr() - shutil.rmtree(self.path) - - def _get_broker(self, account=None, container=None): - assert account is not None - assert container is not None - self.container = os.path.join(self.path, self.drive, container) - return dd.DiskDir(self.path, self.drive, account=account, - container=container, logger=FakeLogger()) - - def _create_file(self, p): - fullname = os.path.join(self.container, p) - dirs = os.path.dirname(fullname) - try: - os.makedirs(dirs) - except OSError as e: - if e.errno != errno.EEXIST: - raise - with open(fullname, 'w') as fp: - fp.write("file path: %s\n" % fullname) - return fullname - - def test_creation(self): - # Test swift.common.db.ContainerBroker.__init__ - broker = self._get_broker(account='a', container='c') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.container)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - - def test_creation_existing(self): - # Test swift.common.db.ContainerBroker.__init__ - os.makedirs(os.path.join(self.path, self.drive, 'c')) - broker = self._get_broker(account='a', container='c') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.container)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - - def test_creation_existing_bad_metadata(self): - # Test swift.common.db.ContainerBroker.__init__ - container = os.path.join(self.path, self.drive, 'c') - os.makedirs(container) - utils.write_metadata(container, dict(a=1, b=2)) - broker = self._get_broker(account='a', container='c') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.container)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - - def test_empty(self): - # Test swift.common.db.ContainerBroker.empty - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - obj = self._create_file('o.txt') - self.assert_(not broker.empty()) - os.unlink(obj) - self.assert_(broker.empty()) - - def test_put_object(self): - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain', - 'd41d8cd98f00b204e9800998ecf8427e') - # put_object() should be a NOOP - self.assert_(broker.empty()) - - def test_delete_object(self): - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - obj = self._create_file('o.txt') - self.assert_(not broker.empty()) - broker.delete_object('o', normalize_timestamp(time())) - # delete_object() should be a NOOP - self.assert_(not broker.empty()) - os.unlink(obj) - self.assert_(broker.empty()) - 
- def test_get_info(self): - # Test swift.common.db.ContainerBroker.get_info - __save_config = \ - gluster.swift.common.Glusterfs._container_update_object_count - gluster.swift.common.Glusterfs._container_update_object_count = True - broker = self._get_broker(account='test1', - container='test2') - broker.initialize(self.initial_ts) - - info = broker.get_info() - self.assertEquals(info['account'], 'test1') - self.assertEquals(info['container'], 'test2') - - info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) - - obj1 = os.path.join(self.container, 'o1') - with open(obj1, 'w') as fp: - fp.write("%s\n" % ('x' * 122)) - info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 123) - - obj2 = os.path.join(self.container, 'o2') - with open(obj2, 'w') as fp: - fp.write("%s\n" % ('x' * 122)) - info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 246) - - with open(obj2, 'w') as fp: - fp.write("%s\n" % ('x' * 999)) - info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 1123) - - os.unlink(obj1) - info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 1000) - - os.unlink(obj2) - info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) - - info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) - gluster.swift.common.Glusterfs._container_update_object_count = \ - __save_config - - def test_get_info_nonexistent_container(self): - broker = dd.DiskDir(self.path, self.drive, account='no_account', - container='no_container', logger=FakeLogger()) - info = broker.get_info() - - # - # Because broker._dir_exists is False and _update_object_count() - # has not been called yet, the values returned for - # object_count, bytes_used, and put_timestamp are '0' as - # a string. OpenStack Swift handles this situation by - # passing the value to float(). 
- # - self.assertEquals(info['account'], 'no_account') - self.assertEquals(info['container'], 'no_container') - self.assertEquals(info['object_count'], '0') - self.assertEquals(info['bytes_used'], '0') - self.assertEquals(info['put_timestamp'], '0') - - def test_set_x_syncs(self): - broker = self._get_broker(account='test1', - container='test2') - broker.initialize(self.initial_ts) - - info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) - - broker.set_x_container_sync_points(1, 2) - info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], 1) - self.assertEquals(info['x_container_sync_point2'], 2) - - def test_list_objects_iter(self): - # Test swift.common.db.ContainerBroker.list_objects_iter - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - for obj1 in xrange(4): - for obj2 in xrange(125): - self._create_file('%d.d/%04d' % (obj1, obj2)) - for obj in xrange(125): - self._create_file('2.d/0051.d/%04d' % obj) - for obj in xrange(125): - self._create_file('3.d/%04d.d/0049' % obj) - - listing = broker.list_objects_iter(100, '', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0.d/0000') - self.assertEquals(listing[-1][0], '0.d/0099') - - listing = broker.list_objects_iter(100, '', '0.d/0050', None, '') - self.assertEquals(len(listing), 50) - self.assertEquals(listing[0][0], '0.d/0000') - self.assertEquals(listing[-1][0], '0.d/0049') - - listing = broker.list_objects_iter(100, '0.d/0099', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0.d/0100') - self.assertEquals(listing[-1][0], '1.d/0074') - - listing = broker.list_objects_iter(55, '1.d/0074', None, None, '') - self.assertEquals(len(listing), 55) - self.assertEquals(listing[0][0], '1.d/0075') - self.assertEquals(listing[-1][0], '2.d/0004') - - listing = broker.list_objects_iter(10, '', None, '0.d/01', '') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0.d/0100') - self.assertEquals(listing[-1][0], '0.d/0109') - - listing = broker.list_objects_iter(10, '', None, '0.d/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0.d/0000') - self.assertEquals(listing[-1][0], '0.d/0009') - - listing = broker.list_objects_iter(10, '', None, None, '', '0.d') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0.d/0000') - self.assertEquals(listing[-1][0], '0.d/0009') - - listing = broker.list_objects_iter(10, '', None, '', '/') - self.assertEquals(len(listing), 0) - - listing = broker.list_objects_iter(10, '2', None, None, '/') - self.assertEquals(len(listing), 0) - - listing = broker.list_objects_iter(10, '2.d/', None, None, '/') - self.assertEquals(len(listing), 0) - - listing = broker.list_objects_iter(10, '2.d/0050', None, '2.d/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '2.d/0051') - self.assertEquals(listing[1][0], '2.d/0052') - self.assertEquals(listing[-1][0], '2.d/0060') - - listing = broker.list_objects_iter(10, '3.d/0045', None, '3.d/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3.d/0046', '3.d/0047', '3.d/0048', '3.d/0049', - '3.d/0050', '3.d/0051', '3.d/0052', '3.d/0053', - '3.d/0054', '3.d/0055']) - - # FIXME - #broker.put_object('3/0049/', normalize_timestamp(time()), 0, - # 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') - #listing = 
broker.list_objects_iter(10, '3/0048', None, None, None) - #self.assertEquals(len(listing), 10) - #self.assertEquals([row[0] for row in listing], - # ['3.d/0048.d/0049', '3.d/0049', '3.d/0049.d/', - # '3.d/0049.d/0049', '3.d/0050', '3.d/0050.d/0049', '3.d/0051', '3.d/0051.d/0049', - # '3.d/0052', '3.d/0052.d/0049']) - - listing = broker.list_objects_iter(10, '3.d/0048', None, '3.d/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3.d/0049', '3.d/0050', '3.d/0051', '3.d/0052', '3.d/0053', - '3.d/0054', '3.d/0055', '3.d/0056', '3.d/0057', '3.d/0058']) - - listing = broker.list_objects_iter(10, None, None, '3.d/0049.d/', '/') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], - ['3.d/0049.d/0049']) - - listing = broker.list_objects_iter(10, None, None, None, None, - '3.d/0049.d') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3.d/0049.d/0049']) - - listing = broker.list_objects_iter(2, None, None, '3.d/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3.d/0000', '3.d/0001']) - - listing = broker.list_objects_iter(2, None, None, None, None, '3.d') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3.d/0000', '3.d/0001']) - - def test_list_objects_iter_non_slash(self): - # Test swift.common.db.ContainerBroker.list_objects_iter using a - # delimiter that is not a slash - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - for obj1 in xrange(4): - for obj2 in xrange(125): - self._create_file('%d:%04d' % (obj1, obj2)) - for obj in xrange(125): - self._create_file('2:0051:%04d' % obj) - for obj in xrange(125): - self._create_file('3:%04d:0049' % obj) - - listing = broker.list_objects_iter(100, '', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0099') - - listing = broker.list_objects_iter(100, '', '0:0050', None, '') - self.assertEquals(len(listing), 50) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0049') - - listing = broker.list_objects_iter(100, '0:0099', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0:0100') - self.assertEquals(listing[-1][0], '1:0074') - - listing = broker.list_objects_iter(55, '1:0074', None, None, '') - self.assertEquals(len(listing), 55) - self.assertEquals(listing[0][0], '1:0075') - self.assertEquals(listing[-1][0], '2:0004') - - listing = broker.list_objects_iter(10, '', None, '0:01', '') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0:0100') - self.assertEquals(listing[-1][0], '0:0109') - - listing = broker.list_objects_iter(10, '', None, '0:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0009') - - # Same as above, but using the path argument, so nothing should be - # returned since path uses a '/' as a delimiter. 
- listing = broker.list_objects_iter(10, '', None, None, '', '0') - self.assertEquals(len(listing), 0) - - listing = broker.list_objects_iter(10, '', None, '', ':') - self.assertEquals(len(listing), 4) - self.assertEquals([row[0] for row in listing], - ['0:', '1:', '2:', '3:']) - - listing = broker.list_objects_iter(10, '2', None, None, ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['2:', '3:']) - - listing = broker.list_objects_iter(10, '2:', None, None, ':') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3:']) - - listing = broker.list_objects_iter(10, '2:0050', None, '2:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '2:0051') - self.assertEquals(listing[1][0], '2:0051:') - self.assertEquals(listing[2][0], '2:0052') - self.assertEquals(listing[-1][0], '2:0059') - - listing = broker.list_objects_iter(10, '3:0045', None, '3:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3:0045:', '3:0046', '3:0046:', '3:0047', - '3:0047:', '3:0048', '3:0048:', '3:0049', - '3:0049:', '3:0050']) - - self._create_file('3:0049:') - listing = broker.list_objects_iter(10, '3:0048', None, None, None) - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3:0048:0049', '3:0049', '3:0049:', - '3:0049:0049', '3:0050', '3:0050:0049', '3:0051', '3:0051:0049', - '3:0052', '3:0052:0049']) - - listing = broker.list_objects_iter(10, '3:0048', None, '3:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3:0048:', '3:0049', '3:0049:', '3:0050', - '3:0050:', '3:0051', '3:0051:', '3:0052', '3:0052:', '3:0053']) - - listing = broker.list_objects_iter(10, None, None, '3:0049:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], - ['3:0049:', '3:0049:0049']) - - # Same as above, but using the path argument, so nothing should be - # returned since path uses a '/' as a delimiter. 
- listing = broker.list_objects_iter(10, None, None, None, None, - '3:0049') - self.assertEquals(len(listing), 0) - - listing = broker.list_objects_iter(2, None, None, '3:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3:0000', '3:0000:']) - - listing = broker.list_objects_iter(2, None, None, None, None, '3') - self.assertEquals(len(listing), 0) - - def test_list_objects_iter_prefix_delim(self): - # Test swift.common.db.ContainerBroker.list_objects_iter - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - os.mkdir(os.path.join(self.container, 'pets')) - os.mkdir(os.path.join(self.container, 'pets', 'dogs')) - obj1 = os.path.join(self.container, 'pets', 'dogs', '1') - with open(obj1, 'w') as fp: - fp.write("one\n") - obj2 = os.path.join(self.container, 'pets', 'dogs', '2') - with open(obj2, 'w') as fp: - fp.write("two\n") - os.mkdir(os.path.join(self.container, 'pets', 'fish')) - obja = os.path.join(self.container, 'pets', 'fish', 'a') - with open(obja, 'w') as fp: - fp.write("A\n") - objb = os.path.join(self.container, 'pets', 'fish', 'b') - with open(objb, 'w') as fp: - fp.write("B\n") - objf = os.path.join(self.container, 'pets', 'fish_info.txt') - with open(objf, 'w') as fp: - fp.write("one fish\n") - objs = os.path.join(self.container, 'snakes') - with open(objs, 'w') as fp: - fp.write("slither\n") - - listing = broker.list_objects_iter(100, None, None, 'pets/f', '/') - self.assertEquals([row[0] for row in listing], - ['pets/fish_info.txt']) - listing = broker.list_objects_iter(100, None, None, 'pets/fish', '/') - self.assertEquals([row[0] for row in listing], - ['pets/fish_info.txt']) - listing = broker.list_objects_iter(100, None, None, 'pets/fish/', '/') - self.assertEquals([row[0] for row in listing], - ['pets/fish/a', 'pets/fish/b']) - - def test_double_check_trailing_delimiter(self): - # Test swift.common.db.ContainerBroker.list_objects_iter for a - # container that has an odd file with a trailing delimiter - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - self._create_file('a') - self._create_file('a.d/a') - self._create_file('a.d/a.d/a') - self._create_file('a.d/a.d/b') - self._create_file('a.d/b') - self._create_file('b') - self._create_file('b.d/a') - self._create_file('b.d/b') - self._create_file('c') - self._create_file('a.d/0') - self._create_file('0') - self._create_file('00') - self._create_file('0.d/0') - self._create_file('0.d/00') - self._create_file('0.d/1') - self._create_file('0.d/1.d/0') - self._create_file('1') - self._create_file('1.d/0') - - listing = broker.list_objects_iter(25, None, None, None, None) - self.assertEquals(len(listing), 18) - self.assertEquals([row[0] for row in listing], - ['0', '0.d/0', '0.d/00', '0.d/1', '0.d/1.d/0', '00', - '1', '1.d/0', 'a', 'a.d/0', 'a.d/a', 'a.d/a.d/a', - 'a.d/a.d/b', 'a.d/b', 'b', 'b.d/a', 'b.d/b', 'c']) - listing = broker.list_objects_iter(25, None, None, '', '/') - self.assertEquals(len(listing), 6) - self.assertEquals([row[0] for row in listing], - ['0', '00', '1', 'a', 'b', 'c']) - listing = broker.list_objects_iter(25, None, None, 'a.d/', '/') - self.assertEquals(len(listing), 3) - self.assertEquals([row[0] for row in listing], - ['a.d/0', 'a.d/a', 'a.d/b']) - listing = broker.list_objects_iter(25, None, None, '0.d/', '/') - self.assertEquals(len(listing), 3) - self.assertEquals([row[0] for row in listing], - ['0.d/0', '0.d/00', '0.d/1']) - listing = broker.list_objects_iter(25, 
None, None, '0.d/1.d/', '/') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['0.d/1.d/0']) - listing = broker.list_objects_iter(25, None, None, 'b.d/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['b.d/a', 'b.d/b']) - - def test_double_check_trailing_delimiter_non_slash(self): - # Test swift.common.db.ContainerBroker.list_objects_iter for a - # container that has an odd file with a trailing delimiter - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - self._create_file('a') - self._create_file('a:') - self._create_file('a:a') - self._create_file('a:a:a') - self._create_file('a:a:b') - self._create_file('a:b') - self._create_file('b') - self._create_file('b:a') - self._create_file('b:b') - self._create_file('c') - self._create_file('a:0') - self._create_file('0') - self._create_file('0:') - self._create_file('00') - self._create_file('0:0') - self._create_file('0:00') - self._create_file('0:1') - self._create_file('0:1:') - self._create_file('0:1:0') - self._create_file('1') - self._create_file('1:') - self._create_file('1:0') - - listing = broker.list_objects_iter(25, None, None, None, None) - self.assertEquals(len(listing), 22) - self.assertEquals([row[0] for row in listing], - ['0', '00', '0:', '0:0', '0:00', '0:1', '0:1:', '0:1:0', '1', '1:', - '1:0', 'a', 'a:', 'a:0', 'a:a', 'a:a:a', 'a:a:b', 'a:b', 'b', 'b:a', - 'b:b', 'c']) - listing = broker.list_objects_iter(25, None, None, '', ':') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['0', '00', '0:', '1', '1:', 'a', 'a:', 'b', 'b:', 'c']) - listing = broker.list_objects_iter(25, None, None, 'a:', ':') - self.assertEquals(len(listing), 5) - self.assertEquals([row[0] for row in listing], - ['a:', 'a:0', 'a:a', 'a:a:', 'a:b']) - listing = broker.list_objects_iter(25, None, None, '0:', ':') - self.assertEquals(len(listing), 5) - self.assertEquals([row[0] for row in listing], - ['0:', '0:0', '0:00', '0:1', '0:1:']) - listing = broker.list_objects_iter(25, None, None, '0:1:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], - ['0:1:', '0:1:0']) - listing = broker.list_objects_iter(25, None, None, 'b:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['b:a', 'b:b']) - - def test_metadata(self): - # Initializes a good broker for us - broker = self._get_broker(account='a', container='c') - broker.initialize(self.initial_ts) - - # Add our first item - first_timestamp = normalize_timestamp(1) - first_value = '1' - broker.update_metadata({'First': [first_value, first_timestamp]}) - self.assert_('First' in broker.metadata) - self.assertEquals(broker.metadata['First'], - [first_value, first_timestamp]) - # Add our second item - second_timestamp = normalize_timestamp(2) - second_value = '2' - broker.update_metadata({'Second': [second_value, second_timestamp]}) - self.assert_('First' in broker.metadata) - self.assertEquals(broker.metadata['First'], - [first_value, first_timestamp]) - self.assert_('Second' in broker.metadata) - self.assertEquals(broker.metadata['Second'], - [second_value, second_timestamp]) - # Update our first item - first_timestamp = normalize_timestamp(3) - first_value = '1b' - broker.update_metadata({'First': [first_value, first_timestamp]}) - self.assert_('First' in broker.metadata) - self.assertEquals(broker.metadata['First'], - [first_value, first_timestamp]) - 
self.assert_('Second' in broker.metadata) - self.assertEquals(broker.metadata['Second'], - [second_value, second_timestamp]) - # Delete our second item (by setting to empty string) - second_timestamp = normalize_timestamp(4) - second_value = '' - broker.update_metadata({'Second': [second_value, second_timestamp]}) - self.assert_('First' in broker.metadata) - self.assertEquals(broker.metadata['First'], - [first_value, first_timestamp]) - self.assert_('Second' in broker.metadata) - self.assertEquals(broker.metadata['Second'], - [second_value, second_timestamp]) - - def test_delete_db(self): - broker = self._get_broker(account='a', container='c') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.container)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - broker.delete_db(normalize_timestamp(time())) - self.assertTrue(broker.is_deleted()) - - -class TestAccountBroker(unittest.TestCase): - """ - Tests for DiskDir.DiskAccount class (duck-typed - swift.common.db.AccountBroker). - """ - - def __init__(self, *args, **kwargs): - super(TestAccountBroker, self).__init__(*args, **kwargs) - self.initial_ts = normalize_timestamp('1') - - def setUp(self): - _initxattr() - self.path = tempfile.mkdtemp() - self.drive = 'drv' - self.drive_fullpath = os.path.join(self.path, self.drive) - os.mkdir(self.drive_fullpath) - self.account = None - - def tearDown(self): - self.account = None - _destroyxattr() - shutil.rmtree(self.path) - - def _get_broker(self, account=None): - assert account is not None - self.account = account - return dd.DiskAccount(self.path, self.drive, account=account, - logger=FakeLogger()) - - def _create_container(self, name): - cont = os.path.join(self.drive_fullpath, name) - try: - os.mkdir(cont) - except OSError as e: - if e.errno != errno.EEXIST: - raise - return cont - - def test_creation(self): - # Test swift.common.db.AccountBroker.__init__ - broker = self._get_broker(account='a') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.drive_fullpath)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - - def test_creation_bad_metadata(self): - # Test swift.common.db.AccountBroker.__init__ - utils.write_metadata(self.drive_fullpath, dict(a=1, b=2)) - broker = self._get_broker(account='a') - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.drive_fullpath)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - - def test_empty(self): - # Test swift.common.db.AccountBroker.empty - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - c1 = self._create_container('c1') - self.assert_(not broker.empty()) - os.rmdir(c1) - self.assert_(broker.empty()) - - def test_put_container(self): - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - broker.put_container('c1', normalize_timestamp(time()), 0, 0, 0) - # put_container() should be a NOOP - self.assert_(broker.empty()) - - def 
test_put_container_for_deletes(self): - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - self.assert_(broker.empty()) - c1 = self._create_container('c1') - self.assert_(not broker.empty()) - broker.put_container('c1', 0, normalize_timestamp(time()), 0, 0) - # put_container() should be a NOOP - self.assert_(not broker.empty()) - os.rmdir(c1) - self.assert_(broker.empty()) - - def test_get_info(self): - # Test swift.common.db.AccountBroker.get_info - __save_config = \ - gluster.swift.common.Glusterfs._account_update_container_count - gluster.swift.common.Glusterfs._account_update_container_count = True - broker = self._get_broker(account='test1') - broker.initialize(self.initial_ts) - - info = broker.get_info() - self.assertEquals(info['account'], 'test1') - - info = broker.get_info() - self.assertEquals(info['container_count'], 0) - - c1 = self._create_container('c1') - info = broker.get_info() - self.assertEquals(info['container_count'], 1) - - c2 = self._create_container('c2') - info = broker.get_info() - self.assertEquals(info['container_count'], 2) - - c2 = self._create_container('c2') - info = broker.get_info() - self.assertEquals(info['container_count'], 2) - - os.rmdir(c1) - info = broker.get_info() - self.assertEquals(info['container_count'], 1) - - os.rmdir(c2) - info = broker.get_info() - self.assertEquals(info['container_count'], 0) - gluster.swift.common.Glusterfs._account_update_container_count = \ - __save_config - - def test_list_containers_iter(self): - # Test swift.common.db.AccountBroker.list_containers_iter - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - for cont1 in xrange(4): - for cont2 in xrange(125): - self._create_container('%d-%04d' % (cont1, cont2)) - for cont in xrange(125): - self._create_container('2-0051-%04d' % cont) - for cont in xrange(125): - self._create_container('3-%04d-0049' % cont) - - listing = broker.list_containers_iter(100, '', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0-0000') - self.assertEquals(listing[-1][0], '0-0099') - - listing = broker.list_containers_iter(100, '', '0-0050', None, '') - self.assertEquals(len(listing), 50) - self.assertEquals(listing[0][0], '0-0000') - self.assertEquals(listing[-1][0], '0-0049') - - listing = broker.list_containers_iter(100, '0-0099', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0-0100') - self.assertEquals(listing[-1][0], '1-0074') - - listing = broker.list_containers_iter(55, '1-0074', None, None, '') - self.assertEquals(len(listing), 55) - self.assertEquals(listing[0][0], '1-0075') - self.assertEquals(listing[-1][0], '2-0004') - - listing = broker.list_containers_iter(10, '', None, '0-01', '') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0-0100') - self.assertEquals(listing[-1][0], '0-0109') - - listing = broker.list_containers_iter(10, '', None, '0-01', '-') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0-0100') - self.assertEquals(listing[-1][0], '0-0109') - - listing = broker.list_containers_iter(10, '', None, '0-', '-') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0-0000') - self.assertEquals(listing[-1][0], '0-0009') - - listing = broker.list_containers_iter(10, '', None, '', '-') - self.assertEquals(len(listing), 4) - self.assertEquals([row[0] for row in listing], - ['0-', '1-', '2-', '3-']) - - listing = broker.list_containers_iter(10, '2-', None, None, '-') - 
self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3-']) - - listing = broker.list_containers_iter(10, '', None, '2', '-') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['2-']) - - listing = broker.list_containers_iter(10, '2-0050', None, '2-', '-') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '2-0051') - self.assertEquals(listing[1][0], '2-0051-') - self.assertEquals(listing[2][0], '2-0052') - self.assertEquals(listing[-1][0], '2-0059') - - listing = broker.list_containers_iter(10, '3-0045', None, '3-', '-') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3-0045-', '3-0046', '3-0046-', '3-0047', - '3-0047-', '3-0048', '3-0048-', '3-0049', - '3-0049-', '3-0050']) - - self._create_container('3-0049-') - listing = broker.list_containers_iter(10, '3-0048', None, None, None) - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3-0048-0049', '3-0049', '3-0049-', '3-0049-0049', - '3-0050', '3-0050-0049', '3-0051', '3-0051-0049', - '3-0052', '3-0052-0049']) - - listing = broker.list_containers_iter(10, '3-0048', None, '3-', '-') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3-0048-', '3-0049', '3-0049-', '3-0050', - '3-0050-', '3-0051', '3-0051-', '3-0052', - '3-0052-', '3-0053']) - - listing = broker.list_containers_iter(10, None, None, '3-0049-', '-') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], - ['3-0049-', '3-0049-0049']) - - def test_double_check_trailing_delimiter(self): - # Test swift.common.db.AccountBroker.list_containers_iter for an - # account that has an odd container with a trailing delimiter - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - self._create_container('a') - self._create_container('a-') - self._create_container('a-a') - self._create_container('a-a-a') - self._create_container('a-a-b') - self._create_container('a-b') - self._create_container('b') - self._create_container('b-a') - self._create_container('b-b') - self._create_container('c') - - listing = broker.list_containers_iter(15, None, None, None, None) - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['a', 'a-', 'a-a', 'a-a-a', 'a-a-b', 'a-b', 'b', - 'b-a', 'b-b', 'c']) - listing = broker.list_containers_iter(15, None, None, '', '-') - self.assertEquals(len(listing), 5) - self.assertEquals([row[0] for row in listing], - ['a', 'a-', 'b', 'b-', 'c']) - listing = broker.list_containers_iter(15, None, None, 'a-', '-') - self.assertEquals(len(listing), 4) - self.assertEquals([row[0] for row in listing], - ['a-', 'a-a', 'a-a-', 'a-b']) - listing = broker.list_containers_iter(15, None, None, 'b-', '-') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['b-a', 'b-b']) - - def test_delete_db(self): - broker = self._get_broker(account='a') - broker.initialize(self.initial_ts) - self.assertEqual(broker.db_file, dd._db_file) - self.assertEqual(os.path.basename(broker.db_file), 'db_file.db') - broker.initialize(self.initial_ts) - self.assertTrue(os.path.isdir(self.drive_fullpath)) - self.assertEquals(self.initial_ts, broker.metadata[utils.X_TIMESTAMP]) - self.assertFalse(broker.is_deleted()) - broker.delete_db(normalize_timestamp(time())) - # Deleting the "db" should be a NOOP - self.assertFalse(broker.is_deleted()) - - -class 
TestDiskAccount(unittest.TestCase): - """ Tests for gluster.swift.common.DiskDir.DiskAccount """ - - def setUp(self): - _initxattr() - self.fake_logger = FakeLogger() - self.td = tempfile.mkdtemp() - self.fake_drives = [] - self.fake_accounts = [] - self.fake_md = [] - for i in range(0,3): - self.fake_drives.append("drv%d" % i) - os.makedirs(os.path.join(self.td, self.fake_drives[i])) - self.fake_accounts.append(self.fake_drives[i]) - if i == 0: - # First drive does not have any initial account metadata - continue - if i == 1: - # Second drive has account metadata but it is not valid - datadir = os.path.join(self.td, self.fake_drives[i]) - fake_md = { "fake-drv-%d" % i: (True,0) } - self.fake_md.append(fake_md) - fake_md_p = pickle.dumps(fake_md, utils.PICKLE_PROTOCOL) - _setxattr(datadir, utils.METADATA_KEY, fake_md_p) - if i == 2: - # Third drive has valid account metadata - utils.create_account_metadata(datadir) - - def tearDown(self): - _destroyxattr() - shutil.rmtree(self.td) - - def test_constructor_no_metadata(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - assert da._dir_exists is True - ctime = os.path.getctime(da.datadir) - mtime = os.path.getmtime(da.datadir) - exp_md = { - 'X-Bytes-Used': (0, 0), - 'X-Timestamp': (normalize_timestamp(ctime), 0), - 'X-Object-Count': (0, 0), - 'X-Type': ('Account', 0), - 'X-PUT-Timestamp': (normalize_timestamp(mtime), 0), - 'X-Container-Count': (0, 0)} - assert da.metadata == exp_md, repr(da.metadata) - - def test_constructor_metadata_not_valid(self): - da = dd.DiskAccount(self.td, self.fake_drives[1], - self.fake_accounts[1], self.fake_logger) - assert da._dir_exists is True - ctime = os.path.getctime(da.datadir) - mtime = os.path.getmtime(da.datadir) - exp_md = { - 'X-Bytes-Used': (0, 0), - 'X-Timestamp': (normalize_timestamp(ctime), 0), - 'X-Object-Count': (0, 0), - 'X-Type': ('Account', 0), - 'X-PUT-Timestamp': (normalize_timestamp(mtime), 0), - 'X-Container-Count': (0, 0), - 'fake-drv-1': (True, 0)} - assert da.metadata == exp_md, repr(da.metadata) - - def test_constructor_metadata_valid(self): - da = dd.DiskAccount(self.td, self.fake_drives[2], - self.fake_accounts[2], self.fake_logger) - assert da._dir_exists is True - ctime = os.path.getctime(da.datadir) - mtime = os.path.getmtime(da.datadir) - exp_md = { - 'X-Bytes-Used': (0, 0), - 'X-Timestamp': (normalize_timestamp(ctime), 0), - 'X-Object-Count': (0, 0), - 'X-Type': ('Account', 0), - 'X-PUT-Timestamp': (normalize_timestamp(mtime), 0), - 'X-Container-Count': (0, 0)} - assert da.metadata == exp_md, repr(da.metadata) - - get_info_keys = set(['account', 'created_at', 'put_timestamp', - 'delete_timestamp', 'container_count', - 'object_count', 'bytes_used', 'hash', 'id']) - - def test_get_info_empty(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - data = da.get_info() - assert set(data.keys()) == self.get_info_keys - assert data['account'] == self.fake_accounts[0] - assert data['created_at'] == '1' - assert data['put_timestamp'] == '1' - assert data['delete_timestamp'] == '1' - assert data['container_count'] == 0 - assert data['object_count'] == 0 - assert data['bytes_used'] == 0 - assert data['hash'] == '' - assert data['id'] == '' - - def test_get_info(self): - tf = tarfile.open("common/data/account_tree.tar.bz2", "r:bz2") - orig_cwd = os.getcwd() - os.chdir(os.path.join(self.td, self.fake_drives[0])) - try: - tf.extractall() - finally: - os.chdir(orig_cwd) - da = 
dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - data = da.get_info() - assert set(data.keys()) == self.get_info_keys - assert data['account'] == self.fake_accounts[0] - assert data['created_at'] == '1' - assert data['put_timestamp'] == '1' - assert data['delete_timestamp'] == '1' - assert data['container_count'] == 3 - assert data['object_count'] == 0 - assert data['bytes_used'] == 0 - assert data['hash'] == '' - assert data['id'] == '' - - def test_update_put_timestamp_not_updated(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - da.update_put_timestamp('12345') - assert da.metadata['X-PUT-Timestamp'][0] != '12345', repr(da.metadata) - - def test_update_put_timestamp_updated(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - exp_pts = str(float(da.metadata['X-PUT-Timestamp'][0]) + 100) - da.update_put_timestamp(exp_pts) - assert da.metadata['X-PUT-Timestamp'][0] == exp_pts, repr(da.metadata) - - def test_delete_db(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - assert da._dir_exists == True - da.delete_db('12345') - assert da._dir_exists == True - - def test_is_status_deleted(self): - da = dd.DiskAccount(self.td, self.fake_drives[0], - self.fake_accounts[0], self.fake_logger) - assert da.is_status_deleted() == False diff --git a/test/unit/common/test_ring.py b/test/unit/common/test_ring.py deleted file mode 100644 index de32c7b..0000000 --- a/test/unit/common/test_ring.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
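The ring tests below (removed by this change) exercised the old gluster-swift Ring wrapper, in which a known account/volume name resolves to a device of the same name and anything else falls back to 'volume_not_in_ring'. A hypothetical stand-in capturing just that lookup behaviour (ToyVolumeRing is an illustration, not the real gluster.swift.common.ring.Ring):

class ToyVolumeRing(object):
    """Toy lookup table: one device per GlusterFS volume/account."""

    def __init__(self, volumes):
        self.volumes = list(volumes)

    def get_nodes(self, account, container=None, obj=None):
        if account in self.volumes:
            part, device = self.volumes.index(account), account
        else:
            part, device = 0, 'volume_not_in_ring'
        return part, [{'device': device, 'ip': '127.0.0.1', 'port': 6050}]


ring = ToyVolumeRing(['test', 'iops'])
assert ring.get_nodes('iops')[1][0]['device'] == 'iops'
assert ring.get_nodes('unknown')[1][0]['device'] == 'volume_not_in_ring'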
- -import os -import errno -import unittest -import gluster.swift.common.constraints -import swift.common.utils -from gluster.swift.common.ring import Ring - - -class TestRing(unittest.TestCase): - """ Tests for common.ring """ - - def setUp(self): - swift.common.utils.HASH_PATH_SUFFIX = 'endcap' - swiftdir = os.path.join(os.getcwd(), "common", "data") - self.ring = Ring(swiftdir, ring_name='object') - - def test_first_device(self): - part, node = self.ring.get_nodes('test') - assert node[0]['device'] == 'test' - node = self.ring.get_part_nodes(0) - assert node[0]['device'] == 'test' - for node in self.ring.get_more_nodes(0): - assert node['device'] == 'volume_not_in_ring' - - def test_invalid_device(self): - part, node = self.ring.get_nodes('test2') - assert node[0]['device'] == 'volume_not_in_ring' - node = self.ring.get_part_nodes(0) - assert node[0]['device'] == 'volume_not_in_ring' - - def test_second_device(self): - part, node = self.ring.get_nodes('iops') - assert node[0]['device'] == 'iops' - node = self.ring.get_part_nodes(0) - assert node[0]['device'] == 'iops' - for node in self.ring.get_more_nodes(0): - assert node['device'] == 'volume_not_in_ring' - - def test_second_device_part(self): - part = self.ring.get_part('iops') - assert part == 0 - - def test_second_device_with_reseller_prefix(self): - part, node = self.ring.get_nodes('AUTH_iops') - assert node[0]['device'] == 'iops' - - def test_partition_id_for_multiple_accounts(self): - test_part, test_node = self.ring.get_nodes('test') - iops_part, iops_node = self.ring.get_nodes('iops') - self.assertNotEqual(test_part, iops_part) - self.assertEqual(test_node, self.ring.get_part_nodes(test_part)) - self.assertEqual(iops_node, self.ring.get_part_nodes(iops_part)) - self.assertNotEqual(test_node, self.ring.get_part_nodes(iops_part)) - self.assertNotEqual(iops_node, self.ring.get_part_nodes(test_part)) - - def test_invalid_partition(self): - nodes = self.ring.get_part_nodes(0) - self.assertEqual(nodes[0]['device'], 'volume_not_in_ring') - - def test_ring_file_enoent(self): - swiftdir = os.path.join(os.getcwd(), "common", "data") - try: - self.ring = Ring(swiftdir, ring_name='obj') - except OSError as ose: - if ose.errno == errno.ENOENT: - pass - else: - self.fail('ENOENT expected, %s received.' 
%ose.errno) - else: - self.fail('OSError expected.') diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index dd03bd8..189bbb8 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -321,28 +321,6 @@ class TestUtils(unittest.TestCase): assert _xattr_op_cnt['get'] == 1, "%r" % _xattr_op_cnt assert _xattr_op_cnt['set'] == 0, "%r" % _xattr_op_cnt - def test_add_timestamp_empty(self): - orig = {} - res = utils._add_timestamp(orig) - assert res == {} - - def test_add_timestamp_none(self): - orig = {'a': 1, 'b': 2, 'c': 3} - exp = {'a': (1, 0), 'b': (2, 0), 'c': (3, 0)} - res = utils._add_timestamp(orig) - assert res == exp - - def test_add_timestamp_mixed(self): - orig = {'a': 1, 'b': (2, 1), 'c': 3} - exp = {'a': (1, 0), 'b': (2, 1), 'c': (3, 0)} - res = utils._add_timestamp(orig) - assert res == exp - - def test_add_timestamp_all(self): - orig = {'a': (1, 0), 'b': (2, 1), 'c': (3, 0)} - res = utils._add_timestamp(orig) - assert res == orig - def test_get_etag_empty(self): tf = tempfile.NamedTemporaryFile() hd = utils._get_etag(tf.name) @@ -455,235 +433,6 @@ class TestUtils(unittest.TestCase): finally: os.rmdir(td) - def test_get_container_metadata(self): - def _mock_get_container_details(path): - o_list = ['a', 'b', 'c'] - o_count = 3 - b_used = 47 - return o_list, o_count, b_used - orig_gcd = utils.get_container_details - utils.get_container_details = _mock_get_container_details - td = tempfile.mkdtemp() - try: - exp_md = { - utils.X_TYPE: (utils.CONTAINER, 0), - utils.X_TIMESTAMP: (utils.normalize_timestamp(os.path.getctime(td)), 0), - utils.X_PUT_TIMESTAMP: (utils.normalize_timestamp(os.path.getmtime(td)), 0), - utils.X_OBJECTS_COUNT: (3, 0), - utils.X_BYTES_USED: (47, 0), - } - md = utils.get_container_metadata(td) - assert md == exp_md - finally: - utils.get_container_details = orig_gcd - os.rmdir(td) - - def test_get_account_metadata(self): - def _mock_get_account_details(path): - c_list = ['123', 'abc'] - c_count = 2 - return c_list, c_count - orig_gad = utils.get_account_details - utils.get_account_details = _mock_get_account_details - td = tempfile.mkdtemp() - try: - exp_md = { - utils.X_TYPE: (utils.ACCOUNT, 0), - utils.X_TIMESTAMP: (utils.normalize_timestamp(os.path.getctime(td)), 0), - utils.X_PUT_TIMESTAMP: (utils.normalize_timestamp(os.path.getmtime(td)), 0), - utils.X_OBJECTS_COUNT: (0, 0), - utils.X_BYTES_USED: (0, 0), - utils.X_CONTAINER_COUNT: (2, 0), - } - md = utils.get_account_metadata(td) - assert md == exp_md - finally: - utils.get_account_details = orig_gad - os.rmdir(td) - - cont_keys = [utils.X_TYPE, utils.X_TIMESTAMP, utils.X_PUT_TIMESTAMP, - utils.X_OBJECTS_COUNT, utils.X_BYTES_USED] - - def test_create_container_metadata(self): - td = tempfile.mkdtemp() - try: - r_md = utils.create_container_metadata(td) - - xkey = _xkey(td, utils.METADATA_KEY) - assert len(_xattrs.keys()) == 1 - assert xkey in _xattrs - assert _xattr_op_cnt['get'] == 1 - assert _xattr_op_cnt['set'] == 1 - md = pickle.loads(_xattrs[xkey]) - assert r_md == md - - for key in self.cont_keys: - assert key in md, "Expected key %s in %r" % (key, md) - assert md[utils.X_TYPE] == (utils.CONTAINER, 0) - assert md[utils.X_TIMESTAMP] == (utils.normalize_timestamp(os.path.getctime(td)), 0) - assert md[utils.X_PUT_TIMESTAMP] == (utils.normalize_timestamp(os.path.getmtime(td)), 0) - assert md[utils.X_OBJECTS_COUNT] == (0, 0) - assert md[utils.X_BYTES_USED] == (0, 0) - finally: - os.rmdir(td) - - acct_keys = [val for val in cont_keys] - 
acct_keys.append(utils.X_CONTAINER_COUNT) - - def test_create_account_metadata(self): - td = tempfile.mkdtemp() - try: - r_md = utils.create_account_metadata(td) - - xkey = _xkey(td, utils.METADATA_KEY) - assert len(_xattrs.keys()) == 1 - assert xkey in _xattrs - assert _xattr_op_cnt['get'] == 1 - assert _xattr_op_cnt['set'] == 1 - md = pickle.loads(_xattrs[xkey]) - assert r_md == md - - for key in self.acct_keys: - assert key in md, "Expected key %s in %r" % (key, md) - assert md[utils.X_TYPE] == (utils.ACCOUNT, 0) - assert md[utils.X_TIMESTAMP] == (utils.normalize_timestamp(os.path.getctime(td)), 0) - assert md[utils.X_PUT_TIMESTAMP] == (utils.normalize_timestamp(os.path.getmtime(td)), 0) - assert md[utils.X_OBJECTS_COUNT] == (0, 0) - assert md[utils.X_BYTES_USED] == (0, 0) - assert md[utils.X_CONTAINER_COUNT] == (0, 0) - finally: - os.rmdir(td) - - def test_get_account_details(self): - orig_cwd = os.getcwd() - td = tempfile.mkdtemp() - try: - tf = tarfile.open("common/data/account_tree.tar.bz2", "r:bz2") - os.chdir(td) - tf.extractall() - - container_list, container_count = utils.get_account_details(td) - assert container_count == 3 - assert set(container_list) == set(['c1', 'c2', 'c3']) - finally: - os.chdir(orig_cwd) - shutil.rmtree(td) - - def test_get_account_details_notadir(self): - tf = tempfile.NamedTemporaryFile() - container_list, container_count = utils.get_account_details(tf.name) - assert container_count == 0 - assert container_list == [] - - def test_get_container_details_notadir(self): - tf = tempfile.NamedTemporaryFile() - obj_list, object_count, bytes_used = \ - utils.get_container_details(tf.name) - assert bytes_used == 0 - assert object_count == 0 - assert obj_list == [] - - def test_get_container_details(self): - orig_cwd = os.getcwd() - __do_getsize = Glusterfs._do_getsize - td = tempfile.mkdtemp() - try: - tf = tarfile.open("common/data/container_tree.tar.bz2", "r:bz2") - os.chdir(td) - tf.extractall() - - Glusterfs._do_getsize = False - - obj_list, object_count, bytes_used = \ - utils.get_container_details(td) - assert bytes_used == 0, repr(bytes_used) - # Should not include the directories - assert object_count == 5, repr(object_count) - assert set(obj_list) == set(['file1', 'file3', 'file2', - 'dir1/file1', 'dir1/file2' - ]), repr(obj_list) - finally: - Glusterfs._do_getsize = __do_getsize - os.chdir(orig_cwd) - shutil.rmtree(td) - - def test_get_container_details_from_fs_do_getsize_true(self): - orig_cwd = os.getcwd() - __do_getsize = Glusterfs._do_getsize - td = tempfile.mkdtemp() - try: - tf = tarfile.open("common/data/container_tree.tar.bz2", "r:bz2") - os.chdir(td) - tf.extractall() - - Glusterfs._do_getsize = True - - obj_list, object_count, bytes_used = \ - utils.get_container_details(td) - assert bytes_used == 30, repr(bytes_used) - assert object_count == 5, repr(object_count) - assert set(obj_list) == set(['file1', 'file3', 'file2', - 'dir1/file1', 'dir1/file2' - ]), repr(obj_list) - finally: - Glusterfs._do_getsize = __do_getsize - os.chdir(orig_cwd) - shutil.rmtree(td) - - def test_validate_container_empty(self): - ret = utils.validate_container({}) - assert not ret - - def test_validate_container_missing_keys(self): - ret = utils.validate_container({'foo': 'bar'}) - assert not ret - - def test_validate_container_bad_type(self): - md = {utils.X_TYPE: ('bad', 0), - utils.X_TIMESTAMP: ('na', 0), - utils.X_PUT_TIMESTAMP: ('na', 0), - utils.X_OBJECTS_COUNT: ('na', 0), - utils.X_BYTES_USED: ('na', 0)} - ret = utils.validate_container(md) - assert not ret - 
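The metadata helpers covered in this area all follow one convention: every value is stored as a (value, timestamp) pair inside a single pickled dictionary, and validate_container()/validate_account() only check that the expected keys are present with the right X-Type entry. A self-contained illustration of that shape follows; the key strings match the ones used in these tests, but the checker function itself is hypothetical (the real checks live in gluster.swift.common.utils):

X_TYPE = 'X-Type'
X_TIMESTAMP = 'X-Timestamp'
X_PUT_TIMESTAMP = 'X-PUT-Timestamp'
X_OBJECTS_COUNT = 'X-Object-Count'
X_BYTES_USED = 'X-Bytes-Used'


def looks_like_container_metadata(md):
    # Every required key must be present and X-Type must name a container.
    # 'Container' is the literal used in this sketch; the real code compares
    # against its own constant.
    required = (X_TYPE, X_TIMESTAMP, X_PUT_TIMESTAMP,
                X_OBJECTS_COUNT, X_BYTES_USED)
    if not all(key in md for key in required):
        return False
    value, _timestamp = md[X_TYPE]
    return value == 'Container'


good = {X_TYPE: ('Container', 0), X_TIMESTAMP: ('na', 0),
        X_PUT_TIMESTAMP: ('na', 0), X_OBJECTS_COUNT: (0, 0),
        X_BYTES_USED: (0, 0)}
assert looks_like_container_metadata(good)
assert not looks_like_container_metadata({'foo': 'bar'})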
- def test_validate_container_good_type(self): - md = {utils.X_TYPE: (utils.CONTAINER, 0), - utils.X_TIMESTAMP: ('na', 0), - utils.X_PUT_TIMESTAMP: ('na', 0), - utils.X_OBJECTS_COUNT: ('na', 0), - utils.X_BYTES_USED: ('na', 0)} - ret = utils.validate_container(md) - assert ret - - def test_validate_account_empty(self): - ret = utils.validate_account({}) - assert not ret - - def test_validate_account_missing_keys(self): - ret = utils.validate_account({'foo': 'bar'}) - assert not ret - - def test_validate_account_bad_type(self): - md = {utils.X_TYPE: ('bad', 0), - utils.X_TIMESTAMP: ('na', 0), - utils.X_PUT_TIMESTAMP: ('na', 0), - utils.X_OBJECTS_COUNT: ('na', 0), - utils.X_BYTES_USED: ('na', 0), - utils.X_CONTAINER_COUNT: ('na', 0)} - ret = utils.validate_account(md) - assert not ret - - def test_validate_account_good_type(self): - md = {utils.X_TYPE: (utils.ACCOUNT, 0), - utils.X_TIMESTAMP: ('na', 0), - utils.X_PUT_TIMESTAMP: ('na', 0), - utils.X_OBJECTS_COUNT: ('na', 0), - utils.X_BYTES_USED: ('na', 0), - utils.X_CONTAINER_COUNT: ('na', 0)} - ret = utils.validate_account(md) - assert ret - def test_validate_object_empty(self): ret = utils.validate_object({}) assert not ret @@ -712,6 +461,42 @@ class TestUtils(unittest.TestCase): ret = utils.validate_object(md) assert ret + def test_write_pickle(self): + td = tempfile.mkdtemp() + try: + fpp = os.path.join(td, 'pp') + # FIXME: Remove this patch when coverage.py can handle eventlet + with patch("gluster.swift.common.fs_utils.do_fsync", + _mock_os_fsync): + utils.write_pickle('pickled peppers', fpp) + with open(fpp, "rb") as f: + contents = f.read() + s = pickle.loads(contents) + assert s == 'pickled peppers', repr(s) + finally: + shutil.rmtree(td) + + def test_write_pickle_ignore_tmp(self): + tf = tempfile.NamedTemporaryFile() + td = tempfile.mkdtemp() + try: + fpp = os.path.join(td, 'pp') + # Also test an explicity pickle protocol + # FIXME: Remove this patch when coverage.py can handle eventlet + with patch("gluster.swift.common.fs_utils.do_fsync", + _mock_os_fsync): + utils.write_pickle('pickled peppers', fpp, tmp=tf.name, + pickle_protocol=2) + with open(fpp, "rb") as f: + contents = f.read() + s = pickle.loads(contents) + assert s == 'pickled peppers', repr(s) + with open(tf.name, "rb") as f: + contents = f.read() + assert contents == '' + finally: + shutil.rmtree(td) + class TestUtilsDirObjects(unittest.TestCase): diff --git a/test/unit/container/__init__.py b/test/unit/container/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py deleted file mode 100644 index 6a836f5..0000000 --- a/test/unit/container/test_server.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
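The write_pickle tests added above for test_utils.py exercise atomic pickle persistence, with do_fsync patched out so coverage can cope with eventlet. Below is a sketch of the usual write-to-a-temp-file-then-rename pattern such a helper is expected to follow; it is an assumption-laden illustration, not gluster-swift's actual utils.write_pickle:

import os
import pickle
import shutil
import tempfile


def atomic_write_pickle(obj, dest, tmp_dir=None,
                        pickle_protocol=pickle.HIGHEST_PROTOCOL):
    # Serialize into a temp file on the same filesystem, flush and fsync it,
    # then rename over the destination so readers never see a partial file.
    tmp_dir = tmp_dir or os.path.dirname(dest)
    fd, tmp_path = tempfile.mkstemp(dir=tmp_dir)
    try:
        with os.fdopen(fd, 'wb') as fp:
            pickle.dump(obj, fp, pickle_protocol)
            fp.flush()
            os.fsync(fp.fileno())
        os.rename(tmp_path, dest)
    except Exception:
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise


if __name__ == '__main__':
    td = tempfile.mkdtemp()
    try:
        target = os.path.join(td, 'pp')
        atomic_write_pickle('pickled peppers', target)
        with open(target, 'rb') as fp:
            assert pickle.load(fp) == 'pickled peppers'
    finally:
        shutil.rmtree(td)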
- -""" Tests for gluster.swift.container.server subclass """ - -import unittest -from nose import SkipTest - -import gluster.swift.container.server as server - - -class TestContainerServer(unittest.TestCase): - """ - Tests for container server subclass. - """ - - def test_constructor(self): - raise SkipTest diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index ec813c7..8ac0244 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -36,7 +36,7 @@ from gluster.swift.common.exceptions import GlusterFileSystemOSError import gluster.swift.common.utils from gluster.swift.common.utils import normalize_timestamp import gluster.swift.obj.diskfile -from gluster.swift.obj.diskfile import DiskFileWriter, DiskFile, OnDiskManager +from gluster.swift.obj.diskfile import DiskFileWriter, DiskFile, DiskFileManager from gluster.swift.common.utils import DEFAULT_UID, DEFAULT_GID, X_TYPE, \ X_OBJECT_TYPE, DIR_OBJECT @@ -136,7 +136,7 @@ class TestDiskFile(unittest.TestCase): self.td = tempfile.mkdtemp() self.conf = dict(devices=self.td, mb_per_sync=2, keep_cache_size=(1024 * 1024), mount_check=False) - self.mgr = OnDiskManager(self.conf, self.lg) + self.mgr = DiskFileManager(self.conf, self.lg) def tearDown(self): tpool.execute = self._orig_tpool_exc @@ -150,7 +150,7 @@ class TestDiskFile(unittest.TestCase): shutil.rmtree(self.td) def _get_diskfile(self, d, p, a, c, o, **kwargs): - return self.mgr.get_diskfile(d, a, c, o, **kwargs) + return self.mgr.get_diskfile(d, p, a, c, o, **kwargs) def test_constructor_no_slash(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") @@ -161,9 +161,9 @@ class TestDiskFile(unittest.TestCase): assert gdf._gid == DEFAULT_GID assert gdf._obj == "z" assert gdf._obj_path == "" - assert gdf._datadir == os.path.join(self.td, "vol0", "bar"), gdf._datadir + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar"), gdf._datadir assert gdf._datadir == gdf._put_datadir - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", "ufo47", "bar", "z") assert gdf._is_dir is False assert gdf._logger == self.lg assert gdf._fd is None @@ -172,10 +172,10 @@ class TestDiskFile(unittest.TestCase): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "/b/a/z/") assert gdf._obj == "z" assert gdf._obj_path == "b/a" - assert gdf._datadir == os.path.join(self.td, "vol0", "bar", "b", "a"), gdf._datadir + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar", "b", "a"), gdf._datadir def test_open_no_metadata(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -215,7 +215,7 @@ class TestDiskFile(unittest.TestCase): self.assertTrue(mock_close.called) def test_open_existing_metadata(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -243,7 +243,7 @@ class TestDiskFile(unittest.TestCase): assert gdf._metadata == exp_md, "%r != %r" % (gdf._metadata, exp_md) def test_open_invalid_existing_metadata(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -263,7 +263,7 @@ class 
TestDiskFile(unittest.TestCase): assert gdf._metadata != inv_md def test_open_isdir(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "d") os.makedirs(the_dir) ini_md = { @@ -287,7 +287,7 @@ class TestDiskFile(unittest.TestCase): def _create_and_get_diskfile(self, dev, par, acc, con, obj, fsize=256): # FIXME: assumes account === volume - the_path = os.path.join(self.td, dev, con) + the_path = os.path.join(self.td, dev, acc, con) the_file = os.path.join(the_path, obj) base_obj = os.path.basename(the_file) base_dir = os.path.dirname(the_file) @@ -312,7 +312,7 @@ class TestDiskFile(unittest.TestCase): gdf = self._create_and_get_diskfile("vol0", "p57", "ufo47", "bar", "z") with gdf.open(): assert gdf._fd is not None - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", "ufo47", "bar", "z") reader = gdf.reader() assert reader._fd is not None fd[0] = reader._fd @@ -324,7 +324,7 @@ class TestDiskFile(unittest.TestCase): def test_reader_disk_chunk_size(self): conf = dict(disk_chunk_size=64) conf.update(self.conf) - self.mgr = OnDiskManager(conf, self.lg) + self.mgr = DiskFileManager(conf, self.lg) gdf = self._create_and_get_diskfile("vol0", "p57", "ufo47", "bar", "z") with gdf.open(): reader = gdf.reader() @@ -365,7 +365,7 @@ class TestDiskFile(unittest.TestCase): gdf = self._create_and_get_diskfile("vol0", "p57", "ufo47", "bar", "z", fsize=1024*1024*2) with gdf.open(): assert gdf._fd is not None - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", "ufo47", "bar", "z") reader = gdf.reader() assert reader._fd is not None fd[0] = reader._fd @@ -380,7 +380,7 @@ class TestDiskFile(unittest.TestCase): called[0] = True os.close(fd) - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") os.makedirs(os.path.join(the_cont, "dir")) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") with gdf.open(): @@ -396,7 +396,7 @@ class TestDiskFile(unittest.TestCase): reader.close() def test_create_dir_object_no_md(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = "dir" os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", @@ -409,7 +409,7 @@ class TestDiskFile(unittest.TestCase): assert _mapit(full_dir_path) not in _metadata def test_create_dir_object_with_md(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = "dir" os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", @@ -468,7 +468,7 @@ class TestDiskFile(unittest.TestCase): self.assertFalse(_mapit(the_dir) in _metadata) def test_write_metadata(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "z") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") @@ -481,7 +481,7 @@ class TestDiskFile(unittest.TestCase): self.assertTrue(fmd['Content-Type'], md['Content-Type']) def test_add_metadata_to_existing_file(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -509,7 +509,7 @@ class 
TestDiskFile(unittest.TestCase): self.assertFalse('a' in on_disk_md) def test_add_md_to_existing_file_with_md_in_gdf(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -537,7 +537,7 @@ class TestDiskFile(unittest.TestCase): self.assertFalse('a' in on_disk_md) def test_add_metadata_to_existing_dir(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_cont, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") @@ -571,7 +571,7 @@ class TestDiskFile(unittest.TestCase): def test_write_metadata_w_meta_file(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -583,7 +583,7 @@ class TestDiskFile(unittest.TestCase): assert _metadata[_mapit(the_file)] == newmd def test_write_metadata_w_meta_file_no_content_type(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -596,7 +596,7 @@ class TestDiskFile(unittest.TestCase): assert _metadata[_mapit(the_file)] == newmd def test_write_metadata_w_meta_dir(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") @@ -606,7 +606,7 @@ class TestDiskFile(unittest.TestCase): assert _metadata[_mapit(the_dir)] == newmd def test_write_metadata_w_marker_dir(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") @@ -616,7 +616,7 @@ class TestDiskFile(unittest.TestCase): assert _metadata[_mapit(the_dir)] == newmd def test_put_w_marker_dir_create(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_cont, "dir") os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") @@ -633,7 +633,7 @@ class TestDiskFile(unittest.TestCase): assert _metadata[_mapit(the_dir)][X_OBJECT_TYPE] == DIR_OBJECT def test_put_is_dir(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") @@ -664,14 +664,14 @@ class TestDiskFile(unittest.TestCase): origfmd, _metadata[_mapit(the_dir)]) def test_put(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" assert gdf._obj_path == "" - assert gdf._container_path == os.path.join(self.td, "vol0", "bar") + assert gdf._container_path == os.path.join(self.td, "vol0", "ufo47", "bar") assert gdf._datadir == the_cont - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", 
"ufo47", "bar", "z") body = '1234\n' etag = md5() @@ -694,14 +694,14 @@ class TestDiskFile(unittest.TestCase): assert not os.path.exists(tmppath) def test_put_ENOSPC(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" assert gdf._obj_path == "" - assert gdf._container_path == os.path.join(self.td, "vol0", "bar") + assert gdf._container_path == os.path.join(self.td, "vol0", "ufo47", "bar") assert gdf._datadir == the_cont - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", "ufo47", "bar", "z") body = '1234\n' etag = md5() @@ -729,14 +729,14 @@ class TestDiskFile(unittest.TestCase): self.fail("Expected exception DiskFileNoSpace") def test_put_rename_ENOENT(self): - the_cont = os.path.join(self.td, "vol0", "bar") + the_cont = os.path.join(self.td, "vol0", "ufo47", "bar") os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" assert gdf._obj_path == "" - assert gdf._container_path == os.path.join(self.td, "vol0", "bar") + assert gdf._container_path == os.path.join(self.td, "vol0", "ufo47", "bar") assert gdf._datadir == the_cont - assert gdf._data_file == os.path.join(self.td, "vol0", "bar", "z") + assert gdf._data_file == os.path.join(self.td, "vol0", "ufo47", "bar", "z") body = '1234\n' etag = md5() @@ -775,10 +775,10 @@ class TestDiskFile(unittest.TestCase): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", the_file) assert gdf._obj == "z" assert gdf._obj_path == the_obj_path - assert gdf._container_path == os.path.join(self.td, "vol0", "bar") - assert gdf._datadir == os.path.join(self.td, "vol0", "bar", "b", "a") + assert gdf._container_path == os.path.join(self.td, "vol0", "ufo47", "bar") + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar", "b", "a") assert gdf._data_file == os.path.join( - self.td, "vol0", "bar", "b", "a", "z") + self.td, "vol0", "ufo47", "bar", "b", "a", "z") body = '1234\n' etag = md5() @@ -801,7 +801,7 @@ class TestDiskFile(unittest.TestCase): assert not os.path.exists(tmppath) def test_delete(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -816,7 +816,7 @@ class TestDiskFile(unittest.TestCase): assert not os.path.exists(os.path.join(gdf._datadir, gdf._obj)) def test_delete_same_timestamp(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -831,7 +831,7 @@ class TestDiskFile(unittest.TestCase): assert os.path.exists(os.path.join(gdf._datadir, gdf._obj)) def test_delete_file_not_found(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, "wb") as fd: @@ -850,7 +850,7 @@ class TestDiskFile(unittest.TestCase): assert not os.path.exists(os.path.join(gdf._datadir, gdf._obj)) def test_delete_file_unlink_error(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) with open(the_file, 
"wb") as fd: @@ -884,7 +884,7 @@ class TestDiskFile(unittest.TestCase): assert os.path.exists(os.path.join(gdf._datadir, gdf._obj)) def test_delete_is_dir(self): - the_path = os.path.join(self.td, "vol0", "bar") + the_path = os.path.join(self.td, "vol0", "ufo47", "bar") the_dir = os.path.join(the_path, "d") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d") @@ -899,7 +899,7 @@ class TestDiskFile(unittest.TestCase): saved_tmppath = '' saved_fd = None with gdf.create() as dw: - assert gdf._datadir == os.path.join(self.td, "vol0", "bar", "dir") + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar", "dir") assert os.path.isdir(gdf._datadir) saved_tmppath = dw._tmppath assert os.path.dirname(saved_tmppath) == gdf._datadir @@ -922,7 +922,7 @@ class TestDiskFile(unittest.TestCase): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z") saved_tmppath = '' with gdf.create() as dw: - assert gdf._datadir == os.path.join(self.td, "vol0", "bar", "dir") + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar", "dir") assert os.path.isdir(gdf._datadir) saved_tmppath = dw._tmppath assert os.path.dirname(saved_tmppath) == gdf._datadir @@ -937,7 +937,7 @@ class TestDiskFile(unittest.TestCase): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z") saved_tmppath = '' with gdf.create() as dw: - assert gdf._datadir == os.path.join(self.td, "vol0", "bar", "dir") + assert gdf._datadir == os.path.join(self.td, "vol0", "ufo47", "bar", "dir") assert os.path.isdir(gdf._datadir) saved_tmppath = dw._tmppath assert os.path.dirname(saved_tmppath) == gdf._datadir diff --git a/test/unit/obj/test_expirer.py b/test/unit/obj/test_expirer.py deleted file mode 100644 index fd8100d..0000000 --- a/test/unit/obj/test_expirer.py +++ /dev/null @@ -1,701 +0,0 @@ -# Copyright (c) 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import urllib -from time import time -from unittest import main, TestCase -from test.unit import FakeLogger -from copy import deepcopy - -import mock - -from swift.common import internal_client -from swift.obj import expirer - - -def not_random(): - return 0.5 - - -last_not_sleep = 0 - - -def not_sleep(seconds): - global last_not_sleep - last_not_sleep = seconds - - -class TestObjectExpirer(TestCase): - maxDiff = None - - def setUp(self): - global not_sleep - - self.old_loadapp = internal_client.loadapp - self.old_sleep = internal_client.sleep - - internal_client.loadapp = lambda *a, **kw: None - internal_client.sleep = not_sleep - - def teardown(self): - internal_client.sleep = self.old_sleep - internal_client.loadapp = self.loadapp - - def test_get_process_values_from_kwargs(self): - x = expirer.ObjectExpirer({}) - vals = { - 'processes': 5, - 'process': 1, - } - self.assertEqual((5, 1), x.get_process_values(vals)) - - def test_get_process_values_from_config(self): - vals = { - 'processes': 5, - 'process': 1, - } - x = expirer.ObjectExpirer(vals) - self.assertEqual((5, 1), x.get_process_values({})) - - def test_get_process_values_negative_process(self): - vals = { - 'processes': 5, - 'process': -1, - } - # from config - x = expirer.ObjectExpirer(vals) - self.assertRaises(ValueError, x.get_process_values, {}) - # from kwargs - x = expirer.ObjectExpirer({}) - self.assertRaises(ValueError, x.get_process_values, vals) - - def test_get_process_values_negative_processes(self): - vals = { - 'processes': -5, - 'process': 1, - } - # from config - x = expirer.ObjectExpirer(vals) - self.assertRaises(ValueError, x.get_process_values, {}) - # from kwargs - x = expirer.ObjectExpirer({}) - self.assertRaises(ValueError, x.get_process_values, vals) - - def test_get_process_values_process_greater_than_processes(self): - vals = { - 'processes': 5, - 'process': 7, - } - # from config - x = expirer.ObjectExpirer(vals) - self.assertRaises(ValueError, x.get_process_values, {}) - # from kwargs - x = expirer.ObjectExpirer({}) - self.assertRaises(ValueError, x.get_process_values, vals) - - def test_init_concurrency_too_small(self): - conf = { - 'concurrency': 0, - } - self.assertRaises(ValueError, expirer.ObjectExpirer, conf) - conf = { - 'concurrency': -1, - } - self.assertRaises(ValueError, expirer.ObjectExpirer, conf) - - def test_process_based_concurrency(self): - - class ObjectExpirer(expirer.ObjectExpirer): - - def __init__(self, conf): - super(ObjectExpirer, self).__init__(conf) - self.processes = 3 - self.deleted_objects = {} - - def delete_object(self, actual_obj, timestamp, container, obj): - if container not in self.deleted_objects: - self.deleted_objects[container] = set() - self.deleted_objects[container].add(obj) - - class InternalClient(object): - - def __init__(self, containers): - self.containers = containers - - def get_account_info(self, *a, **kw): - return len(self.containers.keys()), \ - sum([len(self.containers[x]) for x in self.containers]) - - def iter_containers(self, *a, **kw): - return [{'name': x} for x in self.containers.keys()] - - def iter_objects(self, account, container): - return [{'name': x} for x in self.containers[container]] - - def delete_container(*a, **kw): - pass - - containers = { - 0: set('1-one 2-two 3-three'.split()), - 1: set('2-two 3-three 4-four'.split()), - 2: set('5-five 6-six'.split()), - 3: set('7-seven'.split()), - } - x = ObjectExpirer({}) - x.swift = InternalClient(containers) - - deleted_objects = {} - for i in xrange(3): - x.process = i - x.run_once() - 
self.assertNotEqual(deleted_objects, x.deleted_objects) - deleted_objects = deepcopy(x.deleted_objects) - self.assertEqual(containers, deleted_objects) - - def test_delete_object(self): - class InternalClient(object): - def __init__(self, test, account, container, obj): - self.test = test - self.account = account - self.container = container - self.obj = obj - self.delete_object_called = False - - def delete_object(self, account, container, obj): - self.test.assertEqual(self.account, account) - self.test.assertEqual(self.container, container) - self.test.assertEqual(self.obj, obj) - self.delete_object_called = True - - class DeleteActualObject(object): - def __init__(self, test, actual_obj, timestamp): - self.test = test - self.actual_obj = actual_obj - self.timestamp = timestamp - self.called = False - - def __call__(self, actual_obj, timestamp): - self.test.assertEqual(self.actual_obj, actual_obj) - self.test.assertEqual(self.timestamp, timestamp) - self.called = True - - container = 'container' - obj = 'obj' - actual_obj = 'actual_obj' - timestamp = 'timestamp' - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = \ - InternalClient(self, x.expiring_objects_account, container, obj) - x.delete_actual_object = \ - DeleteActualObject(self, actual_obj, timestamp) - - x.delete_object(actual_obj, timestamp, container, obj) - self.assertTrue(x.swift.delete_object_called) - self.assertTrue(x.delete_actual_object.called) - - def test_report(self): - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - - x.report() - self.assertEqual(x.logger.log_dict['info'], []) - - x.logger._clear() - x.report(final=True) - self.assertTrue('completed' in x.logger.log_dict['info'][-1][0][0], - x.logger.log_dict['info']) - self.assertTrue('so far' not in x.logger.log_dict['info'][-1][0][0], - x.logger.log_dict['info']) - - x.logger._clear() - x.report_last_time = time() - x.report_interval - x.report() - self.assertTrue('completed' not in x.logger.log_dict['info'][-1][0][0], - x.logger.log_dict['info']) - self.assertTrue('so far' in x.logger.log_dict['info'][-1][0][0], - x.logger.log_dict['info']) - - def test_run_once_nothing_to_do(self): - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = 'throw error because a string does not have needed methods' - x.run_once() - self.assertEqual(x.logger.log_dict['exception'], - [(("Unhandled exception",), {}, - "'str' object has no attribute " - "'get_account_info'")]) - - def test_run_once_calls_report(self): - class InternalClient(object): - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(*a, **kw): - return [] - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = InternalClient() - x.run_once() - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 0 objects expired',), {})]) - - def test_container_timestamp_break(self): - class InternalClient(object): - def __init__(self, containers): - self.containers = containers - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def iter_objects(*a, **kw): - raise Exception('This should not have been called') - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = InternalClient([{'name': str(int(time() + 86400))}]) - x.run_once() - for exccall in x.logger.log_dict['exception']: - self.assertTrue( - 'This should not have been called' not in exccall[0][0]) - 
self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 0 objects expired',), {})]) - - # Reverse test to be sure it still would blow up the way expected. - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = InternalClient([{'name': str(int(time() - 86400))}]) - x.run_once() - self.assertEqual( - x.logger.log_dict['exception'], - [(('Unhandled exception',), {}, - str(Exception('This should not have been called')))]) - - def test_object_timestamp_break(self): - class InternalClient(object): - def __init__(self, containers, objects): - self.containers = containers - self.objects = objects - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def delete_container(*a, **kw): - pass - - def iter_objects(self, *a, **kw): - return self.objects - - def should_not_be_called(*a, **kw): - raise Exception('This should not have been called') - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': '%d-actual-obj' % int(time() + 86400)}]) - x.run_once() - for exccall in x.logger.log_dict['exception']: - self.assertTrue( - 'This should not have been called' not in exccall[0][0]) - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 0 objects expired',), {})]) - - # Reverse test to be sure it still would blow up the way expected. - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - ts = int(time() - 86400) - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': '%d-actual-obj' % ts}]) - x.delete_actual_object = should_not_be_called - x.run_once() - excswhiledeleting = [] - for exccall in x.logger.log_dict['exception']: - if exccall[0][0].startswith('Exception while deleting '): - excswhiledeleting.append(exccall[0][0]) - self.assertEqual( - excswhiledeleting, - ['Exception while deleting object %d %d-actual-obj ' - 'This should not have been called' % (ts, ts)]) - - def test_failed_delete_keeps_entry(self): - class InternalClient(object): - def __init__(self, containers, objects): - self.containers = containers - self.objects = objects - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def delete_container(*a, **kw): - pass - - def delete_object(*a, **kw): - raise Exception('This should not have been called') - - def iter_objects(self, *a, **kw): - return self.objects - - def deliberately_blow_up(actual_obj, timestamp): - raise Exception('failed to delete actual object') - - def should_not_get_called(container, obj): - raise Exception('This should not have been called') - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.iter_containers = lambda: [str(int(time() - 86400))] - ts = int(time() - 86400) - x.delete_actual_object = deliberately_blow_up - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': '%d-actual-obj' % ts}]) - x.run_once() - excswhiledeleting = [] - for exccall in x.logger.log_dict['exception']: - if exccall[0][0].startswith('Exception while deleting '): - excswhiledeleting.append(exccall[0][0]) - self.assertEqual( - excswhiledeleting, - ['Exception while deleting object %d %d-actual-obj ' - 'failed to delete actual object' % (ts, ts)]) - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass 
beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 0 objects expired',), {})]) - - # Reverse test to be sure it still would blow up the way expected. - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - ts = int(time() - 86400) - x.delete_actual_object = lambda o, t: None - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': '%d-actual-obj' % ts}]) - x.run_once() - excswhiledeleting = [] - for exccall in x.logger.log_dict['exception']: - if exccall[0][0].startswith('Exception while deleting '): - excswhiledeleting.append(exccall[0][0]) - self.assertEqual( - excswhiledeleting, - ['Exception while deleting object %d %d-actual-obj This should ' - 'not have been called' % (ts, ts)]) - - def test_success_gets_counted(self): - class InternalClient(object): - def __init__(self, containers, objects): - self.containers = containers - self.objects = objects - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def delete_container(*a, **kw): - pass - - def delete_object(*a, **kw): - pass - - def iter_objects(self, *a, **kw): - return self.objects - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.delete_actual_object = lambda o, t: None - self.assertEqual(x.report_objects, 0) - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': '%d-actual-obj' % int(time() - 86400)}]) - x.run_once() - self.assertEqual(x.report_objects, 1) - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 1 objects expired',), {})]) - - def test_delete_actual_object_does_not_get_unicode(self): - class InternalClient(object): - def __init__(self, containers, objects): - self.containers = containers - self.objects = objects - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def delete_container(*a, **kw): - pass - - def delete_object(*a, **kw): - pass - - def iter_objects(self, *a, **kw): - return self.objects - - got_unicode = [False] - - def delete_actual_object_test_for_unicode(actual_obj, timestamp): - if isinstance(actual_obj, unicode): - got_unicode[0] = True - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - x.delete_actual_object = delete_actual_object_test_for_unicode - self.assertEqual(x.report_objects, 0) - x.swift = InternalClient( - [{'name': str(int(time() - 86400))}], - [{'name': u'%d-actual-obj' % int(time() - 86400)}]) - x.run_once() - self.assertEqual(x.report_objects, 1) - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 1 objects expired',), {})]) - self.assertFalse(got_unicode[0]) - - def test_failed_delete_continues_on(self): - class InternalClient(object): - def __init__(self, containers, objects): - self.containers = containers - self.objects = objects - - def get_account_info(*a, **kw): - return 1, 2 - - def iter_containers(self, *a, **kw): - return self.containers - - def delete_container(*a, **kw): - raise Exception('failed to delete container') - - def delete_object(*a, **kw): - pass - - def iter_objects(self, *a, **kw): - return self.objects - - def fail_delete_actual_object(actual_obj, timestamp): - raise Exception('failed to delete actual object') - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - - cts = int(time() - 86400) - ots = 
int(time() - 86400) - - containers = [ - {'name': str(cts)}, - {'name': str(cts + 1)}, - ] - - objects = [ - {'name': '%d-actual-obj' % ots}, - {'name': '%d-next-obj' % ots} - ] - - x.swift = InternalClient(containers, objects) - x.delete_actual_object = fail_delete_actual_object - x.run_once() - excswhiledeleting = [] - for exccall in x.logger.log_dict['exception']: - if exccall[0][0].startswith('Exception while deleting '): - excswhiledeleting.append(exccall[0][0]) - self.assertEqual(sorted(excswhiledeleting), sorted([ - 'Exception while deleting object %d %d-actual-obj failed to ' - 'delete actual object' % (cts, ots), - 'Exception while deleting object %d %d-next-obj failed to ' - 'delete actual object' % (cts, ots), - 'Exception while deleting object %d %d-actual-obj failed to ' - 'delete actual object' % (cts + 1, ots), - 'Exception while deleting object %d %d-next-obj failed to ' - 'delete actual object' % (cts + 1, ots), - 'Exception while deleting container %d failed to delete ' - 'container' % (cts,), - 'Exception while deleting container %d failed to delete ' - 'container' % (cts + 1,)])) - self.assertEqual( - x.logger.log_dict['info'], - [(('Pass beginning; 1 possible containers; ' - '2 possible objects',), {}), - (('Pass completed in 0s; 0 objects expired',), {})]) - - def test_run_forever_initial_sleep_random(self): - global last_not_sleep - - def raise_system_exit(): - raise SystemExit('test_run_forever') - - interval = 1234 - x = expirer.ObjectExpirer({'__file__': 'unit_test', - 'interval': interval}) - orig_random = expirer.random - orig_sleep = expirer.sleep - try: - expirer.random = not_random - expirer.sleep = not_sleep - x.run_once = raise_system_exit - x.run_forever() - except SystemExit as err: - pass - finally: - expirer.random = orig_random - expirer.sleep = orig_sleep - self.assertEqual(str(err), 'test_run_forever') - self.assertEqual(last_not_sleep, 0.5 * interval) - - def test_run_forever_catches_usual_exceptions(self): - raises = [0] - - def raise_exceptions(): - raises[0] += 1 - if raises[0] < 2: - raise Exception('exception %d' % raises[0]) - raise SystemExit('exiting exception %d' % raises[0]) - - x = expirer.ObjectExpirer({}) - x.logger = FakeLogger() - orig_sleep = expirer.sleep - try: - expirer.sleep = not_sleep - x.run_once = raise_exceptions - x.run_forever() - except SystemExit as err: - pass - finally: - expirer.sleep = orig_sleep - self.assertEqual(str(err), 'exiting exception 2') - self.assertEqual(x.logger.log_dict['exception'], - [(('Unhandled exception',), {}, - 'exception 1')]) - - def test_delete_actual_object(self): - got_env = [None] - - def fake_app(env, start_response): - got_env[0] = env - start_response('204 No Content', [('Content-Length', '0')]) - return [] - - internal_client.loadapp = lambda *a, **kw: fake_app - - x = expirer.ObjectExpirer({}) - ts = '1234' - x.delete_actual_object('/path/to/object', ts) - self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts) - - def test_delete_actual_object_nourlquoting(self): - # delete_actual_object should not do its own url quoting because - # internal client's make_request handles that. 
- got_env = [None] - - def fake_app(env, start_response): - got_env[0] = env - start_response('204 No Content', [('Content-Length', '0')]) - return [] - - internal_client.loadapp = lambda *a, **kw: fake_app - - x = expirer.ObjectExpirer({}) - ts = '1234' - x.delete_actual_object('/path/to/object name', ts) - self.assertEqual(got_env[0]['HTTP_X_IF_DELETE_AT'], ts) - self.assertEqual(got_env[0]['PATH_INFO'], '/v1/path/to/object name') - - def test_delete_actual_object_handles_404(self): - - def fake_app(env, start_response): - start_response('404 Not Found', [('Content-Length', '0')]) - return [] - - internal_client.loadapp = lambda *a, **kw: fake_app - - x = expirer.ObjectExpirer({}) - x.delete_actual_object('/path/to/object', '1234') - - def test_delete_actual_object_handles_412(self): - - def fake_app(env, start_response): - start_response('412 Precondition Failed', - [('Content-Length', '0')]) - return [] - - internal_client.loadapp = lambda *a, **kw: fake_app - - x = expirer.ObjectExpirer({}) - x.delete_actual_object('/path/to/object', '1234') - - def test_delete_actual_object_does_not_handle_odd_stuff(self): - - def fake_app(env, start_response): - start_response( - '503 Internal Server Error', - [('Content-Length', '0')]) - return [] - - internal_client.loadapp = lambda *a, **kw: fake_app - - x = expirer.ObjectExpirer({}) - exc = None - try: - x.delete_actual_object('/path/to/object', '1234') - except Exception as err: - exc = err - finally: - pass - self.assertEqual(503, exc.resp.status_int) - - def test_delete_actual_object_quotes(self): - name = 'this name should get quoted' - timestamp = '1366063156.863045' - x = expirer.ObjectExpirer({}) - x.swift.make_request = mock.MagicMock() - x.delete_actual_object(name, timestamp) - x.swift.make_request.assert_called_once() - self.assertEqual(x.swift.make_request.call_args[0][1], - '/v1/' + urllib.quote(name)) - - -if __name__ == '__main__': - main() diff --git a/test/unit/proxy/__init__.py b/test/unit/proxy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/proxy/controllers/__init__.py b/test/unit/proxy/controllers/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/test/unit/proxy/controllers/test_account.py b/test/unit/proxy/controllers/test_account.py deleted file mode 100644 index 47f76dc..0000000 --- a/test/unit/proxy/controllers/test_account.py +++ /dev/null @@ -1,244 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import mock -import unittest - -from swift.common.swob import Request, Response -from swift.common.middleware.acl import format_acl -from swift.proxy import server as proxy_server -from swift.proxy.controllers.base import headers_to_account_info -from swift.common.constraints import MAX_ACCOUNT_NAME_LENGTH as MAX_ANAME_LEN -from test.unit import fake_http_connect, FakeRing, FakeMemcache -from swift.common.request_helpers import get_sys_meta_prefix -import swift.proxy.controllers.base - - -class TestAccountController(unittest.TestCase): - def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - - def test_account_info_in_response_env(self): - controller = proxy_server.AccountController(self.app, 'AUTH_bob') - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, body='')): - req = Request.blank('/v1/AUTH_bob', {'PATH_INFO': '/v1/AUTH_bob'}) - resp = controller.HEAD(req) - self.assertEqual(2, resp.status_int // 100) - self.assertTrue('swift.account/AUTH_bob' in resp.environ) - self.assertEqual(headers_to_account_info(resp.headers), - resp.environ['swift.account/AUTH_bob']) - - def test_swift_owner(self): - owner_headers = { - 'x-account-meta-temp-url-key': 'value', - 'x-account-meta-temp-url-key-2': 'value'} - controller = proxy_server.AccountController(self.app, 'a') - - req = Request.blank('/v1/a') - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, headers=owner_headers)): - resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) - for key in owner_headers: - self.assertTrue(key not in resp.headers) - - req = Request.blank('/v1/a', environ={'swift_owner': True}) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, headers=owner_headers)): - resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) - for key in owner_headers: - self.assertTrue(key in resp.headers) - - def test_get_deleted_account(self): - resp_headers = { - 'x-account-status': 'deleted', - } - controller = proxy_server.AccountController(self.app, 'a') - - req = Request.blank('/v1/a') - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(404, headers=resp_headers)): - resp = controller.HEAD(req) - self.assertEquals(410, resp.status_int) - - def test_long_acct_names(self): - long_acct_name = '%sLongAccountName' % ('Very' * (MAX_ANAME_LEN // 4)) - controller = proxy_server.AccountController(self.app, long_acct_name) - - req = Request.blank('/v1/%s' % long_acct_name) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200)): - resp = controller.HEAD(req) - self.assertEquals(400, resp.status_int) - - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200)): - resp = controller.GET(req) - self.assertEquals(400, resp.status_int) - - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200)): - resp = controller.POST(req) - self.assertEquals(400, resp.status_int) - - def _make_callback_func(self, context): - def callback(ipaddr, port, device, partition, method, path, - headers=None, query_string=None, ssl=False): - context['method'] = method - context['path'] = path - context['headers'] = headers or {} - return callback - - def test_sys_meta_headers_PUT(self): - # check that headers in sys meta namespace make it through - # the proxy controller - sys_meta_key = '%stest' % 
get_sys_meta_prefix('account') - sys_meta_key = sys_meta_key.title() - user_meta_key = 'X-Account-Meta-Test' - # allow PUTs to account... - self.app.allow_account_management = True - controller = proxy_server.AccountController(self.app, 'a') - context = {} - callback = self._make_callback_func(context) - hdrs_in = {sys_meta_key: 'foo', - user_meta_key: 'bar', - 'x-timestamp': '1.0'} - req = Request.blank('/v1/a', headers=hdrs_in) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, give_connect=callback)): - controller.PUT(req) - self.assertEqual(context['method'], 'PUT') - self.assertTrue(sys_meta_key in context['headers']) - self.assertEqual(context['headers'][sys_meta_key], 'foo') - self.assertTrue(user_meta_key in context['headers']) - self.assertEqual(context['headers'][user_meta_key], 'bar') - self.assertNotEqual(context['headers']['x-timestamp'], '1.0') - - def test_sys_meta_headers_POST(self): - # check that headers in sys meta namespace make it through - # the proxy controller - sys_meta_key = '%stest' % get_sys_meta_prefix('account') - sys_meta_key = sys_meta_key.title() - user_meta_key = 'X-Account-Meta-Test' - controller = proxy_server.AccountController(self.app, 'a') - context = {} - callback = self._make_callback_func(context) - hdrs_in = {sys_meta_key: 'foo', - user_meta_key: 'bar', - 'x-timestamp': '1.0'} - req = Request.blank('/v1/a', headers=hdrs_in) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, give_connect=callback)): - controller.POST(req) - self.assertEqual(context['method'], 'POST') - self.assertTrue(sys_meta_key in context['headers']) - self.assertEqual(context['headers'][sys_meta_key], 'foo') - self.assertTrue(user_meta_key in context['headers']) - self.assertEqual(context['headers'][user_meta_key], 'bar') - self.assertNotEqual(context['headers']['x-timestamp'], '1.0') - - def _make_user_and_sys_acl_headers_data(self): - acl = { - 'admin': ['AUTH_alice', 'AUTH_bob'], - 'read-write': ['AUTH_carol'], - 'read-only': [], - } - user_prefix = 'x-account-' # external, user-facing - user_headers = {(user_prefix + 'access-control'): format_acl( - version=2, acl_dict=acl)} - sys_prefix = get_sys_meta_prefix('account') # internal, system-facing - sys_headers = {(sys_prefix + 'core-access-control'): format_acl( - version=2, acl_dict=acl)} - return user_headers, sys_headers - - def test_account_acl_headers_translated_for_GET_HEAD(self): - # Verify that a GET/HEAD which receives X-Account-Sysmeta-Acl-* headers - # from the account server will remap those headers to X-Account-Acl-* - - hdrs_ext, hdrs_int = self._make_user_and_sys_acl_headers_data() - controller = proxy_server.AccountController(self.app, 'acct') - - for verb in ('GET', 'HEAD'): - req = Request.blank('/v1/acct', environ={'swift_owner': True}) - controller.GETorHEAD_base = lambda *_: Response( - headers=hdrs_int, environ={ - 'PATH_INFO': '/acct', - 'REQUEST_METHOD': verb, - }) - method = getattr(controller, verb) - resp = method(req) - for header, value in hdrs_ext.items(): - if value: - self.assertEqual(resp.headers.get(header), value) - else: - # blank ACLs should result in no header - self.assert_(header not in resp.headers) - - def test_add_acls_impossible_cases(self): - # For test coverage: verify that defensive coding does defend, in cases - # that shouldn't arise naturally - - # add_acls should do nothing if REQUEST_METHOD isn't HEAD/GET/PUT/POST - resp = Response() - controller = proxy_server.AccountController(self.app, 
'a') - resp.environ['PATH_INFO'] = '/a' - resp.environ['REQUEST_METHOD'] = 'OPTIONS' - controller.add_acls_from_sys_metadata(resp) - self.assertEqual(1, len(resp.headers)) # we always get Content-Type - self.assertEqual(2, len(resp.environ)) - - def test_memcache_key_impossible_cases(self): - # For test coverage: verify that defensive coding does defend, in cases - # that shouldn't arise naturally - self.assertRaises( - ValueError, - lambda: swift.proxy.controllers.base.get_container_memcache_key( - '/a', None)) - - def test_stripping_swift_admin_headers(self): - # Verify that a GET/HEAD which receives privileged headers from the - # account server will strip those headers for non-swift_owners - - hdrs_ext, hdrs_int = self._make_user_and_sys_acl_headers_data() - headers = { - 'x-account-meta-harmless': 'hi mom', - 'x-account-meta-temp-url-key': 's3kr1t', - } - controller = proxy_server.AccountController(self.app, 'acct') - - for verb in ('GET', 'HEAD'): - for env in ({'swift_owner': True}, {'swift_owner': False}): - req = Request.blank('/v1/acct', environ=env) - controller.GETorHEAD_base = lambda *_: Response( - headers=headers, environ={ - 'PATH_INFO': '/acct', - 'REQUEST_METHOD': verb, - }) - method = getattr(controller, verb) - resp = method(req) - self.assertEqual(resp.headers.get('x-account-meta-harmless'), - 'hi mom') - privileged_header_present = ( - 'x-account-meta-temp-url-key' in resp.headers) - self.assertEqual(privileged_header_present, env['swift_owner']) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py deleted file mode 100644 index 0c94f90..0000000 --- a/test/unit/proxy/controllers/test_base.py +++ /dev/null @@ -1,534 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest -from mock import patch -from swift.proxy.controllers.base import headers_to_container_info, \ - headers_to_account_info, headers_to_object_info, get_container_info, \ - get_container_memcache_key, get_account_info, get_account_memcache_key, \ - get_object_env_key, _get_cache_key, get_info, get_object_info, \ - Controller, GetOrHeadHandler -from swift.common.swob import Request, HTTPException, HeaderKeyDict -from swift.common.utils import split_path -from test.unit import fake_http_connect, FakeRing, FakeMemcache -from swift.proxy import server as proxy_server -from swift.common.request_helpers import get_sys_meta_prefix - - -FakeResponse_status_int = 201 - - -class FakeResponse(object): - def __init__(self, headers, env, account, container, obj): - self.headers = headers - self.status_int = FakeResponse_status_int - self.environ = env - if obj: - env_key = get_object_env_key(account, container, obj) - else: - cache_key, env_key = _get_cache_key(account, container) - - if account and container and obj: - info = headers_to_object_info(headers, FakeResponse_status_int) - elif account and container: - info = headers_to_container_info(headers, FakeResponse_status_int) - else: - info = headers_to_account_info(headers, FakeResponse_status_int) - env[env_key] = info - - -class FakeRequest(object): - def __init__(self, env, path, swift_source=None): - self.environ = env - (version, account, container, obj) = split_path(path, 2, 4, True) - self.account = account - self.container = container - self.obj = obj - if obj: - stype = 'object' - self.headers = {'content-length': 5555, - 'content-type': 'text/plain'} - else: - stype = container and 'container' or 'account' - self.headers = {'x-%s-object-count' % (stype): 1000, - 'x-%s-bytes-used' % (stype): 6666} - if swift_source: - meta = 'x-%s-meta-fakerequest-swift-source' % stype - self.headers[meta] = swift_source - - def get_response(self, app): - return FakeResponse(self.headers, self.environ, self.account, - self.container, self.obj) - - -class FakeCache(object): - def __init__(self, val): - self.val = val - - def get(self, *args): - return self.val - - -class TestFuncs(unittest.TestCase): - def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing) - - def test_GETorHEAD_base(self): - base = Controller(self.app) - req = Request.blank('/v1/a/c/o/with/slashes') - with patch('swift.proxy.controllers.base.' - 'http_connect', fake_http_connect(200)): - resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part', - '/a/c/o/with/slashes') - self.assertTrue('swift.object/a/c/o/with/slashes' in resp.environ) - self.assertEqual( - resp.environ['swift.object/a/c/o/with/slashes']['status'], 200) - req = Request.blank('/v1/a/c/o') - with patch('swift.proxy.controllers.base.' - 'http_connect', fake_http_connect(200)): - resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part', - '/a/c/o') - self.assertTrue('swift.object/a/c/o' in resp.environ) - self.assertEqual(resp.environ['swift.object/a/c/o']['status'], 200) - req = Request.blank('/v1/a/c') - with patch('swift.proxy.controllers.base.' - 'http_connect', fake_http_connect(200)): - resp = base.GETorHEAD_base(req, 'container', FakeRing(), 'part', - '/a/c') - self.assertTrue('swift.container/a/c' in resp.environ) - self.assertEqual(resp.environ['swift.container/a/c']['status'], 200) - - req = Request.blank('/v1/a') - with patch('swift.proxy.controllers.base.' 
- 'http_connect', fake_http_connect(200)): - resp = base.GETorHEAD_base(req, 'account', FakeRing(), 'part', - '/a') - self.assertTrue('swift.account/a' in resp.environ) - self.assertEqual(resp.environ['swift.account/a']['status'], 200) - - def test_get_info(self): - global FakeResponse_status_int - # Do a non cached call to account - env = {} - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - info_a = get_info(None, env, 'a') - # Check that you got proper info - self.assertEquals(info_a['status'], 201) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - - # Do an env cached call to account - info_a = get_info(None, env, 'a') - # Check that you got proper info - self.assertEquals(info_a['status'], 201) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - - # This time do env cached call to account and non cached to container - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - info_c = get_info(None, env, 'a', 'c') - # Check that you got proper info - self.assertEquals(info_a['status'], 201) - self.assertEquals(info_c['bytes'], 6666) - self.assertEquals(info_c['object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - self.assertEquals(env.get('swift.container/a/c'), info_c) - - # This time do a non cached call to account than non cached to - # container - env = {} # abandon previous call to env - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - info_c = get_info(None, env, 'a', 'c') - # Check that you got proper info - self.assertEquals(info_a['status'], 201) - self.assertEquals(info_c['bytes'], 6666) - self.assertEquals(info_c['object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - self.assertEquals(env.get('swift.container/a/c'), info_c) - - # This time do an env cached call to container while account is not - # cached - del(env['swift.account/a']) - info_c = get_info(None, env, 'a', 'c') - # Check that you got proper info - self.assertEquals(info_a['status'], 201) - self.assertEquals(info_c['bytes'], 6666) - self.assertEquals(info_c['object_count'], 1000) - # Make sure the env cache is set and account still not cached - self.assertEquals(env.get('swift.container/a/c'), info_c) - - # Do a non cached call to account not found with ret_not_found - env = {} - with patch('swift.proxy.controllers.base.' 
- '_prepare_pre_auth_info_request', FakeRequest): - try: - FakeResponse_status_int = 404 - info_a = get_info(None, env, 'a', ret_not_found=True) - finally: - FakeResponse_status_int = 201 - # Check that you got proper info - self.assertEquals(info_a['status'], 404) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - - # Do a cached call to account not found with ret_not_found - info_a = get_info(None, env, 'a', ret_not_found=True) - # Check that you got proper info - self.assertEquals(info_a['status'], 404) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) - # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - - # Do a non cached call to account not found without ret_not_found - env = {} - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - try: - FakeResponse_status_int = 404 - info_a = get_info(None, env, 'a') - finally: - FakeResponse_status_int = 201 - # Check that you got proper info - self.assertEquals(info_a, None) - self.assertEquals(env['swift.account/a']['status'], 404) - - # Do a cached call to account not found without ret_not_found - info_a = get_info(None, env, 'a') - # Check that you got proper info - self.assertEquals(info_a, None) - self.assertEquals(env['swift.account/a']['status'], 404) - - def test_get_container_info_swift_source(self): - req = Request.blank("/v1/a/c", environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_container_info(req.environ, 'app', swift_source='MC') - self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC') - - def test_get_object_info_swift_source(self): - req = Request.blank("/v1/a/c/o", - environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_object_info(req.environ, 'app', swift_source='LU') - self.assertEquals(resp['meta']['fakerequest-swift-source'], 'LU') - - def test_get_container_info_no_cache(self): - req = Request.blank("/v1/AUTH_account/cont", - environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_container_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 6666) - self.assertEquals(resp['object_count'], 1000) - - def test_get_container_info_cache(self): - cached = {'status': 404, - 'bytes': 3333, - 'object_count': 10, - # simplejson sometimes hands back strings, sometimes unicodes - 'versions': u"\u1F4A9"} - req = Request.blank("/v1/account/cont", - environ={'swift.cache': FakeCache(cached)}) - with patch('swift.proxy.controllers.base.' 
- '_prepare_pre_auth_info_request', FakeRequest): - resp = get_container_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3333) - self.assertEquals(resp['object_count'], 10) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['versions'], "\xe1\xbd\x8a\x39") - - def test_get_container_info_env(self): - cache_key = get_container_memcache_key("account", "cont") - env_key = 'swift.%s' % cache_key - req = Request.blank("/v1/account/cont", - environ={env_key: {'bytes': 3867}, - 'swift.cache': FakeCache({})}) - resp = get_container_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3867) - - def test_get_account_info_swift_source(self): - req = Request.blank("/v1/a", environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_account_info(req.environ, 'a', swift_source='MC') - self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC') - - def test_get_account_info_no_cache(self): - req = Request.blank("/v1/AUTH_account", - environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_account_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 6666) - self.assertEquals(resp['total_object_count'], 1000) - - def test_get_account_info_cache(self): - # The original test that we prefer to preserve - cached = {'status': 404, - 'bytes': 3333, - 'total_object_count': 10} - req = Request.blank("/v1/account/cont", - environ={'swift.cache': FakeCache(cached)}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_account_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3333) - self.assertEquals(resp['total_object_count'], 10) - self.assertEquals(resp['status'], 404) - - # Here is a more realistic test - cached = {'status': 404, - 'bytes': '3333', - 'container_count': '234', - 'total_object_count': '10', - 'meta': {}} - req = Request.blank("/v1/account/cont", - environ={'swift.cache': FakeCache(cached)}) - with patch('swift.proxy.controllers.base.' - '_prepare_pre_auth_info_request', FakeRequest): - resp = get_account_info(req.environ, 'xxx') - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['bytes'], '3333') - self.assertEquals(resp['container_count'], 234) - self.assertEquals(resp['meta'], {}) - self.assertEquals(resp['total_object_count'], '10') - - def test_get_account_info_env(self): - cache_key = get_account_memcache_key("account") - env_key = 'swift.%s' % cache_key - req = Request.blank("/v1/account", - environ={env_key: {'bytes': 3867}, - 'swift.cache': FakeCache({})}) - resp = get_account_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3867) - - def test_get_object_info_env(self): - cached = {'status': 200, - 'length': 3333, - 'type': 'application/json', - 'meta': {}} - env_key = get_object_env_key("account", "cont", "obj") - req = Request.blank("/v1/account/cont/obj", - environ={env_key: cached, - 'swift.cache': FakeCache({})}) - resp = get_object_info(req.environ, 'xxx') - self.assertEquals(resp['length'], 3333) - self.assertEquals(resp['type'], 'application/json') - - def test_get_object_info_no_env(self): - req = Request.blank("/v1/account/cont/obj", - environ={'swift.cache': FakeCache({})}) - with patch('swift.proxy.controllers.base.' 
- '_prepare_pre_auth_info_request', FakeRequest): - resp = get_object_info(req.environ, 'xxx') - self.assertEquals(resp['length'], 5555) - self.assertEquals(resp['type'], 'text/plain') - - def test_headers_to_container_info_missing(self): - resp = headers_to_container_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['read_acl'], None) - self.assertEquals(resp['write_acl'], None) - - def test_headers_to_container_info_meta(self): - headers = {'X-Container-Meta-Whatevs': 14, - 'x-container-meta-somethingelse': 0} - resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) - - def test_headers_to_container_info_sys_meta(self): - prefix = get_sys_meta_prefix('container') - headers = {'%sWhatevs' % prefix: 14, - '%ssomethingelse' % prefix: 0} - resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(len(resp['sysmeta']), 2) - self.assertEquals(resp['sysmeta']['whatevs'], 14) - self.assertEquals(resp['sysmeta']['somethingelse'], 0) - - def test_headers_to_container_info_values(self): - headers = { - 'x-container-read': 'readvalue', - 'x-container-write': 'writevalue', - 'x-container-sync-key': 'keyvalue', - 'x-container-meta-access-control-allow-origin': 'here', - } - resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(resp['read_acl'], 'readvalue') - self.assertEquals(resp['write_acl'], 'writevalue') - self.assertEquals(resp['cors']['allow_origin'], 'here') - - headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( - resp, - headers_to_container_info(headers.items(), 200)) - - def test_headers_to_account_info_missing(self): - resp = headers_to_account_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['bytes'], None) - self.assertEquals(resp['container_count'], None) - - def test_headers_to_account_info_meta(self): - headers = {'X-Account-Meta-Whatevs': 14, - 'x-account-meta-somethingelse': 0} - resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) - - def test_headers_to_account_info_sys_meta(self): - prefix = get_sys_meta_prefix('account') - headers = {'%sWhatevs' % prefix: 14, - '%ssomethingelse' % prefix: 0} - resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(len(resp['sysmeta']), 2) - self.assertEquals(resp['sysmeta']['whatevs'], 14) - self.assertEquals(resp['sysmeta']['somethingelse'], 0) - - def test_headers_to_account_info_values(self): - headers = { - 'x-account-object-count': '10', - 'x-account-container-count': '20', - } - resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(resp['total_object_count'], '10') - self.assertEquals(resp['container_count'], '20') - - headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( - resp, - headers_to_account_info(headers.items(), 200)) - - def test_headers_to_object_info_missing(self): - resp = headers_to_object_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['length'], None) - self.assertEquals(resp['etag'], None) - - def test_headers_to_object_info_meta(self): - headers = {'X-Object-Meta-Whatevs': 14, - 'x-object-meta-somethingelse': 0} - resp = headers_to_object_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - 
self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) - - def test_headers_to_object_info_values(self): - headers = { - 'content-length': '1024', - 'content-type': 'application/json', - } - resp = headers_to_object_info(headers.items(), 200) - self.assertEquals(resp['length'], '1024') - self.assertEquals(resp['type'], 'application/json') - - headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( - resp, - headers_to_object_info(headers.items(), 200)) - - def test_have_quorum(self): - base = Controller(self.app) - # just throw a bunch of test cases at it - self.assertEqual(base.have_quorum([201, 404], 3), False) - self.assertEqual(base.have_quorum([201, 201], 4), False) - self.assertEqual(base.have_quorum([201, 201, 404, 404], 4), False) - self.assertEqual(base.have_quorum([201, 503, 503, 201], 4), False) - self.assertEqual(base.have_quorum([201, 201], 3), True) - self.assertEqual(base.have_quorum([404, 404], 3), True) - self.assertEqual(base.have_quorum([201, 201], 2), True) - self.assertEqual(base.have_quorum([404, 404], 2), True) - self.assertEqual(base.have_quorum([201, 404, 201, 201], 4), True) - - def test_range_fast_forward(self): - req = Request.blank('/') - handler = GetOrHeadHandler(None, req, None, None, None, None, {}) - handler.fast_forward(50) - self.assertEquals(handler.backend_headers['Range'], 'bytes=50-') - - handler = GetOrHeadHandler(None, req, None, None, None, None, - {'Range': 'bytes=23-50'}) - handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=43-50') - self.assertRaises(HTTPException, - handler.fast_forward, 80) - - handler = GetOrHeadHandler(None, req, None, None, None, None, - {'Range': 'bytes=23-'}) - handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=43-') - - handler = GetOrHeadHandler(None, req, None, None, None, None, - {'Range': 'bytes=-100'}) - handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=-80') - - def test_transfer_headers_with_sysmeta(self): - base = Controller(self.app) - good_hdrs = {'x-base-sysmeta-foo': 'ok', - 'X-Base-sysmeta-Bar': 'also ok'} - bad_hdrs = {'x-base-sysmeta-': 'too short'} - hdrs = dict(good_hdrs) - hdrs.update(bad_hdrs) - dst_hdrs = HeaderKeyDict() - base.transfer_headers(hdrs, dst_hdrs) - self.assertEqual(HeaderKeyDict(good_hdrs), dst_hdrs) - - def test_generate_request_headers(self): - base = Controller(self.app) - src_headers = {'x-remove-base-meta-owner': 'x', - 'x-base-meta-size': '151M', - 'new-owner': 'Kun'} - req = Request.blank('/v1/a/c/o', headers=src_headers) - dst_headers = base.generate_request_headers(req, transfer=True) - expected_headers = {'x-base-meta-owner': '', - 'x-base-meta-size': '151M'} - for k, v in expected_headers.iteritems(): - self.assertTrue(k in dst_headers) - self.assertEqual(v, dst_headers[k]) - self.assertFalse('new-owner' in dst_headers) - - def test_generate_request_headers_with_sysmeta(self): - base = Controller(self.app) - good_hdrs = {'x-base-sysmeta-foo': 'ok', - 'X-Base-sysmeta-Bar': 'also ok'} - bad_hdrs = {'x-base-sysmeta-': 'too short'} - hdrs = dict(good_hdrs) - hdrs.update(bad_hdrs) - req = Request.blank('/v1/a/c/o', headers=hdrs) - dst_headers = base.generate_request_headers(req, transfer=True) - for k, v in good_hdrs.iteritems(): - self.assertTrue(k.lower() in dst_headers) - self.assertEqual(v, dst_headers[k.lower()]) - for k, v in bad_hdrs.iteritems(): - self.assertFalse(k.lower() in dst_headers) diff --git 
a/test/unit/proxy/controllers/test_container.py b/test/unit/proxy/controllers/test_container.py deleted file mode 100644 index 7c8ecf7..0000000 --- a/test/unit/proxy/controllers/test_container.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -import unittest - -from swift.common.swob import Request -from swift.proxy import server as proxy_server -from swift.proxy.controllers.base import headers_to_container_info -from test.unit import fake_http_connect, FakeRing, FakeMemcache -from swift.common.request_helpers import get_sys_meta_prefix - - -class TestContainerController(unittest.TestCase): - def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - - def test_container_info_in_response_env(self): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, body='')): - req = Request.blank('/v1/a/c', {'PATH_INFO': '/v1/a/c'}) - resp = controller.HEAD(req) - self.assertEqual(2, resp.status_int // 100) - self.assertTrue("swift.container/a/c" in resp.environ) - self.assertEqual(headers_to_container_info(resp.headers), - resp.environ['swift.container/a/c']) - - def test_swift_owner(self): - owner_headers = { - 'x-container-read': 'value', 'x-container-write': 'value', - 'x-container-sync-key': 'value', 'x-container-sync-to': 'value'} - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - req = Request.blank('/v1/a/c') - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, headers=owner_headers)): - resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) - for key in owner_headers: - self.assertTrue(key not in resp.headers) - - req = Request.blank('/v1/a/c', environ={'swift_owner': True}) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, headers=owner_headers)): - resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) - for key in owner_headers: - self.assertTrue(key in resp.headers) - - def _make_callback_func(self, context): - def callback(ipaddr, port, device, partition, method, path, - headers=None, query_string=None, ssl=False): - context['method'] = method - context['path'] = path - context['headers'] = headers or {} - return callback - - def test_sys_meta_headers_PUT(self): - # check that headers in sys meta namespace make it through - # the container controller - sys_meta_key = '%stest' % get_sys_meta_prefix('container') - sys_meta_key = sys_meta_key.title() - user_meta_key = 'X-Container-Meta-Test' - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - context = {} - callback = self._make_callback_func(context) - hdrs_in = {sys_meta_key: 'foo', - user_meta_key: 'bar', - 'x-timestamp': '1.0'} - req = Request.blank('/v1/a/c', headers=hdrs_in) 
- with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, give_connect=callback)): - controller.PUT(req) - self.assertEqual(context['method'], 'PUT') - self.assertTrue(sys_meta_key in context['headers']) - self.assertEqual(context['headers'][sys_meta_key], 'foo') - self.assertTrue(user_meta_key in context['headers']) - self.assertEqual(context['headers'][user_meta_key], 'bar') - self.assertNotEqual(context['headers']['x-timestamp'], '1.0') - - def test_sys_meta_headers_POST(self): - # check that headers in sys meta namespace make it through - # the container controller - sys_meta_key = '%stest' % get_sys_meta_prefix('container') - sys_meta_key = sys_meta_key.title() - user_meta_key = 'X-Container-Meta-Test' - controller = proxy_server.ContainerController(self.app, 'a', 'c') - context = {} - callback = self._make_callback_func(context) - hdrs_in = {sys_meta_key: 'foo', - user_meta_key: 'bar', - 'x-timestamp': '1.0'} - req = Request.blank('/v1/a/c', headers=hdrs_in) - with mock.patch('swift.proxy.controllers.base.http_connect', - fake_http_connect(200, 200, give_connect=callback)): - controller.POST(req) - self.assertEqual(context['method'], 'POST') - self.assertTrue(sys_meta_key in context['headers']) - self.assertEqual(context['headers'][sys_meta_key], 'foo') - self.assertTrue(user_meta_key in context['headers']) - self.assertEqual(context['headers'][user_meta_key], 'bar') - self.assertNotEqual(context['headers']['x-timestamp'], '1.0') - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/proxy/controllers/test_info.py b/test/unit/proxy/controllers/test_info.py deleted file mode 100644 index f33beba..0000000 --- a/test/unit/proxy/controllers/test_info.py +++ /dev/null @@ -1,293 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest -import time -from mock import Mock - -from swift.proxy.controllers import InfoController -from swift.proxy.server import Application as ProxyApp -from swift.common import utils -from swift.common.utils import json -from swift.common.swob import Request, HTTPException - - -class TestInfoController(unittest.TestCase): - - def setUp(self): - utils._swift_info = {} - utils._swift_admin_info = {} - - def get_controller(self, expose_info=None, disallowed_sections=None, - admin_key=None): - disallowed_sections = disallowed_sections or [] - - app = Mock(spec=ProxyApp) - return InfoController(app, None, expose_info, - disallowed_sections, admin_key) - - def start_response(self, status, headers): - self.got_statuses.append(status) - for h in headers: - self.got_headers.append({h[0]: h[1]}) - - def test_disabled_info(self): - controller = self.get_controller(expose_info=False) - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('403 Forbidden', str(resp)) - - def test_get_info(self): - controller = self.get_controller(expose_info=True) - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - info = json.loads(resp.body) - self.assertTrue('admin' not in info) - self.assertTrue('foo' in info) - self.assertTrue('bar' in info['foo']) - self.assertEqual(info['foo']['bar'], 'baz') - - def test_options_info(self): - controller = self.get_controller(expose_info=True) - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.OPTIONS(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - self.assertTrue('Allow' in resp.headers) - - def test_get_info_cors(self): - controller = self.get_controller(expose_info=True) - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'GET'}, - headers={'Origin': 'http://example.com'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - info = json.loads(resp.body) - self.assertTrue('admin' not in info) - self.assertTrue('foo' in info) - self.assertTrue('bar' in info['foo']) - self.assertEqual(info['foo']['bar'], 'baz') - self.assertTrue('Access-Control-Allow-Origin' in resp.headers) - self.assertTrue('Access-Control-Expose-Headers' in resp.headers) - - def test_head_info(self): - controller = self.get_controller(expose_info=True) - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'HEAD'}) - resp = controller.HEAD(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - - def test_disallow_info(self): - controller = self.get_controller(expose_info=True, - disallowed_sections=['foo2']) - utils._swift_info = {'foo': {'bar': 'baz'}, - 'foo2': {'bar2': 'baz2'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - req = Request.blank( - '/info', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - info = 
json.loads(resp.body) - self.assertTrue('foo' in info) - self.assertTrue('bar' in info['foo']) - self.assertEqual(info['foo']['bar'], 'baz') - self.assertTrue('foo2' not in info) - - def test_disabled_admin_info(self): - controller = self.get_controller(expose_info=True, admin_key='') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/info', expires, '') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('403 Forbidden', str(resp)) - - def test_get_admin_info(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - info = json.loads(resp.body) - self.assertTrue('admin' in info) - self.assertTrue('qux' in info['admin']) - self.assertTrue('quux' in info['admin']['qux']) - self.assertEqual(info['admin']['qux']['quux'], 'corge') - - def test_head_admin_info(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'HEAD'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - - expires = int(time.time() + 86400) - sig = utils.get_hmac('HEAD', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'HEAD'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - - def test_get_admin_info_invalid_method(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('HEAD', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('401 Unauthorized', str(resp)) - - def test_get_admin_info_invalid_expires(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = 1 - sig = utils.get_hmac('GET', '/info', 
expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('401 Unauthorized', str(resp)) - - expires = 'abc' - sig = utils.get_hmac('GET', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('401 Unauthorized', str(resp)) - - def test_get_admin_info_invalid_path(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/foo', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('401 Unauthorized', str(resp)) - - def test_get_admin_info_invalid_key(self): - controller = self.get_controller(expose_info=True, - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/foo', expires, 'invalid-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('401 Unauthorized', str(resp)) - - def test_admin_disallow_info(self): - controller = self.get_controller(expose_info=True, - disallowed_sections=['foo2'], - admin_key='secret-admin-key') - utils._swift_info = {'foo': {'bar': 'baz'}, - 'foo2': {'bar2': 'baz2'}} - utils._swift_admin_info = {'qux': {'quux': 'corge'}} - - expires = int(time.time() + 86400) - sig = utils.get_hmac('GET', '/info', expires, 'secret-admin-key') - path = '/info?swiftinfo_sig={sig}&swiftinfo_expires={expires}'.format( - sig=sig, expires=expires) - req = Request.blank( - path, environ={'REQUEST_METHOD': 'GET'}) - resp = controller.GET(req) - self.assertTrue(isinstance(resp, HTTPException)) - self.assertEqual('200 OK', str(resp)) - info = json.loads(resp.body) - self.assertTrue('foo2' not in info) - self.assertTrue('admin' in info) - self.assertTrue('disallowed_sections' in info['admin']) - self.assertTrue('foo2' in info['admin']['disallowed_sections']) - self.assertTrue('qux' in info['admin']) - self.assertTrue('quux' in info['admin']['qux']) - self.assertEqual(info['admin']['qux']['quux'], 'corge') - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py deleted file mode 100755 index 4942691..0000000 --- a/test/unit/proxy/controllers/test_obj.py +++ /dev/null @@ -1,199 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest -from contextlib import contextmanager - -import mock - -import swift -from swift.proxy import server as proxy_server -from swift.common.swob import HTTPException -from test.unit import FakeRing, FakeMemcache, fake_http_connect, debug_logger - - -@contextmanager -def set_http_connect(*args, **kwargs): - old_connect = swift.proxy.controllers.base.http_connect - new_connect = fake_http_connect(*args, **kwargs) - swift.proxy.controllers.base.http_connect = new_connect - swift.proxy.controllers.obj.http_connect = new_connect - swift.proxy.controllers.account.http_connect = new_connect - swift.proxy.controllers.container.http_connect = new_connect - yield new_connect - swift.proxy.controllers.base.http_connect = old_connect - swift.proxy.controllers.obj.http_connect = old_connect - swift.proxy.controllers.account.http_connect = old_connect - swift.proxy.controllers.container.http_connect = old_connect - - -class TestObjControllerWriteAffinity(unittest.TestCase): - def setUp(self): - self.app = proxy_server.Application( - None, FakeMemcache(), account_ring=FakeRing(), - container_ring=FakeRing(), object_ring=FakeRing(max_more_nodes=9)) - self.app.request_node_count = lambda replicas: 10000000 - self.app.sort_nodes = lambda l: l # stop shuffling the primary nodes - - def test_iter_nodes_local_first_noops_when_no_affinity(self): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - self.app.write_affinity_is_local_fn = None - - all_nodes = self.app.object_ring.get_part_nodes(1) - all_nodes.extend(self.app.object_ring.get_more_nodes(1)) - - local_first_nodes = list(controller.iter_nodes_local_first( - self.app.object_ring, 1)) - - self.maxDiff = None - - self.assertEqual(all_nodes, local_first_nodes) - - def test_iter_nodes_local_first_moves_locals_first(self): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - self.app.write_affinity_is_local_fn = ( - lambda node: node['region'] == 1) - self.app.write_affinity_node_count = lambda ring: 4 - - all_nodes = self.app.object_ring.get_part_nodes(1) - all_nodes.extend(self.app.object_ring.get_more_nodes(1)) - - local_first_nodes = list(controller.iter_nodes_local_first( - self.app.object_ring, 1)) - - # the local nodes move up in the ordering - self.assertEqual([1, 1, 1, 1], - [node['region'] for node in local_first_nodes[:4]]) - # we don't skip any nodes - self.assertEqual(sorted(all_nodes), sorted(local_first_nodes)) - - def test_connect_put_node_timeout(self): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - self.app.conn_timeout = 0.1 - with set_http_connect(200, slow_connect=True): - nodes = [dict(ip='', port='', device='')] - res = controller._connect_put_node(nodes, '', '', {}, ('', '')) - self.assertTrue(res is None) - - -class TestObjController(unittest.TestCase): - def setUp(self): - logger = debug_logger('proxy-server') - logger.thread_locals = ('txn1', '127.0.0.2') - self.app = proxy_server.Application( - None, FakeMemcache(), account_ring=FakeRing(), - container_ring=FakeRing(), object_ring=FakeRing(), - logger=logger) - self.controller = 
proxy_server.ObjectController(self.app, - 'a', 'c', 'o') - self.controller.container_info = mock.MagicMock(return_value={ - 'partition': 1, - 'nodes': [ - {'ip': '127.0.0.1', 'port': '1', 'device': 'sda'}, - {'ip': '127.0.0.1', 'port': '2', 'device': 'sda'}, - {'ip': '127.0.0.1', 'port': '3', 'device': 'sda'}, - ], - 'write_acl': None, - 'read_acl': None, - 'sync_key': None, - 'versions': None}) - - def test_PUT_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.headers['content-length'] = '0' - with set_http_connect(201, 201, 201): - resp = self.controller.PUT(req) - self.assertEquals(resp.status_int, 201) - - def test_PUT_if_none_match(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.headers['if-none-match'] = '*' - req.headers['content-length'] = '0' - with set_http_connect(201, 201, 201): - resp = self.controller.PUT(req) - self.assertEquals(resp.status_int, 201) - - def test_PUT_if_none_match_denied(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.headers['if-none-match'] = '*' - req.headers['content-length'] = '0' - with set_http_connect(201, (412, 412), 201): - resp = self.controller.PUT(req) - self.assertEquals(resp.status_int, 412) - - def test_PUT_if_none_match_not_star(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.headers['if-none-match'] = 'somethingelse' - req.headers['content-length'] = '0' - with set_http_connect(201, 201, 201): - resp = self.controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - def test_GET_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - with set_http_connect(200): - resp = self.controller.GET(req) - self.assertEquals(resp.status_int, 200) - - def test_DELETE_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - with set_http_connect(204, 204, 204): - resp = self.controller.DELETE(req) - self.assertEquals(resp.status_int, 204) - - def test_POST_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - with set_http_connect(200, 200, 200, 201, 201, 201): - resp = self.controller.POST(req) - self.assertEquals(resp.status_int, 202) - - def test_COPY_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - with set_http_connect(200, 200, 200, 201, 201, 201): - resp = self.controller.POST(req) - self.assertEquals(resp.status_int, 202) - - def test_HEAD_simple(self): - req = swift.common.swob.Request.blank('/v1/a/c/o') - with set_http_connect(200, 200, 200, 201, 201, 201): - resp = self.controller.POST(req) - self.assertEquals(resp.status_int, 202) - - def test_PUT_log_info(self): - # mock out enough to get to the area of the code we want to test - with mock.patch('swift.proxy.controllers.obj.check_object_creation', - mock.MagicMock(return_value=None)): - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.headers['x-copy-from'] = 'somewhere' - try: - self.controller.PUT(req) - except HTTPException: - pass - self.assertEquals( - req.environ.get('swift.log_info'), ['x-copy-from:somewhere']) - # and then check that we don't do that for originating POSTs - req = swift.common.swob.Request.blank('/v1/a/c/o') - req.method = 'POST' - req.headers['x-copy-from'] = 'elsewhere' - try: - self.controller.PUT(req) - except HTTPException: - pass - self.assertEquals(req.environ.get('swift.log_info'), None) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py deleted file mode 100644 index 1a59016..0000000 --- a/test/unit/proxy/test_server.py +++ /dev/null 
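[Illustrative aside, not part of the patch] Both deleted modules stub the proxy's backends by swapping the module-level http_connect reference in each controller module and putting the original back afterwards; the test_obj.py context manager above restores it after a bare yield, whereas save_globals() in test_server.py below wraps the restore in try/finally, so it also runs when the test body raises. A dependency-free sketch of that safer shape follows (every name here -- _FakeControllerModule, fake_connect -- is invented for illustration):

    from contextlib import contextmanager

    class _FakeControllerModule(object):
        # Stands in for swift.proxy.controllers.obj / .container / .account;
        # only the attribute the tests swap out is modelled.
        http_connect = None

    fake_module = _FakeControllerModule()

    @contextmanager
    def set_http_connect(new_connect):
        # Swap in the fake and guarantee the original reference is restored
        # even if the test body raises.
        old_connect = fake_module.http_connect
        fake_module.http_connect = new_connect
        try:
            yield new_connect
        finally:
            fake_module.http_connect = old_connect

    def fake_connect(*args, **kwargs):
        return 'canned backend response'

    with set_http_connect(fake_connect):
        assert fake_module.http_connect() == 'canned backend response'
    assert fake_module.http_connect is None   # original (None) restored

mock.patch.object would achieve the same guarantee; the deleted helpers get there either with an explicit try/finally (save_globals) or, less robustly, by restoring after the yield.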
@@ -1,5939 +0,0 @@ -# Copyright (c) 2010-2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import with_statement -import cPickle as pickle -import logging -import os -import sys -import unittest -import urlparse -from nose import SkipTest -from contextlib import contextmanager, nested, closing -from gzip import GzipFile -from shutil import rmtree -import gc -import time -from urllib import quote -from hashlib import md5 -from tempfile import mkdtemp -import weakref -import re - -import mock -from eventlet import sleep, spawn, wsgi, listen -import simplejson - -import gluster.swift.common.Glusterfs as gfs -gfs.RUN_DIR = mkdtemp() - -from test.unit import connect_tcp, readuntil2crlfs, FakeLogger, \ - fake_http_connect, FakeRing, FakeMemcache, debug_logger -from gluster.swift.proxy import server as proxy_server -from gluster.swift.account import server as account_server -from gluster.swift.container import server as container_server -from gluster.swift.obj import server as object_server -from swift.common import ring -from swift.common.middleware import proxy_logging -from swift.common.middleware.acl import parse_acl, format_acl -from swift.common.exceptions import ChunkReadTimeout -from swift.common.constraints import MAX_META_NAME_LENGTH, \ - MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \ - MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH, \ - ACCOUNT_LISTING_LIMIT, CONTAINER_LISTING_LIMIT, MAX_OBJECT_NAME_LENGTH -from swift.common import utils -from swift.common.utils import mkdirs, normalize_timestamp, NullLogger -from swift.common.wsgi import monkey_patch_mimetools -from swift.proxy.controllers import base as proxy_base -from swift.proxy.controllers.base import get_container_memcache_key, \ - get_account_memcache_key, cors_validation -import swift.proxy.controllers -from swift.common.request_helpers import get_sys_meta_prefix -from swift.common.swob import Request, Response, HTTPUnauthorized, \ - HTTPException - -# mocks -logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) - - -STATIC_TIME = time.time() -_test_coros = _test_servers = _test_sockets = _orig_container_listing_limit = \ - _testdir = _orig_SysLogHandler = None - - -def do_setup(the_object_server): - utils.HASH_PATH_SUFFIX = 'endcap' - global _testdir, _test_servers, _test_sockets, \ - _orig_container_listing_limit, _test_coros, _orig_SysLogHandler - _orig_SysLogHandler = utils.SysLogHandler - utils.SysLogHandler = mock.MagicMock() - monkey_patch_mimetools() - # Since we're starting up a lot here, we're going to test more than - # just chunked puts; we're also going to test parts of - # proxy_server.Application we couldn't get to easily otherwise. 
- _testdir = os.path.join(gfs.RUN_DIR, 'swift') - mkdirs(_testdir) - rmtree(_testdir) - mkdirs(os.path.join(_testdir, 'sda1')) - mkdirs(os.path.join(_testdir, 'sda1', 'tmp')) - mkdirs(os.path.join(_testdir, 'sdb1')) - mkdirs(os.path.join(_testdir, 'sdb1', 'tmp')) - mkdirs(os.path.join(_testdir, 'a')) - mkdirs(os.path.join(_testdir, 'a', 'tmp')) - conf = {'devices': _testdir, 'swift_dir': _testdir, - 'mount_check': 'false', 'allowed_headers': - 'content-encoding, x-object-manifest, content-disposition, foo', - 'allow_versions': 'True'} - prolis = listen(('localhost', 0)) - acc1lis = listen(('localhost', 0)) - acc2lis = listen(('localhost', 0)) - con1lis = listen(('localhost', 0)) - con2lis = listen(('localhost', 0)) - obj1lis = listen(('localhost', 0)) - obj2lis = listen(('localhost', 0)) - _test_sockets = \ - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) - account_ring_path = os.path.join(_testdir, 'account.ring.gz') - with closing(GzipFile(account_ring_path, 'wb')) as f: - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': acc1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': acc2lis.getsockname()[1]}, - # Gluster volume mapping to device - {'id': 1, 'zone': 1, 'device': 'a', 'ip': '127.0.0.1', - 'port': acc2lis.getsockname()[1]}], 30), - f) - container_ring_path = os.path.join(_testdir, 'container.ring.gz') - with closing(GzipFile(container_ring_path, 'wb')) as f: - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': con1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': con2lis.getsockname()[1]}, - # Gluster volume mapping to device - {'id': 1, 'zone': 1, 'device': 'a', 'ip': '127.0.0.1', - 'port': con2lis.getsockname()[1]}], 30), - f) - object_ring_path = os.path.join(_testdir, 'object.ring.gz') - with closing(GzipFile(object_ring_path, 'wb')) as f: - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': obj1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': obj2lis.getsockname()[1]}, - # Gluster volume mapping to device - {'id': 1, 'zone': 1, 'device': 'a', 'ip': '127.0.0.1', - 'port': obj2lis.getsockname()[1]}], 30), - f) - prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone(), - logger=debug_logger('proxy')) - acc1srv = account_server.AccountController( - conf, logger=debug_logger('acct1')) - acc2srv = account_server.AccountController( - conf, logger=debug_logger('acct2')) - con1srv = container_server.ContainerController( - conf, logger=debug_logger('cont1')) - con2srv = container_server.ContainerController( - conf, logger=debug_logger('cont2')) - obj1srv = the_object_server.ObjectController( - conf, logger=debug_logger('obj1')) - obj2srv = the_object_server.ObjectController( - conf, logger=debug_logger('obj2')) - _test_servers = \ - (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv) - nl = NullLogger() - logging_prosv = proxy_logging.ProxyLoggingMiddleware(prosrv, conf, - logger=prosrv.logger) - prospa = spawn(wsgi.server, prolis, logging_prosv, nl) - acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl) - acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl) - con1spa = spawn(wsgi.server, con1lis, con1srv, nl) - con2spa = spawn(wsgi.server, con2lis, con2srv, nl) - obj1spa = spawn(wsgi.server, obj1lis, 
obj1srv, nl) - obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl) - _test_coros = \ - (prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa) - # Gluster: ensure account exists - ts = normalize_timestamp(time.time()) - partition, nodes = prosrv.account_ring.get_nodes('a') - for node in nodes: - conn = swift.proxy.controllers.obj.http_connect(node['ip'], - node['port'], - node['device'], - partition, 'PUT', '/a', - {'X-Timestamp': ts, - 'x-trans-id': 'test'}) - resp = conn.getresponse() - - # For GlusterFS the volume should have already been created since - # accounts map to volumes. Expect a 202 instead of a 201 as for - # OpenStack Swift's proxy unit test the account is explicitly created. - assert(resp.status == 202) - # Create container - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % ( - exp, headers[:len(exp)]) - - -def setup(): - do_setup(object_server) - - -def teardown(): - for server in _test_coros: - server.kill() - rmtree(os.path.dirname(_testdir)) - utils.SysLogHandler = _orig_SysLogHandler - - -def sortHeaderNames(headerNames): - """ - Return the given string of header names sorted. - - headerName: a comma-delimited list of header names - """ - headers = [a.strip() for a in headerNames.split(',') if a.strip()] - headers.sort() - return ', '.join(headers) - - -class FakeMemcacheReturnsNone(FakeMemcache): - - def get(self, key): - # Returns None as the timestamp of the container; assumes we're only - # using the FakeMemcache for container existence checks. 
- return None - - -@contextmanager -def save_globals(): - orig_http_connect = getattr(swift.proxy.controllers.base, 'http_connect', - None) - orig_account_info = getattr(swift.proxy.controllers.Controller, - 'account_info', None) - try: - yield True - finally: - swift.proxy.controllers.Controller.account_info = orig_account_info - swift.proxy.controllers.base.http_connect = orig_http_connect - swift.proxy.controllers.obj.http_connect = orig_http_connect - swift.proxy.controllers.account.http_connect = orig_http_connect - swift.proxy.controllers.container.http_connect = orig_http_connect - - -def set_http_connect(*args, **kwargs): - new_connect = fake_http_connect(*args, **kwargs) - swift.proxy.controllers.base.http_connect = new_connect - swift.proxy.controllers.obj.http_connect = new_connect - swift.proxy.controllers.account.http_connect = new_connect - swift.proxy.controllers.container.http_connect = new_connect - return new_connect - - -# tests -class TestController(unittest.TestCase): - - def setUp(self): - self.account_ring = FakeRing() - self.container_ring = FakeRing() - self.memcache = FakeMemcache() - - app = proxy_server.Application(None, self.memcache, - account_ring=self.account_ring, - container_ring=self.container_ring, - object_ring=FakeRing()) - self.controller = swift.proxy.controllers.Controller(app) - - class FakeReq(object): - def __init__(self): - self.url = "/foo/bar" - self.method = "METHOD" - - def as_referer(self): - return self.method + ' ' + self.url - - self.account = 'some_account' - self.container = 'some_container' - self.request = FakeReq() - self.read_acl = 'read_acl' - self.write_acl = 'write_acl' - - def test_transfer_headers(self): - src_headers = {'x-remove-base-meta-owner': 'x', - 'x-base-meta-size': '151M', - 'new-owner': 'Kun'} - dst_headers = {'x-base-meta-owner': 'Gareth', - 'x-base-meta-size': '150M'} - self.controller.transfer_headers(src_headers, dst_headers) - expected_headers = {'x-base-meta-owner': '', - 'x-base-meta-size': '151M'} - self.assertEquals(dst_headers, expected_headers) - - def check_account_info_return(self, partition, nodes, is_none=False): - if is_none: - p, n = None, None - else: - p, n = self.account_ring.get_nodes(self.account) - self.assertEqual(p, partition) - self.assertEqual(n, nodes) - - def test_account_info_container_count(self): - with save_globals(): - set_http_connect(200, count=123) - partition, nodes, count = \ - self.controller.account_info(self.account) - self.assertEquals(count, 123) - with save_globals(): - set_http_connect(200, count='123') - partition, nodes, count = \ - self.controller.account_info(self.account) - self.assertEquals(count, 123) - with save_globals(): - cache_key = get_account_memcache_key(self.account) - account_info = {'status': 200, 'container_count': 1234} - self.memcache.set(cache_key, account_info) - partition, nodes, count = \ - self.controller.account_info(self.account) - self.assertEquals(count, 1234) - with save_globals(): - cache_key = get_account_memcache_key(self.account) - account_info = {'status': 200, 'container_count': '1234'} - self.memcache.set(cache_key, account_info) - partition, nodes, count = \ - self.controller.account_info(self.account) - self.assertEquals(count, 1234) - - def test_make_requests(self): - with save_globals(): - set_http_connect(200) - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - set_http_connect(201, raise_timeout_exc=True) - self.controller._make_request( - nodes, partition, 'POST', '/', '', '', - 
self.controller.app.logger.thread_locals) - - # tests if 200 is cached and used - def test_account_info_200(self): - with save_globals(): - set_http_connect(200) - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.check_account_info_return(partition, nodes) - self.assertEquals(count, 12345) - - # Test the internal representation in memcache - # 'container_count' changed from int to str - cache_key = get_account_memcache_key(self.account) - container_info = {'status': 200, - 'container_count': '12345', - 'total_object_count': None, - 'bytes': None, - 'meta': {}, - 'sysmeta': {}} - self.assertEquals(container_info, - self.memcache.get(cache_key)) - - set_http_connect() - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.check_account_info_return(partition, nodes) - self.assertEquals(count, 12345) - - # tests if 404 is cached and used - def test_account_info_404(self): - with save_globals(): - set_http_connect(404, 404, 404) - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) - - # Test the internal representation in memcache - # 'container_count' changed from 0 to None - cache_key = get_account_memcache_key(self.account) - account_info = {'status': 404, - 'container_count': None, # internally keep None - 'total_object_count': None, - 'bytes': None, - 'meta': {}, - 'sysmeta': {}} - self.assertEquals(account_info, - self.memcache.get(cache_key)) - - set_http_connect() - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) - - # tests if some http status codes are not cached - def test_account_info_no_cache(self): - def test(*status_list): - set_http_connect(*status_list) - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.assertEqual(len(self.memcache.keys()), 0) - self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) - - with save_globals(): - # We cache if we have two 404 responses - fail if only one - test(503, 503, 404) - test(504, 404, 503) - test(404, 507, 503) - test(503, 503, 503) - - def test_account_info_no_account(self): - with save_globals(): - self.memcache.store = {} - set_http_connect(404, 404, 404) - partition, nodes, count = \ - self.controller.account_info(self.account, self.request) - self.check_account_info_return(partition, nodes, is_none=True) - self.assertEquals(count, None) - - def check_container_info_return(self, ret, is_none=False): - if is_none: - partition, nodes, read_acl, write_acl = None, None, None, None - else: - partition, nodes = self.container_ring.get_nodes(self.account, - self.container) - read_acl, write_acl = self.read_acl, self.write_acl - self.assertEqual(partition, ret['partition']) - self.assertEqual(nodes, ret['nodes']) - self.assertEqual(read_acl, ret['read_acl']) - self.assertEqual(write_acl, ret['write_acl']) - - def test_container_info_invalid_account(self): - def account_info(self, account, request, autocreate=False): - return None, None - - with save_globals(): - swift.proxy.controllers.Controller.account_info = account_info - ret = self.controller.container_info(self.account, - self.container, - self.request) - self.check_container_info_return(ret, True) - - # tests if 200 is cached and used - def test_container_info_200(self): - - 
with save_globals(): - headers = {'x-container-read': self.read_acl, - 'x-container-write': self.write_acl} - set_http_connect(200, # account_info is found - 200, headers=headers) # container_info is found - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret) - - cache_key = get_container_memcache_key(self.account, - self.container) - cache_value = self.memcache.get(cache_key) - self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(200, cache_value.get('status')) - - set_http_connect() - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret) - - # tests if 404 is cached and used - def test_container_info_404(self): - def account_info(self, account, request): - return True, True, 0 - - with save_globals(): - set_http_connect(503, 204, # account_info found - 504, 404, 404) # container_info 'NotFound' - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret, True) - - cache_key = get_container_memcache_key(self.account, - self.container) - cache_value = self.memcache.get(cache_key) - self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(404, cache_value.get('status')) - - set_http_connect() - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret, True) - - set_http_connect(503, 404, 404) # account_info 'NotFound' - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret, True) - - cache_key = get_container_memcache_key(self.account, - self.container) - cache_value = self.memcache.get(cache_key) - self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(404, cache_value.get('status')) - - set_http_connect() - ret = self.controller.container_info( - self.account, self.container, self.request) - self.check_container_info_return(ret, True) - - # tests if some http status codes are not cached - def test_container_info_no_cache(self): - def test(*status_list): - set_http_connect(*status_list) - ret = self.controller.container_info( - self.account, self.container, self.request) - self.assertEqual(len(self.memcache.keys()), 0) - self.check_container_info_return(ret, True) - - with save_globals(): - # We cache if we have two 404 responses - fail if only one - test(503, 503, 404) - test(504, 404, 503) - test(404, 507, 503) - test(503, 503, 503) - - -class TestProxyServer(unittest.TestCase): - - def test_unhandled_exception(self): - - class MyApp(proxy_server.Application): - - def get_controller(self, path): - raise Exception('this shouldnt be caught') - - app = MyApp(None, FakeMemcache(), account_ring=FakeRing(), - container_ring=FakeRing(), object_ring=FakeRing()) - req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) - app.update_request(req) - resp = app.handle_request(req) - self.assertEquals(resp.status_int, 500) - - def test_internal_method_request(self): - baseapp = proxy_server.Application({}, - FakeMemcache(), - container_ring=FakeRing(), - object_ring=FakeRing(), - account_ring=FakeRing()) - resp = baseapp.handle_request( - Request.blank('/v1/a', environ={'REQUEST_METHOD': '__init__'})) - self.assertEquals(resp.status, '405 Method Not Allowed') - - def test_inexistent_method_request(self): - baseapp = proxy_server.Application({}, - FakeMemcache(), - container_ring=FakeRing(), - 
account_ring=FakeRing(), - object_ring=FakeRing()) - resp = baseapp.handle_request( - Request.blank('/v1/a', environ={'REQUEST_METHOD': '!invalid'})) - self.assertEquals(resp.status, '405 Method Not Allowed') - - def test_calls_authorize_allow(self): - called = [False] - - def authorize(req): - called[0] = True - with save_globals(): - set_http_connect(200) - app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - req = Request.blank('/v1/a') - req.environ['swift.authorize'] = authorize - app.update_request(req) - app.handle_request(req) - self.assert_(called[0]) - - def test_calls_authorize_deny(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - req = Request.blank('/v1/a') - req.environ['swift.authorize'] = authorize - app.update_request(req) - app.handle_request(req) - self.assert_(called[0]) - - def test_negative_content_length(self): - swift_dir = mkdtemp() - try: - baseapp = proxy_server.Application({'swift_dir': swift_dir}, - FakeMemcache(), FakeLogger(), - FakeRing(), FakeRing(), - FakeRing()) - resp = baseapp.handle_request( - Request.blank('/', environ={'CONTENT_LENGTH': '-1'})) - self.assertEquals(resp.status, '400 Bad Request') - self.assertEquals(resp.body, 'Invalid Content-Length') - resp = baseapp.handle_request( - Request.blank('/', environ={'CONTENT_LENGTH': '-123'})) - self.assertEquals(resp.status, '400 Bad Request') - self.assertEquals(resp.body, 'Invalid Content-Length') - finally: - rmtree(swift_dir, ignore_errors=True) - - def test_denied_host_header(self): - swift_dir = mkdtemp() - try: - baseapp = proxy_server.Application({'swift_dir': swift_dir, - 'deny_host_headers': - 'invalid_host.com'}, - FakeMemcache(), FakeLogger(), - FakeRing(), FakeRing(), - FakeRing()) - resp = baseapp.handle_request( - Request.blank('/v1/a/c/o', - environ={'HTTP_HOST': 'invalid_host.com'})) - self.assertEquals(resp.status, '403 Forbidden') - finally: - rmtree(swift_dir, ignore_errors=True) - - def test_node_timing(self): - baseapp = proxy_server.Application({'sorting_method': 'timing'}, - FakeMemcache(), - container_ring=FakeRing(), - object_ring=FakeRing(), - account_ring=FakeRing()) - self.assertEquals(baseapp.node_timings, {}) - - req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) - baseapp.update_request(req) - resp = baseapp.handle_request(req) - self.assertEquals(resp.status_int, 503) # couldn't connect to anything - exp_timings = {} - self.assertEquals(baseapp.node_timings, exp_timings) - - times = [time.time()] - exp_timings = {'127.0.0.1': (0.1, times[0] + baseapp.timing_expiry)} - with mock.patch('swift.proxy.server.time', lambda: times.pop(0)): - baseapp.set_node_timing({'ip': '127.0.0.1'}, 0.1) - self.assertEquals(baseapp.node_timings, exp_timings) - - nodes = [{'ip': '127.0.0.1'}, {'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}] - with mock.patch('swift.proxy.server.shuffle', lambda l: l): - res = baseapp.sort_nodes(nodes) - exp_sorting = [{'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}, - {'ip': '127.0.0.1'}] - self.assertEquals(res, exp_sorting) - - def test_node_affinity(self): - baseapp = proxy_server.Application({'sorting_method': 'affinity', - 'read_affinity': 'r1=1'}, - FakeMemcache(), - container_ring=FakeRing(), - object_ring=FakeRing(), - account_ring=FakeRing()) - - nodes = [{'region': 
2, 'zone': 1, 'ip': '127.0.0.1'}, - {'region': 1, 'zone': 2, 'ip': '127.0.0.2'}] - with mock.patch('swift.proxy.server.shuffle', lambda x: x): - app_sorted = baseapp.sort_nodes(nodes) - exp_sorted = [{'region': 1, 'zone': 2, 'ip': '127.0.0.2'}, - {'region': 2, 'zone': 1, 'ip': '127.0.0.1'}] - self.assertEquals(exp_sorted, app_sorted) - - def test_info_defaults(self): - app = proxy_server.Application({}, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - - self.assertTrue(app.expose_info) - self.assertTrue(isinstance(app.disallowed_sections, list)) - self.assertEqual(0, len(app.disallowed_sections)) - self.assertTrue(app.admin_key is None) - - def test_get_info_controller(self): - path = '/info' - app = proxy_server.Application({}, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - - controller, path_parts = app.get_controller(path) - - self.assertTrue('version' in path_parts) - self.assertTrue(path_parts['version'] is None) - self.assertTrue('disallowed_sections' in path_parts) - self.assertTrue('expose_info' in path_parts) - self.assertTrue('admin_key' in path_parts) - - self.assertEqual(controller.__name__, 'InfoController') - - -class TestObjectController(unittest.TestCase): - - def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), - logger=debug_logger('proxy-ut'), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing()) - - def tearDown(self): - self.app.account_ring.set_replicas(3) - self.app.container_ring.set_replicas(3) - self.app.object_ring.set_replicas(3) - - def assert_status_map(self, method, statuses, expected, raise_exc=False): - with save_globals(): - kwargs = {} - if raise_exc: - kwargs['raise_exc'] = raise_exc - - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', - headers={'Content-Length': '0', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - - # repeat test - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', - headers={'Content-Length': '0', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - - def test_GET_newest_large_file(self): - prolis = _test_sockets[0] - prosrv = _test_servers[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - obj = 'a' * (1024 * 1024) - path = '/v1/a/c/o.large' - fd.write('PUT %s HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Length: %s\r\n' - 'Content-Type: application/octet-stream\r\n' - '\r\n%s' % (path, str(len(obj)), obj)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - req = Request.blank(path, - environ={'REQUEST_METHOD': 'GET'}, - headers={'Content-Type': - 'application/octet-stream', - 'X-Newest': 'true'}) - res = req.get_response(prosrv) - self.assertEqual(res.status_int, 200) - self.assertEqual(res.body, obj) - - def test_PUT_expect_header_zero_content_length(self): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a/c/o.jpg': - if 'expect' in headers or 'Expect' in headers: - test_errors.append('Expect was in headers for object ' - 'server!') - - with save_globals(): - controller 
= proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - # The (201, -4) tuples in there have the effect of letting the - # initial connect succeed, after which getexpect() gets called and - # then the -4 makes the response of that actually be 201 instead of - # 100. Perfectly straightforward. - set_http_connect(200, 200, (201, -4), (201, -4), (201, -4), - give_connect=test_connect) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 0 - self.app.update_request(req) - self.app.memcache.store = {} - res = controller.PUT(req) - self.assertEqual(test_errors, []) - self.assertTrue(res.status.startswith('201 '), res.status) - - def test_PUT_expect_header_nonzero_content_length(self): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a/c/o.jpg': - if 'Expect' not in headers: - test_errors.append('Expect was not in headers for ' - 'non-zero byte PUT!') - - with save_globals(): - controller = \ - proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg') - set_http_connect(200, 200, 201, 201, 201, - give_connect=test_connect) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 1 - req.body = 'a' - self.app.update_request(req) - self.app.memcache.store = {} - res = controller.PUT(req) - self.assertEqual(test_errors, []) - self.assertTrue(res.status.startswith('201 ')) - - def test_PUT_respects_write_affinity(self): - written_to = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a/c/o.jpg': - written_to.append((ipaddr, port, device)) - - with save_globals(): - def is_r0(node): - return node['region'] == 0 - - self.app.object_ring.max_more_nodes = 100 - self.app.write_affinity_is_local_fn = is_r0 - self.app.write_affinity_node_count = lambda r: 3 - - controller = \ - proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg') - set_http_connect(200, 200, 201, 201, 201, - give_connect=test_connect) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 1 - req.body = 'a' - self.app.memcache.store = {} - res = controller.PUT(req) - self.assertTrue(res.status.startswith('201 ')) - - self.assertEqual(3, len(written_to)) - for ip, port, device in written_to: - # this is kind of a hokey test, but in FakeRing, the port is even - # when the region is 0, and odd when the region is 1, so this test - # asserts that we only wrote to nodes in region 0. 
- self.assertEqual(0, port % 2) - - def test_PUT_respects_write_affinity_with_507s(self): - written_to = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a/c/o.jpg': - written_to.append((ipaddr, port, device)) - - with save_globals(): - def is_r0(node): - return node['region'] == 0 - - self.app.object_ring.max_more_nodes = 100 - self.app.write_affinity_is_local_fn = is_r0 - self.app.write_affinity_node_count = lambda r: 3 - - controller = \ - proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg') - self.app.error_limit( - self.app.object_ring.get_part_nodes(1)[0], 'test') - set_http_connect(200, 200, # account, container - 201, 201, 201, # 3 working backends - give_connect=test_connect) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 1 - req.body = 'a' - self.app.memcache.store = {} - res = controller.PUT(req) - self.assertTrue(res.status.startswith('201 ')) - - self.assertEqual(3, len(written_to)) - # this is kind of a hokey test, but in FakeRing, the port is even when - # the region is 0, and odd when the region is 1, so this test asserts - # that we wrote to 2 nodes in region 0, then went to 1 non-r0 node. - self.assertEqual(0, written_to[0][1] % 2) # it's (ip, port, device) - self.assertEqual(0, written_to[1][1] % 2) - self.assertNotEqual(0, written_to[2][1] % 2) - - def test_PUT_message_length_using_content_length(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - obj = 'j' * 20 - fd.write('PUT /v1/a/c/o.content-length HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Length: %s\r\n' - 'Content-Type: application/octet-stream\r\n' - '\r\n%s' % (str(len(obj)), obj)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - - def test_PUT_message_length_using_transfer_encoding(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Type: application/octet-stream\r\n' - 'Transfer-Encoding: chunked\r\n\r\n' - '2\r\n' - 'oh\r\n' - '4\r\n' - ' say\r\n' - '4\r\n' - ' can\r\n' - '4\r\n' - ' you\r\n' - '4\r\n' - ' see\r\n' - '3\r\n' - ' by\r\n' - '4\r\n' - ' the\r\n' - '8\r\n' - ' dawns\'\n\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - - def test_PUT_message_length_using_both(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Type: application/octet-stream\r\n' - 'Content-Length: 33\r\n' - 'Transfer-Encoding: chunked\r\n\r\n' - '2\r\n' - 'oh\r\n' - '4\r\n' - ' say\r\n' - '4\r\n' - ' can\r\n' - '4\r\n' - ' you\r\n' - '4\r\n' - ' see\r\n' - '3\r\n' - ' by\r\n' - '4\r\n' - ' the\r\n' - '8\r\n' - ' dawns\'\n\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - - def test_PUT_bad_message_length(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n' - 'Host: 
localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Type: application/octet-stream\r\n' - 'Content-Length: 33\r\n' - 'Transfer-Encoding: gzip\r\n\r\n' - '2\r\n' - 'oh\r\n' - '4\r\n' - ' say\r\n' - '4\r\n' - ' can\r\n' - '4\r\n' - ' you\r\n' - '4\r\n' - ' see\r\n' - '3\r\n' - ' by\r\n' - '4\r\n' - ' the\r\n' - '8\r\n' - ' dawns\'\n\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 400' - self.assertEqual(headers[:len(exp)], exp) - - def test_PUT_message_length_unsup_xfr_encoding(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Type: application/octet-stream\r\n' - 'Content-Length: 33\r\n' - 'Transfer-Encoding: gzip,chunked\r\n\r\n' - '2\r\n' - 'oh\r\n' - '4\r\n' - ' say\r\n' - '4\r\n' - ' can\r\n' - '4\r\n' - ' you\r\n' - '4\r\n' - ' see\r\n' - '3\r\n' - ' by\r\n' - '4\r\n' - ' the\r\n' - '8\r\n' - ' dawns\'\n\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 501' - self.assertEqual(headers[:len(exp)], exp) - - def test_PUT_message_length_too_large(self): - swift.proxy.controllers.obj.MAX_FILE_SIZE = 10 - try: - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Type: application/octet-stream\r\n' - 'Content-Length: 33\r\n\r\n' - 'oh say can you see by the dawns\'\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 413' - self.assertEqual(headers[:len(exp)], exp) - finally: - swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE - - def test_PUT_last_modified(self): - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/o.last_modified HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - lm_hdr = 'Last-Modified: ' - self.assertEqual(headers[:len(exp)], exp) - - last_modified_put = [line for line in headers.split('\r\n') - if lm_hdr in line][0][len(lm_hdr):] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('HEAD /v1/a/c/o.last_modified HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - last_modified_head = [line for line in headers.split('\r\n') - if lm_hdr in line][0][len(lm_hdr):] - self.assertEqual(last_modified_put, last_modified_head) - - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'If-Modified-Since: %s\r\n' - 'X-Storage-Token: t\r\n\r\n' % last_modified_put) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 304' - self.assertEqual(headers[:len(exp)], exp) - - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'If-Unmodified-Since: %s\r\n' - 'X-Storage-Token: t\r\n\r\n' % last_modified_put) - 
fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - - def test_expirer_DELETE_on_versioned_object(self): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if method == 'DELETE': - if 'x-if-delete-at' in headers or 'X-If-Delete-At' in headers: - test_errors.append('X-If-Delete-At in headers') - - body = simplejson.dumps( - [{"name": "001o/1", - "hash": "x", - "bytes": 0, - "content_type": "text/plain", - "last_modified": "1970-01-01T00:00:01.000000"}]) - body_iter = ('', '', body, '', '', '', '', '', '', '', '', '', '', '') - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - # HEAD HEAD GET GET HEAD GET GET GET PUT PUT - # PUT DEL DEL DEL - set_http_connect(200, 200, 200, 200, 200, 200, 200, 200, 201, 201, - 201, 200, 200, 200, - give_connect=test_connect, - body_iter=body_iter, - headers={'x-versions-location': 'foo'}) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', - headers={'X-If-Delete-At': 1}, - environ={'REQUEST_METHOD': 'DELETE'}) - self.app.update_request(req) - controller.DELETE(req) - self.assertEquals(test_errors, []) - - def test_PUT_auto_content_type(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - def test_content_type(filename, expected): - # The three responses here are for account_info() (HEAD to - # account server), container_info() (HEAD to container server) - # and three calls to _connect_put_node() (PUT to three object - # servers) - set_http_connect(201, 201, 201, 201, 201, - give_content_type=lambda content_type: - self.assertEquals(content_type, - expected.next())) - # We need into include a transfer-encoding to get past - # constraints.check_object_creation() - req = Request.blank('/v1/a/c/%s' % filename, {}, - headers={'transfer-encoding': 'chunked'}) - self.app.update_request(req) - self.app.memcache.store = {} - res = controller.PUT(req) - # If we don't check the response here we could miss problems - # in PUT() - self.assertEquals(res.status_int, 201) - - test_content_type('test.jpg', iter(['', '', 'image/jpeg', - 'image/jpeg', 'image/jpeg'])) - test_content_type('test.html', iter(['', '', 'text/html', - 'text/html', 'text/html'])) - test_content_type('test.css', iter(['', '', 'text/css', - 'text/css', 'text/css'])) - - def test_custom_mime_types_files(self): - swift_dir = mkdtemp() - try: - with open(os.path.join(swift_dir, 'mime.types'), 'w') as fp: - fp.write('foo/bar foo\n') - proxy_server.Application({'swift_dir': swift_dir}, - FakeMemcache(), FakeLogger(), - FakeRing(), FakeRing(), - FakeRing()) - self.assertEquals(proxy_server.mimetypes.guess_type('blah.foo')[0], - 'foo/bar') - self.assertEquals(proxy_server.mimetypes.guess_type('blah.jpg')[0], - 'image/jpeg') - finally: - rmtree(swift_dir, ignore_errors=True) - - def test_PUT(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - def test_status_map(statuses, expected): - set_http_connect(*statuses) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 0 - self.app.update_request(req) - self.app.memcache.store = {} - res = controller.PUT(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((200, 200, 201, 201, 201), 201) - test_status_map((200, 200, 201, 201, 500), 201) - test_status_map((200, 200, 
204, 404, 404), 404) - test_status_map((200, 200, 204, 500, 404), 503) - - def test_PUT_connect_exceptions(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - def test_status_map(statuses, expected): - set_http_connect(*statuses) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 0 - self.app.update_request(req) - res = controller.PUT(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((200, 200, 201, 201, -1), 201) - test_status_map((200, 200, 201, 201, -2), 201) # expect timeout - test_status_map((200, 200, 201, 201, -3), 201) # error limited - test_status_map((200, 200, 201, -1, -1), 503) - test_status_map((200, 200, 503, 503, -1), 503) - - def test_PUT_send_exceptions(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - def test_status_map(statuses, expected): - self.app.memcache.store = {} - set_http_connect(*statuses) - req = Request.blank('/v1/a/c/o.jpg', - environ={'REQUEST_METHOD': 'PUT'}, - body='some data') - self.app.update_request(req) - res = controller.PUT(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((200, 200, 201, -1, 201), 201) - test_status_map((200, 200, 201, -1, -1), 503) - test_status_map((200, 200, 503, 503, -1), 503) - - def test_PUT_max_size(self): - with save_globals(): - set_http_connect(201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', {}, headers={ - 'Content-Length': str(MAX_FILE_SIZE + 1), - 'Content-Type': 'foo/bar'}) - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status_int, 413) - - def test_PUT_bad_content_type(self): - with save_globals(): - set_http_connect(201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', {}, headers={ - 'Content-Length': 0, 'Content-Type': 'foo/bar;swift_hey=45'}) - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status_int, 400) - - def test_PUT_getresponse_exceptions(self): - - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - def test_status_map(statuses, expected): - self.app.memcache.store = {} - set_http_connect(*statuses) - req = Request.blank('/v1/a/c/o.jpg', {}) - req.content_length = 0 - self.app.update_request(req) - res = controller.PUT(req) - expected = str(expected) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - test_status_map((200, 200, 201, 201, -1), 201) - test_status_map((200, 200, 201, -1, -1), 503) - test_status_map((200, 200, 503, 503, -1), 503) - - def test_POST(self): - with save_globals(): - self.app.object_post_as_copy = False - - def test_status_map(statuses, expected): - set_http_connect(*statuses) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, method='POST', - headers={'Content-Type': 'foo/bar'}) - self.app.update_request(req) - res = req.get_response(self.app) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((200, 200, 202, 202, 202), 202) - test_status_map((200, 200, 202, 202, 500), 202) - test_status_map((200, 200, 202, 500, 500), 503) - test_status_map((200, 200, 202, 404, 500), 503) - 
test_status_map((200, 200, 202, 404, 404), 404) - test_status_map((200, 200, 404, 500, 500), 503) - test_status_map((200, 200, 404, 404, 404), 404) - - def test_POST_as_copy(self): - with save_globals(): - def test_status_map(statuses, expected): - set_http_connect(*statuses) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar'}) - self.app.update_request(req) - res = req.get_response(self.app) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202) - test_status_map((200, 200, 200, 200, 200, 202, 202, 500), 202) - test_status_map((200, 200, 200, 200, 200, 202, 500, 500), 503) - test_status_map((200, 200, 200, 200, 200, 202, 404, 500), 503) - test_status_map((200, 200, 200, 200, 200, 202, 404, 404), 404) - test_status_map((200, 200, 200, 200, 200, 404, 500, 500), 503) - test_status_map((200, 200, 200, 200, 200, 404, 404, 404), 404) - - def test_DELETE(self): - with save_globals(): - def test_status_map(statuses, expected): - set_http_connect(*statuses) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'DELETE'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - test_status_map((200, 200, 204, 204, 204), 204) - test_status_map((200, 200, 204, 204, 500), 204) - test_status_map((200, 200, 204, 404, 404), 404) - test_status_map((200, 200, 204, 500, 404), 503) - test_status_map((200, 200, 404, 404, 404), 404) - test_status_map((200, 200, 404, 404, 500), 404) - - def test_HEAD(self): - with save_globals(): - def test_status_map(statuses, expected): - set_http_connect(*statuses) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - if expected < 400: - self.assert_('x-works' in res.headers) - self.assertEquals(res.headers['x-works'], 'yes') - self.assert_('accept-ranges' in res.headers) - self.assertEquals(res.headers['accept-ranges'], 'bytes') - - test_status_map((200, 200, 200, 404, 404), 200) - test_status_map((200, 200, 200, 500, 404), 200) - test_status_map((200, 200, 304, 500, 404), 304) - test_status_map((200, 200, 404, 404, 404), 404) - test_status_map((200, 200, 404, 404, 500), 404) - test_status_map((200, 200, 500, 500, 500), 503) - - def test_HEAD_newest(self): - with save_globals(): - def test_status_map(statuses, expected, timestamps, - expected_timestamp): - set_http_connect(*statuses, timestamps=timestamps) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}, - headers={'x-newest': 'true'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) - - # acct cont obj obj obj - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '2', '3'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '2'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '1'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3', - '3', '1'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None, - None, None), None) - test_status_map((200, 200, 200, 
200, 200), 200, ('0', '0', None, - None, '1'), '1') - - def test_GET_newest(self): - with save_globals(): - def test_status_map(statuses, expected, timestamps, - expected_timestamp): - set_http_connect(*statuses, timestamps=timestamps) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'GET'}, - headers={'x-newest': 'true'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) - - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '2', '3'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '2'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '1'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3', - '3', '1'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None, - None, None), None) - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None, - None, '1'), '1') - - with save_globals(): - def test_status_map(statuses, expected, timestamps, - expected_timestamp): - set_http_connect(*statuses, timestamps=timestamps) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) - - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '2', '3'), '1') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '2'), '1') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', - '3', '1'), '1') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3', - '3', '1'), '3') - test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None, - '1', '2'), None) - - def test_POST_meta_val_len(self): - with save_globals(): - limit = MAX_META_VALUE_LENGTH - self.app.object_post_as_copy = False - proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 202, 202, 202) - # acct cont obj obj obj - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * limit}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) - set_http_connect(202, 202, 202) - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * (limit + 1)}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_POST_as_copy_meta_val_len(self): - with save_globals(): - limit = MAX_META_VALUE_LENGTH - set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) - # acct cont objc objc objc obj obj obj - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * limit}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) - set_http_connect(202, 202, 202) - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * (limit + 1)}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_POST_meta_key_len(self): 
- with save_globals(): - limit = MAX_META_NAME_LENGTH - self.app.object_post_as_copy = False - set_http_connect(200, 200, 202, 202, 202) - # acct cont obj obj obj - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * limit): 'x'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) - set_http_connect(202, 202, 202) - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_POST_as_copy_meta_key_len(self): - with save_globals(): - limit = MAX_META_NAME_LENGTH - set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) - # acct cont objc objc objc obj obj obj - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * limit): 'x'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) - set_http_connect(202, 202, 202) - req = Request.blank( - '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_POST_meta_count(self): - with save_globals(): - limit = MAX_META_COUNT - headers = dict( - (('X-Object-Meta-' + str(i), 'a') for i in xrange(limit + 1))) - headers.update({'Content-Type': 'foo/bar'}) - set_http_connect(202, 202, 202) - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers=headers) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_POST_meta_size(self): - with save_globals(): - limit = MAX_META_OVERALL_SIZE - count = limit / 256 # enough to cause the limit to be reached - headers = dict( - (('X-Object-Meta-' + str(i), 'a' * 256) - for i in xrange(count + 1))) - headers.update({'Content-Type': 'foo/bar'}) - set_http_connect(202, 202, 202) - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, - headers=headers) - self.app.update_request(req) - res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) - - def test_PUT_not_autodetect_content_type(self): - with save_globals(): - headers = {'Content-Type': 'something/right', 'Content-Length': 0} - it_worked = [] - - def verify_content_type(ipaddr, port, device, partition, - method, path, headers=None, - query_string=None): - if path == '/a/c/o.html': - it_worked.append( - headers['Content-Type'].startswith('something/right')) - - set_http_connect(204, 204, 201, 201, 201, - give_connect=verify_content_type) - req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'}, - headers=headers) - self.app.update_request(req) - req.get_response(self.app) - self.assertNotEquals(it_worked, []) - self.assertTrue(all(it_worked)) - - def test_PUT_autodetect_content_type(self): - with save_globals(): - headers = {'Content-Type': 'something/wrong', 'Content-Length': 0, - 'X-Detect-Content-Type': 'True'} - it_worked = [] - - def verify_content_type(ipaddr, port, device, partition, - method, path, headers=None, - query_string=None): - if path == '/a/c/o.html': - it_worked.append( - headers['Content-Type'].startswith('text/html')) - - set_http_connect(204, 204, 201, 201, 201, - 
give_connect=verify_content_type) - req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'}, - headers=headers) - self.app.update_request(req) - req.get_response(self.app) - self.assertNotEquals(it_worked, []) - self.assertTrue(all(it_worked)) - - def test_client_timeout(self): - with save_globals(): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.container_ring.get_nodes('account') - for dev in self.app.container_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.object_ring.get_nodes('account') - for dev in self.app.object_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - - class SlowBody(object): - - def __init__(self): - self.sent = 0 - - def read(self, size=-1): - if self.sent < 4: - sleep(0.1) - self.sent += 1 - return ' ' - return '' - - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT', - 'wsgi.input': SlowBody()}, - headers={'Content-Length': '4', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - set_http_connect(200, 200, 201, 201, 201) - # acct cont obj obj obj - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) - self.app.client_timeout = 0.1 - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT', - 'wsgi.input': SlowBody()}, - headers={'Content-Length': '4', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - set_http_connect(201, 201, 201) - # obj obj obj - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 408) - - def test_client_disconnect(self): - with save_globals(): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.container_ring.get_nodes('account') - for dev in self.app.container_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.object_ring.get_nodes('account') - for dev in self.app.object_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - - class SlowBody(object): - - def __init__(self): - self.sent = 0 - - def read(self, size=-1): - raise Exception('Disconnected') - - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT', - 'wsgi.input': SlowBody()}, - headers={'Content-Length': '4', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - set_http_connect(200, 200, 201, 201, 201) - # acct cont obj obj obj - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 499) - - def test_node_read_timeout(self): - with save_globals(): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.container_ring.get_nodes('account') - for dev in self.app.container_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.object_ring.get_nodes('account') - for dev in self.app.object_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - self.app.update_request(req) - set_http_connect(200, 200, 200, slow=0.1) - req.sent_size = 0 - resp = req.get_response(self.app) - got_exc = False - try: - resp.body - except ChunkReadTimeout: - got_exc = True - self.assert_(not got_exc) - self.app.recoverable_node_timeout = 0.1 - set_http_connect(200, 200, 200, slow=1.0) - resp = req.get_response(self.app) - got_exc = False - try: - resp.body - except ChunkReadTimeout: - got_exc = 
True - self.assert_(got_exc) - - def test_node_read_timeout_retry(self): - with save_globals(): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - self.app.update_request(req) - - self.app.recoverable_node_timeout = 0.1 - set_http_connect(200, 200, 200, slow=[1.0, 1.0, 1.0]) - resp = req.get_response(self.app) - got_exc = False - try: - self.assertEquals('', resp.body) - except ChunkReadTimeout: - got_exc = True - self.assert_(got_exc) - - set_http_connect(200, 200, 200, body='lalala', - slow=[1.0, 1.0]) - resp = req.get_response(self.app) - got_exc = False - try: - self.assertEquals(resp.body, 'lalala') - except ChunkReadTimeout: - got_exc = True - self.assert_(not got_exc) - - set_http_connect(200, 200, 200, body='lalala', - slow=[1.0, 1.0], etags=['a', 'a', 'a']) - resp = req.get_response(self.app) - got_exc = False - try: - self.assertEquals(resp.body, 'lalala') - except ChunkReadTimeout: - got_exc = True - self.assert_(not got_exc) - - set_http_connect(200, 200, 200, body='lalala', - slow=[1.0, 1.0], etags=['a', 'b', 'a']) - resp = req.get_response(self.app) - got_exc = False - try: - self.assertEquals(resp.body, 'lalala') - except ChunkReadTimeout: - got_exc = True - self.assert_(not got_exc) - - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - set_http_connect(200, 200, 200, body='lalala', - slow=[1.0, 1.0], etags=['a', 'b', 'b']) - resp = req.get_response(self.app) - got_exc = False - try: - resp.body - except ChunkReadTimeout: - got_exc = True - self.assert_(got_exc) - - def test_node_write_timeout(self): - with save_globals(): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.container_ring.get_nodes('account') - for dev in self.app.container_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - self.app.object_ring.get_nodes('account') - for dev in self.app.object_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '4', - 'Content-Type': 'text/plain'}, - body=' ') - self.app.update_request(req) - set_http_connect(200, 200, 201, 201, 201, slow=0.1) - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) - self.app.node_timeout = 0.1 - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '4', - 'Content-Type': 'text/plain'}, - body=' ') - self.app.update_request(req) - set_http_connect(201, 201, 201, slow=1.0) - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 503) - - def test_iter_nodes(self): - with save_globals(): - try: - self.app.object_ring.max_more_nodes = 2 - partition, nodes = self.app.object_ring.get_nodes('account', - 'container', - 'object') - collected_nodes = [] - for node in self.app.iter_nodes(self.app.object_ring, - partition): - collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 5) - - self.app.object_ring.max_more_nodes = 20 - self.app.request_node_count = lambda r: 20 - partition, nodes = self.app.object_ring.get_nodes('account', - 'container', - 'object') - collected_nodes = [] - for node in self.app.iter_nodes(self.app.object_ring, - partition): - collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 9) - - self.app.log_handoffs = True - self.app.logger = FakeLogger() - self.app.object_ring.max_more_nodes = 2 - partition, nodes = self.app.object_ring.get_nodes('account', 
- 'container', - 'object') - collected_nodes = [] - for node in self.app.iter_nodes(self.app.object_ring, - partition): - collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 5) - self.assertEquals( - self.app.logger.log_dict['warning'], - [(('Handoff requested (1)',), {}), - (('Handoff requested (2)',), {})]) - - self.app.log_handoffs = False - self.app.logger = FakeLogger() - self.app.object_ring.max_more_nodes = 2 - partition, nodes = self.app.object_ring.get_nodes('account', - 'container', - 'object') - collected_nodes = [] - for node in self.app.iter_nodes(self.app.object_ring, - partition): - collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 5) - self.assertEquals(self.app.logger.log_dict['warning'], []) - finally: - self.app.object_ring.max_more_nodes = 0 - - def test_iter_nodes_calls_sort_nodes(self): - with mock.patch.object(self.app, 'sort_nodes') as sort_nodes: - for node in self.app.iter_nodes(self.app.object_ring, 0): - pass - sort_nodes.assert_called_once_with( - self.app.object_ring.get_part_nodes(0)) - - def test_iter_nodes_skips_error_limited(self): - with mock.patch.object(self.app, 'sort_nodes', lambda n: n): - first_nodes = list(self.app.iter_nodes(self.app.object_ring, 0)) - second_nodes = list(self.app.iter_nodes(self.app.object_ring, 0)) - self.assertTrue(first_nodes[0] in second_nodes) - - self.app.error_limit(first_nodes[0], 'test') - second_nodes = list(self.app.iter_nodes(self.app.object_ring, 0)) - self.assertTrue(first_nodes[0] not in second_nodes) - - def test_iter_nodes_gives_extra_if_error_limited_inline(self): - with nested( - mock.patch.object(self.app, 'sort_nodes', lambda n: n), - mock.patch.object(self.app, 'request_node_count', - lambda r: 6), - mock.patch.object(self.app.object_ring, 'max_more_nodes', 99)): - first_nodes = list(self.app.iter_nodes(self.app.object_ring, 0)) - second_nodes = [] - for node in self.app.iter_nodes(self.app.object_ring, 0): - if not second_nodes: - self.app.error_limit(node, 'test') - second_nodes.append(node) - self.assertEquals(len(first_nodes), 6) - self.assertEquals(len(second_nodes), 7) - - def test_iter_nodes_with_custom_node_iter(self): - node_list = [dict(id=n) for n in xrange(10)] - with nested( - mock.patch.object(self.app, 'sort_nodes', lambda n: n), - mock.patch.object(self.app, 'request_node_count', - lambda r: 3)): - got_nodes = list(self.app.iter_nodes(self.app.object_ring, 0, - node_iter=iter(node_list))) - self.assertEqual(node_list[:3], got_nodes) - - with nested( - mock.patch.object(self.app, 'sort_nodes', lambda n: n), - mock.patch.object(self.app, 'request_node_count', - lambda r: 1000000)): - got_nodes = list(self.app.iter_nodes(self.app.object_ring, 0, - node_iter=iter(node_list))) - self.assertEqual(node_list, got_nodes) - - def test_best_response_sets_headers(self): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3, - 'Object', headers=[{'X-Test': '1'}, - {'X-Test': '2'}, - {'X-Test': '3'}]) - self.assertEquals(resp.headers['X-Test'], '1') - - def test_best_response_sets_etag(self): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3, - 'Object') - self.assertEquals(resp.etag, None) - resp = 
controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3, - 'Object', - etag='68b329da9893e34099c7d8ad5cb9c940' - ) - self.assertEquals(resp.etag, '68b329da9893e34099c7d8ad5cb9c940') - - def test_proxy_passes_content_type(self): - with save_globals(): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) - self.app.update_request(req) - set_http_connect(200, 200, 200) - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_type, 'x-application/test') - set_http_connect(200, 200, 200) - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 0) - set_http_connect(200, 200, 200, slow=True) - resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 4) - - def test_proxy_passes_content_length_on_head(self): - with save_globals(): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200) - resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 0) - set_http_connect(200, 200, 200, slow=True) - resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 4) - - def test_error_limiting(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - controller.app.sort_nodes = lambda l: l - self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200), - 200) - self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2) - self.assert_('last_error' in controller.app.object_ring.devs[0]) - for _junk in xrange(self.app.error_suppression_limit): - self.assert_status_map(controller.HEAD, (200, 200, 503, 503, - 503), 503) - self.assertEquals(controller.app.object_ring.devs[0]['errors'], - self.app.error_suppression_limit + 1) - self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200), - 503) - self.assert_('last_error' in controller.app.object_ring.devs[0]) - self.assert_status_map(controller.PUT, (200, 200, 200, 201, 201, - 201), 503) - self.assert_status_map(controller.POST, - (200, 200, 200, 200, 200, 200, 202, 202, - 202), 503) - self.assert_status_map(controller.DELETE, - (200, 200, 200, 204, 204, 204), 503) - self.app.error_suppression_interval = -300 - self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200), - 200) - self.assertRaises(BaseException, - self.assert_status_map, controller.DELETE, - (200, 200, 200, 204, 204, 204), 503, - raise_exc=True) - - def test_acc_or_con_missing_returns_404(self): - with save_globals(): - self.app.memcache = FakeMemcacheReturnsNone() - for dev in self.app.account_ring.devs.values(): - del dev['errors'] - del dev['last_error'] - for dev in self.app.container_ring.devs.values(): - del dev['errors'] - del dev['last_error'] - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200, 200, 200, 200) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - self.app.update_request(req) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 200) - - set_http_connect(404, 404, 404) - # acct acct acct - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - 
environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(503, 404, 404) - # acct acct acct - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(503, 503, 404) - # acct acct acct - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(503, 503, 503) - # acct acct acct - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(200, 200, 204, 204, 204) - # acct cont obj obj obj - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 204) - - set_http_connect(200, 404, 404, 404) - # acct cont cont cont - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(200, 503, 503, 503) - # acct cont cont cont - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - for dev in self.app.account_ring.devs.values(): - dev['errors'] = self.app.error_suppression_limit + 1 - dev['last_error'] = time.time() - set_http_connect(200) - # acct [isn't actually called since everything - # is error limited] - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - for dev in self.app.account_ring.devs.values(): - dev['errors'] = 0 - for dev in self.app.container_ring.devs.values(): - dev['errors'] = self.app.error_suppression_limit + 1 - dev['last_error'] = time.time() - set_http_connect(200, 200) - # acct cont [isn't actually called since - # everything is error limited] - # make sure to use a fresh request without cached env - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}) - resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) - - def test_PUT_POST_requires_container_exist(self): - with save_globals(): - self.app.object_post_as_copy = False - self.app.memcache = FakeMemcacheReturnsNone() - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - - set_http_connect(200, 404, 404, 404, 200, 200, 200) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(200, 404, 404, 404, 200, 200) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'text/plain'}) - self.app.update_request(req) - resp = controller.POST(req) - self.assertEquals(resp.status_int, 404) - - def 
test_PUT_POST_as_copy_requires_container_exist(self): - with save_globals(): - self.app.memcache = FakeMemcacheReturnsNone() - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 404, 404, 404, 200, 200, 200) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) - - set_http_connect(200, 404, 404, 404, 200, 200, 200, 200, 200, 200) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'text/plain'}) - self.app.update_request(req) - resp = controller.POST(req) - self.assertEquals(resp.status_int, 404) - - def test_bad_metadata(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 201, 201, 201) - # acct cont obj obj obj - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0'}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Object-Meta-' + ('a' * - MAX_META_NAME_LENGTH): 'v'}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Object-Meta-' + ('a' * - (MAX_META_NAME_LENGTH + 1)): 'v'}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Object-Meta-Too-Long': 'a' * - MAX_META_VALUE_LENGTH}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Object-Meta-Too-Long': 'a' * - (MAX_META_VALUE_LENGTH + 1)}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {'Content-Length': '0'} - for x in xrange(MAX_META_COUNT): - headers['X-Object-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers=headers) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - headers = {'Content-Length': '0'} - for x in xrange(MAX_META_COUNT + 1): - headers['X-Object-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers=headers) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {'Content-Length': '0'} - header_value = 'a' * MAX_META_VALUE_LENGTH - size = 0 - x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - \ - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH - headers['X-Object-Meta-%04d' % x] = header_value - x += 1 - if MAX_META_OVERALL_SIZE - size > 1: - headers['X-Object-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size - 1) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers=headers) - 
self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - headers['X-Object-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers=headers) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - @contextmanager - def controller_context(self, req, *args, **kwargs): - _v, account, container, obj = utils.split_path(req.path, 4, 4, True) - controller = proxy_server.ObjectController(self.app, account, - container, obj) - self.app.update_request(req) - self.app.memcache.store = {} - with save_globals(): - new_connect = set_http_connect(*args, **kwargs) - yield controller - unused_status_list = [] - while True: - try: - unused_status_list.append(new_connect.code_iter.next()) - except StopIteration: - break - if unused_status_list: - raise self.fail('UN-USED STATUS CODES: %r' % - unused_status_list) - - def test_basic_put_with_x_copy_from(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': 'c/o'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - - def test_basic_put_with_x_copy_from_across_container(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': 'c2/o'}) - status_list = (200, 200, 200, 200, 200, 200, 201, 201, 201) - # acct cont conc objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c2/o') - - def test_copy_non_zero_content_length(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '5', - 'X-Copy-From': 'c/o'}) - status_list = (200, 200) - # acct cont - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - - def test_copy_with_slashes_in_x_copy_from(self): - # extra source path parsing - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': 'c/o/o2'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - - def test_copy_with_spaces_in_x_copy_from(self): - # space in soure path - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': 'c/o%20o2'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2') - - def test_copy_with_leading_slash_in_x_copy_from(self): - # repeat tests with leading / - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 
'X-Copy-From': '/c/o'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - - def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o/o2'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - - def test_copy_with_no_object_in_x_copy_from(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c'}) - status_list = (200, 200) - # acct cont - with self.controller_context(req, *status_list) as controller: - try: - controller.PUT(req) - except HTTPException as resp: - self.assertEquals(resp.status_int // 100, 4) # client error - else: - raise self.fail('Invalid X-Copy-From did not raise ' - 'client error') - - def test_copy_server_error_reading_source(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o'}) - status_list = (200, 200, 503, 503, 503) - # acct cont objc objc objc - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 503) - - def test_copy_not_found_reading_source(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o'}) - # not found - status_list = (200, 200, 404, 404, 404) - # acct cont objc objc objc - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) - - def test_copy_with_some_missing_sources(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o'}) - status_list = (200, 200, 404, 404, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - - def test_copy_with_object_metadata(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o', - 'X-Object-Meta-Ours': 'okay'}) - # test object metadata - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') - - def test_copy_source_larger_than_max_file_size(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0', - 'X-Copy-From': '/c/o'}) - - # copy-from object is too large to fit in target object - class LargeResponseBody(object): - - def __len__(self): - return MAX_FILE_SIZE + 1 - - def __getitem__(self, key): - 
return '' - - copy_from_obj_body = LargeResponseBody() - status_list = (200, 200, 200, 200, 200) - # acct cont objc objc objc - kwargs = dict(body=copy_from_obj_body) - with self.controller_context(req, *status_list, - **kwargs) as controller: - self.app.update_request(req) - - self.app.memcache.store = {} - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 413) - - def test_basic_COPY(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': 'c/o2'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - - def test_COPY_across_containers(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': 'c2/o'}) - status_list = (200, 200, 200, 200, 200, 200, 201, 201, 201) - # acct cont c2 objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - - def test_COPY_source_with_slashes_in_name(self): - req = Request.blank('/v1/a/c/o/o2', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': 'c/o'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - - def test_COPY_destination_leading_slash(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - - def test_COPY_source_with_slashes_destination_leading_slash(self): - req = Request.blank('/v1/a/c/o/o2', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - - def test_COPY_no_object_in_destination(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': 'c_o'}) - status_list = [] # no requests needed - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 412) - - def test_COPY_server_error_reading_source(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - status_list = (200, 200, 503, 503, 503) - # acct cont objc objc objc - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 503) - - def test_COPY_not_found_reading_source(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - status_list = (200, 200, 404, 404, 404) 
- # acct cont objc objc objc - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 404) - - def test_COPY_with_some_missing_sources(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - status_list = (200, 200, 404, 404, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - - def test_COPY_with_metadata(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o', - 'X-Object-Meta-Ours': 'okay'}) - status_list = (200, 200, 200, 200, 200, 201, 201, 201) - # acct cont objc objc objc obj obj obj - with self.controller_context(req, *status_list) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), - 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') - - def test_COPY_source_larger_than_max_file_size(self): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - - class LargeResponseBody(object): - - def __len__(self): - return MAX_FILE_SIZE + 1 - - def __getitem__(self, key): - return '' - - copy_from_obj_body = LargeResponseBody() - status_list = (200, 200, 200, 200, 200) - # acct cont objc objc objc - kwargs = dict(body=copy_from_obj_body) - with self.controller_context(req, *status_list, - **kwargs) as controller: - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 413) - - def test_COPY_newest(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - req.account = 'a' - controller.object_name = 'o' - set_http_connect(200, 200, 200, 200, 200, 201, 201, 201, - #act cont objc objc objc obj obj obj - timestamps=('1', '1', '1', '3', '2', '4', '4', - '4')) - self.app.memcache.store = {} - resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from-last-modified'], - '3') - - def test_COPY_delete_at(self): - with save_globals(): - given_headers = {} - - def fake_connect_put_node(nodes, part, path, headers, - logger_thread_locals): - given_headers.update(headers) - - controller = proxy_server.ObjectController(self.app, 'a', - 'c', 'o') - controller._connect_put_node = fake_connect_put_node - set_http_connect(200, 200, 200, 200, 200, 201, 201, 201) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': '/c/o'}) - - self.app.update_request(req) - controller.COPY(req) - self.assertEquals(given_headers.get('X-Delete-At'), '9876543210') - self.assertTrue('X-Delete-At-Host' in given_headers) - self.assertTrue('X-Delete-At-Device' in given_headers) - self.assertTrue('X-Delete-At-Partition' in given_headers) - self.assertTrue('X-Delete-At-Container' in given_headers) - - def test_chunked_put(self): - - class ChunkedFile(object): - - def __init__(self, bytes): - self.bytes = bytes - self.read_bytes = 0 - - @property - def bytes_left(self): - return self.bytes - self.read_bytes - - def read(self, amt=None): - if self.read_bytes >= self.bytes: 
- raise StopIteration() - if not amt: - amt = self.bytes_left - data = 'a' * min(amt, self.bytes_left) - self.read_bytes += len(data) - return data - - with save_globals(): - set_http_connect(201, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Transfer-Encoding': 'chunked', - 'Content-Type': 'foo/bar'}) - - req.body_file = ChunkedFile(10) - self.app.memcache.store = {} - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status_int // 100, 2) # success - - # test 413 entity to large - set_http_connect(201, 201, 201, 201) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Transfer-Encoding': 'chunked', - 'Content-Type': 'foo/bar'}) - req.body_file = ChunkedFile(11) - self.app.memcache.store = {} - self.app.update_request(req) - try: - swift.proxy.controllers.obj.MAX_FILE_SIZE = 10 - res = controller.PUT(req) - self.assertEquals(res.status_int, 413) - finally: - swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE - - def test_chunked_put_bad_version(self): - # Check bad version - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - - def test_chunked_put_bad_path(self): - # Check bad path - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 404' - self.assertEquals(headers[:len(exp)], exp) - - def test_chunked_put_bad_utf8(self): - # Check invalid utf-8 - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - - def test_chunked_put_bad_path_no_controller(self): - # Check bad path, no controller - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - - def test_chunked_put_bad_method(self): - # Check bad method - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 405' - self.assertEquals(headers[:len(exp)], exp) - - def test_chunked_put_unhandled_exception(self): - # Check 
unhandled exception - (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, - obj2srv) = _test_servers - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - orig_update_request = prosrv.update_request - - def broken_update_request(*args, **kwargs): - raise Exception('fake: this should be printed') - - prosrv.update_request = broken_update_request - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 500' - self.assertEquals(headers[:len(exp)], exp) - prosrv.update_request = orig_update_request - - def test_chunked_put_head_account(self): - # Head account, just a double check and really is here to test - # the part Application.log_request that 'enforces' a - # content_length on the response. - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('\r\nContent-Length: 0\r\n' in headers) - - def test_chunked_put_utf8_all_the_way_down(self): - # Test UTF-8 Unicode all the way through the system - ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \ - '\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \ - '\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \ - '\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \ - '\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \ - '\xbf\x86.Test' - ustr_short = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest' - # Create ustr container - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # List account with ustr container (test plain) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - containers = fd.read().split('\n') - self.assert_(ustr in containers) - # List account with ustr container (test json) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a?format=json HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - listing = simplejson.loads(fd.read()) - self.assert_(ustr.decode('utf8') in [l['name'] for l in listing]) - # List account with ustr container (test xml) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a?format=xml HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: 
t\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('%s' % ustr in fd.read()) - # Create ustr object with ustr metadata in ustr container - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' % - (quote(ustr), quote(ustr), quote(ustr_short), - quote(ustr))) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # List ustr container with ustr object (test plain) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - objects = fd.read().split('\n') - self.assert_(ustr in objects) - # List ustr container with ustr object (test json) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % - quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - listing = simplejson.loads(fd.read()) - self.assertEquals(listing[0]['name'], ustr.decode('utf8')) - # List ustr container with ustr object (test xml) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % - quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('%s' % ustr in fd.read()) - # Retrieve ustr object with ustr metadata - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % - (quote(ustr), quote(ustr))) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('\r\nX-Object-Meta-%s: %s\r\n' % - (quote(ustr_short).lower(), quote(ustr)) in headers) - - def test_chunked_put_chunked_put(self): - # Do chunked object put - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - # Also happens to assert that x-storage-token is taken as a - # replacement for x-auth-token. 
- fd.write('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Transfer-Encoding: chunked\r\n\r\n' - '2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure we get what we put - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - self.assertEquals(body, 'oh hai123456789abcdef') - - def test_version_manifest(self, oc='versions', vc='vers', o='name'): - versions_to_create = 3 - # Create a container for our versioned object testing - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - pre = quote('%03x' % len(o)) - osub = '%s.sub' % o - presub = quote('%03x' % len(osub)) - osub = quote(osub) - presub = quote(presub) - oc = quote(oc) - vc = quote(vc) - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: %s\r\n\r\n' - % (oc, vc)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # check that the header was set - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - self.assert_('X-Versions-Location: %s' % vc in headers) - # make the container for the object versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % vc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create the versioned file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create the object versions - for segment in xrange(1, versions_to_create): - sleep(.01) # guarantee that the timestamp changes - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s' - '\r\n\r\n%05d\r\n' % (oc, o, segment, segment)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gets the latest version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n' - '\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Content-Type: text/jibberish%s' % segment in headers) - self.assert_('X-Object-Meta-Foo: barbaz' not in headers) - body = fd.read() - self.assertEquals(body, '%05d' % segment) - # Ensure we have the right number of versions saved - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), versions_to_create - 1) - # copy a version and make sure the version info is stripped - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('COPY /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nDestination: %s/copied_name\r\n' - 'Content-Length: 0\r\n\r\n' % (oc, o, oc)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response to the COPY - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - self.assertEquals(body, '%05d' % segment) - # post and make sure it's updated - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('POST /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n' - 'X-Object-Meta-Bar: foo\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response to the POST - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Content-Type: foo/bar' in headers) - self.assert_('X-Object-Meta-Bar: foo' in headers) - body = fd.read() - self.assertEquals(body, '%05d' % segment) - # Delete the object versions - for segment in xrange(versions_to_create - 1, 0, -1): - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r' - '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gets the latest version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - 
self.assert_('Content-Type: text/jibberish%s' % (segment - 1) - in headers) - body = fd.read() - self.assertEquals(body, '%05d' % (segment - 1)) - # Ensure we have the right number of versions saved - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r' - '\n' % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), segment - 1) - # there is now one segment left (in the manifest) - # Ensure we have no saved versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204 No Content' - self.assertEquals(headers[:len(exp)], exp) - # delete the last verision - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - # Ensure it's all gone - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 404' - self.assertEquals(headers[:len(exp)], exp) - - # make sure manifest files don't get versioned - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\nX-Object-Manifest: %s/foo_\r\n\r\n' - % (oc, vc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure we have no saved versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204 No Content' - self.assertEquals(headers[:len(exp)], exp) - - # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\n00001\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - 
self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub1\r\n' % (oc, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub2\r\n' % (oc, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, presub, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), 1) - - # Check for when the versions target container doesn't exist - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create the versioned file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00000\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create another version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00001\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - # Delete the object - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%swhoops/foo HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx response - self.assertEquals(headers[:len(exp)], exp) - - def test_version_manifest_utf8(self): - oc = '0_oc_non_ascii\xc2\xa3' - vc = '0_vc_non_ascii\xc2\xa3' - o = '0_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_container(self): - oc = '1_oc_non_ascii\xc2\xa3' - vc = '1_vc_ascii' - o = '1_o_ascii' - 
self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_version_container(self): - oc = '2_oc_ascii' - vc = '2_vc_non_ascii\xc2\xa3' - o = '2_o_ascii' - self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_containers(self): - oc = '3_oc_non_ascii\xc2\xa3' - vc = '3_vc_non_ascii\xc2\xa3' - o = '3_o_ascii' - self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_object(self): - oc = '4_oc_ascii' - vc = '4_vc_ascii' - o = '4_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_version_container_utf_object(self): - oc = '5_oc_ascii' - vc = '5_vc_non_ascii\xc2\xa3' - o = '5_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - def test_version_manifest_utf8_container_utf_object(self): - oc = '6_oc_non_ascii\xc2\xa3' - vc = '6_vc_ascii' - o = '6_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - def test_conditional_range_get(self): - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ - _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - - # make a container - fd = sock.makefile() - fd.write('PUT /v1/a/con HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - exp = 'HTTP/1.1 201' - headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) - - # put an object in it - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/con/o HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Length: 10\r\n' - 'Content-Type: text/plain\r\n' - '\r\n' - 'abcdefghij\r\n') - fd.flush() - exp = 'HTTP/1.1 201' - headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) - - # request with both If-None-Match and Range - etag = md5("abcdefghij").hexdigest() - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/con/o HTTP/1.1\r\n' + - 'Host: localhost\r\n' + - 'Connection: close\r\n' + - 'X-Storage-Token: t\r\n' + - 'If-None-Match: "' + etag + '"\r\n' + - 'Range: bytes=3-8\r\n' + - '\r\n') - fd.flush() - exp = 'HTTP/1.1 304' - headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) - - def test_mismatched_etags(self): - with save_globals(): - # no etag supplied, object servers return success w/ diff values - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0'}) - self.app.update_request(req) - set_http_connect(200, 201, 201, 201, - etags=[None, - '68b329da9893e34099c7d8ad5cb9c940', - '68b329da9893e34099c7d8ad5cb9c940', - '68b329da9893e34099c7d8ad5cb9c941']) - resp = controller.PUT(req) - self.assertEquals(resp.status_int // 100, 5) # server error - - # req supplies etag, object servers return 422 - mismatch - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={ - 'Content-Length': '0', - 'ETag': '68b329da9893e34099c7d8ad5cb9c940', - }) - self.app.update_request(req) - set_http_connect(200, 422, 422, 503, - etags=['68b329da9893e34099c7d8ad5cb9c940', - '68b329da9893e34099c7d8ad5cb9c941', - None, - None]) - resp = controller.PUT(req) - self.assertEquals(resp.status_int // 100, 4) # client error - - def test_response_get_accept_ranges_header(self): - with save_globals(): - req = Request.blank('/v1/a/c/o', 
environ={'REQUEST_METHOD': 'GET'}) - self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200) - resp = controller.GET(req) - self.assert_('accept-ranges' in resp.headers) - self.assertEquals(resp.headers['accept-ranges'], 'bytes') - - def test_response_head_accept_ranges_header(self): - with save_globals(): - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200) - resp = controller.HEAD(req) - self.assert_('accept-ranges' in resp.headers) - self.assertEquals(resp.headers['accept-ranges'], 'bytes') - - def test_GET_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o') - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.GET(req) - self.assert_(called[0]) - - def test_HEAD_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.HEAD(req) - self.assert_(called[0]) - - def test_POST_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - self.app.object_post_as_copy = False - set_http_connect(200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'Content-Length': '5'}, body='12345') - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.POST(req) - self.assert_(called[0]) - - def test_POST_as_copy_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 200, 200, 200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'Content-Length': '5'}, body='12345') - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.POST(req) - self.assert_(called[0]) - - def test_PUT_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '5'}, body='12345') - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.PUT(req) - self.assert_(called[0]) - - def test_COPY_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return 
HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 200, 200, 200, 200, 201, 201, 201) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'COPY'}, - headers={'Destination': 'c/o'}) - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.COPY(req) - self.assert_(called[0]) - - def test_POST_converts_delete_after_to_delete_at(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) - self.app.memcache.store = {} - orig_time = time.time - try: - t = time.time() - time.time = lambda: t - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-After': '60'}) - self.app.update_request(req) - res = controller.POST(req) - self.assertEquals(res.status, '202 Fake') - self.assertEquals(req.headers.get('x-delete-at'), - str(int(t + 60))) - - self.app.object_post_as_copy = False - controller = proxy_server.ObjectController(self.app, 'account', - 'container', - 'object') - set_http_connect(200, 200, 202, 202, 202) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-After': '60'}) - self.app.update_request(req) - res = controller.POST(req) - self.assertEquals(res.status, '202 Fake') - self.assertEquals(req.headers.get('x-delete-at'), - str(int(t + 60))) - finally: - time.time = orig_time - - def test_POST_non_int_delete_after(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-After': '60.1'}) - self.app.update_request(req) - res = controller.POST(req) - self.assertEquals(res.status, '400 Bad Request') - self.assertTrue('Non-integer X-Delete-After' in res.body) - - def test_POST_negative_delete_after(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-After': '-60'}) - self.app.update_request(req) - res = controller.POST(req) - self.assertEquals(res.status, '400 Bad Request') - self.assertTrue('X-Delete-At in past' in res.body) - - def test_POST_delete_at(self): - with save_globals(): - given_headers = {} - - def fake_make_requests(req, ring, part, method, path, headers, - query_string=''): - given_headers.update(headers[0]) - - self.app.object_post_as_copy = False - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - controller.make_requests = fake_make_requests - set_http_connect(200, 200) - self.app.memcache.store = {} - t = str(int(time.time() + 100)) - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - controller.POST(req) - self.assertEquals(given_headers.get('X-Delete-At'), t) - self.assertTrue('X-Delete-At-Host' in given_headers) - self.assertTrue('X-Delete-At-Device' in given_headers) - self.assertTrue('X-Delete-At-Partition' in given_headers) - self.assertTrue('X-Delete-At-Container' in given_headers) - - t = 
str(int(time.time() + 100)) + '.1' - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - resp = controller.POST(req) - self.assertEquals(resp.status_int, 400) - self.assertTrue('Non-integer X-Delete-At' in resp.body) - - t = str(int(time.time() - 100)) - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - resp = controller.POST(req) - self.assertEquals(resp.status_int, 400) - self.assertTrue('X-Delete-At in past' in resp.body) - - def test_PUT_converts_delete_after_to_delete_at(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 201, 201, 201) - self.app.memcache.store = {} - orig_time = time.time - try: - t = time.time() - time.time = lambda: t - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-After': '60'}) - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status, '201 Fake') - self.assertEquals(req.headers.get('x-delete-at'), - str(int(t + 60))) - finally: - time.time = orig_time - - def test_PUT_non_int_delete_after(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 201, 201, 201) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-After': '60.1'}) - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status, '400 Bad Request') - self.assertTrue('Non-integer X-Delete-After' in res.body) - - def test_PUT_negative_delete_after(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - set_http_connect(200, 200, 201, 201, 201) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-After': '-60'}) - self.app.update_request(req) - res = controller.PUT(req) - self.assertEquals(res.status, '400 Bad Request') - self.assertTrue('X-Delete-At in past' in res.body) - - def test_PUT_delete_at(self): - with save_globals(): - given_headers = {} - - def fake_connect_put_node(nodes, part, path, headers, - logger_thread_locals): - given_headers.update(headers) - - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - controller._connect_put_node = fake_connect_put_node - set_http_connect(200, 200) - self.app.memcache.store = {} - t = str(int(time.time() + 100)) - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - controller.PUT(req) - self.assertEquals(given_headers.get('X-Delete-At'), t) - self.assertTrue('X-Delete-At-Host' in given_headers) - self.assertTrue('X-Delete-At-Device' in given_headers) - self.assertTrue('X-Delete-At-Partition' in given_headers) - self.assertTrue('X-Delete-At-Container' in given_headers) - - t = str(int(time.time() + 100)) + '.1' - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - self.assertTrue('Non-integer X-Delete-At' in resp.body) - - t = 
str(int(time.time() - 100)) - req = Request.blank('/v1/a/c/o', {}, - headers={'Content-Length': '0', - 'Content-Type': 'foo/bar', - 'X-Delete-At': t}) - self.app.update_request(req) - resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) - self.assertTrue('X-Delete-At in past' in resp.body) - - def test_leak_1(self): - _request_instances = weakref.WeakKeyDictionary() - _orig_init = Request.__init__ - - def request_init(self, *args, **kwargs): - _orig_init(self, *args, **kwargs) - _request_instances[self] = None - - with mock.patch.object(Request, "__init__", request_init): - prolis = _test_sockets[0] - prosrv = _test_servers[0] - obj_len = prosrv.client_chunk_size * 2 - # PUT test file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/test_leak_1 HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Auth-Token: t\r\n' - 'Content-Length: %s\r\n' - 'Content-Type: application/octet-stream\r\n' - '\r\n%s' % (obj_len, 'a' * obj_len)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Remember Request instance count, make sure the GC is run for - # pythons without reference counting. - for i in xrange(4): - sleep(0) # let eventlet do its thing - gc.collect() - else: - sleep(0) - before_request_instances = len(_request_instances) - # GET test file, but disconnect early - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/test_leak_1 HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Auth-Token: t\r\n' - '\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - fd.read(1) - fd.close() - sock.close() - # Make sure the GC is run again for pythons without reference - # counting - for i in xrange(4): - sleep(0) # let eventlet do its thing - gc.collect() - else: - sleep(0) - self.assertEquals( - before_request_instances, len(_request_instances)) - - def test_OPTIONS(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'a', - 'c', 'o.jpg') - - def my_empty_container_info(*args): - return {} - controller.container_info = my_empty_container_info - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - - def my_empty_origin_container_info(*args): - return {'cors': {'allow_origin': None}} - controller.container_info = my_empty_origin_container_info - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - - def my_container_info(*args): - return { - 'cors': { - 'allow_origin': 'http://foo.bar:8080 https://foo.bar', - 'max_age': '999', - } - } - controller.container_info = my_container_info - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://foo.bar', - 'Access-Control-Request-Method': 'GET'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals( - 'https://foo.bar', - resp.headers['access-control-allow-origin']) - for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in 
resp.headers['access-control-allow-methods']) - self.assertEquals( - len(resp.headers['access-control-allow-methods'].split(', ')), - 7) - self.assertEquals('999', resp.headers['access-control-max-age']) - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://foo.bar'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - req = Request.blank('/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 7) - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.bar', - 'Access-Control-Request-Method': 'GET'}) - controller.app.cors_allow_origin = ['http://foo.bar', ] - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - - def my_container_info_wildcard(*args): - return { - 'cors': { - 'allow_origin': '*', - 'max_age': '999', - } - } - controller.container_info = my_container_info_wildcard - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://bar.baz', - 'Access-Control-Request-Method': 'GET'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals('*', resp.headers['access-control-allow-origin']) - for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['access-control-allow-methods']) - self.assertEquals( - len(resp.headers['access-control-allow-methods'].split(', ')), - 7) - self.assertEquals('999', resp.headers['access-control-max-age']) - - def test_CORS_valid(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - - def stubContainerInfo(*args): - return { - 'cors': { - 'allow_origin': 'http://not.foo.bar' - } - } - controller.container_info = stubContainerInfo - controller.app.strict_cors_mode = False - - def objectGET(controller, req): - return Response(headers={ - 'X-Object-Meta-Color': 'red', - 'X-Super-Secret': 'hush', - }) - - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'GET'}, - headers={'Origin': 'http://foo.bar'}) - - resp = cors_validation(objectGET)(controller, req) - - self.assertEquals(200, resp.status_int) - self.assertEquals('http://foo.bar', - resp.headers['access-control-allow-origin']) - self.assertEquals('red', resp.headers['x-object-meta-color']) - # X-Super-Secret is in the response, but not "exposed" - self.assertEquals('hush', resp.headers['x-super-secret']) - self.assertTrue('access-control-expose-headers' in resp.headers) - exposed = set( - h.strip() for h in - resp.headers['access-control-expose-headers'].split(',')) - expected_exposed = set(['cache-control', 'content-language', - 'content-type', 'expires', 'last-modified', - 'pragma', 'etag', 'x-timestamp', - 'x-trans-id', 'x-object-meta-color']) - self.assertEquals(expected_exposed, exposed) - - controller.app.strict_cors_mode = True - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'GET'}, - headers={'Origin': 'http://foo.bar'}) - - resp = 
cors_validation(objectGET)(controller, req) - - self.assertEquals(200, resp.status_int) - self.assertTrue('access-control-allow-origin' not in resp.headers) - - def test_CORS_valid_with_obj_headers(self): - with save_globals(): - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - - def stubContainerInfo(*args): - return { - 'cors': { - 'allow_origin': 'http://foo.bar' - } - } - controller.container_info = stubContainerInfo - - def objectGET(controller, req): - return Response(headers={ - 'X-Object-Meta-Color': 'red', - 'X-Super-Secret': 'hush', - 'Access-Control-Allow-Origin': 'http://obj.origin', - 'Access-Control-Expose-Headers': 'x-trans-id' - }) - - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'GET'}, - headers={'Origin': 'http://foo.bar'}) - - resp = cors_validation(objectGET)(controller, req) - - self.assertEquals(200, resp.status_int) - self.assertEquals('http://obj.origin', - resp.headers['access-control-allow-origin']) - self.assertEquals('x-trans-id', - resp.headers['access-control-expose-headers']) - - def _gather_x_container_headers(self, controller_call, req, *connect_args, - **kwargs): - header_list = kwargs.pop('header_list', ['X-Container-Device', - 'X-Container-Host', - 'X-Container-Partition']) - seen_headers = [] - - def capture_headers(ipaddr, port, device, partition, method, - path, headers=None, query_string=None): - captured = {} - for header in header_list: - captured[header] = headers.get(header) - seen_headers.append(captured) - - with save_globals(): - self.app.allow_account_management = True - - set_http_connect(*connect_args, give_connect=capture_headers, - **kwargs) - resp = controller_call(req) - self.assertEqual(2, resp.status_int // 100) # sanity check - - # don't care about the account/container HEADs, so chuck - # the first two requests - return sorted(seen_headers[2:], - key=lambda d: d.get(header_list[0]) or 'z') - - def test_PUT_x_container_headers_with_equal_replicas(self): - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '5'}, body='12345') - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.PUT, req, - 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - self.assertEqual( - seen_headers, [ - {'X-Container-Host': '10.0.0.0:1000', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sda'}, - {'X-Container-Host': '10.0.0.1:1001', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdb'}, - {'X-Container-Host': '10.0.0.2:1002', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdc'}]) - - def test_PUT_x_container_headers_with_fewer_container_replicas(self): - self.app.container_ring.set_replicas(2) - - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '5'}, body='12345') - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.PUT, req, - 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - - self.assertEqual( - seen_headers, [ - {'X-Container-Host': '10.0.0.0:1000', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sda'}, - {'X-Container-Host': '10.0.0.1:1001', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdb'}, - {'X-Container-Host': None, - 'X-Container-Partition': None, - 'X-Container-Device': None}]) - - def test_PUT_x_container_headers_with_more_container_replicas(self): - self.app.container_ring.set_replicas(4) - - req = 
Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '5'}, body='12345') - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.PUT, req, - 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - - self.assertEqual( - seen_headers, [ - {'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sda,sdd'}, - {'X-Container-Host': '10.0.0.1:1001', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdb'}, - {'X-Container-Host': '10.0.0.2:1002', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdc'}]) - - def test_POST_x_container_headers_with_more_container_replicas(self): - self.app.container_ring.set_replicas(4) - self.app.object_post_as_copy = False - - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'Content-Type': 'application/stuff'}) - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.POST, req, - 200, 200, 200, 200, 200) # HEAD HEAD POST POST POST - - self.assertEqual( - seen_headers, [ - {'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sda,sdd'}, - {'X-Container-Host': '10.0.0.1:1001', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdb'}, - {'X-Container-Host': '10.0.0.2:1002', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdc'}]) - - def test_DELETE_x_container_headers_with_more_container_replicas(self): - self.app.container_ring.set_replicas(4) - - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE'}, - headers={'Content-Type': 'application/stuff'}) - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.DELETE, req, - 200, 200, 200, 200, 200) # HEAD HEAD DELETE DELETE DELETE - - self.assertEqual(seen_headers, [ - {'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sda,sdd'}, - {'X-Container-Host': '10.0.0.1:1001', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdb'}, - {'X-Container-Host': '10.0.0.2:1002', - 'X-Container-Partition': '1', - 'X-Container-Device': 'sdc'} - ]) - - @mock.patch('time.time', new=lambda: STATIC_TIME) - def test_PUT_x_delete_at_with_fewer_container_replicas(self): - self.app.container_ring.set_replicas(2) - - delete_at_timestamp = int(time.time()) + 100000 - delete_at_container = str( - delete_at_timestamp / - self.app.expiring_objects_container_divisor * - self.app.expiring_objects_container_divisor) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Type': 'application/stuff', - 'Content-Length': '0', - 'X-Delete-At': str(delete_at_timestamp)}) - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.PUT, req, - 200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT - header_list=('X-Delete-At-Host', 'X-Delete-At-Device', - 'X-Delete-At-Partition', 'X-Delete-At-Container')) - - self.assertEqual(seen_headers, [ - {'X-Delete-At-Host': '10.0.0.0:1000', - 'X-Delete-At-Container': delete_at_container, - 'X-Delete-At-Partition': '1', - 'X-Delete-At-Device': 'sda'}, - {'X-Delete-At-Host': '10.0.0.1:1001', - 'X-Delete-At-Container': delete_at_container, - 'X-Delete-At-Partition': '1', - 'X-Delete-At-Device': 'sdb'}, - 
{'X-Delete-At-Host': None, - 'X-Delete-At-Container': None, - 'X-Delete-At-Partition': None, - 'X-Delete-At-Device': None} - ]) - - @mock.patch('time.time', new=lambda: STATIC_TIME) - def test_PUT_x_delete_at_with_more_container_replicas(self): - self.app.container_ring.set_replicas(4) - self.app.expiring_objects_account = 'expires' - self.app.expiring_objects_container_divisor = 60 - - delete_at_timestamp = int(time.time()) + 100000 - delete_at_container = str( - delete_at_timestamp / - self.app.expiring_objects_container_divisor * - self.app.expiring_objects_container_divisor) - req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Type': 'application/stuff', - 'Content-Length': 0, - 'X-Delete-At': str(delete_at_timestamp)}) - controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - seen_headers = self._gather_x_container_headers( - controller.PUT, req, - 200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT - header_list=('X-Delete-At-Host', 'X-Delete-At-Device', - 'X-Delete-At-Partition', 'X-Delete-At-Container')) - self.assertEqual(seen_headers, [ - {'X-Delete-At-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Delete-At-Container': delete_at_container, - 'X-Delete-At-Partition': '1', - 'X-Delete-At-Device': 'sda,sdd'}, - {'X-Delete-At-Host': '10.0.0.1:1001', - 'X-Delete-At-Container': delete_at_container, - 'X-Delete-At-Partition': '1', - 'X-Delete-At-Device': 'sdb'}, - {'X-Delete-At-Host': '10.0.0.2:1002', - 'X-Delete-At-Container': delete_at_container, - 'X-Delete-At-Partition': '1', - 'X-Delete-At-Device': 'sdc'} - ]) - - -class TestContainerController(unittest.TestCase): - "Test swift.proxy_server.ContainerController" - - def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing(), - logger=FakeLogger()) - - def test_transfer_headers(self): - src_headers = {'x-remove-versions-location': 'x', - 'x-container-read': '*:user'} - dst_headers = {'x-versions-location': 'backup'} - controller = swift.proxy.controllers.ContainerController(self.app, - 'a', 'c') - controller.transfer_headers(src_headers, dst_headers) - expected_headers = {'x-versions-location': '', - 'x-container-read': '*:user'} - self.assertEqual(dst_headers, expected_headers) - - def assert_status_map(self, method, statuses, expected, - raise_exc=False, missing_container=False): - with save_globals(): - kwargs = {} - if raise_exc: - kwargs['raise_exc'] = raise_exc - kwargs['missing_container'] = missing_container - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c', headers={'Content-Length': '0', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/', headers={'Content-Length': '0', - 'Content-Type': 'text/plain'}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - - def test_HEAD_GET(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - def test_status_map(statuses, expected, - c_expected=None, a_expected=None, **kwargs): - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c', {}) - self.app.update_request(req) - res = controller.HEAD(req) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - if expected 
< 400:
-                    self.assert_('x-works' in res.headers)
-                    self.assertEquals(res.headers['x-works'], 'yes')
-                if c_expected:
-                    self.assertTrue('swift.container/a/c' in res.environ)
-                    self.assertEquals(
-                        res.environ['swift.container/a/c']['status'],
-                        c_expected)
-                else:
-                    self.assertTrue('swift.container/a/c' not in res.environ)
-                if a_expected:
-                    self.assertTrue('swift.account/a' in res.environ)
-                    self.assertEquals(res.environ['swift.account/a']['status'],
-                                      a_expected)
-                else:
-                    self.assertTrue('swift.account/a' not in res.environ)
-
-                set_http_connect(*statuses, **kwargs)
-                self.app.memcache.store = {}
-                req = Request.blank('/v1/a/c', {})
-                self.app.update_request(req)
-                res = controller.GET(req)
-                self.assertEquals(res.status[:len(str(expected))],
-                                  str(expected))
-                if expected < 400:
-                    self.assert_('x-works' in res.headers)
-                    self.assertEquals(res.headers['x-works'], 'yes')
-                if c_expected:
-                    self.assertTrue('swift.container/a/c' in res.environ)
-                    self.assertEquals(
-                        res.environ['swift.container/a/c']['status'],
-                        c_expected)
-                else:
-                    self.assertTrue('swift.container/a/c' not in res.environ)
-                if a_expected:
-                    self.assertTrue('swift.account/a' in res.environ)
-                    self.assertEquals(res.environ['swift.account/a']['status'],
-                                      a_expected)
-                else:
-                    self.assertTrue('swift.account/a' not in res.environ)
-            # In all the following tests cache 200 for account
-            # return and cache vary for container
-            # return 200 and cache 200 for container
-            test_status_map((200, 200, 404, 404), 200, 200, 200)
-            test_status_map((200, 200, 500, 404), 200, 200, 200)
-            # return 304, don't cache container
-            test_status_map((200, 304, 500, 404), 304, None, 200)
-            # return 404 and cache 404 for container
-            test_status_map((200, 404, 404, 404), 404, 404, 200)
-            test_status_map((200, 404, 404, 500), 404, 404, 200)
-            # return 503, don't cache container
-            test_status_map((200, 500, 500, 500), 503, None, 200)
-            self.assertFalse(self.app.account_autocreate)
-
-            # In all the following tests cache 404 for account
-            # return 404 (as account is not found) and don't cache container
-            test_status_map((404, 404, 404), 404, None, 404)
-            # This should make no difference
-            self.app.account_autocreate = True
-            test_status_map((404, 404, 404), 404, None, 404)
-
-    def test_PUT(self):
-        with save_globals():
-            controller = proxy_server.ContainerController(self.app, 'account',
-                                                          'container')
-
-            def test_status_map(statuses, expected, **kwargs):
-                set_http_connect(*statuses, **kwargs)
-                self.app.memcache.store = {}
-                req = Request.blank('/v1/a/c', {})
-                req.content_length = 0
-                self.app.update_request(req)
-                res = controller.PUT(req)
-                expected = str(expected)
-                self.assertEquals(res.status[:len(expected)], expected)
-
-            test_status_map((200, 201, 201, 201), 201, missing_container=True)
-            test_status_map((200, 201, 201, 500), 201, missing_container=True)
-            test_status_map((200, 204, 404, 404), 404, missing_container=True)
-            test_status_map((200, 204, 500, 404), 503, missing_container=True)
-            self.assertFalse(self.app.account_autocreate)
-            test_status_map((404, 404, 404), 404, missing_container=True)
-            self.app.account_autocreate = True
-            # fail to retrieve account info
-            test_status_map(
-                (503, 503, 503),  # account_info fails on 503
-                404, missing_container=True)
-            # account fail after creation
-            test_status_map(
-                (404, 404, 404,   # account_info fails on 404
-                 201, 201, 201,   # PUT account
-                 404, 404, 404),  # account_info fail
-                404, missing_container=True)
-            test_status_map(
-                (503, 503, 404,   # account_info fails on 404
-                 503, 503, 503,   # PUT
account - 503, 503, 404), # account_info fail - 404, missing_container=True) - # put fails - test_status_map( - (404, 404, 404, # account_info fails on 404 - 201, 201, 201, # PUT account - 200, # account_info success - 503, 503, 201), # put container fail - 503, missing_container=True) - # all goes according to plan - test_status_map( - (404, 404, 404, # account_info fails on 404 - 201, 201, 201, # PUT account - 200, # account_info success - 201, 201, 201), # put container success - 201, missing_container=True) - test_status_map( - (503, 404, 404, # account_info fails on 404 - 503, 201, 201, # PUT account - 503, 200, # account_info success - 503, 201, 201), # put container success - 201, missing_container=True) - - def test_POST(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - - def test_status_map(statuses, expected, **kwargs): - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c', {}) - req.content_length = 0 - self.app.update_request(req) - res = controller.POST(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - - test_status_map((200, 201, 201, 201), 201, missing_container=True) - test_status_map((200, 201, 201, 500), 201, missing_container=True) - test_status_map((200, 204, 404, 404), 404, missing_container=True) - test_status_map((200, 204, 500, 404), 503, missing_container=True) - self.assertFalse(self.app.account_autocreate) - test_status_map((404, 404, 404), 404, missing_container=True) - self.app.account_autocreate = True - test_status_map((404, 404, 404), 404, missing_container=True) - - def test_PUT_max_containers_per_account(self): - with save_globals(): - self.app.max_containers_per_account = 12346 - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - self.assert_status_map(controller.PUT, - (200, 201, 201, 201), 201, - missing_container=True) - - self.app.max_containers_per_account = 12345 - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - self.assert_status_map(controller.PUT, (201, 201, 201), 403, - missing_container=True) - - self.app.max_containers_per_account = 12345 - self.app.max_containers_whitelist = ['account'] - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - self.assert_status_map(controller.PUT, - (200, 201, 201, 201), 201, - missing_container=True) - - def test_PUT_max_container_name_length(self): - with save_globals(): - limit = MAX_CONTAINER_NAME_LENGTH - controller = proxy_server.ContainerController(self.app, 'account', - '1' * limit) - self.assert_status_map(controller.PUT, - (200, 201, 201, 201), 201, - missing_container=True) - controller = proxy_server.ContainerController(self.app, 'account', - '2' * (limit + 1)) - self.assert_status_map(controller.PUT, (201, 201, 201), 400, - missing_container=True) - - def test_PUT_connect_exceptions(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - self.assert_status_map(controller.PUT, (200, 201, 201, -1), 201, - missing_container=True) - self.assert_status_map(controller.PUT, (200, 201, -1, -1), 503, - missing_container=True) - self.assert_status_map(controller.PUT, (200, 503, 503, -1), 503, - missing_container=True) - - def test_acc_missing_returns_404(self): - for meth in ('DELETE', 'PUT'): - with save_globals(): - self.app.memcache = FakeMemcacheReturnsNone() - for dev in 
self.app.account_ring.devs.values():
-                    del dev['errors']
-                    del dev['last_error']
-                controller = proxy_server.ContainerController(self.app,
-                                                              'account',
-                                                              'container')
-                if meth == 'PUT':
-                    set_http_connect(200, 200, 200, 200, 200, 200,
-                                     missing_container=True)
-                else:
-                    set_http_connect(200, 200, 200, 200)
-                self.app.memcache.store = {}
-                req = Request.blank('/v1/a/c',
-                                    environ={'REQUEST_METHOD': meth})
-                self.app.update_request(req)
-                resp = getattr(controller, meth)(req)
-                self.assertEquals(resp.status_int, 200)
-
-                set_http_connect(404, 404, 404, 200, 200, 200)
-                # Make sure it is a blank request without env caching
-                req = Request.blank('/v1/a/c',
-                                    environ={'REQUEST_METHOD': meth})
-                resp = getattr(controller, meth)(req)
-                self.assertEquals(resp.status_int, 404)
-
-                set_http_connect(503, 404, 404)
-                # Make sure it is a blank request without env caching
-                req = Request.blank('/v1/a/c',
-                                    environ={'REQUEST_METHOD': meth})
-                resp = getattr(controller, meth)(req)
-                self.assertEquals(resp.status_int, 404)
-
-                set_http_connect(503, 404, raise_exc=True)
-                # Make sure it is a blank request without env caching
-                req = Request.blank('/v1/a/c',
-                                    environ={'REQUEST_METHOD': meth})
-                resp = getattr(controller, meth)(req)
-                self.assertEquals(resp.status_int, 404)
-
-                for dev in self.app.account_ring.devs.values():
-                    dev['errors'] = self.app.error_suppression_limit + 1
-                    dev['last_error'] = time.time()
-                set_http_connect(200, 200, 200, 200, 200, 200)
-                # Make sure it is a blank request without env caching
-                req = Request.blank('/v1/a/c',
-                                    environ={'REQUEST_METHOD': meth})
-                resp = getattr(controller, meth)(req)
-                self.assertEquals(resp.status_int, 404)
-
-    def test_put_locking(self):
-
-        class MockMemcache(FakeMemcache):
-
-            def __init__(self, allow_lock=None):
-                self.allow_lock = allow_lock
-                super(MockMemcache, self).__init__()
-
-            @contextmanager
-            def soft_lock(self, key, timeout=0, retries=5):
-                if self.allow_lock:
-                    yield True
-                else:
-                    raise NotImplementedError
-
-        with save_globals():
-            controller = proxy_server.ContainerController(self.app, 'account',
-                                                          'container')
-            self.app.memcache = MockMemcache(allow_lock=True)
-            set_http_connect(200, 201, 201, 201,
-                             missing_container=True)
-            req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'})
-            self.app.update_request(req)
-            res = controller.PUT(req)
-            self.assertEquals(res.status_int, 201)
-
-    def test_error_limiting(self):
-        with save_globals():
-            controller = proxy_server.ContainerController(self.app, 'account',
-                                                          'container')
-            controller.app.sort_nodes = lambda l: l
-            self.assert_status_map(controller.HEAD, (200, 503, 200, 200), 200,
-                                   missing_container=False)
-            self.assertEquals(
-                controller.app.container_ring.devs[0]['errors'], 2)
-            self.assert_('last_error' in controller.app.container_ring.devs[0])
-            for _junk in xrange(self.app.error_suppression_limit):
-                self.assert_status_map(controller.HEAD,
-                                       (200, 503, 503, 503), 503)
-            self.assertEquals(controller.app.container_ring.devs[0]['errors'],
-                              self.app.error_suppression_limit + 1)
-            self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 503)
-            self.assert_('last_error' in controller.app.container_ring.devs[0])
-            self.assert_status_map(controller.PUT, (200, 201, 201, 201), 503,
-                                   missing_container=True)
-            self.assert_status_map(controller.DELETE,
-                                   (200, 204, 204, 204), 503)
-            self.app.error_suppression_interval = -300
-            self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 200)
-            self.assert_status_map(controller.DELETE, (200, 204, 204, 204),
-                                   404,
raise_exc=True) - - def test_DELETE(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - self.assert_status_map(controller.DELETE, - (200, 204, 204, 204), 204) - self.assert_status_map(controller.DELETE, - (200, 204, 204, 503), 204) - self.assert_status_map(controller.DELETE, - (200, 204, 503, 503), 503) - self.assert_status_map(controller.DELETE, - (200, 204, 404, 404), 404) - self.assert_status_map(controller.DELETE, - (200, 404, 404, 404), 404) - self.assert_status_map(controller.DELETE, - (200, 204, 503, 404), 503) - - self.app.memcache = FakeMemcacheReturnsNone() - # 200: Account check, 404x3: Container check - self.assert_status_map(controller.DELETE, - (200, 404, 404, 404), 404) - - def test_response_get_accept_ranges_header(self): - with save_globals(): - set_http_connect(200, 200, body='{}') - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c?format=json') - self.app.update_request(req) - res = controller.GET(req) - self.assert_('accept-ranges' in res.headers) - self.assertEqual(res.headers['accept-ranges'], 'bytes') - - def test_response_head_accept_ranges_header(self): - with save_globals(): - set_http_connect(200, 200, body='{}') - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c?format=json') - self.app.update_request(req) - res = controller.HEAD(req) - self.assert_('accept-ranges' in res.headers) - self.assertEqual(res.headers['accept-ranges'], 'bytes') - - def test_PUT_metadata(self): - self.metadata_helper('PUT') - - def test_POST_metadata(self): - self.metadata_helper('POST') - - def metadata_helper(self, method): - for test_header, test_value in ( - ('X-Container-Meta-TestHeader', 'TestValue'), - ('X-Container-Meta-TestHeader', ''), - ('X-Remove-Container-Meta-TestHeader', 'anything'), - ('X-Container-Read', '.r:*'), - ('X-Remove-Container-Read', 'anything'), - ('X-Container-Write', 'anyone'), - ('X-Remove-Container-Write', 'anything')): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a/c': - find_header = test_header - find_value = test_value - if find_header.lower().startswith('x-remove-'): - find_header = \ - find_header.lower().replace('-remove', '', 1) - find_value = '' - for k, v in headers.iteritems(): - if k.lower() == find_header.lower() and \ - v == find_value: - break - else: - test_errors.append('%s: %s not in %s' % - (find_header, find_value, headers)) - with save_globals(): - controller = \ - proxy_server.ContainerController(self.app, 'a', 'c') - set_http_connect(200, 201, 201, 201, give_connect=test_connect) - req = Request.blank( - '/v1/a/c', - environ={'REQUEST_METHOD': method, 'swift_owner': True}, - headers={test_header: test_value}) - self.app.update_request(req) - getattr(controller, method)(req) - self.assertEquals(test_errors, []) - - def test_PUT_bad_metadata(self): - self.bad_metadata_helper('PUT') - - def test_POST_bad_metadata(self): - self.bad_metadata_helper('POST') - - def bad_metadata_helper(self, method): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - set_http_connect(200, 201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - - set_http_connect(201, 201, 201) - req = 
Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Container-Meta-' + - ('a' * MAX_META_NAME_LENGTH): 'v'}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Container-Meta-' + - ('a' * (MAX_META_NAME_LENGTH + 1)): 'v'}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Container-Meta-Too-Long': - 'a' * MAX_META_VALUE_LENGTH}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Container-Meta-Too-Long': - 'a' * (MAX_META_VALUE_LENGTH + 1)}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {} - for x in xrange(MAX_META_COUNT): - headers['X-Container-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - headers = {} - for x in xrange(MAX_META_COUNT + 1): - headers['X-Container-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {} - header_value = 'a' * MAX_META_VALUE_LENGTH - size = 0 - x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH - headers['X-Container-Meta-%04d' % x] = header_value - x += 1 - if MAX_META_OVERALL_SIZE - size > 1: - headers['X-Container-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size - 1) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - headers['X-Container-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - def test_POST_calls_clean_acl(self): - called = [False] - - def clean_acl(header, value): - called[0] = True - raise ValueError('fake error') - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Container-Read': '.r:*'}) - req.environ['swift.clean_acl'] = clean_acl - self.app.update_request(req) - controller.POST(req) - self.assert_(called[0]) - called[0] = False - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Container-Write': '.r:*'}) - req.environ['swift.clean_acl'] = 
clean_acl - self.app.update_request(req) - controller.POST(req) - self.assert_(called[0]) - - def test_PUT_calls_clean_acl(self): - called = [False] - - def clean_acl(header, value): - called[0] = True - raise ValueError('fake error') - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Container-Read': '.r:*'}) - req.environ['swift.clean_acl'] = clean_acl - self.app.update_request(req) - controller.PUT(req) - self.assert_(called[0]) - called[0] = False - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Container-Write': '.r:*'}) - req.environ['swift.clean_acl'] = clean_acl - self.app.update_request(req) - controller.PUT(req) - self.assert_(called[0]) - - def test_GET_no_content(self): - with save_globals(): - set_http_connect(200, 204, 204, 204) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c') - self.app.update_request(req) - res = controller.GET(req) - self.assertEquals(res.status_int, 204) - self.assertEquals( - res.environ['swift.container/a/c']['status'], 204) - self.assertEquals(res.content_length, 0) - self.assertTrue('transfer-encoding' not in res.headers) - - def test_GET_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c') - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - res = controller.GET(req) - self.assertEquals(res.environ['swift.container/a/c']['status'], 201) - self.assert_(called[0]) - - def test_HEAD_calls_authorize(self): - called = [False] - - def authorize(req): - called[0] = True - return HTTPUnauthorized(request=req) - with save_globals(): - set_http_connect(200, 201, 201, 201) - controller = proxy_server.ContainerController(self.app, 'account', - 'container') - req = Request.blank('/v1/a/c', {'REQUEST_METHOD': 'HEAD'}) - req.environ['swift.authorize'] = authorize - self.app.update_request(req) - controller.HEAD(req) - self.assert_(called[0]) - - def test_OPTIONS_get_info_drops_origin(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - count = [0] - - def my_get_info(app, env, account, container=None, - ret_not_found=False, swift_source=None): - if count[0] > 11: - return {} - count[0] += 1 - if not container: - return {'some': 'stuff'} - return proxy_base.was_get_info( - app, env, account, container, ret_not_found, swift_source) - - proxy_base.was_get_info = proxy_base.get_info - with mock.patch.object(proxy_base, 'get_info', my_get_info): - proxy_base.get_info = my_get_info - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - controller.OPTIONS(req) - self.assertTrue(count[0] < 11) - - def test_OPTIONS(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - def my_empty_container_info(*args): - return {} - controller.container_info = my_empty_container_info - req = Request.blank( - 
'/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - - def my_empty_origin_container_info(*args): - return {'cors': {'allow_origin': None}} - controller.container_info = my_empty_origin_container_info - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - - def my_container_info(*args): - return { - 'cors': { - 'allow_origin': 'http://foo.bar:8080 https://foo.bar', - 'max_age': '999', - } - } - controller.container_info = my_container_info - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://foo.bar', - 'Access-Control-Request-Method': 'GET'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals( - 'https://foo.bar', - resp.headers['access-control-allow-origin']) - for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['access-control-allow-methods']) - self.assertEquals( - len(resp.headers['access-control-allow-methods'].split(', ')), - 6) - self.assertEquals('999', resp.headers['access-control-max-age']) - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://foo.bar'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - req = Request.blank('/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 6) - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.bar', - 'Access-Control-Request-Method': 'GET'}) - resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.bar', - 'Access-Control-Request-Method': 'GET'}) - controller.app.cors_allow_origin = ['http://foo.bar', ] - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - - def my_container_info_wildcard(*args): - return { - 'cors': { - 'allow_origin': '*', - 'max_age': '999', - } - } - controller.container_info = my_container_info_wildcard - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://bar.baz', - 'Access-Control-Request-Method': 'GET'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals('*', resp.headers['access-control-allow-origin']) - for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['access-control-allow-methods']) - self.assertEquals( - len(resp.headers['access-control-allow-methods'].split(', ')), - 6) - self.assertEquals('999', resp.headers['access-control-max-age']) - - req = Request.blank( - '/v1/a/c/o.jpg', - {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'https://bar.baz', - 'Access-Control-Request-Headers': - 'x-foo, x-bar, x-auth-token', - 'Access-Control-Request-Method': 'GET'} - ) - req.content_length = 0 - resp = controller.OPTIONS(req) - 
self.assertEquals(200, resp.status_int) - self.assertEquals( - sortHeaderNames('x-foo, x-bar, x-auth-token'), - sortHeaderNames(resp.headers['access-control-allow-headers'])) - - def test_CORS_valid(self): - with save_globals(): - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - def stubContainerInfo(*args): - return { - 'cors': { - 'allow_origin': 'http://foo.bar' - } - } - controller.container_info = stubContainerInfo - - def containerGET(controller, req): - return Response(headers={ - 'X-Container-Meta-Color': 'red', - 'X-Super-Secret': 'hush', - }) - - req = Request.blank( - '/v1/a/c', - {'REQUEST_METHOD': 'GET'}, - headers={'Origin': 'http://foo.bar'}) - - resp = cors_validation(containerGET)(controller, req) - - self.assertEquals(200, resp.status_int) - self.assertEquals('http://foo.bar', - resp.headers['access-control-allow-origin']) - self.assertEquals('red', resp.headers['x-container-meta-color']) - # X-Super-Secret is in the response, but not "exposed" - self.assertEquals('hush', resp.headers['x-super-secret']) - self.assertTrue('access-control-expose-headers' in resp.headers) - exposed = set( - h.strip() for h in - resp.headers['access-control-expose-headers'].split(',')) - expected_exposed = set(['cache-control', 'content-language', - 'content-type', 'expires', 'last-modified', - 'pragma', 'etag', 'x-timestamp', - 'x-trans-id', 'x-container-meta-color']) - self.assertEquals(expected_exposed, exposed) - - def _gather_x_account_headers(self, controller_call, req, *connect_args, - **kwargs): - seen_headers = [] - to_capture = ('X-Account-Partition', 'X-Account-Host', - 'X-Account-Device') - - def capture_headers(ipaddr, port, device, partition, method, - path, headers=None, query_string=None): - captured = {} - for header in to_capture: - captured[header] = headers.get(header) - seen_headers.append(captured) - - with save_globals(): - self.app.allow_account_management = True - - set_http_connect(*connect_args, give_connect=capture_headers, - **kwargs) - resp = controller_call(req) - self.assertEqual(2, resp.status_int // 100) # sanity check - - # don't care about the account HEAD, so throw away the - # first element - return sorted(seen_headers[1:], - key=lambda d: d['X-Account-Host'] or 'Z') - - def test_PUT_x_account_headers_with_fewer_account_replicas(self): - self.app.account_ring.set_replicas(2) - req = Request.blank('/v1/a/c', headers={'': ''}) - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - seen_headers = self._gather_x_account_headers( - controller.PUT, req, - 200, 201, 201, 201) # HEAD PUT PUT PUT - self.assertEqual(seen_headers, [ - {'X-Account-Host': '10.0.0.0:1000', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sda'}, - {'X-Account-Host': '10.0.0.1:1001', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdb'}, - {'X-Account-Host': None, - 'X-Account-Partition': None, - 'X-Account-Device': None} - ]) - - def test_PUT_x_account_headers_with_more_account_replicas(self): - self.app.account_ring.set_replicas(4) - req = Request.blank('/v1/a/c', headers={'': ''}) - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - seen_headers = self._gather_x_account_headers( - controller.PUT, req, - 200, 201, 201, 201) # HEAD PUT PUT PUT - self.assertEqual(seen_headers, [ - {'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sda,sdd'}, - {'X-Account-Host': '10.0.0.1:1001', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdb'}, - {'X-Account-Host': 
'10.0.0.2:1002', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdc'} - ]) - - def test_DELETE_x_account_headers_with_fewer_account_replicas(self): - self.app.account_ring.set_replicas(2) - req = Request.blank('/v1/a/c', headers={'': ''}) - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - seen_headers = self._gather_x_account_headers( - controller.DELETE, req, - 200, 204, 204, 204) # HEAD DELETE DELETE DELETE - self.assertEqual(seen_headers, [ - {'X-Account-Host': '10.0.0.0:1000', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sda'}, - {'X-Account-Host': '10.0.0.1:1001', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdb'}, - {'X-Account-Host': None, - 'X-Account-Partition': None, - 'X-Account-Device': None} - ]) - - def test_DELETE_x_account_headers_with_more_account_replicas(self): - self.app.account_ring.set_replicas(4) - req = Request.blank('/v1/a/c', headers={'': ''}) - controller = proxy_server.ContainerController(self.app, 'a', 'c') - - seen_headers = self._gather_x_account_headers( - controller.DELETE, req, - 200, 204, 204, 204) # HEAD DELETE DELETE DELETE - self.assertEqual(seen_headers, [ - {'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sda,sdd'}, - {'X-Account-Host': '10.0.0.1:1001', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdb'}, - {'X-Account-Host': '10.0.0.2:1002', - 'X-Account-Partition': '1', - 'X-Account-Device': 'sdc'} - ]) - - def test_PUT_backed_x_timestamp_header(self): - timestamps = [] - - def capture_timestamps(*args, **kwargs): - headers = kwargs['headers'] - timestamps.append(headers.get('X-Timestamp')) - - req = Request.blank('/v1/a/c', method='PUT', headers={'': ''}) - with save_globals(): - new_connect = set_http_connect(200, # account existance check - 201, 201, 201, - give_connect=capture_timestamps) - resp = self.app.handle_request(req) - - # sanity - self.assertRaises(StopIteration, new_connect.code_iter.next) - self.assertEqual(2, resp.status_int // 100) - - timestamps.pop(0) # account existance check - self.assertEqual(3, len(timestamps)) - for timestamp in timestamps: - self.assertEqual(timestamp, timestamps[0]) - self.assert_(re.match('[0-9]{10}\.[0-9]{5}', timestamp)) - - def test_DELETE_backed_x_timestamp_header(self): - timestamps = [] - - def capture_timestamps(*args, **kwargs): - headers = kwargs['headers'] - timestamps.append(headers.get('X-Timestamp')) - - req = Request.blank('/v1/a/c', method='DELETE', headers={'': ''}) - self.app.update_request(req) - with save_globals(): - new_connect = set_http_connect(200, # account existance check - 201, 201, 201, - give_connect=capture_timestamps) - resp = self.app.handle_request(req) - - # sanity - self.assertRaises(StopIteration, new_connect.code_iter.next) - self.assertEqual(2, resp.status_int // 100) - - timestamps.pop(0) # account existance check - self.assertEqual(3, len(timestamps)) - for timestamp in timestamps: - self.assertEqual(timestamp, timestamps[0]) - self.assert_(re.match('[0-9]{10}\.[0-9]{5}', timestamp)) - - def test_node_read_timeout_retry_to_container(self): - with save_globals(): - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'GET'}) - self.app.node_timeout = 0.1 - set_http_connect(200, 200, 200, body='abcdef', slow=[1.0, 1.0]) - resp = req.get_response(self.app) - got_exc = False - try: - resp.body - except ChunkReadTimeout: - got_exc = True - self.assert_(got_exc) - - -class TestAccountController(unittest.TestCase): - - def setUp(self): - self.app = 
proxy_server.Application(None, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing) - - def assert_status_map(self, method, statuses, expected, env_expected=None): - with save_globals(): - set_http_connect(*statuses) - req = Request.blank('/v1/a', {}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - if env_expected: - self.assertEquals(res.environ['swift.account/a']['status'], - env_expected) - set_http_connect(*statuses) - req = Request.blank('/v1/a/', {}) - self.app.update_request(req) - res = method(req) - self.assertEquals(res.status_int, expected) - if env_expected: - self.assertEquals(res.environ['swift.account/a']['status'], - env_expected) - - def test_OPTIONS(self): - with save_globals(): - self.app.allow_account_management = False - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - for verb in 'OPTIONS GET POST HEAD'.split(): - self.assertTrue( - verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 4) - - # Test a CORS OPTIONS request (i.e. including Origin and - # Access-Control-Request-Method headers) - self.app.allow_account_management = False - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank( - '/v1/account', {'REQUEST_METHOD': 'OPTIONS'}, - headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - for verb in 'OPTIONS GET POST HEAD'.split(): - self.assertTrue( - verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 4) - - self.app.allow_account_management = True - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'}) - req.content_length = 0 - resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): - self.assertTrue( - verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 6) - - def test_GET(self): - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - # GET returns after the first successful call to an Account Server - self.assert_status_map(controller.GET, (200,), 200, 200) - self.assert_status_map(controller.GET, (503, 200), 200, 200) - self.assert_status_map(controller.GET, (503, 503, 200), 200, 200) - self.assert_status_map(controller.GET, (204,), 204, 204) - self.assert_status_map(controller.GET, (503, 204), 204, 204) - self.assert_status_map(controller.GET, (503, 503, 204), 204, 204) - self.assert_status_map(controller.GET, (404, 200), 200, 200) - self.assert_status_map(controller.GET, (404, 404, 200), 200, 200) - self.assert_status_map(controller.GET, (404, 503, 204), 204, 204) - # If Account servers fail, if autocreate = False, return majority - # response - self.assert_status_map(controller.GET, (404, 404, 404), 404, 404) - self.assert_status_map(controller.GET, (404, 404, 503), 404, 404) - self.assert_status_map(controller.GET, (404, 503, 503), 503) - - self.app.memcache = FakeMemcacheReturnsNone() - self.assert_status_map(controller.GET, (404, 404, 404), 404, 404) - - def test_GET_autocreate(self): - with save_globals(): - controller 
= proxy_server.AccountController(self.app, 'account') - self.app.memcache = FakeMemcacheReturnsNone() - self.assertFalse(self.app.account_autocreate) - # Repeat the test for autocreate = False and 404 by all - self.assert_status_map(controller.GET, - (404, 404, 404), 404) - self.assert_status_map(controller.GET, - (404, 503, 404), 404) - # When autocreate is True, if none of the nodes respond 2xx - # And quorum of the nodes responded 404, - # ALL nodes are asked to create the account - # If successful, the GET request is repeated. - controller.app.account_autocreate = True - self.assert_status_map(controller.GET, - (404, 404, 404), 204) - self.assert_status_map(controller.GET, - (404, 503, 404), 204) - - # We always return 503 if no majority between 4xx, 3xx or 2xx found - self.assert_status_map(controller.GET, - (500, 500, 400), 503) - - def test_HEAD(self): - # Same behaviour as GET - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - self.assert_status_map(controller.HEAD, (200,), 200, 200) - self.assert_status_map(controller.HEAD, (503, 200), 200, 200) - self.assert_status_map(controller.HEAD, (503, 503, 200), 200, 200) - self.assert_status_map(controller.HEAD, (204,), 204, 204) - self.assert_status_map(controller.HEAD, (503, 204), 204, 204) - self.assert_status_map(controller.HEAD, (204, 503, 503), 204, 204) - self.assert_status_map(controller.HEAD, (204,), 204, 204) - self.assert_status_map(controller.HEAD, (404, 404, 404), 404, 404) - self.assert_status_map(controller.HEAD, (404, 404, 200), 200, 200) - self.assert_status_map(controller.HEAD, (404, 200), 200, 200) - self.assert_status_map(controller.HEAD, (404, 404, 503), 404, 404) - self.assert_status_map(controller.HEAD, (404, 503, 503), 503) - self.assert_status_map(controller.HEAD, (404, 503, 204), 204, 204) - - def test_HEAD_autocreate(self): - # Same behaviour as GET - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - self.app.memcache = FakeMemcacheReturnsNone() - self.assertFalse(self.app.account_autocreate) - self.assert_status_map(controller.HEAD, - (404, 404, 404), 404) - controller.app.account_autocreate = True - self.assert_status_map(controller.HEAD, - (404, 404, 404), 204) - self.assert_status_map(controller.HEAD, - (500, 404, 404), 204) - # We always return 503 if no majority between 4xx, 3xx or 2xx found - self.assert_status_map(controller.HEAD, - (500, 500, 400), 503) - - def test_POST_autocreate(self): - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - self.app.memcache = FakeMemcacheReturnsNone() - # first test with autocreate being False - self.assertFalse(self.app.account_autocreate) - self.assert_status_map(controller.POST, - (404, 404, 404), 404) - # next turn it on and test account being created than updated - controller.app.account_autocreate = True - self.assert_status_map( - controller.POST, - (404, 404, 404, 202, 202, 202, 201, 201, 201), 201) - # account_info PUT account POST account - self.assert_status_map( - controller.POST, - (404, 404, 503, 201, 201, 503, 204, 204, 504), 204) - # what if create fails - self.assert_status_map( - controller.POST, - (404, 404, 404, 403, 403, 403, 400, 400, 400), 400) - - def test_connection_refused(self): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = 1 # can't connect on this port - controller = proxy_server.AccountController(self.app, 'account') - req = 
Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 503) - - def test_other_socket_error(self): - self.app.account_ring.get_nodes('account') - for dev in self.app.account_ring.devs.values(): - dev['ip'] = '127.0.0.1' - dev['port'] = -1 # invalid port number - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) - self.app.update_request(req) - resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 503) - - def test_response_get_accept_ranges_header(self): - with save_globals(): - set_http_connect(200, 200, body='{}') - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/v1/a?format=json') - self.app.update_request(req) - res = controller.GET(req) - self.assert_('accept-ranges' in res.headers) - self.assertEqual(res.headers['accept-ranges'], 'bytes') - - def test_response_head_accept_ranges_header(self): - with save_globals(): - set_http_connect(200, 200, body='{}') - controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/v1/a?format=json') - self.app.update_request(req) - res = controller.HEAD(req) - res.body - self.assert_('accept-ranges' in res.headers) - self.assertEqual(res.headers['accept-ranges'], 'bytes') - - def test_PUT(self): - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - - def test_status_map(statuses, expected, **kwargs): - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a', {}) - req.content_length = 0 - self.app.update_request(req) - res = controller.PUT(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((201, 201, 201), 405) - self.app.allow_account_management = True - test_status_map((201, 201, 201), 201) - test_status_map((201, 201, 500), 201) - test_status_map((201, 500, 500), 503) - test_status_map((204, 500, 404), 503) - - def test_PUT_max_account_name_length(self): - with save_globals(): - self.app.allow_account_management = True - limit = MAX_ACCOUNT_NAME_LENGTH - controller = proxy_server.AccountController(self.app, '1' * limit) - self.assert_status_map(controller.PUT, (201, 201, 201), 201) - controller = proxy_server.AccountController( - self.app, '2' * (limit + 1)) - self.assert_status_map(controller.PUT, (201, 201, 201), 400) - - def test_PUT_connect_exceptions(self): - with save_globals(): - self.app.allow_account_management = True - controller = proxy_server.AccountController(self.app, 'account') - self.assert_status_map(controller.PUT, (201, 201, -1), 201) - self.assert_status_map(controller.PUT, (201, -1, -1), 503) - self.assert_status_map(controller.PUT, (503, 503, -1), 503) - - def test_PUT_metadata(self): - self.metadata_helper('PUT') - - def test_POST_metadata(self): - self.metadata_helper('POST') - - def metadata_helper(self, method): - for test_header, test_value in ( - ('X-Account-Meta-TestHeader', 'TestValue'), - ('X-Account-Meta-TestHeader', ''), - ('X-Remove-Account-Meta-TestHeader', 'anything')): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if path == '/a': - find_header = test_header - find_value = test_value - if find_header.lower().startswith('x-remove-'): - find_header = \ - find_header.lower().replace('-remove', '', 1) - find_value = '' - for k, v 
in headers.iteritems(): - if k.lower() == find_header.lower() and \ - v == find_value: - break - else: - test_errors.append('%s: %s not in %s' % - (find_header, find_value, headers)) - with save_globals(): - self.app.allow_account_management = True - controller = \ - proxy_server.AccountController(self.app, 'a') - set_http_connect(201, 201, 201, give_connect=test_connect) - req = Request.blank('/v1/a/c', - environ={'REQUEST_METHOD': method}, - headers={test_header: test_value}) - self.app.update_request(req) - getattr(controller, method)(req) - self.assertEquals(test_errors, []) - - def test_PUT_bad_metadata(self): - self.bad_metadata_helper('PUT') - - def test_POST_bad_metadata(self): - self.bad_metadata_helper('POST') - - def bad_metadata_helper(self, method): - with save_globals(): - self.app.allow_account_management = True - controller = proxy_server.AccountController(self.app, 'a') - set_http_connect(200, 201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Account-Meta-' + - ('a' * MAX_META_NAME_LENGTH): 'v'}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Account-Meta-' + - ('a' * (MAX_META_NAME_LENGTH + 1)): 'v'}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Account-Meta-Too-Long': - 'a' * MAX_META_VALUE_LENGTH}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers={'X-Account-Meta-Too-Long': - 'a' * (MAX_META_VALUE_LENGTH + 1)}) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {} - for x in xrange(MAX_META_COUNT): - headers['X-Account-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - set_http_connect(201, 201, 201) - headers = {} - for x in xrange(MAX_META_COUNT + 1): - headers['X-Account-Meta-%d' % x] = 'v' - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - set_http_connect(201, 201, 201) - headers = {} - header_value = 'a' * MAX_META_VALUE_LENGTH - size = 0 - x = 0 - while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH: - size += 4 + MAX_META_VALUE_LENGTH - headers['X-Account-Meta-%04d' % x] = header_value - x += 1 - if MAX_META_OVERALL_SIZE - size > 1: - headers['X-Account-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size - 1) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) - 
set_http_connect(201, 201, 201) - headers['X-Account-Meta-a'] = \ - 'a' * (MAX_META_OVERALL_SIZE - size) - req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, - headers=headers) - self.app.update_request(req) - resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) - - def test_DELETE(self): - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - - def test_status_map(statuses, expected, **kwargs): - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a', {'REQUEST_METHOD': 'DELETE'}) - req.content_length = 0 - self.app.update_request(req) - res = controller.DELETE(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((201, 201, 201), 405) - self.app.allow_account_management = True - test_status_map((201, 201, 201), 201) - test_status_map((201, 201, 500), 201) - test_status_map((201, 500, 500), 503) - test_status_map((204, 500, 404), 503) - - def test_DELETE_with_query_string(self): - # Extra safety in case someone typos a query string for an - # account-level DELETE request that was really meant to be caught by - # some middleware. - with save_globals(): - controller = proxy_server.AccountController(self.app, 'account') - - def test_status_map(statuses, expected, **kwargs): - set_http_connect(*statuses, **kwargs) - self.app.memcache.store = {} - req = Request.blank('/v1/a?whoops', - environ={'REQUEST_METHOD': 'DELETE'}) - req.content_length = 0 - self.app.update_request(req) - res = controller.DELETE(req) - expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) - test_status_map((201, 201, 201), 400) - self.app.allow_account_management = True - test_status_map((201, 201, 201), 400) - test_status_map((201, 201, 500), 400) - test_status_map((201, 500, 500), 400) - test_status_map((204, 500, 404), 400) - - -class TestAccountControllerFakeGetResponse(unittest.TestCase): - """ - Test all the faked-out GET responses for accounts that don't exist. They - have to match the responses for empty accounts that really exist. 
- """ - def setUp(self): - conf = {'account_autocreate': 'yes'} - self.app = proxy_server.Application(conf, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing) - self.app.memcache = FakeMemcacheReturnsNone() - - def test_GET_autocreate_accept_json(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank( - '/v1/a', headers={'Accept': 'application/json'}, - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a'}) - resp = req.get_response(self.app) - self.assertEqual(200, resp.status_int) - self.assertEqual('application/json; charset=utf-8', - resp.headers['Content-Type']) - self.assertEqual("[]", resp.body) - - def test_GET_autocreate_format_json(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank('/v1/a?format=json', - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a', - 'QUERY_STRING': 'format=json'}) - resp = req.get_response(self.app) - self.assertEqual(200, resp.status_int) - self.assertEqual('application/json; charset=utf-8', - resp.headers['Content-Type']) - self.assertEqual("[]", resp.body) - - def test_GET_autocreate_accept_xml(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank('/v1/a', headers={"Accept": "text/xml"}, - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a'}) - - resp = req.get_response(self.app) - self.assertEqual(200, resp.status_int) - - self.assertEqual('text/xml; charset=utf-8', - resp.headers['Content-Type']) - empty_xml_listing = ('\n' - '\n') - self.assertEqual(empty_xml_listing, resp.body) - - def test_GET_autocreate_format_xml(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank('/v1/a?format=xml', - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a', - 'QUERY_STRING': 'format=xml'}) - resp = req.get_response(self.app) - self.assertEqual(200, resp.status_int) - self.assertEqual('application/xml; charset=utf-8', - resp.headers['Content-Type']) - empty_xml_listing = ('\n' - '\n') - self.assertEqual(empty_xml_listing, resp.body) - - def test_GET_autocreate_accept_unknown(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank('/v1/a', headers={"Accept": "mystery/meat"}, - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a'}) - resp = req.get_response(self.app) - self.assertEqual(406, resp.status_int) - - def test_GET_autocreate_format_invalid_utf8(self): - with save_globals(): - set_http_connect(*([404] * 100)) # nonexistent: all backends 404 - req = Request.blank('/v1/a?format=\xff\xfe', - environ={'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/v1/a', - 'QUERY_STRING': 'format=\xff\xfe'}) - resp = req.get_response(self.app) - self.assertEqual(400, resp.status_int) - - def test_account_acl_header_access(self): - acl = { - 'admin': ['AUTH_alice'], - 'read-write': ['AUTH_bob'], - 'read-only': ['AUTH_carol'], - } - prefix = get_sys_meta_prefix('account') - privileged_headers = {(prefix + 'core-access-control'): format_acl( - version=2, acl_dict=acl)} - - app = proxy_server.Application( - None, FakeMemcache(), account_ring=FakeRing(), - container_ring=FakeRing(), object_ring=FakeRing()) - - with save_globals(): - # Mock account server will provide privileged information (ACLs) - set_http_connect(200, 200, 200, headers=privileged_headers) - req = Request.blank('/v1/a', 
environ={'REQUEST_METHOD': 'GET'}) - resp = app.handle_request(req) - - # Not a swift_owner -- ACLs should NOT be in response - header = 'X-Account-Access-Control' - self.assert_(header not in resp.headers, '%r was in %r' % ( - header, resp.headers)) - - # Same setup -- mock acct server will provide ACLs - set_http_connect(200, 200, 200, headers=privileged_headers) - req = Request.blank('/v1/a', environ={'REQUEST_METHOD': 'GET', - 'swift_owner': True}) - resp = app.handle_request(req) - - # For a swift_owner, the ACLs *should* be in response - self.assert_(header in resp.headers, '%r not in %r' % ( - header, resp.headers)) - - def test_account_acls_through_delegation(self): - - # Define a way to grab the requests sent out from the AccountController - # to the Account Server, and a way to inject responses we'd like the - # Account Server to return. - resps_to_send = [] - - @contextmanager - def patch_account_controller_method(verb): - old_method = getattr(proxy_server.AccountController, verb) - new_method = lambda self, req, *_, **__: resps_to_send.pop(0) - try: - setattr(proxy_server.AccountController, verb, new_method) - yield - finally: - setattr(proxy_server.AccountController, verb, old_method) - - def make_test_request(http_method, swift_owner=True): - env = { - 'REQUEST_METHOD': http_method, - 'swift_owner': swift_owner, - } - acl = { - 'admin': ['foo'], - 'read-write': ['bar'], - 'read-only': ['bas'], - } - headers = {} if http_method in ('GET', 'HEAD') else { - 'x-account-access-control': format_acl(version=2, acl_dict=acl) - } - - return Request.blank('/v1/a', environ=env, headers=headers) - - # Our AccountController will invoke methods to communicate with the - # Account Server, and they will return responses like these: - def make_canned_response(http_method): - acl = { - 'admin': ['foo'], - 'read-write': ['bar'], - 'read-only': ['bas'], - } - headers = {'x-account-sysmeta-core-access-control': format_acl( - version=2, acl_dict=acl)} - canned_resp = Response(headers=headers) - canned_resp.environ = { - 'PATH_INFO': '/acct', - 'REQUEST_METHOD': http_method, - } - resps_to_send.append(canned_resp) - - app = proxy_server.Application( - None, FakeMemcache(), account_ring=FakeRing(), - container_ring=FakeRing(), object_ring=FakeRing()) - app.allow_account_management = True - - ext_header = 'x-account-access-control' - with patch_account_controller_method('GETorHEAD_base'): - # GET/HEAD requests should remap sysmeta headers from acct server - for verb in ('GET', 'HEAD'): - make_canned_response(verb) - req = make_test_request(verb) - resp = app.handle_request(req) - h = parse_acl(version=2, data=resp.headers.get(ext_header)) - self.assertEqual(h['admin'], ['foo']) - self.assertEqual(h['read-write'], ['bar']) - self.assertEqual(h['read-only'], ['bas']) - - # swift_owner = False: GET/HEAD shouldn't return sensitive info - make_canned_response(verb) - req = make_test_request(verb, swift_owner=False) - resp = app.handle_request(req) - h = resp.headers - self.assertEqual(None, h.get(ext_header)) - - # swift_owner unset: GET/HEAD shouldn't return sensitive info - make_canned_response(verb) - req = make_test_request(verb, swift_owner=False) - del req.environ['swift_owner'] - resp = app.handle_request(req) - h = resp.headers - self.assertEqual(None, h.get(ext_header)) - - # Verify that PUT/POST requests remap sysmeta headers from acct server - with patch_account_controller_method('make_requests'): - make_canned_response('PUT') - req = make_test_request('PUT') - resp = app.handle_request(req) - 
- h = parse_acl(version=2, data=resp.headers.get(ext_header)) - self.assertEqual(h['admin'], ['foo']) - self.assertEqual(h['read-write'], ['bar']) - self.assertEqual(h['read-only'], ['bas']) - - make_canned_response('POST') - req = make_test_request('POST') - resp = app.handle_request(req) - - h = parse_acl(version=2, data=resp.headers.get(ext_header)) - self.assertEqual(h['admin'], ['foo']) - self.assertEqual(h['read-write'], ['bar']) - self.assertEqual(h['read-only'], ['bas']) - - -class FakeObjectController(object): - - def __init__(self): - self.app = self - self.logger = self - self.account_name = 'a' - self.container_name = 'c' - self.object_name = 'o' - self.trans_id = 'tx1' - self.object_ring = FakeRing() - self.node_timeout = 1 - self.rate_limit_after_segment = 3 - self.rate_limit_segments_per_sec = 2 - self.GETorHEAD_base_args = [] - - def exception(self, *args): - self.exception_args = args - self.exception_info = sys.exc_info() - - def GETorHEAD_base(self, *args): - self.GETorHEAD_base_args.append(args) - req = args[0] - path = args[4] - body = data = path[-1] * int(path[-1]) - if req.range: - r = req.range.ranges_for_length(len(data)) - if r: - (start, stop) = r[0] - body = data[start:stop] - resp = Response(app_iter=iter(body)) - return resp - - def iter_nodes(self, ring, partition): - for node in ring.get_part_nodes(partition): - yield node - for node in ring.get_more_nodes(partition): - yield node - - def sort_nodes(self, nodes): - return nodes - - def set_node_timing(self, node, timing): - return - - -class Stub(object): - pass - - -class TestProxyObjectPerformance(unittest.TestCase): - - def setUp(self): - # This is just a simple test that can be used to verify and debug the - # various data paths between the proxy server and the object - # server. Used as a play ground to debug buffer sizes for sockets. 
- prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - # Client is transmitting in 2 MB chunks - fd = sock.makefile('wb', 2 * 1024 * 1024) - # Small, fast for testing - obj_len = 2 * 64 * 1024 - # Use 1 GB or more for measurements - #obj_len = 2 * 512 * 1024 * 1024 - self.path = '/v1/a/c/o.large' - fd.write('PUT %s HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Length: %s\r\n' - 'Content-Type: application/octet-stream\r\n' - '\r\n' % (self.path, str(obj_len))) - fd.write('a' * obj_len) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - self.obj_len = obj_len - - def test_GET_debug_large_file(self): - for i in range(10): - start = time.time() - - prolis = _test_sockets[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - # Client is reading in 2 MB chunks - fd = sock.makefile('wb', 2 * 1024 * 1024) - fd.write('GET %s HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - '\r\n' % self.path) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - - total = 0 - while True: - buf = fd.read(100000) - if not buf: - break - total += len(buf) - self.assertEqual(total, self.obj_len) - - end = time.time() - print "Run %02d took %07.03f" % (i, end - start) - - -class TestSwiftInfo(unittest.TestCase): - def setUp(self): - utils._swift_info = {} - utils._swift_admin_info = {} - - def test_registered_defaults(self): - proxy_server.Application({}, FakeMemcache(), - account_ring=FakeRing(), - container_ring=FakeRing(), - object_ring=FakeRing) - - si = utils.get_swift_info()['swift'] - self.assertTrue('version' in si) - self.assertEqual(si['max_file_size'], MAX_FILE_SIZE) - self.assertEqual(si['max_meta_name_length'], MAX_META_NAME_LENGTH) - self.assertEqual(si['max_meta_value_length'], MAX_META_VALUE_LENGTH) - self.assertEqual(si['max_meta_count'], MAX_META_COUNT) - self.assertEqual(si['account_listing_limit'], ACCOUNT_LISTING_LIMIT) - self.assertEqual(si['container_listing_limit'], - CONTAINER_LISTING_LIMIT) - self.assertEqual(si['max_account_name_length'], - MAX_ACCOUNT_NAME_LENGTH) - self.assertEqual(si['max_container_name_length'], - MAX_CONTAINER_NAME_LENGTH) - self.assertEqual(si['max_object_name_length'], MAX_OBJECT_NAME_LENGTH) - - -if __name__ == '__main__': - setup() - try: - unittest.main() - finally: - teardown() diff --git a/tools/gswauth_functional_tests.sh b/tools/gswauth_functional_tests.sh deleted file mode 100755 index 0e4bc90..0000000 --- a/tools/gswauth_functional_tests.sh +++ /dev/null @@ -1,115 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-
-# This program expects to be run by tox in a virtual python environment
-# so that it does not pollute the host development system
-
-sudo_env()
-{
-    sudo bash -c "PATH=$PATH $*"
-}
-
-cleanup()
-{
-    sudo service memcached stop
-    sudo_env swift-init main stop
-    sudo rm -rf /etc/swift > /dev/null 2>&1
-    sudo rm -rf /mnt/gluster-object/test{,2}/* > /dev/null 2>&1
-    sudo setfattr -x user.swift.metadata /mnt/gluster-object/test{,2} > /dev/null 2>&1
-    gswauth_cleanup
-}
-
-gswauth_cleanup()
-{
-    sudo rm -rf /mnt/gluster-object/gsmetadata/.* > /dev/null 2>&1
-    sudo rm -rf /mnt/gluster-object/gsmetadata/* > /dev/null 2>&1
-    sudo setfattr -x user.swift.metadata /mnt/gluster-object/gsmetadata > /dev/null 2>&1
-}
-
-quit()
-{
-    echo "$1"
-    exit 1
-}
-
-
-fail()
-{
-    cleanup
-    quit "$1"
-}
-
-run_generic_tests()
-{
-    # clean up gsmetadata dir
-    gswauth_cleanup
-
-    #swauth-prep
-    sudo_env gswauth-prep -K gswauthkey || fail "Unable to prep gswauth"
-    sudo_env gswauth-add-user -K gswauthkey -a test tester testing || fail "Unable to add user test"
-    sudo_env gswauth-add-user -K gswauthkey -a test2 tester2 testing2 || fail "Unable to add user test2"
-    sudo_env gswauth-add-user -K gswauthkey test tester3 testing3 || fail "Unable to add user test3"
-
-    nosetests -v --exe \
-        --with-xunit \
-        --xunit-file functional_tests_result/gluster-swift-gswauth-generic-functional-TC-report.xml \
-        --with-html-output \
-        --html-out-file functional_tests_result/gluster-swift-gswauth-generic-functional-result.html \
-        test/functional || fail "Functional tests failed"
-}
-
-### MAIN ###
-
-# Only run if there is no configuration in the system
-if [ -x /etc/swift ] ; then
-    quit "/etc/swift exists, cannot run functional tests."
-fi
-
-# Check the directories exist
-DIRS="/mnt/gluster-object /mnt/gluster-object/test /mnt/gluster-object/test2 /mnt/gluster-object/gsmetadata"
-for d in $DIRS ; do
-    if [ ! -x $d ] ; then
-        quit "$d must exist on an XFS or GlusterFS volume"
-    fi
-done
-
-export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf
-
-# Install the configuration files
-sudo mkdir /etc/swift > /dev/null 2>&1
-sudo cp -r test/functional_auth/gswauth/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift"
-sudo_env gluster-swift-gen-builders test test2 gsmetadata || fail "Unable to create ring files"
-
-# Start the services
-sudo service memcached start || fail "Unable to start memcached"
-sudo_env swift-init main start || fail "Unable to start swift"
-
-#swauth-prep
-sudo_env gswauth-prep -K gswauthkey || fail "Unable to prep gswauth"
-
-mkdir functional_tests_result > /dev/null 2>&1
-nosetests -v --exe \
-    --with-xunit \
-    --xunit-file functional_tests_result/gluster-swift-gswauth-functional-TC-report.xml \
-    --with-html-output \
-    --html-out-file functional_tests_result/gluster-swift-gswauth-functional-result.html \
-    test/functional_auth/gswauth || fail "Functional gswauth test failed"
-
-run_generic_tests
-
-cleanup
-exit 0
diff --git a/tools/keystone_functional_tests.sh b/tools/keystone_functional_tests.sh
deleted file mode 100755
index b5aa25a..0000000
--- a/tools/keystone_functional_tests.sh
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2013 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This program expects to be run by tox in a virtual python environment
-# so that it does not pollute the host development system
-
-sudo_env()
-{
-    sudo bash -c "PATH=$PATH $*"
-}
-
-cleanup()
-{
-    sudo service memcached stop
-    sudo_env swift-init main stop
-    sudo rm -rf /etc/swift > /dev/null 2>&1
-    for acct in /mnt/gluster-object/* ; do
-        sudo rm -rf /mnt/gluster-object/${acct}/* > /dev/null 2>&1
-        sudo setfattr -x user.swift.metadata /mnt/gluster-object/${acct} > /dev/null 2>&1
-    done
-}
-
-quit()
-{
-    echo "$1"
-    exit 1
-}
-
-
-fail()
-{
-    cleanup
-    quit "$1"
-}
-
-### MAIN ###
-
-# Only run if there is no configuration in the system
-if [ -x /etc/swift ] ; then
-    quit "/etc/swift exists, cannot run functional tests."
-fi
-
-# Check the directories exist
-DIRS="/mnt/gluster-object /mnt/gluster-object/test /mnt/gluster-object/test2 /mnt/gluster-object/gsmetadata"
-for d in $DIRS ; do
-    if [ ! -x $d ] ; then
-        quit "$d must exist on an XFS or GlusterFS volume"
-    fi
-done
-
-# Check if keystone is running on this host
-if ! ps -ef | grep keystone | grep python > /dev/null 2>&1 ; then
-    fail "Keystone is not running on localhost"
-fi
-
-export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf
-
-# Install the configuration files
-sudo mkdir /etc/swift > /dev/null 2>&1
-sudo cp -r test/functional_auth/keystone/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift"
-
-# Create the ring files according to any directories
-# in /mnt/gluster-object since the script can't
-# interrogate keystone to get the tenant-id's
-accounts=""
-for acct in /mnt/gluster-object/* ; do
-    acct=`basename $acct`
-    accounts="$acct $accounts"
-done
-sudo_env gluster-swift-gen-builders $accounts || fail "Unable to create ring files"
-
-# Start the services
-sudo service memcached start || fail "Unable to start memcached"
-sudo_env swift-init main start || fail "Unable to start swift"
-
-mkdir functional_tests_result > /dev/null 2>&1
-
-echo "== Keystone: Generic Functional Tests =="
-
-nosetests -v --exe \
-    --with-xunit \
-    --xunit-file functional_tests_result/gluster-swift-keystone-generic-functional-TC-report.xml \
-    --with-html-output \
-    --html-out-file functional_tests_result/gluster-swift-keystone-generic-functional-result.html \
-    test/functional || fail "Functional tests failed"
-
-cleanup
-exit 0
diff --git a/tools/swkrbath_functional_tests.sh b/tools/swkrbath_functional_tests.sh
deleted file mode 100755
index 0768de6..0000000
--- a/tools/swkrbath_functional_tests.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2014 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This program expects to be run by tox in a virtual python environment
-# so that it does not pollute the host development system
-
-sudo_env()
-{
-    sudo bash -c "PATH=$PATH $*"
-}
-
-cleanup()
-{
-    sudo service memcached stop
-    sudo_env swift-init main stop
-    sudo rm -rf /etc/swift > /dev/null 2>&1
-    for acct in /mnt/gluster-object/* ; do
-        sudo rm -rf /mnt/gluster-object/${acct}/* > /dev/null 2>&1
-        sudo setfattr -x user.swift.metadata /mnt/gluster-object/${acct} > /dev/null 2>&1
-    done
-}
-
-quit()
-{
-    echo "$1"
-    exit 1
-}
-
-
-fail()
-{
-    cleanup
-    quit "$1"
-}
-
-### MAIN ###
-
-# Only run if there is no configuration in the system
-if [ -x /etc/swift ] ; then
-    quit "/etc/swift exists, cannot run functional tests."
-fi
-
-# Check the directories exist
-DIRS="/mnt/gluster-object /mnt/gluster-object/test /mnt/gluster-object/test2 /mnt/gluster-object/gsmetadata"
-for d in $DIRS ; do
-    if [ ! -x $d ] ; then
-        quit "$d must exist on an XFS or GlusterFS volume"
-    fi
-done
-
-export SWIFT_TEST_CONFIG_FILE=/etc/swift/test.conf
-
-# Install the configuration files
-sudo mkdir /etc/swift > /dev/null 2>&1
-sudo cp -r test/functional_auth/swiftkerbauth/conf/* /etc/swift || fail "Unable to copy configuration files to /etc/swift"
-
-# Create the ring files
-accounts=""
-for acct in /mnt/gluster-object/* ; do
-    acct=`basename $acct`
-    accounts="$acct $accounts"
-done
-sudo_env gluster-swift-gen-builders $accounts || fail "Unable to create ring files"
-
-# Start the services
-sudo service memcached start || fail "Unable to start memcached"
-sudo_env swift-init main start || fail "Unable to start swift"
-
-mkdir functional_tests_result > /dev/null 2>&1
-
-echo "== SwiftKerbAuth: Functional Tests =="
-
-
-nosetests -v --exe \
-    --with-xunit \
-    --xunit-file functional_tests_result/gluster-swift-swiftkerbauth-generic-functional-TC-report.xml \
-    --with-html-output \
-    --html-out-file functional_tests_result/gluster-swift-swiftkerbauth-generic-functional-result.html \
-    test/functional_auth/swiftkerbauth || fail "Functional tests failed"
-
-cleanup
-exit 0
diff --git a/tox.ini b/tox.ini
index 299fd6f..44a4bce 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py26,py27,pep8,functest,ksfunctest
+envlist = py26,py27,pep8,functest,functest-ci
 minversion = 1.6
 skipsdist = True
 
@@ -18,7 +18,9 @@ setenv = VIRTUAL_ENV={envdir}
         NOSE_COVER_BRANCHES=1
         NOSE_COVER_PACKAGE=gluster
 deps =
-  https://launchpad.net/swift/icehouse/1.13.1/+download/swift-1.13.1.tar.gz
+# GitHub's .zip URL won't work! pip supports installing from git repos.
+# https://pip.pypa.io/en/latest/reference/pip_install.html#git
+  git+https://github.com/openstack/swift.git@2.0.0.rc1
   -r{toxinidir}/requirements.txt
   -r{toxinidir}/test-requirements.txt
 changedir = {toxinidir}/test/unit
@@ -34,18 +36,14 @@ setenv = VIRTUAL_ENV={envdir}
 [tox:jenkins]
 downloadcache = ~/cache/pip
 
+# To be used by Jenkins
+[testenv:functest-ci]
+changedir = {toxinidir}
+commands = bash ./.functests-ci -q
+
 [testenv:functest]
 changedir = {toxinidir}
-commands = bash ./.functests
-           bash tools/gswauth_functional_tests.sh
-
-[testenv:ksfunctest]
-changedir = {toxinidir}
-commands = bash tools/keystone_functional_tests.sh
-
-[testenv:swfunctest]
-changedir = {toxinidir}
-commands = bash tools/swkrbath_functional_tests.sh
+commands = bash ./.functests -q
 
 [testenv:pep8]
 changedir = {toxinidir}