Merged from trunk

gholt 2011-06-14 22:20:23 +00:00
commit d2230e853d
28 changed files with 390 additions and 102 deletions

View File

@@ -1,3 +1,19 @@
+swift (1.4.1)
+    * st renamed to swift
+    * swauth was separated from swift. It is now its own project and can be
+      found at https://github.com/gholt/swauth.
+    * tempauth middleware added as an extremely limited auth system for dev
+      work.
+    * Account and container listings now properly labeled UTF-8 (previously the
+      label was "utf8").
+    * Accounts are auto-created if an auth token is valid when the
+      account_autocreate proxy config parameter is set to true.
 swift (1.4.0)
     * swift-bench now cleans up containers it creates.

View File

View File

@@ -557,6 +557,10 @@ object_post_as_copy          true            Set object_post_as_copy = false
                                              in this mode, features like
                                              container sync won't be able to
                                              sync posts.
+account_autocreate           false           If set to 'true' authorized
+                                             accounts that do not yet exist
+                                             within the Swift cluster will
+                                             be automatically created.
 ============================ =============== =============================

 [tempauth]

View File

@@ -625,7 +625,7 @@ Setting up scripts for running Swift
 #. `recreateaccounts`
 #. Get an `X-Storage-Url` and `X-Auth-Token`: ``curl -v -H 'X-Storage-User: test:tester' -H 'X-Storage-Pass: testing' http://127.0.0.1:8080/auth/v1.0``
 #. Check that you can GET account: ``curl -v -H 'X-Auth-Token: <token-from-x-auth-token-above>' <url-from-x-storage-url-above>``
-#. Check that `st` works: `st -A http://127.0.0.1:8080/auth/v1.0 -U test:tester -K testing stat`
+#. Check that `swift` works: `swift -A http://127.0.0.1:8080/auth/v1.0 -U test:tester -K testing stat`
 #. `cp ~/swift/trunk/test/functional/sample.conf /etc/swift/func_test.conf`
 #. `cd ~/swift/trunk; ./.functests` (Note: functional tests will first delete
    everything in the configured accounts.)

View File

@@ -372,34 +372,34 @@ You run these commands from the Proxy node.
       curl -k -v -H 'X-Auth-Token: <token-from-x-auth-token-above>' <url-from-x-storage-url-above>

-#. Check that ``st`` works (at this point, expect zero containers, zero objects, and zero bytes)::
+#. Check that ``swift`` works (at this point, expect zero containers, zero objects, and zero bytes)::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass stat
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass stat

-#. Use ``st`` to upload a few files named 'bigfile[1-2].tgz' to a container named 'myfiles'::
+#. Use ``swift`` to upload a few files named 'bigfile[1-2].tgz' to a container named 'myfiles'::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload myfiles bigfile1.tgz
-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload myfiles bigfile2.tgz
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload myfiles bigfile1.tgz
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload myfiles bigfile2.tgz

-#. Use ``st`` to download all files from the 'myfiles' container::
+#. Use ``swift`` to download all files from the 'myfiles' container::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass download myfiles
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass download myfiles

-#. Use ``st`` to save a backup of your builder files to a container named 'builders'. Very important not to lose your builders!::
+#. Use ``swift`` to save a backup of your builder files to a container named 'builders'. Very important not to lose your builders!::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload builders /etc/swift/*.builder
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass upload builders /etc/swift/*.builder

-#. Use ``st`` to list your containers::
+#. Use ``swift`` to list your containers::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass list
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass list

-#. Use ``st`` to list the contents of your 'builders' container::
+#. Use ``swift`` to list the contents of your 'builders' container::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass list builders
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass list builders

-#. Use ``st`` to download all files from the 'builders' container::
+#. Use ``swift`` to download all files from the 'builders' container::

-      st -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass download builders
+      swift -A https://$PROXY_LOCAL_NET_IP:8080/auth/v1.0 -U system:root -K testpass download builders

 .. _add-proxy-server:

View File

@@ -14,24 +14,24 @@ concatenated as a single object. This also offers much greater upload speed
 with the possibility of parallel uploads of the segments.

 ----------------------------------
-Using ``st`` for Segmented Objects
+Using ``swift`` for Segmented Objects
 ----------------------------------

-The quickest way to try out this feature is use the included ``st`` Swift Tool.
+The quickest way to try out this feature is use the included ``swift`` Swift Tool.
 You can use the ``-S`` option to specify the segment size to use when splitting
 a large file. For example::

-    st upload test_container -S 1073741824 large_file
+    swift upload test_container -S 1073741824 large_file

 This would split the large_file into 1G segments and begin uploading those
-segments in parallel. Once all the segments have been uploaded, ``st`` will
+segments in parallel. Once all the segments have been uploaded, ``swift`` will
 then create the manifest file so the segments can be downloaded as one.

-So now, the following ``st`` command would download the entire large object::
+So now, the following ``swift`` command would download the entire large object::

-    st download test_container large_file
+    swift download test_container large_file

-``st`` uses a strict convention for its segmented object support. In the above
+``swift`` uses a strict convention for its segmented object support. In the above
 example it will upload all the segments into a second container named
 test_container_segments. These segments will have names like
 large_file/1290206778.25/21474836480/00000000,
@@ -43,7 +43,7 @@ the segment name format of <name>/<timestamp>/<size>/<segment> is so that an
 upload of a new file with the same name won't overwrite the contents of the
 first until the last moment when the manifest file is updated.

-``st`` will manage these segment files for you, deleting old segments on
+``swift`` will manage these segment files for you, deleting old segments on
 deletes and overwrites, etc. You can override this behavior with the
 ``--leave-segments`` option if desired; this is useful if you want to have
 multiple versions of the same large object available.
@@ -53,14 +53,14 @@ Direct API
 ----------

 You can also work with the segments and manifests directly with HTTP requests
-instead of having ``st`` do that for you. You can just upload the segments like
+instead of having ``swift`` do that for you. You can just upload the segments like
 you would any other object and the manifest is just a zero-byte file with an
 extra ``X-Object-Manifest`` header.

 All the object segments need to be in the same container, have a common object
 name prefix, and their names sort in the order they should be concatenated.
 They don't have to be in the same container as the manifest file will be, which
-is useful to keep container listings clean as explained above with ``st``.
+is useful to keep container listings clean as explained above with ``swift``.

 The manifest file is simply a zero-byte file with the extra
 ``X-Object-Manifest: <container>/<prefix>`` header, where ``<container>`` is
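
As a rough illustration of the Direct API described above (not part of this
commit; the storage URL, token, container and segment names are assumed
example values), a segmented upload can be done with plain HTTP PUTs::

    # Sketch only: two segments plus a zero-byte manifest, via httplib.
    import httplib
    import urlparse

    STORAGE_URL = 'http://127.0.0.1:8080/v1/AUTH_test'   # from the auth response
    TOKEN = 'AUTH_tk_example'                             # from the auth response

    def put(path, body='', extra_headers=None):
        parsed = urlparse.urlparse(STORAGE_URL)
        conn = httplib.HTTPConnection(parsed.netloc)
        headers = {'X-Auth-Token': TOKEN}
        headers.update(extra_headers or {})
        conn.request('PUT', parsed.path + path, body, headers)
        resp = conn.getresponse()
        resp.read()
        return resp.status

    # Segments live in their own container; names must sort in concatenation order.
    put('/my_segments/large_file/00000000', 'first half of the data')
    put('/my_segments/large_file/00000001', 'second half of the data')

    # Zero-byte manifest in the main container, pointing at the segment prefix.
    put('/my_container/large_file', '',
        {'X-Object-Manifest': 'my_segments/large_file/'})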

View File

@@ -19,11 +19,11 @@ the proxy log output to an hourly log file. For example, a proxy request that
 is made on August 4, 2010 at 12:37 gets logged in a file named 2010080412.
 This allows easy log rotation and easy per-hour log processing.

-******************
-Account stats logs
-******************
+*********************************
+Account / Container DB stats logs
+*********************************

-Account stats logs are generated by a stats system process.
+DB stats logs are generated by a stats system process.
 swift-account-stats-logger runs on each account server (via cron) and walks
 the filesystem looking for account databases. When an account database is
 found, the logger selects the account hash, bytes_used, container_count, and
@@ -34,7 +34,8 @@ runs the account stats logger every hour. Therefore, in a cluster of ten
 account servers, ten csv files are produced every hour. Also, every account
 will have one entry for every replica in the system. On average, there will be
 three copies of each account in the aggregate of all account stat csv files
-created in one system-wide run.
+created in one system-wide run. The swift-container-stats-logger runs in a
+similar fashion, scanning the container dbs.

 ----------------------
 Log Processing plugins

View File

@@ -54,3 +54,4 @@ processable = false
 # devices = /srv/node
 # mount_check = true
 # user = swift
+# metadata_keys = comma separated list of user metadata keys to be collected

View File

@@ -45,6 +45,9 @@ use = egg:swift#proxy
 # makes for quicker posts; but since the container metadata isn't updated in
 # this mode, features like container sync won't be able to sync posts.
 # object_post_as_copy = true
+# If set to 'true' authorized accounts that do not yet exist within the Swift
+# cluster will be automatically created.
+# account_autocreate = false

 [filter:tempauth]
 use = egg:swift#tempauth
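
With account_autocreate turned on as above, an authorized request to a
not-yet-existing account creates the account instead of returning 404 (see the
proxy and test changes later in this commit). A behavior sketch, not part of
this commit, with the proxy address, account name and token assumed::

    # Sketch only: HEAD on a brand-new account with account_autocreate = true.
    import httplib

    conn = httplib.HTTPConnection('127.0.0.1:8080')       # assumed proxy address
    conn.request('HEAD', '/v1/AUTH_newaccount', '',
                 {'X-Auth-Token': 'AUTH_tk_example'})      # assumed valid token
    print conn.getresponse().status    # 204 once the account is autocreated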

View File

@@ -76,7 +76,7 @@ setup(
         ],
     install_requires=[],  # removed for better compat
     scripts=[
-        'bin/st', 'bin/swift-account-auditor',
+        'bin/swift', 'bin/swift-account-auditor',
         'bin/swift-account-audit', 'bin/swift-account-reaper',
         'bin/swift-account-replicator', 'bin/swift-account-server',
         'bin/swift-container-auditor',

View File

@@ -14,7 +14,7 @@ class Version(object):
         return '%s-dev' % (self.canonical_version,)

-_version = Version('1.4.1', False)
+_version = Version('1.4.2', False)

 __version__ = _version.pretty_version
 __canonical_version__ = _version.canonical_version

View File

@@ -244,7 +244,7 @@ class AccountController(object):
             account_list = '\n'.join(r[0] for r in account_list) + '\n'
         ret = Response(body=account_list, request=req, headers=resp_headers)
         ret.content_type = out_content_type
-        ret.charset = 'utf8'
+        ret.charset = 'utf-8'
         return ret

     def REPLICATE(self, req):

View File

@@ -43,7 +43,7 @@ class Bench(object):
         self.user = conf.user
         self.key = conf.key
         self.auth_url = conf.auth
-        self.use_proxy = conf.use_proxy in TRUE_VALUES
+        self.use_proxy = conf.use_proxy.lower() in TRUE_VALUES
         if self.use_proxy:
             url, token = client.get_auth(self.auth_url, self.user, self.key)
             self.token = token
@@ -125,7 +125,7 @@ class BenchController(object):
         self.logger = logger
         self.conf = conf
         self.names = []
-        self.delete = conf.delete in TRUE_VALUES
+        self.delete = conf.delete.lower() in TRUE_VALUES
         self.gets = int(conf.num_gets)

     def run(self):

View File

@@ -75,7 +75,8 @@ def run_daemon(klass, conf_file, section_name='', once=False, **kwargs):
                          log_name=kwargs.get('log_name'))

     # once on command line (i.e. daemonize=false) will over-ride config
-    once = once or conf.get('daemonize', 'true') not in utils.TRUE_VALUES
+    once = once or \
+        conf.get('daemonize', 'true').lower() not in utils.TRUE_VALUES

     # pre-configure logger
     if 'logger' in kwargs:

View File

@@ -879,14 +879,16 @@ class ContainerBroker(DatabaseBroker):
         return (row['object_count'] in (None, '', 0, '0')) and \
             (float(row['delete_timestamp']) > float(row['put_timestamp']))

-    def get_info(self):
+    def get_info(self, include_metadata=False):
         """
         Get global data for the container.

-        :returns: sqlite.row of (account, container, created_at, put_timestamp,
-                  delete_timestamp, object_count, bytes_used,
-                  reported_put_timestamp, reported_delete_timestamp,
-                  reported_object_count, reported_bytes_used, hash, id)
+        :returns: dict with keys: account, container, created_at,
+                  put_timestamp, delete_timestamp, object_count, bytes_used,
+                  reported_put_timestamp, reported_delete_timestamp,
+                  reported_object_count, reported_bytes_used, hash, id
+                  If include_metadata is set, metadata is included as a key
+                  pointing to a dict of tuples of the metadata
         """
         try:
             self._commit_puts()
@@ -894,13 +896,34 @@ class ContainerBroker(DatabaseBroker):
             if not self.stale_reads_ok:
                 raise
         with self.get() as conn:
-            return conn.execute('''
-                SELECT account, container, created_at, put_timestamp,
-                    delete_timestamp, object_count, bytes_used,
-                    reported_put_timestamp, reported_delete_timestamp,
-                    reported_object_count, reported_bytes_used, hash, id
-                FROM container_stat
-            ''').fetchone()
+            metadata = ''
+            if include_metadata:
+                metadata = ', metadata'
+            try:
+                data = conn.execute('''
+                    SELECT account, container, created_at, put_timestamp,
+                        delete_timestamp, object_count, bytes_used,
+                        reported_put_timestamp, reported_delete_timestamp,
+                        reported_object_count, reported_bytes_used, hash, id
+                        %s
+                    FROM container_stat
+                ''' % metadata).fetchone()
+            except sqlite3.OperationalError, err:
+                if 'no such column: metadata' not in str(err):
+                    raise
+                data = conn.execute('''
+                    SELECT account, container, created_at, put_timestamp,
+                        delete_timestamp, object_count, bytes_used,
+                        reported_put_timestamp, reported_delete_timestamp,
+                        reported_object_count, reported_bytes_used, hash, id
+                    FROM container_stat''').fetchone()
+            data = dict(data)
+            if include_metadata:
+                try:
+                    data['metadata'] = json.loads(data.get('metadata', ''))
+                except ValueError:
+                    data['metadata'] = {}
+            return data

     def reported(self, put_timestamp, delete_timestamp, object_count,
                  bytes_used):
@@ -1394,9 +1417,9 @@ class AccountBroker(DatabaseBroker):
         """
         Get global data for the account.

-        :returns: sqlite.row of (account, created_at, put_timestamp,
-                  delete_timestamp, container_count, object_count,
-                  bytes_used, hash, id)
+        :returns: dict with keys: account, created_at, put_timestamp,
+                  delete_timestamp, container_count, object_count,
+                  bytes_used, hash, id
         """
         try:
             self._commit_puts()
@@ -1404,11 +1427,11 @@ class AccountBroker(DatabaseBroker):
             if not self.stale_reads_ok:
                 raise
         with self.get() as conn:
-            return conn.execute('''
-                SELECT account, created_at, put_timestamp, delete_timestamp,
-                    container_count, object_count, bytes_used, hash, id
-                FROM account_stat
-            ''').fetchone()
+            return dict(conn.execute('''
+                SELECT account, created_at, put_timestamp, delete_timestamp,
+                    container_count, object_count, bytes_used, hash, id
+                FROM account_stat
+            ''').fetchone())

     def list_containers_iter(self, limit, marker, end_marker, prefix,
                              delimiter):
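
A minimal usage sketch of the reworked get_info() (not part of this commit;
the database path is an assumed example)::

    # Sketch only: get_info() now returns a plain dict, optionally with metadata.
    from swift.common.db import ContainerBroker

    broker = ContainerBroker('/srv/node/sda1/containers/.../example.db')  # assumed path
    info = broker.get_info(include_metadata=True)
    print info['object_count'], info['bytes_used']
    # metadata is a dict of (value, timestamp) tuples, or {} if none is stored
    print info.get('metadata', {})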

View File

@@ -74,36 +74,36 @@ the .../listing.css style sheet. If you "view source" in your browser on a
 listing page, you will see the well defined document structure that can be
 styled.

-Example usage of this middleware via ``st``:
+Example usage of this middleware via ``swift``:

     Make the container publicly readable::

-        st post -r '.r:*' container
+        swift post -r '.r:*' container

     You should be able to get objects directly, but no index.html resolution or
     listings.

     Set an index file directive::

-        st post -m 'web-index:index.html' container
+        swift post -m 'web-index:index.html' container

     You should be able to hit paths that have an index.html without needing to
     type the index.html part.

     Turn on listings::

-        st post -m 'web-listings: true' container
+        swift post -m 'web-listings: true' container

     Now you should see object listings for paths and pseudo paths that have no
     index.html.

     Enable a custom listings style sheet::

-        st post -m 'web-listings-css:listings.css' container
+        swift post -m 'web-listings-css:listings.css' container

     Set an error file::

-        st post -m 'web-error:error.html' container
+        swift post -m 'web-error:error.html' container

     Now 401's should load 401error.html, 404's should load 404error.html, etc.
 """
@@ -270,7 +270,7 @@ class StaticWeb(object):
         :param start_response: The original WSGI start_response hook.
         :param prefix: Any prefix desired for the container listing.
         """
-        if self._listings not in TRUE_VALUES:
+        if self._listings.lower() not in TRUE_VALUES:
             resp = HTTPNotFound()(env, self._start_response)
             return self._error_response(resp, env, start_response)
         tmp_env = self._get_escalated_env(env)

View File

@@ -72,7 +72,7 @@ if hash_conf.read('/etc/swift/swift.conf'):
     pass

 # Used when reading config values
-TRUE_VALUES = set(('true', '1', 'yes', 'True', 'Yes', 'on', 'On', 't', 'y'))
+TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y'))


 def validate_configuration():
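
With TRUE_VALUES now lower-case only, call sites throughout this commit
normalize config strings with .lower() before the membership test. A small
sketch of that pattern (the helper name here is hypothetical)::

    # Sketch only: the boolean-config pattern used across this commit.
    TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y'))

    def config_true(value, default='no'):
        # Case-insensitive check against TRUE_VALUES, with a fallback default.
        return (value if value is not None else default).lower() in TRUE_VALUES

    assert config_true('True')
    assert config_true('YES')
    assert not config_true('false')
    assert not config_true(None)          # falls back to the 'no' default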

View File

@@ -334,7 +334,7 @@ class ContainerController(object):
             container_list = '\n'.join(r[0] for r in container_list) + '\n'
         ret = Response(body=container_list, request=req, headers=resp_headers)
         ret.content_type = out_content_type
-        ret.charset = 'utf8'
+        ret.charset = 'utf-8'
         return ret

     def REPLICATE(self, req):

View File

@@ -339,7 +339,7 @@ class Controller(object):
             node['errors'] = self.app.error_suppression_limit + 1
             node['last_error'] = time.time()

-    def account_info(self, account):
+    def account_info(self, account, autocreate=False):
         """
         Get account information, and also verify that the account exists.
@@ -354,7 +354,7 @@
             result_code = self.app.memcache.get(cache_key)
             if result_code == 200:
                 return partition, nodes
-            elif result_code == 404:
+            elif result_code == 404 and not autocreate:
                 return None, None
         result_code = 0
         attempts_left = self.app.account_ring.replica_count
@@ -387,6 +387,17 @@
             except (Exception, TimeoutError):
                 self.exception_occurred(node, _('Account'),
                     _('Trying to get account info for %s') % path)
+        if result_code == 404 and autocreate:
+            if len(account) > MAX_ACCOUNT_NAME_LENGTH:
+                return None, None
+            headers = {'X-Timestamp': normalize_timestamp(time.time()),
+                       'X-Trans-Id': self.trans_id}
+            resp = self.make_requests(Request.blank('/v1' + path),
+                self.app.account_ring, partition, 'PUT',
+                path, [headers] * len(nodes))
+            if resp.status_int // 100 != 2:
+                raise Exception('Could not autocreate account %r' % path)
+            result_code = 200
         if self.app.memcache and result_code in (200, 404):
             if result_code == 200:
                 cache_timeout = self.app.recheck_account_existence
@@ -398,7 +409,7 @@
             return partition, nodes
         return None, None

-    def container_info(self, account, container):
+    def container_info(self, account, container, account_autocreate=False):
         """
         Get container information and thusly verify container existance.
         This will also make a call to account_info to verify that the
@@ -424,7 +435,7 @@
                 return partition, nodes, read_acl, write_acl
             elif status == 404:
                 return None, None, None, None
-        if not self.account_info(account)[1]:
+        if not self.account_info(account, autocreate=account_autocreate)[1]:
             return None, None, None, None
         result_code = 0
         read_acl = None
@@ -886,7 +897,8 @@ class ObjectController(Controller):
         if error_response:
             return error_response
         container_partition, containers, _junk, req.acl = \
-            self.container_info(self.account_name, self.container_name)
+            self.container_info(self.account_name, self.container_name,
+                account_autocreate=self.app.account_autocreate)
         if 'swift.authorize' in req.environ:
             aresp = req.environ['swift.authorize'](req)
             if aresp:
@@ -943,7 +955,8 @@
     def PUT(self, req):
         """HTTP PUT request handler."""
         container_partition, containers, _junk, req.acl = \
-            self.container_info(self.account_name, self.container_name)
+            self.container_info(self.account_name, self.container_name,
+                account_autocreate=self.app.account_autocreate)
         if 'swift.authorize' in req.environ:
             aresp = req.environ['swift.authorize'](req)
             if aresp:
@@ -1259,7 +1272,8 @@ class ContainerController(Controller):
             resp.body = 'Container name length of %d longer than %d' % \
                         (len(self.container_name), MAX_CONTAINER_NAME_LENGTH)
             return resp
-        account_partition, accounts = self.account_info(self.account_name)
+        account_partition, accounts = self.account_info(self.account_name,
+            autocreate=self.app.account_autocreate)
         if not accounts:
             return HTTPNotFound(request=req)
         container_partition, containers = self.app.container_ring.get_nodes(
@@ -1289,7 +1303,8 @@
             self.clean_acls(req) or check_metadata(req, 'container')
         if error_response:
             return error_response
-        account_partition, accounts = self.account_info(self.account_name)
+        account_partition, accounts = self.account_info(self.account_name,
+            autocreate=self.app.account_autocreate)
         if not accounts:
             return HTTPNotFound(request=req)
         container_partition, containers = self.app.container_ring.get_nodes(
@@ -1345,8 +1360,26 @@ class AccountController(Controller):
         """Handler for HTTP GET/HEAD requests."""
         partition, nodes = self.app.account_ring.get_nodes(self.account_name)
         shuffle(nodes)
-        return self.GETorHEAD_base(req, _('Account'), partition, nodes,
-            req.path_info.rstrip('/'), self.app.account_ring.replica_count)
+        resp = self.GETorHEAD_base(req, _('Account'), partition, nodes,
+            req.path_info.rstrip('/'), self.app.account_ring.replica_count)
+        if resp.status_int == 404 and self.app.account_autocreate:
+            if len(self.account_name) > MAX_ACCOUNT_NAME_LENGTH:
+                resp = HTTPBadRequest(request=req)
+                resp.body = 'Account name length of %d longer than %d' % \
+                            (len(self.account_name), MAX_ACCOUNT_NAME_LENGTH)
+                return resp
+            headers = {'X-Timestamp': normalize_timestamp(time.time()),
+                       'X-Trans-Id': self.trans_id}
+            resp = self.make_requests(
+                Request.blank('/v1/' + self.account_name),
+                self.app.account_ring, partition, 'PUT',
+                '/' + self.account_name, [headers] * len(nodes))
+            if resp.status_int // 100 != 2:
+                raise Exception('Could not autocreate account %r' %
+                                self.account_name)
+            resp = self.GETorHEAD_base(req, _('Account'), partition, nodes,
+                req.path_info.rstrip('/'), self.app.account_ring.replica_count)
+        return resp

     @public
     def PUT(self, req):
@@ -1386,9 +1419,23 @@
                        if value[0].lower().startswith('x-account-meta-'))
         if self.app.memcache:
             self.app.memcache.delete('account%s' % req.path_info.rstrip('/'))
-        return self.make_requests(req, self.app.account_ring,
-            account_partition, 'POST', req.path_info,
-            [headers] * len(accounts))
+        resp = self.make_requests(req, self.app.account_ring,
+            account_partition, 'POST', req.path_info,
+            [headers] * len(accounts))
+        if resp.status_int == 404 and self.app.account_autocreate:
+            if len(self.account_name) > MAX_ACCOUNT_NAME_LENGTH:
+                resp = HTTPBadRequest(request=req)
+                resp.body = 'Account name length of %d longer than %d' % \
+                            (len(self.account_name), MAX_ACCOUNT_NAME_LENGTH)
+                return resp
+            resp = self.make_requests(
+                Request.blank('/v1/' + self.account_name),
+                self.app.account_ring, account_partition, 'PUT',
+                '/' + self.account_name, [headers] * len(accounts))
+            if resp.status_int // 100 != 2:
+                raise Exception('Could not autocreate account %r' %
+                                self.account_name)
+        return resp

     @public
     def DELETE(self, req):
@@ -1432,7 +1479,7 @@ class BaseApplication(object):
         self.put_queue_depth = int(conf.get('put_queue_depth', 10))
         self.object_chunk_size = int(conf.get('object_chunk_size', 65536))
         self.client_chunk_size = int(conf.get('client_chunk_size', 65536))
-        self.log_headers = conf.get('log_headers') == 'True'
+        self.log_headers = conf.get('log_headers', 'no').lower() in TRUE_VALUES
         self.error_suppression_interval = \
             int(conf.get('error_suppression_interval', 60))
         self.error_suppression_limit = \
@@ -1442,7 +1489,7 @@
         self.recheck_account_existence = \
             int(conf.get('recheck_account_existence', 60))
         self.allow_account_management = \
-            conf.get('allow_account_management', 'false').lower() == 'true'
+            conf.get('allow_account_management', 'no').lower() in TRUE_VALUES
         self.object_post_as_copy = \
             conf.get('object_post_as_copy', 'true').lower() in TRUE_VALUES
         self.resellers_conf = ConfigParser()
@@ -1456,6 +1503,8 @@
         self.memcache = memcache
         mimetypes.init(mimetypes.knownfiles +
                        [os.path.join(swift_dir, 'mime.types')])
+        self.account_autocreate = \
+            conf.get('account_autocreate', 'no').lower() in TRUE_VALUES

     def get_controller(self, path):
         """

View File

@@ -58,7 +58,10 @@ class DatabaseStatsCollector(Daemon):
                          (self.stats_type, (time.time() - start) / 60))

     def get_data(self):
-        raise Exception('Not Implemented')
+        raise NotImplementedError('Subclasses must override')
+
+    def get_header(self):
+        raise NotImplementedError('Subclasses must override')

     def find_and_process(self):
         src_filename = time.strftime(self.filename_format)
@@ -70,6 +73,7 @@ class DatabaseStatsCollector(Daemon):
         hasher = hashlib.md5()
         try:
             with open(tmp_filename, 'wb') as statfile:
+                statfile.write(self.get_header())
                 for device in os.listdir(self.devices):
                     if self.mount_check and not check_mount(self.devices,
                                                             device):
@@ -122,6 +126,9 @@ class AccountStatsCollector(DatabaseStatsCollector):
                          info['bytes_used'])
         return line_data

+    def get_header(self):
+        return ''
+

 class ContainerStatsCollector(DatabaseStatsCollector):
     """
@@ -133,20 +140,38 @@ class ContainerStatsCollector(DatabaseStatsCollector):
         super(ContainerStatsCollector, self).__init__(stats_conf, 'container',
                                                       container_server_data_dir,
                                                       'container-stats-%Y%m%d%H_')
+        # webob calls title on all the header keys
+        self.metadata_keys = ['X-Container-Meta-%s' % mkey.strip().title()
+            for mkey in stats_conf.get('metadata_keys', '').split(',')
+            if mkey.strip()]
+
+    def get_header(self):
+        header = 'Account Hash,Container Name,Object Count,Bytes Used'
+        if self.metadata_keys:
+            xtra_headers = ','.join(self.metadata_keys)
+            header += ',%s' % xtra_headers
+        header += '\n'
+        return header

     def get_data(self, db_path):
         """
         Data for generated csv has the following columns:
         Account Hash, Container Name, Object Count, Bytes Used
+        This will just collect whether or not the metadata is set
+        using a 1 or ''.
         """
         line_data = None
         broker = ContainerBroker(db_path)
         if not broker.is_deleted():
-            info = broker.get_info()
+            info = broker.get_info(include_metadata=bool(self.metadata_keys))
             encoded_container_name = urllib.quote(info['container'])
-            line_data = '"%s","%s",%d,%d\n' % (
-                info['account'],
-                encoded_container_name,
-                info['object_count'],
-                info['bytes_used'])
+            line_data = '"%s","%s",%d,%d' % (
+                info['account'], encoded_container_name,
+                info['object_count'], info['bytes_used'])
+            if self.metadata_keys:
+                metadata_results = ','.join(
+                    [info['metadata'].get(mkey) and '1' or ''
+                     for mkey in self.metadata_keys])
+                line_data += ',%s' % metadata_results
+            line_data += '\n'
         return line_data
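
An illustrative sketch of the CSV this collector now emits (not part of this
commit; the hash, container name and metadata keys are assumed examples)::

    # Sketch only: with ``metadata_keys = web-index,web-error`` configured, the
    # container stats CSV gains one column per key; '1' means the key is set.
    header = ('Account Hash,Container Name,Object Count,Bytes Used,'
              'X-Container-Meta-Web-Index,X-Container-Meta-Web-Error\n')
    row = '"d41d8cd98f00b204e9800998ecf8427e","images",42,73741,1,\n'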

View File

@@ -69,7 +69,7 @@ class LogUploader(Daemon):
         self.internal_proxy = InternalProxy(proxy_server_conf)
         self.new_log_cutoff = int(cutoff or
             uploader_conf.get('new_log_cutoff', '7200'))
-        self.unlink_log = uploader_conf.get('unlink_log', 'True').lower() in \
+        self.unlink_log = uploader_conf.get('unlink_log', 'true').lower() in \
             utils.TRUE_VALUES
         self.filename_pattern = regex or \
             uploader_conf.get('source_filename_pattern',

View File

@@ -227,10 +227,10 @@ class TestAccount(Base):
             headers = dict(self.env.conn.response.getheaders())
             if format == 'json':
                 self.assertEquals(headers['content-type'],
-                                  'application/json; charset=utf8')
+                                  'application/json; charset=utf-8')
             elif format == 'xml':
                 self.assertEquals(headers['content-type'],
-                                  'application/xml; charset=utf8')
+                                  'application/xml; charset=utf-8')

     def testListingLimit(self):
         limit = 10000
@@ -1355,10 +1355,10 @@ class TestFile(Base):
             headers = dict(self.env.conn.response.getheaders())
             if format == 'json':
                 self.assertEquals(headers['content-type'],
-                                  'application/json; charset=utf8')
+                                  'application/json; charset=utf-8')
             elif format == 'xml':
                 self.assertEquals(headers['content-type'],
-                                  'application/xml; charset=utf8')
+                                  'application/xml; charset=utf-8')

             lm_diff = max([f['last_modified'] for f in files]) - \
                 min([f['last_modified'] for f in files])

View File

@@ -388,6 +388,7 @@ class TestAccountController(unittest.TestCase):
         self.assertEquals(resp.status_int, 200)
         self.assertEquals(resp.body.strip().split('\n'), ['c1', 'c2'])
         self.assertEquals(resp.content_type, 'text/plain')
+        self.assertEquals(resp.charset, 'utf-8')

     def test_GET_with_containers_json(self):
         req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
@@ -436,6 +437,7 @@
                           [{'count': 1, 'bytes': 2, 'name': 'c1'},
                            {'count': 3, 'bytes': 4, 'name': 'c2'}])
         self.assertEquals(resp.content_type, 'application/json')
+        self.assertEquals(resp.charset, 'utf-8')

     def test_GET_with_containers_xml(self):
         req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
@@ -529,6 +531,7 @@
         self.assertEquals(node.firstChild.nodeValue, '3')
         node = [n for n in container if n.nodeName == 'bytes'][0]
         self.assertEquals(node.firstChild.nodeValue, '4')
+        self.assertEquals(resp.charset, 'utf-8')

     def test_GET_limit_marker_plain(self):
         req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',

View File

@@ -187,7 +187,7 @@ class FakeApp(object):
             headers.update({'X-Container-Object-Count': '11',
                             'X-Container-Bytes-Used': '73741',
                             'X-Container-Read': '.r:*',
-                            'Content-Type': 'application/json; charset=utf8'})
+                            'Content-Type': 'application/json; charset=utf-8'})
             body = '''
                 [{"name":"subdir/1.txt",
                   "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
@@ -204,14 +204,14 @@ class FakeApp(object):
             headers.update({'X-Container-Object-Count': '11',
                             'X-Container-Bytes-Used': '73741',
                             'X-Container-Read': '.r:*',
-                            'Content-Type': 'application/json; charset=utf8'})
+                            'Content-Type': 'application/json; charset=utf-8'})
             body = '[]'
         elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
                 'limit=1&format=json&delimiter=/&limit=1&prefix=subdirz/':
             headers.update({'X-Container-Object-Count': '11',
                             'X-Container-Bytes-Used': '73741',
                             'X-Container-Read': '.r:*',
-                            'Content-Type': 'application/json; charset=utf8'})
+                            'Content-Type': 'application/json; charset=utf-8'})
             body = '''
                 [{"name":"subdirz/1.txt",
                   "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
@@ -224,7 +224,7 @@ class FakeApp(object):
                             'X-Container-Bytes-Used': '73741',
                             'X-Container-Read': '.r:*',
                             'X-Container-Web-Listings': 't',
-                            'Content-Type': 'application/json; charset=utf8'})
+                            'Content-Type': 'application/json; charset=utf-8'})
             body = '''
                 [{"name":"subdir/1.txt",
                   "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
@@ -236,7 +236,7 @@
         elif 'format=json' in env['QUERY_STRING']:
             headers.update({'X-Container-Object-Count': '11',
                             'X-Container-Bytes-Used': '73741',
-                            'Content-Type': 'application/json; charset=utf8'})
+                            'Content-Type': 'application/json; charset=utf-8'})
             body = '''
                 [{"name":"401error.html",
                   "hash":"893f8d80692a4d3875b45be8f152ad18", "bytes":110,
@@ -283,7 +283,7 @@ class FakeApp(object):
         else:
             headers.update({'X-Container-Object-Count': '11',
                             'X-Container-Bytes-Used': '73741',
-                            'Content-Type': 'text/plain; charset=utf8'})
+                            'Content-Type': 'text/plain; charset=utf-8'})
             body = '\n'.join(['401error.html', '404error.html', 'index.html',
                               'listing.css', 'one.txt', 'subdir/1.txt',
                               'subdir/2.txt', 'subdir/omgomg.txt', 'subdir2',

View File

@@ -768,6 +768,10 @@ log_name = yarr'''
         self.assertEquals(utils.human_readable(1237940039285380274899124224),
                           '1024Yi')

+    def test_TRUE_VALUES(self):
+        for v in utils.TRUE_VALUES:
+            self.assertEquals(v, v.lower())
+

 if __name__ == '__main__':
     unittest.main()

View File

@@ -514,6 +514,7 @@ class TestContainerController(unittest.TestCase):
         resp = self.controller.GET(req)
         self.assertEquals(resp.content_type, 'application/json')
         self.assertEquals(eval(resp.body), json_body)
+        self.assertEquals(resp.charset, 'utf-8')

         for accept in ('application/json', 'application/json;q=1.0,*/*;q=0.9',
                        '*/*;q=0.9,application/json;q=1.0', 'application/*'):
@@ -552,6 +553,7 @@
         resp = self.controller.GET(req)
         self.assertEquals(resp.content_type, 'text/plain')
         self.assertEquals(resp.body, plain_body)
+        self.assertEquals(resp.charset, 'utf-8')

         for accept in ('', 'text/plain', 'application/xml;q=0.8,*/*;q=0.9',
                        '*/*;q=0.9,application/xml;q=0.8', '*/*',
@@ -609,6 +611,7 @@
         resp = self.controller.GET(req)
         self.assertEquals(resp.content_type, 'application/xml')
         self.assertEquals(resp.body, xml_body)
+        self.assertEquals(resp.charset, 'utf-8')

         for xml_accept in ('application/xml', 'application/xml;q=1.0,*/*;q=0.9',
                            '*/*;q=0.9,application/xml;q=1.0', 'application/xml,text/xml'):

View File

@@ -398,6 +398,48 @@ class TestController(unittest.TestCase):
             test(404, 507, 503)
             test(503, 503, 503)

+    def test_account_info_account_autocreate(self):
+        with save_globals():
+            self.memcache.store = {}
+            proxy_server.http_connect = \
+                fake_http_connect(404, 404, 404, 201, 201, 201)
+            partition, nodes = \
+                self.controller.account_info(self.account, autocreate=False)
+            self.check_account_info_return(partition, nodes, is_none=True)
+            self.memcache.store = {}
+            proxy_server.http_connect = \
+                fake_http_connect(404, 404, 404, 201, 201, 201)
+            partition, nodes = \
+                self.controller.account_info(self.account)
+            self.check_account_info_return(partition, nodes, is_none=True)
+            self.memcache.store = {}
+            proxy_server.http_connect = \
+                fake_http_connect(404, 404, 404, 201, 201, 201)
+            partition, nodes = \
+                self.controller.account_info(self.account, autocreate=True)
+            self.check_account_info_return(partition, nodes)
+            self.memcache.store = {}
+            proxy_server.http_connect = \
+                fake_http_connect(404, 404, 404, 503, 201, 201)
+            partition, nodes = \
+                self.controller.account_info(self.account, autocreate=True)
+            self.check_account_info_return(partition, nodes)
+            self.memcache.store = {}
+            proxy_server.http_connect = \
+                fake_http_connect(404, 404, 404, 503, 201, 503)
+            exc = None
+            try:
+                partition, nodes = \
+                    self.controller.account_info(self.account, autocreate=True)
+            except Exception, err:
+                exc = err
+            self.assertEquals(str(exc),
+                              "Could not autocreate account '/some_account'")
+
     def check_container_info_return(self, ret, is_none=False):
         if is_none:
             partition, nodes, read_acl, write_acl = None, None, None, None
@@ -411,7 +453,7 @@
         self.assertEqual(write_acl, ret[3])

     def test_container_info_invalid_account(self):
-        def account_info(self, account):
+        def account_info(self, account, autocreate=False):
             return None, None

         with save_globals():
@@ -422,7 +464,7 @@
     # tests if 200 is cached and used
     def test_container_info_200(self):
-        def account_info(self, account):
+        def account_info(self, account, autocreate=False):
             return True, True

         with save_globals():
@@ -448,7 +490,7 @@
     # tests if 404 is cached and used
     def test_container_info_404(self):
-        def account_info(self, account):
+        def account_info(self, account, autocreate=False):
             return True, True

         with save_globals():
@@ -3348,6 +3390,16 @@ class TestAccountController(unittest.TestCase):
             self.app.memcache = FakeMemcacheReturnsNone()
             self.assert_status_map(controller.GET, (404, 404, 404), 404)

+    def test_GET_autocreate(self):
+        with save_globals():
+            controller = proxy_server.AccountController(self.app, 'account')
+            self.app.memcache = FakeMemcacheReturnsNone()
+            self.assert_status_map(controller.GET,
+                (404, 404, 404, 201, 201, 201, 204), 404)
+            controller.app.account_autocreate = True
+            self.assert_status_map(controller.GET,
+                (404, 404, 404, 201, 201, 201, 204), 204)
+
     def test_HEAD(self):
         with save_globals():
             controller = proxy_server.AccountController(self.app, 'account')
@@ -3366,6 +3418,26 @@
             self.assert_status_map(controller.HEAD, (404, 503, 503), 503)
             self.assert_status_map(controller.HEAD, (404, 204, 503), 204)

+    def test_HEAD_autocreate(self):
+        with save_globals():
+            controller = proxy_server.AccountController(self.app, 'account')
+            self.app.memcache = FakeMemcacheReturnsNone()
+            self.assert_status_map(controller.HEAD,
+                (404, 404, 404, 201, 201, 201, 204), 404)
+            controller.app.account_autocreate = True
+            self.assert_status_map(controller.HEAD,
+                (404, 404, 404, 201, 201, 201, 204), 204)
+
+    def test_POST_autocreate(self):
+        with save_globals():
+            controller = proxy_server.AccountController(self.app, 'account')
+            self.app.memcache = FakeMemcacheReturnsNone()
+            self.assert_status_map(controller.POST,
+                (404, 404, 404, 201, 201, 201), 404)
+            controller.app.account_autocreate = True
+            self.assert_status_map(controller.POST,
+                (404, 404, 404, 201, 201, 201), 201)
+
     def test_connection_refused(self):
         self.app.account_ring.get_nodes('account')
         for dev in self.app.account_ring.devs.values():

View File

@@ -66,6 +66,16 @@ class TestDbStats(unittest.TestCase):
         info = stat.get_data("%s/con.db" % self.containers)
         self.assertEquals('''"test_acc","test_con",1,10\n''', info)

+    def test_container_stat_get_metadata(self):
+        stat = db_stats_collector.ContainerStatsCollector(self.conf)
+        container_db = ContainerBroker("%s/con.db" % self.containers,
+                                       account='test_acc', container='test_con')
+        container_db.initialize()
+        container_db.put_object('test_obj', time.time(), 10, 'text', 'faketag')
+        info = stat.get_data("%s/con.db" % self.containers)
+        self.assertEquals('''"test_acc","test_con",1,10\n''', info)
+        container_db.update_metadata({'test1': ('val', 1000)})
+
     def _gen_account_stat(self):
         stat = db_stats_collector.AccountStatsCollector(self.conf)
         output_data = set()
@@ -83,20 +93,61 @@ class TestDbStats(unittest.TestCase):
         self.assertEqual(len(output_data), 10)
         return stat, output_data

-    def _gen_container_stat(self):
+    def _drop_metadata_col(self, broker, acc_name):
+        broker.conn.execute('''drop table container_stat''')
+        broker.conn.executescript("""
+            CREATE TABLE container_stat (
+                account TEXT DEFAULT '%s',
+                container TEXT DEFAULT 'test_con',
+                created_at TEXT,
+                put_timestamp TEXT DEFAULT '0',
+                delete_timestamp TEXT DEFAULT '0',
+                object_count INTEGER,
+                bytes_used INTEGER,
+                reported_put_timestamp TEXT DEFAULT '0',
+                reported_delete_timestamp TEXT DEFAULT '0',
+                reported_object_count INTEGER DEFAULT 0,
+                reported_bytes_used INTEGER DEFAULT 0,
+                hash TEXT default '00000000000000000000000000000000',
+                id TEXT,
+                status TEXT DEFAULT '',
+                status_changed_at TEXT DEFAULT '0'
+            );
+            INSERT INTO container_stat (object_count, bytes_used)
+                VALUES (1, 10);
+            """ % acc_name)
+
+    def _gen_container_stat(self, set_metadata=False, drop_metadata=False):
+        if set_metadata:
+            self.conf['metadata_keys'] = 'test1,test2'
+        # webob runs title on all headers
         stat = db_stats_collector.ContainerStatsCollector(self.conf)
         output_data = set()
         for i in range(10):
-            account_db = ContainerBroker(
+            cont_db = ContainerBroker(
                 "%s/container-stats-201001010%s-%s.db" % (self.containers, i,
                                                           uuid.uuid4().hex),
                 account='test_acc_%s' % i, container='test_con')
-            account_db.initialize()
-            account_db.put_object('test_obj', time.time(), 10, 'text',
-                                  'faketag')
+            cont_db.initialize()
+            cont_db.put_object('test_obj', time.time(), 10, 'text', 'faketag')
+            metadata_output = ''
+            if set_metadata:
+                if i % 2:
+                    cont_db.update_metadata({'X-Container-Meta-Test1': (5, 1)})
+                    metadata_output = ',1,'
+                else:
+                    cont_db.update_metadata({'X-Container-Meta-Test2': (7, 2)})
+                    metadata_output = ',,1'
             # this will "commit" the data
-            account_db.get_info()
-            output_data.add('''"test_acc_%s","test_con",1,10''' % i),
+            cont_db.get_info()
+            if drop_metadata:
+                output_data.add('''"test_acc_%s","test_con",1,10,,''' % i)
+            else:
+                output_data.add('''"test_acc_%s","test_con",1,10%s''' %
+                                (i, metadata_output))
+            if drop_metadata:
+                self._drop_metadata_col(cont_db, 'test_acc_%s' % i)

         self.assertEqual(len(output_data), 10)
         return stat, output_data
@@ -112,6 +163,35 @@ class TestDbStats(unittest.TestCase):
         self.assertEqual(len(output_data), 0)

+    def test_account_stat_run_once_container_metadata(self):
+        stat, output_data = self._gen_container_stat(set_metadata=True)
+        stat.run_once()
+        stat_file = os.listdir(self.log_dir)[0]
+        with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
+            headers = stat_handle.readline()
+            self.assert_(headers.startswith('Account Hash,Container Name,'))
+            for i in range(10):
+                data = stat_handle.readline()
+                output_data.discard(data.strip())
+        self.assertEqual(len(output_data), 0)
+
+    def test_account_stat_run_once_container_no_metadata(self):
+        stat, output_data = self._gen_container_stat(set_metadata=True,
+                                                     drop_metadata=True)
+        stat.run_once()
+        stat_file = os.listdir(self.log_dir)[0]
+        with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
+            headers = stat_handle.readline()
+            self.assert_(headers.startswith('Account Hash,Container Name,'))
+            for i in range(10):
+                data = stat_handle.readline()
+                output_data.discard(data.strip())
+        self.assertEqual(len(output_data), 0)
+
     def test_account_stat_run_once_both(self):
         acc_stat, acc_output_data = self._gen_account_stat()
         con_stat, con_output_data = self._gen_container_stat()
@@ -128,6 +208,8 @@ class TestDbStats(unittest.TestCase):
         con_stat.run_once()
         stat_file = [f for f in os.listdir(self.log_dir) if f != stat_file][0]
         with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
+            headers = stat_handle.readline()
+            self.assert_(headers.startswith('Account Hash,Container Name,'))
             for i in range(10):
                 data = stat_handle.readline()
                 con_output_data.discard(data.strip())
@@ -143,7 +225,8 @@ class TestDbStats(unittest.TestCase):
     def test_not_implemented(self):
         db_stat = db_stats_collector.DatabaseStatsCollector(self.conf,
             'account', 'test_dir', 'stats-%Y%m%d%H_')
-        self.assertRaises(Exception, db_stat.get_data)
+        self.assertRaises(NotImplementedError, db_stat.get_data)
+        self.assertRaises(NotImplementedError, db_stat.get_header)

     def test_not_not_mounted(self):
         self.conf['mount_check'] = 'true'