#!/usr/bin/python -u
# Copyright (c) 2010 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
# Try to use installed swift.common.client...
from swift.common.client import get_auth, ClientException, Connection
except ImportError:
# But if not installed, use an included copy.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Inclusion of swift.common.client
"""
Cloud Files client library used internally
"""
import socket
from cStringIO import StringIO
from httplib import HTTPConnection, HTTPException, HTTPSConnection
from re import compile, DOTALL
from tokenize import generate_tokens, STRING, NAME, OP
from urllib import quote as _quote, unquote
from urlparse import urlparse, urlunparse
try:
from eventlet import sleep
    except ImportError:
from time import sleep
def quote(value, safe='/'):
"""
Patched version of urllib.quote that encodes utf8 strings before quoting
"""
if isinstance(value, unicode):
value = value.encode('utf8')
return _quote(value, safe)
# look for a real json parser first
try:
# simplejson is popular and pretty good
from simplejson import loads as json_loads
except ImportError:
try:
# 2.6 will have a json module in the stdlib
from json import loads as json_loads
except ImportError:
# fall back on local parser otherwise
comments = compile(r'/\*.*\*/|//[^\r\n]*', DOTALL)
def json_loads(string):
'''
Fairly competent json parser exploiting the python tokenizer and
eval(). -- From python-cloudfiles
_loads(serialized_json) -> object
'''
try:
res = []
consts = {'true': True, 'false': False, 'null': None}
string = '(' + comments.sub('', string) + ')'
for type, val, _, _, _ in \
generate_tokens(StringIO(string).readline):
if (type == OP and val not in '[]{}:,()-') or \
(type == NAME and val not in consts):
raise AttributeError()
elif type == STRING:
res.append('u')
res.append(val.replace('\\/', '/'))
else:
res.append(val)
return eval(''.join(res), {}, consts)
except:
raise AttributeError()
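    # Illustrative example of the fallback parser above (not part of the
    # original source; the sample listing is made up):
    #   json_loads('[{"name": "photos", "count": 2, "bytes": 1024}]')
    # returns a list containing one dict with u'name', u'count', and
    # u'bytes' keys; input outside this bracket/string/number subset
    # raises AttributeError.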
class ClientException(Exception):
def __init__(self, msg, http_scheme='', http_host='', http_port='',
http_path='', http_query='', http_status=0, http_reason='',
http_device=''):
Exception.__init__(self, msg)
self.msg = msg
self.http_scheme = http_scheme
self.http_host = http_host
self.http_port = http_port
self.http_path = http_path
self.http_query = http_query
self.http_status = http_status
self.http_reason = http_reason
self.http_device = http_device
def __str__(self):
a = self.msg
b = ''
if self.http_scheme:
b += '%s://' % self.http_scheme
if self.http_host:
b += self.http_host
if self.http_port:
b += ':%s' % self.http_port
if self.http_path:
b += self.http_path
if self.http_query:
b += '?%s' % self.http_query
if self.http_status:
if b:
b = '%s %s' % (b, self.http_status)
else:
b = str(self.http_status)
if self.http_reason:
if b:
b = '%s %s' % (b, self.http_reason)
else:
b = '- %s' % self.http_reason
if self.http_device:
if b:
b = '%s: device %s' % (b, self.http_device)
else:
b = 'device %s' % self.http_device
return b and '%s: %s' % (a, b) or a
def http_connection(url):
"""
Make an HTTPConnection or HTTPSConnection
:param url: url to connect to
:returns: tuple of (parsed url, connection object)
:raises ClientException: Unable to handle protocol scheme
"""
parsed = urlparse(url)
if parsed.scheme == 'http':
conn = HTTPConnection(parsed.netloc)
elif parsed.scheme == 'https':
conn = HTTPSConnection(parsed.netloc)
else:
raise ClientException('Cannot handle protocol scheme %s for url %s' %
(parsed.scheme, repr(url)))
return parsed, conn
def get_auth(url, user, key, snet=False):
"""
Get authentication credentials
:param url: authentication URL
:param user: user to auth as
        :param key: key or password for auth
        :param snet: use SERVICENET internal network (default is False)
        :returns: tuple of (storage URL, auth token)
:raises ClientException: HTTP GET request to auth URL failed
"""
parsed, conn = http_connection(url)
conn.request('GET', parsed.path, '',
{'X-Auth-User': user, 'X-Auth-Key': key})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Auth GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_status=resp.status,
http_reason=resp.reason)
url = resp.getheader('x-storage-url')
if snet:
parsed = list(urlparse(url))
# Second item in the list is the netloc
parsed[1] = 'snet-' + parsed[1]
url = urlunparse(parsed)
return url, resp.getheader('x-storage-token',
resp.getheader('x-auth-token'))
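    # Example usage (illustrative; the auth URL, user, and key below are
    # placeholders, not values from this file):
    #   url, token = get_auth('https://auth.example.com/v1.0',
    #                         'account:user', 'secretkey')
    # url is the storage URL and token the X-Auth-Token value that the
    # other module functions expect as their first two arguments.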
def get_account(url, token, marker=None, limit=None, prefix=None,
http_conn=None, full_listing=False):
"""
Get a listing of containers for the account.
:param url: storage URL
:param token: auth token
:param marker: marker query
:param limit: limit query
:param prefix: prefix query
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param full_listing: if True, return a full listing, else returns a max
of 10000 listings
        :returns: a list of containers for the account
:raises ClientException: HTTP GET request failed
"""
if not http_conn:
http_conn = http_connection(url)
if full_listing:
rv = []
listing = get_account(url, token, marker, limit, prefix, http_conn)
while listing:
rv.extend(listing)
marker = listing[-1]['name']
listing = get_account(url, token, marker, limit, prefix, http_conn)
return rv
parsed, conn = http_conn
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
conn.request('GET', '%s?%s' % (parsed.path, qs), '',
{'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Account GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_query=qs, http_status=resp.status,
http_reason=resp.reason)
if resp.status == 204:
resp.read()
return []
return json_loads(resp.read())
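    # Example usage (illustrative; url and token as returned by get_auth
    # above). With full_listing=True the function pages through the account
    # by re-requesting with the last container name as the marker, so more
    # than the 10000-item-per-request limit can be returned:
    #   containers = get_account(url, token, full_listing=True)
    #   names = [c['name'] for c in containers]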
def head_account(url, token, http_conn=None):
"""
Get account stats.
:param url: storage URL
:param token: auth token
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: a tuple of (container count, object count, bytes used)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Account HEAD failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_status=resp.status,
http_reason=resp.reason)
return int(resp.getheader('x-account-container-count', 0)), \
int(resp.getheader('x-account-object-count', 0)), \
int(resp.getheader('x-account-bytes-used', 0))
def get_container(url, token, container, marker=None, limit=None,
prefix=None, delimiter=None, http_conn=None,
full_listing=False):
"""
Get a listing of objects for the container.
:param url: storage URL
:param token: auth token
:param container: container name to get a listing for
:param marker: marker query
:param limit: limit query
:param prefix: prefix query
        :param delimiter: string to delimit the queries on
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param full_listing: if True, return a full listing, else returns a max
of 10000 listings
:returns: a list of objects
:raises ClientException: HTTP GET request failed
"""
if not http_conn:
http_conn = http_connection(url)
if full_listing:
rv = []
listing = get_container(url, token, container, marker, limit, prefix,
delimiter, http_conn)
while listing:
rv.extend(listing)
if not delimiter:
marker = listing[-1]['name']
else:
marker = listing[-1].get('name', listing[-1].get('subdir'))
listing = get_container(url, token, container, marker, limit,
prefix, delimiter, http_conn)
return rv
parsed, conn = http_conn
path = '%s/%s' % (parsed.path, quote(container))
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
if delimiter:
qs += '&delimiter=%s' % quote(delimiter)
conn.request('GET', '%s?%s' % (path, qs), '', {'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Container GET failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_query=qs,
http_status=resp.status, http_reason=resp.reason)
if resp.status == 204:
resp.read()
return []
return json_loads(resp.read())
def head_container(url, token, container, http_conn=None):
"""
Get container stats.
:param url: storage URL
:param token: auth token
:param container: container name to get stats for
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: a tuple of (object count, bytes used)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
conn.request('HEAD', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container HEAD failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
return int(resp.getheader('x-container-object-count', 0)), \
int(resp.getheader('x-container-bytes-used', 0))
def put_container(url, token, container, http_conn=None):
"""
Create a container
:param url: storage URL
:param token: auth token
:param container: container name to create
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP PUT request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
conn.request('PUT', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container PUT failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
def delete_container(url, token, container, http_conn=None):
"""
Delete a container
:param url: storage URL
:param token: auth token
:param container: container name to delete
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP DELETE request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
conn.request('DELETE', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container DELETE failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
def get_object(url, token, container, name, http_conn=None,
resp_chunk_size=None):
"""
Get an object
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to get
:param http_conn: HTTP connection object (If None, it will create the
conn object)
        :param resp_chunk_size: if defined, chunk size of data to read; the
                                object body is then returned as an iterator
        :returns: a tuple of (content type, content length, last modified,
                  etag, dictionary of metadata, object body)
:raises ClientException: HTTP GET request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
conn.request('GET', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Object GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
metadata = {}
for key, value in resp.getheaders():
if key.lower().startswith('x-object-meta-'):
metadata[unquote(key[len('x-object-meta-'):])] = unquote(value)
if resp_chunk_size:
def _object_body():
buf = resp.read(resp_chunk_size)
while buf:
yield buf
buf = resp.read(resp_chunk_size)
object_body = _object_body()
else:
object_body = resp.read()
return resp.getheader('content-type'), \
int(resp.getheader('content-length', 0)), \
resp.getheader('last-modified'), \
resp.getheader('etag').strip('"'), \
metadata, \
object_body
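    # Example usage (illustrative; container and object names are
    # placeholders). With resp_chunk_size set, the body is returned as a
    # generator so large objects can be streamed instead of read into
    # memory at once:
    #   ctype, length, modified, etag, meta, body = get_object(
    #       url, token, 'photos', 'big.iso', resp_chunk_size=65536)
    #   for chunk in body:
    #       pass  # e.g. write chunk to a local file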
def head_object(url, token, container, name, http_conn=None):
"""
Get object info
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to get info for
:param http_conn: HTTP connection object (If None, it will create the
conn object)
        :returns: a tuple of (content type, content length, last modified, etag,
dictionary of metadata)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
conn.request('HEAD', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object HEAD failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
metadata = {}
for key, value in resp.getheaders():
if key.lower().startswith('x-object-meta-'):
metadata[unquote(key[len('x-object-meta-'):])] = unquote(value)
return resp.getheader('content-type'), \
int(resp.getheader('content-length', 0)), \
resp.getheader('last-modified'), \
resp.getheader('etag').strip('"'), \
metadata
def put_object(url, token, container, name, contents, metadata={},
content_length=None, etag=None, chunk_size=65536,
content_type=None, http_conn=None):
"""
Put an object
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to put
:param contents: file like object to read object data from
:param metadata: dictionary of object metadata
:param content_length: value to send as content-length header
:param etag: etag of contents
:param chunk_size: chunk size of data to write
:param content_type: value to send as content-type header
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: etag from server response
:raises ClientException: HTTP PUT request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
headers = {'X-Auth-Token': token}
for key, value in metadata.iteritems():
headers['X-Object-Meta-%s' % quote(key)] = quote(value)
if etag:
headers['ETag'] = etag.strip('"')
if content_length is not None:
headers['Content-Length'] = str(content_length)
if content_type is not None:
headers['Content-Type'] = content_type
if not contents:
headers['Content-Length'] = '0'
if hasattr(contents, 'read'):
conn.putrequest('PUT', path)
for header, value in headers.iteritems():
conn.putheader(header, value)
if not content_length:
conn.putheader('Transfer-Encoding', 'chunked')
conn.endheaders()
chunk = contents.read(chunk_size)
while chunk:
if not content_length:
conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
else:
conn.send(chunk)
chunk = contents.read(chunk_size)
if not content_length:
conn.send('0\r\n\r\n')
else:
conn.request('PUT', path, contents, headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object PUT failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
return resp.getheader('etag').strip('"')
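    # Example usage (illustrative; paths and names are placeholders). When
    # contents is a file-like object and no content_length is given, the
    # data is streamed with chunked transfer encoding as implemented above:
    #   etag = put_object(url, token, 'photos', 'big.iso',
    #                     open('/tmp/big.iso', 'rb'),
    #                     metadata={'mtime': '1279118318.0'})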
def post_object(url, token, container, name, metadata, http_conn=None):
"""
Change object metadata
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to change
:param metadata: dictionary of object metadata
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP POST request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
headers = {'X-Auth-Token': token}
for key, value in metadata.iteritems():
headers['X-Object-Meta-%s' % quote(key)] = quote(value)
conn.request('POST', path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object POST failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
def delete_object(url, token, container, name, http_conn=None):
"""
Delete object
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to delete
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP DELETE request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
conn.request('DELETE', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object DELETE failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
class Connection(object):
"""Convenience class to make requests that will also retry the request"""
def __init__(self, authurl, user, key, retries=5, preauthurl=None,
preauthtoken=None, snet=False):
"""
            :param authurl: authentication URL
:param user: user name to authenticate as
:param key: key/password to authenticate with
:param retries: Number of times to retry the request before failing
:param preauthurl: storage URL (if you have already authenticated)
:param preauthtoken: authentication token (if you have already
authenticated)
            :param snet: use SERVICENET internal network (default is False)
"""
self.authurl = authurl
self.user = user
self.key = key
self.retries = retries
self.http_conn = None
self.url = preauthurl
self.token = preauthtoken
self.attempts = 0
self.snet = snet
def _retry(self, func, *args, **kwargs):
kwargs['http_conn'] = self.http_conn
self.attempts = 0
backoff = 1
while self.attempts <= self.retries:
self.attempts += 1
try:
if not self.url or not self.token:
self.url, self.token = \
get_auth(self.authurl, self.user, self.key, snet=self.snet)
self.http_conn = None
if not self.http_conn:
self.http_conn = http_connection(self.url)
kwargs['http_conn'] = self.http_conn
rv = func(self.url, self.token, *args, **kwargs)
return rv
except (socket.error, HTTPException):
if self.attempts > self.retries:
raise
self.http_conn = None
except ClientException, err:
if self.attempts > self.retries:
raise
if err.http_status == 401:
self.url = self.token = None
if self.attempts > 1:
raise
elif 500 <= err.http_status <= 599:
pass
else:
raise
sleep(backoff)
backoff *= 2
def head_account(self):
"""Wrapper for head_account"""
return self._retry(head_account)
def get_account(self, marker=None, limit=None, prefix=None,
full_listing=False):
"""Wrapper for get_account"""
# TODO: With full_listing=True this will restart the entire listing
# with each retry. Need to make a better version that just retries
# where it left off.
return self._retry(get_account, marker=marker, limit=limit,
prefix=prefix, full_listing=full_listing)
def head_container(self, container):
"""Wrapper for head_container"""
return self._retry(head_container, container)
def get_container(self, container, marker=None, limit=None, prefix=None,
delimiter=None, full_listing=False):
"""Wrapper for get_container"""
# TODO: With full_listing=True this will restart the entire listing
# with each retry. Need to make a better version that just retries
# where it left off.
return self._retry(get_container, container, marker=marker,
limit=limit, prefix=prefix, delimiter=delimiter,
full_listing=full_listing)
def put_container(self, container):
"""Wrapper for put_container"""
return self._retry(put_container, container)
def delete_container(self, container):
"""Wrapper for delete_container"""
return self._retry(delete_container, container)
def head_object(self, container, obj):
"""Wrapper for head_object"""
return self._retry(head_object, container, obj)
def get_object(self, container, obj, resp_chunk_size=None):
"""Wrapper for get_object"""
return self._retry(get_object, container, obj,
resp_chunk_size=resp_chunk_size)
def put_object(self, container, obj, contents, metadata={},
content_length=None, etag=None, chunk_size=65536,
content_type=None):
"""Wrapper for put_object"""
return self._retry(put_object, container, obj, contents,
metadata=metadata, content_length=content_length, etag=etag,
chunk_size=chunk_size, content_type=content_type)
def post_object(self, container, obj, metadata):
"""Wrapper for post_object"""
return self._retry(post_object, container, obj, metadata)
def delete_object(self, container, obj):
"""Wrapper for delete_object"""
return self._retry(delete_object, container, obj)
# End inclusion of swift.common.client
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
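# Example usage of the Connection convenience class (illustrative; the auth
# URL and credentials are placeholders). Connection re-authenticates once on
# a 401 and retries 5xx and socket errors up to `retries` times with
# exponential backoff, which is why the st_* commands below use it rather
# than calling the module-level functions directly:
#   conn = Connection('https://auth.example.com/v1.0', 'account:user',
#                     'secretkey', retries=5)
#   conn.put_container('photos')
#   conn.put_object('photos', 'hello.txt', 'hello world',
#                   content_length=11, content_type='text/plain')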
from errno import EEXIST, ENOENT
from hashlib import md5
from optparse import OptionParser
from os import environ, listdir, makedirs, utime
from os.path import basename, dirname, getmtime, getsize, isdir, join
from Queue import Empty, Queue
from sys import argv, exit, stderr
from threading import enumerate as threading_enumerate, Thread
from time import sleep
def mkdirs(path):
try:
makedirs(path)
except OSError, err:
if err.errno != EEXIST:
raise
class QueueFunctionThread(Thread):
def __init__(self, queue, func, *args, **kwargs):
""" Calls func for each item in queue; func is called with a queued
item as the first arg followed by *args and **kwargs. Use the abort
attribute to have the thread empty the queue (without processing)
and exit. """
Thread.__init__(self)
self.abort = False
self.queue = queue
self.func = func
self.args = args
self.kwargs = kwargs
def run(self):
while True:
try:
item = self.queue.get_nowait()
if not self.abort:
self.func(item, *self.args, **self.kwargs)
self.queue.task_done()
except Empty:
if self.abort:
break
sleep(0.01)
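# Sketch of the worker pattern the st_* commands below build on
# (illustrative; `work_queue` and `do_work` are placeholder names):
#   work_queue = Queue(10000)
#   workers = [QueueFunctionThread(work_queue, do_work) for _ in xrange(10)]
#   for w in workers:
#       w.start()
#   # ... put items on work_queue ...
#   while not work_queue.empty():
#       sleep(0.01)
#   for w in workers:
#       w.abort = True
#       while w.isAlive():
#           w.join(0.01)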
st_delete_help = '''
delete --all OR delete container [object] [object] ...
Deletes everything in the account (with --all), or everything in a
container, or a list of objects depending on the args given.'''.strip('\n')
def st_delete(options, args):
if (not args and not options.yes_all) or (args and options.yes_all):
options.error_queue.put('Usage: %s [options] %s' %
(basename(argv[0]), st_delete_help))
return
object_queue = Queue(10000)
def _delete_object((container, obj), conn):
try:
conn.delete_object(container, obj)
if options.verbose:
path = options.yes_all and join(container, obj) or obj
if path[:1] in ('/', '\\'):
path = path[1:]
options.print_queue.put(path)
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Object %s not found' %
repr('%s/%s' % (container, obj)))
container_queue = Queue(10000)
def _delete_container(container, conn):
try:
marker = ''
while True:
objects = [o['name'] for o in
conn.get_container(container, marker=marker)]
if not objects:
break
for obj in objects:
object_queue.put((container, obj))
marker = objects[-1]
while not object_queue.empty():
sleep(0.01)
attempts = 1
while True:
try:
conn.delete_container(container)
break
except ClientException, err:
if err.http_status != 409:
raise
if attempts > 10:
raise
attempts += 1
sleep(1)
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Container %s not found' % repr(container))
url, token = get_auth(options.auth, options.user, options.key, snet=options.snet)
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url,
preauthtoken=token, snet=options.snet)
object_threads = [QueueFunctionThread(object_queue, _delete_object,
create_connection()) for _ in xrange(10)]
for thread in object_threads:
thread.start()
container_threads = [QueueFunctionThread(container_queue,
_delete_container, create_connection()) for _ in xrange(10)]
for thread in container_threads:
thread.start()
if not args:
conn = create_connection()
try:
marker = ''
while True:
containers = \
[c['name'] for c in conn.get_account(marker=marker)]
if not containers:
break
for container in containers:
container_queue.put(container)
marker = containers[-1]
while not container_queue.empty():
sleep(0.01)
while not object_queue.empty():
sleep(0.01)
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Account not found')
elif len(args) == 1:
conn = create_connection()
_delete_container(args[0], conn)
else:
for obj in args[1:]:
object_queue.put((args[0], obj))
while not container_queue.empty():
sleep(0.01)
for thread in container_threads:
thread.abort = True
while thread.isAlive():
thread.join(0.01)
while not object_queue.empty():
sleep(0.01)
for thread in object_threads:
thread.abort = True
while thread.isAlive():
thread.join(0.01)
st_download_help = '''
download --all OR download container [object] [object] ...
Downloads everything in the account (with --all), or everything in a
container, or a list of objects depending on the args given.'''.strip('\n')
def st_download(options, args):
if (not args and not options.yes_all) or (args and options.yes_all):
options.error_queue.put('Usage: %s [options] %s' %
(basename(argv[0]), st_download_help))
return
object_queue = Queue(10000)
def _download_object((container, obj), conn):
try:
content_type, content_length, _, etag, metadata, body = \
conn.get_object(container, obj, resp_chunk_size=65536)
path = options.yes_all and join(container, obj) or obj
if path[:1] in ('/', '\\'):
path = path[1:]
if content_type.split(';', 1)[0] == 'text/directory':
if not isdir(path):
mkdirs(path)
read_length = 0
md5sum = md5()
for chunk in body:
read_length += len(chunk)
md5sum.update(chunk)
else:
dirpath = dirname(path)
if dirpath and not isdir(dirpath):
mkdirs(dirpath)
fp = open(path, 'wb')
read_length = 0
md5sum = md5()
                for chunk in body:
fp.write(chunk)
read_length += len(chunk)
md5sum.update(chunk)
fp.close()
if md5sum.hexdigest() != etag:
options.error_queue.put('%s: md5sum != etag, %s != %s' %
(path, md5sum.hexdigest(), etag))
if read_length != content_length:
options.error_queue.put(
'%s: read_length != content_length, %d != %d' %
(path, read_length, content_length))
if 'mtime' in metadata:
mtime = float(metadata['mtime'])
utime(path, (mtime, mtime))
if options.verbose:
options.print_queue.put(path)
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Object %s not found' %
repr('%s/%s' % (container, obj)))
container_queue = Queue(10000)
def _download_container(container, conn):
try:
marker = ''
while True:
objects = [o['name'] for o in
conn.get_container(container, marker=marker)]
if not objects:
break
for obj in objects:
object_queue.put((container, obj))
marker = objects[-1]
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Container %s not found' % repr(container))
url, token = get_auth(options.auth, options.user, options.key, snet=options.snet)
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url,
preauthtoken=token, snet=options.snet)
object_threads = [QueueFunctionThread(object_queue, _download_object,
create_connection()) for _ in xrange(10)]
for thread in object_threads:
thread.start()
container_threads = [QueueFunctionThread(container_queue,
_download_container, create_connection()) for _ in xrange(10)]
for thread in container_threads:
thread.start()
if not args:
conn = create_connection()
try:
marker = ''
while True:
containers = [c['name']
for c in conn.get_account(marker=marker)]
if not containers:
break
for container in containers:
container_queue.put(container)
marker = containers[-1]
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Account not found')
elif len(args) == 1:
_download_container(args[0], create_connection())
else:
for obj in args[1:]:
object_queue.put((args[0], obj))
while not container_queue.empty():
sleep(0.01)
for thread in container_threads:
thread.abort = True
while thread.isAlive():
thread.join(0.01)
while not object_queue.empty():
sleep(0.01)
for thread in object_threads:
thread.abort = True
while thread.isAlive():
thread.join(0.01)
st_list_help = '''
list [options] [container]
Lists the containers for the account or the objects for a container. -p or
--prefix is an option that will only list items beginning with that prefix.
-d or --delimiter is an option (for container listings only) that will roll up
items with the given delimiter (see Cloud Files general documentation for
what this means).
'''.strip('\n')
def st_list(options, args):
if len(args) > 1:
options.error_queue.put('Usage: %s [options] %s' %
(basename(argv[0]), st_list_help))
return
conn = Connection(options.auth, options.user, options.key, snet=options.snet)
try:
marker = ''
while True:
if not args:
items = conn.get_account(marker=marker, prefix=options.prefix)
else:
items = conn.get_container(args[0], marker=marker,
prefix=options.prefix, delimiter=options.delimiter)
if not items:
break
for item in items:
options.print_queue.put(item.get('name', item.get('subdir')))
marker = items[-1].get('name', items[-1].get('subdir'))
except ClientException, err:
if err.http_status != 404:
raise
if not args:
options.error_queue.put('Account not found')
else:
options.error_queue.put('Container %s not found' % repr(args[0]))
st_stat_help = '''
stat [container] [object]
Displays information for the account, container, or object depending on the
args given (if any).'''.strip('\n')
def st_stat(options, args):
    conn = Connection(options.auth, options.user, options.key, snet=options.snet)
if not args:
try:
container_count, object_count, bytes_used = conn.head_account()
options.print_queue.put('''
Account: %s
Containers: %d
Objects: %d
Bytes: %d'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], container_count,
object_count, bytes_used))
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Account not found')
elif len(args) == 1:
try:
object_count, bytes_used = conn.head_container(args[0])
options.print_queue.put('''
Account: %s
Container: %s
Objects: %d
Bytes: %d'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], args[0],
object_count, bytes_used))
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Container %s not found' % repr(args[0]))
elif len(args) == 2:
try:
content_type, content_length, last_modified, etag, metadata = \
conn.head_object(args[0], args[1])
options.print_queue.put('''
Account: %s
Container: %s
Object: %s
Content Type: %s
Content Length: %d
Last Modified: %s
ETag: %s'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], args[0],
args[1], content_type, content_length,
last_modified, etag))
for key, value in metadata.items():
options.print_queue.put('%14s: %s' % ('Meta %s' % key, value))
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Object %s not found' %
repr('%s/%s' % (args[0], args[1])))
else:
options.error_queue.put('Usage: %s [options] %s' %
(basename(argv[0]), st_stat_help))
st_upload_help = '''
upload [options] container file_or_directory [file_or_directory] [...]
Uploads to the given container the files and directories specified by the
remaining args. -c or --changed is an option that will only upload files
that have changed since the last upload.'''.strip('\n')
def st_upload(options, args):
if len(args) < 2:
options.error_queue.put('Usage: %s [options] %s' %
(basename(argv[0]), st_upload_help))
return
file_queue = Queue(10000)
def _upload_file((path, dir_marker), conn):
try:
obj = path
if obj.startswith('./') or obj.startswith('.\\'):
obj = obj[2:]
metadata = {'mtime': str(getmtime(path))}
if dir_marker:
if options.changed:
try:
ct, cl, lm, et, md = conn.head_object(args[0], obj)
if ct.split(';', 1)[0] == 'text/directory' and \
cl == 0 and \
et == 'd41d8cd98f00b204e9800998ecf8427e' and \
md.get('mtime') == metadata['mtime']:
return
except ClientException, err:
if err.http_status != 404:
raise
conn.put_object(args[0], obj, '', content_length=0,
content_type='text/directory',
metadata=metadata)
else:
if options.changed:
try:
ct, cl, lm, et, md = conn.head_object(args[0], obj)
if cl == getsize(path) and \
md.get('mtime') == metadata['mtime']:
return
except ClientException, err:
if err.http_status != 404:
raise
conn.put_object(args[0], obj, open(path, 'rb'),
content_length=getsize(path),
metadata=metadata)
if options.verbose:
options.print_queue.put(obj)
except OSError, err:
if err.errno != ENOENT:
raise
options.error_queue.put('Local file %s not found' % repr(path))
def _upload_dir(path):
names = listdir(path)
if not names:
file_queue.put((path, True)) # dir_marker = True
else:
            for name in names:
subpath = join(path, name)
if isdir(subpath):
_upload_dir(subpath)
else:
file_queue.put((subpath, False)) # dir_marker = False
url, token = get_auth(options.auth, options.user, options.key, snet=options.snet)
create_connection = lambda: Connection(options.auth, options.user,
options.key, preauthurl=url,
preauthtoken=token, snet=options.snet)
file_threads = [QueueFunctionThread(file_queue, _upload_file,
create_connection()) for _ in xrange(10)]
for thread in file_threads:
thread.start()
conn = create_connection()
try:
conn.put_container(args[0])
for arg in args[1:]:
if isdir(arg):
_upload_dir(arg)
else:
file_queue.put((arg, False)) # dir_marker = False
while not file_queue.empty():
sleep(0.01)
for thread in file_threads:
thread.abort = True
while thread.isAlive():
thread.join(0.01)
except ClientException, err:
if err.http_status != 404:
raise
options.error_queue.put('Account not found')
if __name__ == '__main__':
parser = OptionParser(version='%prog 1.0', usage='''
Usage: %%prog [options] <command> [args]
Commands:
%(st_stat_help)s
%(st_list_help)s
%(st_upload_help)s
%(st_download_help)s
%(st_delete_help)s
Example:
%%prog -A https://auth.api.rackspacecloud.com/v1.0 -U user -K key stat
'''.strip('\n') % globals())
parser.add_option('-s', '--snet', action='store_true', dest='snet',
default=False, help='Use SERVICENET internal network')
parser.add_option('-q', '--quiet', action='store_false', dest='verbose',
default=True, help='Suppress status output')
parser.add_option('-a', '--all', action='store_true', dest='yes_all',
default=False, help='Indicate that you really want the '
'whole account for commands that require --all in such '
'a case')
parser.add_option('-c', '--changed', action='store_true', dest='changed',
default=False, help='For the upload command: will '
'only upload files that have changed since the last '
'upload')
parser.add_option('-p', '--prefix', dest='prefix',
help='For the list command: will only list items '
'beginning with the prefix')
parser.add_option('-d', '--delimiter', dest='delimiter',
help='For the list command on containers: will roll up '
'items with the given delimiter (see Cloud Files '
'general documentation for what this means).')
parser.add_option('-A', '--auth', dest='auth',
help='URL for obtaining an auth token')
parser.add_option('-U', '--user', dest='user',
help='User name for obtaining an auth token')
parser.add_option('-K', '--key', dest='key',
help='Key for obtaining an auth token')
args = argv[1:]
if not args:
args.append('-h')
(options, args) = parser.parse_args(args)
required_help = '''
Requires ST_AUTH, ST_USER, and ST_KEY environment variables to be set or
overridden with -A, -U, or -K.'''.strip('\n')
for attr in ('auth', 'user', 'key'):
if not getattr(options, attr, None):
setattr(options, attr, environ.get('ST_%s' % attr.upper()))
if not getattr(options, attr, None):
exit(required_help)
commands = ('delete', 'download', 'list', 'stat', 'upload')
if not args or args[0] not in commands:
parser.print_usage()
if args:
exit('no such command: %s' % args[0])
exit()
options.print_queue = Queue(10000)
def _print(item):
if isinstance(item, unicode):
item = item.encode('utf8')
print item
print_thread = QueueFunctionThread(options.print_queue, _print)
print_thread.start()
options.error_queue = Queue(10000)
def _error(item):
if isinstance(item, unicode):
item = item.encode('utf8')
print >>stderr, item
error_thread = QueueFunctionThread(options.error_queue, _error)
error_thread.start()
try:
globals()['st_%s' % args[0]](options, args[1:])
while not options.print_queue.empty():
sleep(0.01)
print_thread.abort = True
while print_thread.isAlive():
print_thread.join(0.01)
while not options.error_queue.empty():
sleep(0.01)
error_thread.abort = True
while error_thread.isAlive():
error_thread.join(0.01)
except:
for thread in threading_enumerate():
thread.abort = True
raise
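# Example invocations (illustrative; the auth URL, user, and key shown are
# placeholders, or set ST_AUTH, ST_USER, and ST_KEY instead):
#   st -A https://auth.example.com/v1.0 -U account:user -K secretkey stat
#   st -A https://auth.example.com/v1.0 -U account:user -K secretkey \
#       upload photos /path/to/photos
#   st -A https://auth.example.com/v1.0 -U account:user -K secretkey \
#       list photos
#   st -A https://auth.example.com/v1.0 -U account:user -K secretkey \
#       download --all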