From fa3c871f0b1544f859bacf38497580afd69ced0b Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 16 Nov 2010 15:35:39 -0800 Subject: [PATCH 001/199] Server-side implementation for segmented objects --- swift/common/constraints.py | 11 + swift/obj/server.py | 9 +- swift/proxy/server.py | 177 ++++++++++++- test/functionalnosetests/test_object.py | 279 ++++++++++++++++++++ test/unit/common/test_constraints.py | 27 ++ test/unit/obj/test_server.py | 104 ++++++-- test/unit/proxy/test_server.py | 335 +++++++++++++++++++++++- 7 files changed, 915 insertions(+), 27 deletions(-) diff --git a/swift/common/constraints.py b/swift/common/constraints.py index c3f4f1621d..d91c136504 100644 --- a/swift/common/constraints.py +++ b/swift/common/constraints.py @@ -113,6 +113,17 @@ def check_object_creation(req, object_name): if not check_utf8(req.headers['Content-Type']): return HTTPBadRequest(request=req, body='Invalid Content-Type', content_type='text/plain') + if 'x-object-manifest' in req.headers: + value = req.headers['x-object-manifest'] + container = prefix = None + try: + container, prefix = value.split('/', 1) + except ValueError: + pass + if not container or not prefix or '?' 
in value or '&' in value or \ + prefix[0] == '/': + return HTTPBadRequest(request=req, + body='X-Object-Manifest must in the format container/prefix') return check_metadata(req, 'object') diff --git a/swift/obj/server.py b/swift/obj/server.py index 632a0c04cc..cf90bb7971 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -391,6 +391,9 @@ class ObjectController(object): 'ETag': etag, 'Content-Length': str(os.fstat(fd).st_size), } + if 'x-object-manifest' in request.headers: + metadata['X-Object-Manifest'] = \ + request.headers['x-object-manifest'] metadata.update(val for val in request.headers.iteritems() if val[0].lower().startswith('x-object-meta-') and len(val[0]) > 14) @@ -460,7 +463,8 @@ class ObjectController(object): 'application/octet-stream'), app_iter=file, request=request, conditional_response=True) for key, value in file.metadata.iteritems(): - if key.lower().startswith('x-object-meta-'): + if key == 'X-Object-Manifest' or \ + key.lower().startswith('x-object-meta-'): response.headers[key] = value response.etag = file.metadata['ETag'] response.last_modified = float(file.metadata['X-Timestamp']) @@ -488,7 +492,8 @@ class ObjectController(object): response = Response(content_type=file.metadata['Content-Type'], request=request, conditional_response=True) for key, value in file.metadata.iteritems(): - if key.lower().startswith('x-object-meta-'): + if key == 'X-Object-Manifest' or \ + key.lower().startswith('x-object-meta-'): response.headers[key] = value response.etag = file.metadata['ETag'] response.last_modified = float(file.metadata['X-Timestamp']) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index bacea4db9f..4265754390 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -14,6 +14,10 @@ # limitations under the License. 
from __future__ import with_statement +try: + import simplejson as json +except ImportError: + import json import mimetypes import os import time @@ -22,6 +26,7 @@ from ConfigParser import ConfigParser from urllib import unquote, quote import uuid import functools +from hashlib import md5 from eventlet.timeout import Timeout from webob.exc import HTTPBadRequest, HTTPMethodNotAllowed, \ @@ -94,6 +99,138 @@ def get_container_memcache_key(account, container): return 'container%s' % path +class SegmentedIterable(object): + """ + Iterable that returns the object contents for a segmented object in Swift. + + In addition to these params, you can also set the `response` attr just + after creating the SegmentedIterable and it will update the response's + `bytes_transferred` value (used to log the size of the request). + + :param controller: The ObjectController instance to work with. + :param container: The container the object segments are within. + :param listing: The listing of object segments to iterate over; this is a + standard JSON decoded container listing. + """ + + def __init__(self, controller, container, listing): + self.controller = controller + self.container = container + self.listing = listing + self.segment = -1 + self.seek = 0 + self.segment_iter = None + self.position = 0 + self.response = None + + def _load_next_segment(self): + """ + Loads the self.segment_iter with the next object segment's contents. + + :raises: StopIteration when there are no more object segments. 
+ """ + try: + self.segment += 1 + if self.segment >= len(self.listing): + raise StopIteration() + obj = self.listing[self.segment] + partition, nodes = self.controller.app.object_ring.get_nodes( + self.controller.account_name, self.container, obj['name']) + path = '/%s/%s/%s' % (self.controller.account_name, self.container, + obj['name']) + req = Request.blank(path) + if self.seek: + req.range = 'bytes=%s-' % self.seek + self.seek = 0 + resp = self.controller.GETorHEAD_base(req, 'Object', partition, + self.controller.iter_nodes(partition, nodes, + self.controller.app.object_ring), path, + self.controller.app.object_ring.replica_count) + if resp.status_int // 100 != 2: + raise Exception('Could not load object segment %s: %s' % (path, + resp.status_int)) + self.segment_iter = resp.app_iter + except Exception, err: + if not isinstance(err, StopIteration): + self.controller.app.logger.exception('ERROR: While processing ' + 'manifest /%s/%s/%s %s' % (self.controller.account_name, + self.controller.container_name, + self.controller.object_name, self.controller.trans_id)) + raise + + def __iter__(self): + """ Standard iterator function that returns the object's contents. 
""" + try: + while True: + if not self.segment_iter: + self._load_next_segment() + while True: + with ChunkReadTimeout(self.controller.app.node_timeout): + try: + chunk = self.segment_iter.next() + break + except StopIteration: + self._load_next_segment() + self.position += len(chunk) + if self.response: + self.response.bytes_transferred = getattr(self.response, + 'bytes_transferred', 0) + len(chunk) + yield chunk + except Exception, err: + if not isinstance(err, StopIteration): + self.controller.app.logger.exception('ERROR: While processing ' + 'manifest /%s/%s/%s %s' % (self.controller.account_name, + self.controller.container_name, + self.controller.object_name, self.controller.trans_id)) + raise + + def app_iter_range(self, start, stop): + """ + Non-standard iterator function for use with Webob in serving Range + requests more quickly. This will skip over segments and do a range + request on the first segment to return data from, if needed. + + :param start: The first byte (zero-based) to return. None for 0. + :param stop: The last byte (zero-based) to return. None for end. 
+ """ + try: + if start: + if len(self.listing) <= self.segment + 1: + return + while start >= self.position + \ + self.listing[self.segment + 1]['bytes']: + self.segment += 1 + if len(self.listing) <= self.segment + 1: + return + self.position += self.listing[self.segment]['bytes'] + self.seek = start - self.position + else: + start = 0 + if stop is not None: + length = stop - start + else: + length = None + for chunk in self: + if length is not None: + length -= len(chunk) + if length < 0: + # Chop off the extra: + if self.response: + self.response.bytes_transferred = \ + getattr(self.response, 'bytes_transferred', 0) \ + + length + yield chunk[:length] + break + yield chunk + except Exception, err: + if not isinstance(err, StopIteration): + self.controller.app.logger.exception('ERROR: While processing ' + 'manifest /%s/%s/%s %s' % (self.controller.account_name, + self.controller.container_name, + self.controller.object_name, self.controller.trans_id)) + raise + + class Controller(object): """Base WSGI controller class for the proxy""" @@ -526,9 +663,47 @@ class ObjectController(Controller): return aresp partition, nodes = self.app.object_ring.get_nodes( self.account_name, self.container_name, self.object_name) - return self.GETorHEAD_base(req, 'Object', partition, + resp = self.GETorHEAD_base(req, 'Object', partition, self.iter_nodes(partition, nodes, self.app.object_ring), req.path_info, self.app.object_ring.replica_count) + # If we get a 416 Requested Range Not Satisfiable we have to check if + # we were actually requesting a manifest object and then redo the range + # request on the whole object. 
+ if resp.status_int == 416: + req_range = req.range + req.range = None + resp2 = self.GETorHEAD_base(req, 'Object', partition, + self.iter_nodes(partition, nodes, self.app.object_ring), + req.path_info, self.app.object_ring.replica_count) + if 'x-object-manifest' not in resp2.headers: + return resp + resp = resp2 + req.range = req_range + if 'x-object-manifest' in resp.headers: + lcontainer, lprefix = \ + resp.headers['x-object-manifest'].split('/', 1) + lpartition, lnodes = self.app.container_ring.get_nodes( + self.account_name, lcontainer) + lreq = Request.blank('/%s/%s?prefix=%s&format=json' % + (self.account_name, lcontainer, lprefix)) + lresp = self.GETorHEAD_base(lreq, 'Container', lpartition, lnodes, + lreq.path_info, self.app.container_ring.replica_count) + if 'swift.authorize' in req.environ: + req.acl = lresp.headers.get('x-container-read') + aresp = req.environ['swift.authorize'](req) + if aresp: + return aresp + listing = json.loads(lresp.body) + content_length = sum(o['bytes'] for o in listing) + etag = md5('"'.join(o['hash'] for o in listing)).hexdigest() + headers = {'X-Object-Manifest': resp.headers['x-object-manifest'], + 'Content-Type': resp.content_type, 'Content-Length': + content_length, 'ETag': etag} + resp = Response(app_iter=SegmentedIterable(self, lcontainer, + listing), headers=headers, request=req, + conditional_response=True) + resp.app_iter.response = resp + return resp @public @delay_denial diff --git a/test/functionalnosetests/test_object.py b/test/functionalnosetests/test_object.py index e4b2fc48c5..2e1668db0e 100644 --- a/test/functionalnosetests/test_object.py +++ b/test/functionalnosetests/test_object.py @@ -16,6 +16,7 @@ class TestObject(unittest.TestCase): if skip: raise SkipTest self.container = uuid4().hex + def put(url, token, parsed, conn): conn.request('PUT', parsed.path + '/' + self.container, '', {'X-Auth-Token': token}) @@ -24,6 +25,7 @@ class TestObject(unittest.TestCase): resp.read() self.assertEquals(resp.status, 
201) self.obj = uuid4().hex + def put(url, token, parsed, conn): conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container, self.obj), 'test', {'X-Auth-Token': token}) @@ -35,6 +37,7 @@ class TestObject(unittest.TestCase): def tearDown(self): if skip: raise SkipTest + def delete(url, token, parsed, conn): conn.request('DELETE', '%s/%s/%s' % (parsed.path, self.container, self.obj), '', {'X-Auth-Token': token}) @@ -42,6 +45,7 @@ class TestObject(unittest.TestCase): resp = retry(delete) resp.read() self.assertEquals(resp.status, 204) + def delete(url, token, parsed, conn): conn.request('DELETE', parsed.path + '/' + self.container, '', {'X-Auth-Token': token}) @@ -53,6 +57,7 @@ class TestObject(unittest.TestCase): def test_public_object(self): if skip: raise SkipTest + def get(url, token, parsed, conn): conn.request('GET', '%s/%s/%s' % (parsed.path, self.container, self.obj)) @@ -62,6 +67,7 @@ class TestObject(unittest.TestCase): raise Exception('Should not have been able to GET') except Exception, err: self.assert_(str(err).startswith('No result after ')) + def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.container, '', {'X-Auth-Token': token, @@ -73,6 +79,7 @@ class TestObject(unittest.TestCase): resp = retry(get) resp.read() self.assertEquals(resp.status, 200) + def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.container, '', {'X-Auth-Token': token, 'X-Container-Read': ''}) @@ -89,6 +96,7 @@ class TestObject(unittest.TestCase): def test_private_object(self): if skip or skip3: raise SkipTest + # Ensure we can't access the object with the third account def get(url, token, parsed, conn): conn.request('GET', '%s/%s/%s' % (parsed.path, self.container, @@ -98,8 +106,10 @@ class TestObject(unittest.TestCase): resp = retry(get, use_account=3) resp.read() self.assertEquals(resp.status, 403) + # create a shared container writable by account3 shared_container = uuid4().hex + def put(url, token, parsed, conn): 
conn.request('PUT', '%s/%s' % (parsed.path, shared_container), '', @@ -110,6 +120,7 @@ class TestObject(unittest.TestCase): resp = retry(put) resp.read() self.assertEquals(resp.status, 201) + # verify third account can not copy from private container def copy(url, token, parsed, conn): conn.request('PUT', '%s/%s/%s' % (parsed.path, @@ -123,6 +134,7 @@ class TestObject(unittest.TestCase): resp = retry(copy, use_account=3) resp.read() self.assertEquals(resp.status, 403) + # verify third account can write "obj1" to shared container def put(url, token, parsed, conn): conn.request('PUT', '%s/%s/%s' % (parsed.path, shared_container, @@ -131,6 +143,7 @@ class TestObject(unittest.TestCase): resp = retry(put, use_account=3) resp.read() self.assertEquals(resp.status, 201) + # verify third account can copy "obj1" to shared container def copy2(url, token, parsed, conn): conn.request('COPY', '%s/%s/%s' % (parsed.path, @@ -143,6 +156,7 @@ class TestObject(unittest.TestCase): resp = retry(copy2, use_account=3) resp.read() self.assertEquals(resp.status, 201) + # verify third account STILL can not copy from private container def copy3(url, token, parsed, conn): conn.request('COPY', '%s/%s/%s' % (parsed.path, @@ -155,6 +169,7 @@ class TestObject(unittest.TestCase): resp = retry(copy3, use_account=3) resp.read() self.assertEquals(resp.status, 403) + # clean up "obj1" def delete(url, token, parsed, conn): conn.request('DELETE', '%s/%s/%s' % (parsed.path, shared_container, @@ -163,6 +178,7 @@ class TestObject(unittest.TestCase): resp = retry(delete) resp.read() self.assertEquals(resp.status, 204) + # clean up shared_container def delete(url, token, parsed, conn): conn.request('DELETE', @@ -173,6 +189,269 @@ class TestObject(unittest.TestCase): resp.read() self.assertEquals(resp.status, 204) + def test_manifest(self): + if skip: + raise SkipTest + # Data for the object segments + segments1 = ['one', 'two', 'three', 'four', 'five'] + segments2 = ['six', 'seven', 'eight'] + segments3 = 
['nine', 'ten', 'eleven'] + + # Upload the first set of segments + def put(url, token, parsed, conn, objnum): + conn.request('PUT', '%s/%s/segments1/%s' % (parsed.path, + self.container, str(objnum)), segments1[objnum], + {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments1)): + resp = retry(put, objnum) + resp.read() + self.assertEquals(resp.status, 201) + + # Upload the manifest + def put(url, token, parsed, conn): + conn.request('PUT', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments1/' % self.container, + 'Content-Type': 'text/jibberish', 'Content-Length': '0'}) + return check_response(conn) + resp = retry(put) + resp.read() + self.assertEquals(resp.status, 201) + + # Get the manifest (should get all the segments as the body) + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments1)) + self.assertEquals(resp.status, 200) + self.assertEquals(resp.getheader('content-type'), 'text/jibberish') + + # Get with a range at the start of the second segment + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, 'Range': + 'bytes=3-'}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments1[1:])) + self.assertEquals(resp.status, 206) + + # Get with a range in the middle of the second segment + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, 'Range': + 'bytes=5-'}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments1)[5:]) + self.assertEquals(resp.status, 206) + + # Get with a full start and stop range + def get(url, token, 
parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, 'Range': + 'bytes=5-10'}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments1)[5:11]) + self.assertEquals(resp.status, 206) + + # Upload the second set of segments + def put(url, token, parsed, conn, objnum): + conn.request('PUT', '%s/%s/segments2/%s' % (parsed.path, + self.container, str(objnum)), segments2[objnum], + {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments2)): + resp = retry(put, objnum) + resp.read() + self.assertEquals(resp.status, 201) + + # Get the manifest (should still be the first segments of course) + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments1)) + self.assertEquals(resp.status, 200) + + # Update the manifest + def put(url, token, parsed, conn): + conn.request('PUT', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments2/' % self.container, + 'Content-Length': '0'}) + return check_response(conn) + resp = retry(put) + resp.read() + self.assertEquals(resp.status, 201) + + # Get the manifest (should be the second set of segments now) + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments2)) + self.assertEquals(resp.status, 200) + + if not skip3: + + # Ensure we can't access the manifest with the third account + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get, use_account=3) + 
resp.read() + self.assertEquals(resp.status, 403) + + # Grant access to the third account + def post(url, token, parsed, conn): + conn.request('POST', '%s/%s' % (parsed.path, self.container), + '', {'X-Auth-Token': token, 'X-Container-Read': + swift_test_user[2]}) + return check_response(conn) + resp = retry(post) + resp.read() + self.assertEquals(resp.status, 204) + + # The third account should be able to get the manifest now + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get, use_account=3) + self.assertEquals(resp.read(), ''.join(segments2)) + self.assertEquals(resp.status, 200) + + # Create another container for the third set of segments + acontainer = uuid4().hex + + def put(url, token, parsed, conn): + conn.request('PUT', parsed.path + '/' + acontainer, '', + {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(put) + resp.read() + self.assertEquals(resp.status, 201) + + # Upload the third set of segments in the other container + def put(url, token, parsed, conn, objnum): + conn.request('PUT', '%s/%s/segments3/%s' % (parsed.path, + acontainer, str(objnum)), segments3[objnum], + {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments3)): + resp = retry(put, objnum) + resp.read() + self.assertEquals(resp.status, 201) + + # Update the manifest + def put(url, token, parsed, conn): + conn.request('PUT', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments3/' % acontainer, + 'Content-Length': '0'}) + return check_response(conn) + resp = retry(put) + resp.read() + self.assertEquals(resp.status, 201) + + # Get the manifest to ensure it's the third set of segments + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return 
check_response(conn) + resp = retry(get) + self.assertEquals(resp.read(), ''.join(segments3)) + self.assertEquals(resp.status, 200) + + if not skip3: + + # Ensure we can't access the manifest with the third account + # (because the segments are in a protected container even if the + # manifest itself is not). + + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get, use_account=3) + resp.read() + self.assertEquals(resp.status, 403) + + # Grant access to the third account + def post(url, token, parsed, conn): + conn.request('POST', '%s/%s' % (parsed.path, acontainer), + '', {'X-Auth-Token': token, 'X-Container-Read': + swift_test_user[2]}) + return check_response(conn) + resp = retry(post) + resp.read() + self.assertEquals(resp.status, 204) + + # The third account should be able to get the manifest now + def get(url, token, parsed, conn): + conn.request('GET', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get, use_account=3) + self.assertEquals(resp.read(), ''.join(segments3)) + self.assertEquals(resp.status, 200) + + # Delete the manifest + def delete(url, token, parsed, conn, objnum): + conn.request('DELETE', '%s/%s/manifest' % (parsed.path, + self.container), '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(delete, objnum) + resp.read() + self.assertEquals(resp.status, 204) + + # Delete the third set of segments + def delete(url, token, parsed, conn, objnum): + conn.request('DELETE', '%s/%s/segments3/%s' % (parsed.path, + acontainer, str(objnum)), '', {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments3)): + resp = retry(delete, objnum) + resp.read() + self.assertEquals(resp.status, 204) + + # Delete the second set of segments + def delete(url, token, parsed, conn, objnum): + 
conn.request('DELETE', '%s/%s/segments2/%s' % (parsed.path, + self.container, str(objnum)), '', {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments2)): + resp = retry(delete, objnum) + resp.read() + self.assertEquals(resp.status, 204) + + # Delete the first set of segments + def delete(url, token, parsed, conn, objnum): + conn.request('DELETE', '%s/%s/segments1/%s' % (parsed.path, + self.container, str(objnum)), '', {'X-Auth-Token': token}) + return check_response(conn) + for objnum in xrange(len(segments1)): + resp = retry(delete, objnum) + resp.read() + self.assertEquals(resp.status, 204) + + # Delete the extra container + def delete(url, token, parsed, conn): + conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '', + {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(delete) + resp.read() + self.assertEquals(resp.status, 204) + if __name__ == '__main__': unittest.main() diff --git a/test/unit/common/test_constraints.py b/test/unit/common/test_constraints.py index 0950d1e50e..bcc590f1ee 100644 --- a/test/unit/common/test_constraints.py +++ b/test/unit/common/test_constraints.py @@ -22,6 +22,7 @@ from webob.exc import HTTPBadRequest, HTTPLengthRequired, \ from swift.common import constraints + class TestConstraints(unittest.TestCase): def test_check_metadata_empty(self): @@ -137,6 +138,32 @@ class TestConstraints(unittest.TestCase): self.assert_(isinstance(resp, HTTPBadRequest)) self.assert_('Content-Type' in resp.body) + def test_check_object_manifest_header(self): + resp = constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': 'container/prefix', 'Content-Length': + '0', 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(not resp) + resp = constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': 'container', 'Content-Length': '0', + 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(isinstance(resp, HTTPBadRequest)) + resp 
= constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': '/container/prefix', + 'Content-Length': '0', 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(isinstance(resp, HTTPBadRequest)) + resp = constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': 'container/prefix?query=param', + 'Content-Length': '0', 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(isinstance(resp, HTTPBadRequest)) + resp = constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': 'container/prefix&query=param', + 'Content-Length': '0', 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(isinstance(resp, HTTPBadRequest)) + resp = constraints.check_object_creation(Request.blank('/', + headers={'X-Object-Manifest': 'http://host/container/prefix', + 'Content-Length': '0', 'Content-Type': 'text/plain'}), 'manifest') + self.assert_(isinstance(resp, HTTPBadRequest)) + def test_check_mount(self): self.assertFalse(constraints.check_mount('', '')) constraints.os = MockTrue() # mock os module diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 94a3b28266..90eed52977 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -42,7 +42,7 @@ class TestObjectController(unittest.TestCase): self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') if not self.path_to_test_xfs or \ not os.path.exists(self.path_to_test_xfs): - print >>sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ + print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ 'pointing to a valid directory.\n' \ 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ 'system for testing.' 
@@ -77,7 +77,8 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 201) timestamp = normalize_timestamp(time()) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': timestamp, 'X-Object-Meta-3': 'Three', 'X-Object-Meta-4': 'Four', @@ -95,7 +96,8 @@ class TestObjectController(unittest.TestCase): if not self.path_to_test_xfs: raise SkipTest timestamp = normalize_timestamp(time()) - req = Request.blank('/sda1/p/a/c/fail', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/fail', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': timestamp, 'X-Object-Meta-1': 'One', 'X-Object-Meta-2': 'Two', @@ -116,29 +118,37 @@ class TestObjectController(unittest.TestCase): def test_POST_container_connection(self): if not self.path_to_test_xfs: raise SkipTest + def mock_http_connect(response, with_exc=False): + class FakeConn(object): + def __init__(self, status, with_exc): self.status = status self.reason = 'Fake' self.host = '1.2.3.4' self.port = '1234' self.with_exc = with_exc + def getresponse(self): if self.with_exc: raise Exception('test') return self + def read(self, amt=None): return '' + return lambda *args, **kwargs: FakeConn(response, with_exc) + old_http_connect = object_server.http_connect try: timestamp = normalize_timestamp(time()) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Timestamp': timestamp, 'Content-Type': 'text/plain', - 'Content-Length': '0'}) + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': + 'POST'}, headers={'X-Timestamp': timestamp, 'Content-Type': + 'text/plain', 'Content-Length': '0'}) resp = self.object_controller.PUT(req) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': timestamp, 
'X-Container-Host': '1.2.3.4:0', 'X-Container-Partition': '3', @@ -148,7 +158,8 @@ class TestObjectController(unittest.TestCase): object_server.http_connect = mock_http_connect(202) resp = self.object_controller.POST(req) self.assertEquals(resp.status_int, 202) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': timestamp, 'X-Container-Host': '1.2.3.4:0', 'X-Container-Partition': '3', @@ -158,7 +169,8 @@ class TestObjectController(unittest.TestCase): object_server.http_connect = mock_http_connect(202, with_exc=True) resp = self.object_controller.POST(req) self.assertEquals(resp.status_int, 202) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': timestamp, 'X-Container-Host': '1.2.3.4:0', 'X-Container-Partition': '3', @@ -226,7 +238,8 @@ class TestObjectController(unittest.TestCase): timestamp + '.data') self.assert_(os.path.isfile(objfile)) self.assertEquals(open(objfile).read(), 'VERIFY') - self.assertEquals(pickle.loads(getxattr(objfile, object_server.METADATA_KEY)), + self.assertEquals(pickle.loads(getxattr(objfile, + object_server.METADATA_KEY)), {'X-Timestamp': timestamp, 'Content-Length': '6', 'ETag': '0b4c12d7e0a73840c1c4f148fda3b037', @@ -258,7 +271,8 @@ class TestObjectController(unittest.TestCase): timestamp + '.data') self.assert_(os.path.isfile(objfile)) self.assertEquals(open(objfile).read(), 'VERIFY TWO') - self.assertEquals(pickle.loads(getxattr(objfile, object_server.METADATA_KEY)), + self.assertEquals(pickle.loads(getxattr(objfile, + object_server.METADATA_KEY)), {'X-Timestamp': timestamp, 'Content-Length': '10', 'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039', @@ -270,17 +284,17 @@ class TestObjectController(unittest.TestCase): if not self.path_to_test_xfs: raise SkipTest req = Request.blank('/sda1/p/a/c/o', 
environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': normalize_timestamp(time()), - 'Content-Type': 'text/plain'}) + headers={'X-Timestamp': normalize_timestamp(time()), + 'Content-Type': 'text/plain'}) req.body = 'test' resp = self.object_controller.PUT(req) self.assertEquals(resp.status_int, 201) def test_PUT_invalid_etag(self): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': normalize_timestamp(time()), - 'Content-Type': 'text/plain', - 'ETag': 'invalid'}) + headers={'X-Timestamp': normalize_timestamp(time()), + 'Content-Type': 'text/plain', + 'ETag': 'invalid'}) req.body = 'test' resp = self.object_controller.PUT(req) self.assertEquals(resp.status_int, 422) @@ -304,7 +318,8 @@ class TestObjectController(unittest.TestCase): timestamp + '.data') self.assert_(os.path.isfile(objfile)) self.assertEquals(open(objfile).read(), 'VERIFY THREE') - self.assertEquals(pickle.loads(getxattr(objfile, object_server.METADATA_KEY)), + self.assertEquals(pickle.loads(getxattr(objfile, + object_server.METADATA_KEY)), {'X-Timestamp': timestamp, 'Content-Length': '12', 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568', @@ -316,25 +331,33 @@ class TestObjectController(unittest.TestCase): def test_PUT_container_connection(self): if not self.path_to_test_xfs: raise SkipTest + def mock_http_connect(response, with_exc=False): + class FakeConn(object): + def __init__(self, status, with_exc): self.status = status self.reason = 'Fake' self.host = '1.2.3.4' self.port = '1234' self.with_exc = with_exc + def getresponse(self): if self.with_exc: raise Exception('test') return self + def read(self, amt=None): return '' + return lambda *args, **kwargs: FakeConn(response, with_exc) + old_http_connect = object_server.http_connect try: timestamp = normalize_timestamp(time()) - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, + req = Request.blank('/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': 
timestamp, 'X-Container-Host': '1.2.3.4:0', 'X-Container-Partition': '3', @@ -555,7 +578,8 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 200) self.assertEquals(resp.etag, etag) - req = Request.blank('/sda1/p/a/c/o2', environ={'REQUEST_METHOD': 'GET'}, + req = Request.blank('/sda1/p/a/c/o2', + environ={'REQUEST_METHOD': 'GET'}, headers={'If-Match': '*'}) resp = self.object_controller.GET(req) self.assertEquals(resp.status_int, 412) @@ -715,7 +739,8 @@ class TestObjectController(unittest.TestCase): """ Test swift.object_server.ObjectController.DELETE """ if not self.path_to_test_xfs: raise SkipTest - req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}) + req = Request.blank('/sda1/p/a/c', + environ={'REQUEST_METHOD': 'DELETE'}) resp = self.object_controller.DELETE(req) self.assertEquals(resp.status_int, 400) @@ -916,21 +941,26 @@ class TestObjectController(unittest.TestCase): def test_disk_file_mkstemp_creates_dir(self): tmpdir = os.path.join(self.testdir, 'sda1', 'tmp') os.rmdir(tmpdir) - with object_server.DiskFile(self.testdir, 'sda1', '0', 'a', 'c', 'o').mkstemp(): + with object_server.DiskFile(self.testdir, 'sda1', '0', 'a', 'c', + 'o').mkstemp(): self.assert_(os.path.exists(tmpdir)) def test_max_upload_time(self): if not self.path_to_test_xfs: raise SkipTest + class SlowBody(): + def __init__(self): self.sent = 0 + def read(self, size=-1): if self.sent < 4: sleep(0.1) self.sent += 1 return ' ' return '' + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'X-Timestamp': normalize_timestamp(time()), @@ -946,14 +976,18 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 408) def test_short_body(self): + class ShortBody(): + def __init__(self): self.sent = False + def read(self, size=-1): if not self.sent: self.sent = True return ' ' return '' + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT', 
'wsgi.input': ShortBody()}, headers={'X-Timestamp': normalize_timestamp(time()), @@ -1001,11 +1035,37 @@ class TestObjectController(unittest.TestCase): resp = self.object_controller.GET(req) self.assertEquals(resp.status_int, 200) self.assertEquals(resp.headers['content-encoding'], 'gzip') - req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': + 'HEAD'}) resp = self.object_controller.HEAD(req) self.assertEquals(resp.status_int, 200) self.assertEquals(resp.headers['content-encoding'], 'gzip') + def test_manifest_header(self): + if not self.path_to_test_xfs: + raise SkipTest + timestamp = normalize_timestamp(time()) + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': timestamp, + 'Content-Type': 'text/plain', + 'Content-Length': '0', + 'X-Object-Manifest': 'c/o/'}) + resp = self.object_controller.PUT(req) + self.assertEquals(resp.status_int, 201) + objfile = os.path.join(self.testdir, 'sda1', + storage_directory(object_server.DATADIR, 'p', hash_path('a', 'c', + 'o')), timestamp + '.data') + self.assert_(os.path.isfile(objfile)) + self.assertEquals(pickle.loads(getxattr(objfile, + object_server.METADATA_KEY)), {'X-Timestamp': timestamp, + 'Content-Length': '0', 'Content-Type': 'text/plain', 'name': + '/a/c/o', 'X-Object-Manifest': 'c/o/', 'ETag': + 'd41d8cd98f00b204e9800998ecf8427e'}) + req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) + resp = self.object_controller.GET(req) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.headers.get('x-object-manifest'), 'c/o/') + if __name__ == '__main__': unittest.main() diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 2bf85c139c..9162455ab0 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -34,8 +34,8 @@ import eventlet from eventlet import sleep, spawn, TimeoutError, util, wsgi, listen from 
eventlet.timeout import Timeout import simplejson -from webob import Request -from webob.exc import HTTPUnauthorized +from webob import Request, Response +from webob.exc import HTTPNotFound, HTTPUnauthorized from test.unit import connect_tcp, readuntil2crlfs from swift.proxy import server as proxy_server @@ -53,7 +53,9 @@ logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) def fake_http_connect(*code_iter, **kwargs): + class FakeConn(object): + def __init__(self, status, etag=None, body=''): self.status = status self.reason = 'Fake' @@ -158,6 +160,7 @@ class FakeRing(object): class FakeMemcache(object): + def __init__(self): self.store = {} @@ -212,9 +215,12 @@ def save_globals(): class TestProxyServer(unittest.TestCase): def test_unhandled_exception(self): + class MyApp(proxy_server.Application): + def get_controller(self, path): raise Exception('this shouldnt be caught') + app = MyApp(None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), object_ring=FakeRing()) req = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'}) @@ -323,8 +329,11 @@ class TestObjectController(unittest.TestCase): test_status_map((200, 200, 204, 500, 404), 503) def test_PUT_connect_exceptions(self): + def mock_http_connect(*code_iter, **kwargs): + class FakeConn(object): + def __init__(self, status): self.status = status self.reason = 'Fake' @@ -344,6 +353,7 @@ class TestObjectController(unittest.TestCase): if self.status == -3: return FakeConn(507) return FakeConn(100) + code_iter = iter(code_iter) def connect(*args, **ckwargs): @@ -351,7 +361,9 @@ class TestObjectController(unittest.TestCase): if status == -1: raise HTTPException() return FakeConn(status) + return connect + with save_globals(): controller = proxy_server.ObjectController(self.app, 'account', 'container', 'object') @@ -372,8 +384,11 @@ class TestObjectController(unittest.TestCase): test_status_map((200, 200, 503, 503, -1), 503) def test_PUT_send_exceptions(self): + def 
mock_http_connect(*code_iter, **kwargs): + class FakeConn(object): + def __init__(self, status): self.status = status self.reason = 'Fake' @@ -437,8 +452,11 @@ class TestObjectController(unittest.TestCase): self.assertEquals(res.status_int, 413) def test_PUT_getresponse_exceptions(self): + def mock_http_connect(*code_iter, **kwargs): + class FakeConn(object): + def __init__(self, status): self.status = status self.reason = 'Fake' @@ -633,6 +651,7 @@ class TestObjectController(unittest.TestCase): dev['port'] = 1 class SlowBody(): + def __init__(self): self.sent = 0 @@ -642,6 +661,7 @@ class TestObjectController(unittest.TestCase): self.sent += 1 return ' ' return '' + req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}) @@ -680,11 +700,13 @@ class TestObjectController(unittest.TestCase): dev['port'] = 1 class SlowBody(): + def __init__(self): self.sent = 0 def read(self, size=-1): raise Exception('Disconnected') + req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}) @@ -1334,7 +1356,9 @@ class TestObjectController(unittest.TestCase): def test_chunked_put(self): # quick test of chunked put w/o PATH_TO_TEST_XFS + class ChunkedFile(): + def __init__(self, bytes): self.bytes = bytes self.read_bytes = 0 @@ -1495,8 +1519,10 @@ class TestObjectController(unittest.TestCase): self.assertEquals(headers[:len(exp)], exp) # Check unhandled exception orig_update_request = prosrv.update_request + def broken_update_request(env, req): raise Exception('fake') + prosrv.update_request = broken_update_request sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -1545,8 +1571,10 @@ class TestObjectController(unittest.TestCase): # in a test for logging x-forwarded-for (first entry only). 
class Logger(object): + def info(self, msg): self.msg = msg + orig_logger = prosrv.logger prosrv.logger = Logger() sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -1568,8 +1596,10 @@ class TestObjectController(unittest.TestCase): # Turn on header logging. class Logger(object): + def info(self, msg): self.msg = msg + orig_logger = prosrv.logger prosrv.logger = Logger() prosrv.log_headers = True @@ -1726,6 +1756,52 @@ class TestObjectController(unittest.TestCase): self.assertEquals(headers[:len(exp)], exp) body = fd.read() self.assertEquals(body, 'oh hai123456789abcdef') + # Create a container for our segmented/manifest object testing + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Create the object segments + for segment in xrange(5): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/name/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\n\r\n1234 ' % str(segment)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Create the object manifest file + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' + 'segmented/name/\r\nContent-Type: text/jibberish\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure retrieving the manifest file gets the whole object + sock = connect_tcp(('localhost', 
prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/name/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = fd.read() + self.assertEquals(body, '1234 1234 1234 1234 1234 ') finally: prospa.kill() acc1spa.kill() @@ -1937,6 +2013,7 @@ class TestObjectController(unittest.TestCase): res = controller.COPY(req) self.assert_(called[0]) + class TestContainerController(unittest.TestCase): "Test swift.proxy_server.ContainerController" @@ -2080,7 +2157,9 @@ class TestContainerController(unittest.TestCase): self.assertEquals(resp.status_int, 404) def test_put_locking(self): + class MockMemcache(FakeMemcache): + def __init__(self, allow_lock=None): self.allow_lock = allow_lock super(MockMemcache, self).__init__() @@ -2091,6 +2170,7 @@ class TestContainerController(unittest.TestCase): yield True else: raise MemcacheLockError() + with save_globals(): controller = proxy_server.ContainerController(self.app, 'account', 'container') @@ -2669,5 +2749,256 @@ class TestAccountController(unittest.TestCase): self.assertEquals(resp.status_int, 400) +class FakeObjectController(object): + + def __init__(self): + self.app = self + self.logger = self + self.account_name = 'a' + self.container_name = 'c' + self.object_name = 'o' + self.trans_id = 'tx1' + self.object_ring = FakeRing() + self.node_timeout = 1 + + def exception(self, *args): + self.exception_args = args + self.exception_info = sys.exc_info() + + def GETorHEAD_base(self, *args): + self.GETorHEAD_base_args = args + req = args[0] + path = args[4] + body = data = path[-1] * int(path[-1]) + if req.range and req.range.ranges: + body = '' + for start, stop in req.range.ranges: + body += data[start:stop] + resp = 
Response(app_iter=iter(body)) + return resp + + def iter_nodes(self, partition, nodes, ring): + for node in nodes: + yield node + for node in ring.get_more_nodes(partition): + yield node + + +class Stub(object): + pass + + +class TestSegmentedIterable(unittest.TestCase): + + def setUp(self): + self.controller = FakeObjectController() + + def test_load_next_segment_unexpected_error(self): + self.assertRaises(Exception, + proxy_server.SegmentedIterable(self.controller, None, + None)._load_next_segment) + self.assertEquals(self.controller.exception_args[0], + 'ERROR: While processing manifest /a/c/o tx1') + + def test_load_next_segment_with_no_segments(self): + self.assertRaises(StopIteration, + proxy_server.SegmentedIterable(self.controller, 'lc', + [])._load_next_segment) + + def test_load_next_segment_with_one_segment(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}]) + segit._load_next_segment() + self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o1') + data = ''.join(segit.segment_iter) + self.assertEquals(data, '1') + + def test_load_next_segment_with_two_segments(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}, {'name': 'o2'}]) + segit._load_next_segment() + self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o1') + data = ''.join(segit.segment_iter) + self.assertEquals(data, '1') + segit._load_next_segment() + self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2') + data = ''.join(segit.segment_iter) + self.assertEquals(data, '22') + + def test_load_next_segment_with_two_segments_skip_first(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}, {'name': 'o2'}]) + segit.segment = 0 + segit._load_next_segment() + self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2') + data = ''.join(segit.segment_iter) + self.assertEquals(data, '22') + + def test_load_next_segment_with_seek(self): + segit = 
proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}, {'name': 'o2'}]) + segit.segment = 0 + segit.seek = 1 + segit._load_next_segment() + self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2') + self.assertEquals(str(self.controller.GETorHEAD_base_args[0].range), + 'bytes=1-') + data = ''.join(segit.segment_iter) + self.assertEquals(data, '2') + + def test_load_next_segment_with_get_error(self): + + def local_GETorHEAD_base(*args): + return HTTPNotFound() + + self.controller.GETorHEAD_base = local_GETorHEAD_base + self.assertRaises(Exception, + proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}])._load_next_segment) + self.assertEquals(self.controller.exception_args[0], + 'ERROR: While processing manifest /a/c/o tx1') + self.assertEquals(str(self.controller.exception_info[1]), + 'Could not load object segment /a/lc/o1: 404') + + def test_iter_unexpected_error(self): + self.assertRaises(Exception, ''.join, + proxy_server.SegmentedIterable(self.controller, None, None)) + self.assertEquals(self.controller.exception_args[0], + 'ERROR: While processing manifest /a/c/o tx1') + + def test_iter_with_no_segments(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', []) + self.assertEquals(''.join(segit), '') + + def test_iter_with_one_segment(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}]) + segit.response = Stub() + self.assertEquals(''.join(segit), '1') + self.assertEquals(segit.response.bytes_transferred, 1) + + def test_iter_with_two_segments(self): + segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}, {'name': 'o2'}]) + segit.response = Stub() + self.assertEquals(''.join(segit), '122') + self.assertEquals(segit.response.bytes_transferred, 3) + + def test_iter_with_get_error(self): + + def local_GETorHEAD_base(*args): + return HTTPNotFound() + + self.controller.GETorHEAD_base = local_GETorHEAD_base + 
self.assertRaises(Exception, ''.join, + proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': + 'o1'}])) + self.assertEquals(self.controller.exception_args[0], + 'ERROR: While processing manifest /a/c/o tx1') + self.assertEquals(str(self.controller.exception_info[1]), + 'Could not load object segment /a/lc/o1: 404') + + def test_app_iter_range_unexpected_error(self): + self.assertRaises(Exception, + proxy_server.SegmentedIterable(self.controller, None, + None).app_iter_range(None, None).next) + self.assertEquals(self.controller.exception_args[0], + 'ERROR: While processing manifest /a/c/o tx1') + + def test_app_iter_range_with_no_segments(self): + self.assertEquals(''.join(proxy_server.SegmentedIterable( + self.controller, 'lc', []).app_iter_range(None, None)), '') + self.assertEquals(''.join(proxy_server.SegmentedIterable( + self.controller, 'lc', []).app_iter_range(3, None)), '') + self.assertEquals(''.join(proxy_server.SegmentedIterable( + self.controller, 'lc', []).app_iter_range(3, 5)), '') + self.assertEquals(''.join(proxy_server.SegmentedIterable( + self.controller, 'lc', []).app_iter_range(None, 5)), '') + + def test_app_iter_range_with_one_segment(self): + listing = [{'name': 'o1', 'bytes': 1}] + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, None)), '1') + self.assertEquals(segit.response.bytes_transferred, 1) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + self.assertEquals(''.join(segit.app_iter_range(3, None)), '') + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + self.assertEquals(''.join(segit.app_iter_range(3, 5)), '') + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, 5)), '1') + self.assertEquals(segit.response.bytes_transferred, 1) + + def 
test_app_iter_range_with_two_segments(self): + listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2}] + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, None)), '122') + self.assertEquals(segit.response.bytes_transferred, 3) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(1, None)), '22') + self.assertEquals(segit.response.bytes_transferred, 2) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(1, 5)), '22') + self.assertEquals(segit.response.bytes_transferred, 2) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, 2)), '12') + self.assertEquals(segit.response.bytes_transferred, 2) + + def test_app_iter_range_with_many_segments(self): + listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2}, + {'name': 'o3', 'bytes': 3}, {'name': 'o4', 'bytes': 4}, {'name': + 'o5', 'bytes': 5}] + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, None)), + '122333444455555') + self.assertEquals(segit.response.bytes_transferred, 15) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(3, None)), + '333444455555') + self.assertEquals(segit.response.bytes_transferred, 12) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(5, None)), '3444455555') + self.assertEquals(segit.response.bytes_transferred, 10) + + segit = proxy_server.SegmentedIterable(self.controller, 
'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, 6)), '122333') + self.assertEquals(segit.response.bytes_transferred, 6) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(None, 7)), '1223334') + self.assertEquals(segit.response.bytes_transferred, 7) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(3, 7)), '3334') + self.assertEquals(segit.response.bytes_transferred, 4) + + segit = proxy_server.SegmentedIterable(self.controller, 'lc', listing) + segit.response = Stub() + self.assertEquals(''.join(segit.app_iter_range(5, 7)), '34') + self.assertEquals(segit.response.bytes_transferred, 2) + + if __name__ == '__main__': unittest.main() From 4e0f0b01df33664f4be109ba030dee85bb98e5f1 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 18 Nov 2010 18:29:03 -0800 Subject: [PATCH 002/199] Basic working segmented upload --- bin/st | 121 ++++++++++++++++++++++++++++++++--------- swift/common/client.py | 27 +++++---- swift/proxy/server.py | 8 +++ 3 files changed, 121 insertions(+), 35 deletions(-) diff --git a/bin/st b/bin/st index 6a8b02bb37..b853aa2508 100755 --- a/bin/st +++ b/bin/st @@ -581,7 +581,8 @@ def put_object(url, token, container, name, contents, content_length=None, :param container: container name that the object is in :param name: object name to put :param contents: a string or a file like object to read object data from - :param content_length: value to send as content-length header + :param content_length: value to send as content-length header; also limits + the amount read from contents :param etag: etag of contents :param chunk_size: chunk size of data to write :param content_type: value to send as content-type header @@ -611,18 +612,24 @@ def put_object(url, token, container, name, contents, content_length=None, 
conn.putrequest('PUT', path) for header, value in headers.iteritems(): conn.putheader(header, value) - if not content_length: + if content_length is None: conn.putheader('Transfer-Encoding', 'chunked') - conn.endheaders() - chunk = contents.read(chunk_size) - while chunk: - if not content_length: - conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) - else: - conn.send(chunk) + conn.endheaders() chunk = contents.read(chunk_size) - if not content_length: + while chunk: + conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) + chunk = contents.read(chunk_size) conn.send('0\r\n\r\n') + else: + conn.endheaders() + left = content_length + while left > 0: + size = chunk_size + if size > left: + size = left + chunk = contents.read(size) + conn.send(chunk) + left -= len(chunk) else: conn.request('PUT', path, contents, headers) resp = conn.getresponse() @@ -862,7 +869,10 @@ class QueueFunctionThread(Thread): st_delete_help = ''' delete --all OR delete container [object] [object] ... Deletes everything in the account (with --all), or everything in a - container, or a list of objects depending on the args given.'''.strip('\n') + container, or a list of objects depending on the args given. 
Note that + object segments won't be deleted unless you're deleting everything in the + account or specifically deleting the segments' container or object + names.'''.strip('\n') def st_delete(parser, args, print_queue, error_queue): @@ -1016,6 +1026,7 @@ def st_download(options, args, print_queue, error_queue): conn.get_object(container, obj, resp_chunk_size=65536) content_type = headers.get('content-type') content_length = int(headers.get('content-length')) + manifest = headers.get('x-object-manifest') etag = headers.get('etag') path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): @@ -1024,10 +1035,12 @@ def st_download(options, args, print_queue, error_queue): if not isdir(path): mkdirs(path) read_length = 0 - md5sum = md5() + if not manifest: + md5sum = md5() for chunk in body: read_length += len(chunk) - md5sum.update(chunk) + if not manifest: + md5sum.update(chunk) else: dirpath = dirname(path) if dirpath and not isdir(dirpath): @@ -1039,13 +1052,15 @@ def st_download(options, args, print_queue, error_queue): else: fp = open(path, 'wb') read_length = 0 - md5sum = md5() + if not manifest: + md5sum = md5() for chunk in body: fp.write(chunk) read_length += len(chunk) - md5sum.update(chunk) + if not manifest: + md5sum.update(chunk) fp.close() - if md5sum.hexdigest() != etag: + if not manifest and md5sum.hexdigest() != etag: error_queue.put('%s: md5sum != etag, %s != %s' % (path, md5sum.hexdigest(), etag)) if read_length != content_length: @@ -1266,6 +1281,9 @@ Content Length: %s headers.get('content-length'), headers.get('last-modified'), headers.get('etag'))) + if 'x-object-manifest' in headers: + print_queue.put(' Manifest: %s' % + headers['x-object-manifest']) for key, value in headers.items(): if key.startswith('x-object-meta-'): print_queue.put('%14s: %s' % ('Meta %s' % @@ -1273,7 +1291,7 @@ Content Length: %s for key, value in headers.items(): if not key.startswith('x-object-meta-') and key not in ( 'content-type', 
'content-length', 'last-modified', - 'etag', 'date'): + 'etag', 'date', 'x-object-manifest'): print_queue.put( '%14s: %s' % (key.title(), value)) except ClientException, err: @@ -1362,22 +1380,41 @@ st_upload_help = ''' upload [options] container file_or_directory [file_or_directory] [...] Uploads to the given container the files and directories specified by the remaining args. -c or --changed is an option that will only upload files - that have changed since the last upload.'''.strip('\n') + that have changed since the last upload. -S <size> or --segment-size <size> + is an option that will upload files in segments no larger than <size> and + then create a "manifest" file that will download all the segments as if it + were the original file. The segments will be uploaded to a + <container>_segments container so as to not pollute the main <container> + listings.'''.strip('\n') def st_upload(options, args, print_queue, error_queue): parser.add_option('-c', '--changed', action='store_true', dest='changed', default=False, help='Will only upload files that have changed since ' 'the last upload') + parser.add_option('-S', '--segment-size', dest='segment_size', help='Will ' + 'upload files in segments no larger than <size> and then create a ' + '"manifest" file that will download all the segments as if it were ' + 'the original file. 
The segments will be uploaded to a ' + '_segments container so as to not pollute the main ' + ' listings.') (options, args) = parse_args(parser, args) args = args[1:] if len(args) < 2: error_queue.put('Usage: %s [options] %s' % (basename(argv[0]), st_upload_help)) return - file_queue = Queue(10000) + def _upload_segment((path, obj, segment_start, segment_size, log_line), + conn): + fp = open(path, 'rb') + fp.seek(segment_start) + conn.put_object(args[0] + '_segments', obj, fp, + content_length=segment_size) + if options.verbose: + print_queue.put(log_line) + def _upload_file((path, dir_marker), conn): try: obj = path @@ -1415,9 +1452,41 @@ def st_upload(options, args, print_queue, error_queue): except ClientException, err: if err.http_status != 404: raise - conn.put_object(args[0], obj, open(path, 'rb'), - content_length=getsize(path), - headers=put_headers) + if options.segment_size and \ + getsize(path) < options.segment_size: + full_size = getsize(path) + segment_queue = Queue(10000) + segment_threads = [QueueFunctionThread(segment_queue, + _upload_segment, create_connection()) for _ in + xrange(10)] + for thread in segment_threads: + thread.start() + segment = 0 + segment_start = 0 + while segment_start < full_size: + segment_size = int(options.segment_size) + if segment_start + segment_size > full_size: + segment_size = full_size - segment_start + segment_queue.put((path, '%s/%s/%s/%08d' % (obj, + put_headers['x-object-meta-mtime'], full_size, + segment), segment_start, segment_size, + '%s segment %s' % (obj, segment))) + segment += 1 + segment_start += segment_size + while not segment_queue.empty(): + sleep(0.01) + for thread in segment_threads: + thread.abort = True + while thread.isAlive(): + thread.join(0.01) + put_headers['x-object-manifest'] = \ + '%s_segments/%s/%s/%s/' % (args[0], obj, + put_headers['x-object-meta-mtime'], full_size) + conn.put_object(args[0], obj, '', content_length=0, + headers=put_headers) + else: + conn.put_object(args[0], obj, 
open(path, 'rb'), + content_length=getsize(path), headers=put_headers) if options.verbose: print_queue.put(obj) except OSError, err: @@ -1428,14 +1497,14 @@ def st_upload(options, args, print_queue, error_queue): def _upload_dir(path): names = listdir(path) if not names: - file_queue.put((path, True)) # dir_marker = True + file_queue.put((path, True)) # dir_marker = True else: for name in listdir(path): subpath = join(path, name) if isdir(subpath): _upload_dir(subpath) else: - file_queue.put((subpath, False)) # dir_marker = False + file_queue.put((subpath, False)) # dir_marker = False url, token = get_auth(options.auth, options.user, options.key, snet=options.snet) @@ -1452,6 +1521,8 @@ def st_upload(options, args, print_queue, error_queue): # it'll surface on the first object PUT. try: conn.put_container(args[0]) + if options.segment_size is not None: + conn.put_container(args[0] + '_segments') except: pass try: @@ -1459,7 +1530,7 @@ def st_upload(options, args, print_queue, error_queue): if isdir(arg): _upload_dir(arg) else: - file_queue.put((arg, False)) # dir_marker = False + file_queue.put((arg, False)) # dir_marker = False while not file_queue.empty(): sleep(0.01) for thread in file_threads: diff --git a/swift/common/client.py b/swift/common/client.py index b89d06aa66..1acba8cb37 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -569,7 +569,8 @@ def put_object(url, token, container, name, contents, content_length=None, :param container: container name that the object is in :param name: object name to put :param contents: a string or a file like object to read object data from - :param content_length: value to send as content-length header + :param content_length: value to send as content-length header; also limits + the amount read from contents :param etag: etag of contents :param chunk_size: chunk size of data to write :param content_type: value to send as content-type header @@ -599,18 +600,24 @@ def put_object(url, token, container, 
name, contents, content_length=None, conn.putrequest('PUT', path) for header, value in headers.iteritems(): conn.putheader(header, value) - if not content_length: + if content_length is None: conn.putheader('Transfer-Encoding', 'chunked') - conn.endheaders() - chunk = contents.read(chunk_size) - while chunk: - if not content_length: - conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) - else: - conn.send(chunk) + conn.endheaders() chunk = contents.read(chunk_size) - if not content_length: + while chunk: + conn.send('%x\r\n%s\r\n' % (len(chunk), chunk)) + chunk = contents.read(chunk_size) conn.send('0\r\n\r\n') + else: + conn.endheaders() + left = content_length + while left > 0: + size = chunk_size + if size > left: + size = left + chunk = contents.read(size) + conn.send(chunk) + left -= len(chunk) else: conn.request('PUT', path, contents, headers) resp = conn.getresponse() diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 4265754390..65ef0d055b 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -688,6 +688,11 @@ class ObjectController(Controller): (self.account_name, lcontainer, lprefix)) lresp = self.GETorHEAD_base(lreq, 'Container', lpartition, lnodes, lreq.path_info, self.app.container_ring.replica_count) + if lresp.status_int // 100 != 2: + lresp = HTTPNotFound(request=req) + lresp.headers['X-Object-Manifest'] = \ + resp.headers['x-object-manifest'] + return lresp if 'swift.authorize' in req.environ: req.acl = lresp.headers.get('x-container-read') aresp = req.environ['swift.authorize'](req) @@ -699,6 +704,9 @@ class ObjectController(Controller): headers = {'X-Object-Manifest': resp.headers['x-object-manifest'], 'Content-Type': resp.content_type, 'Content-Length': content_length, 'ETag': etag} + for key, value in resp.headers.iteritems(): + if key.lower().startswith('x-object-meta-'): + headers[key] = value resp = Response(app_iter=SegmentedIterable(self, lcontainer, listing), headers=headers, request=req, conditional_response=True) 
From df3762bd2c6ea809131c3ee7f5c37a3a6752295c Mon Sep 17 00:00:00 2001 From: gholt Date: Fri, 19 Nov 2010 13:01:28 -0800 Subject: [PATCH 003/199] st overwrites of manifests now clean up the old segments --- bin/st | 117 ++++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 74 insertions(+), 43 deletions(-) diff --git a/bin/st b/bin/st index b853aa2508..d04d3bf54b 100755 --- a/bin/st +++ b/bin/st @@ -1026,7 +1026,6 @@ def st_download(options, args, print_queue, error_queue): conn.get_object(container, obj, resp_chunk_size=65536) content_type = headers.get('content-type') content_length = int(headers.get('content-length')) - manifest = headers.get('x-object-manifest') etag = headers.get('etag') path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): @@ -1035,11 +1034,13 @@ def st_download(options, args, print_queue, error_queue): if not isdir(path): mkdirs(path) read_length = 0 - if not manifest: + if 'x-object-manifest' not in headers: md5sum = md5() + else: + md5sum = None for chunk in body: read_length += len(chunk) - if not manifest: + if md5sum: md5sum.update(chunk) else: dirpath = dirname(path) @@ -1052,15 +1053,15 @@ def st_download(options, args, print_queue, error_queue): else: fp = open(path, 'wb') read_length = 0 - if not manifest: + if 'x-object-manifest' not in headers: md5sum = md5() for chunk in body: fp.write(chunk) read_length += len(chunk) - if not manifest: + if md5sum: md5sum.update(chunk) fp.close() - if not manifest and md5sum.hexdigest() != etag: + if md5sum and md5sum.hexdigest() != etag: error_queue.put('%s: md5sum != etag, %s != %s' % (path, md5sum.hexdigest(), etag)) if read_length != content_length: @@ -1404,18 +1405,23 @@ def st_upload(options, args, print_queue, error_queue): error_queue.put('Usage: %s [options] %s' % (basename(argv[0]), st_upload_help)) return - file_queue = Queue(10000) + object_queue = Queue(10000) - def _upload_segment((path, obj, segment_start, segment_size, log_line), 
- conn): - fp = open(path, 'rb') - fp.seek(segment_start) - conn.put_object(args[0] + '_segments', obj, fp, - content_length=segment_size) - if options.verbose: - print_queue.put(log_line) + def _segment_job(job, conn): + if job.get('delete', False): + conn.delete_object(job['container'], job['obj']) + else: + fp = open(job['path'], 'rb') + fp.seek(job['segment_start']) + conn.put_object(job.get('container', args[0] + '_segments'), + job['obj'], fp, content_length=job['segment_size']) + if options.verbose and 'log_line' in job: + print_queue.put(job['log_line']) - def _upload_file((path, dir_marker), conn): + def _object_job(job, conn): + path = job['path'] + container = job.get('container', args[0]) + dir_marker = job.get('dir_marker', False) try: obj = path if obj.startswith('./') or obj.startswith('.\\'): @@ -1424,7 +1430,7 @@ def st_upload(options, args, print_queue, error_queue): if dir_marker: if options.changed: try: - headers = conn.head_object(args[0], obj) + headers = conn.head_object(container, obj) ct = headers.get('content-type') cl = int(headers.get('content-length')) et = headers.get('etag') @@ -1437,28 +1443,31 @@ def st_upload(options, args, print_queue, error_queue): except ClientException, err: if err.http_status != 404: raise - conn.put_object(args[0], obj, '', content_length=0, + conn.put_object(container, obj, '', content_length=0, content_type='text/directory', headers=put_headers) else: - if options.changed: - try: - headers = conn.head_object(args[0], obj) - cl = int(headers.get('content-length')) - mt = headers.get('x-object-meta-mtime') - if cl == getsize(path) and \ - mt == put_headers['x-object-meta-mtime']: - return - except ClientException, err: - if err.http_status != 404: - raise + # We need to HEAD all objects now in case we're overwriting a + # manifest object and need to delete the old segments + # ourselves. 
+ old_manifest = None + try: + headers = conn.head_object(container, obj) + cl = int(headers.get('content-length')) + mt = headers.get('x-object-meta-mtime') + if options.changed and cl == getsize(path) and \ + mt == put_headers['x-object-meta-mtime']: + return + old_manifest = headers.get('x-object-manifest') + except ClientException, err: + if err.http_status != 404: + raise if options.segment_size and \ getsize(path) < options.segment_size: full_size = getsize(path) segment_queue = Queue(10000) segment_threads = [QueueFunctionThread(segment_queue, - _upload_segment, create_connection()) for _ in - xrange(10)] + _segment_job, create_connection()) for _ in xrange(10)] for thread in segment_threads: thread.start() segment = 0 @@ -1467,10 +1476,13 @@ def st_upload(options, args, print_queue, error_queue): segment_size = int(options.segment_size) if segment_start + segment_size > full_size: segment_size = full_size - segment_start - segment_queue.put((path, '%s/%s/%s/%08d' % (obj, - put_headers['x-object-meta-mtime'], full_size, - segment), segment_start, segment_size, - '%s segment %s' % (obj, segment))) + segment_queue.put({'path': path, + 'obj': '%s/%s/%s/%08d' % (obj, + put_headers['x-object-meta-mtime'], full_size, + segment), + 'segment_start': segment_start, + 'segment_size': segment_size, + 'log_line': '%s segment %s' % (obj, segment)}) segment += 1 segment_start += segment_size while not segment_queue.empty(): @@ -1480,13 +1492,32 @@ def st_upload(options, args, print_queue, error_queue): while thread.isAlive(): thread.join(0.01) put_headers['x-object-manifest'] = \ - '%s_segments/%s/%s/%s/' % (args[0], obj, + '%s_segments/%s/%s/%s/' % (container, obj, put_headers['x-object-meta-mtime'], full_size) - conn.put_object(args[0], obj, '', content_length=0, + conn.put_object(container, obj, '', content_length=0, headers=put_headers) else: - conn.put_object(args[0], obj, open(path, 'rb'), + conn.put_object(container, obj, open(path, 'rb'), 
content_length=getsize(path), headers=put_headers) + if old_manifest: + segment_queue = Queue(10000) + scontainer, sprefix = old_manifest.split('/', 1) + for delobj in conn.get_container(scontainer, + prefix=sprefix)[1]: + segment_queue.put({'delete': True, + 'container': scontainer, 'obj': delobj['name']}) + if not segment_queue.empty(): + segment_threads = [QueueFunctionThread(segment_queue, + _segment_job, create_connection()) for _ in + xrange(10)] + for thread in segment_threads: + thread.start() + while not segment_queue.empty(): + sleep(0.01) + for thread in segment_threads: + thread.abort = True + while thread.isAlive(): + thread.join(0.01) if options.verbose: print_queue.put(obj) except OSError, err: @@ -1497,20 +1528,20 @@ def st_upload(options, args, print_queue, error_queue): def _upload_dir(path): names = listdir(path) if not names: - file_queue.put((path, True)) # dir_marker = True + object_queue.put({'path': path, 'dir_marker': True}) else: for name in listdir(path): subpath = join(path, name) if isdir(subpath): _upload_dir(subpath) else: - file_queue.put((subpath, False)) # dir_marker = False + object_queue.put({'path': subpath}) url, token = get_auth(options.auth, options.user, options.key, snet=options.snet) create_connection = lambda: Connection(options.auth, options.user, options.key, preauthurl=url, preauthtoken=token, snet=options.snet) - file_threads = [QueueFunctionThread(file_queue, _upload_file, + file_threads = [QueueFunctionThread(object_queue, _object_job, create_connection()) for _ in xrange(10)] for thread in file_threads: thread.start() @@ -1530,8 +1561,8 @@ def st_upload(options, args, print_queue, error_queue): if isdir(arg): _upload_dir(arg) else: - file_queue.put((arg, False)) # dir_marker = False - while not file_queue.empty(): + object_queue.put({'path': arg}) + while not object_queue.empty(): sleep(0.01) for thread in file_threads: thread.abort = True From 598c544eddef7d382e88969a7b820e25a2c33e0b Mon Sep 17 00:00:00 2001 From: 
gholt Date: Fri, 19 Nov 2010 14:50:35 -0800 Subject: [PATCH 004/199] st delete will delete manifest segments as well; added --leave-segments option to override such behavior --- bin/st | 90 ++++++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 27 deletions(-) diff --git a/bin/st b/bin/st index d04d3bf54b..a0541f1d88 100755 --- a/bin/st +++ b/bin/st @@ -867,18 +867,20 @@ class QueueFunctionThread(Thread): st_delete_help = ''' -delete --all OR delete container [object] [object] ... +delete --all OR delete container [--leave-segments] [object] [object] ... Deletes everything in the account (with --all), or everything in a - container, or a list of objects depending on the args given. Note that - object segments won't be deleted unless you're deleting everything in the - account or specifically deleting the segments' container or object - names.'''.strip('\n') + container, or a list of objects depending on the args given. Segments of + manifest objects will be deleted as well, unless you specify the + --leave-segments option.'''.strip('\n') def st_delete(parser, args, print_queue, error_queue): parser.add_option('-a', '--all', action='store_true', dest='yes_all', default=False, help='Indicates that you really want to delete ' 'everything in the account') + parser.add_option('', '--leave-segments', action='store_true', + dest='leave_segments', default=False, help='Indicates that you want ' + 'the segments of manifest objects left alone') (options, args) = parse_args(parser, args) args = args[1:] if (not args and not options.yes_all) or (args and options.yes_all): @@ -886,11 +888,42 @@ def st_delete(parser, args, print_queue, error_queue): (basename(argv[0]), st_delete_help)) return + def _delete_segment((container, obj), conn): + conn.delete_object(container, obj) + if options.verbose: + print_queue.put('%s/%s' % (container, obj)) + object_queue = Queue(10000) def _delete_object((container, obj), conn): try: + old_manifest = None + 
if not options.leave_segments: + try: + old_manifest = conn.head_object(container, obj).get( + 'x-object-manifest') + except ClientException, err: + if err.http_status != 404: + raise conn.delete_object(container, obj) + if old_manifest: + segment_queue = Queue(10000) + scontainer, sprefix = old_manifest.split('/', 1) + for delobj in conn.get_container(scontainer, + prefix=sprefix)[1]: + segment_queue.put((scontainer, delobj['name'])) + if not segment_queue.empty(): + segment_threads = [QueueFunctionThread(segment_queue, + _delete_segment, create_connection()) for _ in + xrange(10)] + for thread in segment_threads: + thread.start() + while not segment_queue.empty(): + sleep(0.01) + for thread in segment_threads: + thread.abort = True + while thread.isAlive(): + thread.join(0.01) if options.verbose: path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): @@ -901,6 +934,7 @@ def st_delete(parser, args, print_queue, error_queue): raise error_queue.put('Object %s not found' % repr('%s/%s' % (container, obj))) + container_queue = Queue(10000) def _delete_container(container, conn): @@ -986,7 +1020,7 @@ def st_delete(parser, args, print_queue, error_queue): st_download_help = ''' -download --all OR download container [object] [object] ... +download --all OR download container [options] [object] [object] ... Downloads everything in the account (with --all), or everything in a container, or a list of objects depending on the args given. 
For a single object download, you may use the -o [--output] option to @@ -1030,14 +1064,13 @@ def st_download(options, args, print_queue, error_queue): path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): path = path[1:] + md5sum = None if content_type.split(';', 1)[0] == 'text/directory': if not isdir(path): mkdirs(path) read_length = 0 if 'x-object-manifest' not in headers: md5sum = md5() - else: - md5sum = None for chunk in body: read_length += len(chunk) if md5sum: @@ -1382,11 +1415,8 @@ upload [options] container file_or_directory [file_or_directory] [...] Uploads to the given container the files and directories specified by the remaining args. -c or --changed is an option that will only upload files that have changed since the last upload. -S or --segment-size - is an option that will upload files in segments no larger than and - then create a "manifest" file that will download all the segments as if it - were the original file. The segments will be uploaded to a - _segments container so as to not pollute the main - listings.'''.strip('\n') + and --leave-segments are options as well (see --help for more). +'''.strip('\n') def st_upload(options, args, print_queue, error_queue): @@ -1399,6 +1429,10 @@ def st_upload(options, args, print_queue, error_queue): 'the original file. The segments will be uploaded to a ' '_segments container so as to not pollute the main ' ' listings.') + parser.add_option('', '--leave-segments', action='store_true', + dest='leave_segments', default=False, help='Indicates that you want ' + 'the older segments of manifest objects left alone (in the case of ' + 'overwrites)') (options, args) = parse_args(parser, args) args = args[1:] if len(args) < 2: @@ -1451,17 +1485,19 @@ def st_upload(options, args, print_queue, error_queue): # manifest object and need to delete the old segments # ourselves. 
old_manifest = None - try: - headers = conn.head_object(container, obj) - cl = int(headers.get('content-length')) - mt = headers.get('x-object-meta-mtime') - if options.changed and cl == getsize(path) and \ - mt == put_headers['x-object-meta-mtime']: - return - old_manifest = headers.get('x-object-manifest') - except ClientException, err: - if err.http_status != 404: - raise + if options.changed or not options.leave_segments: + try: + headers = conn.head_object(container, obj) + cl = int(headers.get('content-length')) + mt = headers.get('x-object-meta-mtime') + if options.changed and cl == getsize(path) and \ + mt == put_headers['x-object-meta-mtime']: + return + if not options.leave_segments: + old_manifest = headers.get('x-object-manifest') + except ClientException, err: + if err.http_status != 404: + raise if options.segment_size and \ getsize(path) < options.segment_size: full_size = getsize(path) @@ -1541,9 +1577,9 @@ def st_upload(options, args, print_queue, error_queue): snet=options.snet) create_connection = lambda: Connection(options.auth, options.user, options.key, preauthurl=url, preauthtoken=token, snet=options.snet) - file_threads = [QueueFunctionThread(object_queue, _object_job, + object_threads = [QueueFunctionThread(object_queue, _object_job, create_connection()) for _ in xrange(10)] - for thread in file_threads: + for thread in object_threads: thread.start() conn = create_connection() # Try to create the container, just in case it doesn't exist. 
If this @@ -1564,7 +1600,7 @@ def st_upload(options, args, print_queue, error_queue): object_queue.put({'path': arg}) while not object_queue.empty(): sleep(0.01) - for thread in file_threads: + for thread in object_threads: thread.abort = True while thread.isAlive(): thread.join(0.01) From 1fa4ba38e554535ba0359b244d050e7795400c42 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 23 Nov 2010 14:26:48 -0800 Subject: [PATCH 005/199] Documentation of the manifest/segments feature --- doc/source/index.rst | 1 + doc/source/overview_large_objects.rst | 121 ++++++++++++++++++++++++++ 2 files changed, 122 insertions(+) create mode 100644 doc/source/overview_large_objects.rst diff --git a/doc/source/index.rst b/doc/source/index.rst index 9b20293921..de07c132ea 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -44,6 +44,7 @@ Overview and Concepts overview_replication overview_stats ratelimit + overview_large_objects Developer Documentation ======================= diff --git a/doc/source/overview_large_objects.rst b/doc/source/overview_large_objects.rst new file mode 100644 index 0000000000..660c741e75 --- /dev/null +++ b/doc/source/overview_large_objects.rst @@ -0,0 +1,121 @@ +==================== +Large Object Support +==================== + +-------- +Overview +-------- + +Swift has a limit on the size of a single uploaded object; by default this is +5GB. However, the download size of a single object is virtually unlimited with +the concept of segmentation. Segments of the larger object are uploaded and a +special manifest file is created that, when downloaded, sends all the segments +concatenated as a single object. This also offers much greater upload speed +with the possibility of parallel uploads of the segments. + +---------------------------------- +Using ``st`` for Segmented Objects +---------------------------------- + +The quickest way to try out this feature is use the included ``st`` Swift Tool. 
+You can use the ``-S`` option to specify the segment size to use when splitting +a large file. For example:: + + st upload test_container -S 1073741824 large_file + +This would split the large_file into 1G segments and begin uploading those +segments in parallel. Once all the segments have been uploaded, ``st`` will +then create the manifest file so the segments can be downloaded as one. + +So now, the following ``st`` command would download the entire large object:: + + st download test_container large_file + +``st`` uses a strict convention for its segmented object support. In the above +example it will upload all the segments into a second container named +test_container_segments. These segments will have names like +large_file/1290206778.25/21474836480/00000000, +large_file/1290206778.25/21474836480/00000001, etc. + +The main benefit for using a separate container is that the main container +listings will not be polluted with all the segment names. The reason for using +the segment name format of /// is so that an +upload of a new file with the same name won't overwrite the contents of the +first until the last moment when the manifest file is updated. + +``st`` will manage these segment files for you, deleting old segments on +deletes and overwrites, etc. You can override this behavior with the +``--leave-segments`` option if desired; this is useful if you want to have +multiple versions of the same large object available. + +---------- +Direct API +---------- + +You can also work with the segments and manifests directly with HTTP requests +instead of having ``st`` do that for you. You can just upload the segments like +you would any other object and the manifest is just a zero-byte file with an +extra ``X-Object-Manifest`` header. + +All the object segments need to be in the same container, have a common object +name prefix, and their names sort in the order they should be concatenated. 
+They don't have to be in the same container as the manifest file will be, which +is useful to keep container listings clean as explained above with ``st``. + +The manifest file is simply a zero-byte file with the extra +``X-Object-Manifest: /`` header, where ```` is +the container the object segments are in and ```` is the common prefix +for all the segments. + +It is best to upload all the segments first and then create or update the +manifest. In this way, the full object won't be available for downloading until +the upload is complete. Also, you can upload a new set of segments to a second +location and then update the manifest to point to this new location. During the +upload of the new segments, the original manifest will still be available to +download the first set of segments. + +Here's an example using ``curl`` with tiny 1-byte segments:: + + # First, upload the segments + curl -X PUT -H 'X-Auth-Token: ' \ + http:///container/myobject/1 --data-binary '1' + curl -X PUT -H 'X-Auth-Token: ' \ + http:///container/myobject/2 --data-binary '2' + curl -X PUT -H 'X-Auth-Token: ' \ + http:///container/myobject/3 --data-binary '3' + + # Next, create the manifest file + curl -X PUT -H 'X-Auth-Token: ' \ + -H 'X-Object-Manifest: container/myobject/' \ + http:///container/myobject --data-binary '' + + # And now we can download the segments as a single object + curl -H 'X-Auth-Token: ' \ + http:///container/myobject + +---------------- +Additional Notes +---------------- + +* With a ``GET`` or ``HEAD`` of a manifest file, the ``X-Object-Manifest: + /`` header will be returned with the concatenated object + so you can tell where it's getting its segments from. + +* The response's ``Content-Length`` for a ``GET`` or ``HEAD`` on the manifest + file will be the sum of all the segments in the ``/`` + listing, dynamically. 
So, uploading additional segments after the manifest is + created will cause the concatenated object to be that much larger; there's no + need to recreate the manifest file. + +* The response's ``Content-Type`` for a ``GET`` or ``HEAD`` on the manifest + will be the same as the ``Content-Type`` set during the ``PUT`` request that + created the manifest. You can easily change the ``Content-Type`` by reissuing + the ``PUT``. + +* The response's ``ETag`` for a ``GET`` or ``HEAD`` on the manifest file will + be the MD5 sum of the concatenated string of ETags for each of the segments + in the ``/`` listing, dynamically. Usually in Swift the + ETag is the MD5 sum of the contents of the object, and that holds true for + each segment independently. But, it's not feasible to generate such an ETag + for the manifest itself, so this method was chosen to at least offer change + detection. From 512e5e2d79ac950381c86d38b6f3735a9083f5ae Mon Sep 17 00:00:00 2001 From: gholt Date: Mon, 29 Nov 2010 12:52:14 -0800 Subject: [PATCH 006/199] Added history section for large object support docs --- doc/source/overview_large_objects.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/doc/source/overview_large_objects.rst b/doc/source/overview_large_objects.rst index 660c741e75..333b6cde55 100644 --- a/doc/source/overview_large_objects.rst +++ b/doc/source/overview_large_objects.rst @@ -119,3 +119,20 @@ Additional Notes each segment independently. But, it's not feasible to generate such an ETag for the manifest itself, so this method was chosen to at least offer change detection. + +------- +History +------- + +Large object support has gone through various iterations before settling on +this implementation. This approach has the drawback that the eventual +consistency window of the container listings can cause a GET on the manifest +object to return an invalid whole object for that short term. 
+ +We also implemented fully transparent support within the server, but the +drawbacks there were added complexity within the cluster, no option to do +parallel uploads, and no basis for a resume feature. + +We considered implementing both the "user manifest" option we have now and the +"transparent server manifest" option, but the second was deemed just to complex +for the benefit. From 197f343ddb5851371d96ec552d52aff1174e9705 Mon Sep 17 00:00:00 2001 From: gholt Date: Mon, 29 Nov 2010 13:07:30 -0800 Subject: [PATCH 007/199] Fixed bug dfg found in st --- bin/st | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/bin/st b/bin/st index a0541f1d88..40dd7e079d 100755 --- a/bin/st +++ b/bin/st @@ -1527,9 +1527,12 @@ def st_upload(options, args, print_queue, error_queue): thread.abort = True while thread.isAlive(): thread.join(0.01) - put_headers['x-object-manifest'] = \ - '%s_segments/%s/%s/%s/' % (container, obj, - put_headers['x-object-meta-mtime'], full_size) + new_object_manifest = '%s_segments/%s/%s/%s/' % ( + container, obj, put_headers['x-object-meta-mtime'], + full_size) + if old_manifest == new_object_manifest: + old_manifest = None + put_headers['x-object-manifest'] = new_object_manifest conn.put_object(container, obj, '', content_length=0, headers=put_headers) else: From 27741ba25bc95320e268598a6abc5f4e72ee8639 Mon Sep 17 00:00:00 2001 From: Greg Lange Date: Tue, 30 Nov 2010 22:40:44 +0000 Subject: [PATCH 008/199] makes account and container info caching in proxy better --- swift/proxy/server.py | 46 ++++---- test/unit/proxy/test_server.py | 185 +++++++++++++++++++++++++-------- 2 files changed, 168 insertions(+), 63 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 38bb3966a6..d66612484d 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -88,10 +88,11 @@ def delay_denial(func): return func(*a, **kw) return wrapped +def get_account_memcache_key(account): + return 'account/%s' % account def 
get_container_memcache_key(account, container): - path = '/%s/%s' % (account, container) - return 'container%s' % path + return 'container/%s/%s' % (account, container) class Controller(object): @@ -176,13 +177,17 @@ class Controller(object): if it does not exist """ partition, nodes = self.app.account_ring.get_nodes(account) - path = '/%s' % account - cache_key = 'account%s' % path # 0 = no responses, 200 = found, 404 = not found, -1 = mixed responses - if self.app.memcache and self.app.memcache.get(cache_key) == 200: - return partition, nodes + if self.app.memcache: + cache_key = get_account_memcache_key(account) + result_code = self.app.memcache.get(cache_key) + if result_code == 200: + return partition, nodes + elif result_code == 404: + return None, None result_code = 0 attempts_left = self.app.account_ring.replica_count + path = '/%s' % account headers = {'x-cf-trans-id': self.trans_id} for node in self.iter_nodes(partition, nodes, self.app.account_ring): if self.error_limited(node): @@ -213,16 +218,16 @@ class Controller(object): except: self.exception_occurred(node, 'Account', 'Trying to get account info for %s' % path) - if result_code == 200: - cache_timeout = self.app.recheck_account_existence - else: - cache_timeout = self.app.recheck_account_existence * 0.1 - if self.app.memcache: + if self.app.memcache and result_code in (200, 404): + if result_code == 200: + cache_timeout = self.app.recheck_account_existence + else: + cache_timeout = self.app.recheck_account_existence * 0.1 self.app.memcache.set(cache_key, result_code, timeout=cache_timeout) if result_code == 200: return partition, nodes - return (None, None) + return None, None def container_info(self, account, container): """ @@ -239,7 +244,6 @@ class Controller(object): partition, nodes = self.app.container_ring.get_nodes( account, container) path = '/%s/%s' % (account, container) - cache_key = None if self.app.memcache: cache_key = get_container_memcache_key(account, container) cache_value = 
self.app.memcache.get(cache_key) @@ -249,8 +253,10 @@ class Controller(object): write_acl = cache_value['write_acl'] if status == 200: return partition, nodes, read_acl, write_acl + elif status == 404: + return None, None, None, None if not self.account_info(account)[1]: - return (None, None, None, None) + return None, None, None, None result_code = 0 read_acl = None write_acl = None @@ -290,11 +296,11 @@ class Controller(object): except: self.exception_occurred(node, 'Container', 'Trying to get container info for %s' % path) - if result_code == 200: - cache_timeout = self.app.recheck_container_existence - else: - cache_timeout = self.app.recheck_container_existence * 0.1 - if cache_key and self.app.memcache: + if self.app.memcache and result_code in (200, 404): + if result_code == 200: + cache_timeout = self.app.recheck_container_existence + else: + cache_timeout = self.app.recheck_container_existence * 0.1 self.app.memcache.set(cache_key, {'status': result_code, 'read_acl': read_acl, @@ -303,7 +309,7 @@ class Controller(object): timeout=cache_timeout) if result_code == 200: return partition, nodes, read_acl, write_acl - return (None, None, None, None) + return None, None, None, None def iter_nodes(self, partition, nodes, ring): """ diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index ebf320fc71..393061bd8a 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -87,6 +87,8 @@ def fake_http_connect(*code_iter, **kwargs): pass if 'slow' in kwargs: headers['content-length'] = '4' + if 'headers' in kwargs: + headers.update(kwargs['headers']) return headers.items() def read(self, amt=None): @@ -163,6 +165,9 @@ class FakeMemcache(object): def get(self, key): return self.store.get(key) + def keys(self): + return self.store.keys() + def set(self, key, value, timeout=0): self.store[key] = value return True @@ -200,10 +205,12 @@ class NullLoggingHandler(logging.Handler): @contextmanager def save_globals(): 
orig_http_connect = getattr(proxy_server, 'http_connect', None) + orig_account_info = getattr(proxy_server.Controller, 'account_info', None) try: yield True finally: proxy_server.http_connect = orig_http_connect + proxy_server.Controller.account_info = orig_account_info # tests @@ -211,63 +218,155 @@ class TestController(unittest.TestCase): def setUp(self): self.account_ring = FakeRing() + self.container_ring = FakeRing() + self.memcache = FakeMemcache() - app = proxy_server.Application(None, FakeMemcache(), - account_ring=self.account_ring, container_ring=FakeRing(), + app = proxy_server.Application(None, self.memcache, + account_ring=self.account_ring, + container_ring=self.container_ring, object_ring=FakeRing()) self.controller = proxy_server.Controller(app) - def check_account_info_return(self, account, partition, nodes): - p, n = self.account_ring.get_nodes(account) + self.account = 'some_account' + self.container = 'some_container' + self.read_acl = 'read_acl' + self.write_acl = 'write_acl' + + def check_account_info_return(self, partition, nodes, is_none=False): + if is_none: + p, n = None, None + else: + p, n = self.account_ring.get_nodes(self.account) self.assertEqual(p, partition) self.assertEqual(n, nodes) - def test_account_info_404_200(self): - account = 'test_account_info_404_200' - - with save_globals(): - proxy_server.http_connect = fake_http_connect(404, 404, 404) - partition, nodes = self.controller.account_info(account) - self.assertEqual(partition, None) - self.assertEqual(nodes, None) - - proxy_server.http_connect = fake_http_connect(200) - partition, nodes = self.controller.account_info(account) - self.check_account_info_return(account, partition, nodes) - - def test_account_info_404(self): - account = 'test_account_info_404' - - with save_globals(): - proxy_server.http_connect = fake_http_connect(404, 404, 404) - partition, nodes = self.controller.account_info(account) - self.assertEqual(partition, None) - self.assertEqual(nodes, None) - - 
proxy_server.http_connect = fake_http_connect(404, 404, 404) - partition, nodes = self.controller.account_info(account) - self.assertEqual(partition, None) - self.assertEqual(nodes, None) - + # tests if 200 is cached and used def test_account_info_200(self): - account = 'test_account_info_200' - with save_globals(): proxy_server.http_connect = fake_http_connect(200) - partition, nodes = self.controller.account_info(account) - self.check_account_info_return(account, partition, nodes) + partition, nodes = self.controller.account_info(self.account) + self.check_account_info_return(partition, nodes) - def test_account_info_200_200(self): - account = 'test_account_info_200_200' + cache_key = proxy_server.get_account_memcache_key(self.account) + self.assertEquals(200, self.memcache.get(cache_key)) + + proxy_server.http_connect = fake_http_connect() + partition, nodes = self.controller.account_info(self.account) + self.check_account_info_return(partition, nodes) + + # tests if 404 is cached and used + def test_account_info_404(self): + with save_globals(): + proxy_server.http_connect = fake_http_connect(404, 404, 404) + partition, nodes = self.controller.account_info(self.account) + self.check_account_info_return(partition, nodes, True) + + cache_key = proxy_server.get_account_memcache_key(self.account) + self.assertEquals(404, self.memcache.get(cache_key)) + + proxy_server.http_connect = fake_http_connect() + partition, nodes = self.controller.account_info(self.account) + self.check_account_info_return(partition, nodes, True) + + # tests if some http status codes are not cached + def test_account_info_no_cache(self): + def test(*status_list): + proxy_server.http_connect = fake_http_connect(*status_list) + partition, nodes = self.controller.account_info(self.account) + self.assertEqual(len(self.memcache.keys()), 0) + self.check_account_info_return(partition, nodes, True) with save_globals(): - proxy_server.http_connect = fake_http_connect(200) - partition, nodes = 
self.controller.account_info(account) - self.check_account_info_return(account, partition, nodes) + test(503, 404, 404) + test(404, 404, 503) + test(404, 507, 503) + test(503, 503, 503) - proxy_server.http_connect = fake_http_connect(200) - partition, nodes = self.controller.account_info(account) - self.check_account_info_return(account, partition, nodes) + def check_container_info_return(self, ret, is_none=False): + if is_none: + partition, nodes, read_acl, write_acl = None, None, None, None + else: + partition, nodes = self.container_ring.get_nodes(self.account, + self.container) + read_acl, write_acl = self.read_acl, self.write_acl + self.assertEqual(partition, ret[0]) + self.assertEqual(nodes, ret[1]) + self.assertEqual(read_acl, ret[2]) + self.assertEqual(write_acl, ret[3]) + + def test_container_info_invalid_account(self): + def account_info(self, account): + return None, None + + with save_globals(): + proxy_server.Controller.account_info = account_info + ret = self.controller.container_info(self.account, + self.container) + self.check_container_info_return(ret, True) + + # tests if 200 is cached and used + def test_container_info_200(self): + def account_info(self, account): + return True, True + + with save_globals(): + headers = {'x-container-read': self.read_acl, + 'x-container-write': self.write_acl} + proxy_server.Controller.account_info = account_info + proxy_server.http_connect = fake_http_connect(200, + headers=headers) + ret = self.controller.container_info(self.account, + self.container) + self.check_container_info_return(ret) + + cache_key = proxy_server.get_container_memcache_key(self.account, + self.container) + cache_value = self.memcache.get(cache_key) + self.assertEquals(dict, type(cache_value)) + self.assertEquals(200, cache_value.get('status')) + + proxy_server.http_connect = fake_http_connect() + ret = self.controller.container_info(self.account, + self.container) + self.check_container_info_return(ret) + + # tests if 404 is cached and 
used + def test_container_info_404(self): + def account_info(self, account): + return True, True + + with save_globals(): + proxy_server.Controller.account_info = account_info + proxy_server.http_connect = fake_http_connect(404, 404, 404) + ret = self.controller.container_info(self.account, + self.container) + self.check_container_info_return(ret, True) + + cache_key = proxy_server.get_container_memcache_key(self.account, + self.container) + cache_value = self.memcache.get(cache_key) + self.assertEquals(dict, type(cache_value)) + self.assertEquals(404, cache_value.get('status')) + + proxy_server.http_connect = fake_http_connect() + ret = self.controller.container_info(self.account, + self.container) + self.check_container_info_return(ret, True) + + # tests if some http status codes are not cached + def test_container_info_no_cache(self): + def test(*status_list): + proxy_server.http_connect = fake_http_connect(*status_list) + ret = self.controller.container_info(self.account, + self.container) + self.assertEqual(len(self.memcache.keys()), 0) + self.check_container_info_return(ret, True) + + with save_globals(): + test(503, 404, 404) + test(404, 404, 503) + test(404, 507, 503) + test(503, 503, 503) class TestProxyServer(unittest.TestCase): From 35f3487879c650479fecd145649ce72ae48b55a7 Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 1 Dec 2010 17:08:49 -0800 Subject: [PATCH 009/199] Incorporated Swauth into Swift as an optional DevAuth replacement. 
--- bin/swauth-add-account | 62 ++ bin/swauth-add-user | 87 ++ bin/swauth-delete-account | 57 + bin/swauth-delete-user | 57 + bin/swauth-list | 65 ++ bin/swauth-prep | 56 + bin/swift-auth-to-swauth | 44 + doc/source/admin_guide.rst | 3 + doc/source/deployment_guide.rst | 37 + doc/source/development_auth.rst | 5 +- doc/source/development_saio.rst | 43 +- doc/source/howto_cyberduck.rst | 4 +- doc/source/howto_installmultinode.rst | 45 +- doc/source/index.rst | 1 + doc/source/misc.rst | 9 + doc/source/overview_auth.rst | 130 ++- etc/auth-server.conf-sample | 1 + etc/proxy-server.conf-sample | 28 + etc/stats.conf-sample | 3 + setup.py | 4 + swift/common/middleware/auth.py | 3 +- swift/common/middleware/swauth.py | 1120 ++++++++++++++++++++ swift/proxy/server.py | 3 +- test/functional/sample.conf | 5 + test/functional/swift.py | 7 +- test/functionalnosetests/swift_testing.py | 5 +- test/probe/common.py | 50 +- test/unit/common/middleware/test_swauth.py | 691 ++++++++++++ 28 files changed, 2580 insertions(+), 45 deletions(-) create mode 100755 bin/swauth-add-account create mode 100755 bin/swauth-add-user create mode 100755 bin/swauth-delete-account create mode 100755 bin/swauth-delete-user create mode 100755 bin/swauth-list create mode 100755 bin/swauth-prep create mode 100755 bin/swift-auth-to-swauth create mode 100644 swift/common/middleware/swauth.py create mode 100644 test/unit/common/middleware/test_swauth.py diff --git a/bin/swauth-add-account b/bin/swauth-add-account new file mode 100755 index 0000000000..740dcddcb8 --- /dev/null +++ b/bin/swauth-add-account @@ -0,0 +1,62 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options] ') + parser.add_option('-s', '--suffix', dest='suffix', + default='', help='The suffix to use as the storage account name ' + '(default: )') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/)') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 1: + parser.parse_args(['-h']) + account = args[0] + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/%s' % (parsed.path, account) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + if options.suffix: + headers['X-Account-Suffix'] = options.suffix + conn = 
http_connect(parsed.hostname, parsed.port, 'PUT', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'Account creation failed: %s %s' % (resp.status, resp.reason) diff --git a/bin/swauth-add-user b/bin/swauth-add-user new file mode 100755 index 0000000000..9bf27bbd7a --- /dev/null +++ b/bin/swauth-add-user @@ -0,0 +1,87 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser( + usage='Usage: %prog [options] ') + parser.add_option('-a', '--admin', dest='admin', action='store_true', + default=False, help='Give the user administrator access; otherwise ' + 'the user will only have access to containers specifically allowed ' + 'with ACLs.') + parser.add_option('-r', '--reseller-admin', dest='reseller_admin', + action='store_true', default=False, help='Give the user full reseller ' + 'administrator access, giving them full access to all accounts within ' + 'the reseller, including the ability to create new accounts. 
Creating ' + 'a new reseller admin requires super_admin rights.') + parser.add_option('-s', '--suffix', dest='suffix', + default='', help='The suffix to use as the storage account name ' + '(default: )') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 3: + parser.parse_args(['-h']) + account, user, password = args + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + # Ensure the account exists + path = '%sv2/%s' % (parsed.path, account) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + if options.suffix: + headers['X-Account-Suffix'] = options.suffix + conn = http_connect(parsed.hostname, parsed.port, 'PUT', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'Account creation failed: %s %s' % (resp.status, resp.reason) + # Add the user + path = '%sv2/%s/%s' % (parsed.path, account, user) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key, + 'X-Auth-User-Key': password} + if options.admin: + headers['X-Auth-User-Admin'] = 'true' + if options.reseller_admin: + headers['X-Auth-User-Reseller-Admin'] = 'true' + conn = http_connect(parsed.hostname, parsed.port, 
'PUT', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'User creation failed: %s %s' % (resp.status, resp.reason) diff --git a/bin/swauth-delete-account b/bin/swauth-delete-account new file mode 100755 index 0000000000..22e6523d0e --- /dev/null +++ b/bin/swauth-delete-account @@ -0,0 +1,57 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options] ') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 1: + parser.parse_args(['-h']) + account = args[0] + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise 
Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/%s' % (parsed.path, account) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + conn = http_connect(parsed.hostname, parsed.port, 'DELETE', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'Account deletion failed: %s %s' % (resp.status, resp.reason) diff --git a/bin/swauth-delete-user b/bin/swauth-delete-user new file mode 100755 index 0000000000..b018031982 --- /dev/null +++ b/bin/swauth-delete-user @@ -0,0 +1,57 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options] ') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 2: + parser.parse_args(['-h']) + account, user = args + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/%s/%s' % (parsed.path, account, user) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + conn = http_connect(parsed.hostname, parsed.port, 'DELETE', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'User deletion failed: %s %s' % (resp.status, resp.reason) diff --git a/bin/swauth-list b/bin/swauth-list new file mode 100755 index 0000000000..d681b1c38e --- /dev/null +++ b/bin/swauth-list @@ -0,0 +1,65 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import simplejson as json +except ImportError: + import json +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options] [account] [user]') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) > 2: + parser.parse_args(['-h']) + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/%s' % (parsed.path, '/'.join(args)) + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + conn = http_connect(parsed.hostname, parsed.port, 'GET', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 
!= 2: + print 'List failed: %s %s' % (resp.status, resp.reason) + if len(args) == 2 and args[1] != '.groups': + print resp.read() + else: + for item in json.loads(resp.read()): + print item['name'] diff --git a/bin/swauth-prep b/bin/swauth-prep new file mode 100755 index 0000000000..cef2addba2 --- /dev/null +++ b/bin/swauth-prep @@ -0,0 +1,56 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options]') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if args: + parser.parse_args(['-h']) + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url 
%s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/.prep' % parsed.path + headers = {'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + conn = http_connect(parsed.hostname, parsed.port, 'POST', path, headers, + ssl=(parsed.scheme == 'https')) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'Auth subsystem prep failed: %s %s' % (resp.status, resp.reason) diff --git a/bin/swift-auth-to-swauth b/bin/swift-auth-to-swauth new file mode 100755 index 0000000000..b8867c1f59 --- /dev/null +++ b/bin/swift-auth-to-swauth @@ -0,0 +1,44 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from subprocess import call +from sys import argv, exit + +import sqlite3 + + +if __name__ == '__main__': + if len(argv) != 4 or argv[1] != '-K': + exit('Syntax: %s -K ' % argv[0]) + _, _, super_admin_key, auth_db = argv + call(['swauth-prep', '-K', super_admin_key]) + conn = sqlite3.connect(auth_db) + for account, cfaccount, user, password, admin, reseller_admin in \ + conn.execute('SELECT account, cfaccount, user, password, admin, ' + 'reseller_admin FROM account'): + cmd = ['swauth-add-user', '-K', super_admin_key, '-s', + cfaccount.split('_', 1)[1]] + if admin == 't': + cmd.append('-a') + if reseller_admin == 't': + cmd.append('-r') + cmd.extend([account, user, password]) + print ' '.join(cmd) + call(cmd) + print '----------------------------------------------------------------' + print ' Assuming the above worked perfectly, you should copy and paste ' + print ' those lines into your ~/bin/recreateaccounts script.' + print '----------------------------------------------------------------' diff --git a/doc/source/admin_guide.rst b/doc/source/admin_guide.rst index 31c5f17123..65ad7d0ca4 100644 --- a/doc/source/admin_guide.rst +++ b/doc/source/admin_guide.rst @@ -164,7 +164,10 @@ swift-stats-populate and swift-stats-report use the same configuration file, /etc/swift/stats.conf. 
Example conf file:: [stats] + # For DevAuth: auth_url = http://saio:11000/v1.0 + # For Swauth: + # auth_url = http://saio:11000/auth/v1.0 auth_user = test:tester auth_key = testing diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 68f8c9b5c8..7d5bfc466b 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -484,6 +484,43 @@ ssl False If True, use SSL to node_timeout 10 Request timeout ============ =================================== ======================== +[swauth] + +===================== =============================== ======================= +Option Default Description +--------------------- ------------------------------- ----------------------- +use Entry point for + paste.deploy to use for + auth. To use the swauth + set to: + `egg:swift#swauth` +log_name auth-server Label used when logging +log_facility LOG_LOCAL0 Syslog log facility +log_level INFO Log level +log_headers True If True, log headers in + each request +reseller_prefix AUTH The naming scope for the + auth service. Swift + storage accounts and + auth tokens will begin + with this prefix. +auth_prefix /auth/ The HTTP request path + prefix for the auth + service. Swift itself + reserves anything + beginning with the + letter `v`. +default_swift_cluster local:http://127.0.0.1:8080/v1 The default Swift + cluster to place newly + created accounts on. +token_life 86400 The number of seconds a + token is valid. +node_timeout 10 Request timeout +super_admin_key None The key for the + .super_admin account. 
+===================== =============================== ======================= + + ------------------------ Memcached Considerations ------------------------ diff --git a/doc/source/development_auth.rst b/doc/source/development_auth.rst index 2ac1bdf261..0f28750bd3 100644 --- a/doc/source/development_auth.rst +++ b/doc/source/development_auth.rst @@ -8,7 +8,7 @@ Creating Your Own Auth Server and Middleware The included swift/auth/server.py and swift/common/middleware/auth.py are good minimal examples of how to create an external auth server and proxy server auth -middleware. Also, see the `Swauth `_ project for +middleware. Also, see swift/common/middleware/swauth.py for a more complete implementation. The main points are that the auth middleware can reject requests up front, before they ever get to the Swift Proxy application, and afterwards when the proxy issues callbacks to verify @@ -356,6 +356,7 @@ repoze.what:: self.auth_port = int(conf.get('port', 11000)) self.ssl = \ conf.get('ssl', 'false').lower() in ('true', 'on', '1', 'yes') + self.auth_prefix = conf.get('prefix', '/') self.timeout = int(conf.get('node_timeout', 10)) def authenticate(self, env, identity): @@ -371,7 +372,7 @@ repoze.what:: return user with Timeout(self.timeout): conn = http_connect(self.auth_host, self.auth_port, 'GET', - '/token/%s' % token, ssl=self.ssl) + '%stoken/%s' % (self.auth_prefix, token), ssl=self.ssl) resp = conn.getresponse() resp.read() conn.close() diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index 999d338fbf..072db9b327 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -216,7 +216,9 @@ Configuring each node Sample configuration files are provided with all defaults in line-by-line comments. - #. Create `/etc/swift/auth-server.conf`:: + #. 
If your going to use the DevAuth (the default swift-auth-server), create + `/etc/swift/auth-server.conf` (you can skip this if you're going to use + Swauth):: [DEFAULT] user = @@ -237,15 +239,25 @@ Sample configuration files are provided with all defaults in line-by-line commen user = [pipeline:main] + # For DevAuth: pipeline = healthcheck cache auth proxy-server + # For Swauth: + # pipeline = healthcheck cache swauth proxy-server [app:proxy-server] use = egg:swift#proxy allow_account_management = true + # Only needed for DevAuth [filter:auth] use = egg:swift#auth + # Only needed for Swauth + [filter:swauth] + use = egg:swift#swauth + # Highly recommended to change this. + super_admin_key = swauthkey + [filter:healthcheck] use = egg:swift#healthcheck @@ -562,18 +574,32 @@ Setting up scripts for running Swift #!/bin/bash + # The auth-server line is only needed for DevAuth: swift-init auth-server start swift-init proxy-server start swift-init account-server start swift-init container-server start swift-init object-server start + #. For Swauth (not needed for DevAuth), create `~/bin/recreateaccounts`:: + + #!/bin/bash + + # Replace devauth with whatever your super_admin key is (recorded in + # /etc/swift/proxy-server.conf). + swauth-prep -K swauthkey + swauth-add-user -K swauthkey -a test tester testing + swauth-add-user -K swauthkey -a test2 tester2 testing2 + swauth-add-user -K swauthkey test tester3 testing3 + swauth-add-user -K swauthkey -a -r reseller reseller reseller + #. Create `~/bin/startrest`:: #!/bin/bash # Replace devauth with whatever your super_admin key is (recorded in - # /etc/swift/auth-server.conf). + # /etc/swift/auth-server.conf). This swift-auth-recreate-accounts line + # is only needed for DevAuth: swift-auth-recreate-accounts -K devauth swift-init object-updater start swift-init container-updater start @@ -589,13 +615,14 @@ Setting up scripts for running Swift #. `remakerings` #. `cd ~/swift/trunk; ./.unittests` #. 
`startmain` (The ``Unable to increase file descriptor limit. Running as non-root?`` warnings are expected and ok.) - #. `swift-auth-add-user -K devauth -a test tester testing` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). - #. Get an `X-Storage-Url` and `X-Auth-Token`: ``curl -v -H 'X-Storage-User: test:tester' -H 'X-Storage-Pass: testing' http://127.0.0.1:11000/v1.0`` + #. For Swauth: `recreateaccounts` + #. For DevAuth: `swift-auth-add-user -K devauth -a test tester testing` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). + #. Get an `X-Storage-Url` and `X-Auth-Token`: ``curl -v -H 'X-Storage-User: test:tester' -H 'X-Storage-Pass: testing' http://127.0.0.1:11000/v1.0`` # For Swauth, make the last URL `http://127.0.0.1:8080/auth/v1.0` #. Check that you can GET account: ``curl -v -H 'X-Auth-Token: ' `` - #. Check that `st` works: `st -A http://127.0.0.1:11000/v1.0 -U test:tester -K testing stat` - #. `swift-auth-add-user -K devauth -a test2 tester2 testing2` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). - #. `swift-auth-add-user -K devauth test tester3 testing3` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). - #. `cp ~/swift/trunk/test/functional/sample.conf /etc/swift/func_test.conf` + #. Check that `st` works: `st -A http://127.0.0.1:11000/v1.0 -U test:tester -K testing stat` # For Swauth, make the URL `http://127.0.0.1:8080/auth/v1.0` + #. For DevAuth: `swift-auth-add-user -K devauth -a test2 tester2 testing2` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). + #. For DevAuth: `swift-auth-add-user -K devauth test tester3 testing3` # Replace ``devauth`` with whatever your super_admin key is (recorded in /etc/swift/auth-server.conf). + #. 
`cp ~/swift/trunk/test/functional/sample.conf /etc/swift/func_test.conf` # For Swauth, add auth_prefix = /auth/ and change auth_port = 8080. #. `cd ~/swift/trunk; ./.functests` (Note: functional tests will first delete everything in the configured accounts.) #. `cd ~/swift/trunk; ./.probetests` (Note: probe tests will reset your diff --git a/doc/source/howto_cyberduck.rst b/doc/source/howto_cyberduck.rst index 6af2f0e630..e9de135ff3 100644 --- a/doc/source/howto_cyberduck.rst +++ b/doc/source/howto_cyberduck.rst @@ -8,7 +8,9 @@ Talking to Swift with Cyberduck #. Install Swift, or have credentials for an existing Swift installation. If you plan to install Swift on your own server, follow the general guidelines - in the section following this one. + in the section following this one. (This documentation assumes the use of + the DevAuth auth server; if you're using Swauth, you should change all auth + URLs /v1.0 to /auth/v1.0) #. Verify you can connect using the standard Swift Tool `st` from your "public" URL (yes I know this resolves privately inside EC2):: diff --git a/doc/source/howto_installmultinode.rst b/doc/source/howto_installmultinode.rst index 82a3a88099..b16cfbbb14 100644 --- a/doc/source/howto_installmultinode.rst +++ b/doc/source/howto_installmultinode.rst @@ -13,8 +13,8 @@ Prerequisites Basic architecture and terms ---------------------------- - *node* - a host machine running one or more Swift services -- *Proxy node* - node that runs Proxy services -- *Auth node* - node that runs the Auth service +- *Proxy node* - node that runs Proxy services; can also run Swauth +- *Auth node* - node that runs the Auth service; only required for DevAuth - *Storage node* - node that runs Account, Container, and Object services - *ring* - a set of mappings of Swift data to physical devices @@ -23,13 +23,14 @@ This document shows a cluster using the following types of nodes: - one Proxy node - Runs the swift-proxy-server processes which proxy requests to the - 
appropriate Storage nodes. + appropriate Storage nodes. For Swauth, the proxy server will also contain + the Swauth service as WSGI middleware. - one Auth node - Runs the swift-auth-server which controls authentication and authorization for all requests. This can be on the same node as a - Proxy node. + Proxy node. This is only required for DevAuth. - five Storage nodes @@ -120,16 +121,27 @@ Configure the Proxy node user = swift [pipeline:main] + # For DevAuth: pipeline = healthcheck cache auth proxy-server + # For Swauth: + # pipeline = healthcheck cache swauth proxy-server [app:proxy-server] use = egg:swift#proxy allow_account_management = true + # Only needed for DevAuth [filter:auth] use = egg:swift#auth ssl = true + # Only needed for Swauth + [filter:swauth] + use = egg:swift#swauth + default_swift_cluster = https://:8080/v1 + # Highly recommended to change this key to something else! + super_admin_key = swauthkey + [filter:healthcheck] use = egg:swift#healthcheck @@ -194,6 +206,8 @@ Configure the Proxy node Configure the Auth node ----------------------- +.. note:: Only required for DevAuth; you can skip this section for Swauth. + #. If this node is not running on the same node as a proxy, create a self-signed cert as you did for the Proxy node @@ -358,13 +372,20 @@ Create Swift admin account and test You run these commands from the Auth node. +.. note:: For Swauth, replace the https://:11000/v1.0 with + https://:8080/auth/v1.0 + #. Create a user with administrative privileges (account = system, username = root, password = testpass). Make sure to replace - ``devauth`` with whatever super_admin key you assigned in the - auth-server.conf file above. *Note: None of the values of + ``devauth`` (or ``swauthkey``) with whatever super_admin key you assigned in + the auth-server.conf file (or proxy-server.conf file in the case of Swauth) + above. 
*Note: None of the values of account, username, or password are special - they can be anything.*:: + # For DevAuth: swift-auth-add-user -K devauth -a system root testpass + # For Swauth: + swauth-add-user -K swauthkey -a system root testpass #. Get an X-Storage-Url and X-Auth-Token:: @@ -404,15 +425,23 @@ See :ref:`config-proxy` for the initial setup, and then follow these additional use = egg:swift#memcache memcache_servers = :11211 -#. Change the default_cluster_url to point to the load balanced url, rather than the first proxy server you created in /etc/swift/auth-server.conf:: +#. Change the default_cluster_url to point to the load balanced url, rather than the first proxy server you created in /etc/swift/auth-server.conf (for DevAuth) or in /etc/swift/proxy-server.conf (for Swauth):: + # For DevAuth, in /etc/swift/auth-server.conf [app:auth-server] use = egg:swift#auth default_cluster_url = https:///v1 # Highly recommended to change this key to something else! super_admin_key = devauth -#. After you change the default_cluster_url setting, you have to delete the auth database and recreate the Swift users, or manually update the auth database with the correct URL for each account. + # For Swauth, in /etc/swift/proxy-server.conf + [filter:swauth] + use = egg:swift#swauth + default_swift_cluster = local:http:///v1 + # Highly recommended to change this key to something else! + super_admin_key = swauthkey + +#. For DevAuth, after you change the default_cluster_url setting, you have to delete the auth database and recreate the Swift users, or manually update the auth database with the correct URL for each account. For Swauth, changing the cluster URLs for the accounts is not yet supported (you'd have to hack the .cluster objects manually; not recommended). #. Next, copy all the ring information to all the nodes, including your new proxy nodes, and ensure the ring info gets to all the storage nodes as well. 
diff --git a/doc/source/index.rst b/doc/source/index.rst index 9b20293921..3c5f5bb3b9 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -87,6 +87,7 @@ Source Documentation db object auth + swauth misc diff --git a/doc/source/misc.rst b/doc/source/misc.rst index a0311cbf5e..eaea545a0f 100644 --- a/doc/source/misc.rst +++ b/doc/source/misc.rst @@ -42,6 +42,15 @@ Auth :members: :show-inheritance: +.. _common_swauth: + +Swauth +====== + +.. automodule:: swift.common.middleware.swauth + :members: + :show-inheritance: + .. _acls: ACLs diff --git a/doc/source/overview_auth.rst b/doc/source/overview_auth.rst index 364a6928dc..604aed266e 100644 --- a/doc/source/overview_auth.rst +++ b/doc/source/overview_auth.rst @@ -48,9 +48,129 @@ implementing your own auth. Also, see :doc:`development_auth`. ------------------- -History and Future ------------------- -What's established in Swift for authentication/authorization has history from -before Swift, so that won't be recorded here. +------ +Swauth +------ + +The Swauth system is an optional DevAuth replacement included at +swift/common/middleware/swauth.py is a scalable authentication and +authorization system that uses Swift itself as its backing store. This section +will describe how it stores its data. + +At the topmost level, the auth system has its own Swift account it stores its +own account information within. This Swift account is known as +self.auth_account in the code and its name is in the format +self.reseller_prefix + ".auth". In this text, we'll refer to this account as +. + +The containers whose names do not begin with a period represent the accounts +within the auth service. For example, the /test container would +represent the "test" account. + +The objects within each container represent the users for that auth service +account. For example, the /test/bob object would represent the +user "bob" within the auth service account of "test". 
Each of these user +objects contain a JSON dictionary of the format:: + + {"auth": ":", "groups": } + +The `` can only be `plaintext` at this time, and the `` +is the plain text password itself. + +The `` contains at least two group names. The first is a unique +group name identifying that user and is of the format `:`. The +second group is the `` itself. Additional groups of `.admin` for +account administrators and `.reseller_admin` for reseller administrators may +exist. Here's an example user JSON dictionary:: + + {"auth": "plaintext:testing", "groups": ["test:tester", "test", ".admin"]} + +To map an auth service account to a Swift storage account, the Service Account +Id string is stored in the `X-Container-Meta-Account-Id` header for the +/ container. To map back the other way, an +/.account_id/ object is created with the contents of +the corresponding auth service's account name. + +Also, to support a future where the auth service will support multiple Swift +clusters for the same auth service account, an +//.clusters object is created with its contents having a +JSON dictionary of the format:: + + {"storage": {"default": "local", "local": }} + +The "default" is always "local" right now, and "local" is always the single +Swift cluster URL; but in the future there can be more than one cluster with +various names instead of just "local", and the "default" key's value will +contain the primary cluster to use for that account. Also, there may be more +services in addition to the current "storage" service right now. 
+ +Here's an example .clusters dictionary at the moment:: + + {"storage": + {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}} + +But, here's an example of what the dictionary may look like in the future:: + + {"storage": + {"default": "dfw", + "dfw": "http://dfw.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", + "ord": "http://ord.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", + "sat": "http://ord.storage.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}, + "servers": + {"default": "dfw", + "dfw": "http://dfw.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", + "ord": "http://ord.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9", + "sat": "http://ord.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}} + +Lastly, the tokens themselves are stored as objects in the +/.token container. The names of the objects are the token strings +themselves, such as `AUTH_tked86bbd01864458aa2bd746879438d5a`. The contents of +the token objects are JSON dictionaries of the format:: + + {"account": , + "user": , + "account_id": , + "groups": , + "expires": } + +The `` is the auth service account's name for that token. The `` +is the user within the account for that token. The `` is the +same as the `X-Container-Meta-Account-Id` for the auth service's account, +as described above. The `` is the user's groups, as described +above with the user object. The "expires" value indicates when the token is no +longer valid, as compared to Python's time.time() value. + +Here's an example token object's JSON dictionary:: + + {"account": "test", + "user": "tester", + "account_id": "AUTH_8980f74b1cda41e483cbe0a925f448a9", + "groups": ["test:tester", "test", ".admin"], + "expires": 1291273147.1624689} + +To easily map a user to an already issued token, the token name is stored in +the user object's `X-Object-Meta-Auth-Token` header. 
+ +Here is an example full listing of an :: + + .account_id + AUTH_4a4e6655-4c8e-4bcb-b73e-0ff1104c4fef + AUTH_5162ec51-f792-4db3-8a35-b3439a1bf6fd + AUTH_8efbea51-9339-42f8-8ac5-f26e1da67eed + .token + AUTH_tk03d8571f735a4ec9abccc704df941c6e + AUTH_tk27cf3f2029b64ec8b56c5d638807b3de + AUTH_tk7594203449754c22a34ac7d910521c2e + AUTH_tk8f2ee54605dd42a8913d244de544d19e + reseller + .clusters + reseller + test + .clusters + tester + tester3 + test2 + .clusters + tester2 diff --git a/etc/auth-server.conf-sample b/etc/auth-server.conf-sample index 1309726985..27b6cf3e14 100644 --- a/etc/auth-server.conf-sample +++ b/etc/auth-server.conf-sample @@ -1,3 +1,4 @@ +# Only needed for DevAuth; Swauth is within the proxy-server.conf [DEFAULT] # bind_ip = 0.0.0.0 # bind_port = 11000 diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index 220f003ba0..c8cb20bc87 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -9,7 +9,10 @@ # key_file = /etc/swift/proxy.key [pipeline:main] +# For DevAuth: pipeline = catch_errors healthcheck cache ratelimit auth proxy-server +# For Swauth: +# pipeline = catch_errors healthcheck cache ratelimit swauth proxy-server [app:proxy-server] use = egg:swift#proxy @@ -33,6 +36,7 @@ use = egg:swift#proxy # 'false' no one, even authorized, can. # allow_account_management = false +# Only needed for DevAuth [filter:auth] use = egg:swift#auth # The reseller prefix will verify a token begins with this prefix before even @@ -44,8 +48,32 @@ use = egg:swift#auth # ip = 127.0.0.1 # port = 11000 # ssl = false +# prefix = / # node_timeout = 10 +# Only needed for Swauth +[filter:swauth] +use = egg:swift#swauth +# log_name = auth-server +# log_facility = LOG_LOCAL0 +# log_level = INFO +# log_headers = False +# The reseller prefix will verify a token begins with this prefix before even +# attempting to validate it. 
Also, with authorization, only Swift storage +# accounts with this prefix will be authorized by this middleware. Useful if +# multiple auth systems are in use for one Swift cluster. +# reseller_prefix = AUTH +# The auth prefix will cause requests beginning with this prefix to be routed +# to the auth subsystem, for granting tokens, creating accounts, users, etc. +# auth_prefix = /auth/ +# Cluster strings are of the format name:url where name is a short name for the +# Swift cluster and url is the url to the proxy server(s) for the cluster. +# default_swift_cluster = local:http://127.0.0.1:8080/v1 +# token_life = 86400 +# node_timeout = 10 +# Highly recommended to change this. +super_admin_key = swauthkey + [filter:healthcheck] use = egg:swift#healthcheck diff --git a/etc/stats.conf-sample b/etc/stats.conf-sample index 8ec18d4968..f89cb77d6d 100644 --- a/etc/stats.conf-sample +++ b/etc/stats.conf-sample @@ -1,5 +1,8 @@ [stats] +# For DevAuth: auth_url = http://saio:11000/auth +# For Swauth: +# auth_url = http://saio:8080/auth/v1.0 auth_user = test:tester auth_key = testing # swift_dir = /etc/swift diff --git a/setup.py b/setup.py index f72517f0de..6736a3b6a9 100644 --- a/setup.py +++ b/setup.py @@ -79,6 +79,9 @@ setup( 'bin/swift-log-uploader', 'bin/swift-log-stats-collector', 'bin/swift-account-stats-logger', + 'bin/swauth-add-account', 'bin/swauth-add-user', + 'bin/swauth-delete-account', 'bin/swauth-delete-user', + 'bin/swauth-list', 'bin/swauth-prep', 'bin/swift-auth-to-swauth', ], entry_points={ 'paste.app_factory': [ @@ -90,6 +93,7 @@ setup( ], 'paste.filter_factory': [ 'auth=swift.common.middleware.auth:filter_factory', + 'swauth=swift.common.middleware.swauth:filter_factory', 'healthcheck=swift.common.middleware.healthcheck:filter_factory', 'memcache=swift.common.middleware.memcache:filter_factory', 'ratelimit=swift.common.middleware.ratelimit:filter_factory', diff --git a/swift/common/middleware/auth.py b/swift/common/middleware/auth.py index 
1278a4a67a..3382cb88f9 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -35,6 +35,7 @@ class DevAuth(object): self.auth_host = conf.get('ip', '127.0.0.1') self.auth_port = int(conf.get('port', 11000)) self.ssl = conf.get('ssl', 'false').lower() in TRUE_VALUES + self.auth_prefix = conf.get('prefix', '/') self.timeout = int(conf.get('node_timeout', 10)) def __call__(self, env, start_response): @@ -131,7 +132,7 @@ class DevAuth(object): if not groups: with Timeout(self.timeout): conn = http_connect(self.auth_host, self.auth_port, 'GET', - '/token/%s' % token, ssl=self.ssl) + '%stoken/%s' % (self.auth_prefix, token), ssl=self.ssl) resp = conn.getresponse() resp.read() conn.close() diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py new file mode 100644 index 0000000000..3838e3e6f2 --- /dev/null +++ b/swift/common/middleware/swauth.py @@ -0,0 +1,1120 @@ +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import simplejson as json +except ImportError: + import json +from httplib import HTTPConnection, HTTPSConnection +from time import gmtime, strftime, time +from traceback import format_exc +from urllib import quote, unquote +from urlparse import urlparse +from uuid import uuid4 + +from eventlet.timeout import Timeout +from webob import Response, Request +from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ + HTTPCreated, HTTPForbidden, HTTPNoContent, HTTPNotFound, \ + HTTPServiceUnavailable, HTTPUnauthorized + +from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.middleware.acl import clean_acl, parse_acl, referrer_allowed +from swift.common.utils import cache_from_env, get_logger, split_path + + +class Swauth(object): + """ + Scalable authentication and authorization system that uses Swift as its + backing store. + + :param app: The next WSGI app in the pipeline + :param conf: The dict of configuration values + """ + + def __init__(self, app, conf): + self.app = app + self.conf = conf + self.logger = get_logger(conf) + self.log_headers = conf.get('log_headers') == 'True' + self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() + if self.reseller_prefix and self.reseller_prefix[-1] != '_': + self.reseller_prefix += '_' + self.auth_prefix = conf.get('auth_prefix', '/auth/') + if not self.auth_prefix: + self.auth_prefix = '/auth/' + if self.auth_prefix[0] != '/': + self.auth_prefix = '/' + self.auth_prefix + if self.auth_prefix[-1] != '/': + self.auth_prefix += '/' + self.auth_account = '%s.auth' % self.reseller_prefix + self.default_swift_cluster = conf.get('default_swift_cluster', + 'local:http://127.0.0.1:8080/v1').rstrip('/') + self.dsc_name, self.dsc_url = self.default_swift_cluster.split(':', 1) + self.dsc_parsed = urlparse(self.dsc_url) + if self.dsc_parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (self.dsc_parsed.scheme, 
repr(self.dsc_url))) + self.super_admin_key = conf.get('super_admin_key') + if not self.super_admin_key: + msg = 'No super_admin_key set in conf file! Exiting.' + try: + self.logger.critical(msg) + except: + pass + raise ValueError(msg) + self.token_life = int(conf.get('token_life', 86400)) + self.timeout = int(conf.get('node_timeout', 10)) + self.itoken = None + self.itoken_expires = None + + def __call__(self, env, start_response): + """ + Accepts a standard WSGI application call, authenticating the request + and installing callback hooks for authorization and ACL header + validation. For an authenticated request, REMOTE_USER will be set to a + comma separated list of the user's groups. + + With a non-empty reseller prefix, acts as the definitive auth service + for just tokens and accounts that begin with that prefix, but will deny + requests outside this prefix if no other auth middleware overrides it. + + With an empty reseller prefix, acts as the definitive auth service only + for tokens that validate to a non-empty set of groups. For all other + requests, acts as the fallback auth service when no other auth + middleware overrides it. + + Alternatively, if the request matches the self.auth_prefix, the request + will be routed through the internal auth request handler (self.handle). + This is to handle creating users, accounts, granting tokens, etc. + """ + if 'HTTP_X_CF_TRANS_ID' not in env: + env['HTTP_X_CF_TRANS_ID'] = 'tx' + str(uuid4()) + if env.get('PATH_INFO', '').startswith(self.auth_prefix): + return self.handle(env, start_response) + token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) + if token and token.startswith(self.reseller_prefix): + # Note: Empty reseller_prefix will match all tokens. + groups = self.get_groups(env, token) + if groups: + env['REMOTE_USER'] = groups + user = groups and groups.split(',', 1)[0] or '' + # We know the proxy logs the token, so we augment it just a bit + # to also log the authenticated user. 
+ env['HTTP_X_AUTH_TOKEN'] = '%s,%s' % (user, token) + env['swift.authorize'] = self.authorize + env['swift.clean_acl'] = clean_acl + else: + # Unauthorized token + if self.reseller_prefix: + # Because I know I'm the definitive auth for this token, I + # can deny it outright. + return HTTPUnauthorized()(env, start_response) + # Because I'm not certain if I'm the definitive auth for empty + # reseller_prefixed tokens, I won't overwrite swift.authorize. + elif 'swift.authorize' not in env: + env['swift.authorize'] = self.denied_response + else: + if self.reseller_prefix: + # With a non-empty reseller_prefix, I would like to be called + # back for anonymous access to accounts I know I'm the + # definitive auth for. + try: + version, rest = split_path(env.get('PATH_INFO', ''), + 1, 2, True) + except ValueError: + return HTTPNotFound()(env, start_response) + if rest and rest.startswith(self.reseller_prefix): + # Handle anonymous access to accounts I'm the definitive + # auth for. + env['swift.authorize'] = self.authorize + env['swift.clean_acl'] = clean_acl + # Not my token, not my account, I can't authorize this request, + # deny all is a good idea if not already set... + elif 'swift.authorize' not in env: + env['swift.authorize'] = self.denied_response + # Because I'm not certain if I'm the definitive auth for empty + # reseller_prefixed accounts, I won't overwrite swift.authorize. + elif 'swift.authorize' not in env: + env['swift.authorize'] = self.authorize + env['swift.clean_acl'] = clean_acl + return self.app(env, start_response) + + def get_groups(self, env, token): + """ + Get groups for the given token. + + :param env: The current WSGI environment dictionary. + :param token: Token to validate and return a group string for. + + :returns: None if the token is invalid or a string containing a comma + separated list of groups the authenticated user is a member + of. The first group in the list is also considered a unique + identifier for that user. 
+ """ + groups = None + memcache_client = cache_from_env(env) + if memcache_client: + memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) + cached_auth_data = memcache_client.get(memcache_key) + if cached_auth_data: + expires, groups = cached_auth_data + if expires < time(): + groups = None + if not groups: + path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + resp = self.make_request(env, 'GET', path).get_response(self.app) + if resp.status_int // 100 != 2: + return None + detail = json.loads(resp.body) + if detail['expires'] < time(): + return None + groups = detail['groups'] + if '.admin' in groups: + groups.remove('.admin') + groups.append(detail['account_id']) + groups = ','.join(groups) + if memcache_client: + memcache_client.set(memcache_key, (detail['expires'], groups), + timeout=float(detail['expires'] - time())) + return groups + + def authorize(self, req): + """ + Returns None if the request is authorized to continue or a standard + WSGI response callable if not. + """ + try: + version, account, container, obj = split_path(req.path, 1, 4, True) + except ValueError: + return HTTPNotFound(request=req) + if not account or not account.startswith(self.reseller_prefix): + return self.denied_response(req) + user_groups = (req.remote_user or '').split(',') + if '.reseller_admin' in user_groups: + return None + if account in user_groups and (req.method != 'PUT' or container): + # If the user is admin for the account and is not trying to do an + # account PUT... + return None + referrers, groups = parse_acl(getattr(req, 'acl', None)) + if referrer_allowed(req.referer, referrers): + return None + if not req.remote_user: + return self.denied_response(req) + for user_group in user_groups: + if user_group in groups: + return None + return self.denied_response(req) + + def denied_response(self, req): + """ + Returns a standard WSGI response callable with the status of 403 or 401 + depending on whether the REMOTE_USER is set or not. 
+ """ + if req.remote_user: + return HTTPForbidden(request=req) + else: + return HTTPUnauthorized(request=req) + + def handle(self, env, start_response): + """ + WSGI entry point for auth requests (ones that match the + self.auth_prefix). + Wraps env in webob.Request object and passes it down. + + :param env: WSGI environment dictionary + :param start_response: WSGI callable + """ + try: + req = Request(env) + if self.auth_prefix: + req.path_info_pop() + req.bytes_transferred = '-' + req.client_disconnect = False + if 'x-storage-token' in req.headers and \ + 'x-auth-token' not in req.headers: + req.headers['x-auth-token'] = req.headers['x-storage-token'] + if 'eventlet.posthooks' in env: + env['eventlet.posthooks'].append( + (self.posthooklogger, (req,), {})) + return self.handle_request(req)(env, start_response) + else: + # Lack of posthook support means that we have to log on the + # start of the response, rather than after all the data has + # been sent. This prevents logging client disconnects + # differently than full transmissions. + response = self.handle_request(req)(env, start_response) + self.posthooklogger(env, req) + return response + except: + print "EXCEPTION IN handle: %s: %s" % (format_exc(), env) + start_response('500 Server Error', + [('Content-Type', 'text/plain')]) + return ['Internal server error.\n'] + + def handle_request(self, req): + """ + Entry point for auth requests (ones that match the self.auth_prefix). + Should return a WSGI-style callable (such as webob.Response). 
+ + :param req: webob.Request object + """ + req.start_time = time() + handler = None + version, account, user, _ = split_path(req.path_info, minsegs=1, + maxsegs=4, rest_with_last=True) + if version in ('v1', 'v1.0', 'auth'): + if req.method == 'GET': + handler = self.handle_get_token + elif version == 'v2': + req.path_info_pop() + if req.method == 'GET': + if not account and not user: + handler = self.handle_get_accounts + elif account: + if not user: + handler = self.handle_get_account + elif account == '.token': + req.path_info_pop() + handler = self.handle_validate_token + else: + handler = self.handle_get_user + elif req.method == 'PUT': + if not user: + handler = self.handle_put_account + else: + handler = self.handle_put_user + elif req.method == 'DELETE': + if not user: + handler = self.handle_delete_account + else: + handler = self.handle_delete_user + elif req.method == 'POST': + if account == '.prep': + handler = self.handle_prep + if not handler: + req.response = HTTPBadRequest(request=req) + else: + req.response = handler(req) + return req.response + + def handle_prep(self, req): + """ + Handles the POST v2/.prep call for preparing the backing store Swift + cluster for use with the auth subsystem. Can only be called by + .super_admin. + + :param req: The webob.Request to process. 
+ :returns: webob.Response, 204 on success + """ + if not self.is_super_admin(req): + return HTTPForbidden(request=req) + path = quote('/v1/%s' % self.auth_account) + resp = self.make_request(req.environ, 'PUT', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create the main auth account: %s %s' % + (path, resp.status)) + for container in ('.token', '.account_id'): + path = quote('/v1/%s/%s' % (self.auth_account, container)) + resp = self.make_request(req.environ, 'PUT', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create container: %s %s' % + (path, resp.status)) + return HTTPNoContent(request=req) + + def handle_get_accounts(self, req): + """ + Handles the GET v2 call for listing the accounts handled by this auth + system. Can only be called by a .reseller_admin. + + On success, a JSON list of dicts will be returned. Each dict represents + an account and currently only contains the single key `name`. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success with a JSON list of the + accounts as explained above. + """ + if not self.is_reseller_admin(req): + return HTTPForbidden(request=req) + listing = [] + marker = '' + while True: + path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account), + quote(marker)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not list main auth account: %s %s' % + (path, resp.status)) + sublisting = json.loads(resp.body) + if not sublisting: + break + for container in sublisting: + if container['name'][0] != '.': + listing.append({'name': container['name']}) + marker = sublisting[-1]['name'] + return Response(body=json.dumps(listing)) + + def handle_get_account(self, req): + """ + Handles the GET v2/ call for listing the users in an account. + Can only be called by an account .admin. 
+ + On success, a JSON list of dicts will be returned. Each dict represents + a user and currently only contains the single key `name`. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success with a JSON list of the users + in the account as explained above. + """ + account = req.path_info_pop() + if req.path_info: + return HTTPBadRequest(request=req) + if not self.is_account_admin(req, account): + return HTTPForbidden(request=req) + listing = [] + marker = '' + while True: + path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % + (self.auth_account, account)), quote(marker)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not list in main auth account: %s %s' % + (path, resp.status)) + sublisting = json.loads(resp.body) + if not sublisting: + break + for obj in sublisting: + if obj['name'][0] != '.': + listing.append({'name': obj['name']}) + marker = sublisting[-1]['name'] + return Response(body=json.dumps(listing)) + + def handle_put_account(self, req): + """ + Handles the PUT v2/ call for adding an account to the auth + system. Can only be called by a .reseller_admin. + + By default, a newly created UUID4 will be used with the reseller prefix + as the account id used when creating corresponding service accounts. + However, you can provide an X-Account-Suffix header to replace the + UUID4 part. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success. 
+ """ + if not self.is_reseller_admin(req): + return HTTPForbidden(request=req) + account = req.path_info_pop() + if req.path_info or not account.isalnum(): + return HTTPBadRequest(request=req) + # Ensure the container in the main auth account exists (this + # container represents the new account) + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'HEAD', + path).get_response(self.app) + if resp.status_int == 404: + resp = self.make_request(req.environ, 'PUT', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create account within main auth ' + 'account: %s %s' % (path, resp.status)) + elif resp.status_int // 100 == 2: + if 'x-container-meta-account-id' in resp.headers: + # Account was already created + return HTTPAccepted(request=req) + else: + raise Exception('Could not verify account within main auth ' + 'account: %s %s' % (path, resp.status)) + account_suffix = req.headers.get('x-account-suffix') + if not account_suffix: + account_suffix = str(uuid4()) + conn = self.get_conn() + # Create the new account in the Swift cluster + path = quote('%s/%s%s' % (self.dsc_parsed.path, self.reseller_prefix, + account_suffix)) + conn.request('PUT', path, + headers={'X-Auth-Token': self.get_itoken(req.environ)}) + resp = conn.getresponse() + resp.read() + if resp.status // 100 != 2: + raise Exception('Could not create account on the Swift cluster: ' + '%s %s %s' % (path, resp.status, resp.reason)) + # Record the mapping from account id back to account name + path = quote('/v1/%s/.account_id/%s%s' % + (self.auth_account, self.reseller_prefix, account_suffix)) + resp = self.make_request(req.environ, 'PUT', path, + account).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create account id mapping: %s %s' % + (path, resp.status)) + # Record the cluster url(s) for the account + path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) + 
clusters = {'storage': {}} + clusters['storage'][self.dsc_name] = '%s/%s%s' % (self.dsc_url, + self.reseller_prefix, account_suffix) + clusters['storage']['default'] = self.dsc_name + resp = self.make_request(req.environ, 'PUT', path, + json.dumps(clusters)).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create .clusters object: %s %s' % + (path, resp.status)) + # Record the mapping from account name to the account id + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'POST', path, + headers={'X-Container-Meta-Account-Id': '%s%s' % + (self.reseller_prefix, account_suffix)}).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not record the account id on the account: ' + '%s %s' % (path, resp.status)) + return HTTPCreated(request=req) + + def handle_delete_account(self, req): + """ + Handles the DELETE v2/ call for removing an account from the + auth system. Can only be called by a .reseller_admin. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success. + """ + if not self.is_reseller_admin(req): + return HTTPForbidden(request=req) + account = req.path_info_pop() + if req.path_info or not account.isalnum(): + return HTTPBadRequest(request=req) + # Make sure the account has no users. + marker = '' + while True: + path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % + (self.auth_account, account)), quote(marker)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + break + if resp.status_int // 100 != 2: + raise Exception('Could not list in main auth account: %s %s' % + (path, resp.status)) + sublisting = json.loads(resp.body) + if not sublisting: + break + for obj in sublisting: + if obj['name'][0] != '.': + return HTTPConflict(request=req) + marker = sublisting[-1]['name'] + # Obtain the listing of clusters the account is on. 
+ path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNoContent(request=req) + elif resp.status_int // 100 == 2: + clusters = json.loads(resp.body) + # Delete the account on each cluster it is on. + for name, url in clusters['storage'].iteritems(): + if name != 'default': + parsed = urlparse(url) + if parsed.scheme == 'http': + conn = HTTPConnection(parsed.netloc) + else: + conn = HTTPSConnection(parsed.netloc) + conn.request('DELETE', parsed.path, + headers={'X-Auth-Token': self.get_itoken(req.environ)}) + resp = conn.getresponse() + resp.read() + if resp.status // 100 != 2: + raise Exception('Could not delete account on the ' + 'Swift cluster: %s %s %s' % + (url, resp.status, resp.reason)) + # Delete the .clusters object itself. + path = quote('/v1/%s/%s/.clusters' % + (self.auth_account, account)) + resp = self.make_request(req.environ, 'DELETE', + path).get_response(self.app) + # Obtain the account id mapping for the account. + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'HEAD', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNoContent(request=req) + elif 'x-container-meta-account-id' in resp.headers: + account_id = resp.headers['x-container-meta-account-id'] + # Delete the account id mapping for the account. + path = quote('/v1/%s/.account_id/%s' % + (self.auth_account, account_id)) + resp = self.make_request(req.environ, 'DELETE', + path).get_response(self.app) + if resp.status_int // 100 != 2: + self.logger.error('Could not delete account id ' + 'mapping: %s %s' % (path, resp.status)) + # Delete the account marker itself. 
+ path = quote('/v1/%s/%s' % (self.auth_account, account))
+ resp = self.make_request(req.environ, 'DELETE',
+ path).get_response(self.app)
+ if resp.status_int // 100 != 2:
+ self.logger.error('Could not delete account marker: '
+ '%s %s' % (path, resp.status))
+ else:
+ raise Exception('Could not verify account within main auth '
+ 'account: %s %s' % (path, resp.status))
+
+ def handle_get_user(self, req):
+ """
+ Handles the GET v2/<account>/<user> call for retrieving the user's JSON
+ dict. Can only be called by an account .admin.
+
+ On success, a JSON dict will be returned as described::
+
+ {"groups": [ # List of groups the user is a member of
+ "<account>:<user>", # The first group is a unique user identifier
+ "<account>", # The second group is the auth account name
+ "<additional-group>"...
+ # There may be additional groups, .admin being a special group
+ # indicating an account admin and .reseller_admin indicating a
+ # reseller admin.
+ ],
+ "auth": "plaintext:<key>"
+ # The auth-type and key for the user; currently only plaintext is
+ # implemented.
+ }
+
+ If the <user> in the request is the special user `.groups`, a JSON list
+ of dicts will be returned instead, each dict representing a group in
+ the account currently with just the single key `name`.
+
+ :param req: The webob.Request to process.
+ :returns: webob.Response, 2xx on success with data set as explained
+ above.
+ """
+ account = req.path_info_pop()
+ user = req.path_info_pop()
+ if req.path_info or not account.isalnum() or \
+ (not user.isalnum() and user != '.groups'):
+ return HTTPBadRequest(request=req)
+ if not self.is_account_admin(req, account):
+ return HTTPForbidden(request=req)
+ if user == '.groups':
+ # TODO: This could be very slow for accounts with a really large
+ # number of users. Speed could be improved by concurrently
+ # requesting user group information. Then again, I don't *know*
+ # it's slow for `normal` use cases, so testing should be done.
+ groups = set() + marker = '' + while True: + path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % + (self.auth_account, account)), quote(marker)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not list in main auth account: ' + '%s %s' % (path, resp.status)) + sublisting = json.loads(resp.body) + if not sublisting: + break + for obj in sublisting: + if obj['name'][0] != '.': + path = quote('/v1/%s/%s/%s' % (self.auth_account, + account, obj['name'])) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not retrieve user object: ' + '%s %s' % (path, resp.status)) + groups.update(json.loads(resp.body)['groups']) + marker = sublisting[-1]['name'] + body = json.dumps(list({'name': g} for g in sorted(groups))) + else: + path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not retrieve user object: %s %s' % + (path, resp.status)) + body = resp.body + return Response(body=body) + + def handle_put_user(self, req): + """ + Handles the PUT v2// call for adding a user to an + account. + + X-Auth-User-Key represents the user's key, X-Auth-User-Admin may be set + to `true` to create an account .admin, and X-Auth-User-Reseller-Admin + may be set to `true` to create a .reseller_admin. + + Can only be called by an account .admin unless the user is to be a + .reseller_admin, in which case the request must be by .super_admin. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success. 
+ """ + # Validate path info + account = req.path_info_pop() + user = req.path_info_pop() + key = req.headers.get('x-auth-user-key') + admin = req.headers.get('x-auth-user-admin') == 'true' + reseller_admin = \ + req.headers.get('x-auth-user-reseller-admin') == 'true' + if reseller_admin: + admin = True + if req.path_info or not account.isalnum() or not user.isalnum() or \ + not key: + return HTTPBadRequest(request=req) + if reseller_admin: + if not self.is_super_admin(req): + return HTTPForbidden(request=req) + elif not self.is_account_admin(req, account): + return HTTPForbidden(request=req) + # Create the object in the main auth account (this object represents + # the user) + path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) + groups = ['%s:%s' % (account, user), account] + if admin: + groups.append('.admin') + if reseller_admin: + groups.append('.reseller_admin') + resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': + 'plaintext:%s' % key, 'groups': groups})).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create user object: %s %s' % + (path, resp.status)) + return HTTPCreated(request=req) + + def handle_delete_user(self, req): + """ + Handles the DELETE v2// call for deleting a user from an + account. + + Can only be called by an account .admin. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success. + """ + # Validate path info + account = req.path_info_pop() + user = req.path_info_pop() + if req.path_info or not account.isalnum() or not user.isalnum(): + return HTTPBadRequest(request=req) + if not self.is_account_admin(req, account): + return HTTPForbidden(request=req) + # Delete the user's existing token, if any. 
+ path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
+ resp = self.make_request(req.environ, 'HEAD',
+ path).get_response(self.app)
+ if resp.status_int == 404:
+ return HTTPNotFound(request=req)
+ elif resp.status_int // 100 != 2:
+ raise Exception('Could not obtain user details: %s %s' %
+ (path, resp.status))
+ candidate_token = resp.headers.get('x-object-meta-auth-token')
+ if candidate_token:
+ path = quote('/v1/%s/.token/%s' % (self.auth_account,
+ candidate_token))
+ resp = self.make_request(req.environ, 'DELETE',
+ path).get_response(self.app)
+ if resp.status_int // 100 != 2 and resp.status_int != 404:
+ raise Exception('Could not delete possibly existing token: '
+ '%s %s' % (path, resp.status))
+ # Delete the user entry itself.
+ path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
+ resp = self.make_request(req.environ, 'DELETE',
+ path).get_response(self.app)
+ if resp.status_int // 100 != 2 and resp.status_int != 404:
+ raise Exception('Could not delete the user object: %s %s' %
+ (path, resp.status))
+ return HTTPNoContent(request=req)
+
+ def handle_get_token(self, req):
+ """
+ Handles the various `request for token and service end point(s)` calls.
+ There are various formats to support the various auth servers in the
+ past. Examples::
+
+ GET /v1/<account>/auth
+ X-Auth-User: <account>:<user> or X-Storage-User: <user>
+ X-Auth-Key: <key> or X-Storage-Pass: <key>
+ GET /auth
+ X-Auth-User: <account>:<user> or X-Storage-User: <account>:<user>
+ X-Auth-Key: <key> or X-Storage-Pass: <key>
+ GET /v1.0
+ X-Auth-User: <account>:<user> or X-Storage-User: <account>:<user>
+ X-Auth-Key: <key> or X-Storage-Pass: <key>
+
+ On successful authentication, the response will have X-Auth-Token and
+ X-Storage-Token set to the token to use with Swift and X-Storage-URL
+ set to the URL to the default Swift cluster to use.
+ + The response body will be set to the account's clusters JSON object as + described here:: + + {"storage": { # Represents the Swift storage service end points + "default": "cluster1", # Indicates which cluster is the default + "cluster1": "", + # A Swift cluster that can be used with this account, + # "cluster1" is the name of the cluster which is usually a + # location indicator (like "dfw" for a datacenter region). + "cluster2": "" + # Another Swift cluster that can be used with this account, + # there will always be at least one Swift cluster to use or + # this whole "storage" dict won't be included at all. + }, + "servers": { # Represents the Nova server service end points + # Expected to be similar to the "storage" dict, but not + # implemented yet. + }, + # Possibly other service dicts, not implemented yet. + } + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success with data set as explained + above. + """ + # Validate the request info + pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, + rest_with_last=True) + if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': + account = pathsegs[1] + user = req.headers.get('x-storage-user') + if not user: + user = req.headers.get('x-auth-user') + if not user or ':' not in user: + return HTTPUnauthorized(request=req) + account2, user = user.split(':', 1) + if account != account2: + return HTTPUnauthorized(request=req) + key = req.headers.get('x-storage-pass') + if not key: + key = req.headers.get('x-auth-key') + elif pathsegs[0] in ('auth', 'v1.0'): + user = req.headers.get('x-auth-user') + if not user: + user = req.headers.get('x-storage-user') + if not user or ':' not in user: + return HTTPUnauthorized(request=req) + account, user = user.split(':', 1) + key = req.headers.get('x-auth-key') + if not key: + key = req.headers.get('x-storage-pass') + else: + return HTTPBadRequest(request=req) + if not all((account, user, key)): + return HTTPUnauthorized(request=req) + # Authenticate 
user + path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPUnauthorized(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not obtain user details: %s %s' % + (path, resp.status)) + user_detail = json.loads(resp.body) + if not self.credentials_match(user_detail, key): + return HTTPUnauthorized(request=req) + # See if a token already exists and hasn't expired + token = None + candidate_token = resp.headers.get('x-object-meta-auth-token') + if candidate_token: + path = quote('/v1/%s/.token/%s' % (self.auth_account, + candidate_token)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int // 100 == 2: + token_detail = json.loads(resp.body) + if token_detail['expires'] > time(): + token = candidate_token + elif resp.status_int != 404: + raise Exception('Could not detect whether a token already ' + 'exists: %s %s' % (path, resp.status)) + # Create a new token if one didn't exist + if not token: + # Retrieve account id, we'll save this in the token + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'HEAD', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not retrieve account id value: ' + '%s %s' % (path, resp.status)) + account_id = \ + resp.headers['x-container-meta-account-id'] + # Generate new token + token = '%stk%s' % (self.reseller_prefix, uuid4().hex) + # Save token info + path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + resp = self.make_request(req.environ, 'PUT', path, + json.dumps({'account': account, 'user': user, + 'account_id': account_id, + 'groups': user_detail['groups'], + 'expires': time() + self.token_life})).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create new token: %s %s' % + (path, resp.status)) + # Record 
the token with the user info for future use. + path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) + resp = self.make_request(req.environ, 'POST', path, + headers={'X-Object-Meta-Auth-Token': token} + ).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not save new token: %s %s' % + (path, resp.status)) + # Get the cluster url information + path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not obtain clusters info: %s %s' % + (path, resp.status)) + detail = json.loads(resp.body) + url = detail['storage'][detail['storage']['default']] + return Response(request=req, body=resp.body, + headers={'x-auth-token': token, 'x-storage-token': token, + 'x-storage-url': url}) + + def handle_validate_token(self, req): + """ + Handles the GET v2/.token/ call for validating a token, usually + called by a service like Swift. + + On a successful validation, X-Auth-TTL will be set for how much longer + this token is valid and X-Auth-Groups will contain a comma separated + list of groups the user belongs to. + + The first group listed will be a unique identifier for the user the + token represents. + + .reseller_admin is a special group that indicates the user should be + allowed to do anything on any account. + + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success with data set as explained + above. 
+ """ + token = req.path_info_pop() + if req.path_info or not token.startswith(self.reseller_prefix): + return HTTPBadRequest(request=req) + expires = groups = None + memcache_client = cache_from_env(req.environ) + if memcache_client: + memcache_key = '%s/auth/%s' % (self.reseller_prefix, token) + cached_auth_data = memcache_client.get(memcache_key) + if cached_auth_data: + expires, groups = cached_auth_data + if expires < time(): + groups = None + if not groups: + path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int // 100 != 2: + return HTTPUnauthorized(request=req) + detail = json.loads(resp.body) + expires = detail['expires'] + if expires < time(): + return HTTPUnauthorized(request=req) + groups = detail['groups'] + if '.admin' in groups: + groups.remove('.admin') + groups.append(detail['account_id']) + groups = ','.join(groups) + return HTTPNoContent(headers={'X-Auth-TTL': expires - time(), + 'X-Auth-Groups': groups}) + + def make_request(self, env, method, path, body=None, headers=None): + """ + Makes a new webob.Request based on the current env but with the + parameters specified. + + :param env: Current WSGI environment dictionary + :param method: HTTP method of new request + :param path: HTTP path of new request + :param body: HTTP body of new request; None by default + :param headers: Extra HTTP headers of new request; None by default + + :returns: webob.Request object + """ + newenv = {'REQUEST_METHOD': method} + for name in ('swift.cache', 'HTTP_X_CF_TRANS_ID'): + if name in env: + newenv[name] = env[name] + if not headers: + headers = {} + if body: + return Request.blank(path, environ=newenv, body=body, + headers=headers) + else: + return Request.blank(path, environ=newenv, headers=headers) + + def get_conn(self, url=None): + """ + Returns an HTTPConnection based on the given `url` or the default Swift + cluster URL's scheme. 
+ """ + if self.dsc_parsed.scheme == 'http': + return HTTPConnection(self.dsc_parsed.netloc) + else: + return HTTPSConnection(self.dsc_parsed.netloc) + + def get_itoken(self, env): + """ + Returns the current internal token to use for the auth system's own + actions with other Swift clusters. Each process will create its own + itoken and the token will be deleted and recreated based on the + token_life configuration value. The itoken information is stored in + memcache because the auth process that is asked by Swift to validate + the token may not be the same as the auth process that created the + token. + """ + if not self.itoken or self.itoken_expires < time(): + self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) + memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) + self.itoken_expires = time() + self.token_life - 60 + cache_from_env(env).set(memcache_key, (self.itoken_expires, + '.auth,.reseller_admin'), timeout=self.token_life) + return self.itoken + + def get_admin_detail(self, req): + """ + Returns the dict for the user specified as the admin in the request + with the addition of an `account` key set to the admin user's account. + + :param req: The webob request to retrieve X-Auth-Admin-User and + X-Auth-Admin-Key from. + :returns: The dict for the admin user with the addition of the + `account` key. 
+ """ + if ':' not in req.headers.get('x-auth-admin-user'): + return None + admin_account, admin_user = \ + req.headers.get('x-auth-admin-user').split(':', 1) + path = quote('/v1/%s/%s/%s' % (self.auth_account, admin_account, + admin_user)) + resp = self.make_request(req.env, 'GET', path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not get admin user object: %s %s' % + (path, resp.status)) + admin_detail = json.loads(resp.body) + admin_detail['account'] = admin_account + return admin_detail + + def credentials_match(self, user_detail, key): + """ + Returns True if the key is valid for the user_detail. Currently, this + only supports plaintext key matching. + + :param user_detail: The dict for the user. + :param key: The key to validate for the user. + :returns: True if the key is valid for the user, False if not. + """ + return user_detail.get('auth') == 'plaintext:%s' % key + + def is_super_admin(self, req): + """ + Returns True if the admin specified in the request represents the + .super_admin. + + :param req: The webob.Request to check. + :param returns: True if .super_admin. + """ + return req.headers.get('x-auth-admin-user') == '.super_admin' and \ + req.headers.get('x-auth-admin-key') == self.super_admin_key + + def is_reseller_admin(self, req, admin_detail=None): + """ + Returns True if the admin specified in the request represents a + .reseller_admin. + + :param req: The webob.Request to check. + :param admin_detail: The previously retrieved dict from + :func:`get_admin_detail` or None for this function + to retrieve the admin_detail itself. + :param returns: True if .reseller_admin. 
+ """ + if self.is_super_admin(req): + return True + if not admin_detail: + admin_detail = self.get_admin_detail(req) + if not self.credentials_match(admin_detail, + req.headers.get('x-auth-admin-key')): + return False + return '.reseller_admin' in admin_detail['groups'] + + def is_account_admin(self, req, account): + """ + Returns True if the admin specified in the request represents a .admin + for the account specified. + + :param req: The webob.Request to check. + :param account: The account to check for .admin against. + :param returns: True if .admin. + """ + if self.is_super_admin(req): + return True + admin_detail = self.get_admin_detail(req) + if self.is_reseller_admin(req, admin_detail=admin_detail): + return True + return admin_detail['account'] == account and \ + '.admin' in admin_detail['groups'] + + def posthooklogger(self, env, req): + response = getattr(req, 'response', None) + if not response: + return + trans_time = '%.4f' % (time() - req.start_time) + the_request = quote(unquote(req.path)) + if req.query_string: + the_request = the_request + '?' 
+ req.query_string + # remote user for zeus + client = req.headers.get('x-cluster-client-ip') + if not client and 'x-forwarded-for' in req.headers: + # remote user for other lbs + client = req.headers['x-forwarded-for'].split(',')[0].strip() + logged_headers = None + if self.log_headers: + logged_headers = '\n'.join('%s: %s' % (k, v) + for k, v in req.headers.items()) + status_int = response.status_int + if getattr(req, 'client_disconnect', False) or \ + getattr(response, 'client_disconnect', False): + status_int = 499 + self.logger.info(' '.join(quote(str(x)) for x in (client or '-', + req.remote_addr or '-', strftime('%d/%b/%Y/%H/%M/%S', gmtime()), + req.method, the_request, req.environ['SERVER_PROTOCOL'], + status_int, req.referer or '-', req.user_agent or '-', + req.headers.get('x-auth-token', + req.headers.get('x-auth-admin-user', '-')), + getattr(req, 'bytes_transferred', 0) or '-', + getattr(response, 'bytes_transferred', 0) or '-', + req.headers.get('etag', '-'), + req.headers.get('x-cf-trans-id', '-'), logged_headers or '-', + trans_time))) + + +def filter_factory(global_conf, **local_conf): + """Returns a WSGI filter app for use with paste.deploy.""" + conf = global_conf.copy() + conf.update(local_conf) + + def auth_filter(app): + return Swauth(app, conf) + return auth_filter diff --git a/swift/proxy/server.py b/swift/proxy/server.py index e48052a398..b9589b2663 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1385,7 +1385,8 @@ class BaseApplication(object): def update_request(self, req): req.bytes_transferred = '-' req.client_disconnect = False - req.headers['x-cf-trans-id'] = 'tx' + str(uuid.uuid4()) + if 'x-cf-trans-id' not in req.headers: + req.headers['x-cf-trans-id'] = 'tx' + str(uuid.uuid4()) if 'x-storage-token' in req.headers and \ 'x-auth-token' not in req.headers: req.headers['x-auth-token'] = req.headers['x-storage-token'] diff --git a/test/functional/sample.conf b/test/functional/sample.conf index 983f2cf768..4067269af2 
100644 --- a/test/functional/sample.conf +++ b/test/functional/sample.conf @@ -1,7 +1,12 @@ # sample config auth_host = 127.0.0.1 +# For DevAuth: auth_port = 11000 +# For Swauth: +# auth_port = 8080 auth_ssl = no +# For Swauth: +# auth_prefix = /auth/ # Primary functional test account (needs admin access to the account) account = test diff --git a/test/functional/swift.py b/test/functional/swift.py index e134de502f..e3012dd6b3 100644 --- a/test/functional/swift.py +++ b/test/functional/swift.py @@ -82,6 +82,7 @@ class Connection(object): self.auth_host = config['auth_host'] self.auth_port = int(config['auth_port']) self.auth_ssl = config['auth_ssl'] in ('on', 'true', 'yes', '1') + self.auth_prefix = config.get('auth_prefix', '/') self.account = config['account'] self.username = config['username'] @@ -105,11 +106,11 @@ class Connection(object): return headers = { - 'x-storage-user': self.username, - 'x-storage-pass': self.password, + 'x-auth-user': '%s:%s' % (self.account, self.username), + 'x-auth-key': self.password, } - path = '/v1/%s/auth' % (self.account) + path = '%sv1.0' % (self.auth_prefix) if self.auth_ssl: connection = httplib.HTTPSConnection(self.auth_host, port=self.auth_port) diff --git a/test/functionalnosetests/swift_testing.py b/test/functionalnosetests/swift_testing.py index 8bd46b462b..69553494b3 100644 --- a/test/functionalnosetests/swift_testing.py +++ b/test/functionalnosetests/swift_testing.py @@ -31,7 +31,10 @@ if not all([swift_test_auth, swift_test_user[0], swift_test_key[0]]): swift_test_auth = 'http' if conf.get('auth_ssl', 'no').lower() in ('yes', 'true', 'on', '1'): swift_test_auth = 'https' - swift_test_auth += '://%(auth_host)s:%(auth_port)s/v1.0' % conf + if 'auth_prefix' not in conf: + conf['auth_prefix'] = '/' + swift_test_auth += \ + '://%(auth_host)s:%(auth_port)s%(auth_prefix)sv1.0' % conf swift_test_user[0] = '%(account)s:%(username)s' % conf swift_test_key[0] = conf['password'] try: diff --git a/test/probe/common.py 
b/test/probe/common.py index 0bb6f42a57..907210c739 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -24,13 +24,25 @@ from swift.common.client import get_auth from swift.common.ring import Ring +SUPER_ADMIN_KEY = None +AUTH_TYPE = None + +c = ConfigParser() AUTH_SERVER_CONF_FILE = environ.get('SWIFT_AUTH_SERVER_CONF_FILE', '/etc/swift/auth-server.conf') -c = ConfigParser() -if not c.read(AUTH_SERVER_CONF_FILE): - exit('Unable to read config file: %s' % AUTH_SERVER_CONF_FILE) -conf = dict(c.items('app:auth-server')) -SUPER_ADMIN_KEY = conf.get('super_admin_key', 'devauth') +if c.read(AUTH_SERVER_CONF_FILE): + conf = dict(c.items('app:auth-server')) + SUPER_ADMIN_KEY = conf.get('super_admin_key', 'devauth') + AUTH_TYPE = 'devauth' +else: + PROXY_SERVER_CONF_FILE = environ.get('SWIFT_PROXY_SERVER_CONF_FILE', + '/etc/swift/proxy-server.conf') + if c.read(PROXY_SERVER_CONF_FILE): + conf = dict(c.items('filter:swauth')) + SUPER_ADMIN_KEY = conf.get('super_admin_key', 'swauthkey') + AUTH_TYPE = 'swauth' + else: + exit('Unable to read config file: %s' % AUTH_SERVER_CONF_FILE) def kill_pids(pids): @@ -45,8 +57,9 @@ def reset_environment(): call(['resetswift']) pids = {} try: - pids['auth'] = Popen(['swift-auth-server', - '/etc/swift/auth-server.conf']).pid + if AUTH_TYPE == 'devauth': + pids['auth'] = Popen(['swift-auth-server', + '/etc/swift/auth-server.conf']).pid pids['proxy'] = Popen(['swift-proxy-server', '/etc/swift/proxy-server.conf']).pid port2server = {} @@ -60,14 +73,21 @@ def reset_environment(): container_ring = Ring('/etc/swift/container.ring.gz') object_ring = Ring('/etc/swift/object.ring.gz') sleep(5) - conn = http_connect('127.0.0.1', '11000', 'POST', '/recreate_accounts', - headers={'X-Auth-Admin-User': '.super_admin', - 'X-Auth-Admin-Key': SUPER_ADMIN_KEY}) - resp = conn.getresponse() - if resp.status != 200: - raise Exception('Recreating accounts failed. 
(%d)' % resp.status) - url, token = \ - get_auth('http://127.0.0.1:11000/auth', 'test:tester', 'testing') + if AUTH_TYPE == 'devauth': + conn = http_connect('127.0.0.1', '11000', 'POST', + '/recreate_accounts', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': SUPER_ADMIN_KEY}) + resp = conn.getresponse() + if resp.status != 200: + raise Exception('Recreating accounts failed. (%d)' % + resp.status) + url, token = get_auth('http://127.0.0.1:11000/auth', 'test:tester', + 'testing') + elif AUTH_TYPE == 'swauth': + call(['recreateaccounts']) + url, token = get_auth('http://127.0.0.1:8080/auth/v1.0', + 'test:tester', 'testing') account = url.split('/')[-1] except BaseException, err: kill_pids(pids) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py new file mode 100644 index 0000000000..20075e4ea2 --- /dev/null +++ b/test/unit/common/middleware/test_swauth.py @@ -0,0 +1,691 @@ +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import simplejson as json +except ImportError: + import json +import unittest +from contextlib import contextmanager +from time import time + +from webob import Request, Response + +from swift.common.middleware import swauth as auth + + +class FakeMemcache(object): + + def __init__(self): + self.store = {} + + def get(self, key): + return self.store.get(key) + + def set(self, key, value, timeout=0): + self.store[key] = value + return True + + def incr(self, key, timeout=0): + self.store[key] = self.store.setdefault(key, 0) + 1 + return self.store[key] + + @contextmanager + def soft_lock(self, key, timeout=0, retries=5): + yield True + + def delete(self, key): + try: + del self.store[key] + except: + pass + return True + + +class FakeApp(object): + + def __init__(self, status_headers_body_iter=None): + self.calls = 0 + self.status_headers_body_iter = status_headers_body_iter + if not self.status_headers_body_iter: + self.status_headers_body_iter = iter([('404 Not Found', {}, '')]) + + def __call__(self, env, start_response): + self.calls += 1 + req = Request.blank('', environ=env) + if 'swift.authorize' in env: + resp = env['swift.authorize'](req) + if resp: + return resp(env, start_response) + status, headers, body = self.status_headers_body_iter.next() + return Response(status=status, headers=headers, + body=body)(env, start_response) + + +class TestAuth(unittest.TestCase): + + def setUp(self): + self.test_auth = \ + auth.filter_factory({'super_admin_key': 'supertest'})(FakeApp()) + + def test_super_admin_key_required(self): + app = FakeApp() + exc = None + try: + auth.filter_factory({})(app) + except ValueError, err: + exc = err + self.assertEquals(str(exc), + 'No super_admin_key set in conf file! 
Exiting.') + auth.filter_factory({'super_admin_key': 'supertest'})(app) + + def test_reseller_prefix_init(self): + app = FakeApp() + ath = auth.filter_factory({'super_admin_key': 'supertest'})(app) + self.assertEquals(ath.reseller_prefix, 'AUTH_') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': 'TEST'})(app) + self.assertEquals(ath.reseller_prefix, 'TEST_') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': 'TEST_'})(app) + self.assertEquals(ath.reseller_prefix, 'TEST_') + + def test_auth_prefix_init(self): + app = FakeApp() + ath = auth.filter_factory({'super_admin_key': 'supertest'})(app) + self.assertEquals(ath.auth_prefix, '/auth/') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'auth_prefix': ''})(app) + self.assertEquals(ath.auth_prefix, '/auth/') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'auth_prefix': '/test/'})(app) + self.assertEquals(ath.auth_prefix, '/test/') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'auth_prefix': '/test'})(app) + self.assertEquals(ath.auth_prefix, '/test/') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'auth_prefix': 'test/'})(app) + self.assertEquals(ath.auth_prefix, '/test/') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'auth_prefix': 'test'})(app) + self.assertEquals(ath.auth_prefix, '/test/') + + def test_default_swift_cluster_init(self): + app = FakeApp() + self.assertRaises(Exception, auth.filter_factory({ + 'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:badscheme://host/path'}), app) + ath = auth.filter_factory({'super_admin_key': 'supertest'})(app) + self.assertEquals(ath.default_swift_cluster, + 'local:http://127.0.0.1:8080/v1') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:http://host/path'})(app) + self.assertEquals(ath.default_swift_cluster, + 'local:http://host/path') + ath = 
auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:http://host/path/'})(app) + self.assertEquals(ath.default_swift_cluster, + 'local:http://host/path') + + def test_auth_deny_non_reseller_prefix(self): + resp = Request.blank('/v1/BLAH_account', + headers={'X-Auth-Token': 'BLAH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + self.assertEquals(resp.environ['swift.authorize'], + self.test_auth.denied_response) + + def test_auth_deny_non_reseller_prefix_no_override(self): + fake_authorize = lambda x: Response(status='500 Fake') + resp = Request.blank('/v1/BLAH_account', + headers={'X-Auth-Token': 'BLAH_t'}, + environ={'swift.authorize': fake_authorize} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(resp.environ['swift.authorize'], fake_authorize) + + def test_auth_no_reseller_prefix_deny(self): + # Ensures that when we have no reseller prefix, we don't deny a request + # outright but set up a denial swift.authorize and pass the request on + # down the chain. 
+ local_app = FakeApp() + local_auth = auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': ''})(local_app) + resp = Request.blank('/v1/account', + headers={'X-Auth-Token': 't'}).get_response(local_auth) + self.assertEquals(resp.status_int, 401) + # one for checking auth, two for request passed along + self.assertEquals(local_app.calls, 2) + self.assertEquals(resp.environ['swift.authorize'], + local_auth.denied_response) + + def test_auth_no_reseller_prefix_allow(self): + # Ensures that when we have no reseller prefix, we can still allow + # access if our auth server accepts requests + local_app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, '')])) + local_auth = auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': ''})(local_app) + resp = Request.blank('/v1/act', + headers={'X-Auth-Token': 't'}).get_response(local_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(local_app.calls, 2) + self.assertEquals(resp.environ['swift.authorize'], + local_auth.authorize) + + def test_auth_no_reseller_prefix_no_token(self): + # Check that normally we set up a call back to our authorize. + local_auth = \ + auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': ''})(FakeApp(iter([]))) + resp = Request.blank('/v1/account').get_response(local_auth) + self.assertEquals(resp.status_int, 401) + self.assertEquals(resp.environ['swift.authorize'], + local_auth.authorize) + # Now make sure we don't override an existing swift.authorize when we + # have no reseller prefix. 
+ local_auth = \ + auth.filter_factory({'super_admin_key': 'supertest', + 'reseller_prefix': ''})(FakeApp()) + local_authorize = lambda req: Response('test') + resp = Request.blank('/v1/account', environ={'swift.authorize': + local_authorize}).get_response(local_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.environ['swift.authorize'], local_authorize) + + def test_auth_fail(self): + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_auth_success(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, '')])) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + + def test_auth_memcache(self): + # First run our test without memcache, showing we need to return the + # token contents twice. + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, ''), + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, '')])) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + # Now run our test with memcache, showing we no longer need to return + # the token contents twice. 
+ self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, ''), + # Don't need a second token object returned if memcache is used + ('204 No Content', {}, '')])) + fake_memcache = FakeMemcache() + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}, + environ={'swift.cache': fake_memcache} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}, + environ={'swift.cache': fake_memcache} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + + def test_auth_just_expired(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() - 1}))])) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_middleware_storage_token(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'account': 'act', 'user': 'act:usr', + 'account_id': 'AUTH_cfa', + 'groups': ['act:usr', 'act', '.admin'], + 'expires': time() + 60})), + ('204 No Content', {}, '')])) + resp = Request.blank('/v1/AUTH_cfa', + headers={'X-Storage-Token': 'AUTH_t'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + + def test_authorize_bad_path(self): + req = Request.blank('/badpath') + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('401'), resp) + req = Request.blank('/badpath') + req.remote_user = 'act:usr,act,AUTH_cfa' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + def test_authorize_account_access(self): + req = Request.blank('/v1/AUTH_cfa') + 
req.remote_user = 'act:usr,act,AUTH_cfa' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + def test_authorize_acl_group_access(self): + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = 'act' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = 'act:usr' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = 'act2' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = 'act:usr2' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + def test_deny_cross_reseller(self): + # Tests that cross-reseller is denied, even if ACLs/group names match + req = Request.blank('/v1/OTHER_cfa') + req.remote_user = 'act:usr,act,AUTH_cfa' + req.acl = 'act' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + def test_authorize_acl_referrer_access(self): + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = '.r:*' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.acl = '.r:.example.com' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + req = 
Request.blank('/v1/AUTH_cfa') + req.remote_user = 'act:usr,act' + req.referer = 'http://www.example.com/index.html' + req.acl = '.r:.example.com' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('401'), resp) + req = Request.blank('/v1/AUTH_cfa') + req.acl = '.r:*' + self.assertEquals(self.test_auth.authorize(req), None) + req = Request.blank('/v1/AUTH_cfa') + req.acl = '.r:.example.com' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('401'), resp) + req = Request.blank('/v1/AUTH_cfa') + req.referer = 'http://www.example.com/index.html' + req.acl = '.r:.example.com' + self.assertEquals(self.test_auth.authorize(req), None) + + def test_account_put_permissions(self): + req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) + req.remote_user = 'act:usr,act' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) + req.remote_user = 'act:usr,act,AUTH_other' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + # Even PUTs to your own account as account admin should fail + req = Request.blank('/v1/AUTH_old', environ={'REQUEST_METHOD': 'PUT'}) + req.remote_user = 'act:usr,act,AUTH_old' + resp = str(self.test_auth.authorize(req)) + self.assert_(resp.startswith('403'), resp) + + req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) + req.remote_user = 'act:usr,act,.reseller_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp, None) + + # .super_admin is not something the middleware should ever see or care + # about + req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) + req.remote_user = 'act:usr,act,.super_admin' + resp = self.test_auth.authorize(req) + resp = str(self.test_auth.authorize(req)) + 
self.assert_(resp.startswith('403'), resp) + + def test_get_token_fail(self): + resp = Request.blank('/auth/v1.0').get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_get_token_fail_invalid_key(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'invalid'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_get_token_fail_invalid_x_auth_user_format(self): + resp = Request.blank('/auth/v1/act/auth', + headers={'X-Auth-User': 'usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_get_token_fail_non_matching_account_in_request(self): + resp = Request.blank('/auth/v1/act/auth', + headers={'X-Auth-User': 'act2:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def test_get_token_fail_bad_path(self): + resp = Request.blank('/auth/v1/act/auth/invalid', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_get_token_fail_missing_key(self): + resp = Request.blank('/auth/v1/act/auth', + headers={'X-Auth-User': 'act:usr'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + + def 
test_get_token_fail_get_user_details(self): + self.test_auth.app = FakeApp(iter([ + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_fail_get_account(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_fail_put_new_token(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_fail_post_to_user(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_fail_get_clusters(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": 
"plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_fail_get_existing_token(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of token + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_token_success_v1_0(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assert_(resp.headers.get('x-auth-token', + '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + 
self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + + def test_get_token_success_v1_act_auth(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1/act/auth', + headers={'X-Storage-User': 'usr', + 'X-Storage-Pass': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assert_(resp.headers.get('x-auth-token', + '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + + def test_get_token_success_storage_instead_of_auth(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Storage-User': 'act:usr', + 'X-Storage-Pass': 'key'}).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 200) + self.assert_(resp.headers.get('x-auth-token', + '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + + def test_get_token_success_v1_act_auth_auth_instead_of_storage(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1/act/auth', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assert_(resp.headers.get('x-auth-token', + '').startswith('AUTH_tk'), resp.headers.get('x-auth-token')) + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + + def test_get_token_success_existing_token(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, + json.dumps({"auth": "plaintext:key", + "groups": ["act:usr", "act", ".admin"]})), + # GET of token + ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", + "account_id": 
"AUTH_cfa", "groups": ["act:usr", "key", ".admin"], + "expires": 9999999999.9999999})), + # GET of clusters object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.headers.get('x-auth-token'), 'AUTH_tktest') + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + + +if __name__ == '__main__': + unittest.main() From 3d2985201c1c5e0dd03397725d3a6d3bb863b123 Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 1 Dec 2010 21:28:41 -0800 Subject: [PATCH 010/199] swauth-list work; new swauth-set-account-service; .clusters is now .services; doc updates --- bin/swauth-add-account | 7 +- bin/swauth-add-user | 7 +- bin/swauth-list | 28 +++- doc/source/howto_installmultinode.rst | 10 +- doc/source/index.rst | 1 - doc/source/overview_auth.rst | 14 +- setup.py | 3 +- swift/common/middleware/swauth.py | 172 +++++++++++++++------ test/unit/common/middleware/test_swauth.py | 73 +++++---- 9 files changed, 219 insertions(+), 96 deletions(-) diff --git a/bin/swauth-add-account b/bin/swauth-add-account index 740dcddcb8..67b89038d7 100755 --- a/bin/swauth-add-account +++ b/bin/swauth-add-account @@ -25,8 +25,11 @@ from swift.common.bufferedhttp import http_connect_raw as http_connect if __name__ == '__main__': parser = OptionParser(usage='Usage: %prog [options] ') parser.add_option('-s', '--suffix', dest='suffix', - default='', help='The suffix to use as the storage account name ' - '(default: )') + default='', help='The suffix to use with the reseller prefix as the ' + 'storage 
account name (default: ) Note: If ' + 'the account already exists, this will have no effect on existing ' + 'service URLs. Those will need to be updated with ' + 'swauth-set-account-service') parser.add_option('-A', '--admin-url', dest='admin_url', default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' 'subsystem (default: http://127.0.0.1:8080/auth/)') diff --git a/bin/swauth-add-user b/bin/swauth-add-user index 9bf27bbd7a..58a774665c 100755 --- a/bin/swauth-add-user +++ b/bin/swauth-add-user @@ -35,8 +35,11 @@ if __name__ == '__main__': 'the reseller, including the ability to create new accounts. Creating ' 'a new reseller admin requires super_admin rights.') parser.add_option('-s', '--suffix', dest='suffix', - default='', help='The suffix to use as the storage account name ' - '(default: )') + default='', help='The suffix to use with the reseller prefix as the ' + 'storage account name (default: ) Note: If ' + 'the account already exists, this will have no effect on existing ' + 'service URLs. Those will need to be updated with ' + 'swauth-set-account-service') parser.add_option('-A', '--admin-url', dest='admin_url', default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' 'subsystem (default: http://127.0.0.1:8080/auth/') diff --git a/bin/swauth-list b/bin/swauth-list index d681b1c38e..0276caca32 100755 --- a/bin/swauth-list +++ b/bin/swauth-list @@ -27,7 +27,23 @@ from swift.common.bufferedhttp import http_connect_raw as http_connect if __name__ == '__main__': - parser = OptionParser(usage='Usage: %prog [options] [account] [user]') + parser = OptionParser(usage=''' +Usage: %prog [options] [account] [user] + +If [account] and [user] are omitted, a list of accounts will be output. + +If [account] is included but not [user], an account's information will be +output, including a list of users within the account. + +If [account] and [user] are included, the user's information will be output, +including a list of groups the user belongs to. 
+ +If the [user] is '.groups', the active groups for the account will be listed. +'''.strip()) + parser.add_option('-p', '--plain-text', dest='plain_text', + action='store_true', default=False, help='Changes the output from ' + 'JSON to plain text. This will cause an account to list only the ' + 'users and a user to list only the groups.') parser.add_option('-A', '--admin-url', dest='admin_url', default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' 'subsystem (default: http://127.0.0.1:8080/auth/') @@ -58,8 +74,10 @@ if __name__ == '__main__': resp = conn.getresponse() if resp.status // 100 != 2: print 'List failed: %s %s' % (resp.status, resp.reason) - if len(args) == 2 and args[1] != '.groups': - print resp.read() + body = resp.read() + if options.plain_text: + info = json.loads(body) + for group in info[['accounts', 'users', 'groups'][len(args)]]: + print group['name'] else: - for item in json.loads(resp.read()): - print item['name'] + print body diff --git a/doc/source/howto_installmultinode.rst b/doc/source/howto_installmultinode.rst index b16cfbbb14..fe38c02342 100644 --- a/doc/source/howto_installmultinode.rst +++ b/doc/source/howto_installmultinode.rst @@ -441,7 +441,15 @@ See :ref:`config-proxy` for the initial setup, and then follow these additional # Highly recommended to change this key to something else! super_admin_key = swauthkey -#. For DevAuth, after you change the default_cluster_url setting, you have to delete the auth database and recreate the Swift users, or manually update the auth database with the correct URL for each account. For Swauth, changing the cluster URLs for the accounts is not yet supported (you'd have to hack the .cluster objects manually; not recommended). +#. For DevAuth, after you change the default_cluster_url setting, you have to delete the auth database and recreate the Swift users, or manually update the auth database with the correct URL for each account. 
+ + For Swauth, you can change a service URL with:: + + swauth-set-account-service -K swauthkey storage local + + You can obtain old service URLs with:: + + swauth-list -K swauthkey #. Next, copy all the ring information to all the nodes, including your new proxy nodes, and ensure the ring info gets to all the storage nodes as well. diff --git a/doc/source/index.rst b/doc/source/index.rst index 3c5f5bb3b9..9b20293921 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -87,7 +87,6 @@ Source Documentation db object auth - swauth misc diff --git a/doc/source/overview_auth.rst b/doc/source/overview_auth.rst index 604aed266e..b3ad2e9db0 100644 --- a/doc/source/overview_auth.rst +++ b/doc/source/overview_auth.rst @@ -54,7 +54,7 @@ Swauth ------ The Swauth system is an optional DevAuth replacement included at -swift/common/middleware/swauth.py is a scalable authentication and +swift/common/middleware/swauth.py; a scalable authentication and authorization system that uses Swift itself as its backing store. This section will describe how it stores its data. @@ -93,8 +93,8 @@ Id string is stored in the `X-Container-Meta-Account-Id` header for the the corresponding auth service's account name. Also, to support a future where the auth service will support multiple Swift -clusters for the same auth service account, an -//.clusters object is created with its contents having a +clusters or even multiple services for the same auth service account, an +//.services object is created with its contents having a JSON dictionary of the format:: {"storage": {"default": "local", "local": }} @@ -105,7 +105,7 @@ various names instead of just "local", and the "default" key's value will contain the primary cluster to use for that account. Also, there may be more services in addition to the current "storage" service right now. 
-Here's an example .clusters dictionary at the moment:: +Here's an example .services dictionary at the moment:: {"storage": {"default": "local", @@ -165,12 +165,12 @@ Here is an example full listing of an :: AUTH_tk7594203449754c22a34ac7d910521c2e AUTH_tk8f2ee54605dd42a8913d244de544d19e reseller - .clusters + .services reseller test - .clusters + .services tester tester3 test2 - .clusters + .services tester2 diff --git a/setup.py b/setup.py index 6736a3b6a9..ab9233b30e 100644 --- a/setup.py +++ b/setup.py @@ -81,7 +81,8 @@ setup( 'bin/swift-account-stats-logger', 'bin/swauth-add-account', 'bin/swauth-add-user', 'bin/swauth-delete-account', 'bin/swauth-delete-user', - 'bin/swauth-list', 'bin/swauth-prep', 'bin/swift-auth-to-swauth', + 'bin/swauth-list', 'bin/swauth-prep', 'bin/swauth-set-account-service', + 'bin/swift-auth-to-swauth', ], entry_points={ 'paste.app_factory': [ diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 3838e3e6f2..92ee1b7455 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -181,7 +181,7 @@ class Swauth(object): detail = json.loads(resp.body) if detail['expires'] < time(): return None - groups = detail['groups'] + groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) @@ -283,7 +283,7 @@ class Swauth(object): req.path_info_pop() if req.method == 'GET': if not account and not user: - handler = self.handle_get_accounts + handler = self.handle_get_reseller elif account: if not user: handler = self.handle_get_account @@ -305,6 +305,8 @@ class Swauth(object): elif req.method == 'POST': if account == '.prep': handler = self.handle_prep + elif user == '.services': + handler = self.handle_set_services if not handler: req.response = HTTPBadRequest(request=req) else: @@ -337,17 +339,22 @@ class Swauth(object): (path, resp.status)) return HTTPNoContent(request=req) - def handle_get_accounts(self, req): + 
def handle_get_reseller(self, req): """ - Handles the GET v2 call for listing the accounts handled by this auth - system. Can only be called by a .reseller_admin. + Handles the GET v2 call for getting general reseller information + (currently just a list of accounts). Can only be called by a + .reseller_admin. - On success, a JSON list of dicts will be returned. Each dict represents - an account and currently only contains the single key `name`. + On success, a JSON dictionary will be returned with a single `accounts` + key whose value is list of dicts. Each dict represents an account and + currently only contains the single key `name`. For example:: + + {"accounts": [{"name": "reseller"}, {"name": "test"}, + {"name": "test2"}]} :param req: The webob.Request to process. - :returns: webob.Response, 2xx on success with a JSON list of the - accounts as explained above. + :returns: webob.Response, 2xx on success with a JSON dictionary as + explained above. """ if not self.is_reseller_admin(req): return HTTPForbidden(request=req) @@ -368,25 +375,43 @@ class Swauth(object): if container['name'][0] != '.': listing.append({'name': container['name']}) marker = sublisting[-1]['name'] - return Response(body=json.dumps(listing)) + return Response(body=json.dumps({'accounts': listing})) def handle_get_account(self, req): """ - Handles the GET v2/ call for listing the users in an account. + Handles the GET v2/ call for getting account information. Can only be called by an account .admin. - On success, a JSON list of dicts will be returned. Each dict represents - a user and currently only contains the single key `name`. + On success, a JSON dictionary will be returned containing the keys + `account_id`, `services`, and `users`. The `account_id` is the value + used when creating service accounts. The `services` value is a dict as + described in the :func:`handle_get_token` call. 
The `users` value is a + list of dicts, each dict representing a user and currently only + containing the single key `name`. For example:: + + {"account_id": "AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162", + "services": {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162"}}, + "users": [{"name": "tester"}, {"name": "tester3"}]} :param req: The webob.Request to process. - :returns: webob.Response, 2xx on success with a JSON list of the users - in the account as explained above. + :returns: webob.Response, 2xx on success with a JSON dictionary as + explained above. """ account = req.path_info_pop() if req.path_info: return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return HTTPForbidden(request=req) + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not obtain the .services object: %s %s' % + (path, resp.status)) + services = json.loads(resp.body) listing = [] marker = '' while True: @@ -394,6 +419,7 @@ class Swauth(object): (self.auth_account, account)), quote(marker)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) + account_id = resp.headers['X-Container-Meta-Account-Id'] if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: @@ -406,7 +432,38 @@ class Swauth(object): if obj['name'][0] != '.': listing.append({'name': obj['name']}) marker = sublisting[-1]['name'] - return Response(body=json.dumps(listing)) + return Response(body=json.dumps({'account_id': account_id, + 'services': services, 'users': listing})) + + def handle_set_services(self, req): + if not self.is_reseller_admin(req): + return HTTPForbidden(request=req) + account = req.path_info_pop() + if req.path_info != '/.services' or not 
account.isalnum(): + return HTTPBadRequest(request=req) + new_services = json.loads(req.body) + # Get the current services information + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) + if resp.status_int // 100 != 2: + raise Exception('Could not obtain services info: %s %s' % + (path, resp.status)) + services = json.loads(resp.body) + for new_service, value in new_services.iteritems(): + if new_service in services: + services[new_service].update(value) + else: + services[new_service] = value + # Save the new services information + resp = self.make_request(req.environ, 'PUT', path, + json.dumps(services)).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not save .services object: %s %s' % + (path, resp.status)) + return HTTPNoContent(request=req) def handle_put_account(self, req): """ @@ -467,15 +524,15 @@ class Swauth(object): raise Exception('Could not create account id mapping: %s %s' % (path, resp.status)) # Record the cluster url(s) for the account - path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) - clusters = {'storage': {}} - clusters['storage'][self.dsc_name] = '%s/%s%s' % (self.dsc_url, + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) + services = {'storage': {}} + services['storage'][self.dsc_name] = '%s/%s%s' % (self.dsc_url, self.reseller_prefix, account_suffix) - clusters['storage']['default'] = self.dsc_name + services['storage']['default'] = self.dsc_name resp = self.make_request(req.environ, 'PUT', path, - json.dumps(clusters)).get_response(self.app) + json.dumps(services)).get_response(self.app) if resp.status_int // 100 != 2: - raise Exception('Could not create .clusters object: %s %s' % + raise Exception('Could not create .services object: %s %s' % (path, resp.status)) # Record the mapping from account name to the 
account id path = quote('/v1/%s/%s' % (self.auth_account, account)) @@ -519,16 +576,16 @@ class Swauth(object): if obj['name'][0] != '.': return HTTPConflict(request=req) marker = sublisting[-1]['name'] - # Obtain the listing of clusters the account is on. - path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) + # Obtain the listing of services the account is on. + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: return HTTPNoContent(request=req) elif resp.status_int // 100 == 2: - clusters = json.loads(resp.body) + services = json.loads(resp.body) # Delete the account on each cluster it is on. - for name, url in clusters['storage'].iteritems(): + for name, url in services['storage'].iteritems(): if name != 'default': parsed = urlparse(url) if parsed.scheme == 'http': @@ -543,8 +600,8 @@ class Swauth(object): raise Exception('Could not delete account on the ' 'Swift cluster: %s %s %s' % (url, resp.status, resp.reason)) - # Delete the .clusters object itself. - path = quote('/v1/%s/%s/.clusters' % + # Delete the .services object itself. + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_request(req.environ, 'DELETE', path).get_response(self.app) @@ -577,31 +634,43 @@ class Swauth(object): def handle_get_user(self, req): """ - Handles the GET v2// call for retrieving the user's JSON - dict. Can only be called by an account .admin. + Handles the GET v2// call for getting user information. + Can only be called by an account .admin. On success, a JSON dict will be returned as described:: - {"groups": [ # List of groups the user is a member of - ":", # The first group is a unique user identifier - "", # The second group is the auth account name - ""... - # There may be additional groups, .admin being a special group - # indicating an account admin and .reseller_admin indicating a - # reseller admin. 
+ {"groups": [ # List of groups the user is a member of + {"name": ":"}, + # The first group is a unique user identifier + {"name": ""}, + # The second group is the auth account name + {"name": ""} + # There may be additional groups, .admin being a special + # group indicating an account admin and .reseller_admin + # indicating a reseller admin. ], "auth": "plaintext:" # The auth-type and key for the user; currently only plaintext is # implemented. } - If the in the request is the special user `.groups`, a JSON list - of dicts will be returned instead, each dict representing a group in - the account currently with just the single key `name`. + For example:: + + {"groups": [{"name": "test:tester"}, {"name": "test"}, + {"name": ".admin"}], + "auth": "plaintext:testing"} + + If the in the request is the special user `.groups`, the JSON + dict will contain a single key of `groups` whose value is a list of + dicts representing the active groups within the account. Each dict + currently has the single key `name`. For example:: + + {"groups": [{"name": ".admin"}, {"name": "test"}, + {"name": "test:tester"}, {"name": "test:tester3"}]} :param req: The webob.Request to process. - :returns: webob.Response, 2xx on success with data set as explained - above. + :returns: webob.Response, 2xx on success with a JSON dictionary as + explained above. 
""" account = req.path_info_pop() user = req.path_info_pop() @@ -641,9 +710,11 @@ class Swauth(object): if resp.status_int // 100 != 2: raise Exception('Could not retrieve user object: ' '%s %s' % (path, resp.status)) - groups.update(json.loads(resp.body)['groups']) + groups.update(g['name'] + for g in json.loads(resp.body)['groups']) marker = sublisting[-1]['name'] - body = json.dumps(list({'name': g} for g in sorted(groups))) + body = json.dumps({'groups': + [{'name': g} for g in sorted(groups)]}) else: path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_request(req.environ, 'GET', @@ -695,7 +766,8 @@ class Swauth(object): if reseller_admin: groups.append('.reseller_admin') resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': - 'plaintext:%s' % key, 'groups': groups})).get_response(self.app) + 'plaintext:%s' % key, + 'groups': [{'name': g} for g in groups]})).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) @@ -765,7 +837,7 @@ class Swauth(object): X-Storage-Token set to the token to use with Swift and X-Storage-URL set to the URL to the default Swift cluster to use. 
- The response body will be set to the account's clusters JSON object as + The response body will be set to the account's services JSON object as described here:: {"storage": { # Represents the Swift storage service end points @@ -878,12 +950,12 @@ class Swauth(object): if resp.status_int // 100 != 2: raise Exception('Could not save new token: %s %s' % (path, resp.status)) - # Get the cluster url information - path = quote('/v1/%s/%s/.clusters' % (self.auth_account, account)) + # Get the services information + path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: - raise Exception('Could not obtain clusters info: %s %s' % + raise Exception('Could not obtain services info: %s %s' % (path, resp.status)) detail = json.loads(resp.body) url = detail['storage'][detail['storage']['default']] @@ -978,7 +1050,7 @@ class Swauth(object): def get_itoken(self, env): """ Returns the current internal token to use for the auth system's own - actions with other Swift clusters. Each process will create its own + actions with other services. Each process will create its own itoken and the token will be deleted and recreated based on the token_life configuration value. 
The itoken information is stored in memcache because the auth process that is asked by Swift to validate diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 20075e4ea2..55b8bc39c6 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -177,7 +177,8 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No Content', {}, '')])) local_auth = auth.filter_factory({'super_admin_key': 'supertest', @@ -219,7 +220,8 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No Content', {}, '')])) resp = Request.blank('/v1/AUTH_cfa', @@ -233,13 +235,15 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No Content', {}, ''), ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No Content', {}, '')])) resp = Request.blank('/v1/AUTH_cfa', @@ -254,7 +258,8 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No 
Content', {}, ''), # Don't need a second token object returned if memcache is used @@ -276,7 +281,8 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() - 1}))])) resp = Request.blank('/v1/AUTH_cfa', headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) @@ -287,7 +293,8 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', - 'groups': ['act:usr', 'act', '.admin'], + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}], 'expires': time() + 60})), ('204 No Content', {}, '')])) resp = Request.blank('/v1/AUTH_cfa', @@ -421,14 +428,15 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), # POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1.0', @@ -472,7 +480,8 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('503 Service Unavailable', {}, '')])) resp = Request.blank('/auth/v1.0', @@ -485,7 +494,8 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': 
"act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token @@ -500,7 +510,8 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token @@ -512,19 +523,20 @@ class TestAuth(unittest.TestCase): 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) - def test_get_token_fail_get_clusters(self): + def test_get_token_fail_get_services(self): self.test_auth.app = FakeApp(iter([ # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), # POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('503 Service Unavailable', {}, '')])) resp = Request.blank('/auth/v1.0', headers={'X-Auth-User': 'act:usr', @@ -536,7 +548,8 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of token ('503 Service Unavailable', {}, '')])) resp = Request.blank('/auth/v1.0', @@ -549,14 +562,15 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', 
{'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), # POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1.0', @@ -578,14 +592,15 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), # POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1/act/auth', @@ -607,14 +622,15 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), # POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1.0', @@ -636,14 +652,15 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of account ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of new token ('201 Created', {}, ''), 
# POST of token to user object ('204 No Content', {}, ''), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1/act/auth', @@ -665,12 +682,14 @@ class TestAuth(unittest.TestCase): # GET of user object ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, json.dumps({"auth": "plaintext:key", - "groups": ["act:usr", "act", ".admin"]})), + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), # GET of token ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", - "account_id": "AUTH_cfa", "groups": ["act:usr", "key", ".admin"], + "account_id": "AUTH_cfa", "groups": [{'name': "act:usr"}, + {'name': "key"}, {'name': ".admin"}], "expires": 9999999999.9999999})), - # GET of clusters object + # GET of services object ('200 Ok', {}, json.dumps({"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) resp = Request.blank('/auth/v1.0', From 4d8c4576cb36641f7e2c1416e8f014eb12118b6d Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 1 Dec 2010 21:29:07 -0800 Subject: [PATCH 011/199] Added forgotten swauth-set-account-service --- bin/swauth-set-account-service | 70 ++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100755 bin/swauth-set-account-service diff --git a/bin/swauth-set-account-service b/bin/swauth-set-account-service new file mode 100755 index 0000000000..57ca530d38 --- /dev/null +++ b/bin/swauth-set-account-service @@ -0,0 +1,70 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import simplejson as json +except ImportError: + import json +from optparse import OptionParser +from os.path import basename +from sys import argv, exit +from urlparse import urlparse + +from swift.common.bufferedhttp import http_connect_raw as http_connect + + +if __name__ == '__main__': + parser = OptionParser(usage=''' +Usage: %prog [options] + +Sets a service URL for an account. Can only be set by a reseller admin. + +Example: %prog -K swauthkey test storage local http://127.0.0.1:8080/v1/AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162 +'''.strip()) + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/)') + parser.add_option('-U', '--admin-user', dest='admin_user', + default='.super_admin', help='The user with admin rights to add users ' + '(default: .super_admin).') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for the user with admin rights to add users.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 4: + parser.parse_args(['-h']) + account, service, name, url = args + parsed = urlparse(options.admin_url) + if parsed.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (parsed.scheme, repr(options.admin_url))) + if not parsed.path: + parsed.path = '/' + elif parsed.path[-1] != '/': + parsed.path += '/' + path = '%sv2/%s/.services' % (parsed.path, account) + body = 
json.dumps({service: {name: url}}) + headers = {'Content-Length': str(len(body)), + 'X-Auth-Admin-User': options.admin_user, + 'X-Auth-Admin-Key': options.admin_key} + conn = http_connect(parsed.hostname, parsed.port, 'POST', path, headers, + ssl=(parsed.scheme == 'https')) + conn.send(body) + resp = conn.getresponse() + if resp.status // 100 != 2: + print 'Service set failed: %s %s' % (resp.status, resp.reason) From 442e3c8a1a92eb3da140fc39f78b4ff86c50d11a Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Thu, 2 Dec 2010 13:37:49 +0000 Subject: [PATCH 012/199] use logging formatter to standardize transaction id logging --- swift/account/server.py | 1 + swift/common/utils.py | 48 +++++++++++++++++++++++++++------------ swift/container/server.py | 1 + swift/obj/server.py | 1 + swift/proxy/server.py | 1 + 5 files changed, 37 insertions(+), 15 deletions(-) diff --git a/swift/account/server.py b/swift/account/server.py index 67a67c4854..5bd7bba517 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -297,6 +297,7 @@ class AccountController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: diff --git a/swift/common/utils.py b/swift/common/utils.py index 1c48c61339..d4dc078916 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -284,23 +284,24 @@ class LoggerFileObject(object): return self -class NamedLogger(object): +class LogAdapter(object): """Cheesy version of the LoggerAdapter available in Python 3""" - def __init__(self, logger, server): + def __init__(self, logger): self.logger = logger - self.server = server - for proxied_method in ('debug', 'info', 'log', 'warn', 'warning', - 'error', 'critical'): - setattr(self, proxied_method, - self._proxy(getattr(logger, proxied_method))) + self._txn_id = threading.local() + for proxied_method in 
('debug', 'log', 'warn', 'warning', 'error', + 'critical', 'info'): + setattr(self, proxied_method, getattr(logger, proxied_method)) - def _proxy(self, logger_meth): + @property + def txn_id(self): + if hasattr(self._txn_id, 'value'): + return self._txn_id.value - def _inner_proxy(msg, *args, **kwargs): - msg = '%s %s' % (self.server, msg) - logger_meth(msg, *args, **kwargs) - return _inner_proxy + @txn_id.setter + def txn_id(self, value): + self._txn_id.value = value def getEffectiveLevel(self): return self.logger.getEffectiveLevel() @@ -330,7 +331,21 @@ class NamedLogger(object): emsg += ' %s' % exc.msg else: call = self.logger.exception - call('%s %s: %s' % (self.server, msg, emsg), *args) + call('%s: %s' % (msg, emsg), *args) + + +class NamedFormatter(logging.Formatter): + def __init__(self, server, logger): + logging.Formatter.__init__(self) + self.server = server + self.logger = logger + + def format(self, record): + msg = logging.Formatter.format(self, record) + if record.levelno != logging.INFO and self.logger.txn_id: + return '%s %s (txn: %s)' % (self.server, msg, self.logger.txn_id) + else: + return '%s %s' % (self.server, msg) def get_logger(conf, name=None, log_to_console=False): @@ -359,7 +374,8 @@ def get_logger(conf, name=None, log_to_console=False): root_logger.addHandler(get_logger.console) if conf is None: root_logger.setLevel(logging.INFO) - return NamedLogger(root_logger, name) + adapted_logger = LogAdapter(root_logger) + return adapted_logger if name is None: name = conf.get('log_name', 'swift') get_logger.handler = SysLogHandler(address='/dev/log', @@ -369,7 +385,9 @@ def get_logger(conf, name=None, log_to_console=False): root_logger.addHandler(get_logger.handler) root_logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) - return NamedLogger(root_logger, name) + adapted_logger = LogAdapter(root_logger) + get_logger.handler.setFormatter(NamedFormatter(name, adapted_logger)) + return adapted_logger def 
drop_privileges(user): diff --git a/swift/container/server.py b/swift/container/server.py index 45ed00fd4d..82e222435a 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -384,6 +384,7 @@ class ContainerController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: diff --git a/swift/obj/server.py b/swift/obj/server.py index 632a0c04cc..cdddf72edf 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -555,6 +555,7 @@ class ObjectController(object): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() req = Request(env) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: diff --git a/swift/proxy/server.py b/swift/proxy/server.py index e48052a398..b9429ef875 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1410,6 +1410,7 @@ class BaseApplication(object): controller = controller(self, **path_parts) controller.trans_id = req.headers.get('x-cf-trans-id', '-') + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) try: handler = getattr(controller, req.method) if not getattr(handler, 'publicly_accessible'): From 10a8fc94994a74976b8e859b5b0b6b061257fe72 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 2 Dec 2010 14:21:25 -0800 Subject: [PATCH 013/199] Expired token cleanup --- swift/common/middleware/swauth.py | 6 ++++++ test/unit/common/middleware/test_swauth.py | 6 +++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 92ee1b7455..c50cb578ee 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -180,6 +180,7 @@ class Swauth(object): return None detail = 
json.loads(resp.body) if detail['expires'] < time(): + self.make_request(env, 'DELETE', path).get_response(self.app) return None groups = [g['name'] for g in detail['groups']] if '.admin' in groups: @@ -916,6 +917,9 @@ class Swauth(object): token_detail = json.loads(resp.body) if token_detail['expires'] > time(): token = candidate_token + else: + self.make_request(req.environ, 'DELETE', + path).get_response(self.app) elif resp.status_int != 404: raise Exception('Could not detect whether a token already ' 'exists: %s %s' % (path, resp.status)) @@ -1003,6 +1007,8 @@ class Swauth(object): detail = json.loads(resp.body) expires = detail['expires'] if expires < time(): + self.make_request(req.environ, 'DELETE', + path).get_response(self.app) return HTTPUnauthorized(request=req) groups = detail['groups'] if '.admin' in groups: diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 55b8bc39c6..53adebb19a 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -278,15 +278,19 @@ class TestAuth(unittest.TestCase): def test_auth_just_expired(self): self.test_auth.app = FakeApp(iter([ + # Request for token (which will have expired) ('200 Ok', {}, json.dumps({'account': 'act', 'user': 'act:usr', 'account_id': 'AUTH_cfa', 'groups': [{'name': 'act:usr'}, {'name': 'act'}, {'name': '.admin'}], - 'expires': time() - 1}))])) + 'expires': time() - 1})), + # Request to delete token + ('204 No Content', {}, '')])) resp = Request.blank('/v1/AUTH_cfa', headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 401) + self.assertEquals(self.test_auth.app.calls, 2) def test_middleware_storage_token(self): self.test_auth.app = FakeApp(iter([ From 7d8ff50f43b792f1576c8de1677e3b6be527b538 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 2 Dec 2010 19:37:58 -0800 Subject: [PATCH 014/199] SegmentIterable: logs exceptions just once; 503s on exception; 
fix except syntax; make sure self.response is always *something* --- swift/proxy/server.py | 52 ++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index a0c71bdeb9..2d925fbc02 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -103,17 +103,21 @@ class SegmentedIterable(object): """ Iterable that returns the object contents for a segmented object in Swift. - In addition to these params, you can also set the `response` attr just - after creating the SegmentedIterable and it will update the response's - `bytes_transferred` value (used to log the size of the request). + If set, the response's `bytes_transferred` value will be updated (used to + log the size of the request). Also, if there's a failure that cuts the + transfer short, the response's `status_int` will be updated (again, just + for logging since the original status would have already been sent to the + client). :param controller: The ObjectController instance to work with. :param container: The container the object segments are within. :param listing: The listing of object segments to iterate over; this is a standard JSON decoded container listing. 
+ :param response: The webob.Response this iterable is associated with, if + any (default: None) """ - def __init__(self, controller, container, listing): + def __init__(self, controller, container, listing, response=None): self.controller = controller self.container = container self.listing = listing @@ -121,7 +125,9 @@ class SegmentedIterable(object): self.seek = 0 self.segment_iter = None self.position = 0 - self.response = None + self.response = response + if not self.response: + self.response = Response() def _load_next_segment(self): """ @@ -150,12 +156,16 @@ class SegmentedIterable(object): raise Exception('Could not load object segment %s: %s' % (path, resp.status_int)) self.segment_iter = resp.app_iter + except StopIteration: + raise except Exception, err: - if not isinstance(err, StopIteration): + if not getattr(err, 'swift_logged', False): self.controller.app.logger.exception('ERROR: While processing ' 'manifest /%s/%s/%s %s' % (self.controller.account_name, self.controller.container_name, self.controller.object_name, self.controller.trans_id)) + err.swift_logged = True + self.response.status_int = 503 raise def __iter__(self): @@ -172,16 +182,19 @@ class SegmentedIterable(object): except StopIteration: self._load_next_segment() self.position += len(chunk) - if self.response: - self.response.bytes_transferred = getattr(self.response, - 'bytes_transferred', 0) + len(chunk) + self.response.bytes_transferred = getattr(self.response, + 'bytes_transferred', 0) + len(chunk) yield chunk + except StopIteration: + raise except Exception, err: - if not isinstance(err, StopIteration): + if not getattr(err, 'swift_logged', False): self.controller.app.logger.exception('ERROR: While processing ' 'manifest /%s/%s/%s %s' % (self.controller.account_name, self.controller.container_name, self.controller.object_name, self.controller.trans_id)) + err.swift_logged = True + self.response.status_int = 503 raise def app_iter_range(self, start, stop): @@ -215,19 +228,22 @@ class 
SegmentedIterable(object): length -= len(chunk) if length < 0: # Chop off the extra: - if self.response: - self.response.bytes_transferred = \ - getattr(self.response, 'bytes_transferred', 0) \ - + length + self.response.bytes_transferred = \ + getattr(self.response, 'bytes_transferred', 0) \ + + length yield chunk[:length] break yield chunk + except StopIteration: + raise except Exception, err: - if not isinstance(err, StopIteration): + if not getattr(err, 'swift_logged', False): self.controller.app.logger.exception('ERROR: While processing ' 'manifest /%s/%s/%s %s' % (self.controller.account_name, self.controller.container_name, self.controller.object_name, self.controller.trans_id)) + err.swift_logged = True + self.response.status_int = 503 raise @@ -713,10 +729,10 @@ class ObjectController(Controller): for key, value in resp.headers.iteritems(): if key.lower().startswith('x-object-meta-'): headers[key] = value - resp = Response(app_iter=SegmentedIterable(self, lcontainer, - listing), headers=headers, request=req, + resp = Response(headers=headers, request=req, conditional_response=True) - resp.app_iter.response = resp + resp.app_iter = SegmentedIterable(self, lcontainer, listing, resp) + resp.content_length = content_length return resp @public From 36cf8c4b8575b6d8f7af57281b2ce683ccd02165 Mon Sep 17 00:00:00 2001 From: gholt Date: Fri, 3 Dec 2010 10:22:57 -0800 Subject: [PATCH 015/199] swauth: another batch of tests and bugfixes found while testing --- swift/common/middleware/swauth.py | 19 +- test/unit/common/middleware/test_swauth.py | 378 +++++++++++++++++++++ 2 files changed, 389 insertions(+), 8 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index c50cb578ee..9bf39399c7 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -72,7 +72,7 @@ class Swauth(object): msg = 'No super_admin_key set in conf file! Exiting.' 
try: self.logger.critical(msg) - except: + except Exception: pass raise ValueError(msg) self.token_life = int(conf.get('token_life', 86400)) @@ -400,7 +400,7 @@ class Swauth(object): explained above. """ account = req.path_info_pop() - if req.path_info: + if req.path_info or not account.isalnum(): return HTTPBadRequest(request=req) if not self.is_account_admin(req, account): return HTTPForbidden(request=req) @@ -420,12 +420,12 @@ class Swauth(object): (self.auth_account, account)), quote(marker)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) - account_id = resp.headers['X-Container-Meta-Account-Id'] if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) + account_id = resp.headers['X-Container-Meta-Account-Id'] sublisting = json.loads(resp.body) if not sublisting: break @@ -1087,7 +1087,10 @@ class Swauth(object): req.headers.get('x-auth-admin-user').split(':', 1) path = quote('/v1/%s/%s/%s' % (self.auth_account, admin_account, admin_user)) - resp = self.make_request(req.env, 'GET', path).get_response(self.app) + resp = self.make_request(req.environ, 'GET', + path).get_response(self.app) + if resp.status_int == 404: + return None if resp.status_int // 100 != 2: raise Exception('Could not get admin user object: %s %s' % (path, resp.status)) @@ -1104,7 +1107,7 @@ class Swauth(object): :param key: The key to validate for the user. :returns: True if the key is valid for the user, False if not. 
""" - return user_detail.get('auth') == 'plaintext:%s' % key + return user_detail and user_detail.get('auth') == 'plaintext:%s' % key def is_super_admin(self, req): """ @@ -1135,7 +1138,7 @@ class Swauth(object): if not self.credentials_match(admin_detail, req.headers.get('x-auth-admin-key')): return False - return '.reseller_admin' in admin_detail['groups'] + return '.reseller_admin' in (g['name'] for g in admin_detail['groups']) def is_account_admin(self, req, account): """ @@ -1151,8 +1154,8 @@ class Swauth(object): admin_detail = self.get_admin_detail(req) if self.is_reseller_admin(req, admin_detail=admin_detail): return True - return admin_detail['account'] == account and \ - '.admin' in admin_detail['groups'] + return admin_detail and admin_detail['account'] == account and \ + '.admin' in (g['name'] for g in admin_detail['groups']) def posthooklogger(self, env, req): response = getattr(req, 'response', None) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 53adebb19a..73d26a1ac3 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -139,6 +139,16 @@ class TestAuth(unittest.TestCase): self.assertEquals(ath.default_swift_cluster, 'local:http://host/path') + def test_top_level_ignore(self): + resp = Request.blank('/').get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + + def test_anon(self): + resp = Request.blank('/v1/AUTH_account').get_response(self.test_auth) + self.assertEquals(resp.status_int, 401) + self.assertEquals(resp.environ['swift.authorize'], + self.test_auth.authorize) + def test_auth_deny_non_reseller_prefix(self): resp = Request.blank('/v1/BLAH_account', headers={'X-Auth-Token': 'BLAH_t'}).get_response(self.test_auth) @@ -709,6 +719,374 @@ class TestAuth(unittest.TestCase): {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + def test_prep_success(self): + self.test_auth.app = 
FakeApp(iter([ + # PUT of .auth account + ('201 Created', {}, ''), + # PUT of .token container + ('201 Created', {}, ''), + # PUT of .account_id container + ('201 Created', {}, '')])) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + + def test_prep_bad_method(self): + resp = Request.blank('/auth/v2/.prep', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'HEAD'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_prep_bad_creds(self): + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': 'super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'upertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 403) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + def test_prep_fail_account_create(self): + self.test_auth.app = FakeApp(iter([ + # PUT of .auth account + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_prep_fail_token_container_create(self): + self.test_auth.app = FakeApp(iter([ + # PUT of .auth account + ('201 Created', {}, ''), + # PUT of .token container + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_prep_fail_account_id_container_create(self): + self.test_auth.app = FakeApp(iter([ + # PUT of .auth account + ('201 Created', {}, ''), + # PUT of .token container + ('201 Created', {}, ''), + # PUT of .account_id container + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/.prep', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_reseller_success(self): + self.test_auth.app = FakeApp(iter([ + # GET of .auth account (list containers) + ('200 Ok', {}, json.dumps([ + {"name": ".token", "count": 0, "bytes": 0}, + {"name": ".account_id", "count": 0, "bytes": 0}, + {"name": "act", "count": 0, "bytes": 0}])), + # GET of .auth account (list containers continuation) + ('200 Ok', {}, '[]')])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': '.super_admin', 
+ 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {"accounts": [{"name": "act"}]}) + + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}, + {"name": ".reseller_admin"}], "auth": "plaintext:key"})), + # GET of .auth account (list containers) + ('200 Ok', {}, json.dumps([ + {"name": ".token", "count": 0, "bytes": 0}, + {"name": ".account_id", "count": 0, "bytes": 0}, + {"name": "act", "count": 0, "bytes": 0}])), + # GET of .auth account (list containers continuation) + ('200 Ok', {}, '[]')])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {"accounts": [{"name": "act"}]}) + + def test_get_reseller_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': 'super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller admin) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': 'act:usr', + 
'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + def test_get_reseller_fail_listing(self): + self.test_auth.app = FakeApp(iter([ + # GET of .auth account (list containers) + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + self.test_auth.app = FakeApp(iter([ + # GET of .auth account (list containers) + ('200 Ok', {}, json.dumps([ + {"name": ".token", "count": 0, "bytes": 0}, + {"name": ".account_id", "count": 0, "bytes": 0}, + {"name": "act", "count": 0, "bytes": 0}])), + # GET of .auth account (list containers continuation) + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_get_account_success(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # GET of account container (list objects) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}, + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of account container (list objects continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': 
'.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {'account_id': 'AUTH_cfa', + 'services': {'storage': + {'default': 'local', + 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, + 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) + + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"})), + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # GET of account container (list objects) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}, + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of account container (list objects continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {'account_id': 'AUTH_cfa', + 'services': {'storage': + {'default': 'local', + 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, + 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) + + def test_get_account_fail_bad_account_name(self): + resp = Request.blank('/auth/v2/.token', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) 
+ self.assertEquals(resp.status_int, 400) + resp = Request.blank('/auth/v2/.anything', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_get_account_fail_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': 'super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but wrong account) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': 'act2:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + + def test_get_account_fail_get_services(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 404) + + def test_get_account_fail_listing(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # GET of account container (list objects) + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # GET of account container (list objects) + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # GET of account container (list objects) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}, + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of account container (list objects continuation) + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 500) + if __name__ == '__main__': unittest.main() From 5ff95b2dcf8c34f8860e620aa7ac2933a22b66c5 Mon Sep 17 00:00:00 2001 From: gholt Date: Fri, 3 Dec 2010 15:24:16 -0800 Subject: [PATCH 016/199] swauth: another batch of tests and bugfixes found while testing --- swift/common/middleware/swauth.py | 163 +- test/unit/common/middleware/test_swauth.py | 1694 +++++++++++++++++++- 2 files changed, 1788 insertions(+), 69 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 9bf39399c7..c6f0859cd4 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -437,12 +437,51 @@ class Swauth(object): 'services': services, 'users': listing})) def handle_set_services(self, req): + """ + Handles the POST v2//.services call for setting services + information. Can only be called by a reseller .admin. + + In the :func:`handle_get_account` (GET v2/) call, a section of + the returned JSON dict is `services`. This section looks something like + this:: + + "services": {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162"}} + + Making use of this section is described in :func:`handle_get_token`. + + This function allows setting values within this section for the + , allowing the addition of new service end points or updating + existing ones. + + The body of the POST request should contain a JSON dict with the + following format:: + + {"service_name": {"end_point_name": "end_point_value"}} + + There can be multiple services and multiple end points in the same + call. + + Any new services or end points will be added to the existing set of + services and end points. Any existing services with the same service + name will be merged with the new end points. Any existing end points + with the same end point name will have their values updated. + + The updated services dictionary will be returned on success. 
+ + :param req: The webob.Request to process. + :returns: webob.Response, 2xx on success with the udpated services JSON + dict as described above + """ if not self.is_reseller_admin(req): return HTTPForbidden(request=req) account = req.path_info_pop() if req.path_info != '/.services' or not account.isalnum(): return HTTPBadRequest(request=req) - new_services = json.loads(req.body) + try: + new_services = json.loads(req.body) + except ValueError, err: + return HTTPBadRequest(body=str(err)) # Get the current services information path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_request(req.environ, 'GET', @@ -459,12 +498,13 @@ class Swauth(object): else: services[new_service] = value # Save the new services information + services = json.dumps(services) resp = self.make_request(req.environ, 'PUT', path, - json.dumps(services)).get_response(self.app) + services).get_response(self.app) if resp.status_int // 100 != 2: raise Exception('Could not save .services object: %s %s' % (path, resp.status)) - return HTTPNoContent(request=req) + return Response(request=req, body=services) def handle_put_account(self, req): """ @@ -558,7 +598,7 @@ class Swauth(object): account = req.path_info_pop() if req.path_info or not account.isalnum(): return HTTPBadRequest(request=req) - # Make sure the account has no users. 
+ # Make sure the account has no users and get the account_id marker = '' while True: path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' % @@ -566,10 +606,11 @@ class Swauth(object): resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int == 404: - break + return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not list in main auth account: %s %s' % (path, resp.status)) + account_id = resp.headers['x-container-meta-account-id'] sublisting = json.loads(resp.body) if not sublisting: break @@ -581,57 +622,57 @@ class Swauth(object): path = quote('/v1/%s/%s/.services' % (self.auth_account, account)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNoContent(request=req) - elif resp.status_int // 100 == 2: + if resp.status_int // 100 != 2 and resp.status_int != 404: + raise Exception('Could not obtain .services object: %s %s' % + (path, resp.status)) + if resp.status_int // 100 == 2: services = json.loads(resp.body) # Delete the account on each cluster it is on. + deleted_any = False for name, url in services['storage'].iteritems(): if name != 'default': parsed = urlparse(url) - if parsed.scheme == 'http': - conn = HTTPConnection(parsed.netloc) - else: - conn = HTTPSConnection(parsed.netloc) + conn = self.get_conn(parsed) conn.request('DELETE', parsed.path, headers={'X-Auth-Token': self.get_itoken(req.environ)}) resp = conn.getresponse() resp.read() - if resp.status // 100 != 2: + if resp.status == 409: + if deleted_any: + raise Exception('Managed to delete one or more ' + 'service end points, but failed with: ' + '%s %s %s' % (url, resp.status, resp.reason)) + else: + return HTTPConflict(request=req) + if resp.status // 100 != 2 and resp.status != 404: raise Exception('Could not delete account on the ' 'Swift cluster: %s %s %s' % (url, resp.status, resp.reason)) - # Delete the .services object itself. 
- path = quote('/v1/%s/%s/.services' % - (self.auth_account, account)) - resp = self.make_request(req.environ, 'DELETE', - path).get_response(self.app) - # Obtain the account id mapping for the account. - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_request(req.environ, 'HEAD', - path).get_response(self.app) - if resp.status_int == 404: - return HTTPNoContent(request=req) - elif 'x-container-meta-account-id' in resp.headers: - account_id = resp.headers['x-container-meta-account-id'] - # Delete the account id mapping for the account. - path = quote('/v1/%s/.account_id/%s' % - (self.auth_account, account_id)) - resp = self.make_request(req.environ, 'DELETE', - path).get_response(self.app) - if resp.status_int // 100 != 2: - self.logger.error('Could not delete account id ' - 'mapping: %s %s' % (path, resp.status)) - # Delete the account marker itself. - path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_request(req.environ, 'DELETE', - path).get_response(self.app) - if resp.status_int // 100 != 2: - self.logger.error('Could not delete account marked: ' - '%s %s' % (path, resp.status)) - else: - raise Exception('Could not verify account within main auth ' - 'account: %s %s' % (path, resp.status)) + deleted_any = True + # Delete the .services object itself. + path = quote('/v1/%s/%s/.services' % + (self.auth_account, account)) + resp = self.make_request(req.environ, 'DELETE', + path).get_response(self.app) + if resp.status_int // 100 != 2 and resp.status_int != 404: + raise Exception('Could not delete .services object: %s %s' % + (path, resp.status)) + # Delete the account id mapping for the account. 
+ path = quote('/v1/%s/.account_id/%s' % + (self.auth_account, account_id)) + resp = self.make_request(req.environ, 'DELETE', + path).get_response(self.app) + if resp.status_int // 100 != 2 and resp.status_int != 404: + raise Exception('Could not delete account id mapping: %s %s' % + (path, resp.status)) + # Delete the account marker itself. + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'DELETE', + path).get_response(self.app) + if resp.status_int // 100 != 2 and resp.status_int != 404: + raise Exception('Could not delete account marked: %s %s' % + (path, resp.status)) + return HTTPNoContent(request=req) def handle_get_user(self, req): """ @@ -706,8 +747,6 @@ class Swauth(object): account, obj['name'])) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) - if resp.status_int == 404: - return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not retrieve user object: ' '%s %s' % (path, resp.status)) @@ -720,6 +759,8 @@ class Swauth(object): path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not retrieve user object: %s %s' % (path, resp.status)) @@ -769,6 +810,8 @@ class Swauth(object): resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': 'plaintext:%s' % key, 'groups': [{'name': g} for g in groups]})).get_response(self.app) + if resp.status_int == 404: + return HTTPNotFound(request=req) if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) @@ -1043,15 +1086,20 @@ class Swauth(object): else: return Request.blank(path, environ=newenv, headers=headers) - def get_conn(self, url=None): + def get_conn(self, urlparsed=None): """ - Returns an HTTPConnection based on the given `url` or the 
default Swift - cluster URL's scheme. + Returns an HTTPConnection based on the urlparse result given or the + default Swift cluster urlparse result. + + :param urlparsed: The result from urlparse.urlparse or None to use the + default Swift cluster's value """ - if self.dsc_parsed.scheme == 'http': - return HTTPConnection(self.dsc_parsed.netloc) + if not urlparsed: + urlparsed = self.dsc_parsed + if urlparsed.scheme == 'http': + return HTTPConnection(urlparsed.netloc) else: - return HTTPSConnection(self.dsc_parsed.netloc) + return HTTPSConnection(urlparsed.netloc) def get_itoken(self, env): """ @@ -1067,7 +1115,11 @@ class Swauth(object): self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex) memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken) self.itoken_expires = time() + self.token_life - 60 - cache_from_env(env).set(memcache_key, (self.itoken_expires, + memcache_client = cache_from_env(env) + if not memcache_client: + raise Exception( + 'No memcache set up; required for Swauth middleware') + memcache_client.set(memcache_key, (self.itoken_expires, '.auth,.reseller_admin'), timeout=self.token_life) return self.itoken @@ -1081,7 +1133,7 @@ class Swauth(object): :returns: The dict for the admin user with the addition of the `account` key. 
""" - if ':' not in req.headers.get('x-auth-admin-user'): + if ':' not in req.headers.get('x-auth-admin-user', ''): return None admin_account, admin_user = \ req.headers.get('x-auth-admin-user').split(':', 1) @@ -1152,7 +1204,8 @@ class Swauth(object): if self.is_super_admin(req): return True admin_detail = self.get_admin_detail(req) - if self.is_reseller_admin(req, admin_detail=admin_detail): + if admin_detail and \ + self.is_reseller_admin(req, admin_detail=admin_detail): return True return admin_detail and admin_detail['account'] == account and \ '.admin' in (g['name'] for g in admin_detail['groups']) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 73d26a1ac3..deda98b2a5 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -64,15 +64,39 @@ class FakeApp(object): def __call__(self, env, start_response): self.calls += 1 - req = Request.blank('', environ=env) + self.request = Request.blank('', environ=env) if 'swift.authorize' in env: - resp = env['swift.authorize'](req) + resp = env['swift.authorize'](self.request) if resp: return resp(env, start_response) status, headers, body = self.status_headers_body_iter.next() return Response(status=status, headers=headers, body=body)(env, start_response) +class FakeConn(object): + + def __init__(self, status_headers_body_iter=None): + self.calls = 0 + self.status_headers_body_iter = status_headers_body_iter + if not self.status_headers_body_iter: + self.status_headers_body_iter = iter([('404 Not Found', {}, '')]) + + def request(self, method, path, headers): + self.calls += 1 + self.request_path = path + self.status, self.headers, self.body = \ + self.status_headers_body_iter.next() + self.status, self.reason = self.status.split(' ', 1) + self.status = int(self.status) + + def getresponse(self): + return self + + def read(self): + body = self.body + self.body = '' + return body + class 
TestAuth(unittest.TestCase): @@ -237,6 +261,7 @@ class TestAuth(unittest.TestCase): resp = Request.blank('/v1/AUTH_cfa', headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 2) def test_auth_memcache(self): # First run our test without memcache, showing we need to return the @@ -262,6 +287,7 @@ class TestAuth(unittest.TestCase): resp = Request.blank('/v1/AUTH_cfa', headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 4) # Now run our test with memcache, showing we no longer need to return # the token contents twice. self.test_auth.app = FakeApp(iter([ @@ -285,6 +311,7 @@ class TestAuth(unittest.TestCase): environ={'swift.cache': fake_memcache} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 3) def test_auth_just_expired(self): self.test_auth.app = FakeApp(iter([ @@ -314,6 +341,7 @@ class TestAuth(unittest.TestCase): resp = Request.blank('/v1/AUTH_cfa', headers={'X-Storage-Token': 'AUTH_t'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 2) def test_authorize_bad_path(self): req = Request.blank('/badpath') @@ -443,20 +471,12 @@ class TestAuth(unittest.TestCase): ('200 Ok', {}, json.dumps({"auth": "plaintext:key", "groups": [{'name': "act:usr"}, {'name': "act"}, - {'name': ".admin"}]})), - # GET of account - ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), - # PUT of new token - ('201 Created', {}, ''), - # POST of token to user object - ('204 No Content', {}, ''), - # GET of services object - ('200 Ok', {}, json.dumps({"storage": {"default": "local", - "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + {'name': ".admin"}]}))])) resp = Request.blank('/auth/v1.0', headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 
'invalid'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 401) + self.assertEquals(self.test_auth.app.calls, 1) def test_get_token_fail_invalid_x_auth_user_format(self): resp = Request.blank('/auth/v1/act/auth', @@ -488,6 +508,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) def test_get_token_fail_get_account(self): self.test_auth.app = FakeApp(iter([ @@ -502,6 +523,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) def test_get_token_fail_put_new_token(self): self.test_auth.app = FakeApp(iter([ @@ -518,6 +540,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) def test_get_token_fail_post_to_user(self): self.test_auth.app = FakeApp(iter([ @@ -536,6 +559,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 4) def test_get_token_fail_get_services(self): self.test_auth.app = FakeApp(iter([ @@ -556,6 +580,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 5) def test_get_token_fail_get_existing_token(self): self.test_auth.app = FakeApp(iter([ @@ -570,6 +595,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'key'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + 
self.assertEquals(self.test_auth.app.calls, 2) def test_get_token_success_v1_0(self): self.test_auth.app = FakeApp(iter([ @@ -600,6 +626,7 @@ class TestAuth(unittest.TestCase): self.assertEquals(json.loads(resp.body), {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 5) def test_get_token_success_v1_act_auth(self): self.test_auth.app = FakeApp(iter([ @@ -630,6 +657,7 @@ class TestAuth(unittest.TestCase): self.assertEquals(json.loads(resp.body), {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 5) def test_get_token_success_storage_instead_of_auth(self): self.test_auth.app = FakeApp(iter([ @@ -660,6 +688,7 @@ class TestAuth(unittest.TestCase): self.assertEquals(json.loads(resp.body), {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 5) def test_get_token_success_v1_act_auth_auth_instead_of_storage(self): self.test_auth.app = FakeApp(iter([ @@ -690,6 +719,7 @@ class TestAuth(unittest.TestCase): self.assertEquals(json.loads(resp.body), {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 5) def test_get_token_success_existing_token(self): self.test_auth.app = FakeApp(iter([ @@ -718,6 +748,81 @@ class TestAuth(unittest.TestCase): self.assertEquals(json.loads(resp.body), {"storage": {"default": "local", "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_get_token_success_existing_token_expired(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, + json.dumps({"auth": "plaintext:key", + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), + # GET of token + ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", + 
"account_id": "AUTH_cfa", "groups": [{'name': "act:usr"}, + {'name': "key"}, {'name': ".admin"}], + "expires": 0.0})), + # DELETE of expired token + ('204 No Content', {}, ''), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertNotEquals(resp.headers.get('x-auth-token'), 'AUTH_tktest') + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 7) + + def test_get_token_success_existing_token_expired_fail_deleting_old(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tktest'}, + json.dumps({"auth": "plaintext:key", + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]})), + # GET of token + ('200 Ok', {}, json.dumps({"account": "act", "user": "usr", + "account_id": "AUTH_cfa", "groups": [{'name': "act:usr"}, + {'name': "key"}, {'name': ".admin"}], + "expires": 0.0})), + # DELETE of expired token + ('503 Service Unavailable', {}, ''), + # GET of account + ('204 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), + # PUT of new token + ('201 Created', {}, ''), + # POST of token to user object + ('204 No Content', {}, ''), + # GET of services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": 
"http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v1.0', + headers={'X-Auth-User': 'act:usr', + 'X-Auth-Key': 'key'}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertNotEquals(resp.headers.get('x-auth-token'), 'AUTH_tktest') + self.assertEquals(resp.headers.get('x-auth-token'), + resp.headers.get('x-storage-token')) + self.assertEquals(resp.headers.get('x-storage-url'), + 'http://127.0.0.1:8080/v1/AUTH_cfa') + self.assertEquals(json.loads(resp.body), + {"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}) + self.assertEquals(self.test_auth.app.calls, 7) def test_prep_success(self): self.test_auth.app = FakeApp(iter([ @@ -733,6 +838,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 3) def test_prep_bad_method(self): resp = Request.blank('/auth/v2/.prep', @@ -790,6 +896,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) def test_prep_fail_token_container_create(self): self.test_auth.app = FakeApp(iter([ @@ -803,6 +910,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) def test_prep_fail_account_id_container_create(self): self.test_auth.app = FakeApp(iter([ @@ -818,6 +926,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) def test_get_reseller_success(self): self.test_auth.app = FakeApp(iter([ @@ -835,6 +944,7 @@ class TestAuth(unittest.TestCase): self.assertEquals(resp.status_int, 200) self.assertEquals(json.loads(resp.body), 
{"accounts": [{"name": "act"}]}) + self.assertEquals(self.test_auth.app.calls, 2) self.test_auth.app = FakeApp(iter([ # GET of user object @@ -855,16 +965,18 @@ class TestAuth(unittest.TestCase): self.assertEquals(resp.status_int, 200) self.assertEquals(json.loads(resp.body), {"accounts": [{"name": "act"}]}) + self.assertEquals(self.test_auth.app.calls, 3) def test_get_reseller_fail_bad_creds(self): self.test_auth.app = FakeApp(iter([ # GET of user object ('404 Not Found', {}, '')])) resp = Request.blank('/auth/v2', - headers={'X-Auth-Admin-User': 'super_admin', + headers={'X-Auth-Admin-User': 'super:admin', 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of user object (account admin, but not reseller admin) @@ -876,6 +988,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of user object (regular user) @@ -886,6 +999,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) def test_get_reseller_fail_listing(self): self.test_auth.app = FakeApp(iter([ @@ -896,6 +1010,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of .auth account (list containers) @@ -910,6 +1025,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) def test_get_account_success(self): self.test_auth.app = FakeApp(iter([ @@ -941,6 
+1057,7 @@ class TestAuth(unittest.TestCase): {'default': 'local', 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) + self.assertEquals(self.test_auth.app.calls, 3) self.test_auth.app = FakeApp(iter([ # GET of user object @@ -975,6 +1092,7 @@ class TestAuth(unittest.TestCase): {'default': 'local', 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}}, 'users': [{'name': 'tester'}, {'name': 'tester3'}]}) + self.assertEquals(self.test_auth.app.calls, 4) def test_get_account_fail_bad_account_name(self): resp = Request.blank('/auth/v2/.token', @@ -993,10 +1111,11 @@ class TestAuth(unittest.TestCase): # GET of user object ('404 Not Found', {}, '')])) resp = Request.blank('/auth/v2/act', - headers={'X-Auth-Admin-User': 'super_admin', + headers={'X-Auth-Admin-User': 'super:admin', 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of user object (account admin, but wrong account) @@ -1008,6 +1127,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of user object (regular user) @@ -1018,6 +1138,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) def test_get_account_fail_get_services(self): self.test_auth.app = FakeApp(iter([ @@ -1028,6 +1149,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) self.test_auth.app = FakeApp(iter([ # GET of .services object @@ -1037,6 +1159,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} 
).get_response(self.test_auth) self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) def test_get_account_fail_listing(self): self.test_auth.app = FakeApp(iter([ @@ -1050,6 +1173,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) self.test_auth.app = FakeApp(iter([ # GET of .services object @@ -1062,6 +1186,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 2) self.test_auth.app = FakeApp(iter([ # GET of .services object @@ -1086,6 +1211,1547 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_set_services_new_service(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # PUT of new .services object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'new_service': {'new_endpoint': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {'storage': {'default': 'local', + 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa'}, + 'new_service': {'new_endpoint': 'new_value'}}) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_set_services_new_endpoint(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": 
"http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # PUT of new .services object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'storage': {'new_endpoint': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {'storage': {'default': 'local', + 'local': 'http://127.0.0.1:8080/v1/AUTH_cfa', + 'new_endpoint': 'new_value'}}) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_set_services_update_endpoint(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # PUT of new .services object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'storage': {'local': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(json.loads(resp.body), + {'storage': {'default': 'local', + 'local': 'new_value'}}) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_set_services_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': 'super:admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'storage': {'local': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller admin) + ('200 Ok', {}, json.dumps({"groups": 
[{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'}, + body=json.dumps({'storage': {'local': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}, + body=json.dumps({'storage': {'local': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_set_services_fail_bad_account_name(self): + resp = Request.blank('/auth/v2/.act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'storage': {'local': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_set_services_fail_bad_json(self): + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body='garbage' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body='' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_set_services_fail_get_services(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services 
object + ('503 Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'new_service': {'new_endpoint': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'new_service': {'new_endpoint': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_set_services_fail_put_services(self): + self.test_auth.app = FakeApp(iter([ + # GET of .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # PUT of new .services object + ('503 Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/.services', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + body=json.dumps({'new_service': {'new_endpoint': 'new_value'}}) + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_put_account_success(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('204 No Content', {}, ''), + # PUT of .account_id mapping object + ('204 No Content', {}, ''), + # PUT of .services object + ('204 No Content', {}, 
''), + # POST to account container updating X-Container-Meta-Account-Id + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(self.test_auth.app.calls, 5) + self.assertEquals(conn.calls, 1) + + def test_put_account_success_preexist_but_not_completed(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + # We're going to show it as existing this time, but with no + # X-Container-Meta-Account-Id, indicating a failed previous attempt + ('200 Ok', {}, ''), + # PUT of .account_id mapping object + ('204 No Content', {}, ''), + # PUT of .services object + ('204 No Content', {}, ''), + # POST to account container updating X-Container-Meta-Account-Id + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(self.test_auth.app.calls, 4) + self.assertEquals(conn.calls, 1) + + def test_put_account_success_preexist_and_completed(self): + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + # We're going to show it as existing this time, and with an + # X-Container-Meta-Account-Id, indicating it already exists + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 
'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 202) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_account_success_with_given_suffix(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('204 No Content', {}, ''), + # PUT of .account_id mapping object + ('204 No Content', {}, ''), + # PUT of .services object + ('204 No Content', {}, ''), + # POST to account container updating X-Container-Meta-Account-Id + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Account-Suffix': 'test-suffix'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(conn.request_path, '/v1/AUTH_test-suffix') + self.assertEquals(self.test_auth.app.calls, 5) + self.assertEquals(conn.calls, 1) + + def test_put_account_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 'super:admin', + 'X-Auth-Admin-Key': 'supertest'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller admin) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': 
FakeMemcache()}, + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_account_fail_invalid_account_name(self): + resp = Request.blank('/auth/v2/.act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_put_account_fail_on_initial_account_head(self): + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_account_fail_on_account_marker_put(self): + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 
'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_put_account_fail_on_storage_account_put(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('503 Service Unavailable', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(conn.calls, 1) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_put_account_fail_on_account_id_mapping(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('204 No Content', {}, ''), + # PUT of .account_id mapping object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(conn.calls, 1) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_put_account_fail_on_services_object(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, 
''), + # PUT of account container + ('204 No Content', {}, ''), + # PUT of .account_id mapping object + ('204 No Content', {}, ''), + # PUT of .services object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(conn.calls, 1) + self.assertEquals(self.test_auth.app.calls, 4) + + def test_put_account_fail_on_post_mapping(self): + conn = FakeConn(iter([ + # PUT of storage account itself + ('201 Created', {}, '')])) + self.test_auth.get_conn = lambda: conn + self.test_auth.app = FakeApp(iter([ + # Initial HEAD of account container to check for pre-existence + ('404 Not Found', {}, ''), + # PUT of account container + ('204 No Content', {}, ''), + # PUT of .account_id mapping object + ('204 No Content', {}, ''), + # PUT of .services object + ('204 No Content', {}, ''), + # POST to account container updating X-Container-Meta-Account-Id + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(conn.calls, 1) + self.assertEquals(self.test_auth.app.calls, 5) + + def test_delete_account_success(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": 
"2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id mapping object + ('204 No Content', {}, ''), + # DELETE the account container + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 6) + self.assertEquals(conn.calls, 1) + + def test_delete_account_success_missing_services(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('404 Not Found', {}, ''), + # DELETE the .account_id mapping object + ('204 No Content', {}, ''), + # DELETE the account container + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 5) + + def test_delete_account_success_missing_storage_account(self): + conn = FakeConn(iter([ + # DELETE of storage 
account itself + ('404 Not Found', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id mapping object + ('204 No Content', {}, ''), + # DELETE the account container + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 6) + self.assertEquals(conn.calls, 1) + + def test_delete_account_success_missing_account_id_mapping(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": 
"http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id mapping object + ('404 Not Found', {}, ''), + # DELETE the account container + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 6) + self.assertEquals(conn.calls, 1) + + def test_delete_account_success_missing_account_container_at_end(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id mapping object + ('204 No Content', {}, ''), + # DELETE the account container + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 6) + self.assertEquals(conn.calls, 1) + + def test_delete_account_fail_bad_creds(self): + 
self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 'super:admin', + 'X-Auth-Admin-Key': 'supertest'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller admin) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_account_fail_invalid_account_name(self): + resp = Request.blank('/auth/v2/.act', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_delete_account_fail_not_found(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 
'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_account_fail_not_found_concurrency(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_delete_account_fail_list_account(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_account_fail_list_account_concurrency(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + 
('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_delete_account_fail_has_users(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}]))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 409) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_account_fail_has_users2(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}]))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 
'.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 409) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_delete_account_fail_get_services(self): + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_delete_account_fail_delete_storage_account(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('409 Conflict', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': 
'.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 409) + self.assertEquals(self.test_auth.app.calls, 3) + self.assertEquals(conn.calls, 1) + + def test_delete_account_fail_delete_storage_account2(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, ''), + # DELETE of storage account itself + ('409 Conflict', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa", + "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + self.assertEquals(conn.calls, 2) + + def test_delete_account_fail_delete_storage_account3(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('503 Service Unavailable', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + 
# Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + self.assertEquals(conn.calls, 1) + + def test_delete_account_fail_delete_storage_account4(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, ''), + # DELETE of storage account itself + ('503 Service Unavailable', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa", + "other": "http://127.0.0.1:8080/v1/AUTH_cfa2"}}))])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + self.assertEquals(conn.calls, 2) + + def test_delete_account_fail_delete_services(self): + conn = FakeConn(iter([ + # DELETE of 
storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 4) + self.assertEquals(conn.calls, 1) + + def test_delete_account_fail_delete_account_id_mapping(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id 
mapping object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 5) + self.assertEquals(conn.calls, 1) + + def test_delete_account_fail_delete_account_container(self): + conn = FakeConn(iter([ + # DELETE of storage account itself + ('204 No Content', {}, '')])) + self.test_auth.get_conn = lambda x: conn + self.test_auth.app = FakeApp(iter([ + # Account's container listing, checking for users + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}])), + # Account's container listing, checking for users (continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]'), + # GET the .services object + ('200 Ok', {}, json.dumps({"storage": {"default": "local", + "local": "http://127.0.0.1:8080/v1/AUTH_cfa"}})), + # DELETE the .services object + ('204 No Content', {}, ''), + # DELETE the .account_id mapping object + ('204 No Content', {}, ''), + # DELETE the account container + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act', + environ={'REQUEST_METHOD': 'DELETE', + 'swift.cache': FakeMemcache()}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 6) + self.assertEquals(conn.calls, 1) + + def test_get_user_success(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"}))])) + 
resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.body, json.dumps( + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"})) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_user_groups_success(self): + self.test_auth.app = FakeApp(iter([ + # GET of account container (list objects) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}, + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:tester"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"})), + # GET of user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:tester3"}, {"name": "act"}], + "auth": "plaintext:key3"})), + # GET of account container (list objects continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) + resp = Request.blank('/auth/v2/act/.groups', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.body, json.dumps( + {"groups": [{"name": ".admin"}, {"name": "act"}, + {"name": "act:tester"}, {"name": "act:tester3"}]})) + self.assertEquals(self.test_auth.app.calls, 4) + + def test_get_user_groups_success2(self): + self.test_auth.app = FakeApp(iter([ + # GET of account container (list objects) + 
('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}])), + # GET of user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:tester"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"})), + # GET of account container (list objects continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:tester3"}, {"name": "act"}], + "auth": "plaintext:key3"})), + # GET of account container (list objects continuation) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, '[]')])) + resp = Request.blank('/auth/v2/act/.groups', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.body, json.dumps( + {"groups": [{"name": ".admin"}, {"name": "act"}, + {"name": "act:tester"}, {"name": "act:tester3"}]})) + self.assertEquals(self.test_auth.app.calls, 5) + + def test_get_user_fail_invalid_account(self): + resp = Request.blank('/auth/v2/.invalid/usr', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_get_user_fail_invalid_user(self): + resp = Request.blank('/auth/v2/act/.invalid', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def 
test_get_user_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': 'super:admin', + 'X-Auth-Admin-Key': 'supertest'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}, + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_user_account_admin_success(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller admin) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"})), + # GET of requested user object + ('200 Ok', {}, json.dumps( + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.body, json.dumps( + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"})) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_get_user_groups_not_found(self): + self.test_auth.app = FakeApp(iter([ + # GET of account container (list objects) + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/.groups', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_user_groups_fail_listing(self): + self.test_auth.app = FakeApp(iter([ + # GET of account container (list objects) + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/.groups', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_user_groups_fail_get_user(self): + self.test_auth.app = FakeApp(iter([ + # GET of account container (list objects) + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, + json.dumps([ + {"name": ".services", "hash": "etag", "bytes": 112, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.618110"}, + {"name": "tester", "hash": "etag", "bytes": 104, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:27.736680"}, + {"name": "tester3", "hash": "etag", "bytes": 86, + "content_type": "application/octet-stream", + "last_modified": "2010-12-03T17:16:28.135530"}])), + # GET of user object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/.groups', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_get_user_not_found(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_user_fail(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object + ('503 Service Unavailable', {}, 
'')])) + resp = Request.blank('/auth/v2/act/usr', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_user_fail_invalid_account(self): + resp = Request.blank('/auth/v2/.invalid/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_put_user_fail_invalid_user(self): + resp = Request.blank('/auth/v2/act/.usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_put_user_fail_no_user_key(self): + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_put_user_reseller_admin_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object (reseller admin) + # This shouldn't actually get called, checked below + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:rdm"}, + {"name": "test"}, {"name": ".admin"}, + {"name": ".reseller_admin"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act:rdm', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Reseller-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 0) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but not reseller 
admin) + # This shouldn't actually get called, checked below + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Reseller-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 0) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + # This shouldn't actually get called, checked below + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Reseller-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 0) + + def test_put_user_account_admin_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but wrong account) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act2:adm', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + 
environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_user_regular_fail_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but wrong account) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act2:adm', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_user_regular_success(self): + self.test_auth.app = FakeApp(iter([ + # PUT of user object + ('201 Created', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(json.loads(self.test_auth.app.request.body), + {"groups": [{"name": "act:usr"}, {"name": "act"}], + "auth": "plaintext:key"}) + + def 
test_put_user_account_admin_success(self): + self.test_auth.app = FakeApp(iter([ + # PUT of user object + ('201 Created', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(json.loads(self.test_auth.app.request.body), + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}], + "auth": "plaintext:key"}) + + def test_put_user_reseller_admin_success(self): + self.test_auth.app = FakeApp(iter([ + # PUT of user object + ('201 Created', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key', + 'X-Auth-User-Reseller-Admin': 'true'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 201) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(json.loads(self.test_auth.app.request.body), + {"groups": [{"name": "act:usr"}, {"name": "act"}, + {"name": ".admin"}, {"name": ".reseller_admin"}], + "auth": "plaintext:key"}) + + def test_put_user_fail_not_found(self): + self.test_auth.app = FakeApp(iter([ + # PUT of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_put_user_fail(self): + self.test_auth.app = FakeApp(iter([ + # PUT of user object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 
'PUT'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest', + 'X-Auth-User-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_user_bad_creds(self): + self.test_auth.app = FakeApp(iter([ + # GET of user object (account admin, but wrong account) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act2:adm"}, + {"name": "test"}, {"name": ".admin"}], + "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': 'act2:adm', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + self.test_auth.app = FakeApp(iter([ + # GET of user object (regular user) + ('200 Ok', {}, json.dumps({"groups": [{"name": "act:usr"}, + {"name": "test"}], "auth": "plaintext:key"}))])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 403) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_user_invalid_account(self): + resp = Request.blank('/auth/v2/.invalid/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_delete_user_invalid_user(self): + resp = Request.blank('/auth/v2/act/.invalid', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_delete_user_not_found(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('404 Not Found', {}, '')])) + resp = 
Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_user_fail_head_user(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_delete_user_fail_delete_token(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), + # DELETE of token + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_delete_user_fail_delete_user(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), + # DELETE of token + ('204 No Content', {}, ''), + # DELETE of user object + ('503 Service Unavailable', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_delete_user_success(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), + # DELETE of 
token + ('204 No Content', {}, ''), + # DELETE of user object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_delete_user_success_missing_user_at_end(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), + # DELETE of token + ('204 No Content', {}, ''), + # DELETE of user object + ('404 Not Found', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_delete_user_success_missing_token(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {'X-Object-Meta-Auth-Token': 'AUTH_tk'}, ''), + # DELETE of token + ('404 Not Found', {}, ''), + # DELETE of user object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 3) + + def test_delete_user_success_no_token(self): + self.test_auth.app = FakeApp(iter([ + # HEAD of user object + ('200 Ok', {}, ''), + # DELETE of user object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/act/usr', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'} + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + 
self.assertEquals(self.test_auth.app.calls, 2) if __name__ == '__main__': From 506618c44e5cdd04f1a021c87d10d58ccf322f70 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori Date: Mon, 6 Dec 2010 11:45:06 +0900 Subject: [PATCH 017/199] object replicator: fix replica deletion condition --- swift/obj/replicator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 9b0294627e..7559cd0933 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -494,7 +494,8 @@ class ObjectReplicator(Daemon): self.object_ring.get_part_nodes(int(partition)) if node['id'] != local_dev['id']] jobs.append(dict(path=join(obj_path, partition), - nodes=nodes, delete=len(nodes) > 2, + nodes=nodes, + delete=len(nodes) > self.object_ring.replica_count - 1, partition=partition)) except ValueError: continue From 090f86e9a6726480cbd98d183a97dab1f32d6fa8 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Mon, 6 Dec 2010 14:01:19 -0600 Subject: [PATCH 018/199] updated large objects history --- doc/source/overview_large_objects.rst | 57 ++++++++++++++++++++++----- 1 file changed, 48 insertions(+), 9 deletions(-) diff --git a/doc/source/overview_large_objects.rst b/doc/source/overview_large_objects.rst index 333b6cde55..01f4990732 100644 --- a/doc/source/overview_large_objects.rst +++ b/doc/source/overview_large_objects.rst @@ -125,14 +125,53 @@ History ------- Large object support has gone through various iterations before settling on -this implementation. This approach has the drawback that the eventual -consistency window of the container listings can cause a GET on the manifest -object to return an invalid whole object for that short term. +this implementation. -We also implemented fully transparent support within the server, but the -drawbacks there were added complexity within the cluster, no option to do -parallel uploads, and no basis for a resume feature. 
+The primary factor driving the limitation of object size in swift is +maintaining balance among the partitions of the ring. To maintain an even +dispersion of disk usage throughout the cluster the obvious storage pattern +was to simply split larger objects into smaller segments, which could then be +glued together during a read. -We considered implementing both the "user manifest" option we have now and the -"transparent server manifest" option, but the second was deemed just to complex -for the benefit. +Before the introduction of large object support some applications were already +splitting their uploads into segments and re-assembling them on the client +side after retrieving the individual pieces. This design allowed the client +to support backup and archiving of large data sets, but was also frequently +employed to improve performance or reduce errors due to network interruption. +The major disadvantage of this method is that knowledge of the original +partitioning scheme is required to properly reassemble the object, which is +not practical for some use cases, such as CDN origination. + +In order to eliminate any barrier to entry for clients wanting to store +objects larger than 5GB, initially we also prototyped fully transparent +support for large object uploads. A fully transparent implementation would +support a larger max size by automatically splitting objects into segments +during upload within the proxy without any changes to the client API. All +segments were completely hidden from the client API. + +This solution introduced a number of challenging failure conditions into the +cluster, wouldn't provide the client with any option to do parallel uploads, +and had no basis for a resume feature. The transparent implementation was +deemed just too complex for the benefit. 
+
+The current "user manifest" design was chosen in order to provide a
+transparent download of large objects to the client and still provide the
+uploading client a clean API to support segmented uploads.
+
+Alternative "explicit" user manifest options were discussed which would have
+required a pre-defined format for listing the segments to "finalize" the
+segmented upload. While this may offer some potential advantages, it was
+decided that pushing an added burden onto the client which could potentially
+limit adoption should be avoided in favor of a simpler "API" (essentially just
+the format of the 'X-Object-Manifest' header).
+
+During development it was noted that this "implicit" user manifest approach
+which is based on the path prefix can be potentially affected by the eventual
+consistency window of the container listings, which could theoretically cause
+a GET on the manifest object to return an invalid whole object for that short
+term. In reality you're unlikely to encounter this scenario unless you're
+running very high concurrency uploads against a small testing environment
+which isn't running the object-updaters or container-replicators.
+
+Like all of swift, Large Object Support is a living feature which will
+continue to improve and may change over time.
From 95a38de0eda686d42e68807ae6a8872d073ebe51 Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Mon, 6 Dec 2010 22:53:44 +0000 Subject: [PATCH 019/199] set default journal mode for rolling back from WAL code --- swift/common/db.py | 1 + 1 file changed, 1 insertion(+) diff --git a/swift/common/db.py b/swift/common/db.py index 41854407d6..b5d5ac3b12 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -139,6 +139,7 @@ def get_db_connection(path, timeout=30, okay_to_create=False): conn.execute('PRAGMA synchronous = NORMAL') conn.execute('PRAGMA count_changes = OFF') conn.execute('PRAGMA temp_store = MEMORY') + conn.execute('PRAGMA journal_mode = DELETE') conn.create_function('chexor', 3, chexor) except sqlite3.DatabaseError: import traceback From 6f26c4fcdc9c36ae6a9696e920e01199cb83ddfd Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 8 Dec 2010 14:10:12 -0800 Subject: [PATCH 020/199] swauth: another batch of tests and bufixes found while testing --- setup.py | 1 + swift/common/middleware/swauth.py | 23 +- test/unit/common/middleware/test_swauth.py | 307 ++++++++++++++++++++- 3 files changed, 321 insertions(+), 10 deletions(-) diff --git a/setup.py b/setup.py index ab9233b30e..e214b1c722 100644 --- a/setup.py +++ b/setup.py @@ -21,6 +21,7 @@ import subprocess from swift import __version__ as version + class local_sdist(sdist): """Customized sdist hook - builds the ChangeLog file from VC first""" diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index c6f0859cd4..0f8c467113 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -1021,7 +1021,7 @@ class Swauth(object): The first group listed will be a unique identifier for the user the token represents. - + .reseller_admin is a special group that indicates the user should be allowed to do anything on any account. 
@@ -1046,14 +1046,14 @@ class Swauth(object): resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: - return HTTPUnauthorized(request=req) + return HTTPNotFound(request=req) detail = json.loads(resp.body) expires = detail['expires'] if expires < time(): self.make_request(req.environ, 'DELETE', path).get_response(self.app) - return HTTPUnauthorized(request=req) - groups = detail['groups'] + return HTTPNotFound(request=req) + groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') groups.append(detail['account_id']) @@ -1159,6 +1159,7 @@ class Swauth(object): :param key: The key to validate for the user. :returns: True if the key is valid for the user, False if not. """ + print repr(user_detail) return user_detail and user_detail.get('auth') == 'plaintext:%s' % key def is_super_admin(self, req): @@ -1204,11 +1205,15 @@ class Swauth(object): if self.is_super_admin(req): return True admin_detail = self.get_admin_detail(req) - if admin_detail and \ - self.is_reseller_admin(req, admin_detail=admin_detail): - return True - return admin_detail and admin_detail['account'] == account and \ - '.admin' in (g['name'] for g in admin_detail['groups']) + if admin_detail: + if self.is_reseller_admin(req, admin_detail=admin_detail): + return True + if not self.credentials_match(admin_detail, + req.headers.get('x-auth-admin-key')): + return False + return admin_detail and admin_detail['account'] == account and \ + '.admin' in (g['name'] for g in admin_detail['groups']) + return False def posthooklogger(self, env, req): response = getattr(req, 'response', None) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index deda98b2a5..8f1d6c0633 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -73,6 +73,7 @@ class FakeApp(object): return Response(status=status, headers=headers, body=body)(env, 
start_response) + class FakeConn(object): def __init__(self, status_headers_body_iter=None): @@ -262,7 +263,7 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) self.assertEquals(self.test_auth.app.calls, 2) - + def test_auth_memcache(self): # First run our test without memcache, showing we need to return the # token contents twice. @@ -2753,6 +2754,310 @@ class TestAuth(unittest.TestCase): self.assertEquals(resp.status_int, 204) self.assertEquals(self.test_auth.app.calls, 2) + def test_validate_token_bad_prefix(self): + resp = Request.blank('/auth/v2/.token/BAD_token' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_validate_token_tmi(self): + resp = Request.blank('/auth/v2/.token/AUTH_token/tmi' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 400) + + def test_validate_token_bad_memcache(self): + fake_memcache = FakeMemcache() + fake_memcache.set('AUTH_/auth/AUTH_token', 'bogus') + resp = Request.blank('/auth/v2/.token/AUTH_token', + environ={'swift.cache': + fake_memcache}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 500) + + def test_validate_token_from_memcache(self): + fake_memcache = FakeMemcache() + fake_memcache.set('AUTH_/auth/AUTH_token', (time() + 1, 'act:usr,act')) + resp = Request.blank('/auth/v2/.token/AUTH_token', + environ={'swift.cache': + fake_memcache}).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(resp.headers.get('x-auth-groups'), 'act:usr,act') + self.assert_(float(resp.headers['x-auth-ttl']) < 1, + resp.headers['x-auth-ttl']) + + def test_validate_token_from_memcache_expired(self): + fake_memcache = FakeMemcache() + fake_memcache.set('AUTH_/auth/AUTH_token', (time() - 1, 'act:usr,act')) + resp = Request.blank('/auth/v2/.token/AUTH_token', + environ={'swift.cache': + fake_memcache}).get_response(self.test_auth) + 
self.assertEquals(resp.status_int, 404) + self.assert_('x-auth-groups' not in resp.headers) + self.assert_('x-auth-ttl' not in resp.headers) + + def test_validate_token_from_object(self): + self.test_auth.app = FakeApp(iter([ + # GET of token object + ('200 Ok', {}, json.dumps({'groups': [{'name': 'act:usr'}, + {'name': 'act'}], 'expires': time() + 1}))])) + resp = Request.blank('/auth/v2/.token/AUTH_token' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(resp.headers.get('x-auth-groups'), 'act:usr,act') + self.assert_(float(resp.headers['x-auth-ttl']) < 1, + resp.headers['x-auth-ttl']) + + def test_validate_token_from_object_expired(self): + self.test_auth.app = FakeApp(iter([ + # GET of token object + ('200 Ok', {}, json.dumps({'groups': 'act:usr,act', + 'expires': time() - 1})), + # DELETE of expired token object + ('204 No Content', {}, '')])) + resp = Request.blank('/auth/v2/.token/AUTH_token' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 404) + self.assertEquals(self.test_auth.app.calls, 2) + + def test_validate_token_from_object_with_admin(self): + self.test_auth.app = FakeApp(iter([ + # GET of token object + ('200 Ok', {}, json.dumps({'account_id': 'AUTH_cfa', 'groups': + [{'name': 'act:usr'}, {'name': 'act'}, {'name': '.admin'}], + 'expires': time() + 1}))])) + resp = Request.blank('/auth/v2/.token/AUTH_token' + ).get_response(self.test_auth) + self.assertEquals(resp.status_int, 204) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(resp.headers.get('x-auth-groups'), + 'act:usr,act,AUTH_cfa') + self.assert_(float(resp.headers['x-auth-ttl']) < 1, + resp.headers['x-auth-ttl']) + + def test_get_conn_default(self): + conn = self.test_auth.get_conn() + self.assertEquals(conn.__class__, auth.HTTPConnection) + self.assertEquals(conn.host, '127.0.0.1') + self.assertEquals(conn.port, 8080) + + def 
test_get_conn_default_https(self): + local_auth = auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + conn = local_auth.get_conn() + self.assertEquals(conn.__class__, auth.HTTPSConnection) + self.assertEquals(conn.host, '1.2.3.4') + self.assertEquals(conn.port, 443) + + def test_get_conn_overridden(self): + local_auth = auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + conn = \ + local_auth.get_conn(urlparsed=auth.urlparse('http://5.6.7.8/v1')) + self.assertEquals(conn.__class__, auth.HTTPConnection) + self.assertEquals(conn.host, '5.6.7.8') + self.assertEquals(conn.port, 80) + + def test_get_conn_overridden_https(self): + local_auth = auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': 'local:http://1.2.3.4/v1'})(FakeApp()) + conn = \ + local_auth.get_conn(urlparsed=auth.urlparse('https://5.6.7.8/v1')) + self.assertEquals(conn.__class__, auth.HTTPSConnection) + self.assertEquals(conn.host, '5.6.7.8') + self.assertEquals(conn.port, 443) + + def test_get_itoken_fail_no_memcache(self): + exc = None + try: + self.test_auth.get_itoken({}) + except Exception, err: + exc = err + self.assertEquals(str(exc), + 'No memcache set up; required for Swauth middleware') + + def test_get_itoken_success(self): + fmc = FakeMemcache() + itk = self.test_auth.get_itoken({'swift.cache': fmc}) + self.assert_(itk.startswith('AUTH_itk'), itk) + expires, groups = fmc.get('AUTH_/auth/%s' % itk) + self.assert_(expires > time(), expires) + self.assertEquals(groups, '.auth,.reseller_admin') + + def test_get_admin_detail_fail_no_colon(self): + self.test_auth.app = FakeApp(iter([])) + self.assertEquals(self.test_auth.get_admin_detail(Request.blank('/')), + None) + self.assertEquals(self.test_auth.get_admin_detail(Request.blank('/', + headers={'X-Auth-Admin-User': 'usr'})), None) + self.assertRaises(StopIteration, 
self.test_auth.get_admin_detail, + Request.blank('/', headers={'X-Auth-Admin-User': 'act:usr'})) + + def test_get_admin_detail_fail_user_not_found(self): + self.test_auth.app = FakeApp(iter([('404 Not Found', {}, '')])) + self.assertEquals(self.test_auth.get_admin_detail(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:usr'})), None) + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_admin_detail_fail_get_user_error(self): + self.test_auth.app = FakeApp(iter([ + ('503 Service Unavailable', {}, '')])) + exc = None + try: + self.test_auth.get_admin_detail(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:usr'})) + except Exception, err: + exc = err + self.assertEquals(str(exc), 'Could not get admin user object: ' + '/v1/AUTH_.auth/act/usr 503 Service Unavailable') + self.assertEquals(self.test_auth.app.calls, 1) + + def test_get_admin_detail_success(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({"auth": "plaintext:key", + "groups": [{'name': "act:usr"}, {'name': "act"}, + {'name': ".admin"}]}))])) + detail = self.test_auth.get_admin_detail(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:usr'})) + self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(detail, {'account': 'act', + 'auth': 'plaintext:key', + 'groups': [{'name': 'act:usr'}, {'name': 'act'}, + {'name': '.admin'}]}) + + def test_credentials_match_success(self): + self.assert_(self.test_auth.credentials_match( + {'auth': 'plaintext:key'}, 'key')) + + def test_credentials_match_fail_no_details(self): + self.assert_(not self.test_auth.credentials_match(None, 'notkey')) + + def test_credentials_match_fail_plaintext(self): + self.assert_(not self.test_auth.credentials_match( + {'auth': 'plaintext:key'}, 'notkey')) + + def test_is_super_admin_success(self): + self.assert_(self.test_auth.is_super_admin(Request.blank('/', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}))) + + def 
test_is_super_admin_fail_bad_key(self): + self.assert_(not self.test_auth.is_super_admin(Request.blank('/', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'bad'}))) + self.assert_(not self.test_auth.is_super_admin(Request.blank('/', + headers={'X-Auth-Admin-User': '.super_admin'}))) + self.assert_(not self.test_auth.is_super_admin(Request.blank('/'))) + + def test_is_super_admin_fail_bad_user(self): + self.assert_(not self.test_auth.is_super_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'bad', + 'X-Auth-Admin-Key': 'supertest'}))) + self.assert_(not self.test_auth.is_super_admin(Request.blank('/', + headers={'X-Auth-Admin-Key': 'supertest'}))) + self.assert_(not self.test_auth.is_super_admin(Request.blank('/'))) + + def test_is_reseller_admin_success_is_super_admin(self): + self.assert_(self.test_auth.is_reseller_admin(Request.blank('/', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}))) + + def test_is_reseller_admin_success_called_get_admin_detail(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, + {'name': '.admin'}, + {'name': '.reseller_admin'}]}))])) + self.assert_(self.test_auth.is_reseller_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:rdm', + 'X-Auth-Admin-Key': 'key'}))) + + def test_is_reseller_admin_fail_only_account_admin(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:adm'}, {'name': 'act'}, + {'name': '.admin'}]}))])) + self.assert_(not self.test_auth.is_reseller_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'}))) + + def test_is_reseller_admin_fail_regular_user(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) + self.assert_(not 
self.test_auth.is_reseller_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}))) + + def test_is_reseller_admin_fail_bad_key(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, + {'name': '.admin'}, + {'name': '.reseller_admin'}]}))])) + self.assert_(not self.test_auth.is_reseller_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:rdm', + 'X-Auth-Admin-Key': 'bad'}))) + + def test_is_account_admin_success_is_super_admin(self): + self.assert_(self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': '.super_admin', + 'X-Auth-Admin-Key': 'supertest'}), 'act')) + + def test_is_account_admin_success_is_reseller_admin(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, + {'name': '.admin'}, + {'name': '.reseller_admin'}]}))])) + self.assert_(self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:rdm', + 'X-Auth-Admin-Key': 'key'}), 'act')) + + def test_is_account_admin_success(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:adm'}, {'name': 'act'}, + {'name': '.admin'}]}))])) + self.assert_(self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:adm', + 'X-Auth-Admin-Key': 'key'}), 'act')) + + def test_is_account_admin_fail_account_admin_different_account(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act2:adm'}, {'name': 'act2'}, + {'name': '.admin'}]}))])) + self.assert_(not self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act2:adm', + 'X-Auth-Admin-Key': 'key'}), 'act')) + + def test_is_account_admin_fail_regular_user(self): + 
self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:usr'}, {'name': 'act'}]}))])) + self.assert_(not self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:usr', + 'X-Auth-Admin-Key': 'key'}), 'act')) + + def test_is_account_admin_fail_bad_key(self): + self.test_auth.app = FakeApp(iter([ + ('200 Ok', {}, + json.dumps({'auth': 'plaintext:key', + 'groups': [{'name': 'act:rdm'}, {'name': 'act'}, + {'name': '.admin'}, + {'name': '.reseller_admin'}]}))])) + self.assert_(not self.test_auth.is_account_admin(Request.blank('/', + headers={'X-Auth-Admin-User': 'act:rdm', + 'X-Auth-Admin-Key': 'bad'}), 'act')) + if __name__ == '__main__': unittest.main() From d13ea1dbec5a6199770e847e40e2598eee164f1e Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 8 Dec 2010 14:36:02 -0800 Subject: [PATCH 021/199] swauth: .token objects are now split into 16 containers --- swift/common/middleware/swauth.py | 27 ++++++++++++++-------- test/unit/common/middleware/test_swauth.py | 12 ++++++---- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 0f8c467113..610df8b80f 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -174,7 +174,8 @@ class Swauth(object): if expires < time(): groups = None if not groups: - path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + path = quote('/v1/%s/.token%s/%s' % + (self.auth_account, token[-1], token)) resp = self.make_request(env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: return None @@ -331,8 +332,14 @@ class Swauth(object): if resp.status_int // 100 != 2: raise Exception('Could not create the main auth account: %s %s' % (path, resp.status)) - for container in ('.token', '.account_id'): - path = quote('/v1/%s/%s' % (self.auth_account, container)) + path = quote('/v1/%s/.account_id' % self.auth_account) 
+ resp = self.make_request(req.environ, 'PUT', + path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create container: %s %s' % + (path, resp.status)) + for container in xrange(16): + path = quote('/v1/%s/.token%x' % (self.auth_account, container)) resp = self.make_request(req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: @@ -845,8 +852,8 @@ class Swauth(object): (path, resp.status)) candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: - path = quote('/v1/%s/.token/%s' % (self.auth_account, - candidate_token)) + path = quote('/v1/%s/.token%s/%s' % + (self.auth_account, candidate_token[-1], candidate_token)) resp = self.make_request(req.environ, 'DELETE', path).get_response(self.app) if resp.status_int // 100 != 2 and resp.status_int != 404: @@ -952,8 +959,8 @@ class Swauth(object): token = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: - path = quote('/v1/%s/.token/%s' % (self.auth_account, - candidate_token)) + path = quote('/v1/%s/.token%s/%s' % + (self.auth_account, candidate_token[-1], candidate_token)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if resp.status_int // 100 == 2: @@ -980,7 +987,8 @@ class Swauth(object): # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info - path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + path = quote('/v1/%s/.token%s/%s' % + (self.auth_account, token[-1], token)) resp = self.make_request(req.environ, 'PUT', path, json.dumps({'account': account, 'user': user, 'account_id': account_id, @@ -1042,7 +1050,8 @@ class Swauth(object): if expires < time(): groups = None if not groups: - path = quote('/v1/%s/.token/%s' % (self.auth_account, token)) + path = quote('/v1/%s/.token%s/%s' % + (self.auth_account, token[-1], token)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) if 
resp.status_int // 100 != 2: diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 8f1d6c0633..6c8c32bddd 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -826,20 +826,22 @@ class TestAuth(unittest.TestCase): self.assertEquals(self.test_auth.app.calls, 7) def test_prep_success(self): - self.test_auth.app = FakeApp(iter([ + list_to_iter = [ # PUT of .auth account ('201 Created', {}, ''), - # PUT of .token container - ('201 Created', {}, ''), # PUT of .account_id container - ('201 Created', {}, '')])) + ('201 Created', {}, '')] + # PUT of .token* containers + for x in xrange(16): + list_to_iter.append(('201 Created', {}, '')) + self.test_auth.app = FakeApp(iter(list_to_iter)) resp = Request.blank('/auth/v2/.prep', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Auth-Admin-User': '.super_admin', 'X-Auth-Admin-Key': 'supertest'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 204) - self.assertEquals(self.test_auth.app.calls, 3) + self.assertEquals(self.test_auth.app.calls, 18) def test_prep_bad_method(self): resp = Request.blank('/auth/v2/.prep', From 371c610712c50a51b3838c6a9a3aa7624b35db9b Mon Sep 17 00:00:00 2001 From: Greg Lange Date: Thu, 9 Dec 2010 17:10:37 +0000 Subject: [PATCH 022/199] in st, skips directory creation when streaming to standard out --- bin/st | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bin/st b/bin/st index 6a8b02bb37..3bd8b4f5bf 100755 --- a/bin/st +++ b/bin/st @@ -1020,8 +1020,9 @@ def st_download(options, args, print_queue, error_queue): path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): path = path[1:] + make_dir = out_file != "-" if content_type.split(';', 1)[0] == 'text/directory': - if not isdir(path): + if make_dir and not isdir(path): mkdirs(path) read_length = 0 md5sum = md5() @@ -1030,7 +1031,7 @@ def st_download(options, args, print_queue, 
error_queue): md5sum.update(chunk) else: dirpath = dirname(path) - if dirpath and not isdir(dirpath): + if make_dir and dirpath and not isdir(dirpath): mkdirs(dirpath) if out_file == "-": fp = stdout From 09e39032bf37d1e1e6627d1687fee1df4d14f0f2 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 9 Dec 2010 17:57:26 -0800 Subject: [PATCH 023/199] new swauth-cleanup-tokens; restricted listing .auth account to .super_admin; doc updates --- bin/swauth-cleanup-tokens | 102 +++++++++++++++++++++ doc/source/admin_guide.rst | 15 +++ doc/source/howto_installmultinode.rst | 14 +++ doc/source/overview_auth.rst | 40 +++++--- setup.py | 6 +- swift/common/middleware/swauth.py | 25 +++-- test/unit/common/middleware/test_swauth.py | 2 +- 7 files changed, 180 insertions(+), 24 deletions(-) create mode 100755 bin/swauth-cleanup-tokens diff --git a/bin/swauth-cleanup-tokens b/bin/swauth-cleanup-tokens new file mode 100755 index 0000000000..a24ed2015c --- /dev/null +++ b/bin/swauth-cleanup-tokens @@ -0,0 +1,102 @@ +#!/usr/bin/python +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import simplejson as json +except ImportError: + import json +import re +from datetime import datetime, timedelta +from optparse import OptionParser +from sys import argv, exit +from time import sleep, time + +from swift.common.client import Connection + + +if __name__ == '__main__': + parser = OptionParser(usage='Usage: %prog [options]') + parser.add_option('-t', '--token-life', dest='token_life', + default='86400', help='The expected life of tokens; token objects ' + 'modified more than this number of seconds ago will be checked for ' + 'expiration (default: 86400).') + parser.add_option('-s', '--sleep', dest='sleep', + default='0.1', help='The number of seconds to sleep between token ' + 'checks (default: 0.1)') + parser.add_option('-v', '--verbose', dest='verbose', action='store_true', + default=False, help='Outputs everything done instead of just the ' + 'deletions.') + parser.add_option('-A', '--admin-url', dest='admin_url', + default='http://127.0.0.1:8080/auth/', help='The URL to the auth ' + 'subsystem (default: http://127.0.0.1:8080/auth/)') + parser.add_option('-K', '--admin-key', dest='admin_key', + help='The key for .super_admin.') + args = argv[1:] + if not args: + args.append('-h') + (options, args) = parser.parse_args(args) + if len(args) != 0: + parser.parse_args(['-h']) + options.admin_url = options.admin_url.rstrip('/') + if not options.admin_url.endswith('/v1.0'): + options.admin_url += '/v1.0' + options.admin_user = '.super_admin:.super_admin' + options.token_life = timedelta(0, float(options.token_life)) + options.sleep = float(options.sleep) + conn = Connection(options.admin_url, options.admin_user, options.admin_key) + for x in xrange(16): + container = '.token_%x' % x + marker = None + while True: + if options.verbose: + print 'GET %s?marker=%s' % (container, marker) + objs = conn.get_container(container, marker=marker)[1] + if objs: + marker = objs[-1]['name'] + else: + if options.verbose: + print 'No more objects in %s' % 
container + break + for obj in objs: + last_modified = datetime(*map(int, re.split('[^\d]', + obj['last_modified'])[:-1])) + ago = datetime.utcnow() - last_modified + if ago > options.token_life: + if options.verbose: + print '%s/%s last modified %ss ago; investigating' % \ + (container, obj['name'], + ago.days * 86400 + ago.seconds) + print 'GET %s/%s' % (container, obj['name']) + detail = conn.get_object(container, obj['name'])[1] + detail = json.loads(detail) + if detail['expires'] < time(): + if options.verbose: + print '%s/%s expired %ds ago; deleting' % \ + (container, obj['name'], + time() - detail['expires']) + print 'DELETE %s/%s' % (container, obj['name']) + conn.delete_object(container, obj['name']) + elif options.verbose: + print "%s/%s won't expire for %ds; skipping" % \ + (container, obj['name'], + detail['expires'] - time()) + elif options.verbose: + print '%s/%s last modified %ss ago; skipping' % \ + (container, obj['name'], + ago.days * 86400 + ago.seconds) + sleep(options.sleep) + if options.verbose: + print 'Done.' diff --git a/doc/source/admin_guide.rst b/doc/source/admin_guide.rst index 65ad7d0ca4..068501a0b2 100644 --- a/doc/source/admin_guide.rst +++ b/doc/source/admin_guide.rst @@ -232,6 +232,21 @@ get performance timings (warning: the initial populate takes a while). These timings are dumped into a CSV file (/etc/swift/stats.csv by default) and can then be graphed to see how cluster performance is trending. +------------------------------------ +Additional Cleanup Script for Swauth +------------------------------------ + +If you decide to use Swauth, you'll want to install a cronjob to clean up any +orphaned expired tokens. These orphaned tokens can occur when a "stampede" +occurs where a single user authenticates several times concurrently. Generally, +these orphaned tokens don't pose much of an issue, but it's good to clean them +up once a "token life" period (default: 1 day or 86400 seconds). 
+ +This should be as simple as adding `swauth-cleanup-tokens -K swauthkey > +/dev/null` to a crontab entry on one of the proxies that is running Swauth; but +run `swauth-cleanup-tokens` with no arguments for detailed help on the options +available. + ------------------------ Debugging Tips and Tools ------------------------ diff --git a/doc/source/howto_installmultinode.rst b/doc/source/howto_installmultinode.rst index fe38c02342..2a84357bb9 100644 --- a/doc/source/howto_installmultinode.rst +++ b/doc/source/howto_installmultinode.rst @@ -455,6 +455,20 @@ See :ref:`config-proxy` for the initial setup, and then follow these additional #. After you sync all the nodes, make sure the admin has the keys in /etc/swift and the ownership for the ring file is correct. +Additional Cleanup Script for Swauth +------------------------------------ + +If you decide to use Swauth, you'll want to install a cronjob to clean up any +orphaned expired tokens. These orphaned tokens can occur when a "stampede" +occurs where a single user authenticates several times concurrently. Generally, +these orphaned tokens don't pose much of an issue, but it's good to clean them +up once a "token life" period (default: 1 day or 86400 seconds). + +This should be as simple as adding `swauth-cleanup-tokens -K swauthkey > +/dev/null` to a crontab entry on one of the proxies that is running Swauth; but +run `swauth-cleanup-tokens` with no arguments for detailed help on the options +available. + Troubleshooting Notes --------------------- If you see problems, look in var/log/syslog (or messages on some distros). diff --git a/doc/source/overview_auth.rst b/doc/source/overview_auth.rst index b3ad2e9db0..14b5c03ef3 100644 --- a/doc/source/overview_auth.rst +++ b/doc/source/overview_auth.rst @@ -78,13 +78,14 @@ objects contain a JSON dictionary of the format:: The `` can only be `plaintext` at this time, and the `` is the plain text password itself. -The `` contains at least two group names. 
The first is a unique -group name identifying that user and is of the format `:`. The +The `` contains at least two groups. The first is a unique group +identifying that user and it's name is of the format `:`. The second group is the `` itself. Additional groups of `.admin` for account administrators and `.reseller_admin` for reseller administrators may exist. Here's an example user JSON dictionary:: - {"auth": "plaintext:testing", "groups": ["test:tester", "test", ".admin"]} + {"auth": "plaintext:testing", + "groups": ["name": "test:tester", "name": "test", "name": ".admin"]} To map an auth service account to a Swift storage account, the Service Account Id string is stored in the `X-Container-Meta-Account-Id` header for the @@ -147,7 +148,7 @@ Here's an example token object's JSON dictionary:: {"account": "test", "user": "tester", "account_id": "AUTH_8980f74b1cda41e483cbe0a925f448a9", - "groups": ["test:tester", "test", ".admin"], + "groups": ["name": "test:tester", "name": "test", "name": ".admin"], "expires": 1291273147.1624689} To easily map a user to an already issued token, the token name is stored in @@ -156,14 +157,29 @@ the user object's `X-Object-Meta-Auth-Token` header. 
Here is an example full listing of an :: .account_id - AUTH_4a4e6655-4c8e-4bcb-b73e-0ff1104c4fef - AUTH_5162ec51-f792-4db3-8a35-b3439a1bf6fd - AUTH_8efbea51-9339-42f8-8ac5-f26e1da67eed - .token - AUTH_tk03d8571f735a4ec9abccc704df941c6e - AUTH_tk27cf3f2029b64ec8b56c5d638807b3de - AUTH_tk7594203449754c22a34ac7d910521c2e - AUTH_tk8f2ee54605dd42a8913d244de544d19e + AUTH_2282f516-559f-4966-b239-b5c88829e927 + AUTH_f6f57a3c-33b5-4e85-95a5-a801e67505c8 + AUTH_fea96a36-c177-4ca4-8c7e-b8c715d9d37b + .token_0 + .token_1 + .token_2 + .token_3 + .token_4 + .token_5 + .token_6 + AUTH_tk9d2941b13d524b268367116ef956dee6 + .token_7 + .token_8 + AUTH_tk93627c6324c64f78be746f1e6a4e3f98 + .token_9 + .token_a + .token_b + .token_c + .token_d + .token_e + AUTH_tk0d37d286af2c43ffad06e99112b3ec4e + .token_f + AUTH_tk766bbde93771489982d8dc76979d11cf reseller .services reseller diff --git a/setup.py b/setup.py index e214b1c722..d1466e0449 100644 --- a/setup.py +++ b/setup.py @@ -81,9 +81,9 @@ setup( 'bin/swift-log-stats-collector', 'bin/swift-account-stats-logger', 'bin/swauth-add-account', 'bin/swauth-add-user', - 'bin/swauth-delete-account', 'bin/swauth-delete-user', - 'bin/swauth-list', 'bin/swauth-prep', 'bin/swauth-set-account-service', - 'bin/swift-auth-to-swauth', + 'bin/swauth-cleanup-tokens', 'bin/swauth-delete-account', + 'bin/swauth-delete-user', 'bin/swauth-list', 'bin/swauth-prep', + 'bin/swauth-set-account-service', 'bin/swift-auth-to-swauth', ], entry_points={ 'paste.app_factory': [ diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 610df8b80f..7f985ece96 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -174,7 +174,7 @@ class Swauth(object): if expires < time(): groups = None if not groups: - path = quote('/v1/%s/.token%s/%s' % + path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) resp = self.make_request(env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 
2: @@ -205,7 +205,8 @@ class Swauth(object): if not account or not account.startswith(self.reseller_prefix): return self.denied_response(req) user_groups = (req.remote_user or '').split(',') - if '.reseller_admin' in user_groups: + if '.reseller_admin' in user_groups and \ + account[len(self.reseller_prefix)].isalnum(): return None if account in user_groups and (req.method != 'PUT' or container): # If the user is admin for the account and is not trying to do an @@ -339,7 +340,7 @@ class Swauth(object): raise Exception('Could not create container: %s %s' % (path, resp.status)) for container in xrange(16): - path = quote('/v1/%s/.token%x' % (self.auth_account, container)) + path = quote('/v1/%s/.token_%x' % (self.auth_account, container)) resp = self.make_request(req.environ, 'PUT', path).get_response(self.app) if resp.status_int // 100 != 2: @@ -852,7 +853,7 @@ class Swauth(object): (path, resp.status)) candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: - path = quote('/v1/%s/.token%s/%s' % + path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, candidate_token[-1], candidate_token)) resp = self.make_request(req.environ, 'DELETE', path).get_response(self.app) @@ -943,6 +944,13 @@ class Swauth(object): return HTTPBadRequest(request=req) if not all((account, user, key)): return HTTPUnauthorized(request=req) + if user == '.super_admin' and key == self.super_admin_key: + token = self.get_itoken(req.environ) + url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix) + return Response(request=req, + body=json.dumps({'storage': {'default': 'local', 'local': url}}), + headers={'x-auth-token': token, 'x-storage-token': token, + 'x-storage-url': url}) # Authenticate user path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_request(req.environ, 'GET', @@ -959,7 +967,7 @@ class Swauth(object): token = None candidate_token = resp.headers.get('x-object-meta-auth-token') if candidate_token: - path = 
quote('/v1/%s/.token%s/%s' % + path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, candidate_token[-1], candidate_token)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) @@ -987,7 +995,7 @@ class Swauth(object): # Generate new token token = '%stk%s' % (self.reseller_prefix, uuid4().hex) # Save token info - path = quote('/v1/%s/.token%s/%s' % + path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) resp = self.make_request(req.environ, 'PUT', path, json.dumps({'account': account, 'user': user, @@ -1050,7 +1058,7 @@ class Swauth(object): if expires < time(): groups = None if not groups: - path = quote('/v1/%s/.token%s/%s' % + path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) resp = self.make_request(req.environ, 'GET', path).get_response(self.app) @@ -1129,7 +1137,8 @@ class Swauth(object): raise Exception( 'No memcache set up; required for Swauth middleware') memcache_client.set(memcache_key, (self.itoken_expires, - '.auth,.reseller_admin'), timeout=self.token_life) + '.auth,.reseller_admin,%s.auth' % self.reseller_prefix), + timeout=self.token_life) return self.itoken def get_admin_detail(self, req): diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 6c8c32bddd..10f05a053e 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -2882,7 +2882,7 @@ class TestAuth(unittest.TestCase): self.assert_(itk.startswith('AUTH_itk'), itk) expires, groups = fmc.get('AUTH_/auth/%s' % itk) self.assert_(expires > time(), expires) - self.assertEquals(groups, '.auth,.reseller_admin') + self.assertEquals(groups, '.auth,.reseller_admin,AUTH_.auth') def test_get_admin_detail_fail_no_colon(self): self.test_auth.app = FakeApp(iter([])) From bf94e0b8117c81224f1634d4c85facc52c5301d6 Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Mon, 13 Dec 2010 18:25:09 +0000 Subject: [PATCH 024/199] remove migration 
header --- swift/proxy/server.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 6d6974a400..29c2243afe 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -616,10 +616,6 @@ class ObjectController(Controller): partition, nodes = self.app.object_ring.get_nodes( self.account_name, self.container_name, self.object_name) req.headers['X-Timestamp'] = normalize_timestamp(time.time()) - # this is a temporary hook for migrations to set PUT timestamps - if '!Migration-Timestamp!' in req.headers: - req.headers['X-Timestamp'] = \ - normalize_timestamp(req.headers['!Migration-Timestamp!']) # Sometimes the 'content-type' header exists, but is set to None. if not req.headers.get('content-type'): guessed_type, _ = mimetypes.guess_type(req.path_info) From 20d1ee6757be06728941b6672371a3501163cf53 Mon Sep 17 00:00:00 2001 From: gholt Date: Mon, 13 Dec 2010 14:14:26 -0800 Subject: [PATCH 025/199] Now supports infinite objects! --- swift/proxy/server.py | 151 +++++++++++++++++++++++---------- test/unit/proxy/test_server.py | 31 ++++++- 2 files changed, 135 insertions(+), 47 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 32b0543e1e..ff6e5e310f 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -41,8 +41,8 @@ from swift.common.utils import get_logger, normalize_timestamp, split_path, \ cache_from_env from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_metadata, check_object_creation, \ - check_utf8, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH, \ - MAX_FILE_SIZE + check_utf8, CONTAINER_LISTING_LIMIT, MAX_ACCOUNT_NAME_LENGTH, \ + MAX_CONTAINER_NAME_LENGTH, MAX_FILE_SIZE from swift.common.exceptions import ChunkReadTimeout, \ ChunkWriteTimeout, ConnectionTimeout @@ -112,8 +112,9 @@ class SegmentedIterable(object): :param controller: The ObjectController instance to work with. 
:param container: The container the object segments are within. - :param listing: The listing of object segments to iterate over; this is a - standard JSON decoded container listing. + :param listing: The listing of object segments to iterate over; this may + be an iterator or list that returns dicts with 'name' and + 'bytes' keys. :param response: The webob.Response this iterable is associated with, if any (default: None) """ @@ -121,8 +122,10 @@ class SegmentedIterable(object): def __init__(self, controller, container, listing, response=None): self.controller = controller self.container = container - self.listing = listing + self.listing = iter(listing) self.segment = -1 + self.segment_dict = None + self.segment_peek = None self.seek = 0 self.segment_iter = None self.position = 0 @@ -138,13 +141,13 @@ class SegmentedIterable(object): """ try: self.segment += 1 - if self.segment >= len(self.listing): - raise StopIteration() - obj = self.listing[self.segment] + self.segment_dict = self.segment_peek or self.listing.next() + self.segment_peek = None partition, nodes = self.controller.app.object_ring.get_nodes( - self.controller.account_name, self.container, obj['name']) + self.controller.account_name, self.container, + self.segment_dict['name']) path = '/%s/%s/%s' % (self.controller.account_name, self.container, - obj['name']) + self.segment_dict['name']) req = Request.blank(path) if self.seek: req.range = 'bytes=%s-' % self.seek @@ -209,14 +212,11 @@ class SegmentedIterable(object): """ try: if start: - if len(self.listing) <= self.segment + 1: - return - while start >= self.position + \ - self.listing[self.segment + 1]['bytes']: + self.segment_peek = self.listing.next() + while start >= self.position + self.segment_peek['bytes']: self.segment += 1 - if len(self.listing) <= self.segment + 1: - return - self.position += self.listing[self.segment]['bytes'] + self.position += self.segment_peek['bytes'] + self.segment_peek = self.listing.next() self.seek = start - 
self.position else: start = 0 @@ -707,38 +707,101 @@ class ObjectController(Controller): return resp resp = resp2 req.range = req_range + if 'x-object-manifest' in resp.headers: lcontainer, lprefix = \ resp.headers['x-object-manifest'].split('/', 1) lpartition, lnodes = self.app.container_ring.get_nodes( self.account_name, lcontainer) - lreq = Request.blank('/%s/%s?prefix=%s&format=json' % - (self.account_name, lcontainer, lprefix)) - lresp = self.GETorHEAD_base(lreq, 'Container', lpartition, lnodes, - lreq.path_info, self.app.container_ring.replica_count) - if lresp.status_int // 100 != 2: - lresp = HTTPNotFound(request=req) - lresp.headers['X-Object-Manifest'] = \ - resp.headers['x-object-manifest'] - return lresp - if 'swift.authorize' in req.environ: - req.acl = lresp.headers.get('x-container-read') - aresp = req.environ['swift.authorize'](req) - if aresp: - return aresp - listing = json.loads(lresp.body) - content_length = sum(o['bytes'] for o in listing) - etag = md5('"'.join(o['hash'] for o in listing)).hexdigest() - headers = {'X-Object-Manifest': resp.headers['x-object-manifest'], - 'Content-Type': resp.content_type, 'Content-Length': - content_length, 'ETag': etag} - for key, value in resp.headers.iteritems(): - if key.lower().startswith('x-object-meta-'): - headers[key] = value - resp = Response(headers=headers, request=req, - conditional_response=True) - resp.app_iter = SegmentedIterable(self, lcontainer, listing, resp) - resp.content_length = content_length + marker = '' + listing = [] + while True: + lreq = Request.blank('/%s/%s?prefix=%s&format=json&marker=%s' % + (quote(self.account_name), quote(lcontainer), + quote(lprefix), quote(marker))) + lresp = self.GETorHEAD_base(lreq, 'Container', lpartition, + lnodes, lreq.path_info, + self.app.container_ring.replica_count) + if lresp.status_int // 100 != 2: + lresp = HTTPNotFound(request=req) + lresp.headers['X-Object-Manifest'] = \ + resp.headers['x-object-manifest'] + return lresp + if 'swift.authorize' 
in req.environ: + req.acl = lresp.headers.get('x-container-read') + aresp = req.environ['swift.authorize'](req) + if aresp: + return aresp + sublisting = json.loads(lresp.body) + if not sublisting: + break + listing.extend(sublisting) + if len(listing) > CONTAINER_LISTING_LIMIT: + break + marker = sublisting[-1]['name'] + + if len(listing) > CONTAINER_LISTING_LIMIT: + # We will serve large objects with a ton of segments with + # chunked transfer encoding. + + def listing_iter(): + marker = '' + while True: + lreq = Request.blank( + '/%s/%s?prefix=%s&format=json&marker=%s' % + (quote(self.account_name), quote(lcontainer), + quote(lprefix), quote(marker))) + lresp = self.GETorHEAD_base(lreq, 'Container', + lpartition, lnodes, lreq.path_info, + self.app.container_ring.replica_count) + if lresp.status_int // 100 != 2: + raise Exception('Object manifest GET could not ' + 'continue listing: %s %s' % + (req.path, lreq.path)) + if 'swift.authorize' in req.environ: + req.acl = lresp.headers.get('x-container-read') + aresp = req.environ['swift.authorize'](req) + if aresp: + raise Exception('Object manifest GET could ' + 'not continue listing: %s %s' % + (req.path, aresp)) + sublisting = json.loads(lresp.body) + if not sublisting: + break + for obj in sublisting: + yield obj + marker = sublisting[-1]['name'] + + headers = { + 'X-Object-Manifest': resp.headers['x-object-manifest'], + 'Content-Type': resp.content_type} + for key, value in resp.headers.iteritems(): + if key.lower().startswith('x-object-meta-'): + headers[key] = value + resp = Response(headers=headers, request=req, + conditional_response=True) + resp.app_iter = SegmentedIterable(self, lcontainer, + listing_iter(), resp) + + else: + # For objects with a reasonable number of segments, we'll serve + # them with a set content-length and computed etag. 
+ content_length = sum(o['bytes'] for o in listing) + etag = md5('"'.join(o['hash'] for o in listing)).hexdigest() + headers = { + 'X-Object-Manifest': resp.headers['x-object-manifest'], + 'Content-Type': resp.content_type, + 'Content-Length': content_length, + 'ETag': etag} + for key, value in resp.headers.iteritems(): + if key.lower().startswith('x-object-meta-'): + headers[key] = value + resp = Response(headers=headers, request=req, + conditional_response=True) + resp.app_iter = SegmentedIterable(self, lcontainer, listing, + resp) + resp.content_length = content_length + return resp @public diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index d60f4c8590..b7d43c0fb2 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1600,6 +1600,7 @@ class TestObjectController(unittest.TestCase): mkdirs(os.path.join(testdir, 'sdb1')) mkdirs(os.path.join(testdir, 'sdb1', 'tmp')) try: + orig_container_listing_limit = proxy_server.CONTAINER_LISTING_LIMIT conf = {'devices': testdir, 'swift_dir': testdir, 'mount_check': 'false'} prolis = listen(('localhost', 0)) @@ -1976,6 +1977,24 @@ class TestObjectController(unittest.TestCase): self.assert_('Content-Type: text/jibberish' in headers) body = fd.read() self.assertEquals(body, '1234 1234 1234 1234 1234 ') + # Do it again but exceeding the container listing limit + proxy_server.CONTAINER_LISTING_LIMIT = 2 + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/name/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = fd.read() + # A bit fragile of a test; as it makes the assumption that all + # will be sent in a single chunk. 
+ self.assertEquals(body, + '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') finally: prospa.kill() acc1spa.kill() @@ -1985,6 +2004,7 @@ class TestObjectController(unittest.TestCase): obj1spa.kill() obj2spa.kill() finally: + proxy_server.CONTAINER_LISTING_LIMIT = orig_container_listing_limit rmtree(testdir) def test_mismatched_etags(self): @@ -2995,9 +3015,10 @@ class TestSegmentedIterable(unittest.TestCase): self.controller = FakeObjectController() def test_load_next_segment_unexpected_error(self): + # Iterator value isn't a dict self.assertRaises(Exception, proxy_server.SegmentedIterable(self.controller, None, - None)._load_next_segment) + [None])._load_next_segment) self.assertEquals(self.controller.exception_args[0], 'ERROR: While processing manifest /a/c/o tx1') @@ -3030,6 +3051,7 @@ class TestSegmentedIterable(unittest.TestCase): segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}, {'name': 'o2'}]) segit.segment = 0 + segit.listing.next() segit._load_next_segment() self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2') data = ''.join(segit.segment_iter) @@ -3039,6 +3061,7 @@ class TestSegmentedIterable(unittest.TestCase): segit = proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}, {'name': 'o2'}]) segit.segment = 0 + segit.listing.next() segit.seek = 1 segit._load_next_segment() self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2') @@ -3062,8 +3085,9 @@ class TestSegmentedIterable(unittest.TestCase): 'Could not load object segment /a/lc/o1: 404') def test_iter_unexpected_error(self): + # Iterator value isn't a dict self.assertRaises(Exception, ''.join, - proxy_server.SegmentedIterable(self.controller, None, None)) + proxy_server.SegmentedIterable(self.controller, None, [None])) self.assertEquals(self.controller.exception_args[0], 'ERROR: While processing manifest /a/c/o tx1') @@ -3100,9 +3124,10 @@ class TestSegmentedIterable(unittest.TestCase): 'Could not load object segment /a/lc/o1: 
404') def test_app_iter_range_unexpected_error(self): + # Iterator value isn't a dict self.assertRaises(Exception, proxy_server.SegmentedIterable(self.controller, None, - None).app_iter_range(None, None).next) + [None]).app_iter_range(None, None).next) self.assertEquals(self.controller.exception_args[0], 'ERROR: While processing manifest /a/c/o tx1') From bf0a8e934cb82bd7567bd7cbe866ded827a18c55 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 14 Dec 2010 11:20:12 -0800 Subject: [PATCH 026/199] Fixed a bug where a HEAD on a really, really large object would give a content-length of 0 instead of transfer-encoding: chunked --- swift/proxy/server.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index ff6e5e310f..8250d181f3 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -780,8 +780,23 @@ class ObjectController(Controller): headers[key] = value resp = Response(headers=headers, request=req, conditional_response=True) - resp.app_iter = SegmentedIterable(self, lcontainer, - listing_iter(), resp) + if req.method == 'HEAD': + # These shenanigans are because webob translates the HEAD + # request into a webob EmptyResponse for the body, which + # has a len, which eventlet translates as needing a + # content-length header added. So we call the original + # webob resp for the headers but return an empty generator + # for the body. 
+ + def head_response(environ, start_response): + resp(environ, start_response) + return ('' for x in '') + + head_response.status_int = resp.status_int + return head_response + else: + resp.app_iter = SegmentedIterable(self, lcontainer, + listing_iter(), resp) else: # For objects with a reasonable number of segments, we'll serve From d13b34fdc1e2d66c31ab40d80213c6df475037ba Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 14 Dec 2010 13:51:24 -0800 Subject: [PATCH 027/199] x-copy-from now understands manifest sources and copies details rather than contents --- swift/proxy/server.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 8250d181f3..4ba651bc4f 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -172,6 +172,9 @@ class SegmentedIterable(object): self.response.status_int = 503 raise + def next(self): + return iter(self).next() + def __iter__(self): """ Standard iterator function that returns the object's contents. 
""" try: @@ -790,7 +793,7 @@ class ObjectController(Controller): def head_response(environ, start_response): resp(environ, start_response) - return ('' for x in '') + return iter([]) head_response.status_int = resp.status_int return head_response @@ -935,11 +938,17 @@ class ObjectController(Controller): return source_resp self.object_name = orig_obj_name self.container_name = orig_container_name - data_source = source_resp.app_iter new_req = Request.blank(req.path_info, environ=req.environ, headers=req.headers) - new_req.content_length = source_resp.content_length - new_req.etag = source_resp.etag + if 'x-object-manifest' in source_resp.headers: + data_source = iter(['']) + new_req.content_length = 0 + new_req.headers['X-Object-Manifest'] = \ + source_resp.headers['x-object-manifest'] + else: + data_source = source_resp.app_iter + new_req.content_length = source_resp.content_length + new_req.etag = source_resp.etag # we no longer need the X-Copy-From header del new_req.headers['X-Copy-From'] for k, v in source_resp.headers.items(): From 89ad6e727bc621e49d234162327f02e359d61b18 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 14 Dec 2010 14:16:38 -0800 Subject: [PATCH 028/199] Limit manifest gets to one segment per second; prevents amplification attacks of tons of tiny segments --- swift/proxy/server.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 4ba651bc4f..7dea1ebcde 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -28,6 +28,7 @@ import uuid import functools from hashlib import md5 +from eventlet import sleep from eventlet.timeout import Timeout from webob.exc import HTTPBadRequest, HTTPMethodNotAllowed, \ HTTPNotFound, HTTPPreconditionFailed, \ @@ -132,6 +133,7 @@ class SegmentedIterable(object): self.response = response if not self.response: self.response = Response() + self.next_get_time = 0 def _load_next_segment(self): """ @@ -152,6 +154,8 @@ class SegmentedIterable(object): if 
self.seek: req.range = 'bytes=%s-' % self.seek self.seek = 0 + sleep(max(self.next_get_time - time.time(), 0)) + self.next_get_time = time.time() + 1 resp = self.controller.GETorHEAD_base(req, 'Object', partition, self.controller.iter_nodes(partition, nodes, self.controller.app.object_ring), path, From 3e306e0f43cd70f81a74f11efe7d97890e7504a2 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 14 Dec 2010 14:25:12 -0800 Subject: [PATCH 029/199] Changed to only limit manifest gets after first 10 segments. Makes tests run faster but does allow amplification 1:10. At least it's not 1:infinity like before. --- swift/proxy/server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 7dea1ebcde..66d585c9e5 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -154,8 +154,9 @@ class SegmentedIterable(object): if self.seek: req.range = 'bytes=%s-' % self.seek self.seek = 0 - sleep(max(self.next_get_time - time.time(), 0)) - self.next_get_time = time.time() + 1 + if self.segment > 10: + sleep(max(self.next_get_time - time.time(), 0)) + self.next_get_time = time.time() + 1 resp = self.controller.GETorHEAD_base(req, 'Object', partition, self.controller.iter_nodes(partition, nodes, self.controller.app.object_ring), path, From 4400f0473a67989d1563262a3a6e544a161aef80 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 14 Dec 2010 14:49:36 -0800 Subject: [PATCH 030/199] Even though isn't 100% related, made st emit a warning if there's a / in a container name --- bin/st | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/bin/st b/bin/st index 748dfa475c..d5e1d1fab3 100755 --- a/bin/st +++ b/bin/st @@ -1000,6 +1000,10 @@ def st_delete(parser, args, print_queue, error_queue): raise error_queue.put('Account not found') elif len(args) == 1: + if '/' in args[0]: + print >> stderr, 'WARNING: / in container name; you might have ' \ + 'meant %r instead of %r.' 
% \ + (args[0].replace('/', ' ', 1), args[0]) conn = create_connection() _delete_container(args[0], conn) else: @@ -1159,6 +1163,10 @@ def st_download(options, args, print_queue, error_queue): raise error_queue.put('Account not found') elif len(args) == 1: + if '/' in args[0]: + print >> stderr, 'WARNING: / in container name; you might have ' \ + 'meant %r instead of %r.' % \ + (args[0].replace('/', ' ', 1), args[0]) _download_container(args[0], create_connection()) else: if len(args) == 2: @@ -1272,6 +1280,10 @@ Containers: %d raise error_queue.put('Account not found') elif len(args) == 1: + if '/' in args[0]: + print >> stderr, 'WARNING: / in container name; you might have ' \ + 'meant %r instead of %r.' % \ + (args[0].replace('/', ' ', 1), args[0]) try: headers = conn.head_container(args[0]) object_count = int(headers.get('x-container-object-count', 0)) @@ -1378,6 +1390,10 @@ def st_post(options, args, print_queue, error_queue): raise error_queue.put('Account not found') elif len(args) == 1: + if '/' in args[0]: + print >> stderr, 'WARNING: / in container name; you might have ' \ + 'meant %r instead of %r.' 
% \ + (args[0].replace('/', ' ', 1), args[0]) headers = {} for item in options.meta: split_item = item.split(':') From 276d6f82c615531d36603388092e8b318a3c1b0b Mon Sep 17 00:00:00 2001 From: John Dickinson Date: Thu, 16 Dec 2010 08:03:27 -0600 Subject: [PATCH 031/199] fixed lp bug 671704 --- etc/log-processing.conf-sample | 2 +- swift/stats/access_processor.py | 2 +- test/unit/stats/test_log_processor.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/etc/log-processing.conf-sample b/etc/log-processing.conf-sample index 11805add0b..7619d0599a 100644 --- a/etc/log-processing.conf-sample +++ b/etc/log-processing.conf-sample @@ -23,7 +23,7 @@ class_path = swift.stats.access_processor.AccessLogProcessor # load balancer private ips is for load balancer ip addresses that should be # counted as servicenet # lb_private_ips = -# server_name = proxy +# server_name = proxy-server # user = swift # warn_percent = 0.8 diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index f0ae3a023a..0f5814076f 100644 --- a/swift/stats/access_processor.py +++ b/swift/stats/access_processor.py @@ -26,7 +26,7 @@ class AccessLogProcessor(object): """Transform proxy server access logs""" def __init__(self, conf): - self.server_name = conf.get('server_name', 'proxy') + self.server_name = conf.get('server_name', 'proxy-server') self.lb_private_ips = [x.strip() for x in \ conf.get('lb_private_ips', '').split(',')\ if x.strip()] diff --git a/test/unit/stats/test_log_processor.py b/test/unit/stats/test_log_processor.py index 1221a05e9e..1a6450139f 100644 --- a/test/unit/stats/test_log_processor.py +++ b/test/unit/stats/test_log_processor.py @@ -65,7 +65,7 @@ class DumbInternalProxy(object): class TestLogProcessor(unittest.TestCase): - access_test_line = 'Jul 9 04:14:30 saio proxy 1.2.3.4 4.5.6.7 '\ + access_test_line = 'Jul 9 04:14:30 saio proxy-server 1.2.3.4 4.5.6.7 '\ '09/Jul/2010/04/14/30 GET '\ '/v1/acct/foo/bar?format=json&foo HTTP/1.0 200 - '\ 
'curl tk4e350daf-9338-4cc6-aabb-090e49babfbd '\ From 80bde91333bd54006d25a7c6d3baee02cd4587d8 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 16 Dec 2010 09:03:59 -0800 Subject: [PATCH 032/199] st: Works with chunked transfer encoded downloads now --- bin/st | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bin/st b/bin/st index d5e1d1fab3..8a4a71a245 100755 --- a/bin/st +++ b/bin/st @@ -1063,7 +1063,10 @@ def st_download(options, args, print_queue, error_queue): headers, body = \ conn.get_object(container, obj, resp_chunk_size=65536) content_type = headers.get('content-type') - content_length = int(headers.get('content-length')) + if 'content-length' in headers: + content_length = int(headers.get('content-length')) + else: + content_length = None etag = headers.get('etag') path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): @@ -1102,7 +1105,7 @@ def st_download(options, args, print_queue, error_queue): if md5sum and md5sum.hexdigest() != etag: error_queue.put('%s: md5sum != etag, %s != %s' % (path, md5sum.hexdigest(), etag)) - if read_length != content_length: + if content_length is not None and read_length != content_length: error_queue.put('%s: read_length != content_length, %d != %d' % (path, read_length, content_length)) if 'x-object-meta-mtime' in headers and not options.out_file: From a8b239e5a05e5964189126d291f836f99930fe93 Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 16 Dec 2010 09:21:30 -0800 Subject: [PATCH 033/199] Made stat display of objects suppress content-length, last-modified, and etag if they aren't in the headers --- bin/st | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/bin/st b/bin/st index 8a4a71a245..b41aca67ec 100755 --- a/bin/st +++ b/bin/st @@ -1323,14 +1323,16 @@ Write ACL: %s'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], args[0], Account: %s Container: %s Object: %s - Content Type: %s -Content Length: %s - Last Modified: %s - ETag: 
%s'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], args[0], - args[1], headers.get('content-type'), - headers.get('content-length'), - headers.get('last-modified'), - headers.get('etag'))) + Content Type: %s'''.strip('\n') % (conn.url.rsplit('/', 1)[-1], args[0], + args[1], headers.get('content-type'))) + if 'content-length' in headers: + print_queue.put('Content Length: %s' % + headers['content-length']) + if 'last-modified' in headers: + print_queue.put(' Last Modified: %s' % + headers['last-modified']) + if 'etag' in headers: + print_queue.put(' ETag: %s' % headers['etag']) if 'x-object-manifest' in headers: print_queue.put(' Manifest: %s' % headers['x-object-manifest']) From fa31d76eee4cd1024c1c99ca6a9d00e1b483ac6d Mon Sep 17 00:00:00 2001 From: gholt Date: Thu, 16 Dec 2010 10:46:11 -0800 Subject: [PATCH 034/199] lobjects: The Last-Modified header is now determined for reasonably segmented objects. --- swift/proxy/server.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 66d585c9e5..a0fefc7ab7 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -20,9 +20,11 @@ except ImportError: import json import mimetypes import os +import re import time import traceback from ConfigParser import ConfigParser +from datetime import datetime from urllib import unquote, quote import uuid import functools @@ -793,7 +795,7 @@ class ObjectController(Controller): # request into a webob EmptyResponse for the body, which # has a len, which eventlet translates as needing a # content-length header added. So we call the original - # webob resp for the headers but return an empty generator + # webob resp for the headers but return an empty iterator # for the body. def head_response(environ, start_response): @@ -810,6 +812,9 @@ class ObjectController(Controller): # For objects with a reasonable number of segments, we'll serve # them with a set content-length and computed etag. 
content_length = sum(o['bytes'] for o in listing) + last_modified = max(o['last_modified'] for o in listing) + last_modified = \ + datetime(*map(int, re.split('[^\d]', last_modified)[:-1])) etag = md5('"'.join(o['hash'] for o in listing)).hexdigest() headers = { 'X-Object-Manifest': resp.headers['x-object-manifest'], @@ -824,6 +829,7 @@ class ObjectController(Controller): resp.app_iter = SegmentedIterable(self, lcontainer, listing, resp) resp.content_length = content_length + resp.last_modified = last_modified return resp From 893fdd0907e888b9d34ba4f9ecc2067bd5bbe57a Mon Sep 17 00:00:00 2001 From: David Goetz Date: Thu, 16 Dec 2010 16:20:57 -0800 Subject: [PATCH 035/199] adding in rate limiting and unit tests --- swift/common/utils.py | 27 +++++++++++ swift/obj/auditor.py | 37 ++++++++++++--- swift/obj/server.py | 1 + test/unit/common/test_utils.py | 36 ++++++++++++++ test/unit/obj/test_auditor.py | 86 +++++++++++++++++++++++++++++++++- 5 files changed, 178 insertions(+), 9 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 1c48c61339..2df3145654 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -755,3 +755,30 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): reverse=True): path = os.path.join(hash_path, fname) yield path, device, partition + + +def ratelimit_sleep(running_time, max_rate, incr_by=1): + ''' + Will time.sleep() for the appropriate time so that the max_rate + is never exceeded. If max_rate is 0, will not ratelimit. The + maximum recommended rate should not exceed (1000 * incr_by) a second + as time.sleep() does involve at least a millisecond of overhead. + Returns running_time that should be used for subsequent calls. + + :param running_time: the running time of the next allowable request. Best + to start at zero. + :param max_rate: The maximum rate per second allowed for the process. + :param incr_by: How much to increment the counter. 
Useful if you want + to ratelimit 1024 bytes/sec and have differing sizes + of requests. Must be >= 0. + ''' + if not max_rate or incr_by < 0: + return 0 + clock_accuracy = 1000.0 + now = time.time() * clock_accuracy + time_per_request = clock_accuracy * (float(incr_by) / max_rate) + if running_time < now: + running_time = now + elif running_time - now > time_per_request: + time.sleep((running_time - now) / clock_accuracy) + return running_time + time_per_request diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index cdcb0c3ecf..1ff74f8b25 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -20,7 +20,8 @@ from random import random from swift.obj import server as object_server from swift.obj.replicator import invalidate_hash -from swift.common.utils import get_logger, renamer, audit_location_generator +from swift.common.utils import get_logger, renamer, audit_location_generator, \ + ratelimit_sleep from swift.common.exceptions import AuditException from swift.common.daemon import Daemon @@ -35,14 +36,20 @@ class ObjectAuditor(Daemon): self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') self.interval = int(conf.get('interval', 1800)) + self.max_files_per_second = float(conf.get('files_per_second', 0)) + self.max_bytes_per_second = float(conf.get('bytes_per_second', 0)) + self.files_running_time = 0 + self.bytes_running_time = 0 + self.bytes_processed = 0 self.passes = 0 self.quarantines = 0 self.errors = 0 - def run_forever(self): # pragma: no cover + def run_forever(self, init_sleep=True): """Run the object audit until stopped.""" reported = time.time() - time.sleep(random() * self.interval) + if init_sleep: + time.sleep(random() * self.interval) while True: begin = time.time() all_locs = audit_location_generator(self.devices, @@ -51,15 +58,21 @@ class ObjectAuditor(Daemon): logger=self.logger) for path, device, partition in all_locs: self.object_audit(path, device, partition) + 
self.files_running_time = ratelimit_sleep( + self.files_running_time, self.max_files_per_second) if time.time() - reported >= 3600: # once an hour self.logger.info( 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors' % (time.ctime(reported), self.passes, - self.quarantines, self.errors)) + '%d errors files/sec: %.2f , bytes/sec: %.2f' % ( + time.ctime(reported), self.passes, + self.quarantines, self.errors, + self.passes / (time.time() - reported), + self.bytes_processed / (time.time() - reported))) reported = time.time() self.passes = 0 self.quarantines = 0 self.errors = 0 + self.bytes_processed = 0 elapsed = time.time() - begin if elapsed < self.interval: time.sleep(self.interval - elapsed) @@ -74,15 +87,21 @@ class ObjectAuditor(Daemon): logger=self.logger) for path, device, partition in all_locs: self.object_audit(path, device, partition) + self.files_running_time = ratelimit_sleep( + self.files_running_time, self.max_files_per_second) if time.time() - reported >= 3600: # once an hour self.logger.info( 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors' % (time.ctime(reported), self.passes, - self.quarantines, self.errors)) + '%d errors files/sec: %.2f , bytes/sec: %.2f' % ( + time.ctime(reported), self.passes, + self.quarantines, self.errors, + self.passes / (time.time() - reported), + self.bytes_processed / (time.time() - reported))) reported = time.time() self.passes = 0 self.quarantines = 0 self.errors = 0 + self.bytes_processed = 0 elapsed = time.time() - begin self.logger.info( 'Object audit "once" mode completed: %.02fs' % elapsed) @@ -117,7 +136,11 @@ class ObjectAuditor(Daemon): os.path.getsize(df.data_file))) etag = md5() for chunk in df: + self.bytes_running_time = ratelimit_sleep( + self.bytes_running_time, self.max_bytes_per_second, + incr_by=len(chunk)) etag.update(chunk) + self.bytes_processed += len(chunk) etag = etag.hexdigest() if etag != df.metadata['ETag']: raise AuditException("ETag of %s does not match 
file's md5 of " diff --git a/swift/obj/server.py b/swift/obj/server.py index 632a0c04cc..d7b6b49fdf 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -97,6 +97,7 @@ class DiskFile(object): self.metadata = {} self.meta_file = None self.data_file = None + self.fp = None if not os.path.exists(self.datadir): return files = sorted(os.listdir(self.datadir), reverse=True) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index c41d147f79..b680a27793 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -21,6 +21,7 @@ import mimetools import os import socket import sys +import time import unittest from getpass import getuser from shutil import rmtree @@ -446,5 +447,40 @@ log_name = yarr''' self.assertNotEquals(utils.get_logger.console, old_handler) logger.logger.removeHandler(utils.get_logger.console) + def test_ratelimit_sleep(self): + running_time = 0 + start = time.time() + for i in range(100): + running_time = utils.ratelimit_sleep(running_time, 0) + self.assertTrue(abs((time.time() - start)* 1000) < 1) + + running_time = 0 + start = time.time() + for i in range(50): + running_time = utils.ratelimit_sleep(running_time, 200) + # make sure its accurate to 2/100 of a second + self.assertTrue(abs(25 - (time.time() - start)* 100) < 2) + + def test_ratelimit_sleep_with_sleep(self): + running_time = 0 + start = time.time() + for i in range(25): + running_time = utils.ratelimit_sleep(running_time, 50) + time.sleep(1.0/75) + # make sure its accurate to 2/100 of a second + self.assertTrue(abs(50 - (time.time() - start)* 100) < 2) + + def test_ratelimit_sleep_with_incr(self): + running_time = 0 + start = time.time() + vals = [5,17,0,3,11,30,40,4,13,2,-1] * 2 # adds up to 250 (with no -1) + total = 0 + for i in vals: + running_time = utils.ratelimit_sleep(running_time, + 500, incr_by=i) + total += i + self.assertTrue(abs(50 - (time.time() - start)* 100) < 2) + + if __name__ == '__main__': unittest.main() diff 
--git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index cf8a2bc37c..9686de8988 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -16,12 +16,94 @@ # TODO: Tests import unittest +import tempfile +import os +import time +from shutil import rmtree +from hashlib import md5 from swift.obj import auditor +from swift.obj.server import DiskFile +from swift.common.utils import hash_path, mkdirs, normalize_timestamp +from swift.common.exceptions import AuditException class TestAuditor(unittest.TestCase): - def test_placeholder(self): - pass + def setUp(self): + # Setup a test ring (stolen from common/test_ring.py) + self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') + if not self.path_to_test_xfs or \ + not os.path.exists(self.path_to_test_xfs): + print >>sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ + 'pointing to a valid directory.\n' \ + 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ + 'system for testing.' + self.testdir = '/tmp/SWIFTUNITTEST' + else: + self.testdir = os.path.join(self.path_to_test_xfs, + 'tmp_test_object_auditor') + + self.devices = os.path.join(self.testdir, 'node') + rmtree(self.testdir, ignore_errors=1) + os.mkdir(self.testdir) + os.mkdir(self.devices) + os.mkdir(os.path.join(self.devices, 'sda')) + self.objects = os.path.join(self.devices, 'sda', 'objects') + os.mkdir(self.objects) + self.parts = {} + for part in ['0', '1', '2', '3']: + self.parts[part] = os.path.join(self.objects, part) + os.mkdir(os.path.join(self.objects, part)) + + self.conf = dict( + devices=self.devices, + mount_check='false', + timeout='300', stats_interval='1') + + def tearDown(self): + rmtree(self.testdir, ignore_errors=1) + + def test_object_audit(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') + data = '0' * 1024 + etag = md5() + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, 
data) + etag.update(data) + etag = etag.hexdigest() + timestamp = str(normalize_timestamp(time.time())) + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + pre_quarantines = self.auditor.quarantines + + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines) + + # etag = md5() + # etag.update(data) + # etag = etag.hexdigest() + # metadata['ETag'] = etag + # disk_file.put(fd, tmppath, metadata) + + # self.auditor.object_audit( + # os.path.join(disk_file.datadir, timestamp + '.data'), + # 'sda', cur_part) + # self.assertEquals(self.auditor.quarantines, pre_quarantines) + + os.write(fd, 'bad_data') + disk_file.close() + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) if __name__ == '__main__': From 7bd0184bfe0fff32875755206049863ec65c7789 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Fri, 17 Dec 2010 00:27:08 -0800 Subject: [PATCH 036/199] more unit tests, refactoring, and loc gen fix --- swift/common/utils.py | 8 +-- swift/obj/auditor.py | 39 ++----------- swift/obj/server.py | 22 +++++-- test/unit/obj/test_auditor.py | 104 +++++++++++++++++++++++++++++----- 4 files changed, 115 insertions(+), 58 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 2df3145654..75f6409ba6 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -733,12 +733,12 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): logger.debug( 'Skipping %s as it is not mounted' % device) continue - datadir = os.path.join(devices, device, datadir) - if not os.path.exists(datadir): + datadir_path = os.path.join(devices, device, datadir) + if not os.path.exists(datadir_path): continue - partitions = 
os.listdir(datadir) + partitions = os.listdir(datadir_path) for partition in partitions: - part_path = os.path.join(datadir, partition) + part_path = os.path.join(datadir_path, partition) if not os.path.isdir(part_path): continue suffixes = os.listdir(part_path) diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 1ff74f8b25..98dc077640 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -35,7 +35,6 @@ class ObjectAuditor(Daemon): self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') - self.interval = int(conf.get('interval', 1800)) self.max_files_per_second = float(conf.get('files_per_second', 0)) self.max_bytes_per_second = float(conf.get('bytes_per_second', 0)) self.files_running_time = 0 @@ -45,41 +44,15 @@ class ObjectAuditor(Daemon): self.quarantines = 0 self.errors = 0 - def run_forever(self, init_sleep=True): + def run_forever(self): """Run the object audit until stopped.""" - reported = time.time() - if init_sleep: - time.sleep(random() * self.interval) while True: - begin = time.time() - all_locs = audit_location_generator(self.devices, - object_server.DATADIR, - mount_check=self.mount_check, - logger=self.logger) - for path, device, partition in all_locs: - self.object_audit(path, device, partition) - self.files_running_time = ratelimit_sleep( - self.files_running_time, self.max_files_per_second) - if time.time() - reported >= 3600: # once an hour - self.logger.info( - 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors files/sec: %.2f , bytes/sec: %.2f' % ( - time.ctime(reported), self.passes, - self.quarantines, self.errors, - self.passes / (time.time() - reported), - self.bytes_processed / (time.time() - reported))) - reported = time.time() - self.passes = 0 - self.quarantines = 0 - self.errors = 0 - self.bytes_processed = 0 - elapsed = time.time() - begin - if elapsed < self.interval: - time.sleep(self.interval - 
elapsed) + self.run_once('forever') + time.sleep(30) - def run_once(self): + def run_once(self, mode='once'): """Run the object audit once.""" - self.logger.info('Begin object audit "once" mode') + self.logger.info('Begin object audit "%s" mode' % mode) begin = reported = time.time() all_locs = audit_location_generator(self.devices, object_server.DATADIR, @@ -104,7 +77,7 @@ class ObjectAuditor(Daemon): self.bytes_processed = 0 elapsed = time.time() - begin self.logger.info( - 'Object audit "once" mode completed: %.02fs' % elapsed) + 'Object audit "%s" mode completed: %.02fs' % (mode, elapsed)) def object_audit(self, path, device, partition): """ diff --git a/swift/obj/server.py b/swift/obj/server.py index d7b6b49fdf..a690f7fe2a 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -72,6 +72,21 @@ def read_metadata(fd): return pickle.loads(metadata) +def write_metadata(fd, metadata): + """ + Helper function to write pickled metadata for an object file. + + :param fd: file descriptor to write the metadata + :param metadata: metadata to write + """ + metastr = pickle.dumps(metadata, PICKLE_PROTOCOL) + key = 0 + while metastr: + setxattr(fd, '%s%s' % (METADATA_KEY, key or ''), metastr[:254]) + metastr = metastr[254:] + key += 1 + + class DiskFile(object): """ Manage object files on disk. 
@@ -209,12 +224,7 @@ class DiskFile(object): """ metadata['name'] = self.name timestamp = normalize_timestamp(metadata['X-Timestamp']) - metastr = pickle.dumps(metadata, PICKLE_PROTOCOL) - key = 0 - while metastr: - setxattr(fd, '%s%s' % (METADATA_KEY, key or ''), metastr[:254]) - metastr = metastr[254:] - key += 1 + write_metadata(fd, metadata) if 'Content-Length' in metadata: drop_buffer_cache(fd, 0, int(metadata['Content-Length'])) os.fsync(fd) diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 9686de8988..90344420a7 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -22,7 +22,7 @@ import time from shutil import rmtree from hashlib import md5 from swift.obj import auditor -from swift.obj.server import DiskFile +from swift.obj.server import DiskFile, write_metadata from swift.common.utils import hash_path, mkdirs, normalize_timestamp from swift.common.exceptions import AuditException @@ -48,6 +48,10 @@ class TestAuditor(unittest.TestCase): os.mkdir(self.devices) os.mkdir(os.path.join(self.devices, 'sda')) self.objects = os.path.join(self.devices, 'sda', 'objects') + + os.mkdir(os.path.join(self.devices, 'sdb')) + self.objects_2 = os.path.join(self.devices, 'sdb', 'objects') + os.mkdir(self.objects) self.parts = {} for part in ['0', '1', '2', '3']: @@ -62,7 +66,7 @@ class TestAuditor(unittest.TestCase): def tearDown(self): rmtree(self.testdir, ignore_errors=1) - def test_object_audit(self): + def test_object_audit_extra_data(self): self.auditor = auditor.ObjectAuditor( self.conf) cur_part = '0' @@ -87,24 +91,94 @@ class TestAuditor(unittest.TestCase): 'sda', cur_part) self.assertEquals(self.auditor.quarantines, pre_quarantines) - # etag = md5() - # etag.update(data) - # etag = etag.hexdigest() - # metadata['ETag'] = etag - # disk_file.put(fd, tmppath, metadata) - - # self.auditor.object_audit( - # os.path.join(disk_file.datadir, timestamp + '.data'), - # 'sda', cur_part) - # 
self.assertEquals(self.auditor.quarantines, pre_quarantines) - - os.write(fd, 'bad_data') - disk_file.close() + os.write(fd, 'extra_data') self.auditor.object_audit( os.path.join(disk_file.datadir, timestamp + '.data'), 'sda', cur_part) self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + def test_object_audit_diff_data(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') + data = '0' * 1024 + etag = md5() + timestamp = str(normalize_timestamp(time.time())) + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + pre_quarantines = self.auditor.quarantines + + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines) + etag = md5() + etag.update('1' + '0' * 1023) + etag = etag.hexdigest() + metadata['ETag'] = etag + write_metadata(fd, metadata) + + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + + def test_object_run_once_pass(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + timestamp = str(normalize_timestamp(time.time())) + pre_quarantines = self.auditor.quarantines + disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') + data = '0' * 1024 + etag = md5() + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + disk_file.close() + self.auditor.run_once() + 
self.assertEquals(self.auditor.quarantines, pre_quarantines) + + def test_object_run_once_multi_devices(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + timestamp = str(normalize_timestamp(time.time())) + pre_quarantines = self.auditor.quarantines + disk_file = DiskFile(self.devices, 'sdb', cur_part, 'a', 'c', 'o') + data = '0' * 1024 + etag = md5() + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + disk_file.close() + os.write(fd, 'extra_data') + self.auditor.run_once() + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + if __name__ == '__main__': unittest.main() From b024da46c616a9652850034d0fde27492fa98dd7 Mon Sep 17 00:00:00 2001 From: gholt Date: Mon, 20 Dec 2010 08:35:29 -0800 Subject: [PATCH 037/199] swauth: Fixed unit tests for webob changes --- test/unit/common/middleware/test_swauth.py | 60 +++++++++++----------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 10f05a053e..f52a0d8d5c 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -346,12 +346,12 @@ class TestAuth(unittest.TestCase): def test_authorize_bad_path(self): req = Request.blank('/badpath') - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('401'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 401) req = Request.blank('/badpath') req.remote_user = 'act:usr,act,AUTH_cfa' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) def test_authorize_account_access(self): req = Request.blank('/v1/AUTH_cfa') @@ 
-359,14 +359,14 @@ class TestAuth(unittest.TestCase): self.assertEquals(self.test_auth.authorize(req), None) req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) def test_authorize_acl_group_access(self): req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' req.acl = 'act' @@ -378,27 +378,27 @@ class TestAuth(unittest.TestCase): req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' req.acl = 'act2' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' req.acl = 'act:usr2' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) def test_deny_cross_reseller(self): # Tests that cross-reseller is denied, even if ACLs/group names match req = Request.blank('/v1/OTHER_cfa') req.remote_user = 'act:usr,act,AUTH_cfa' req.acl = 'act' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) def test_authorize_acl_referrer_access(self): req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_cfa') req.remote_user = 
'act:usr,act' req.acl = '.r:*' @@ -406,23 +406,23 @@ class TestAuth(unittest.TestCase): req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' req.acl = '.r:.example.com' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_cfa') req.remote_user = 'act:usr,act' req.referer = 'http://www.example.com/index.html' req.acl = '.r:.example.com' self.assertEquals(self.test_auth.authorize(req), None) req = Request.blank('/v1/AUTH_cfa') - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('401'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 401) req = Request.blank('/v1/AUTH_cfa') req.acl = '.r:*' self.assertEquals(self.test_auth.authorize(req), None) req = Request.blank('/v1/AUTH_cfa') req.acl = '.r:.example.com' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('401'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 401) req = Request.blank('/v1/AUTH_cfa') req.referer = 'http://www.example.com/index.html' req.acl = '.r:.example.com' @@ -431,19 +431,19 @@ class TestAuth(unittest.TestCase): def test_account_put_permissions(self): req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) req.remote_user = 'act:usr,act' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) req.remote_user = 'act:usr,act,AUTH_other' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) # Even PUTs to your own account as account admin should fail req = Request.blank('/v1/AUTH_old', environ={'REQUEST_METHOD': 
'PUT'}) req.remote_user = 'act:usr,act,AUTH_old' - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) req.remote_user = 'act:usr,act,.reseller_admin' @@ -455,8 +455,8 @@ class TestAuth(unittest.TestCase): req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) req.remote_user = 'act:usr,act,.super_admin' resp = self.test_auth.authorize(req) - resp = str(self.test_auth.authorize(req)) - self.assert_(resp.startswith('403'), resp) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) def test_get_token_fail(self): resp = Request.blank('/auth/v1.0').get_response(self.test_auth) From d7dd3ec0659aaee87f480bcbe5d3d5cdb33d8c5b Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Mon, 20 Dec 2010 21:47:50 +0000 Subject: [PATCH 038/199] gettext updates --- swift/account/auditor.py | 27 +++--- swift/account/reaper.py | 32 ++++---- swift/account/server.py | 10 +-- swift/common/bench.py | 11 +-- swift/common/bufferedhttp.py | 14 ++-- swift/common/db.py | 11 +-- swift/common/db_replicator.py | 54 ++++++------ swift/common/memcached.py | 10 ++- swift/common/middleware/catch_errors.py | 4 +- swift/common/middleware/ratelimit.py | 16 ++-- swift/common/utils.py | 24 +++--- swift/container/auditor.py | 27 +++--- swift/container/server.py | 29 ++++--- swift/container/updater.py | 39 +++++---- swift/obj/auditor.py | 27 +++--- swift/obj/replicator.py | 104 +++++++++++++----------- swift/obj/server.py | 21 ++--- swift/obj/updater.py | 41 ++++++---- swift/proxy/server.py | 62 +++++++------- test/unit/auth/test_server.py | 8 +- test/unit/common/test_daemon.py | 2 +- test/unit/common/test_utils.py | 18 ++-- 22 files changed, 306 insertions(+), 285 deletions(-) diff --git a/swift/account/auditor.py b/swift/account/auditor.py index fc14ec520c..01afb7d469 100644 --- 
a/swift/account/auditor.py +++ b/swift/account/auditor.py @@ -16,6 +16,7 @@ import os import time from random import random +from gettext import gettext as _ from swift.account import server as account_server from swift.common.db import AccountBroker @@ -49,11 +50,11 @@ class AccountAuditor(Daemon): for path, device, partition in all_locs: self.account_audit(path) if time.time() - reported >= 3600: # once an hour - self.logger.info( - 'Since %s: Account audits: %s passed audit, ' - '%s failed audit' % (time.ctime(reported), - self.account_passes, - self.account_failures)) + self.logger.info(_('Since %(time)s: Account audits: ' + '%(passed)s passed audit, %(failed)s failed audit'), + {'time': time.ctime(reported), + 'passed': self.account_passes, + 'failed': self.account_failures}) reported = time.time() self.account_passes = 0 self.account_failures = 0 @@ -72,17 +73,17 @@ class AccountAuditor(Daemon): for path, device, partition in all_locs: self.account_audit(path) if time.time() - reported >= 3600: # once an hour - self.logger.info( - 'Since %s: Account audits: %s passed audit, ' - '%s failed audit' % (time.ctime(reported), - self.account_passes, - self.account_failures)) + self.logger.info(_('Since %(time)s: Account audits: ' + '%(passed)s passed audit, %(failed)s failed audit'), + {'time': time.ctime(reported), + 'passed': self.account_passes, + 'failed': self.account_failures}) reported = time.time() self.account_passes = 0 self.account_failures = 0 elapsed = time.time() - begin self.logger.info( - 'Account audit "once" mode completed: %.02fs' % elapsed) + 'Account audit "once" mode completed: %.02fs', elapsed) def account_audit(self, path): """ @@ -97,8 +98,8 @@ class AccountAuditor(Daemon): if not broker.is_deleted(): info = broker.get_info() self.account_passes += 1 - self.logger.debug('Audit passed for %s' % broker.db_file) + self.logger.debug(_('Audit passed for %s') % broker.db_file) except Exception: self.account_failures += 1 - 
self.logger.exception('ERROR Could not get account info %s' % + self.logger.exception(_('ERROR Could not get account info %s'), (broker.db_file)) diff --git a/swift/account/reaper.py b/swift/account/reaper.py index 814f7551e5..0225209392 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -18,6 +18,7 @@ import random from logging import DEBUG from math import sqrt from time import time +from gettext import gettext as _ from eventlet import GreenPool, sleep @@ -77,7 +78,7 @@ class AccountReaper(Daemon): """ The account :class:`swift.common.ring.Ring` for the cluster. """ if not self.account_ring: self.logger.debug( - 'Loading account ring from %s' % self.account_ring_path) + _('Loading account ring from %s'), self.account_ring_path) self.account_ring = Ring(self.account_ring_path) return self.account_ring @@ -85,7 +86,7 @@ class AccountReaper(Daemon): """ The container :class:`swift.common.ring.Ring` for the cluster. """ if not self.container_ring: self.logger.debug( - 'Loading container ring from %s' % self.container_ring_path) + _('Loading container ring from %s'), self.container_ring_path) self.container_ring = Ring(self.container_ring_path) return self.container_ring @@ -93,7 +94,7 @@ class AccountReaper(Daemon): """ The object :class:`swift.common.ring.Ring` for the cluster. """ if not self.object_ring: self.logger.debug( - 'Loading object ring from %s' % self.object_ring_path) + _('Loading object ring from %s'), self.object_ring_path) self.object_ring = Ring(self.object_ring_path) return self.object_ring @@ -103,7 +104,7 @@ class AccountReaper(Daemon): This repeatedly calls :func:`reap_once` no quicker than the configuration interval. """ - self.logger.debug('Daemon started.') + self.logger.debug(_('Daemon started.')) sleep(random.random() * self.interval) while True: begin = time() @@ -119,17 +120,17 @@ class AccountReaper(Daemon): repeatedly by :func:`run_forever`. This will call :func:`reap_device` once for each device on the server. 
""" - self.logger.debug('Begin devices pass: %s' % self.devices) + self.logger.debug(_('Begin devices pass: %s'), self.devices) begin = time() for device in os.listdir(self.devices): if self.mount_check and \ not os.path.ismount(os.path.join(self.devices, device)): self.logger.debug( - 'Skipping %s as it is not mounted' % device) + _('Skipping %s as it is not mounted'), device) continue self.reap_device(device) elapsed = time() - begin - self.logger.info('Devices pass completed: %.02fs' % elapsed) + self.logger.info(_('Devices pass completed: %.02fs'), elapsed) def reap_device(self, device): """ @@ -212,7 +213,7 @@ class AccountReaper(Daemon): """ begin = time() account = broker.get_info()['account'] - self.logger.info('Beginning pass on account %s' % account) + self.logger.info(_('Beginning pass on account %s'), account) self.stats_return_codes = {} self.stats_containers_deleted = 0 self.stats_objects_deleted = 0 @@ -235,12 +236,12 @@ class AccountReaper(Daemon): self.container_pool.waitall() except Exception: self.logger.exception( - 'Exception with containers for account %s' % account) + _('Exception with containers for account %s'), account) marker = containers[-1][0] log = 'Completed pass on account %s' % account except Exception: self.logger.exception( - 'Exception with account %s' % account) + _('Exception with account %s'), account) log = 'Incomplete pass on account %s' % account if self.stats_containers_deleted: log += ', %s containers deleted' % self.stats_containers_deleted @@ -317,7 +318,7 @@ class AccountReaper(Daemon): except ClientException, err: if self.logger.getEffectiveLevel() <= DEBUG: self.logger.exception( - 'Exception with %(ip)s:%(port)s/%(device)s' % node) + _('Exception with %(ip)s:%(port)s/%(device)s'), node) self.stats_return_codes[err.http_status / 100] = \ self.stats_return_codes.get(err.http_status / 100, 0) + 1 if not objects: @@ -330,8 +331,9 @@ class AccountReaper(Daemon): nodes, obj['name']) pool.waitall() except Exception: - 
self.logger.exception('Exception with objects for container ' - '%s for account %s' % (container, account)) + self.logger.exception(_('Exception with objects for container ' + '%(container)s for account %(account)s'), + {'container': container, 'account': account}) marker = objects[-1]['name'] successes = 0 failures = 0 @@ -351,7 +353,7 @@ class AccountReaper(Daemon): except ClientException, err: if self.logger.getEffectiveLevel() <= DEBUG: self.logger.exception( - 'Exception with %(ip)s:%(port)s/%(device)s' % node) + _('Exception with %(ip)s:%(port)s/%(device)s'), node) failures += 1 self.stats_return_codes[err.http_status / 100] = \ self.stats_return_codes.get(err.http_status / 100, 0) + 1 @@ -402,7 +404,7 @@ class AccountReaper(Daemon): except ClientException, err: if self.logger.getEffectiveLevel() <= DEBUG: self.logger.exception( - 'Exception with %(ip)s:%(port)s/%(device)s' % node) + _('Exception with %(ip)s:%(port)s/%(device)s'), node) failures += 1 self.stats_return_codes[err.http_status / 100] = \ self.stats_return_codes.get(err.http_status / 100, 0) + 1 diff --git a/swift/account/server.py b/swift/account/server.py index 5bd7bba517..67572165f5 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -18,15 +18,15 @@ from __future__ import with_statement import os import time import traceback - from urllib import unquote +from xml.sax import saxutils +from gettext import gettext as _ from webob import Request, Response from webob.exc import HTTPAccepted, HTTPBadRequest, \ HTTPCreated, HTTPForbidden, HTTPInternalServerError, \ HTTPMethodNotAllowed, HTTPNoContent, HTTPNotFound, HTTPPreconditionFailed import simplejson -from xml.sax import saxutils from swift.common.db import AccountBroker from swift.common.utils import get_logger, get_param, hash_path, \ @@ -307,10 +307,8 @@ class AccountController(object): else: res = HTTPMethodNotAllowed() except: - self.logger.exception('ERROR __call__ error with %s %s ' - 'transaction %s' % 
(env.get('REQUEST_METHOD', '-'), - env.get('PATH_INFO', '-'), env.get('HTTP_X_CF_TRANS_ID', - '-'))) + self.logger.exception(_('ERROR __call__ error with %(method)s' + ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) trans_time = '%.4f' % (time.time() - start_time) additional_info = '' diff --git a/swift/common/bench.py b/swift/common/bench.py index b50df4dcb8..1e525c2e7d 100644 --- a/swift/common/bench.py +++ b/swift/common/bench.py @@ -18,6 +18,7 @@ import time import random from urlparse import urlparse from contextlib import contextmanager +from gettext import gettext as _ import eventlet.pools from eventlet.green.httplib import CannotSendRequest @@ -82,10 +83,10 @@ class Bench(object): def _log_status(self, title): total = time.time() - self.beginbeat - self.logger.info('%s %s [%s failures], %.01f/s' % ( - self.complete, title, self.failures, - (float(self.complete) / total), - )) + self.logger.info(_('%(complete)s %(title)s [%(fail)s failures], ' + '%(rate).01f/s'), + {'title': title, 'complete': self.complete, 'fail': self.failures, + 'rate': (float(self.complete) / total)}) @contextmanager def connection(self): @@ -94,7 +95,7 @@ class Bench(object): try: yield hc except CannotSendRequest: - self.logger.info("CannotSendRequest. Skipping...") + self.logger.info(_("CannotSendRequest. Skipping...")) try: hc.close() except: diff --git a/swift/common/bufferedhttp.py b/swift/common/bufferedhttp.py index 6b308e5b01..536793fc87 100644 --- a/swift/common/bufferedhttp.py +++ b/swift/common/bufferedhttp.py @@ -29,6 +29,7 @@ BufferedHTTPResponse. 
from urllib import quote import logging import time +from gettext import gettext as _ from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \ HTTPResponse, HTTPSConnection, _UNKNOWN @@ -82,15 +83,9 @@ class BufferedHTTPConnection(HTTPConnection): def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): self._method = method self._path = url - self._txn_id = '-' return HTTPConnection.putrequest(self, method, url, skip_host, skip_accept_encoding) - def putheader(self, header, value): - if header.lower() == 'x-cf-trans-id': - self._txn_id = value - return HTTPConnection.putheader(self, header, value) - def getexpect(self): response = BufferedHTTPResponse(self.sock, strict=self.strict, method=self._method) @@ -99,9 +94,10 @@ class BufferedHTTPConnection(HTTPConnection): def getresponse(self): response = HTTPConnection.getresponse(self) - logging.debug("HTTP PERF: %.5f seconds to %s %s:%s %s (%s)" % - (time.time() - self._connected_time, self._method, self.host, - self.port, self._path, self._txn_id)) + logging.debug(_("HTTP PERF: %(time).5f seconds to %(method)s " + "%(host)s:%(port)s %(path)s)"), + {'time': time.time() - self._connected_time, 'method': self._method, + 'host': self.host, 'port': self.port, 'path': self._path}) return response diff --git a/swift/common/db.py b/swift/common/db.py index 41854407d6..6f7372c22c 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -27,6 +27,7 @@ import cPickle as pickle import errno from random import randint from tempfile import mkstemp +from gettext import gettext as _ from eventlet import sleep import simplejson as json @@ -295,7 +296,7 @@ class DatabaseBroker(object): self.conn = conn except: # pragma: no cover logging.exception( - 'Broker error trying to rollback locked connection') + _('Broker error trying to rollback locked connection')) conn.close() def newid(self, remote_id): @@ -750,8 +751,8 @@ class ContainerBroker(DatabaseBroker): 'deleted': deleted}) except: 
self.logger.exception( - 'Invalid pending entry %s: %s' - % (self.pending_file, entry)) + _('Invalid pending entry %(file)s: %(entry)s'), + {'file': self.pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) try: @@ -1217,8 +1218,8 @@ class AccountBroker(DatabaseBroker): 'deleted': deleted}) except: self.logger.exception( - 'Invalid pending entry %s: %s' - % (self.pending_file, entry)) + _('Invalid pending entry %(file)s: %(entry)s'), + {'file': self.pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) try: diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 8519e7128f..89e0590f7e 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -20,6 +20,7 @@ import random import math import time import shutil +from gettext import gettext as _ from eventlet import GreenPool, sleep, Timeout from eventlet.green import subprocess @@ -81,7 +82,7 @@ class ReplConnection(BufferedHTTPConnection): return response except: self.logger.exception( - 'ERROR reading HTTP response from %s' % self.node) + _('ERROR reading HTTP response from %s'), self.node) return None @@ -120,12 +121,14 @@ class Replicator(Daemon): def _report_stats(self): """Report the current stats to the logs.""" self.logger.info( - 'Attempted to replicate %d dbs in %.5f seconds (%.5f/s)' - % (self.stats['attempted'], time.time() - self.stats['start'], - self.stats['attempted'] / - (time.time() - self.stats['start'] + 0.0000001))) - self.logger.info('Removed %(remove)d dbs' % self.stats) - self.logger.info('%(success)s successes, %(failure)s failures' + _('Attempted to replicate %(count)d dbs in %(time).5f seconds ' + '(%(rate).5f/s)'), + {'count': self.stats['attempted'], + 'time': time.time() - self.stats['start'], + 'rate': self.stats['attempted'] / + (time.time() - self.stats['start'] + 0.0000001)}) + self.logger.info(_('Removed %(remove)d dbs') % self.stats) + self.logger.info(_('%(success)s successes, %(failure)s 
failures') % self.stats) self.logger.info(' '.join(['%s:%s' % item for item in self.stats.items() if item[0] in @@ -150,8 +153,8 @@ class Replicator(Daemon): proc = subprocess.Popen(popen_args) proc.communicate() if proc.returncode != 0: - self.logger.error('ERROR rsync failed with %s: %s' % - (proc.returncode, popen_args)) + self.logger.error(_('ERROR rsync failed with %(code)s: %(args)s'), + {'code': proc.returncode, 'args': popen_args}) return proc.returncode == 0 def _rsync_db(self, broker, device, http, local_id, @@ -200,7 +203,7 @@ class Replicator(Daemon): :returns: boolean indicating completion and success """ self.stats['diff'] += 1 - self.logger.debug('Syncing chunks with %s', http.host) + self.logger.debug(_('Syncing chunks with %s'), http.host) sync_table = broker.get_syncs() objects = broker.get_items_since(point, self.per_diff) while len(objects): @@ -208,8 +211,9 @@ class Replicator(Daemon): response = http.replicate('merge_items', objects, local_id) if not response or response.status >= 300 or response.status < 200: if response: - self.logger.error('ERROR Bad response %s from %s' % - (response.status, http.host)) + self.logger.error(_('ERROR Bad response %(status)s from ' + '%(host)s'), + {'status': response.status, 'host': http.host}) return False point = objects[-1]['ROWID'] objects = broker.get_items_since(point, self.per_diff) @@ -272,7 +276,7 @@ class Replicator(Daemon): http = self._http_connect(node, partition, broker.db_file) if not http: self.logger.error( - 'ERROR Unable to connect to remote server: %s' % node) + _('ERROR Unable to connect to remote server: %s'), node) return False with Timeout(self.node_timeout): response = http.replicate('sync', info['max_row'], info['hash'], @@ -310,7 +314,7 @@ class Replicator(Daemon): :param object_file: DB file name to be replicated :param node_id: node id of the node to be replicated to """ - self.logger.debug('Replicating db %s' % object_file) + self.logger.debug(_('Replicating db %s'), 
object_file) self.stats['attempted'] += 1 try: broker = self.brokerclass(object_file, pending_timeout=30) @@ -319,10 +323,10 @@ class Replicator(Daemon): info = broker.get_replication_info() except Exception, e: if 'no such table' in str(e): - self.logger.error('Quarantining DB %s' % object_file) + self.logger.error(_('Quarantining DB %s'), object_file) quarantine_db(broker.db_file, broker.db_type) else: - self.logger.exception('ERROR reading db %s' % object_file) + self.logger.exception(_('ERROR reading db %s'), object_file) self.stats['failure'] += 1 return # The db is considered deleted if the delete_timestamp value is greater @@ -355,10 +359,10 @@ class Replicator(Daemon): success = self._repl_to_node(node, broker, partition, info) except DriveNotMounted: repl_nodes.append(more_nodes.next()) - self.logger.error('ERROR Remote drive not mounted %s' % node) + self.logger.error(_('ERROR Remote drive not mounted %s'), node) except: - self.logger.exception('ERROR syncing %s with node %s' % - (object_file, node)) + self.logger.exception(_('ERROR syncing %(file)s with node' + ' %(node)s'), {'file': object_file, 'node': node}) self.stats['success' if success else 'failure'] += 1 responses.append(success) if not shouldbehere and all(responses): @@ -399,14 +403,14 @@ class Replicator(Daemon): dirs = [] ips = whataremyips() if not ips: - self.logger.error('ERROR Failed to get my own IPs?') + self.logger.error(_('ERROR Failed to get my own IPs?')) return for node in self.ring.devs: if node and node['ip'] in ips and node['port'] == self.port: if self.mount_check and not os.path.ismount( os.path.join(self.root, node['device'])): self.logger.warn( - 'Skipping %(device)s as it is not mounted' % node) + _('Skipping %(device)s as it is not mounted') % node) continue unlink_older_than( os.path.join(self.root, node['device'], 'tmp'), @@ -414,12 +418,12 @@ class Replicator(Daemon): datadir = os.path.join(self.root, node['device'], self.datadir) if os.path.isdir(datadir): 
dirs.append((datadir, node['id'])) - self.logger.info('Beginning replication run') + self.logger.info(_('Beginning replication run')) for part, object_file, node_id in self.roundrobin_datadirs(dirs): self.cpool.spawn_n( self._replicate_object, part, object_file, node_id) self.cpool.waitall() - self.logger.info('Replication run OVER') + self.logger.info(_('Replication run OVER')) self._report_stats() def run_forever(self): @@ -430,7 +434,7 @@ class Replicator(Daemon): try: self.run_once() except: - self.logger.exception('ERROR trying to replicate') + self.logger.exception(_('ERROR trying to replicate')) sleep(self.run_pause) @@ -473,7 +477,7 @@ class ReplicatorRpc(object): except Exception, e: if 'no such table' in str(e): # TODO(unknown): find a real logger - print "Quarantining DB %s" % broker.db_file + print _("Quarantining DB %s") % broker.db_file quarantine_db(broker.db_file, broker.db_type) return HTTPNotFound() raise diff --git a/swift/common/memcached.py b/swift/common/memcached.py index d40d86c21e..d41b25616f 100644 --- a/swift/common/memcached.py +++ b/swift/common/memcached.py @@ -26,7 +26,7 @@ import socket import time from bisect import bisect from hashlib import md5 - +from gettext import gettext as _ CONN_TIMEOUT = 0.3 IO_TIMEOUT = 2.0 @@ -67,9 +67,11 @@ class MemcacheRing(object): def _exception_occurred(self, server, e, action='talking'): if isinstance(e, socket.timeout): - logging.error("Timeout %s to memcached: %s" % (action, server)) + logging.error(_("Timeout %(action)s to memcached: %(server)s"), + {'action': action, 'server': server}) else: - logging.exception("Error %s to memcached: %s" % (action, server)) + logging.exception(_("Error %(action)s to memcached: %(server)s"), + {'action': action, 'server': server}) now = time.time() self._errors[server].append(time.time()) if len(self._errors[server]) > ERROR_LIMIT_COUNT: @@ -77,7 +79,7 @@ class MemcacheRing(object): if err > now - ERROR_LIMIT_TIME] if len(self._errors[server]) > 
ERROR_LIMIT_COUNT: self._error_limited[server] = now + ERROR_LIMIT_DURATION - logging.error('Error limiting server %s' % server) + logging.error(_('Error limiting server %s'), server) def _get_conns(self, key): """ diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index e94133627e..0dcedd201a 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from gettext import gettext as _ + from webob import Request from webob.exc import HTTPServerError @@ -32,7 +34,7 @@ class CatchErrorMiddleware(object): try: return self.app(env, start_response) except Exception, err: - self.logger.exception('Error: %s' % err) + self.logger.exception(_('Error: %s'), err) resp = HTTPServerError(request=Request(env), body='An error occurred', content_type='text/plain') diff --git a/swift/common/middleware/ratelimit.py b/swift/common/middleware/ratelimit.py index 82f3569067..1679c1548c 100644 --- a/swift/common/middleware/ratelimit.py +++ b/swift/common/middleware/ratelimit.py @@ -15,6 +15,7 @@ import time import eventlet from webob import Request, Response from webob.exc import HTTPNotFound +from gettext import gettext as _ from swift.common.utils import split_path, cache_from_env, get_logger from swift.proxy.server import get_container_memcache_key @@ -167,7 +168,7 @@ class RateLimitMiddleware(object): :param obj_name: object name from path ''' if account_name in self.ratelimit_blacklist: - self.logger.error('Returning 497 because of blacklisting') + self.logger.error(_('Returning 497 because of blacklisting')) return Response(status='497 Blacklisted', body='Your account has been blacklisted', request=req) if account_name in self.ratelimit_whitelist: @@ -181,14 +182,15 @@ class RateLimitMiddleware(object): need_to_sleep = self._get_sleep_time(key, max_rate) if 
self.log_sleep_time_seconds and \ need_to_sleep > self.log_sleep_time_seconds: - self.logger.info("Ratelimit sleep log: %s for %s/%s/%s" % ( - need_to_sleep, account_name, - container_name, obj_name)) + self.logger.info(_("Ratelimit sleep log: %(sleep)s for " + "%(account)s/%(container)s/%(object)s"), + {'sleep': need_to_sleep, 'account': account_name, + 'container': container_name, 'object': obj_name}) if need_to_sleep > 0: eventlet.sleep(need_to_sleep) except MaxSleepTimeHit, e: - self.logger.error('Returning 498 because of ops ' + \ - 'rate limiting (Max Sleep) %s' % e) + self.logger.error(_('Returning 498 because of ops rate ' + 'limiting (Max Sleep) %s') % str(e)) error_resp = Response(status='498 Rate Limited', body='Slow down', request=req) return error_resp @@ -207,7 +209,7 @@ class RateLimitMiddleware(object): self.memcache_client = cache_from_env(env) if not self.memcache_client: self.logger.warning( - 'Warning: Cannot ratelimit without a memcached client') + _('Warning: Cannot ratelimit without a memcached client')) return self.app(env, start_response) try: version, account, container, obj = split_path(req.path, 1, 4, True) diff --git a/swift/common/utils.py b/swift/common/utils.py index d4dc078916..5ddac9ce84 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -34,7 +34,7 @@ from ConfigParser import ConfigParser, NoSectionError, NoOptionError from optparse import OptionParser from tempfile import mkstemp import cPickle as pickle - +from gettext import gettext as _ import eventlet from eventlet import greenio, GreenPool, sleep, Timeout, listen @@ -85,8 +85,8 @@ def load_libc_function(func_name): libc = ctypes.CDLL(ctypes.util.find_library('c')) return getattr(libc, func_name) except AttributeError: - logging.warn("Unable to locate %s in libc. Leaving as a no-op." - % func_name) + logging.warn(_("Unable to locate %s in libc. 
Leaving as a no-op."), + func_name) def noop_libc_function(*args): return 0 @@ -252,12 +252,12 @@ class LoggerFileObject(object): value = value.strip() if value: if 'Connection reset by peer' in value: - self.logger.error('STDOUT: Connection reset by peer') + self.logger.error(_('STDOUT: Connection reset by peer')) else: - self.logger.error('STDOUT: %s' % value) + self.logger.error(_('STDOUT: %s'), value) def writelines(self, values): - self.logger.error('STDOUT: %s' % '#012'.join(values)) + self.logger.error(_('STDOUT: %s'), '#012'.join(values)) def close(self): pass @@ -462,12 +462,12 @@ def parse_options(usage="%prog CONFIG [options]", once=False, test_args=None): if not args: parser.print_usage() - print "Error: missing config file argument" + print _("Error: missing config file argument") sys.exit(1) config = os.path.abspath(args.pop(0)) if not os.path.exists(config): parser.print_usage() - print "Error: unable to locate %s" % config + print _("Error: unable to locate %s") % config sys.exit(1) extra_args = [] @@ -690,14 +690,14 @@ def readconf(conf, section_name=None, log_name=None, defaults=None): defaults = {} c = ConfigParser(defaults) if not c.read(conf): - print "Unable to read config file %s" % conf + print _("Unable to read config file %s") % conf sys.exit(1) if section_name: if c.has_section(section_name): conf = dict(c.items(section_name)) else: - print "Unable to find %s config section in %s" % (section_name, - conf) + print _("Unable to find %s config section in %s") % \ + (section_name, conf) sys.exit(1) if "log_name" not in conf: if log_name is not None: @@ -749,7 +749,7 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): os.path.ismount(os.path.join(devices, device)): if logger: logger.debug( - 'Skipping %s as it is not mounted' % device) + _('Skipping %s as it is not mounted'), device) continue datadir = os.path.join(devices, device, datadir) if not os.path.exists(datadir): diff --git a/swift/container/auditor.py 
b/swift/container/auditor.py index 0d237a1c82..a6f25538f5 100644 --- a/swift/container/auditor.py +++ b/swift/container/auditor.py @@ -16,6 +16,7 @@ import os import time from random import random +from gettext import gettext as _ from swift.container import server as container_server from swift.common.db import ContainerBroker @@ -51,10 +52,11 @@ class ContainerAuditor(Daemon): self.container_audit(path) if time.time() - reported >= 3600: # once an hour self.logger.info( - 'Since %s: Container audits: %s passed audit, ' - '%s failed audit' % (time.ctime(reported), - self.container_passes, - self.container_failures)) + _('Since %(time)s: Container audits: %(pass)s passed ' + 'audit, %(fail)s failed audit'), + {'time': time.ctime(reported), + 'pass': self.container_passes, + 'fail': self.container_failures}) reported = time.time() self.container_passes = 0 self.container_failures = 0 @@ -64,7 +66,7 @@ class ContainerAuditor(Daemon): def run_once(self): """Run the container audit once.""" - self.logger.info('Begin container audit "once" mode') + self.logger.info(_('Begin container audit "once" mode')) begin = reported = time.time() all_locs = audit_location_generator(self.devices, container_server.DATADIR, @@ -74,16 +76,17 @@ class ContainerAuditor(Daemon): self.container_audit(path) if time.time() - reported >= 3600: # once an hour self.logger.info( - 'Since %s: Container audits: %s passed audit, ' - '%s failed audit' % (time.ctime(reported), - self.container_passes, - self.container_failures)) + _('Since %(time)s: Container audits: %(pass)s passed ' + 'audit, %(fail)s failed audit'), + {'time': time.ctime(reported), + 'pass': self.container_passes, + 'fail': self.container_failures}) reported = time.time() self.container_passes = 0 self.container_failures = 0 elapsed = time.time() - begin self.logger.info( - 'Container audit "once" mode completed: %.02fs' % elapsed) + _('Container audit "once" mode completed: %.02fs'), elapsed) def container_audit(self, path): """ 
@@ -98,8 +101,8 @@ class ContainerAuditor(Daemon): if not broker.is_deleted(): info = broker.get_info() self.container_passes += 1 - self.logger.debug('Audit passed for %s' % broker.db_file) + self.logger.debug(_('Audit passed for %s'), broker.db_file) except Exception: self.container_failures += 1 - self.logger.exception('ERROR Could not get container info %s' % + self.logger.exception(_('ERROR Could not get container info %s'), (broker.db_file)) diff --git a/swift/container/server.py b/swift/container/server.py index 82e222435a..4eb92f0dd9 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -21,6 +21,7 @@ import traceback from urllib import unquote from xml.sax import saxutils from datetime import datetime +from gettext import gettext as _ import simplejson from eventlet.timeout import Timeout @@ -111,18 +112,18 @@ class ContainerController(object): return HTTPNotFound(request=req) elif account_response.status < 200 or \ account_response.status > 299: - self.logger.error('ERROR Account update failed ' - 'with %s:%s/%s transaction %s (will retry ' - 'later): Response %s %s' % (account_ip, - account_port, account_device, - req.headers.get('x-cf-trans-id'), - account_response.status, - account_response.reason)) + self.logger.error(_('ERROR Account update failed ' + 'with %(ip)s:%(port)s/%(device)s (will retry ' + 'later): Response %(status)s %(reason)s'), + {'ip': account_ip, 'port': account_port, + 'device': account_device, + 'status': account_response.status, + 'reason': account_response.reason}) except: - self.logger.exception('ERROR account update failed with ' - '%s:%s/%s transaction %s (will retry later)' % - (account_ip, account_port, account_device, - req.headers.get('x-cf-trans-id', '-'))) + self.logger.exception(_('ERROR account update failed with ' + '%(ip)s:%(port)s/%(device)s (will retry later)'), + {'ip': account_ip, 'port': account_port, + 'device': account_device}) return None def DELETE(self, req): @@ -394,10 +395,8 @@ class 
ContainerController(object): else: res = HTTPMethodNotAllowed() except: - self.logger.exception('ERROR __call__ error with %s %s ' - 'transaction %s' % (env.get('REQUEST_METHOD', '-'), - env.get('PATH_INFO', '-'), env.get('HTTP_X_CF_TRANS_ID', - '-'))) + self.logger.exception(_('ERROR __call__ error with %(method)s' + ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) trans_time = '%.4f' % (time.time() - start_time) log_message = '%s - - [%s] "%s %s" %s %s "%s" "%s" "%s" %s' % ( diff --git a/swift/container/updater.py b/swift/container/updater.py index 646815257b..36f567ddc7 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -19,6 +19,7 @@ import signal import sys import time from random import random, shuffle +from gettext import gettext as _ from eventlet import spawn, patcher, Timeout @@ -56,7 +57,7 @@ class ContainerUpdater(Daemon): """Get the account ring. Load it if it hasn't been yet.""" if not self.account_ring: self.logger.debug( - 'Loading account ring from %s' % self.account_ring_path) + _('Loading account ring from %s'), self.account_ring_path) self.account_ring = Ring(self.account_ring_path) return self.account_ring @@ -70,7 +71,7 @@ class ContainerUpdater(Daemon): for device in os.listdir(self.devices): dev_path = os.path.join(self.devices, device) if self.mount_check and not os.path.ismount(dev_path): - self.logger.warn('%s is not mounted' % device) + self.logger.warn(_('%s is not mounted'), device) continue con_path = os.path.join(dev_path, DATADIR) if not os.path.exists(con_path): @@ -86,7 +87,7 @@ class ContainerUpdater(Daemon): """ time.sleep(random() * self.interval) while True: - self.logger.info('Begin container update sweep') + self.logger.info(_('Begin container update sweep')) begin = time.time() pids = [] # read from account ring to ensure it's fresh @@ -107,15 +108,17 @@ class ContainerUpdater(Daemon): self.container_sweep(path) elapsed = time.time() 
- forkbegin self.logger.debug( - 'Container update sweep of %s completed: ' - '%.02fs, %s successes, %s failures, %s with no changes' - % (path, elapsed, self.successes, self.failures, - self.no_changes)) + _('Container update sweep of %(path)s completed: ' + '%(elapsed).02fs, %(success)s successes, %(fail)s ' + 'failures, %(no_change)s with no changes'), + {'path': path, 'elapsed': elapsed, + 'success': self.successes, 'fail': self.failures, + 'no_change': self.no_changes}) sys.exit() while pids: pids.remove(os.wait()[0]) elapsed = time.time() - begin - self.logger.info('Container update sweep completed: %.02fs' % + self.logger.info(_('Container update sweep completed: %.02fs'), elapsed) if elapsed < self.interval: time.sleep(self.interval - elapsed) @@ -133,9 +136,11 @@ class ContainerUpdater(Daemon): for path in self.get_paths(): self.container_sweep(path) elapsed = time.time() - begin - self.logger.info('Container update single threaded sweep completed: ' - '%.02fs, %s successes, %s failures, %s with no changes' % - (elapsed, self.successes, self.failures, self.no_changes)) + self.logger.info(_('Container update single threaded sweep completed: ' + '%(elapsed).02fs, %(success)s successes, %(fail)s failures, ' + '%(no_change)s with no changes'), + {'elapsed': elapsed, 'success': self.successes, + 'fail': self.failures, 'no_change': self.no_changes}) def container_sweep(self, path): """ @@ -181,14 +186,16 @@ class ContainerUpdater(Daemon): if successes > failures: self.successes += 1 self.logger.debug( - 'Update report sent for %s %s' % (container, dbfile)) + _('Update report sent for %(container)s %(dbfile)s'), + {'container': container, 'dbfile': dbfile}) broker.reported(info['put_timestamp'], info['delete_timestamp'], info['object_count'], info['bytes_used']) else: self.failures += 1 self.logger.debug( - 'Update report failed for %s %s' % (container, dbfile)) + _('Update report failed for %(container)s %(dbfile)s'), + {'container': container, 'dbfile': 
dbfile}) else: self.no_changes += 1 @@ -216,8 +223,8 @@ class ContainerUpdater(Daemon): 'X-Bytes-Used': bytes, 'X-Account-Override-Deleted': 'yes'}) except: - self.logger.exception('ERROR account update failed with ' - '%(ip)s:%(port)s/%(device)s (will retry later): ' % node) + self.logger.exception(_('ERROR account update failed with ' + '%(ip)s:%(port)s/%(device)s (will retry later): '), node) return 500 with Timeout(self.node_timeout): try: @@ -227,5 +234,5 @@ class ContainerUpdater(Daemon): except: if self.logger.getEffectiveLevel() <= logging.DEBUG: self.logger.exception( - 'Exception with %(ip)s:%(port)s/%(device)s' % node) + _('Exception with %(ip)s:%(port)s/%(device)s'), node) return 500 diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index cdcb0c3ecf..bdd47479b1 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -17,6 +17,7 @@ import os import time from hashlib import md5 from random import random +from gettext import gettext as _ from swift.obj import server as object_server from swift.obj.replicator import invalidate_hash @@ -52,10 +53,10 @@ class ObjectAuditor(Daemon): for path, device, partition in all_locs: self.object_audit(path, device, partition) if time.time() - reported >= 3600: # once an hour - self.logger.info( - 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors' % (time.ctime(reported), self.passes, - self.quarantines, self.errors)) + self.logger.info(_('Since %(time)s: Locally: %(pass)d ' + 'passed audit, %(quar)d quarantined, %(error)d errors'), + {'time': time.ctime(reported), 'pass': self.passes, + 'quar': self.quarantines, 'error': self.errors}) reported = time.time() self.passes = 0 self.quarantines = 0 @@ -66,7 +67,7 @@ class ObjectAuditor(Daemon): def run_once(self): """Run the object audit once.""" - self.logger.info('Begin object audit "once" mode') + self.logger.info(_('Begin object audit "once" mode')) begin = reported = time.time() all_locs = audit_location_generator(self.devices, 
object_server.DATADIR, @@ -75,17 +76,17 @@ class ObjectAuditor(Daemon): for path, device, partition in all_locs: self.object_audit(path, device, partition) if time.time() - reported >= 3600: # once an hour - self.logger.info( - 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors' % (time.ctime(reported), self.passes, - self.quarantines, self.errors)) + self.logger.info(_('Since %(time)s: Locally: %(pass)d ' + 'passed audit, %(quar)d quarantined, %(error)d errors'), + {'time': time.ctime(reported), 'pass': self.passes, + 'quar': self.quarantines, 'error': self.errors}) reported = time.time() self.passes = 0 self.quarantines = 0 self.errors = 0 elapsed = time.time() - begin self.logger.info( - 'Object audit "once" mode completed: %.02fs' % elapsed) + _('Object audit "once" mode completed: %.02fs'), elapsed) def object_audit(self, path, device, partition): """ @@ -124,8 +125,8 @@ class ObjectAuditor(Daemon): "%s" % (df.metadata['ETag'], etag)) except AuditException, err: self.quarantines += 1 - self.logger.error('ERROR Object %s failed audit and will be ' - 'quarantined: %s' % (path, err)) + self.logger.error(_('ERROR Object %(obj)s failed audit and will be ' + 'quarantined: %(err)s'), {'obj': path, 'err': err}) invalidate_hash(os.path.dirname(path)) renamer_path = os.path.dirname(path) renamer(renamer_path, os.path.join(self.devices, device, @@ -133,6 +134,6 @@ class ObjectAuditor(Daemon): return except Exception: self.errors += 1 - self.logger.exception('ERROR Trying to audit %s' % path) + self.logger.exception(_('ERROR Trying to audit %s'), path) return self.passes += 1 diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 7559cd0933..bd2b09af25 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -22,6 +22,7 @@ import logging import hashlib import itertools import cPickle as pickle +from gettext import gettext as _ import eventlet from eventlet import GreenPool, tpool, Timeout, sleep, hubs @@ -243,26 +244,27 @@ class 
ObjectReplicator(Daemon): results = proc.stdout.read() ret_val = proc.wait() except Timeout: - self.logger.error("Killing long-running rsync: %s" % str(args)) + self.logger.error(_("Killing long-running rsync: %s"), str(args)) proc.kill() return 1 # failure response code total_time = time.time() - start_time - if results: - for result in results.split('\n'): - if result == '': - continue - if result.startswith('cd+'): - continue - self.logger.info(result) + for result in results.split('\n'): + if result == '': + continue + if result.startswith('cd+'): + continue + self.logger.info(result) + if ret_val: + self.logger.error(_('Bad rsync return code: %s -> %d'), + (str(args), ret_val)) + elif results: self.logger.info( - "Sync of %s at %s complete (%.03f) [%d]" % ( - args[-2], args[-1], total_time, ret_val)) + _("Successful rsync of %(src)s at %(dst)s (%(time).03f)"), + {'src': args[-2], 'dst': args[-1], 'time': total_time}) else: self.logger.debug( - "Sync of %s at %s complete (%.03f) [%d]" % ( - args[-2], args[-1], total_time, ret_val)) - if ret_val: - self.logger.error('Bad rsync return code: %d' % ret_val) + _("Successful rsync of %(src)s at %(dst)s (%(time).03f)"), + {'src': args[-2], 'dst': args[-1], 'time': total_time}) return ret_val def rsync(self, node, job, suffixes): @@ -346,10 +348,10 @@ class ObjectReplicator(Daemon): responses.append(success) if not suffixes or (len(responses) == \ self.object_ring.replica_count and all(responses)): - self.logger.info("Removing partition: %s" % job['path']) + self.logger.info(_("Removing partition: %s"), job['path']) tpool.execute(shutil.rmtree, job['path'], ignore_errors=True) except (Exception, Timeout): - self.logger.exception("Error syncing handoff partition") + self.logger.exception(_("Error syncing handoff partition")) finally: self.partition_times.append(time.time() - begin) @@ -379,13 +381,14 @@ class ObjectReplicator(Daemon): node['device'], job['partition'], 'REPLICATE', '', headers={'Content-Length': 
'0'}).getresponse() if resp.status == 507: - self.logger.error('%s/%s responded as unmounted' % - (node['ip'], node['device'])) + self.logger.error(_('%(ip)s/%(device)s responded' + ' as unmounted'), node) attempts_left += 1 continue if resp.status != 200: - self.logger.error("Invalid response %s from %s" % - (resp.status, node['ip'])) + self.logger.error(_("Invalid response %(resp)s " + "from %(ip)s"), + {'resp': resp.status, 'ip': node['ip']}) continue remote_hash = pickle.loads(resp.read()) del resp @@ -408,7 +411,7 @@ class ObjectReplicator(Daemon): logging.exception("Error syncing with node: %s" % node) self.suffix_count += len(local_hash) except (Exception, Timeout): - self.logger.exception("Error syncing partition") + self.logger.exception(_("Error syncing partition")) finally: self.partition_times.append(time.time() - begin) @@ -418,27 +421,30 @@ class ObjectReplicator(Daemon): """ if self.replication_count: rate = self.replication_count / (time.time() - self.start) - self.logger.info("%d/%d (%.2f%%) partitions replicated in %.2f " - "seconds (%.2f/sec, %s remaining)" - % (self.replication_count, self.job_count, - self.replication_count * 100.0 / self.job_count, - time.time() - self.start, rate, - '%d%s' % compute_eta(self.start, - self.replication_count, self.job_count))) + self.logger.info(_("%(replicated)d/%(total)d (%(percentage).2f%%)" + " partitions replicated in %(time).2fs (%(rate).2f/sec, " + "%(remaining)s remaining)"), + {'replicated': self.replication_count, 'total': self.job_count, + 'percentage': self.replication_count * 100.0 / self.job_count, + 'time': time.time() - self.start, 'rate': rate, + 'remaining': '%d%s' % compute_eta(self.start, + self.replication_count, self.job_count)}) if self.suffix_count: - self.logger.info("%d suffixes checked - %.2f%% hashed, " - "%.2f%% synced" % - (self.suffix_count, - (self.suffix_hash * 100.0) / self.suffix_count, - (self.suffix_sync * 100.0) / self.suffix_count)) + self.logger.info(_("%(checked)d 
suffixes checked - " + "%(hashed).2f%% hashed, %(synced).2f%% synced"), + {'checked': self.suffix_count, + 'hashed': (self.suffix_hash * 100.0) / self.suffix_count, + 'synced': (self.suffix_sync * 100.0) / self.suffix_count}) self.partition_times.sort() - self.logger.info("Partition times: max %.4fs, min %.4fs, " - "med %.4fs" - % (self.partition_times[-1], self.partition_times[0], - self.partition_times[len(self.partition_times) // 2])) + self.logger.info(_("Partition times: max %(max).4fs, " + "min %(min).4fs, med %(med).4fs"), + {'max': self.partition_times[-1], + 'min': self.partition_times[0], + 'med': self.partition_times[ + len(self.partition_times) // 2]}) else: - self.logger.info("Nothing replicated for %s seconds." - % (time.time() - self.start)) + self.logger.info(_("Nothing replicated for %s seconds."), + (time.time() - self.start)) def kill_coros(self): """Utility function that kills all coroutines currently running.""" @@ -466,7 +472,7 @@ class ObjectReplicator(Daemon): while True: eventlet.sleep(self.lockup_timeout) if self.replication_count == self.last_replication_count: - self.logger.error("Lockup detected.. killing live coros.") + self.logger.error(_("Lockup detected.. killing live coros.")) self.kill_coros() self.last_replication_count = self.replication_count @@ -483,7 +489,7 @@ class ObjectReplicator(Daemon): obj_path = join(dev_path, 'objects') tmp_path = join(dev_path, 'tmp') if self.mount_check and not os.path.ismount(dev_path): - self.logger.warn('%s is not mounted' % local_dev['device']) + self.logger.warn(_('%s is not mounted'), local_dev['device']) continue unlink_older_than(tmp_path, time.time() - self.reclaim_age) if not os.path.exists(obj_path): @@ -521,8 +527,8 @@ class ObjectReplicator(Daemon): jobs = self.collect_jobs() for job in jobs: if not self.check_ring(): - self.logger.info( - "Ring change detected. Aborting current replication pass.") + self.logger.info(_("Ring change detected. 
Aborting " + "current replication pass.")) return if job['delete']: self.run_pool.spawn(self.update_deleted, job) @@ -531,7 +537,7 @@ class ObjectReplicator(Daemon): with Timeout(self.lockup_timeout): self.run_pool.waitall() except (Exception, Timeout): - self.logger.exception("Exception in top-level replication loop") + self.logger.exception(_("Exception in top-level replication loop")) self.kill_coros() finally: stats.kill() @@ -540,23 +546,23 @@ class ObjectReplicator(Daemon): def run_once(self): start = time.time() - self.logger.info("Running object replicator in script mode.") + self.logger.info(_("Running object replicator in script mode.")) self.replicate() total = (time.time() - start) / 60 self.logger.info( - "Object replication complete. (%.02f minutes)" % total) + _("Object replication complete. (%.02f minutes)"), total) def run_forever(self): self.logger.info("Starting object replicator in daemon mode.") # Run the replicator continually while True: start = time.time() - self.logger.info("Starting object replication pass.") + self.logger.info(_("Starting object replication pass.")) # Run the replicator self.replicate() total = (time.time() - start) / 60 self.logger.info( - "Object replication complete. (%.02f minutes)" % total) - self.logger.debug('Replication sleeping for %s seconds.' % + _("Object replication complete. 
(%.02f minutes)"), total) + self.logger.debug(_('Replication sleeping for %s seconds.'), self.run_pause) sleep(self.run_pause) diff --git a/swift/obj/server.py b/swift/obj/server.py index cdddf72edf..f724c7100b 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -26,6 +26,7 @@ from hashlib import md5 from tempfile import mkstemp from urllib import unquote from contextlib import contextmanager +from gettext import gettext as _ from webob import Request, Response, UTC from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ @@ -292,13 +293,15 @@ class ObjectController(object): if 200 <= response.status < 300: return else: - self.logger.error('ERROR Container update failed (saving ' - 'for async update later): %d response from %s:%s/%s' % - (response.status, ip, port, contdevice)) + self.logger.error(_('ERROR Container update failed ' + '(saving for async update later): %(status)d ' + 'response from %(ip)s:%(port)s/%(dev)s'), + {'status': response.status, 'ip': ip, 'port': port, + 'dev': contdevice}) except: - self.logger.exception('ERROR container update failed with ' - '%s:%s/%s transaction %s (saving for async update later)' % - (ip, port, contdevice, headers_in.get('x-cf-trans-id', '-'))) + self.logger.exception(_('ERROR container update failed with ' + '%(ip)s:%(port)s/%(dev)s (saving for async update later)'), + {'ip': ip, 'port': port, 'dev': contdevice}) async_dir = os.path.join(self.devices, objdevice, ASYNCDIR) ohash = hash_path(account, container, obj) write_pickle( @@ -565,10 +568,8 @@ class ObjectController(object): else: res = HTTPMethodNotAllowed() except: - self.logger.exception('ERROR __call__ error with %s %s ' - 'transaction %s' % (env.get('REQUEST_METHOD', '-'), - env.get('PATH_INFO', '-'), env.get('HTTP_X_CF_TRANS_ID', - '-'))) + self.logger.exception(_('ERROR __call__ error with %(method)s' + ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) trans_time = 
time.time() - start_time if self.log_requests: diff --git a/swift/obj/updater.py b/swift/obj/updater.py index 3d6a15cc4f..eb4638ea16 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -19,6 +19,7 @@ import signal import sys import time from random import random +from gettext import gettext as _ from eventlet import patcher, Timeout @@ -54,7 +55,7 @@ class ObjectUpdater(Daemon): """Get the container ring. Load it, if it hasn't been yet.""" if not self.container_ring: self.logger.debug( - 'Loading container ring from %s' % self.container_ring_path) + _('Loading container ring from %s'), self.container_ring_path) self.container_ring = Ring(self.container_ring_path) return self.container_ring @@ -62,7 +63,7 @@ class ObjectUpdater(Daemon): """Run the updater continuously.""" time.sleep(random() * self.interval) while True: - self.logger.info('Begin object update sweep') + self.logger.info(_('Begin object update sweep')) begin = time.time() pids = [] # read from container ring to ensure it's fresh @@ -71,7 +72,7 @@ class ObjectUpdater(Daemon): if self.mount_check and not \ os.path.ismount(os.path.join(self.devices, device)): self.logger.warn( - 'Skipping %s as it is not mounted' % device) + _('Skipping %s as it is not mounted'), device) continue while len(pids) >= self.concurrency: pids.remove(os.wait()[0]) @@ -86,20 +87,23 @@ class ObjectUpdater(Daemon): forkbegin = time.time() self.object_sweep(os.path.join(self.devices, device)) elapsed = time.time() - forkbegin - self.logger.info('Object update sweep of %s completed: ' - '%.02fs, %s successes, %s failures' % - (device, elapsed, self.successes, self.failures)) + self.logger.info(_('Object update sweep of %(device)s' + ' completed: %(elapsed).02fs, %(success)s successes' + ', %(fail)s failures'), + {'device': device, 'elapsed': elapsed, + 'success': self.successes, 'fail': self.failures}) sys.exit() while pids: pids.remove(os.wait()[0]) elapsed = time.time() - begin - self.logger.info('Object update sweep 
completed: %.02fs' % elapsed) + self.logger.info(_('Object update sweep completed: %.02fs'), + elapsed) if elapsed < self.interval: time.sleep(self.interval - elapsed) def run_once(self): """Run the updater once""" - self.logger.info('Begin object update single threaded sweep') + self.logger.info(_('Begin object update single threaded sweep')) begin = time.time() self.successes = 0 self.failures = 0 @@ -107,13 +111,14 @@ class ObjectUpdater(Daemon): if self.mount_check and \ not os.path.ismount(os.path.join(self.devices, device)): self.logger.warn( - 'Skipping %s as it is not mounted' % device) + _('Skipping %s as it is not mounted'), device) continue self.object_sweep(os.path.join(self.devices, device)) elapsed = time.time() - begin - self.logger.info('Object update single threaded sweep completed: ' - '%.02fs, %s successes, %s failures' % - (elapsed, self.successes, self.failures)) + self.logger.info(_('Object update single threaded sweep completed: ' + '%(elapsed).02fs, %(success)s successes, %(fail)s failures'), + {'elapsed': elapsed, 'success': self.successes, + 'fail': self.failures}) def object_sweep(self, device): """ @@ -150,7 +155,7 @@ class ObjectUpdater(Daemon): update = pickle.load(open(update_path, 'rb')) except Exception: self.logger.exception( - 'ERROR Pickle problem, quarantining %s' % update_path) + _('ERROR Pickle problem, quarantining %s'), update_path) renamer(update_path, os.path.join(device, 'quarantined', 'objects', os.path.basename(update_path))) return @@ -170,11 +175,13 @@ class ObjectUpdater(Daemon): successes.append(node['id']) if success: self.successes += 1 - self.logger.debug('Update sent for %s %s' % (obj, update_path)) + self.logger.debug(_('Update sent for %(obj)s %(path)s'), + {'obj': obj, 'path': update_path}) os.unlink(update_path) else: self.failures += 1 - self.logger.debug('Update failed for %s %s' % (obj, update_path)) + self.logger.debug(_('Update failed for %(obj)s %(path)s'), + {'obj': obj, 'path': update_path}) 
update['successes'] = successes write_pickle(update, update_path, os.path.join(device, 'tmp')) @@ -197,6 +204,6 @@ class ObjectUpdater(Daemon): resp.read() return resp.status except: - self.logger.exception('ERROR with remote server ' - '%(ip)s:%(port)s/%(device)s' % node) + self.logger.exception(_('ERROR with remote server ' + '%(ip)s:%(port)s/%(device)s'), node) return 500 diff --git a/swift/proxy/server.py b/swift/proxy/server.py index aa532cdd85..c7f48de022 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -22,6 +22,7 @@ from ConfigParser import ConfigParser from urllib import unquote, quote import uuid import functools +from gettext import gettext as _ from eventlet.timeout import Timeout from webob.exc import HTTPBadRequest, HTTPMethodNotAllowed, \ @@ -120,8 +121,8 @@ class Controller(object): :param msg: error message """ self.error_increment(node) - self.app.logger.error( - '%s %s:%s' % (msg, node['ip'], node['port'])) + self.app.logger.error(_('%(msg)s %(ip)s:%(port)s'), + {'msg': msg, 'ip': node['ip'], 'port': node['port']}) def exception_occurred(self, node, typ, additional_info): """ @@ -132,9 +133,9 @@ class Controller(object): :param additional_info: additional information to log """ self.app.logger.exception( - 'ERROR with %s server %s:%s/%s transaction %s re: %s' % (typ, - node['ip'], node['port'], node['device'], self.trans_id, - additional_info)) + _('ERROR with %(type)s server %(ip)s:%(port)s/%(device)s re: %(info)s'), + {'type': typ, 'ip': node['ip'], 'port': node['port'], + 'device': node['device'], 'info': additional_info}) def error_limited(self, node): """ @@ -155,8 +156,7 @@ class Controller(object): limited = node['errors'] > self.app.error_suppression_limit if limited: self.app.logger.debug( - 'Node error limited %s:%s (%s)' % ( - node['ip'], node['port'], node['device'])) + _('Node error limited %(ip)s:%(port)s (%(device)s)'), node) return limited def error_limit(self, node): @@ -380,8 +380,8 @@ class Controller(object): 
if etag: resp.headers['etag'] = etag.strip('"') return resp - self.app.logger.error('%s returning 503 for %s, transaction %s' % - (server_type, statuses, self.trans_id)) + self.app.logger.error(_('%(type)s returning 503 for %(statuses)s'), + {'type': server_type, 'statuses': statuses}) resp.status = '503 Internal Server Error' return resp @@ -454,9 +454,7 @@ class Controller(object): res.bytes_transferred += len(chunk) except GeneratorExit: res.client_disconnect = True - self.app.logger.info( - 'Client disconnected on read transaction %s' % - self.trans_id) + self.app.logger.info(_('Client disconnected on read')) except: self.exception_occurred(node, 'Object', 'Trying to read during GET of %s' % req.path) @@ -561,7 +559,7 @@ class ObjectController(Controller): error_response = check_metadata(req, 'object') if error_response: return error_response - container_partition, containers, _, req.acl = \ + container_partition, containers, _junk, req.acl = \ self.container_info(self.account_name, self.container_name) if 'swift.authorize' in req.environ: aresp = req.environ['swift.authorize'](req) @@ -603,7 +601,7 @@ class ObjectController(Controller): @delay_denial def PUT(self, req): """HTTP PUT request handler.""" - container_partition, containers, _, req.acl = \ + container_partition, containers, _junk, req.acl = \ self.container_info(self.account_name, self.container_name) if 'swift.authorize' in req.environ: aresp = req.environ['swift.authorize'](req) @@ -618,7 +616,7 @@ class ObjectController(Controller): req.headers['X-Timestamp'] = normalize_timestamp(time.time()) # Sometimes the 'content-type' header exists, but is set to None. 
if not req.headers.get('content-type'): - guessed_type, _ = mimetypes.guess_type(req.path_info) + guessed_type, _junk = mimetypes.guess_type(req.path_info) if not guessed_type: req.headers['Content-Type'] = 'application/octet-stream' else: @@ -698,9 +696,9 @@ class ObjectController(Controller): containers.insert(0, container) if len(conns) <= len(nodes) / 2: self.app.logger.error( - 'Object PUT returning 503, %s/%s required connections, ' - 'transaction %s' % - (len(conns), len(nodes) / 2 + 1, self.trans_id)) + _('Object PUT returning 503, %(conns)s/%(nodes)s ' + 'required connections'), + {'conns': len(conns), 'nodes': len(nodes) // 2 + 1}) return HTTPServiceUnavailable(request=req) try: req.bytes_transferred = 0 @@ -730,27 +728,26 @@ class ObjectController(Controller): conns.remove(conn) if len(conns) <= len(nodes) / 2: self.app.logger.error( - 'Object PUT exceptions during send, %s/%s ' - 'required connections, transaction %s' % - (len(conns), len(nodes) // 2 + 1, - self.trans_id)) + _('Object PUT exceptions during send, ' + '%(conns)s/%(nodes)s required connections'), + {'conns': len(conns), + 'nodes': len(nodes) // 2 + 1}) return HTTPServiceUnavailable(request=req) if req.headers.get('transfer-encoding') and chunk == '': break except ChunkReadTimeout, err: self.app.logger.info( - 'ERROR Client read timeout (%ss)' % err.seconds) + _('ERROR Client read timeout (%ss)'), err.seconds) return HTTPRequestTimeout(request=req) except: req.client_disconnect = True self.app.logger.exception( - 'ERROR Exception causing client disconnect') + _('ERROR Exception causing client disconnect')) return Response(status='499 Client Disconnect') if req.content_length and req.bytes_transferred < req.content_length: req.client_disconnect = True self.app.logger.info( - 'Client disconnected without sending enough data %s' % - self.trans_id) + _('Client disconnected without sending enough data')) return Response(status='499 Client Disconnect') statuses = [] reasons = [] @@ -774,7 +771,7 
@@ class ObjectController(Controller): 'Trying to get final status of PUT to %s' % req.path) if len(etags) > 1: self.app.logger.error( - 'Object servers returned %s mismatched etags' % len(etags)) + _('Object servers returned %s mismatched etags'), len(etags)) return HTTPServerError(request=req) etag = len(etags) and etags.pop() or None while len(statuses) < len(nodes): @@ -798,7 +795,7 @@ class ObjectController(Controller): @delay_denial def DELETE(self, req): """HTTP DELETE request handler.""" - container_partition, containers, _, req.acl = \ + container_partition, containers, _junk, req.acl = \ self.container_info(self.account_name, self.container_name) if 'swift.authorize' in req.environ: aresp = req.environ['swift.authorize'](req) @@ -848,7 +845,7 @@ class ObjectController(Controller): if not dest.startswith('/'): dest = '/' + dest try: - _, dest_container, dest_object = dest.split('/', 2) + _junk, dest_container, dest_object = dest.split('/', 2) except ValueError: return HTTPPreconditionFailed(request=req, body='Destination header must be of the form ' @@ -1116,9 +1113,8 @@ class ContainerController(Controller): # If even one node doesn't do the delete, we can't be sure # what the outcome will be once everything is in sync; so # we 503. 
- self.app.logger.error('Returning 503 because not all ' - 'container nodes confirmed DELETE, transaction %s' % - self.trans_id) + self.app.logger.error(_('Returning 503 because not all ' + 'container nodes confirmed DELETE')) return HTTPServiceUnavailable(request=req) if resp.status_int == 202: # Indicates no server had the container return HTTPNotFound(request=req) @@ -1440,7 +1436,7 @@ class BaseApplication(object): return resp return handler(req) except Exception: - self.logger.exception('ERROR Unhandled exception in request') + self.logger.exception(_('ERROR Unhandled exception in request')) return HTTPServerError(request=req) diff --git a/test/unit/auth/test_server.py b/test/unit/auth/test_server.py index cb9070a22f..1d691454b3 100644 --- a/test/unit/auth/test_server.py +++ b/test/unit/auth/test_server.py @@ -462,7 +462,7 @@ class TestAuthServer(unittest.TestCase): auth_server.http_connect = fake_http_connect(201) url = self.controller.create_user('test', 'tester', 'testing') self.assertEquals(log.getvalue().rsplit(' ', 1)[0], - "auth SUCCESS create_user('test', 'tester', _, False, False) " + "SUCCESS create_user('test', 'tester', _, False, False) " "= %s" % repr(url)) log.truncate(0) def start_response(*args): @@ -491,7 +491,7 @@ class TestAuthServer(unittest.TestCase): logsegs[1] = '[01/Jan/2001:01:02:03 +0000]' logsegs[2:] = logsegs[2].split(' ') logsegs[-1] = '0.1234' - self.assertEquals(' '.join(logsegs), 'auth testhost - - ' + self.assertEquals(' '.join(logsegs), 'testhost - - ' '[01/Jan/2001:01:02:03 +0000] "GET /v1/test/auth?test=True ' 'HTTP/1.0" 204 - "-" "-" - - - - - - - - - "-" "None" "-" ' '0.1234') @@ -519,7 +519,7 @@ class TestAuthServer(unittest.TestCase): logsegs[1] = '[01/Jan/2001:01:02:03 +0000]' logsegs[2:] = logsegs[2].split(' ') logsegs[-1] = '0.1234' - self.assertEquals(' '.join(logsegs), 'auth None - - [01/Jan/2001:' + self.assertEquals(' '.join(logsegs), 'None - - [01/Jan/2001:' '01:02:03 +0000] "GET /v1/test/auth HTTP/1.0" 204 - "-" 
"-" - ' '- - - - - - - - "-" "None" "Content-Length: 0\n' 'X-Storage-User: tester\nX-Storage-Pass: testing" 0.1234') @@ -556,7 +556,7 @@ class TestAuthServer(unittest.TestCase): 'HTTP_X_STORAGE_PASS': 'testing'}, start_response) self.assert_(log.getvalue().startswith( - 'auth ERROR Unhandled exception in ReST request'), + 'ERROR Unhandled exception in ReST request'), log.getvalue()) log.truncate(0) finally: diff --git a/test/unit/common/test_daemon.py b/test/unit/common/test_daemon.py index c5b95a3013..aa85987d25 100644 --- a/test/unit/common/test_daemon.py +++ b/test/unit/common/test_daemon.py @@ -50,7 +50,7 @@ class TestDaemon(unittest.TestCase): def test_create(self): d = daemon.Daemon({}) self.assertEquals(d.conf, {}) - self.assert_(isinstance(d.logger, utils.NamedLogger)) + self.assert_(isinstance(d.logger, utils.LogAdapter)) def test_stubs(self): d = daemon.Daemon({}) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index c41d147f79..7abc857ce8 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -283,35 +283,27 @@ Error: unable to locate %s utils.sys.stdout = orig_stdout utils.sys.stderr = orig_stderr - def test_NamedLogger(self): - sio = StringIO() - logger = logging.getLogger() - logger.addHandler(logging.StreamHandler(sio)) - nl = utils.NamedLogger(logger, 'server') - nl.warn('test') - self.assertEquals(sio.getvalue(), 'server test\n') - def test_get_logger(self): sio = StringIO() logger = logging.getLogger() logger.addHandler(logging.StreamHandler(sio)) logger = utils.get_logger(None, 'server') logger.warn('test1') - self.assertEquals(sio.getvalue(), 'server test1\n') + self.assertEquals(sio.getvalue(), 'test1\n') logger.debug('test2') - self.assertEquals(sio.getvalue(), 'server test1\n') + self.assertEquals(sio.getvalue(), 'test1\n') logger = utils.get_logger({'log_level': 'DEBUG'}, 'server') logger.debug('test3') - self.assertEquals(sio.getvalue(), 'server test1\nserver test3\n') + 
self.assertEquals(sio.getvalue(), 'test1\ntest3\n') # Doesn't really test that the log facility is truly being used all the # way to syslog; but exercises the code. logger = utils.get_logger({'log_facility': 'LOG_LOCAL3'}, 'server') logger.warn('test4') self.assertEquals(sio.getvalue(), - 'server test1\nserver test3\nserver test4\n') + 'test1\nserver test3\nserver test4\n') logger.debug('test5') self.assertEquals(sio.getvalue(), - 'server test1\nserver test3\nserver test4\n') + 'test1\nserver test3\nserver test4\n') def test_storage_directory(self): self.assertEquals(utils.storage_directory('objects', '1', 'ABCDEF'), From ef4e23ee435487cd7c94d9a7f092138cf05ad749 Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Mon, 20 Dec 2010 21:57:19 +0000 Subject: [PATCH 039/199] tests fixed --- test/unit/common/test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 7abc857ce8..b888686660 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -300,10 +300,10 @@ Error: unable to locate %s logger = utils.get_logger({'log_facility': 'LOG_LOCAL3'}, 'server') logger.warn('test4') self.assertEquals(sio.getvalue(), - 'test1\nserver test3\nserver test4\n') + 'test1\ntest3\ntest4\n') logger.debug('test5') self.assertEquals(sio.getvalue(), - 'test1\nserver test3\nserver test4\n') + 'test1\ntest3\ntest4\n') def test_storage_directory(self): self.assertEquals(utils.storage_directory('objects', '1', 'ABCDEF'), From 0b3fac8992360fe203d8e88caa650aa4a285ddeb Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Mon, 20 Dec 2010 22:10:58 +0000 Subject: [PATCH 040/199] add more gettext calls --- swift/proxy/server.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 9f34824c08..af0b1a48cc 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -172,10 +172,11 @@ 
class SegmentedIterable(object): raise except Exception, err: if not getattr(err, 'swift_logged', False): - self.controller.app.logger.exception('ERROR: While processing ' - 'manifest /%s/%s/%s %s' % (self.controller.account_name, - self.controller.container_name, - self.controller.object_name, self.controller.trans_id)) + self.controller.app.logger.exception(_('ERROR: While ' + 'processing manifest /%(acc)s/%(cont)s/%(obj)s'), + {'acc': self.controller.account_name, + 'cont': self.controller.container_name, + 'obj': self.controller.object_name}) err.swift_logged = True self.response.status_int = 503 raise @@ -204,10 +205,11 @@ class SegmentedIterable(object): raise except Exception, err: if not getattr(err, 'swift_logged', False): - self.controller.app.logger.exception('ERROR: While processing ' - 'manifest /%s/%s/%s %s' % (self.controller.account_name, - self.controller.container_name, - self.controller.object_name, self.controller.trans_id)) + self.controller.app.logger.exception(_('ERROR: While ' + 'processing manifest /%(acc)s/%(cont)s/%(obj)s'), + {'acc': self.controller.account_name, + 'cont': self.controller.container_name, + 'obj': self.controller.object_name}) err.swift_logged = True self.response.status_int = 503 raise @@ -250,10 +252,11 @@ class SegmentedIterable(object): raise except Exception, err: if not getattr(err, 'swift_logged', False): - self.controller.app.logger.exception('ERROR: While processing ' - 'manifest /%s/%s/%s %s' % (self.controller.account_name, - self.controller.container_name, - self.controller.object_name, self.controller.trans_id)) + self.controller.app.logger.exception(_('ERROR: While ' + 'processing manifest /%(acc)s/%(cont)s/%(obj)s'), + {'acc': self.controller.account_name, + 'cont': self.controller.container_name, + 'obj': self.controller.object_name}) err.swift_logged = True self.response.status_int = 503 raise From 2c7dfbe849025ea9ccbd18660abc12454f4e1388 Mon Sep 17 00:00:00 2001 From: John Dickinson Date: Tue, 21 Dec 
2010 17:09:32 -0600 Subject: [PATCH 041/199] fixes variable name collision --- swift/stats/access_processor.py | 6 ++++-- swift/stats/stats_processor.py | 5 +++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index 0f5814076f..b7e475833f 100644 --- a/swift/stats/access_processor.py +++ b/swift/stats/access_processor.py @@ -127,7 +127,8 @@ class AccessLogProcessor(object): d['code'] = int(d['code']) return d - def process(self, obj_stream, account, container, object_name): + def process(self, obj_stream, data_object_account, data_object_container, + data_object_name): '''generate hourly groupings of data from one access log file''' hourly_aggr_info = {} total_lines = 0 @@ -191,7 +192,8 @@ class AccessLogProcessor(object): hourly_aggr_info[aggr_key] = d if bad_lines > (total_lines * self.warn_percent): - name = '/'.join([account, container, object_name]) + name = '/'.join([data_object_account, data_object_container, + data_object_name]) self.logger.warning('I found a bunch of bad lines in %s '\ '(%d bad, %d total)' % (name, bad_lines, total_lines)) return hourly_aggr_info diff --git a/swift/stats/stats_processor.py b/swift/stats/stats_processor.py index 6caaae7840..a590a74108 100644 --- a/swift/stats/stats_processor.py +++ b/swift/stats/stats_processor.py @@ -22,10 +22,11 @@ class StatsLogProcessor(object): def __init__(self, conf): self.logger = get_logger(conf) - def process(self, obj_stream, account, container, object_name): + def process(self, obj_stream, data_object_account, data_object_container, + data_object_name): '''generate hourly groupings of data from one stats log file''' account_totals = {} - year, month, day, hour, _ = object_name.split('/') + year, month, day, hour, _ = data_object_name.split('/') for line in obj_stream: if not line: continue From 24590669d8e132c9b8fa4ac6f6bb14d7f93acc91 Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Wed, 22 Dec 2010 16:36:31 +0000 
Subject: [PATCH 042/199] i18n finishup --- swift/account/auditor.py | 1 - swift/account/reaper.py | 1 - swift/account/server.py | 1 - swift/common/bench.py | 1 - swift/common/bufferedhttp.py | 1 - swift/common/daemon.py | 3 +++ swift/common/db.py | 1 - swift/common/db_replicator.py | 1 - swift/common/memcached.py | 1 - swift/common/middleware/catch_errors.py | 2 -- swift/common/middleware/ratelimit.py | 1 - swift/common/utils.py | 1 - swift/common/wsgi.py | 2 ++ swift/container/auditor.py | 1 - swift/container/server.py | 1 - swift/container/updater.py | 1 - swift/obj/auditor.py | 1 - swift/obj/replicator.py | 1 - swift/obj/server.py | 1 - swift/obj/updater.py | 1 - swift/proxy/server.py | 1 - test/__init__.py | 7 +++++++ test/unit/proxy/test_server.py | 20 ++++++++++---------- 23 files changed, 22 insertions(+), 30 deletions(-) diff --git a/swift/account/auditor.py b/swift/account/auditor.py index 01afb7d469..36e1e0a0d8 100644 --- a/swift/account/auditor.py +++ b/swift/account/auditor.py @@ -16,7 +16,6 @@ import os import time from random import random -from gettext import gettext as _ from swift.account import server as account_server from swift.common.db import AccountBroker diff --git a/swift/account/reaper.py b/swift/account/reaper.py index 0225209392..6d2112927f 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -18,7 +18,6 @@ import random from logging import DEBUG from math import sqrt from time import time -from gettext import gettext as _ from eventlet import GreenPool, sleep diff --git a/swift/account/server.py b/swift/account/server.py index 67572165f5..53d604ce93 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -20,7 +20,6 @@ import time import traceback from urllib import unquote from xml.sax import saxutils -from gettext import gettext as _ from webob import Request, Response from webob.exc import HTTPAccepted, HTTPBadRequest, \ diff --git a/swift/common/bench.py b/swift/common/bench.py index 
1e525c2e7d..b698ff310b 100644 --- a/swift/common/bench.py +++ b/swift/common/bench.py @@ -18,7 +18,6 @@ import time import random from urlparse import urlparse from contextlib import contextmanager -from gettext import gettext as _ import eventlet.pools from eventlet.green.httplib import CannotSendRequest diff --git a/swift/common/bufferedhttp.py b/swift/common/bufferedhttp.py index 536793fc87..81c54d0722 100644 --- a/swift/common/bufferedhttp.py +++ b/swift/common/bufferedhttp.py @@ -29,7 +29,6 @@ BufferedHTTPResponse. from urllib import quote import logging import time -from gettext import gettext as _ from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \ HTTPResponse, HTTPSConnection, _UNKNOWN diff --git a/swift/common/daemon.py b/swift/common/daemon.py index d305c247f6..e5ed3f7caa 100644 --- a/swift/common/daemon.py +++ b/swift/common/daemon.py @@ -16,7 +16,9 @@ import os import sys import signal +import gettext from re import sub + from swift.common import utils @@ -40,6 +42,7 @@ class Daemon(object): utils.validate_configuration() utils.capture_stdio(self.logger, **kwargs) utils.drop_privileges(self.conf.get('user', 'swift')) + gettext.install('swift', unicode=1) def kill_children(*args): signal.signal(signal.SIGTERM, signal.SIG_IGN) diff --git a/swift/common/db.py b/swift/common/db.py index 6f7372c22c..7315159bb7 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -27,7 +27,6 @@ import cPickle as pickle import errno from random import randint from tempfile import mkstemp -from gettext import gettext as _ from eventlet import sleep import simplejson as json diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 89e0590f7e..0588b841a0 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -20,7 +20,6 @@ import random import math import time import shutil -from gettext import gettext as _ from eventlet import GreenPool, sleep, Timeout from eventlet.green import subprocess diff 
--git a/swift/common/memcached.py b/swift/common/memcached.py index d41b25616f..193456524a 100644 --- a/swift/common/memcached.py +++ b/swift/common/memcached.py @@ -26,7 +26,6 @@ import socket import time from bisect import bisect from hashlib import md5 -from gettext import gettext as _ CONN_TIMEOUT = 0.3 IO_TIMEOUT = 2.0 diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index 0dcedd201a..5fb8c33592 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gettext import gettext as _ - from webob import Request from webob.exc import HTTPServerError diff --git a/swift/common/middleware/ratelimit.py b/swift/common/middleware/ratelimit.py index 1679c1548c..c0827da88b 100644 --- a/swift/common/middleware/ratelimit.py +++ b/swift/common/middleware/ratelimit.py @@ -15,7 +15,6 @@ import time import eventlet from webob import Request, Response from webob.exc import HTTPNotFound -from gettext import gettext as _ from swift.common.utils import split_path, cache_from_env, get_logger from swift.proxy.server import get_container_memcache_key diff --git a/swift/common/utils.py b/swift/common/utils.py index 5ddac9ce84..c837456591 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -34,7 +34,6 @@ from ConfigParser import ConfigParser, NoSectionError, NoOptionError from optparse import OptionParser from tempfile import mkstemp import cPickle as pickle -from gettext import gettext as _ import eventlet from eventlet import greenio, GreenPool, sleep, Timeout, listen diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index a93c21aa8a..079e4277e1 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -21,6 +21,7 @@ import signal import sys import time import mimetools +import gettext import eventlet from eventlet import greenio, GreenPool, sleep, 
wsgi, listen @@ -120,6 +121,7 @@ def run_wsgi(conf_file, app_section, *args, **kwargs): sock = get_socket(conf, default_port=kwargs.get('default_port', 8080)) # remaining tasks should not require elevated privileges drop_privileges(conf.get('user', 'swift')) + gettext.install('swift', unicode=1) # finally after binding to ports and privilege drop, run app __init__ code app = loadapp('config:%s' % conf_file, global_conf={'log_name': log_name}) diff --git a/swift/container/auditor.py b/swift/container/auditor.py index a6f25538f5..082e6e2b37 100644 --- a/swift/container/auditor.py +++ b/swift/container/auditor.py @@ -16,7 +16,6 @@ import os import time from random import random -from gettext import gettext as _ from swift.container import server as container_server from swift.common.db import ContainerBroker diff --git a/swift/container/server.py b/swift/container/server.py index 4eb92f0dd9..fc06194de6 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -21,7 +21,6 @@ import traceback from urllib import unquote from xml.sax import saxutils from datetime import datetime -from gettext import gettext as _ import simplejson from eventlet.timeout import Timeout diff --git a/swift/container/updater.py b/swift/container/updater.py index 36f567ddc7..9056de3202 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -19,7 +19,6 @@ import signal import sys import time from random import random, shuffle -from gettext import gettext as _ from eventlet import spawn, patcher, Timeout diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index bdd47479b1..1d445ec65f 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -17,7 +17,6 @@ import os import time from hashlib import md5 from random import random -from gettext import gettext as _ from swift.obj import server as object_server from swift.obj.replicator import invalidate_hash diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index bd2b09af25..ed77bf5a10 100644 
--- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -22,7 +22,6 @@ import logging import hashlib import itertools import cPickle as pickle -from gettext import gettext as _ import eventlet from eventlet import GreenPool, tpool, Timeout, sleep, hubs diff --git a/swift/obj/server.py b/swift/obj/server.py index d47f2b0634..7c139d7775 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -26,7 +26,6 @@ from hashlib import md5 from tempfile import mkstemp from urllib import unquote from contextlib import contextmanager -from gettext import gettext as _ from webob import Request, Response, UTC from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ diff --git a/swift/obj/updater.py b/swift/obj/updater.py index eb4638ea16..f958166679 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -19,7 +19,6 @@ import signal import sys import time from random import random -from gettext import gettext as _ from eventlet import patcher, Timeout diff --git a/swift/proxy/server.py b/swift/proxy/server.py index af0b1a48cc..9838f86802 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -28,7 +28,6 @@ from datetime import datetime from urllib import unquote, quote import uuid import functools -from gettext import gettext as _ from hashlib import md5 from eventlet import sleep diff --git a/test/__init__.py b/test/__init__.py index e69de29bb2..db145240dc 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -0,0 +1,7 @@ +# See http://code.google.com/p/python-nose/issues/detail?id=373 +# The code below enables nosetests to work with i18n _() blocks + +import __builtin__ + +setattr(__builtin__, '_', lambda x: x) + diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index b7d43c0fb2..4577cd4dac 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -3019,8 +3019,8 @@ class TestSegmentedIterable(unittest.TestCase): self.assertRaises(Exception, 
proxy_server.SegmentedIterable(self.controller, None, [None])._load_next_segment) - self.assertEquals(self.controller.exception_args[0], - 'ERROR: While processing manifest /a/c/o tx1') + self.assert_(self.controller.exception_args[0].startswith( + 'ERROR: While processing manifest')) def test_load_next_segment_with_no_segments(self): self.assertRaises(StopIteration, @@ -3079,8 +3079,8 @@ class TestSegmentedIterable(unittest.TestCase): self.assertRaises(Exception, proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])._load_next_segment) - self.assertEquals(self.controller.exception_args[0], - 'ERROR: While processing manifest /a/c/o tx1') + self.assert_(self.controller.exception_args[0].startswith( + 'ERROR: While processing manifest')) self.assertEquals(str(self.controller.exception_info[1]), 'Could not load object segment /a/lc/o1: 404') @@ -3088,8 +3088,8 @@ class TestSegmentedIterable(unittest.TestCase): # Iterator value isn't a dict self.assertRaises(Exception, ''.join, proxy_server.SegmentedIterable(self.controller, None, [None])) - self.assertEquals(self.controller.exception_args[0], - 'ERROR: While processing manifest /a/c/o tx1') + self.assert_(self.controller.exception_args[0].startswith( + 'ERROR: While processing manifest')) def test_iter_with_no_segments(self): segit = proxy_server.SegmentedIterable(self.controller, 'lc', []) @@ -3118,8 +3118,8 @@ class TestSegmentedIterable(unittest.TestCase): self.assertRaises(Exception, ''.join, proxy_server.SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])) - self.assertEquals(self.controller.exception_args[0], - 'ERROR: While processing manifest /a/c/o tx1') + self.assert_(self.controller.exception_args[0].startswith( + 'ERROR: While processing manifest')) self.assertEquals(str(self.controller.exception_info[1]), 'Could not load object segment /a/lc/o1: 404') @@ -3128,8 +3128,8 @@ class TestSegmentedIterable(unittest.TestCase): self.assertRaises(Exception, 
proxy_server.SegmentedIterable(self.controller, None, [None]).app_iter_range(None, None).next) - self.assertEquals(self.controller.exception_args[0], - 'ERROR: While processing manifest /a/c/o tx1') + self.assert_(self.controller.exception_args[0].startswith( + 'ERROR: While processing manifest')) def test_app_iter_range_with_no_segments(self): self.assertEquals(''.join(proxy_server.SegmentedIterable( From 8ad87f634c3403085964619d410eae60b3c64afe Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Wed, 22 Dec 2010 16:48:25 +0000 Subject: [PATCH 043/199] fix fancy txnid logging --- bin/st | 2 ++ swift/common/utils.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/bin/st b/bin/st index b41aca67ec..f7009daf21 100755 --- a/bin/st +++ b/bin/st @@ -44,6 +44,8 @@ except: try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection + import __builtin__ # bufferedhttp uses automagic gettext + setattr(__builtin__, '_', lambda x: x) except: from httplib import HTTPConnection diff --git a/swift/common/utils.py b/swift/common/utils.py index c837456591..da71253e7b 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -341,7 +341,8 @@ class NamedFormatter(logging.Formatter): def format(self, record): msg = logging.Formatter.format(self, record) - if record.levelno != logging.INFO and self.logger.txn_id: + if self.logger.txn_id and (record.levelno != logging.INFO or + self.logger.txn_id not in msg): return '%s %s (txn: %s)' % (self.server, msg, self.logger.txn_id) else: return '%s %s' % (self.server, msg) From f432269013238b196681e44ed8d6726a71696795 Mon Sep 17 00:00:00 2001 From: Michael Barton Date: Wed, 22 Dec 2010 17:18:30 +0000 Subject: [PATCH 044/199] add gettext to all binaries --- bin/st | 4 ++-- bin/swift-account-audit | 2 ++ bin/swift-auth-add-user | 2 ++ bin/swift-auth-recreate-accounts | 2 ++ bin/swift-auth-update-reseller-prefixes | 2 ++ bin/swift-bench | 2 ++ bin/swift-drive-audit | 2 ++ bin/swift-get-nodes | 
3 +++ bin/swift-log-uploader | 2 ++ bin/swift-object-info | 2 ++ bin/swift-ring-builder | 2 ++ bin/swift-stats-populate | 2 ++ bin/swift-stats-report | 2 ++ 13 files changed, 27 insertions(+), 2 deletions(-) diff --git a/bin/st b/bin/st index f7009daf21..79f331558f 100755 --- a/bin/st +++ b/bin/st @@ -44,8 +44,8 @@ except: try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection - import __builtin__ # bufferedhttp uses automagic gettext - setattr(__builtin__, '_', lambda x: x) + import gettext + gettext.install('swift', unicode=1) except: from httplib import HTTPConnection diff --git a/bin/swift-account-audit b/bin/swift-account-audit index fe611562d7..1f6aceb2c8 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -20,6 +20,7 @@ from urllib import quote from hashlib import md5 import getopt from itertools import chain +import gettext import simplejson from eventlet.greenpool import GreenPool @@ -324,6 +325,7 @@ class Auditor(object): if __name__ == '__main__': + gettext.install('swift', unicode=1) try: optlist, args = getopt.getopt(sys.argv[1:], 'c:r:e:d') except getopt.GetoptError, err: diff --git a/bin/swift-auth-add-user b/bin/swift-auth-add-user index d502dc83a8..2d9819dfc8 100755 --- a/bin/swift-auth-add-user +++ b/bin/swift-auth-add-user @@ -18,11 +18,13 @@ from ConfigParser import ConfigParser from optparse import OptionParser from os.path import basename from sys import argv, exit +import gettext from swift.common.bufferedhttp import http_connect_raw as http_connect if __name__ == '__main__': + gettext.install('swift', unicode=1) default_conf = '/etc/swift/auth-server.conf' parser = OptionParser( usage='Usage: %prog [options] ') diff --git a/bin/swift-auth-recreate-accounts b/bin/swift-auth-recreate-accounts index e17bf2da3b..a8ee20e0e8 100755 --- a/bin/swift-auth-recreate-accounts +++ b/bin/swift-auth-recreate-accounts @@ -17,10 +17,12 @@ from ConfigParser import ConfigParser from optparse import OptionParser 
from sys import argv, exit +import gettext from swift.common.bufferedhttp import http_connect_raw as http_connect if __name__ == '__main__': + gettext.install('swift', unicode=1) default_conf = '/etc/swift/auth-server.conf' parser = OptionParser(usage='Usage: %prog [options]') parser.add_option('-c', '--conf', dest='conf', default=default_conf, diff --git a/bin/swift-auth-update-reseller-prefixes b/bin/swift-auth-update-reseller-prefixes index 41a4bf6a76..cb09bd9872 100755 --- a/bin/swift-auth-update-reseller-prefixes +++ b/bin/swift-auth-update-reseller-prefixes @@ -16,11 +16,13 @@ from os.path import basename from sys import argv, exit +import gettext from swift.common.db import get_db_connection if __name__ == '__main__': + gettext.install('swift', unicode=1) app = basename(argv[0]) if len(argv) != 3: exit(''' diff --git a/bin/swift-bench b/bin/swift-bench index ab332482cd..2c3e08318b 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -20,6 +20,7 @@ import sys import signal import uuid from optparse import OptionParser +import gettext from swift.common.bench import BenchController from swift.common.utils import readconf, NamedLogger @@ -55,6 +56,7 @@ SAIO_DEFAULTS = { } if __name__ == '__main__': + gettext.install('swift', unicode=1) usage = "usage: %prog [OPTIONS] [CONF_FILE]" usage += """\n\nConf file with SAIO defaults: diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index cde28c1ed7..64c478e203 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ -20,6 +20,7 @@ import re import subprocess import sys from ConfigParser import ConfigParser +import gettext from swift.common.utils import get_logger @@ -86,6 +87,7 @@ def comment_fstab(mount_point): os.rename('/etc/fstab.new', '/etc/fstab') if __name__ == '__main__': + gettext.install('swift', unicode=1) c = ConfigParser() try: conf_path = sys.argv[1] diff --git a/bin/swift-get-nodes b/bin/swift-get-nodes index f24dd48f96..69643f6a84 100755 --- a/bin/swift-get-nodes +++ 
b/bin/swift-get-nodes @@ -16,11 +16,14 @@ import sys import urllib +import gettext from swift.common.ring import Ring from swift.common.utils import hash_path +gettext.install('swift', unicode=1) + if len(sys.argv) < 3 or len(sys.argv) > 5: print 'Usage: %s [] []' \ % sys.argv[0] diff --git a/bin/swift-log-uploader b/bin/swift-log-uploader index e533cad824..972303f67b 100755 --- a/bin/swift-log-uploader +++ b/bin/swift-log-uploader @@ -15,12 +15,14 @@ # limitations under the License. import sys +import gettext from swift.stats.log_uploader import LogUploader from swift.common.utils import parse_options from swift.common import utils if __name__ == '__main__': + gettext.install('swift', unicode=1) conf_file, options = parse_options(usage="Usage: %prog CONFIG_FILE PLUGIN") try: plugin = options['extra_args'][0] diff --git a/bin/swift-object-info b/bin/swift-object-info index 57f2522071..268b991bee 100755 --- a/bin/swift-object-info +++ b/bin/swift-object-info @@ -18,12 +18,14 @@ import sys import cPickle as pickle from datetime import datetime from hashlib import md5 +import gettext from swift.common.ring import Ring from swift.obj.server import read_metadata from swift.common.utils import hash_path if __name__ == '__main__': + gettext.install('swift', unicode=1) if len(sys.argv) <= 1: print "Usage: %s OBJECT_FILE" % sys.argv[0] sys.exit(1) diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index 50353df256..1d53a30973 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -21,6 +21,7 @@ from os import mkdir from os.path import basename, dirname, exists, join as pathjoin from sys import argv, exit from time import time +import gettext from swift.common.ring import RingBuilder @@ -174,6 +175,7 @@ swift-ring-builder set_min_part_hours if __name__ == '__main__': + gettext.install('swift', unicode=1) if len(argv) < 2: print ''' swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s diff --git a/bin/swift-stats-populate b/bin/swift-stats-populate 
index 8ea210cb65..985fa50c82 100755 --- a/bin/swift-stats-populate +++ b/bin/swift-stats-populate @@ -21,6 +21,7 @@ from optparse import OptionParser from sys import exit, argv from time import time from uuid import uuid4 +import gettext from eventlet import GreenPool, patcher, sleep from eventlet.pools import Pool @@ -75,6 +76,7 @@ def report(success): if __name__ == '__main__': global begun, created, item_type, next_report, need_to_create, retries_done + gettext.install('swift', unicode=1) patcher.monkey_patch() parser = OptionParser() diff --git a/bin/swift-stats-report b/bin/swift-stats-report index 3f735877cf..158ae37c75 100755 --- a/bin/swift-stats-report +++ b/bin/swift-stats-report @@ -23,6 +23,7 @@ from optparse import OptionParser from sys import argv, exit, stderr from time import time from uuid import uuid4 +import gettext from eventlet import GreenPool, hubs, patcher, sleep, Timeout from eventlet.pools import Pool @@ -746,6 +747,7 @@ def object_delete_report(coropool, connpool, options): if __name__ == '__main__': + gettext.install('swift', unicode=1) patcher.monkey_patch() hubs.get_hub().debug_exceptions = False From af99fb17e05dde0d769dfda1c90fd7dda36fdd32 Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 22 Dec 2010 11:35:11 -0800 Subject: [PATCH 045/199] Fixed probe tests to not use relativity (on imports) --- test/probe/__init__.py | 0 test/probe/test_account_failures.py | 2 +- test/probe/test_container_failures.py | 2 +- test/probe/test_object_async_update.py | 2 +- test/probe/test_object_handoff.py | 2 +- test/probe/test_running_with_each_type_down.py | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) create mode 100644 test/probe/__init__.py diff --git a/test/probe/__init__.py b/test/probe/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/probe/test_account_failures.py b/test/probe/test_account_failures.py index 10eba803bc..5ad2f965cb 100755 --- a/test/probe/test_account_failures.py +++ 
b/test/probe/test_account_failures.py @@ -21,7 +21,7 @@ from subprocess import Popen from time import sleep from swift.common import client -from common import get_to_final_state, kill_pids, reset_environment +from test.probe.common import get_to_final_state, kill_pids, reset_environment class TestAccountFailures(unittest.TestCase): diff --git a/test/probe/test_container_failures.py b/test/probe/test_container_failures.py index 9c497ca88b..b24e1bc169 100755 --- a/test/probe/test_container_failures.py +++ b/test/probe/test_container_failures.py @@ -23,7 +23,7 @@ from uuid import uuid4 from swift.common import client -from common import get_to_final_state, kill_pids, reset_environment +from test.probe.common import get_to_final_state, kill_pids, reset_environment class TestContainerFailures(unittest.TestCase): diff --git a/test/probe/test_object_async_update.py b/test/probe/test_object_async_update.py index a5d5852c68..7db3a75fca 100755 --- a/test/probe/test_object_async_update.py +++ b/test/probe/test_object_async_update.py @@ -23,7 +23,7 @@ from uuid import uuid4 from swift.common import client, direct_client -from common import kill_pids, reset_environment +from test.probe.common import kill_pids, reset_environment class TestObjectAsyncUpdate(unittest.TestCase): diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index 006f0d3a1e..7086c11de8 100755 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -23,7 +23,7 @@ from uuid import uuid4 from swift.common import client, direct_client -from common import kill_pids, reset_environment +from test.probe.common import kill_pids, reset_environment class TestObjectHandoff(unittest.TestCase): diff --git a/test/probe/test_running_with_each_type_down.py b/test/probe/test_running_with_each_type_down.py index 7f2352d6ce..46fe1c5851 100755 --- a/test/probe/test_running_with_each_type_down.py +++ b/test/probe/test_running_with_each_type_down.py @@ -22,7 +22,7 @@ from 
time import sleep from swift.common import client -from common import get_to_final_state, kill_pids, reset_environment +from test.probe.common import get_to_final_state, kill_pids, reset_environment class TestRunningWithEachTypeDown(unittest.TestCase): From d266cd560585116e6bff57260e8a1574711aab93 Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 22 Dec 2010 13:28:51 -0800 Subject: [PATCH 046/199] i18nify log message --- swift/common/middleware/swauth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 7f985ece96..faf749464a 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -69,7 +69,7 @@ class Swauth(object): (self.dsc_parsed.scheme, repr(self.dsc_url))) self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key: - msg = 'No super_admin_key set in conf file! Exiting.' + msg = _('No super_admin_key set in conf file! Exiting.') try: self.logger.critical(msg) except Exception: From 3d36034722250bb8f45668220820eaf92975d15a Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 28 Dec 2010 09:18:37 -0800 Subject: [PATCH 047/199] Change copies of x-object-manifest objects to copy the actual contents of the object, not just the manifest marker itself --- swift/proxy/server.py | 14 +++++--------- test/unit/proxy/test_server.py | 27 +++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 9838f86802..43371fbf23 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -947,15 +947,11 @@ class ObjectController(Controller): self.container_name = orig_container_name new_req = Request.blank(req.path_info, environ=req.environ, headers=req.headers) - if 'x-object-manifest' in source_resp.headers: - data_source = iter(['']) - new_req.content_length = 0 - new_req.headers['X-Object-Manifest'] = \ - source_resp.headers['x-object-manifest'] - else: - 
data_source = source_resp.app_iter - new_req.content_length = source_resp.content_length - new_req.etag = source_resp.etag + data_source = source_resp.app_iter + new_req.content_length = source_resp.content_length + if new_req.content_length is None: + new_req.headers['Transfer-Encoding'] = 'chunked' + new_req.etag = source_resp.etag # we no longer need the X-Copy-From header del new_req.headers['X-Copy-From'] for k, v in source_resp.headers.items(): diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 4577cd4dac..5d4e4d7f8e 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1995,6 +1995,33 @@ class TestObjectController(unittest.TestCase): # will be sent in a single chunk. self.assertEquals(body, '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') + # Make a copy of the manifested object, which should + # consolidate the segments. + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' + '0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + body = fd.read() + # Retrieve and validate the copy. 
+ sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('x-object-manifest:' not in headers.lower()) + self.assert_('Content-Length: 25\r' in headers) + body = fd.read() + self.assertEquals(body, '1234 1234 1234 1234 1234 ') finally: prospa.kill() acc1spa.kill() From 9682f8b85d0953e47acaa7aed66412870fc4619b Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 28 Dec 2010 09:57:17 -0800 Subject: [PATCH 048/199] Made copies of ridiculously segmented objects error. --- swift/proxy/server.py | 6 +++++- test/unit/proxy/test_server.py | 17 ++++++++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 43371fbf23..4c9e46323d 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -950,7 +950,11 @@ class ObjectController(Controller): data_source = source_resp.app_iter new_req.content_length = source_resp.content_length if new_req.content_length is None: - new_req.headers['Transfer-Encoding'] = 'chunked' + # This indicates a transfer-encoding: chunked source object, + # which currently only happens because there are more than + # CONTAINER_LISTING_LIMIT segments in a segmented object. In + # this case, we're going to refuse to do the server-side copy. 
+ return HTTPRequestEntityTooLarge(request=req) new_req.etag = source_resp.etag # we no longer need the X-Copy-From header del new_req.headers['X-Copy-From'] diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 5d4e4d7f8e..9af432ed9b 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1996,7 +1996,22 @@ class TestObjectController(unittest.TestCase): self.assertEquals(body, '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') # Make a copy of the manifested object, which should - # consolidate the segments. + # error since the number of segments exceeds + # CONTAINER_LISTING_LIMIT. + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' + '0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 413' + self.assertEquals(headers[:len(exp)], exp) + body = fd.read() + # After adjusting the CONTAINER_LISTING_LIMIT, make a copy of + # the manifested object which should consolidate the segments. + proxy_server.CONTAINER_LISTING_LIMIT = 10000 sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' From 0dc5f6fe1d5b3ecd5ae88c84e409a3f33bfe1d31 Mon Sep 17 00:00:00 2001 From: gholt Date: Tue, 28 Dec 2010 10:39:11 -0800 Subject: [PATCH 049/199] Updated the docs to better reflect the .token_[0-f] container selection. 
--- doc/source/overview_auth.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/doc/source/overview_auth.rst b/doc/source/overview_auth.rst index 14b5c03ef3..f9141509c2 100644 --- a/doc/source/overview_auth.rst +++ b/doc/source/overview_auth.rst @@ -126,9 +126,12 @@ But, here's an example of what the dictionary may look like in the future:: "sat": "http://ord.servers.com:8080/v1/AUTH_8980f74b1cda41e483cbe0a925f448a9"}} Lastly, the tokens themselves are stored as objects in the -/.token container. The names of the objects are the token strings -themselves, such as `AUTH_tked86bbd01864458aa2bd746879438d5a`. The contents of -the token objects are JSON dictionaries of the format:: +`/.token_[0-f]` containers. The names of the objects are the +token strings themselves, such as `AUTH_tked86bbd01864458aa2bd746879438d5a`. +The exact `.token_[0-f]` container chosen is based on the final digit of the +token name, such as `.token_a` for the token +`AUTH_tked86bbd01864458aa2bd746879438d5a`. 
The contents of the token objects +are JSON dictionaries of the format:: {"account": , "user": , From 2bbc9b344360dff261385e638e522a0ed962ec28 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Tue, 28 Dec 2010 13:33:36 -0600 Subject: [PATCH 050/199] added functional test for copy, fixed bug in copy method --- swift/proxy/server.py | 20 ++--- test/functionalnosetests/test_container.py | 4 + test/functionalnosetests/test_object.py | 95 +++++++++++++++++++++- 3 files changed, 105 insertions(+), 14 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 9838f86802..322294ecaa 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1149,19 +1149,17 @@ class ObjectController(Controller): return HTTPPreconditionFailed(request=req, body='Destination header must be of the form ' '/') - new_source = '/' + self.container_name + '/' + self.object_name + source = '/' + self.container_name + '/' + self.object_name self.container_name = dest_container self.object_name = dest_object - new_headers = {} - for k, v in req.headers.items(): - new_headers[k] = v - new_headers['X-Copy-From'] = new_source - new_headers['Content-Length'] = 0 - del new_headers['Destination'] - new_path = '/' + self.account_name + dest - new_req = Request.blank(new_path, environ=req.environ, - headers=new_headers) - return self.PUT(new_req) + # re-write the existing request as a PUT instead of creating a new one + # since this one is already attached to the posthooklogger + req.method = 'PUT' + req.path_info = '/' + self.account_name + dest + req.headers['Content-Length'] = 0 + req.headers['X-Copy-From'] = source + del req.headers['Destination'] + return self.PUT(req) class ContainerController(Controller): diff --git a/test/functionalnosetests/test_container.py b/test/functionalnosetests/test_container.py index 96c0be91e6..8690fcde5d 100755 --- a/test/functionalnosetests/test_container.py +++ b/test/functionalnosetests/test_container.py @@ -115,6 +115,10 @@ class 
TestContainer(unittest.TestCase): resp.read() self.assert_(resp.status in (200, 204), resp.status) self.assertEquals(resp.getheader('x-container-meta-test'), 'Value') + resp = retry(delete, name) + resp.read() + self.assertEquals(resp.status, 204) + name = uuid4().hex resp = retry(put, name, '') resp.read() diff --git a/test/functionalnosetests/test_object.py b/test/functionalnosetests/test_object.py index 2e1668db0e..5975cf16a2 100644 --- a/test/functionalnosetests/test_object.py +++ b/test/functionalnosetests/test_object.py @@ -38,21 +38,110 @@ class TestObject(unittest.TestCase): if skip: raise SkipTest + def delete(url, token, parsed, conn, obj): + conn.request('DELETE', + '%s/%s/%s' % (parsed.path, self.container, obj), + '', {'X-Auth-Token': token}) + return check_response(conn) + + # get list of objects in container + def list(url, token, parsed, conn): + conn.request('GET', + '%s/%s' % (parsed.path, self.container), + '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(list) + object_listing = resp.read() + self.assertEquals(resp.status, 200) + + # iterate over object listing and delete all objects + for obj in object_listing.splitlines(): + resp = retry(delete, obj) + resp.read() + self.assertEquals(resp.status, 204) + + # delete the container def delete(url, token, parsed, conn): - conn.request('DELETE', '%s/%s/%s' % (parsed.path, self.container, - self.obj), '', {'X-Auth-Token': token}) + conn.request('DELETE', parsed.path + '/' + self.container, '', + {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete) resp.read() self.assertEquals(resp.status, 204) + def test_copy_object(self): + if skip: + raise SkipTest + + source = '%s/%s' % (self.container, self.obj) + dest = '%s/%s' % (self.container, 'test_copy') + + # get contents of source + def get_source(url, token, parsed, conn): + conn.request('GET', + '%s/%s' % (parsed.path, source), + '', {'X-Auth-Token': token}) + return check_response(conn) + resp = 
retry(get_source) + source_contents = resp.read() + self.assertEquals(resp.status, 200) + self.assertEquals(source_contents, 'test') + + # copy source to dest with X-Copy-From + def put(url, token, parsed, conn): + conn.request('PUT', '%s/%s' % (parsed.path, dest), '', + {'X-Auth-Token': token, + 'Content-Length': '0', + 'X-Copy-From': source}) + return check_response(conn) + resp = retry(put) + contents = resp.read() + self.assertEquals(resp.status, 201) + + # contents of dest should be the same as source + def get_dest(url, token, parsed, conn): + conn.request('GET', + '%s/%s' % (parsed.path, dest), + '', {'X-Auth-Token': token}) + return check_response(conn) + resp = retry(get_dest) + dest_contents = resp.read() + self.assertEquals(resp.status, 200) + self.assertEquals(dest_contents, source_contents) + + # delete the copy def delete(url, token, parsed, conn): - conn.request('DELETE', parsed.path + '/' + self.container, '', + conn.request('DELETE', '%s/%s' % (parsed.path, dest), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete) resp.read() self.assertEquals(resp.status, 204) + # verify dest does not exist + resp = retry(get_dest) + resp.read() + self.assertEquals(resp.status, 404) + + # copy source to dest with COPY + def copy(url, token, parsed, conn): + conn.request('COPY', '%s/%s' % (parsed.path, source), '', + {'X-Auth-Token': token, + 'Destination': dest}) + return check_response(conn) + resp = retry(copy) + contents = resp.read() + self.assertEquals(resp.status, 201) + + # contents of dest should be the same as source + resp = retry(get_dest) + dest_contents = resp.read() + self.assertEquals(resp.status, 200) + self.assertEquals(dest_contents, source_contents) + + # delete the copy + resp = retry(delete) + resp.read() + self.assertEquals(resp.status, 204) def test_public_object(self): if skip: From 8dee94fd7c76077038c44e76bce230b736e97943 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Tue, 28 Dec 2010 14:54:00 -0800 Subject: 
[PATCH 051/199] adding defaults, docs, and unit tests --- doc/source/deployment_guide.rst | 7 ++- etc/object-server.conf-sample | 4 +- swift/common/utils.py | 7 ++- swift/obj/auditor.py | 18 +++++-- swift/obj/server.py | 2 +- test/unit/common/test_utils.py | 30 ++++++------ test/unit/obj/test_auditor.py | 83 +++++++++++++++++++++++++++++++-- 7 files changed, 123 insertions(+), 28 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 68f8c9b5c8..8db4360a4d 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -229,7 +229,12 @@ Option Default Description log_name object-auditor Label used when logging log_facility LOG_LOCAL0 Syslog log facility log_level INFO Logging level -interval 1800 Minimum time for a pass to take +files_per_second 20 Maximum files audited per second. Should + be tuned according to individual system + specs. 0 is unlimited. +bytes_per_second 10000000 Maximum bytes audited per second. Should + be tuned according to individual system + specs. 0 is unlimited. 
================== ============== ========================================== ------------------------------ diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample index d4523566bf..cc80c18c07 100644 --- a/etc/object-server.conf-sample +++ b/etc/object-server.conf-sample @@ -55,5 +55,5 @@ use = egg:swift#object [object-auditor] # log_name = object-auditor -# Will audit, at most, 1 object per device per interval -# interval = 1800 +# files_per_second = 20 +# bytes_per_second = 10000000 diff --git a/swift/common/utils.py b/swift/common/utils.py index 75f6409ba6..531905f407 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -726,8 +726,11 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): on devices :param logger: a logger object ''' - for device in os.listdir(devices): - if mount_check and not\ + device_dir = os.listdir(devices) + # randomize devices in case of process restart before sweep completed + shuffle(device_dir) + for device in device_dir: + if mount_check and not \ os.path.ismount(os.path.join(devices, device)): if logger: logger.debug( diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 98dc077640..dbfee18280 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -35,14 +35,18 @@ class ObjectAuditor(Daemon): self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') - self.max_files_per_second = float(conf.get('files_per_second', 0)) - self.max_bytes_per_second = float(conf.get('bytes_per_second', 0)) + self.max_files_per_second = float(conf.get('files_per_second', 20)) + self.max_bytes_per_second = float(conf.get('bytes_per_second', + 10000000)) self.files_running_time = 0 self.bytes_running_time = 0 self.bytes_processed = 0 + self.total_bytes_processed = 0 + self.total_files_processed = 0 self.passes = 0 self.quarantines = 0 self.errors = 0 + self.log_time = 3600 # once an hour def 
run_forever(self): """Run the object audit until stopped.""" @@ -62,7 +66,8 @@ class ObjectAuditor(Daemon): self.object_audit(path, device, partition) self.files_running_time = ratelimit_sleep( self.files_running_time, self.max_files_per_second) - if time.time() - reported >= 3600: # once an hour + self.total_files_processed += 1 + if time.time() - reported >= self.log_time: self.logger.info( 'Since %s: Locally: %d passed audit, %d quarantined, ' '%d errors files/sec: %.2f , bytes/sec: %.2f' % ( @@ -77,7 +82,11 @@ class ObjectAuditor(Daemon): self.bytes_processed = 0 elapsed = time.time() - begin self.logger.info( - 'Object audit "%s" mode completed: %.02fs' % (mode, elapsed)) + 'Object audit "%s" mode completed: %.02fs. ' + 'Total bytes/sec: %.2f , Total files/sec: %.2f ' % ( + mode, elapsed, + self.total_bytes_processed / elapsed, + self.total_files_processed / elapsed)) def object_audit(self, path, device, partition): """ @@ -114,6 +123,7 @@ class ObjectAuditor(Daemon): incr_by=len(chunk)) etag.update(chunk) self.bytes_processed += len(chunk) + self.total_bytes_processed += len(chunk) etag = etag.hexdigest() if etag != df.metadata['ETag']: raise AuditException("ETag of %s does not match file's md5 of " diff --git a/swift/obj/server.py b/swift/obj/server.py index a690f7fe2a..5b4fc1b06b 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -219,7 +219,7 @@ class DiskFile(object): :params fd: file descriptor of the temp file :param tmppath: path to the temporary file being used - :param metadata: dictionary of metada to be written + :param metadata: dictionary of metadata to be written :param extention: extension to be used when making the file """ metadata['name'] = self.name diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index b680a27793..d0768a5a0b 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -35,6 +35,7 @@ from swift.common import utils class MockOs(): + def __init__(self, 
pass_funcs=[], called_funcs=[], raise_funcs=[]): self.closed_fds = [] for func in pass_funcs: @@ -184,12 +185,12 @@ class TestUtils(unittest.TestCase): print 'test2' self.assertEquals(sio.getvalue(), 'STDOUT: test2\n') sys.stderr = lfo - print >>sys.stderr, 'test4' + print >> sys.stderr, 'test4' self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n') sys.stdout = orig_stdout print 'test5' self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n') - print >>sys.stderr, 'test6' + print >> sys.stderr, 'test6' self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n' 'STDOUT: test6\n') sys.stderr = orig_stderr @@ -325,7 +326,7 @@ Error: unable to locate %s def test_hash_path(self): # Yes, these tests are deliberately very fragile. We want to make sure - # that if someones changes the results hash_path produces, they know it. + # that if someones changes the results hash_path produces, they know it self.assertEquals(utils.hash_path('a'), '1c84525acb02107ea475dcd3d09c2c58') self.assertEquals(utils.hash_path('a', 'c'), @@ -364,10 +365,12 @@ log_name = yarr''' result = utils.readconf('/tmp/test', 'section2').get('log_name') expected = 'yarr' self.assertEquals(result, expected) - result = utils.readconf('/tmp/test', 'section1', log_name='foo').get('log_name') + result = utils.readconf('/tmp/test', 'section1', + log_name='foo').get('log_name') expected = 'foo' self.assertEquals(result, expected) - result = utils.readconf('/tmp/test', 'section1', defaults={'bar': 'baz'}) + result = utils.readconf('/tmp/test', 'section1', + defaults={'bar': 'baz'}) expected = {'log_name': 'section1', 'foo': 'bar', 'bar': 'baz'} self.assertEquals(result, expected) os.unlink('/tmp/test') @@ -452,34 +455,35 @@ log_name = yarr''' start = time.time() for i in range(100): running_time = utils.ratelimit_sleep(running_time, 0) - self.assertTrue(abs((time.time() - start)* 1000) < 1) + self.assertTrue(abs((time.time() - start) * 100) < 1) running_time = 0 start = 
time.time() for i in range(50): running_time = utils.ratelimit_sleep(running_time, 200) - # make sure its accurate to 2/100 of a second - self.assertTrue(abs(25 - (time.time() - start)* 100) < 2) + # make sure its accurate to 10th of a second + self.assertTrue(abs(25 - (time.time() - start) * 100) < 10) def test_ratelimit_sleep_with_sleep(self): running_time = 0 start = time.time() for i in range(25): running_time = utils.ratelimit_sleep(running_time, 50) - time.sleep(1.0/75) - # make sure its accurate to 2/100 of a second - self.assertTrue(abs(50 - (time.time() - start)* 100) < 2) + time.sleep(1.0 / 75) + # make sure its accurate to 10th of a second + self.assertTrue(abs(50 - (time.time() - start) * 100) < 10) def test_ratelimit_sleep_with_incr(self): running_time = 0 start = time.time() - vals = [5,17,0,3,11,30,40,4,13,2,-1] * 2 # adds up to 250 (with no -1) + vals = [5, 17, 0, 3, 11, 30, + 40, 4, 13, 2, -1] * 2 # adds up to 250 (with no -1) total = 0 for i in vals: running_time = utils.ratelimit_sleep(running_time, 500, incr_by=i) total += i - self.assertTrue(abs(50 - (time.time() - start)* 100) < 2) + self.assertTrue(abs(50 - (time.time() - start) * 100) < 10) if __name__ == '__main__': diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 90344420a7..e068af2114 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -23,9 +23,11 @@ from shutil import rmtree from hashlib import md5 from swift.obj import auditor from swift.obj.server import DiskFile, write_metadata -from swift.common.utils import hash_path, mkdirs, normalize_timestamp +from swift.common.utils import hash_path, mkdirs, normalize_timestamp, renamer +from swift.obj.replicator import invalidate_hash from swift.common.exceptions import AuditException + class TestAuditor(unittest.TestCase): def setUp(self): @@ -33,7 +35,7 @@ class TestAuditor(unittest.TestCase): self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') if not self.path_to_test_xfs 
or \ not os.path.exists(self.path_to_test_xfs): - print >>sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ + print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ 'pointing to a valid directory.\n' \ 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ 'system for testing.' @@ -60,8 +62,7 @@ class TestAuditor(unittest.TestCase): self.conf = dict( devices=self.devices, - mount_check='false', - timeout='300', stats_interval='1') + mount_check='false') def tearDown(self): rmtree(self.testdir, ignore_errors=1) @@ -132,9 +133,42 @@ class TestAuditor(unittest.TestCase): 'sda', cur_part) self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + def test_object_audit_no_meta(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') + data = '0' * 1024 + etag = md5() + pre_quarantines = self.auditor.quarantines + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + timestamp = str(normalize_timestamp(time.time())) + os.fsync(fd) + invalidate_hash(os.path.dirname(disk_file.datadir)) + renamer(tmppath, os.path.join(disk_file.datadir, + timestamp + '.data')) + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + + def test_object_audit_bad_args(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + pre_errors = self.auditor.errors + self.auditor.object_audit(5, 'sda', '0') + self.assertEquals(self.auditor.errors, pre_errors + 1) + pre_errors = self.auditor.errors + self.auditor.object_audit('badpath', 'sda', '0') + self.assertEquals(self.auditor.errors, pre_errors) # just returns + def test_object_run_once_pass(self): self.auditor = auditor.ObjectAuditor( self.conf) + self.auditor.log_time = 0 cur_part = '0' timestamp = str(normalize_timestamp(time.time())) 
pre_quarantines = self.auditor.quarantines @@ -155,7 +189,7 @@ class TestAuditor(unittest.TestCase): self.auditor.run_once() self.assertEquals(self.auditor.quarantines, pre_quarantines) - def test_object_run_once_multi_devices(self): + def test_object_run_once_no_sda(self): self.auditor = auditor.ObjectAuditor( self.conf) cur_part = '0' @@ -179,6 +213,45 @@ class TestAuditor(unittest.TestCase): self.auditor.run_once() self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + def test_object_run_once_multi_devices(self): + self.auditor = auditor.ObjectAuditor( + self.conf) + cur_part = '0' + timestamp = str(normalize_timestamp(time.time())) + pre_quarantines = self.auditor.quarantines + disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') + data = '0' * 10 + etag = md5() + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + disk_file.close() + self.auditor.run_once() + disk_file = DiskFile(self.devices, 'sdb', cur_part, 'a', 'c', 'ob') + data = '1' * 10 + etag = md5() + with disk_file.mkstemp() as (fd, tmppath): + os.write(fd, data) + etag.update(data) + etag = etag.hexdigest() + metadata = { + 'ETag': etag, + 'X-Timestamp': timestamp, + 'Content-Length': str(os.fstat(fd).st_size), + } + disk_file.put(fd, tmppath, metadata) + disk_file.close() + os.write(fd, 'extra_data') + self.auditor.run_once() + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + if __name__ == '__main__': unittest.main() From 1ef05f313cdf832c7fa41f5e5d08f7ade4fe72b8 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Tue, 28 Dec 2010 15:27:25 -0800 Subject: [PATCH 052/199] clearing out stats for forever mode --- swift/obj/auditor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 630b91d658..dd68d8baa1 
100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -52,6 +52,8 @@ class ObjectAuditor(Daemon): """Run the object audit until stopped.""" while True: self.run_once('forever') + self.total_bytes_processed = 0 + self.total_files_processed = 0 time.sleep(30) def run_once(self, mode='once'): From b46392911c9b3fc3a7068d606338cbea3a8c0bc8 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Tue, 28 Dec 2010 17:58:17 -0800 Subject: [PATCH 053/199] pep8 and merge fixes --- swift/common/utils.py | 1 + swift/obj/auditor.py | 6 +++--- test/unit/obj/test_auditor.py | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 8456c3ffa1..a9085fc1ce 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -334,6 +334,7 @@ class LogAdapter(object): class NamedFormatter(logging.Formatter): + def __init__(self, server, logger): logging.Formatter.__init__(self) self.server = server diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index dd68d8baa1..5604ea74a4 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -105,7 +105,7 @@ class ObjectAuditor(Daemon): name = object_server.read_metadata(path)['name'] except Exception, exc: raise AuditException('Error when reading metadata: %s' % exc) - _, account, container, obj = name.split('/', 3) + _junk, account, container, obj = name.split('/', 3) df = object_server.DiskFile(self.devices, device, partition, account, container, obj, @@ -132,8 +132,8 @@ class ObjectAuditor(Daemon): "%s" % (df.metadata['ETag'], etag)) except AuditException, err: self.quarantines += 1 - self.logger.error(_('ERROR Object %(obj)s failed audit and will be ' - 'quarantined: %(err)s'), {'obj': path, 'err': err}) + self.logger.error(_('ERROR Object %(obj)s failed audit and will ' + 'be quarantined: %(err)s'), {'obj': path, 'err': err}) invalidate_hash(os.path.dirname(path)) renamer_path = os.path.dirname(path) renamer(renamer_path, os.path.join(self.devices, device, diff 
--git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index e068af2114..b17ccce83a 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -14,7 +14,7 @@ # limitations under the License. # TODO: Tests - +import gettext import unittest import tempfile import os @@ -254,4 +254,5 @@ class TestAuditor(unittest.TestCase): if __name__ == '__main__': + gettext.install('swift', unicode=1) unittest.main() From 8026f86b3e1cdac4273f7c779610652b9f52fca1 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Tue, 28 Dec 2010 20:31:38 -0800 Subject: [PATCH 054/199] fix for 0 incr_by --- swift/common/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index a9085fc1ce..70799fda03 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -794,8 +794,8 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1): to ratelimit 1024 bytes/sec and have differing sizes of requests. Must be >= 0. 
''' - if not max_rate or incr_by < 0: - return 0 + if not max_rate or incr_by <= 0: + return running_time clock_accuracy = 1000.0 now = time.time() * clock_accuracy time_per_request = clock_accuracy * (float(incr_by) / max_rate) From e54846d850b66869811b84a71ecd93e66badaba5 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Wed, 29 Dec 2010 13:14:43 -0600 Subject: [PATCH 055/199] made this i18n importing sane --- swift/__init__.py | 2 ++ swift/common/daemon.py | 2 -- swift/common/wsgi.py | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/swift/__init__.py b/swift/__init__.py index 1a72d32e57..65989fb130 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1 +1,3 @@ __version__ = '1.1.0' +import gettext +gettext.install('swift') diff --git a/swift/common/daemon.py b/swift/common/daemon.py index e5ed3f7caa..0f7df064c7 100644 --- a/swift/common/daemon.py +++ b/swift/common/daemon.py @@ -16,7 +16,6 @@ import os import sys import signal -import gettext from re import sub from swift.common import utils @@ -42,7 +41,6 @@ class Daemon(object): utils.validate_configuration() utils.capture_stdio(self.logger, **kwargs) utils.drop_privileges(self.conf.get('user', 'swift')) - gettext.install('swift', unicode=1) def kill_children(*args): signal.signal(signal.SIGTERM, signal.SIG_IGN) diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index 079e4277e1..a93c21aa8a 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -21,7 +21,6 @@ import signal import sys import time import mimetools -import gettext import eventlet from eventlet import greenio, GreenPool, sleep, wsgi, listen @@ -121,7 +120,6 @@ def run_wsgi(conf_file, app_section, *args, **kwargs): sock = get_socket(conf, default_port=kwargs.get('default_port', 8080)) # remaining tasks should not require elevated privileges drop_privileges(conf.get('user', 'swift')) - gettext.install('swift', unicode=1) # finally after binding to ports and privilege drop, run app __init__ code app = 
loadapp('config:%s' % conf_file, global_conf={'log_name': log_name}) From 57970bdeb59ac17d17a59fa2d2dcefac729eaf0d Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 29 Dec 2010 12:00:08 -0800 Subject: [PATCH 056/199] Cleaned up the bins; patched the broken test (when run standalone) --- bin/st | 2 -- bin/swift-account-audit | 2 -- bin/swift-auth-add-user | 2 -- bin/swift-auth-recreate-accounts | 2 -- bin/swift-auth-update-reseller-prefixes | 2 -- bin/swift-bench | 2 -- bin/swift-drive-audit | 2 -- bin/swift-get-nodes | 3 --- bin/swift-log-uploader | 2 -- bin/swift-object-info | 2 -- bin/swift-ring-builder | 2 -- bin/swift-stats-populate | 2 -- bin/swift-stats-report | 2 -- swift/__init__.py | 4 +++- test/unit/container/test_updater.py | 2 ++ 15 files changed, 5 insertions(+), 28 deletions(-) diff --git a/bin/st b/bin/st index 79f331558f..b41aca67ec 100755 --- a/bin/st +++ b/bin/st @@ -44,8 +44,6 @@ except: try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection - import gettext - gettext.install('swift', unicode=1) except: from httplib import HTTPConnection diff --git a/bin/swift-account-audit b/bin/swift-account-audit index 1f6aceb2c8..fe611562d7 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -20,7 +20,6 @@ from urllib import quote from hashlib import md5 import getopt from itertools import chain -import gettext import simplejson from eventlet.greenpool import GreenPool @@ -325,7 +324,6 @@ class Auditor(object): if __name__ == '__main__': - gettext.install('swift', unicode=1) try: optlist, args = getopt.getopt(sys.argv[1:], 'c:r:e:d') except getopt.GetoptError, err: diff --git a/bin/swift-auth-add-user b/bin/swift-auth-add-user index 2d9819dfc8..d502dc83a8 100755 --- a/bin/swift-auth-add-user +++ b/bin/swift-auth-add-user @@ -18,13 +18,11 @@ from ConfigParser import ConfigParser from optparse import OptionParser from os.path import basename from sys import argv, exit -import gettext from swift.common.bufferedhttp 
import http_connect_raw as http_connect if __name__ == '__main__': - gettext.install('swift', unicode=1) default_conf = '/etc/swift/auth-server.conf' parser = OptionParser( usage='Usage: %prog [options] ') diff --git a/bin/swift-auth-recreate-accounts b/bin/swift-auth-recreate-accounts index a8ee20e0e8..e17bf2da3b 100755 --- a/bin/swift-auth-recreate-accounts +++ b/bin/swift-auth-recreate-accounts @@ -17,12 +17,10 @@ from ConfigParser import ConfigParser from optparse import OptionParser from sys import argv, exit -import gettext from swift.common.bufferedhttp import http_connect_raw as http_connect if __name__ == '__main__': - gettext.install('swift', unicode=1) default_conf = '/etc/swift/auth-server.conf' parser = OptionParser(usage='Usage: %prog [options]') parser.add_option('-c', '--conf', dest='conf', default=default_conf, diff --git a/bin/swift-auth-update-reseller-prefixes b/bin/swift-auth-update-reseller-prefixes index cb09bd9872..41a4bf6a76 100755 --- a/bin/swift-auth-update-reseller-prefixes +++ b/bin/swift-auth-update-reseller-prefixes @@ -16,13 +16,11 @@ from os.path import basename from sys import argv, exit -import gettext from swift.common.db import get_db_connection if __name__ == '__main__': - gettext.install('swift', unicode=1) app = basename(argv[0]) if len(argv) != 3: exit(''' diff --git a/bin/swift-bench b/bin/swift-bench index 2c3e08318b..ab332482cd 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -20,7 +20,6 @@ import sys import signal import uuid from optparse import OptionParser -import gettext from swift.common.bench import BenchController from swift.common.utils import readconf, NamedLogger @@ -56,7 +55,6 @@ SAIO_DEFAULTS = { } if __name__ == '__main__': - gettext.install('swift', unicode=1) usage = "usage: %prog [OPTIONS] [CONF_FILE]" usage += """\n\nConf file with SAIO defaults: diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index 64c478e203..cde28c1ed7 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ 
-20,7 +20,6 @@ import re import subprocess import sys from ConfigParser import ConfigParser -import gettext from swift.common.utils import get_logger @@ -87,7 +86,6 @@ def comment_fstab(mount_point): os.rename('/etc/fstab.new', '/etc/fstab') if __name__ == '__main__': - gettext.install('swift', unicode=1) c = ConfigParser() try: conf_path = sys.argv[1] diff --git a/bin/swift-get-nodes b/bin/swift-get-nodes index 69643f6a84..f24dd48f96 100755 --- a/bin/swift-get-nodes +++ b/bin/swift-get-nodes @@ -16,14 +16,11 @@ import sys import urllib -import gettext from swift.common.ring import Ring from swift.common.utils import hash_path -gettext.install('swift', unicode=1) - if len(sys.argv) < 3 or len(sys.argv) > 5: print 'Usage: %s [] []' \ % sys.argv[0] diff --git a/bin/swift-log-uploader b/bin/swift-log-uploader index 972303f67b..e533cad824 100755 --- a/bin/swift-log-uploader +++ b/bin/swift-log-uploader @@ -15,14 +15,12 @@ # limitations under the License. import sys -import gettext from swift.stats.log_uploader import LogUploader from swift.common.utils import parse_options from swift.common import utils if __name__ == '__main__': - gettext.install('swift', unicode=1) conf_file, options = parse_options(usage="Usage: %prog CONFIG_FILE PLUGIN") try: plugin = options['extra_args'][0] diff --git a/bin/swift-object-info b/bin/swift-object-info index 268b991bee..57f2522071 100755 --- a/bin/swift-object-info +++ b/bin/swift-object-info @@ -18,14 +18,12 @@ import sys import cPickle as pickle from datetime import datetime from hashlib import md5 -import gettext from swift.common.ring import Ring from swift.obj.server import read_metadata from swift.common.utils import hash_path if __name__ == '__main__': - gettext.install('swift', unicode=1) if len(sys.argv) <= 1: print "Usage: %s OBJECT_FILE" % sys.argv[0] sys.exit(1) diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index 1d53a30973..50353df256 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ 
-21,7 +21,6 @@ from os import mkdir from os.path import basename, dirname, exists, join as pathjoin from sys import argv, exit from time import time -import gettext from swift.common.ring import RingBuilder @@ -175,7 +174,6 @@ swift-ring-builder set_min_part_hours if __name__ == '__main__': - gettext.install('swift', unicode=1) if len(argv) < 2: print ''' swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s diff --git a/bin/swift-stats-populate b/bin/swift-stats-populate index 985fa50c82..8ea210cb65 100755 --- a/bin/swift-stats-populate +++ b/bin/swift-stats-populate @@ -21,7 +21,6 @@ from optparse import OptionParser from sys import exit, argv from time import time from uuid import uuid4 -import gettext from eventlet import GreenPool, patcher, sleep from eventlet.pools import Pool @@ -76,7 +75,6 @@ def report(success): if __name__ == '__main__': global begun, created, item_type, next_report, need_to_create, retries_done - gettext.install('swift', unicode=1) patcher.monkey_patch() parser = OptionParser() diff --git a/bin/swift-stats-report b/bin/swift-stats-report index 158ae37c75..3f735877cf 100755 --- a/bin/swift-stats-report +++ b/bin/swift-stats-report @@ -23,7 +23,6 @@ from optparse import OptionParser from sys import argv, exit, stderr from time import time from uuid import uuid4 -import gettext from eventlet import GreenPool, hubs, patcher, sleep, Timeout from eventlet.pools import Pool @@ -747,7 +746,6 @@ def object_delete_report(coropool, connpool, options): if __name__ == '__main__': - gettext.install('swift', unicode=1) patcher.monkey_patch() hubs.get_hub().debug_exceptions = False diff --git a/swift/__init__.py b/swift/__init__.py index 65989fb130..0bd0062056 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,3 +1,5 @@ -__version__ = '1.1.0' import gettext + + +__version__ = '1.1.0' gettext.install('swift') diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index 35e7629162..a4c638c2ec 100644 --- 
a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -23,6 +23,7 @@ from shutil import rmtree from eventlet import spawn, TimeoutError, listen from eventlet.timeout import Timeout +from swift.common import utils from swift.container import updater as container_updater from swift.container import server as container_server from swift.common.db import ContainerBroker @@ -33,6 +34,7 @@ from swift.common.utils import normalize_timestamp class TestContainerUpdater(unittest.TestCase): def setUp(self): + utils.HASH_PATH_SUFFIX = 'endcap' self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') if not self.path_to_test_xfs or \ not os.path.exists(self.path_to_test_xfs): From 42be85a7f92702b5590f52bec305e1a0c082a006 Mon Sep 17 00:00:00 2001 From: gholt Date: Wed, 29 Dec 2010 12:47:22 -0800 Subject: [PATCH 057/199] PEP8 Updates --- swift/common/middleware/cname_lookup.py | 2 +- swift/common/middleware/memcache.py | 2 ++ swift/common/wsgi.py | 5 +++-- swift/obj/replicator.py | 2 +- swift/proxy/server.py | 9 ++++++--- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py index d0ba2727c6..be7071ba16 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -61,7 +61,7 @@ class CNAMELookupMiddleware(object): port = '' if ':' in given_domain: given_domain, port = given_domain.rsplit(':', 1) - if given_domain == self.storage_domain[1:]: # strip initial '.' + if given_domain == self.storage_domain[1:]: # strip initial '.' 
return self.app(env, start_response) a_domain = given_domain if not a_domain.endswith(self.storage_domain): diff --git a/swift/common/middleware/memcache.py b/swift/common/middleware/memcache.py index 0eabb0fb68..0ec1288fd5 100644 --- a/swift/common/middleware/memcache.py +++ b/swift/common/middleware/memcache.py @@ -35,6 +35,8 @@ class MemcacheMiddleware(object): def filter_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) + def cache_filter(app): return MemcacheMiddleware(app, conf) + return cache_filter diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index a93c21aa8a..e8c512f2ee 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -56,14 +56,15 @@ def monkey_patch_mimetools(): mimetools.Message.parsetype = parsetype + def get_socket(conf, default_port=8080): """Bind socket to bind ip:port in conf :param conf: Configuration dict to read settings from :param default_port: port to use if not specified in conf - :returns : a socket object as returned from socket.listen or ssl.wrap_socket - if conf specifies cert_file + :returns : a socket object as returned from socket.listen or + ssl.wrap_socket if conf specifies cert_file """ bind_addr = (conf.get('bind_ip', '0.0.0.0'), int(conf.get('bind_port', default_port))) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index ed77bf5a10..ef02b9f3ec 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -245,7 +245,7 @@ class ObjectReplicator(Daemon): except Timeout: self.logger.error(_("Killing long-running rsync: %s"), str(args)) proc.kill() - return 1 # failure response code + return 1 # failure response code total_time = time.time() - start_time for result in results.split('\n'): if result == '': diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 322294ecaa..1cde24cfe6 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -96,9 +96,11 @@ def delay_denial(func): return func(*a, **kw) return wrapped + def 
get_account_memcache_key(account): return 'account/%s' % account + def get_container_memcache_key(account, container): return 'container/%s/%s' % (account, container) @@ -298,7 +300,8 @@ class Controller(object): :param additional_info: additional information to log """ self.app.logger.exception( - _('ERROR with %(type)s server %(ip)s:%(port)s/%(device)s re: %(info)s'), + _('ERROR with %(type)s server %(ip)s:%(port)s/%(device)s re: ' + '%(info)s'), {'type': typ, 'ip': node['ip'], 'port': node['port'], 'device': node['device'], 'info': additional_info}) @@ -349,7 +352,7 @@ class Controller(object): if result_code == 200: return partition, nodes elif result_code == 404: - return None, None + return None, None result_code = 0 attempts_left = self.app.account_ring.replica_count path = '/%s' % account @@ -1618,7 +1621,7 @@ class BaseApplication(object): self.account_ring = account_ring or \ Ring(os.path.join(swift_dir, 'account.ring.gz')) self.memcache = memcache - mimetypes.init(mimetypes.knownfiles + + mimetypes.init(mimetypes.knownfiles + [os.path.join(swift_dir, 'mime.types')]) def get_controller(self, path): From f811be80ff0bc4a124440f22aba6f725a4e4ad24 Mon Sep 17 00:00:00 2001 From: David Goetz Date: Thu, 30 Dec 2010 12:30:04 -0800 Subject: [PATCH 058/199] changes after peer review, pep8 and eventlet.sleep --- doc/source/deployment_guide.rst | 4 ++-- swift/common/utils.py | 8 ++++---- swift/obj/auditor.py | 30 ++++++++++++++++++------------ test/unit/common/test_utils.py | 2 +- test/unit/obj/test_auditor.py | 26 ++++++++------------------ 5 files changed, 33 insertions(+), 37 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 8db4360a4d..794742ffca 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -231,10 +231,10 @@ log_facility LOG_LOCAL0 Syslog log facility log_level INFO Logging level files_per_second 20 Maximum files audited per second. 
Should be tuned according to individual system - specs. 0 is unlimited. + specs. 0 is unlimited. bytes_per_second 10000000 Maximum bytes audited per second. Should be tuned according to individual system - specs. 0 is unlimited. + specs. 0 is unlimited. ================== ============== ========================================== ------------------------------ diff --git a/swift/common/utils.py b/swift/common/utils.py index 70799fda03..2c5c46a8ed 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -781,11 +781,11 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): def ratelimit_sleep(running_time, max_rate, incr_by=1): ''' - Will time.sleep() for the appropriate time so that the max_rate + Will eventlet.sleep() for the appropriate time so that the max_rate is never exceeded. If max_rate is 0, will not ratelimit. The maximum recommended rate should not exceed (1000 * incr_by) a second - as time.sleep() does involve at least a millisecond of overhead. - Returns running_time that should be used for subsequent calls. + as eventlet.sleep() does involve some overhead. Returns running_time + that should be used for subsequent calls. :param running_time: the running time of the next allowable request. Best to start at zero. 
@@ -802,5 +802,5 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1): if running_time < now: running_time = now elif running_time - now > time_per_request: - time.sleep((running_time - now) / clock_accuracy) + eventlet.sleep((running_time - now) / clock_accuracy) return running_time + time_per_request diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 5604ea74a4..1670ddfbba 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -46,7 +46,7 @@ class ObjectAuditor(Daemon): self.passes = 0 self.quarantines = 0 self.errors = 0 - self.log_time = 3600 # once an hour + self.log_time = 3600 # once an hour def run_forever(self): """Run the object audit until stopped.""" @@ -71,12 +71,16 @@ class ObjectAuditor(Daemon): self.total_files_processed += 1 if time.time() - reported >= self.log_time: self.logger.info(_( - 'Since %s: Locally: %d passed audit, %d quarantined, ' - '%d errors files/sec: %.2f , bytes/sec: %.2f' % ( - time.ctime(reported), self.passes, - self.quarantines, self.errors, - self.passes / (time.time() - reported), - self.bytes_processed / (time.time() - reported)))) + 'Since %(start_time)s: Locally: %(passes)d passed audit, ' + '%(quars)d quarantined, %(errors)d errors ' + 'files/sec: %(frate).2f , bytes/sec: %(brate).2f' % { + 'start_time': time.ctime(reported), + 'passes': self.passes, + 'quars': self.quarantines, + 'errors': self.errors, + 'frate': self.passes / (time.time() - reported), + 'brate': self.bytes_processed / + (time.time() - reported)})) reported = time.time() self.passes = 0 self.quarantines = 0 @@ -84,11 +88,13 @@ class ObjectAuditor(Daemon): self.bytes_processed = 0 elapsed = time.time() - begin self.logger.info(_( - 'Object audit "%s" mode completed: %.02fs. ' - 'Total bytes/sec: %.2f , Total files/sec: %.2f ' % ( - mode, elapsed, - self.total_bytes_processed / elapsed, - self.total_files_processed / elapsed))) + 'Object audit "%(mode)s" mode completed: %(elapsed).02fs. 
' + 'Total files/sec: %(frate).2f , ' + 'Total bytes/sec: %(brate).2f ' % { + 'mode': mode, + 'elapsed': elapsed, + 'frate': self.total_files_processed / elapsed, + 'brate': self.total_bytes_processed / elapsed})) def object_audit(self, path, device, partition): """ diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 02e9a206c3..8ef5edf8f8 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -469,7 +469,7 @@ log_name = yarr''' running_time = 0 start = time.time() vals = [5, 17, 0, 3, 11, 30, - 40, 4, 13, 2, -1] * 2 # adds up to 250 (with no -1) + 40, 4, 13, 2, -1] * 2 # adds up to 250 (with no -1) total = 0 for i in vals: running_time = utils.ratelimit_sleep(running_time, diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index b17ccce83a..baa75d05cc 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -14,7 +14,6 @@ # limitations under the License. # TODO: Tests -import gettext import unittest import tempfile import os @@ -31,7 +30,6 @@ from swift.common.exceptions import AuditException class TestAuditor(unittest.TestCase): def setUp(self): - # Setup a test ring (stolen from common/test_ring.py) self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') if not self.path_to_test_xfs or \ not os.path.exists(self.path_to_test_xfs): @@ -68,8 +66,7 @@ class TestAuditor(unittest.TestCase): rmtree(self.testdir, ignore_errors=1) def test_object_audit_extra_data(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') data = '0' * 1024 @@ -99,8 +96,7 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_audit_diff_data(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' disk_file = 
DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') data = '0' * 1024 @@ -134,8 +130,7 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_audit_no_meta(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') data = '0' * 1024 @@ -156,18 +151,16 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_audit_bad_args(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) pre_errors = self.auditor.errors self.auditor.object_audit(5, 'sda', '0') self.assertEquals(self.auditor.errors, pre_errors + 1) pre_errors = self.auditor.errors self.auditor.object_audit('badpath', 'sda', '0') - self.assertEquals(self.auditor.errors, pre_errors) # just returns + self.assertEquals(self.auditor.errors, pre_errors) # just returns def test_object_run_once_pass(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) self.auditor.log_time = 0 cur_part = '0' timestamp = str(normalize_timestamp(time.time())) @@ -190,8 +183,7 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines) def test_object_run_once_no_sda(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' timestamp = str(normalize_timestamp(time.time())) pre_quarantines = self.auditor.quarantines @@ -214,8 +206,7 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_run_once_multi_devices(self): - self.auditor = auditor.ObjectAuditor( - self.conf) + self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' timestamp = str(normalize_timestamp(time.time())) 
pre_quarantines = self.auditor.quarantines @@ -254,5 +245,4 @@ class TestAuditor(unittest.TestCase): if __name__ == '__main__': - gettext.install('swift', unicode=1) unittest.main() From c97e7b60546b0b2924518906ee9cbfcef147dc33 Mon Sep 17 00:00:00 2001 From: Russ Nelson Date: Fri, 31 Dec 2010 12:34:22 -0500 Subject: [PATCH 059/199] Refactor the command and help structure so it uses Python's docstrings. Also put the commands into a class so they can be listed and their docstrings automatically printed. --- bin/swift-ring-builder | 304 +++++++++++++++++++---------------------- 1 file changed, 139 insertions(+), 165 deletions(-) diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index 50353df256..eac587c364 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -26,14 +26,32 @@ from swift.common.ring import RingBuilder MAJOR_VERSION = 1 -MINOR_VERSION = 1 +MINOR_VERSION = 2 EXIT_RING_CHANGED = 0 EXIT_RING_UNCHANGED = 1 EXIT_ERROR = 2 def search_devs(builder, search_value): - # dz-:/_ + """The can be of the form: + dz-:/_ + Any part is optional, but you must include at least one part. 
+ Examples: + d74 Matches the device id 74 + z1 Matches devices in zone 1 + z1-1.2.3.4 Matches devices in zone 1 with the ip 1.2.3.4 + 1.2.3.4 Matches devices in any zone with the ip 1.2.3.4 + z1:5678 Matches devices in zone 1 using port 5678 + :5678 Matches devices that use port 5678 + /sdb1 Matches devices with the device name sdb1 + _shiny Matches devices with shiny in the meta data + _"snet: 5.6.7.8" Matches devices with snet: 5.6.7.8 in the meta data + Most specific example: + d74z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8" + Nerd explanation: + All items require their single character prefix except the ip, in which + case the - is optional unless the device id or zone is also included.""" + orig_search_value = search_value match = [] if search_value.startswith('d'): @@ -89,142 +107,20 @@ def search_devs(builder, search_value): return devs -SEARCH_VALUE_HELP = ''' - The can be of the form: - dz-:/_ - Any part is optional, but you must include at least one part. - Examples: - d74 Matches the device id 74 - z1 Matches devices in zone 1 - z1-1.2.3.4 Matches devices in zone 1 with the ip 1.2.3.4 - 1.2.3.4 Matches devices in any zone with the ip 1.2.3.4 - z1:5678 Matches devices in zone 1 using port 5678 - :5678 Matches devices that use port 5678 - /sdb1 Matches devices with the device name sdb1 - _shiny Matches devices with shiny in the meta data - _"snet: 5.6.7.8" Matches devices with snet: 5.6.7.8 in the meta data - Most specific example: - d74z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8" - Nerd explanation: - All items require their single character prefix except the ip, in which - case the - is optional unless the device id or zone is also included. -'''.strip() +class commands: -CREATE_HELP = ''' -swift-ring-builder create + def unknown(): + print 'Unknown command: %s' % argv[2] + exit(EXIT_ERROR) + + def create(): + """swift-ring-builder create Creates with 2^ partitions and . is number of hours to restrict moving a partition more - than once. 
-'''.strip() + than once.""" -SEARCH_HELP = ''' -swift-ring-builder search - Shows information about matching devices. - - %(SEARCH_VALUE_HELP)s -'''.strip() % globals() - -ADD_HELP = ''' -swift-ring-builder add z-:/_ - Adds a device to the ring with the given information. No partitions will be - assigned to the new device until after running 'rebalance'. This is so you - can make multiple device changes and rebalance them all just once. -'''.strip() - -SET_WEIGHT_HELP = ''' -swift-ring-builder set_weight - Resets the device's weight. No partitions will be reassigned to or from the - device until after running 'rebalance'. This is so you can make multiple - device changes and rebalance them all just once. - - %(SEARCH_VALUE_HELP)s -'''.strip() % globals() - -SET_INFO_HELP = ''' -swift-ring-builder set_info - :/_ - Resets the device's information. This information isn't used to assign - partitions, so you can use 'write_ring' afterward to rewrite the current - ring with the newer device information. Any of the parts are optional - in the final :/_ parameter; just give what you - want to change. For instance set_info d74 _"snet: 5.6.7.8" would just - update the meta data for device id 74. - - %(SEARCH_VALUE_HELP)s -'''.strip() % globals() - -REMOVE_HELP = ''' -swift-ring-builder remove - Removes the device(s) from the ring. This should normally just be used for - a device that has failed. For a device you wish to decommission, it's best - to set its weight to 0, wait for it to drain all its data, then use this - remove command. This will not take effect until after running 'rebalance'. - This is so you can make multiple device changes and rebalance them all just - once. - - %(SEARCH_VALUE_HELP)s -'''.strip() % globals() - -SET_MIN_PART_HOURS_HELP = ''' -swift-ring-builder set_min_part_hours - Changes the to the given . This should be set to - however long a full replication/update cycle takes. We're working on a way - to determine this more easily than scanning logs. 
-'''.strip() - - -if __name__ == '__main__': - if len(argv) < 2: - print ''' -swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s - -%(CREATE_HELP)s - -swift-ring-builder - Shows information about the ring and the devices within. - -%(SEARCH_HELP)s - -%(ADD_HELP)s - -%(SET_WEIGHT_HELP)s - -%(SET_INFO_HELP)s - -%(REMOVE_HELP)s - -swift-ring-builder rebalance - Attempts to rebalance the ring by reassigning partitions that haven't been - recently reassigned. - -swift-ring-builder validate - Just runs the validation routines on the ring. - -swift-ring-builder write_ring - Just rewrites the distributable ring file. This is done automatically after - a successful rebalance, so really this is only useful after one or more - 'set_info' calls when no rebalance is needed but you want to send out the - new device information. - -%(SET_MIN_PART_HOURS_HELP)s - -Quick list: create search add set_weight set_info remove rebalance write_ring - set_min_part_hours -Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error -'''.strip() % globals() - exit(EXIT_RING_UNCHANGED) - - if exists(argv[1]): - builder = pickle.load(open(argv[1], 'rb')) - for dev in builder.devs: - if dev and 'meta' not in dev: - dev['meta'] = '' - elif len(argv) < 3 or argv[2] != 'create': - print 'Ring Builder file does not exist: %s' % argv[1] - exit(EXIT_ERROR) - elif argv[2] == 'create': if len(argv) < 6: - print CREATE_HELP + print commands.create.__doc__ exit(EXIT_RING_UNCHANGED) builder = RingBuilder(int(argv[3]), int(argv[4]), int(argv[5])) backup_dir = pathjoin(dirname(argv[1]), 'backups') @@ -238,19 +134,9 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_CHANGED) - backup_dir = pathjoin(dirname(argv[1]), 'backups') - try: - mkdir(backup_dir) - except OSError, err: - if err.errno != EEXIST: - raise - - ring_file = argv[1] - if ring_file.endswith('.builder'): - ring_file = ring_file[:-len('.builder')] - 
ring_file += '.ring.gz' - - if len(argv) == 2: + def default(): + """swift-ring-builder + Shows information about the ring and the devices within.""" print '%s, build version %d' % (argv[1], builder.version) zones = 0 balance = 0 @@ -284,9 +170,12 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error dev['meta']) exit(EXIT_RING_UNCHANGED) - if argv[2] == 'search': + def search(): + """swift-ring-builder search + Shows information about matching devices.""" + if len(argv) < 4: - print SEARCH_HELP + print commands.search.__doc__ + "\n\n" + search_devs.__doc__ exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) if not devs: @@ -311,10 +200,14 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error dev['meta']) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'add': - # add z-:/_ + def add(): + """swift-ring-builder add z-:/_ + Adds a device to the ring with the given information. No partitions will be + assigned to the new device until after running 'rebalance'. This is so you + can make multiple device changes and rebalance them all just once.""" + if len(argv) < 5: - print ADD_HELP + print commands.add.__doc__ exit(EXIT_RING_UNCHANGED) if not argv[3].startswith('z'): @@ -379,9 +272,14 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'set_weight': + def set_weight(): + """swift-ring-builder set_weight + Resets the device's weight. No partitions will be reassigned to or from the + device until after running 'rebalance'. 
This is so you can make multiple + device changes and rebalance them all just once.""" + if len(argv) != 5: - print SET_WEIGHT_HELP + print commands.set_weight.__doc__ + "\n\n" + search_devs.__doc__ exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) weight = float(argv[4]) @@ -404,9 +302,17 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'set_info': + def set_info(): + """swift-ring-builder set_info + :/_ + Resets the device's information. This information isn't used to assign + partitions, so you can use 'write_ring' afterward to rewrite the current + ring with the newer device information. Any of the parts are optional + in the final :/_ parameter; just give what you + want to change. For instance set_info d74 _"snet: 5.6.7.8" would just + update the meta data for device id 74.""" if len(argv) != 5: - print SET_INFO_HELP + print commands.set_info.__doc__ + "\n\n" + search_devs.__doc__ exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) change_value = argv[4] @@ -471,9 +377,16 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'remove': + def remove(): + """swift-ring-builder remove + Removes the device(s) from the ring. This should normally just be used for + a device that has failed. For a device you wish to decommission, it's best + to set its weight to 0, wait for it to drain all its data, then use this + remove command. This will not take effect until after running 'rebalance'. 
+ This is so you can make multiple device changes and rebalance them all just + once.""" if len(argv) < 4: - print REMOVE_HELP + print commands.remove.__doc__ + "\n\n" + search_devs.__doc__ exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) if not devs: @@ -495,7 +408,11 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'rebalance': + def rebalance(): + """swift-ring-builder rebalance + Attempts to rebalance the ring by reassigning partitions that haven't been + recently reassigned.""" + devs_changed = builder.devs_changed last_balance = builder.get_balance() parts, balance = builder.rebalance() @@ -528,11 +445,20 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_CHANGED) - elif argv[2] == 'validate': + def validate(): + """swift-ring-builder validate + Just runs the validation routines on the ring.""" + builder.validate() exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'write_ring': + def write_ring(): + """swift-ring-builder write_ring + Just rewrites the distributable ring file. 
This is done automatically after + a successful rebalance, so really this is only useful after one or more + 'set_info' calls when no rebalance is needed but you want to send out the + new device information.""" + ring_data = builder.get_ring() if not ring_data._replica2part2dev_id: if ring_data.devs: @@ -545,14 +471,19 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(ring_data, GzipFile(ring_file, 'wb'), protocol=2) exit(EXIT_RING_CHANGED) - elif argv[2] == 'pretend_min_part_hours_passed': + def pretend_min_part_hours_passed(): builder.pretend_min_part_hours_passed() pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - elif argv[2] == 'set_min_part_hours': + def set_min_part_hours(): + """swift-ring-builder set_min_part_hours + Changes the to the given . This should be set to + however long a full replication/update cycle takes. We're working on a way + to determine this more easily than scanning logs.""" + if len(argv) < 4: - print SET_MIN_PART_HOURS_HELP + print commands.set_min_part_hours.__doc__ exit(EXIT_RING_UNCHANGED) builder.change_min_part_hours(int(argv[3])) print 'The minimum number of hours before a partition can be ' \ @@ -560,5 +491,48 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) - print 'Unknown command: %s' % argv[2] - exit(EXIT_ERROR) + +if __name__ == '__main__': + if len(argv) < 2: + print """swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s + +Quick list: create search add set_weight set_info remove rebalance write_ring + set_min_part_hours +Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error +""" % globals() + cmds = commands.__dict__.keys() + cmds.sort() + for cmd in cmds: + doc = commands.__dict__[cmd].__doc__ + if doc is not None: print doc,"\n" + print search_devs.__doc__ + + exit(EXIT_RING_UNCHANGED) + + if exists(argv[1]): + builder = pickle.load(open(argv[1], 
'rb')) + for dev in builder.devs: + if dev and 'meta' not in dev: + dev['meta'] = '' + elif len(argv) < 3 or argv[2] != 'create': + print 'Ring Builder file does not exist: %s' % argv[1] + exit(EXIT_ERROR) + + backup_dir = pathjoin(dirname(argv[1]), 'backups') + try: + mkdir(backup_dir) + except OSError, err: + if err.errno != EEXIST: + raise + + ring_file = argv[1] + if ring_file.endswith('.builder'): + ring_file = ring_file[:-len('.builder')] + ring_file += '.ring.gz' + + if len(argv) == 2: + command = "default" + else: + command = argv[2] + commands.__dict__.get(command, commands.unknown)() + From 2308ecce7a1223dce7ede2aabc2ea061fd13163b Mon Sep 17 00:00:00 2001 From: David Goetz Date: Mon, 3 Jan 2011 10:25:08 -0800 Subject: [PATCH 060/199] fix i18n logs --- swift/obj/auditor.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 1670ddfbba..7d0316fed1 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -73,14 +73,14 @@ class ObjectAuditor(Daemon): self.logger.info(_( 'Since %(start_time)s: Locally: %(passes)d passed audit, ' '%(quars)d quarantined, %(errors)d errors ' - 'files/sec: %(frate).2f , bytes/sec: %(brate).2f' % { + 'files/sec: %(frate).2f , bytes/sec: %(brate).2f') % { 'start_time': time.ctime(reported), 'passes': self.passes, 'quars': self.quarantines, 'errors': self.errors, 'frate': self.passes / (time.time() - reported), 'brate': self.bytes_processed / - (time.time() - reported)})) + (time.time() - reported)}) reported = time.time() self.passes = 0 self.quarantines = 0 @@ -90,11 +90,11 @@ class ObjectAuditor(Daemon): self.logger.info(_( 'Object audit "%(mode)s" mode completed: %(elapsed).02fs. 
' 'Total files/sec: %(frate).2f , ' - 'Total bytes/sec: %(brate).2f ' % { + 'Total bytes/sec: %(brate).2f ') % { 'mode': mode, 'elapsed': elapsed, 'frate': self.total_files_processed / elapsed, - 'brate': self.total_bytes_processed / elapsed})) + 'brate': self.total_bytes_processed / elapsed}) def object_audit(self, path, device, partition): """ From 6e555802bbfa21c9e138211d4ba26cf6fb86feff Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori Date: Tue, 4 Jan 2011 18:18:58 +0900 Subject: [PATCH 061/199] add S3 API support --- setup.py | 1 + swift/auth/server.py | 26 ++- swift/common/middleware/auth.py | 16 +- swift/common/middleware/s3.py | 296 ++++++++++++++++++++++++++++++++ 4 files changed, 334 insertions(+), 5 deletions(-) create mode 100644 swift/common/middleware/s3.py diff --git a/setup.py b/setup.py index f72517f0de..b29d02e54d 100644 --- a/setup.py +++ b/setup.py @@ -96,6 +96,7 @@ setup( 'cname_lookup=swift.common.middleware.cname_lookup:filter_factory', 'catch_errors=swift.common.middleware.catch_errors:filter_factory', 'domain_remap=swift.common.middleware.domain_remap:filter_factory', + 's3=swift.common.middleware.s3:filter_factory', ], }, ) diff --git a/swift/auth/server.py b/swift/auth/server.py index 3e1ef881f4..4ddf2e4647 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -21,6 +21,9 @@ from time import gmtime, strftime, time from urllib import unquote, quote from uuid import uuid4 from urlparse import urlparse +from hashlib import md5, sha1 +import hmac +import base64 import sqlite3 from webob import Request, Response @@ -238,6 +241,24 @@ YOU HAVE A FEW OPTIONS: self.conn = get_db_connection(self.db_file) raise err + def validate_s3_sign(self, request, token): + cfaccount, sign = request.headers['Authorization'].split(' ')[-1].split(':') + msg = base64.urlsafe_b64decode(unquote(token)) + rv = False + with self.get_conn() as conn: + row = conn.execute(''' + SELECT account, user, password FROM account + WHERE cfaccount = ?''', + 
(cfaccount,)).fetchone() + rv = (84000, row[0], row[1], cfaccount) + + if rv: + s = base64.encodestring(hmac.new(row[2], msg, sha1).digest()).strip() + self.logger.info("orig %s, calc %s" % (sign, s)) + if sign != s: + rv = False + return rv + def purge_old_tokens(self): """ Removes tokens that have expired from the auth server's database. This @@ -418,7 +439,10 @@ YOU HAVE A FEW OPTIONS: except ValueError: return HTTPBadRequest() # Retrieves (TTL, account, user, cfaccount) if valid, False otherwise - validation = self.validate_token(token) + if 'Authorization' in request.headers: + validation = self.validate_s3_sign(request, token) + else: + validation = self.validate_token(token) if not validation: return HTTPNotFound() groups = ['%s:%s' % (validation[1], validation[2]), validation[1]] diff --git a/swift/common/middleware/auth.py b/swift/common/middleware/auth.py index 1278a4a67a..66ff9b7ab4 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -53,12 +53,13 @@ class DevAuth(object): requests, acts as the fallback auth service when no other auth middleware overrides it. """ + s3 = env.get('HTTP_AUTHORIZATION') token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) - if token and token.startswith(self.reseller_prefix): + if s3 or (token and token.startswith(self.reseller_prefix)): # Note: Empty reseller_prefix will match all tokens. # Attempt to auth my token with my auth server groups = \ - self.get_groups(token, memcache_client=cache_from_env(env)) + self.get_groups(env, token, memcache_client=cache_from_env(env)) if groups: env['REMOTE_USER'] = groups user = groups and groups.split(',', 1)[0] or '' @@ -103,7 +104,7 @@ class DevAuth(object): env['swift.clean_acl'] = clean_acl return self.app(env, start_response) - def get_groups(self, token, memcache_client=None): + def get_groups(self, env, token, memcache_client=None): """ Get groups for the given token. 
@@ -128,10 +129,17 @@ class DevAuth(object): start, expiration, groups = cached_auth_data if time() - start > expiration: groups = None + + headers = {} + if env.get('HTTP_AUTHORIZATION'): + groups = None + if env.get('HTTP_AUTHORIZATION'): + headers["Authorization"] = env.get('HTTP_AUTHORIZATION') + if not groups: with Timeout(self.timeout): conn = http_connect(self.auth_host, self.auth_port, 'GET', - '/token/%s' % token, ssl=self.ssl) + '/token/%s' % token, headers, ssl=self.ssl) resp = conn.getresponse() resp.read() conn.close() diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/s3.py new file mode 100644 index 0000000000..54751c3803 --- /dev/null +++ b/swift/common/middleware/s3.py @@ -0,0 +1,296 @@ +# Copyright (c) 2010 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from webob import Request, Response +from webob.exc import HTTPNotFound +from simplejson import loads +from swift.common.utils import split_path +from hashlib import md5, sha1 +from urllib import unquote, quote +import rfc822 +import hmac +import base64 +import errno + +def get_err_response(code): + error_table = {'AccessDenied': + (403, 'Access denied'), + 'BucketAlreadyExists': + (409, 'The requested bucket name is not available'), + 'BucketNotEmpty': + (409, 'The bucket you tried to delete is not empty'), + 'InvalidArgument': + (400, 'Invalid Argument'), + 'InvalidBucketName': + (400, 'The specified bucket is not valid'), + 'InvalidURI': + (400, 'Could not parse the specified URI'), + 'NoSuchBucket': + (404, 'The specified bucket does not exist'), + 'SignatureDoesNotMatch': + (403, 'The calculated request signature does not match your provided one'), + 'NoSuchKey': + (404, 'The resource you requested does not exist')} + + resp = Response(content_type='text/xml') + resp.status = error_table[code][0] + resp.body = error_table[code][1] + resp.body = """\r\n\r\n %s\r\n %s\r\n\r\n""" % (code, error_table[code][1]) + return resp + +class Controller(object): + def __init__(self, app): + self.app = app + +class ServiceController(Controller): + def __init__(self, env, app, account_name, token, **kwargs): + Controller.__init__(self, app) + env['HTTP_X_AUTH_TOKEN'] = token + env['PATH_INFO'] = '/v1/%s' % account_name + + def GET(self, env, start_response): + req = Request(env) + env['QUERY_STRING'] = 'format=json' + resp = self.app(env, start_response) + try: + containers = loads(''.join(list(resp))) + except: + return get_err_response('AccessDenied') + + resp = Response(content_type='text/xml') + resp.status = 200 + # we don't keep the creation time of a backet (s3cmd doesn't + # work without that) so we use something bogus. 
+ resp.body = """%s""" % ("".join(['%s2009-02-03T16:45:09.000Z' % i['name'] for i in containers])) + return resp + +class BucketController(Controller): + def __init__(self, env, app, account_name, token, container_name, **kwargs): + Controller.__init__(self, app) + self.container_name = unquote(container_name) + env['HTTP_X_AUTH_TOKEN'] = token + env['PATH_INFO'] = '/v1/%s/%s' % (account_name, container_name) + + def GET(self, env, start_response): + req = Request(env) + env['QUERY_STRING'] = 'format=json' + resp = self.app(env, start_response) + try: + objects = loads(''.join(list(resp))) + except: + status = int(resp[0].split()[0]) + resp = Response(content_type='text/xml') + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('InvalidBucketName') + else: + print resp + return get_err_response('InvalidURI') + + resp = Response(content_type='text/xml') + resp.status = 200 + resp.body = """%s%s""" % (self.container_name, "".join(['%s%s%s%sSTANDARD' % (i['name'], i['last_modified'], i['hash'], i['bytes']) for i in objects])) + return resp + + def PUT(self, env, start_response): + req = Request(env) + resp = self.app(env, start_response) + status = int(resp[0].split()[0]) + if status == 401: + return get_err_response('AccessDenied') + elif status == 202: + return get_err_response('BucketAlreadyExists') + else: + print resp + + resp = Response() + resp.headers.add('Location', self.container_name) + resp.status = 200 + return resp + + def DELETE(self, env, start_response): + req = Request(env) + resp = self.app(env, start_response) + try: + status = int(resp[0].split()[0]) + except: + resp = Response() + resp.status = 204 + return resp + + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('InvalidBucketName') + elif status == 409: + return get_err_response('BucketNotEmpty') + else: + print resp + return get_err_response('InvalidURI') + +class 
ObjectController(Controller): + def __init__(self, env, app, account_name, token, container_name, object_name, **kwargs): + Controller.__init__(self, app) + self.container_name = unquote(container_name) + env['HTTP_X_AUTH_TOKEN'] = token + env['PATH_INFO'] = '/v1/%s/%s/%s' % (account_name, container_name, object_name) + + def GETorHEAD(self, env, start_response): + # there should be better ways. + # TODO: + # - we can't handle various errors properly (autorization, etc) + # - hide GETorHEAD + req = Request(env) + method = req.method + req.method = 'GET' + data = self.app(env, start_response) + if type(data) == list: + status = int(data[0][data[0].find('') + 7:].split(' ')[0]) + if status == 404: + return get_err_response('NoSuchKey') + else: + return get_err_response('AccessDenied') + + if method == 'GET': + resp = Response(content_type='text/xml') + resp.body = ''.join(list(data)) + resp.status = 200 + else: + resp = Response() + etag = md5() + etag.update(''.join(list(data))) + etag = etag.hexdigest() + resp.etag = etag + resp.status = 200 + return resp + + def HEAD(self, env, start_response): + return self.GETorHEAD(env, start_response) + + def GET(self, env, start_response): + return self.GETorHEAD(env, start_response) + + def PUT(self, env, start_response): + # TODO: how can we get etag from the response header? + req = Request(env) + etag = md5() + etag.update(req.body) + etag = etag.hexdigest() + resp = self.app(env, start_response) + status = int(resp[0].split()[0]) + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('InvalidBucketName') + elif status == 201: + resp = Response() + resp.etag = etag + resp.status = 200 + return resp + else: + print resp + return get_err_response('InvalidURI') + + def DELETE(self, env, start_response): + # TODO: how can we get the response result? 
+ req = Request(env) + resp = self.app(env, start_response) + try: + status = int(resp[0].split()[0]) + except: + resp = Response() + resp.status = 204 + return resp + + print resp + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('NoSuchKey') + else: + return get_err_response('AccessDenied') + +class Swift3Middleware(object): + def __init__(self, app, conf, *args, **kwargs): + self.app = app + + def get_controller(self, path): + container, obj = split_path(path, 0, 2) + d = dict(container_name=container, object_name=obj) + + if container and obj: + return ObjectController, d + elif container: + return BucketController, d + return ServiceController, d + + def get_account_info(self, env, req): + if req.headers.get("content-md5"): + md5 = req.headers.get("content-md5") + else: + md5 = "" + + if req.headers.get("content-type"): + content_type = req.headers.get("content-type") + else: + content_type = "" + + if req.headers.get("date"): + date = req.headers.get("date") + else: + date = "" + + h = req.method + "\n" + md5 + "\n" + content_type + "\n" + date + "\n" + for header in req.headers: + if header.startswith("X-Amz-"): + h += header.lower()+":"+str(req.headers[header])+"\n" + h += req.path + try: + account, _ = req.headers['Authorization'].split(' ')[-1].split(':') + except: + return None, None + token = base64.urlsafe_b64encode(h) + return account, token + + def __call__(self, env, start_response): + req = Request(env) +# print req.method +# print req.path + if not'Authorization' in req.headers: + return self.app(env, start_response) + try: + controller, path_parts = self.get_controller(req.path) + except ValueError: + return get_err_response('InvalidURI')(env, start_response) + + account_name, token = self.get_account_info(env, req) + if not account_name: + return get_err_response('InvalidArgument')(env, start_response) + + controller = controller(env, self.app, account_name, token, **path_parts) + if 
hasattr(controller, req.method): + res = getattr(controller, req.method)(env, start_response) + else: + return get_err_response('InvalidURI')(env, start_response) + + return res(env, start_response) + +def filter_factory(global_conf, **local_conf): + conf = global_conf.copy() + conf.update(local_conf) + def swift3_filter(app): + return Swift3Middleware(app, conf) + return swift3_filter From 68cc8fba9d3ba34a8183ada895e7dd3270714c3d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Tue, 4 Jan 2011 12:04:18 -0800 Subject: [PATCH 062/199] Fix a couple bugs; rework the rework a bit more; PEP8 --- bin/swift-ring-builder | 167 ++++++++++++++++++++++++----------------- 1 file changed, 100 insertions(+), 67 deletions(-) diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index eac587c364..af4a2331f3 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -20,6 +20,7 @@ from gzip import GzipFile from os import mkdir from os.path import basename, dirname, exists, join as pathjoin from sys import argv, exit +from textwrap import wrap from time import time from swift.common.ring import RingBuilder @@ -29,11 +30,12 @@ MAJOR_VERSION = 1 MINOR_VERSION = 2 EXIT_RING_CHANGED = 0 EXIT_RING_UNCHANGED = 1 -EXIT_ERROR = 2 +EXIT_ERROR = 2 def search_devs(builder, search_value): - """The <search-value> can be of the form: + """ +The <search-value> can be of the form: d<device_id>z<zone>-<ip>:<port>/<device_name>_<meta> Any part is optional, but you must include at least one part. Examples: @@ -50,8 +52,8 @@ def search_devs(builder, search_value): d74z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8" Nerd explanation: All items require their single character prefix except the ip, in which - case the - is optional unless the device id or zone is also included.""" - + case the - is optional unless the device id or zone is also included. 
+ """ orig_search_value = search_value match = [] if search_value.startswith('d'): @@ -90,7 +92,8 @@ def search_devs(builder, search_value): match.append(('meta', search_value[1:])) search_value = '' if search_value: - raise ValueError('Invalid <search-value>: %s' % repr(orig_search_value)) + raise ValueError('Invalid <search-value>: %s' % + repr(orig_search_value)) devs = [] for dev in builder.devs: if not dev: @@ -107,20 +110,22 @@ def search_devs(builder, search_value): return devs -class commands: +class Commands: def unknown(): - print 'Unknown command: %s' % argv[2] - exit(EXIT_ERROR) + print 'Unknown command: %s' % argv[2] + exit(EXIT_ERROR) def create(): - """swift-ring-builder <builder_file> create <part_power> <replicas> <min_part_hours> + """ +swift-ring-builder <builder_file> create <part_power> <replicas> + <min_part_hours> Creates <builder_file> with 2^<part_power> partitions and <replicas>. <min_part_hours> is number of hours to restrict moving a partition more - than once.""" - + than once. + """ if len(argv) < 6: - print commands.create.__doc__ + print Commands.create.__doc__.strip() exit(EXIT_RING_UNCHANGED) builder = RingBuilder(int(argv[3]), int(argv[4]), int(argv[5])) backup_dir = pathjoin(dirname(argv[1]), 'backups') @@ -135,8 +140,10 @@ class commands: exit(EXIT_RING_CHANGED) def default(): - """swift-ring-builder <builder_file> - Shows information about the ring and the devices within.""" + """ +swift-ring-builder <builder_file> + Shows information about the ring and the devices within. + """ print '%s, build version %d' % (argv[1], builder.version) zones = 0 balance = 0 @@ -171,11 +178,14 @@ class commands: exit(EXIT_RING_UNCHANGED) def search(): - """swift-ring-builder <builder_file> search <search-value> - Shows information about matching devices.""" - + """ +swift-ring-builder <builder_file> search <search-value> + Shows information about matching devices. 
+ """ if len(argv) < 4: - print commands.search.__doc__ + "\n\n" + search_devs.__doc__ + print Commands.search.__doc__.strip() + print + print search_devs.__doc__.strip() exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) if not devs: @@ -200,14 +210,16 @@ class commands: dev['meta']) exit(EXIT_RING_UNCHANGED) - def add(): - """swift-ring-builder <builder_file> add z<zone>-<ip>:<port>/<device_name>_<meta> <wght> + def add(): + """ +swift-ring-builder <builder_file> add z<zone>-<ip>:<port>/<device_name>_<meta> + <wght> Adds a device to the ring with the given information. No partitions will be assigned to the new device until after running 'rebalance'. This is so you - can make multiple device changes and rebalance them all just once.""" - + can make multiple device changes and rebalance them all just once. + """ if len(argv) < 5: - print commands.add.__doc__ + print Commands.add.__doc__.strip() exit(EXIT_RING_UNCHANGED) if not argv[3].startswith('z'): @@ -273,13 +285,16 @@ class commands: exit(EXIT_RING_UNCHANGED) def set_weight(): - """swift-ring-builder <builder_file> set_weight <search-value> <weight> + """ +swift-ring-builder <builder_file> set_weight <search-value> <weight> Resets the device's weight. No partitions will be reassigned to or from the device until after running 'rebalance'. This is so you can make multiple - device changes and rebalance them all just once.""" - + device changes and rebalance them all just once. 
+ """ if len(argv) != 5: - print commands.set_weight.__doc__ + "\n\n" + search_devs.__doc__ + print Commands.set_weight.__doc__.strip() + print + print search_devs.__doc__.strip() exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) weight = float(argv[4]) @@ -303,16 +318,20 @@ class commands: exit(EXIT_RING_UNCHANGED) def set_info(): - """swift-ring-builder <builder_file> set_info <search-value> - <ip>:<port>/<device_name>_<meta> + """ +swift-ring-builder <builder_file> set_info <search-value> + <ip>:<port>/<device_name>_<meta> Resets the device's information. This information isn't used to assign partitions, so you can use 'write_ring' afterward to rewrite the current ring with the newer device information. Any of the parts are optional in the final <ip>:<port>/<device_name>_<meta> parameter; just give what you want to change. For instance set_info d74 _"snet: 5.6.7.8" would just - update the meta data for device id 74.""" + update the meta data for device id 74. + """ if len(argv) != 5: - print commands.set_info.__doc__ + "\n\n" + search_devs.__doc__ + print Commands.set_info.__doc__.strip() + print + print search_devs.__doc__.strip() exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) change_value = argv[4] @@ -378,15 +397,19 @@ class commands: exit(EXIT_RING_UNCHANGED) def remove(): - """swift-ring-builder <builder_file> remove <search-value> + """ +swift-ring-builder <builder_file> remove <search-value> Removes the device(s) from the ring. This should normally just be used for a device that has failed. For a device you wish to decommission, it's best to set its weight to 0, wait for it to drain all its data, then use this remove command. This will not take effect until after running 'rebalance'. This is so you can make multiple device changes and rebalance them all just - once.""" + once. 
+ """ if len(argv) < 4: - print commands.remove.__doc__ + "\n\n" + search_devs.__doc__ + print Commands.remove.__doc__.strip() + print + print search_devs.__doc__.strip() exit(EXIT_RING_UNCHANGED) devs = search_devs(builder, argv[3]) if not devs: @@ -404,15 +427,17 @@ class commands: for dev in devs: builder.remove_dev(dev['id']) print 'd%(id)sz%(zone)s-%(ip)s:%(port)s/%(device)s_"%(meta)s" ' \ - 'marked for removal and will be removed next rebalance.' % dev + 'marked for removal and will be removed next rebalance.' \ + % dev pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) def rebalance(): - """swift-ring-builder <builder_file> rebalance + """ +swift-ring-builder <builder_file> rebalance Attempts to rebalance the ring by reassigning partitions that haven't been - recently reassigned.""" - + recently reassigned. + """ devs_changed = builder.devs_changed last_balance = builder.get_balance() parts, balance = builder.rebalance() @@ -446,25 +471,29 @@ class commands: exit(EXIT_RING_CHANGED) def validate(): - """swift-ring-builder <builder_file> validate - Just runs the validation routines on the ring.""" - + """ +swift-ring-builder <builder_file> validate + Just runs the validation routines on the ring. + """ builder.validate() exit(EXIT_RING_UNCHANGED) def write_ring(): - """swift-ring-builder <builder_file> write_ring + """ +swift-ring-builder <builder_file> write_ring Just rewrites the distributable ring file. This is done automatically after a successful rebalance, so really this is only useful after one or more 'set_info' calls when no rebalance is needed but you want to send out the - new device information.""" - + new device information. + """ ring_data = builder.get_ring() if not ring_data._replica2part2dev_id: - if ring_data.devs: - print 'Warning: Writing a ring with no partition assignments but with devices; did you forget to run "rebalance"?' 
- else: - print 'Warning: Writing an empty ring' + if ring_data.devs: + print 'Warning: Writing a ring with no partition ' \ + 'assignments but with devices; did you forget to run ' \ + '"rebalance"?' + else: + print 'Warning: Writing an empty ring' pickle.dump(ring_data, GzipFile(pathjoin(backup_dir, '%d.' % time() + basename(ring_file)), 'wb'), protocol=2) @@ -477,13 +506,14 @@ class commands: exit(EXIT_RING_UNCHANGED) def set_min_part_hours(): - """swift-ring-builder <builder_file> set_min_part_hours <hours> + """ +swift-ring-builder <builder_file> set_min_part_hours <hours> Changes the <min_part_hours> to the given <hours>. This should be set to however long a full replication/update cycle takes. We're working on a way - to determine this more easily than scanning logs.""" - + to determine this more easily than scanning logs. + """ if len(argv) < 4: - print commands.set_min_part_hours.__doc__ + print Commands.set_min_part_hours.__doc__.strip() exit(EXIT_RING_UNCHANGED) builder.change_min_part_hours(int(argv[3])) print 'The minimum number of hours before a partition can be ' \ @@ -494,19 +524,23 @@ class commands: if __name__ == '__main__': if len(argv) < 2: - print """swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s - -Quick list: create search add set_weight set_info remove rebalance write_ring - set_min_part_hours -Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error -""" % globals() - cmds = commands.__dict__.keys() + print "swift-ring-builder %(MAJOR_VERSION)s.%(MINOR_VERSION)s\n" % \ + globals() + print Commands.default.__doc__.strip() + print + cmds = [c for c, f in Commands.__dict__.iteritems() + if f.__doc__ and c[0] != '_' and c != 'default'] cmds.sort() for cmd in cmds: - doc = commands.__dict__[cmd].__doc__ - if doc is not None: print doc,"\n" - print search_devs.__doc__ - + print Commands.__dict__[cmd].__doc__.strip() + print + print search_devs.__doc__.strip() + print + for line in wrap(' '.join(cmds), 79, initial_indent='Quick 
list: ', + subsequent_indent=' '): + print line + print 'Exit codes: 0 = ring changed, 1 = ring did not change, ' \ + '2 = error' exit(EXIT_RING_UNCHANGED) if exists(argv[1]): @@ -531,8 +565,7 @@ Exit codes: 0 = ring changed, 1 = ring did not change, 2 = error ring_file += '.ring.gz' if len(argv) == 2: - command = "default" + command = "default" else: - command = argv[2] - commands.__dict__.get(command, commands.unknown)() - + command = argv[2] + Commands.__dict__.get(command, Commands.unknown)() From a71a1a32a994be896c038b370a877378f30d735d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Tue, 4 Jan 2011 12:51:57 -0800 Subject: [PATCH 063/199] Removed extraneous print --- swift/common/middleware/swauth.py | 1 - 1 file changed, 1 deletion(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index faf749464a..ee81bce3b6 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -1177,7 +1177,6 @@ class Swauth(object): :param key: The key to validate for the user. :returns: True if the key is valid for the user, False if not. 
""" - print repr(user_detail) return user_detail and user_detail.get('auth') == 'plaintext:%s' % key def is_super_admin(self, req): From 97028e0b9c2523c9e35854957a8e49c544b1f0ba Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Tue, 4 Jan 2011 16:00:01 -0600 Subject: [PATCH 064/199] fixed missing NamedLogger import in bin/swift-bench, refactored LogAdapter.tnx_id so that it works with multiple calls to get_logger, fixed common.middleware.catch_errors to only call get_logger if it needs too, renamed x-cf-trans-id to x-swift-tnx-id --- bin/swift-bench | 8 ++-- swift/account/server.py | 6 +-- swift/common/middleware/catch_errors.py | 6 ++- swift/common/utils.py | 51 ++++++++++++++++++++----- swift/container/server.py | 6 +-- swift/obj/server.py | 8 ++-- swift/proxy/server.py | 24 ++++++------ test/unit/proxy/test_server.py | 2 +- 8 files changed, 74 insertions(+), 37 deletions(-) diff --git a/bin/swift-bench b/bin/swift-bench index ab332482cd..9c03dc3d3c 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -22,7 +22,7 @@ import uuid from optparse import OptionParser from swift.common.bench import BenchController -from swift.common.utils import readconf, NamedLogger +from swift.common.utils import readconf, LogAdapter, NamedFormatter # The defaults should be sufficient to run swift-bench on a SAIO CONF_DEFAULTS = { @@ -124,10 +124,10 @@ if __name__ == '__main__': 'critical': logging.CRITICAL}.get( options.log_level.lower(), logging.INFO)) loghandler = logging.StreamHandler() - logformat = logging.Formatter('%(asctime)s %(levelname)s %(message)s') - loghandler.setFormatter(logformat) logger.addHandler(loghandler) - logger = NamedLogger(logger, 'swift-bench') + logger = LogAdapter(logger) + logformat = NamedFormatter('swift-bench', logger, fmt='%(server)s %(asctime)s %(levelname)s %(message)s') + loghandler.setFormatter(logformat) controller = BenchController(logger, options) controller.run() diff --git a/swift/account/server.py 
b/swift/account/server.py index 53d604ce93..c782bdc951 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -86,7 +86,7 @@ class AccountController(object): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, part, account) if container: # put account container - if 'x-cf-trans-id' in req.headers: + if 'x-swift-txn-id' in req.headers: broker.pending_timeout = 3 if req.headers.get('x-account-override-deleted', 'no').lower() != \ 'yes' and broker.is_deleted(): @@ -296,7 +296,7 @@ class AccountController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) - self.logger.txn_id = req.headers.get('x-cf-trans-id', None) + self.logger.txn_id = req.headers.get('x-swift-txn-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -319,7 +319,7 @@ class AccountController(object): time.strftime('%d/%b/%Y:%H:%M:%S +0000', time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - req.headers.get('x-cf-trans-id', '-'), + req.headers.get('x-swift-txn-id', '-'), req.referer or '-', req.user_agent or '-', trans_time, additional_info) diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index 5fb8c33592..f1ab249b25 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -26,7 +26,11 @@ class CatchErrorMiddleware(object): def __init__(self, app, conf): self.app = app - self.logger = get_logger(conf) + try: + # if the application already has a logger we should use that one + self.logger = app.logger + except AttributeError: + self.logger = get_logger(conf) def __call__(self, env, start_response): try: diff --git a/swift/common/utils.py b/swift/common/utils.py index da71253e7b..78e8769bca 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -284,11 +284,15 @@ class LoggerFileObject(object): class 
LogAdapter(object): - """Cheesy version of the LoggerAdapter available in Python 3""" + """ + A Logger-like object which performs some reformatting on calls to + :meth:`exception`. Can be used to store a threadlocal transaction id. + """ + + _txn_id = threading.local() def __init__(self, logger): self.logger = logger - self._txn_id = threading.local() for proxied_method in ('debug', 'log', 'warn', 'warning', 'error', 'critical', 'info'): setattr(self, proxied_method, getattr(logger, proxied_method)) @@ -334,19 +338,45 @@ class NamedFormatter(logging.Formatter): - def __init__(self, server, logger): - logging.Formatter.__init__(self) + """ + NamedFormatter is used to add additional information to log messages. + Normally it will simply add the server name as an attribute on the + LogRecord and the default format string will include it at the + beginning of the log message. Additionally, if the transaction id is + available and not already included in the message, NamedFormatter will + add it. + + NamedFormatter may be initialized with a format string which makes use + of the standard LogRecord attributes. In addition the format string + may include the following mapping key: + + +----------------+---------------------------------------------+ + | Format | Description | + +================+=============================================+ + | %(server)s | Name of the swift server doing logging | + +----------------+---------------------------------------------+ + + :param server: the swift server name, a string. + :param logger: a Logger or :class:`LogAdapter` instance, additional + context may be pulled from attributes on this logger if + available. 
+ :param fmt: the format string used to construct the message, if none is + supplied it defaults to ``"%(server)s %(message)s"`` + """ + + def __init__(self, server, logger, + fmt="%(server)s %(message)s"): + logging.Formatter.__init__(self, fmt) self.server = server self.logger = logger def format(self, record): + record.server = self.server msg = logging.Formatter.format(self, record) if self.logger.txn_id and (record.levelno != logging.INFO or self.logger.txn_id not in msg): - return '%s %s (txn: %s)' % (self.server, msg, self.logger.txn_id) - else: - return '%s %s' % (self.server, msg) - + msg = "%s (txn: %s)" % (msg, self.logger.txn_id) + return msg def get_logger(conf, name=None, log_to_console=False): """ @@ -386,7 +416,10 @@ def get_logger(conf, name=None, log_to_console=False): root_logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) adapted_logger = LogAdapter(root_logger) - get_logger.handler.setFormatter(NamedFormatter(name, adapted_logger)) + formatter = NamedFormatter(name, adapted_logger) + get_logger.handler.setFormatter(formatter) + if hasattr(get_logger, 'console'): + get_logger.console.setFormatter(formatter) return adapted_logger diff --git a/swift/container/server.py b/swift/container/server.py index fc06194de6..58b09671ed 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -95,7 +95,7 @@ class ContainerController(object): 'x-delete-timestamp': info['delete_timestamp'], 'x-object-count': info['object_count'], 'x-bytes-used': info['bytes_used'], - 'x-cf-trans-id': req.headers.get('X-Cf-Trans-Id', '-')} + 'x-swift-txn-id': req.headers.get('x-swift-txn-id', '-')} if req.headers.get('x-account-override-deleted', 'no').lower() == \ 'yes': account_headers['x-account-override-deleted'] = 'yes' @@ -384,7 +384,7 @@ class ContainerController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) - self.logger.txn_id = req.headers.get('x-cf-trans-id', None) + 
self.logger.txn_id = req.headers.get('x-swift-txn-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -404,7 +404,7 @@ class ContainerController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - req.headers.get('x-cf-trans-id', '-'), + req.headers.get('x-swift-txn-id', '-'), req.referer or '-', req.user_agent or '-', trans_time) if req.method.upper() == 'REPLICATE': diff --git a/swift/obj/server.py b/swift/obj/server.py index 7c139d7775..280ddbf49a 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -409,7 +409,7 @@ class ObjectController(object): 'x-content-type': file.metadata['Content-Type'], 'x-timestamp': file.metadata['X-Timestamp'], 'x-etag': file.metadata['ETag'], - 'x-cf-trans-id': request.headers.get('x-cf-trans-id', '-')}, + 'x-swift-txn-id': request.headers.get('x-swift-txn-id', '-')}, device) resp = HTTPCreated(request=request, etag=etag) return resp @@ -531,7 +531,7 @@ class ObjectController(object): file.unlinkold(metadata['X-Timestamp']) self.container_update('DELETE', account, container, obj, request.headers, {'x-timestamp': metadata['X-Timestamp'], - 'x-cf-trans-id': request.headers.get('x-cf-trans-id', '-')}, + 'x-swift-txn-id': request.headers.get('x-swift-txn-id', '-')}, device) resp = response_class(request=request) return resp @@ -562,7 +562,7 @@ class ObjectController(object): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() req = Request(env) - self.logger.txn_id = req.headers.get('x-cf-trans-id', None) + self.logger.txn_id = req.headers.get('x-swift-txn-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -583,7 +583,7 @@ class ObjectController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', req.referer or '-', - req.headers.get('x-cf-trans-id', '-'), + req.headers.get('x-swift-txn-id', '-'), 
req.user_agent or '-', trans_time) if req.method == 'REPLICATE': diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 1cde24cfe6..04187cc82b 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -356,7 +356,7 @@ class Controller(object): result_code = 0 attempts_left = self.app.account_ring.replica_count path = '/%s' % account - headers = {'x-cf-trans-id': self.trans_id} + headers = {'x-swift-txn-id': self.trans_id} for node in self.iter_nodes(partition, nodes, self.app.account_ring): if self.error_limited(node): continue @@ -430,7 +430,7 @@ class Controller(object): write_acl = None container_size = None attempts_left = self.app.container_ring.replica_count - headers = {'x-cf-trans-id': self.trans_id} + headers = {'x-swift-txn-id': self.trans_id} for node in self.iter_nodes(partition, nodes, self.app.container_ring): if self.error_limited(node): continue @@ -1247,7 +1247,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-cf-trans-id': self.trans_id} + 'x-swift-txn-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower() in self.pass_through_headers or value[0].lower().startswith('x-container-meta-')) @@ -1309,7 +1309,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-cf-trans-id': self.trans_id} + 'x-swift-txn-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower() in self.pass_through_headers or value[0].lower().startswith('x-container-meta-')) @@ -1362,7 +1362,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = 
{'X-Timestamp': normalize_timestamp(time.time()), - 'x-cf-trans-id': self.trans_id} + 'x-swift-txn-id': self.trans_id} statuses = [] reasons = [] bodies = [] @@ -1450,7 +1450,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-cf-trans-id': self.trans_id} + 'x-swift-txn-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower().startswith('x-account-meta-')) statuses = [] @@ -1499,7 +1499,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'X-CF-Trans-Id': self.trans_id} + 'x-swift-txn-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower().startswith('x-account-meta-')) statuses = [] @@ -1546,7 +1546,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'X-CF-Trans-Id': self.trans_id} + 'x-swift-txn-id': self.trans_id} statuses = [] reasons = [] bodies = [] @@ -1683,7 +1683,7 @@ class BaseApplication(object): def update_request(self, req): req.bytes_transferred = '-' req.client_disconnect = False - req.headers['x-cf-trans-id'] = 'tx' + str(uuid.uuid4()) + req.headers['x-swift-txn-id'] = 'tx' + str(uuid.uuid4()) if 'x-storage-token' in req.headers and \ 'x-auth-token' not in req.headers: req.headers['x-auth-token'] = req.headers['x-storage-token'] @@ -1707,8 +1707,8 @@ class BaseApplication(object): return HTTPPreconditionFailed(request=req, body='Bad URL') controller = controller(self, **path_parts) - controller.trans_id = req.headers.get('x-cf-trans-id', '-') - self.logger.txn_id = req.headers.get('x-cf-trans-id', None) + controller.trans_id = req.headers.get('x-swift-txn-id', '-') + 
self.logger.txn_id = req.headers.get('x-swift-txn-id', None) try: handler = getattr(controller, req.method) if not getattr(handler, 'publicly_accessible'): @@ -1786,7 +1786,7 @@ class Application(BaseApplication): getattr(req, 'bytes_transferred', 0) or '-', getattr(response, 'bytes_transferred', 0) or '-', req.headers.get('etag', '-'), - req.headers.get('x-cf-trans-id', '-'), + req.headers.get('x-swift-txn-id', '-'), logged_headers or '-', trans_time, ))) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 4577cd4dac..42d6a52e4d 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1715,7 +1715,7 @@ class TestObjectController(unittest.TestCase): for node in nodes: conn = proxy_server.http_connect(node['ip'], node['port'], node['device'], partition, 'PUT', '/a', - {'X-Timestamp': ts, 'X-CF-Trans-Id': 'test'}) + {'X-Timestamp': ts, 'x-swift-txn-id': 'test'}) resp = conn.getresponse() self.assertEquals(resp.status, 201) # Head account, just a double check and really is here to test From 68a9acf9b8e85c8465c883ffd9ac9cbdbde5b7f5 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Tue, 4 Jan 2011 16:12:56 -0600 Subject: [PATCH 065/199] pep8 --- bin/swift-bench | 3 ++- swift/common/utils.py | 1 + test/unit/proxy/test_server.py | 3 +++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/bin/swift-bench b/bin/swift-bench index 9c03dc3d3c..4a6158614e 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -126,7 +126,8 @@ if __name__ == '__main__': loghandler = logging.StreamHandler() logger.addHandler(loghandler) logger = LogAdapter(logger) - logformat = NamedFormatter('swift-bench', logger, fmt='%(server)s %(asctime)s %(levelname)s %(message)s') + logformat = NamedFormatter('swift-bench', logger, + fmt='%(server)s %(asctime)s %(levelname)s %(message)s') loghandler.setFormatter(logformat) controller = BenchController(logger, options) diff --git a/swift/common/utils.py 
b/swift/common/utils.py index 78e8769bca..c240b242cf 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -378,6 +378,7 @@ class NamedFormatter(logging.Formatter): msg = "%s (txn: %s)" % (msg, self.logger.txn_id) return msg + def get_logger(conf, name=None, log_to_console=False): """ Get the current system logger using config settings. diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 42d6a52e4d..f7e2479a16 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -218,6 +218,7 @@ def save_globals(): # tests + class TestController(unittest.TestCase): def setUp(self): @@ -372,6 +373,7 @@ class TestController(unittest.TestCase): test(404, 507, 503) test(503, 503, 503) + class TestProxyServer(unittest.TestCase): def test_unhandled_exception(self): @@ -468,6 +470,7 @@ class TestObjectController(unittest.TestCase): 'text/html', 'text/html'])) test_content_type('test.css', iter(['', '', '', 'text/css', 'text/css', 'text/css'])) + def test_custom_mime_types_files(self): swift_dir = mkdtemp() try: From 88234271615264095e80c03e7970631a9b743c02 Mon Sep 17 00:00:00 2001 From: Anne Gentle <anne@openstack.org> Date: Tue, 4 Jan 2011 17:34:43 -0600 Subject: [PATCH 066/199] Changed copyright notices on py files and the single rst file with a copyright notice --- bin/st | 2 +- bin/swift-account-audit | 2 +- bin/swift-account-auditor | 2 +- bin/swift-account-reaper | 2 +- bin/swift-account-replicator | 2 +- bin/swift-account-server | 2 +- bin/swift-account-stats-logger | 2 +- bin/swift-auth-add-user | 2 +- bin/swift-auth-recreate-accounts | 2 +- bin/swift-auth-server | 2 +- bin/swift-auth-update-reseller-prefixes | 2 +- bin/swift-bench | 2 +- bin/swift-container-auditor | 2 +- bin/swift-container-replicator | 2 +- bin/swift-container-server | 2 +- bin/swift-container-updater | 2 +- bin/swift-drive-audit | 2 +- bin/swift-get-nodes | 2 +- bin/swift-init | 2 +- bin/swift-log-stats-collector | 2 +- 
bin/swift-log-uploader | 2 +- bin/swift-object-auditor | 2 +- bin/swift-object-info | 2 +- bin/swift-object-replicator | 2 +- bin/swift-object-server | 2 +- bin/swift-object-updater | 2 +- bin/swift-proxy-server | 2 +- bin/swift-ring-builder | 2 +- bin/swift-stats-populate | 2 +- bin/swift-stats-report | 2 +- doc/source/conf.py | 2 +- doc/source/development_guidelines.rst | 2 +- doc/source/index.rst | 2 +- setup.py | 2 +- swift/account/auditor.py | 2 +- swift/account/reaper.py | 2 +- swift/account/replicator.py | 2 +- swift/account/server.py | 2 +- swift/auth/server.py | 2 +- swift/common/bench.py | 2 +- swift/common/bufferedhttp.py | 2 +- swift/common/client.py | 2 +- swift/common/compressing_file_reader.py | 2 +- swift/common/constraints.py | 2 +- swift/common/daemon.py | 2 +- swift/common/db.py | 2 +- swift/common/db_replicator.py | 2 +- swift/common/direct_client.py | 2 +- swift/common/exceptions.py | 2 +- swift/common/internal_proxy.py | 2 +- swift/common/memcached.py | 2 +- swift/common/middleware/acl.py | 2 +- swift/common/middleware/auth.py | 2 +- swift/common/middleware/catch_errors.py | 2 +- swift/common/middleware/cname_lookup.py | 2 +- swift/common/middleware/domain_remap.py | 2 +- swift/common/middleware/healthcheck.py | 2 +- swift/common/middleware/memcache.py | 2 +- swift/common/ring/builder.py | 2 +- swift/common/ring/ring.py | 2 +- swift/common/utils.py | 2 +- swift/common/wsgi.py | 2 +- swift/container/auditor.py | 2 +- swift/container/replicator.py | 2 +- swift/container/server.py | 2 +- swift/container/updater.py | 2 +- swift/obj/auditor.py | 2 +- swift/obj/replicator.py | 2 +- swift/obj/server.py | 2 +- swift/obj/updater.py | 2 +- swift/proxy/server.py | 2 +- swift/stats/access_processor.py | 2 +- swift/stats/account_stats.py | 2 +- swift/stats/log_processor.py | 2 +- swift/stats/log_uploader.py | 2 +- swift/stats/stats_processor.py | 2 +- test/functional/swift.py | 2 +- test/functional/tests.py | 2 +- test/probe/common.py | 2 +- 
test/probe/test_account_failures.py | 2 +- test/probe/test_container_failures.py | 2 +- test/probe/test_object_async_update.py | 2 +- test/probe/test_object_handoff.py | 2 +- test/probe/test_running_with_each_type_down.py | 2 +- test/unit/account/test_auditor.py | 2 +- test/unit/account/test_reaper.py | 2 +- test/unit/account/test_replicator.py | 2 +- test/unit/account/test_server.py | 2 +- test/unit/auth/test_server.py | 2 +- test/unit/common/middleware/test_acl.py | 2 +- test/unit/common/middleware/test_auth.py | 2 +- test/unit/common/middleware/test_cname_lookup.py | 2 +- test/unit/common/middleware/test_domain_remap.py | 2 +- test/unit/common/middleware/test_except.py | 2 +- test/unit/common/middleware/test_healthcheck.py | 2 +- test/unit/common/middleware/test_memcache.py | 2 +- test/unit/common/middleware/test_ratelimit.py | 2 +- test/unit/common/ring/test_builder.py | 2 +- test/unit/common/ring/test_ring.py | 2 +- test/unit/common/test_bench.py | 2 +- test/unit/common/test_bufferedhttp.py | 2 +- test/unit/common/test_client.py | 2 +- test/unit/common/test_compressing_file_reader.py | 2 +- test/unit/common/test_constraints.py | 2 +- test/unit/common/test_daemon.py | 2 +- test/unit/common/test_db.py | 2 +- test/unit/common/test_db_replicator.py | 2 +- test/unit/common/test_direct_client.py | 2 +- test/unit/common/test_exceptions.py | 2 +- test/unit/common/test_internal_proxy.py | 2 +- test/unit/common/test_memcached.py | 2 +- test/unit/common/test_utils.py | 2 +- test/unit/container/test_auditor.py | 2 +- test/unit/container/test_replicator.py | 2 +- test/unit/container/test_server.py | 2 +- test/unit/container/test_updater.py | 2 +- test/unit/obj/test_auditor.py | 2 +- test/unit/obj/test_replicator.py | 2 +- test/unit/obj/test_server.py | 2 +- test/unit/obj/test_updater.py | 2 +- test/unit/proxy/test_server.py | 2 +- test/unit/stats/test_access_processor.py | 2 +- test/unit/stats/test_account_stats.py | 2 +- test/unit/stats/test_log_processor.py | 2 +- 
test/unit/stats/test_log_uploader.py | 2 +- test/unit/stats/test_stats_processor.py | 2 +- 126 files changed, 126 insertions(+), 126 deletions(-) diff --git a/bin/st b/bin/st index b41aca67ec..cab398910e 100755 --- a/bin/st +++ b/bin/st @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-audit b/bin/swift-account-audit index fe611562d7..81823722bf 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-auditor b/bin/swift-account-auditor index 5707dfd515..3dc4c17609 100755 --- a/bin/swift-account-auditor +++ b/bin/swift-account-auditor @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-reaper b/bin/swift-account-reaper index 688b19b14d..c5df6f4c2f 100755 --- a/bin/swift-account-reaper +++ b/bin/swift-account-reaper @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-replicator b/bin/swift-account-replicator index 8edc7cf406..3978dc0bee 100755 --- a/bin/swift-account-replicator +++ b/bin/swift-account-replicator @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-server b/bin/swift-account-server index 8c627afa59..a4088fffb2 100755 --- a/bin/swift-account-server +++ b/bin/swift-account-server @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-account-stats-logger b/bin/swift-account-stats-logger index 7e9d26ba50..7b95b20249 100755 --- a/bin/swift-account-stats-logger +++ b/bin/swift-account-stats-logger @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-auth-add-user b/bin/swift-auth-add-user index d502dc83a8..6b997d8ccd 100755 --- a/bin/swift-auth-add-user +++ b/bin/swift-auth-add-user @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-auth-recreate-accounts b/bin/swift-auth-recreate-accounts index e17bf2da3b..430940f44e 100755 --- a/bin/swift-auth-recreate-accounts +++ b/bin/swift-auth-recreate-accounts @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bin/swift-auth-server b/bin/swift-auth-server index 80c652f5b4..10c0fba073 100755 --- a/bin/swift-auth-server +++ b/bin/swift-auth-server @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-auth-update-reseller-prefixes b/bin/swift-auth-update-reseller-prefixes index 41a4bf6a76..52b6345e99 100755 --- a/bin/swift-auth-update-reseller-prefixes +++ b/bin/swift-auth-update-reseller-prefixes @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-bench b/bin/swift-bench index ab332482cd..93bb2b5707 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-container-auditor b/bin/swift-container-auditor index 62ff797535..9c29d4400b 100755 --- a/bin/swift-container-auditor +++ b/bin/swift-container-auditor @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-container-replicator b/bin/swift-container-replicator index 34e1dae8c7..d8443afacd 100755 --- a/bin/swift-container-replicator +++ b/bin/swift-container-replicator @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-container-server b/bin/swift-container-server index c6d4cf154b..2dbfbba090 100755 --- a/bin/swift-container-server +++ b/bin/swift-container-server @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-container-updater b/bin/swift-container-updater index 28bdaf1eae..d1b1d5ffb5 100755 --- a/bin/swift-container-updater +++ b/bin/swift-container-updater @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index cde28c1ed7..95143e8b56 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-get-nodes b/bin/swift-get-nodes index f24dd48f96..b84119222c 100755 --- a/bin/swift-get-nodes +++ b/bin/swift-get-nodes @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-init b/bin/swift-init index bc2dea983e..1a6b272345 100755 --- a/bin/swift-init +++ b/bin/swift-init @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-log-stats-collector b/bin/swift-log-stats-collector index 950a0f5fe0..374af49a4f 100755 --- a/bin/swift-log-stats-collector +++ b/bin/swift-log-stats-collector @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-log-uploader b/bin/swift-log-uploader index e533cad824..9d0e27836c 100755 --- a/bin/swift-log-uploader +++ b/bin/swift-log-uploader @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-object-auditor b/bin/swift-object-auditor index 033249ecca..c7371bdfeb 100755 --- a/bin/swift-object-auditor +++ b/bin/swift-object-auditor @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-object-info b/bin/swift-object-info index 57f2522071..e7befddf8c 100755 --- a/bin/swift-object-info +++ b/bin/swift-object-info @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bin/swift-object-replicator b/bin/swift-object-replicator index 53a48e3942..7ae5db81c1 100755 --- a/bin/swift-object-replicator +++ b/bin/swift-object-replicator @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-object-server b/bin/swift-object-server index 5984fe6c69..3f36882c5f 100755 --- a/bin/swift-object-server +++ b/bin/swift-object-server @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-object-updater b/bin/swift-object-updater index 6d68c57c6e..779a3b7306 100755 --- a/bin/swift-object-updater +++ b/bin/swift-object-updater @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-proxy-server b/bin/swift-proxy-server index baccf568e6..6dae54e156 100755 --- a/bin/swift-proxy-server +++ b/bin/swift-proxy-server @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index 50353df256..347ba4825e 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -1,5 +1,5 @@ #!/usr/bin/python -uO -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-stats-populate b/bin/swift-stats-populate index 8ea210cb65..ba531ddc87 100755 --- a/bin/swift-stats-populate +++ b/bin/swift-stats-populate @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bin/swift-stats-report b/bin/swift-stats-report index 3f735877cf..f2504280ba 100755 --- a/bin/swift-stats-report +++ b/bin/swift-stats-report @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/doc/source/conf.py b/doc/source/conf.py index 74dfce9947..88bc171e66 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/doc/source/development_guidelines.rst b/doc/source/development_guidelines.rst index 60a70bf68e..9abda8c90e 100644 --- a/doc/source/development_guidelines.rst +++ b/doc/source/development_guidelines.rst @@ -38,7 +38,7 @@ License and Copyright Every source file should have the following copyright and license statement at the top:: - # Copyright (c) 2010 OpenStack, LLC. + # Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/doc/source/index.rst b/doc/source/index.rst index de07c132ea..3c23140e72 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,5 +1,5 @@ .. - Copyright 2010 OpenStack LLC + Copyright 2010-2011 OpenStack LLC All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/setup.py b/setup.py index f72517f0de..ae91a0962d 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ #!/usr/bin/python -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/account/auditor.py b/swift/account/auditor.py index 36e1e0a0d8..1f24f93acc 100644 --- a/swift/account/auditor.py +++ b/swift/account/auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/account/reaper.py b/swift/account/reaper.py index 6d2112927f..02e32506f0 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/account/replicator.py b/swift/account/replicator.py index 60c409d112..aa131c9c8d 100644 --- a/swift/account/replicator.py +++ b/swift/account/replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/swift/account/server.py b/swift/account/server.py index 53d604ce93..a1e20c1f4f 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/auth/server.py b/swift/auth/server.py index 3e1ef881f4..1903035507 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/bench.py b/swift/common/bench.py index b698ff310b..4abafeb947 100644 --- a/swift/common/bench.py +++ b/swift/common/bench.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/bufferedhttp.py b/swift/common/bufferedhttp.py index 81c54d0722..4fb090ca92 100644 --- a/swift/common/bufferedhttp.py +++ b/swift/common/bufferedhttp.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/client.py b/swift/common/client.py index 1acba8cb37..e3536e894f 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/swift/common/compressing_file_reader.py b/swift/common/compressing_file_reader.py index d6de9154eb..c581bddadd 100644 --- a/swift/common/compressing_file_reader.py +++ b/swift/common/compressing_file_reader.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/constraints.py b/swift/common/constraints.py index d91c136504..ad5a37b9a8 100644 --- a/swift/common/constraints.py +++ b/swift/common/constraints.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/daemon.py b/swift/common/daemon.py index 0f7df064c7..eee3428679 100644 --- a/swift/common/daemon.py +++ b/swift/common/daemon.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/db.py b/swift/common/db.py index 07bb00b390..b3c80dbc8c 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 0588b841a0..4c479a0ed0 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/direct_client.py b/swift/common/direct_client.py index 7952a1ceaf..68334a3ed4 100644 --- a/swift/common/direct_client.py +++ b/swift/common/direct_client.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/exceptions.py b/swift/common/exceptions.py index c498e2dae9..81f544bfae 100644 --- a/swift/common/exceptions.py +++ b/swift/common/exceptions.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/internal_proxy.py b/swift/common/internal_proxy.py index d305b81bc1..40c5185cfa 100644 --- a/swift/common/internal_proxy.py +++ b/swift/common/internal_proxy.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/memcached.py b/swift/common/memcached.py index 193456524a..17378e1aae 100644 --- a/swift/common/memcached.py +++ b/swift/common/memcached.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/swift/common/middleware/acl.py b/swift/common/middleware/acl.py index 6403aed726..f6784953ac 100644 --- a/swift/common/middleware/acl.py +++ b/swift/common/middleware/acl.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/middleware/auth.py b/swift/common/middleware/auth.py index 1278a4a67a..04d4acff75 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index 5fb8c33592..e2287fdbed 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py index be7071ba16..4690bf6c79 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index e55394d84b..4812182587 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/middleware/healthcheck.py b/swift/common/middleware/healthcheck.py index e0da092fc3..072ce24c85 100644 --- a/swift/common/middleware/healthcheck.py +++ b/swift/common/middleware/healthcheck.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/middleware/memcache.py b/swift/common/middleware/memcache.py index 0ec1288fd5..e2eca36d63 100644 --- a/swift/common/middleware/memcache.py +++ b/swift/common/middleware/memcache.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/ring/builder.py b/swift/common/ring/builder.py index ea1e5a17d3..4fb3bfb8b8 100644 --- a/swift/common/ring/builder.py +++ b/swift/common/ring/builder.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/ring/ring.py b/swift/common/ring/ring.py index fea22e8ffa..45ab407563 100644 --- a/swift/common/ring/ring.py +++ b/swift/common/ring/ring.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/utils.py b/swift/common/utils.py index da71253e7b..6b138ee9ee 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index e8c512f2ee..9450bcf439 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/container/auditor.py b/swift/container/auditor.py index 082e6e2b37..d1ceb4f98a 100644 --- a/swift/container/auditor.py +++ b/swift/container/auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/container/replicator.py b/swift/container/replicator.py index 0b344ecbcd..4cdc03b884 100644 --- a/swift/container/replicator.py +++ b/swift/container/replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/container/server.py b/swift/container/server.py index fc06194de6..7ba375ce33 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/container/updater.py b/swift/container/updater.py index 9056de3202..9dacea32d1 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 1d445ec65f..b39bb9336b 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index ef02b9f3ec..1f047102e1 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/obj/server.py b/swift/obj/server.py index 7c139d7775..02674a81b2 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/obj/updater.py b/swift/obj/updater.py index f958166679..a226d4523e 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 1cde24cfe6..3c32ba96cc 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index b7e475833f..558709dccf 100644 --- a/swift/stats/access_processor.py +++ b/swift/stats/access_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/stats/account_stats.py b/swift/stats/account_stats.py index ddf4192119..e402bd0bc8 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index 2511f5e8d6..60101b7ca2 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/stats/log_uploader.py b/swift/stats/log_uploader.py index a8cc92739f..160c948f7f 100644 --- a/swift/stats/log_uploader.py +++ b/swift/stats/log_uploader.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/swift/stats/stats_processor.py b/swift/stats/stats_processor.py index a590a74108..7854c83572 100644 --- a/swift/stats/stats_processor.py +++ b/swift/stats/stats_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/functional/swift.py b/test/functional/swift.py index e134de502f..21d355a8c6 100644 --- a/test/functional/swift.py +++ b/test/functional/swift.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/functional/tests.py b/test/functional/tests.py index f1ea6232b0..4cf090ae5d 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/probe/common.py b/test/probe/common.py index 0bb6f42a57..dc795d41fa 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/probe/test_account_failures.py b/test/probe/test_account_failures.py index 5ad2f965cb..807e397a57 100755 --- a/test/probe/test_account_failures.py +++ b/test/probe/test_account_failures.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/probe/test_container_failures.py b/test/probe/test_container_failures.py index b24e1bc169..656f637a15 100755 --- a/test/probe/test_container_failures.py +++ b/test/probe/test_container_failures.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/probe/test_object_async_update.py b/test/probe/test_object_async_update.py index 7db3a75fca..818f1dd740 100755 --- a/test/probe/test_object_async_update.py +++ b/test/probe/test_object_async_update.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index 7086c11de8..ba81e4c559 100755 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/probe/test_running_with_each_type_down.py b/test/probe/test_running_with_each_type_down.py index 46fe1c5851..cb4a061540 100755 --- a/test/probe/test_running_with_each_type_down.py +++ b/test/probe/test_running_with_each_type_down.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/account/test_auditor.py b/test/unit/account/test_auditor.py index f7678ec1c1..31663d26a5 100644 --- a/test/unit/account/test_auditor.py +++ b/test/unit/account/test_auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/account/test_reaper.py b/test/unit/account/test_reaper.py index c69fc2229d..daa81d931b 100644 --- a/test/unit/account/test_reaper.py +++ b/test/unit/account/test_reaper.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/account/test_replicator.py b/test/unit/account/test_replicator.py index 6b3d045eaa..17f09a5e53 100644 --- a/test/unit/account/test_replicator.py +++ b/test/unit/account/test_replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/account/test_server.py b/test/unit/account/test_server.py index d7f52c280c..16800ca165 100644 --- a/test/unit/account/test_server.py +++ b/test/unit/account/test_server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/auth/test_server.py b/test/unit/auth/test_server.py index 1d691454b3..bd63b44b12 100644 --- a/test/unit/auth/test_server.py +++ b/test/unit/auth/test_server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_acl.py b/test/unit/common/middleware/test_acl.py index 03e76ce621..a6183eaad2 100644 --- a/test/unit/common/middleware/test_acl.py +++ b/test/unit/common/middleware/test_acl.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_auth.py b/test/unit/common/middleware/test_auth.py index b380d4f684..737686f3d9 100644 --- a/test/unit/common/middleware/test_auth.py +++ b/test/unit/common/middleware/test_auth.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/common/middleware/test_cname_lookup.py b/test/unit/common/middleware/test_cname_lookup.py index fdf954253f..423726bb03 100644 --- a/test/unit/common/middleware/test_cname_lookup.py +++ b/test/unit/common/middleware/test_cname_lookup.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_domain_remap.py b/test/unit/common/middleware/test_domain_remap.py index a6beb561fb..fe079cbeda 100644 --- a/test/unit/common/middleware/test_domain_remap.py +++ b/test/unit/common/middleware/test_domain_remap.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_except.py b/test/unit/common/middleware/test_except.py index 25e9486ab0..89c6e9ea1f 100644 --- a/test/unit/common/middleware/test_except.py +++ b/test/unit/common/middleware/test_except.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_healthcheck.py b/test/unit/common/middleware/test_healthcheck.py index fc1a2a198a..81872b7ed0 100644 --- a/test/unit/common/middleware/test_healthcheck.py +++ b/test/unit/common/middleware/test_healthcheck.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/common/middleware/test_memcache.py b/test/unit/common/middleware/test_memcache.py index a6f9336fec..baf50c6f30 100644 --- a/test/unit/common/middleware/test_memcache.py +++ b/test/unit/common/middleware/test_memcache.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/middleware/test_ratelimit.py b/test/unit/common/middleware/test_ratelimit.py index 344708b289..3f993a0402 100644 --- a/test/unit/common/middleware/test_ratelimit.py +++ b/test/unit/common/middleware/test_ratelimit.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/ring/test_builder.py b/test/unit/common/ring/test_builder.py index 2928558c8b..e26116d25c 100644 --- a/test/unit/common/ring/test_builder.py +++ b/test/unit/common/ring/test_builder.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/ring/test_ring.py b/test/unit/common/ring/test_ring.py index 5c668c8c57..ad72a4c990 100644 --- a/test/unit/common/ring/test_ring.py +++ b/test/unit/common/ring/test_ring.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/common/test_bench.py b/test/unit/common/test_bench.py index 7b75aba79e..6cec05f3d3 100644 --- a/test/unit/common/test_bench.py +++ b/test/unit/common/test_bench.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_bufferedhttp.py b/test/unit/common/test_bufferedhttp.py index d453442f04..fbaa83f403 100644 --- a/test/unit/common/test_bufferedhttp.py +++ b/test/unit/common/test_bufferedhttp.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_client.py b/test/unit/common/test_client.py index 23d5c6d848..739cba75e3 100644 --- a/test/unit/common/test_client.py +++ b/test/unit/common/test_client.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_compressing_file_reader.py b/test/unit/common/test_compressing_file_reader.py index 5394a97a72..65c29554d7 100644 --- a/test/unit/common/test_compressing_file_reader.py +++ b/test/unit/common/test_compressing_file_reader.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/common/test_constraints.py b/test/unit/common/test_constraints.py index bcc590f1ee..b87d310ecb 100644 --- a/test/unit/common/test_constraints.py +++ b/test/unit/common/test_constraints.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_daemon.py b/test/unit/common/test_daemon.py index aa85987d25..015928f670 100644 --- a/test/unit/common/test_daemon.py +++ b/test/unit/common/test_daemon.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_db.py b/test/unit/common/test_db.py index ca1cb670f3..49bc8a9229 100644 --- a/test/unit/common/test_db.py +++ b/test/unit/common/test_db.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_db_replicator.py b/test/unit/common/test_db_replicator.py index 1ffe1e923b..9e77f2c92f 100644 --- a/test/unit/common/test_db_replicator.py +++ b/test/unit/common/test_db_replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_direct_client.py b/test/unit/common/test_direct_client.py index 35ed07ffd7..133e779ee5 100644 --- a/test/unit/common/test_direct_client.py +++ b/test/unit/common/test_direct_client.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. 
+# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_exceptions.py b/test/unit/common/test_exceptions.py index 35a5801e77..6e7691137a 100644 --- a/test/unit/common/test_exceptions.py +++ b/test/unit/common/test_exceptions.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_internal_proxy.py b/test/unit/common/test_internal_proxy.py index 248bf1cf23..719970118f 100644 --- a/test/unit/common/test_internal_proxy.py +++ b/test/unit/common/test_internal_proxy.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_memcached.py b/test/unit/common/test_memcached.py index d17f0089a3..43f11650cf 100644 --- a/test/unit/common/test_memcached.py +++ b/test/unit/common/test_memcached.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index b888686660..715f336bcd 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/container/test_auditor.py b/test/unit/container/test_auditor.py index 1093cc809d..220c80f14c 100644 --- a/test/unit/container/test_auditor.py +++ b/test/unit/container/test_auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/container/test_replicator.py b/test/unit/container/test_replicator.py index 8f7a032893..05ecd2938b 100644 --- a/test/unit/container/test_replicator.py +++ b/test/unit/container/test_replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py index fdad12c0f8..2f9d5badea 100644 --- a/test/unit/container/test_server.py +++ b/test/unit/container/test_server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index a4c638c2ec..092944c4be 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index cf8a2bc37c..4b9db28c4b 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index 657570409d..36476ba213 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 90eed52977..64c58ff7ca 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/obj/test_updater.py b/test/unit/obj/test_updater.py index 9887c6fcaf..52e327d1b8 100644 --- a/test/unit/obj/test_updater.py +++ b/test/unit/obj/test_updater.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 4577cd4dac..20fbaea460 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/stats/test_access_processor.py b/test/unit/stats/test_access_processor.py index 47013ca8ae..7317c365aa 100644 --- a/test/unit/stats/test_access_processor.py +++ b/test/unit/stats/test_access_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/stats/test_account_stats.py b/test/unit/stats/test_account_stats.py index e318739dda..204cda78d2 100644 --- a/test/unit/stats/test_account_stats.py +++ b/test/unit/stats/test_account_stats.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/stats/test_log_processor.py b/test/unit/stats/test_log_processor.py index 1a6450139f..75acc02123 100644 --- a/test/unit/stats/test_log_processor.py +++ b/test/unit/stats/test_log_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/test/unit/stats/test_log_uploader.py b/test/unit/stats/test_log_uploader.py index 8e889ad918..3585111750 100644 --- a/test/unit/stats/test_log_uploader.py +++ b/test/unit/stats/test_log_uploader.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/test/unit/stats/test_stats_processor.py b/test/unit/stats/test_stats_processor.py index 4720d1f035..c3af1c1b69 100644 --- a/test/unit/stats/test_stats_processor.py +++ b/test/unit/stats/test_stats_processor.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010 OpenStack, LLC. +# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From b58037c3aeb6d2858cd3e04b0b08a9aa761bac63 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Wed, 5 Jan 2011 11:48:43 +0900 Subject: [PATCH 067/199] s3api: clean up the response handling --- swift/common/middleware/s3.py | 205 +++++++++++++++++----------------- 1 file changed, 101 insertions(+), 104 deletions(-) diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/s3.py index 54751c3803..3d7f5612bc 100644 --- a/swift/common/middleware/s3.py +++ b/swift/common/middleware/s3.py @@ -17,7 +17,6 @@ from webob import Request, Response from webob.exc import HTTPNotFound from simplejson import loads from swift.common.utils import split_path -from hashlib import md5, sha1 from urllib import unquote, quote import rfc822 import hmac @@ -53,6 +52,10 @@ def get_err_response(code): class Controller(object): def __init__(self, app): self.app = app + self.response_args = [] + + def do_start_response(self, *args): + self.response_args.extend(args) class ServiceController(Controller): def __init__(self, env, app, account_name, token, **kwargs): @@ -61,14 +64,19 @@ class ServiceController(Controller): env['PATH_INFO'] = '/v1/%s' % account_name def GET(self, env, start_response): - req = Request(env) env['QUERY_STRING'] = 'format=json' - resp = self.app(env, start_response) - try: - containers = loads(''.join(list(resp))) - except: - return get_err_response('AccessDenied') - + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = 
dict(self.response_args[1]) + + if status != 200: + if status == 401: + return get_err_response('AccessDenied') + else: + print status, headers, body_iter + return get_err_response('InvalidURI') + + containers = loads(''.join(list(body_iter))) resp = Response(content_type='text/xml') resp.status = 200 # we don't keep the creation time of a backet (s3cmd doesn't @@ -84,37 +92,39 @@ class BucketController(Controller): env['PATH_INFO'] = '/v1/%s/%s' % (account_name, container_name) def GET(self, env, start_response): - req = Request(env) env['QUERY_STRING'] = 'format=json' - resp = self.app(env, start_response) - try: - objects = loads(''.join(list(resp))) - except: - status = int(resp[0].split()[0]) - resp = Response(content_type='text/xml') + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) + + if status != 200: if status == 401: return get_err_response('AccessDenied') elif status == 404: return get_err_response('InvalidBucketName') else: - print resp + print status, headers, body_iter return get_err_response('InvalidURI') + objects = loads(''.join(list(body_iter))) resp = Response(content_type='text/xml') resp.status = 200 resp.body = """<?xml version="1.0" encoding="UTF-8"?><ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01"><Name>%s</Name>%s</ListBucketResult>""" % (self.container_name, "".join(['<Contents><Key>%s</Key><LastModified>%s</LastModified><ETag>%s</ETag><Size>%s</Size><StorageClass>STANDARD</StorageClass></Contents>' % (i['name'], i['last_modified'], i['hash'], i['bytes']) for i in objects])) return resp def PUT(self, env, start_response): - req = Request(env) - resp = self.app(env, start_response) - status = int(resp[0].split()[0]) - if status == 401: - return get_err_response('AccessDenied') - elif status == 202: - return get_err_response('BucketAlreadyExists') - else: - print resp + body_iter = self.app(env, self.do_start_response) + status = 
int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) + + if status != 201: + if status == 401: + return get_err_response('AccessDenied') + elif status == 202: + return get_err_response('BucketAlreadyExists') + else: + print status, headers, body_iter + return get_err_response('InvalidURI') resp = Response() resp.headers.add('Location', self.container_name) @@ -122,24 +132,24 @@ class BucketController(Controller): return resp def DELETE(self, env, start_response): - req = Request(env) - resp = self.app(env, start_response) - try: - status = int(resp[0].split()[0]) - except: - resp = Response() - resp.status = 204 - return resp + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) - if status == 401: - return get_err_response('AccessDenied') - elif status == 404: - return get_err_response('InvalidBucketName') - elif status == 409: - return get_err_response('BucketNotEmpty') - else: - print resp - return get_err_response('InvalidURI') + if status != 204: + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('InvalidBucketName') + elif status == 409: + return get_err_response('BucketNotEmpty') + else: + print status, headers, body_iter + return get_err_response('InvalidURI') + + resp = Response() + resp.status = 204 + return resp class ObjectController(Controller): def __init__(self, env, app, account_name, token, container_name, object_name, **kwargs): @@ -147,34 +157,26 @@ class ObjectController(Controller): self.container_name = unquote(container_name) env['HTTP_X_AUTH_TOKEN'] = token env['PATH_INFO'] = '/v1/%s/%s/%s' % (account_name, container_name, object_name) - def GETorHEAD(self, env, start_response): - # there should be better ways. 
- # TODO: - # - we can't handle various errors properly (autorization, etc) - # - hide GETorHEAD - req = Request(env) - method = req.method - req.method = 'GET' - data = self.app(env, start_response) - if type(data) == list: - status = int(data[0][data[0].find('<title>') + 7:].split(' ')[0]) - if status == 404: + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) + + if status != 200: + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: return get_err_response('NoSuchKey') else: - return get_err_response('AccessDenied') - - if method == 'GET': - resp = Response(content_type='text/xml') - resp.body = ''.join(list(data)) - resp.status = 200 - else: - resp = Response() - etag = md5() - etag.update(''.join(list(data))) - etag = etag.hexdigest() - resp.etag = etag - resp.status = 200 + print status, headers, body_iter + return get_err_response('InvalidURI') + + resp = Response(content_type=headers['Content-Type']) + resp.etag = headers['etag'] + resp.status = 200 + req = Request(env) + if req.method == 'GET': + resp.body = ''.join(list(body_iter)) return resp def HEAD(self, env, start_response): @@ -184,44 +186,41 @@ class ObjectController(Controller): return self.GETorHEAD(env, start_response) def PUT(self, env, start_response): - # TODO: how can we get etag from the response header? 
- req = Request(env) - etag = md5() - etag.update(req.body) - etag = etag.hexdigest() - resp = self.app(env, start_response) - status = int(resp[0].split()[0]) - if status == 401: - return get_err_response('AccessDenied') - elif status == 404: - return get_err_response('InvalidBucketName') - elif status == 201: - resp = Response() - resp.etag = etag - resp.status = 200 - return resp - else: - print resp - return get_err_response('InvalidURI') + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) + + if status != 201: + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('InvalidBucketName') + else: + print status, headers, body_iter + return get_err_response('InvalidURI') + + resp = Response() + resp.etag = headers['etag'] + resp.status = 200 + return resp def DELETE(self, env, start_response): - # TODO: how can we get the response result? - req = Request(env) - resp = self.app(env, start_response) - try: - status = int(resp[0].split()[0]) - except: - resp = Response() - resp.status = 204 - return resp + body_iter = self.app(env, self.do_start_response) + status = int(self.response_args[0].split()[0]) + headers = dict(self.response_args[1]) - print resp - if status == 401: - return get_err_response('AccessDenied') - elif status == 404: - return get_err_response('NoSuchKey') - else: - return get_err_response('AccessDenied') + if status != 204: + if status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('NoSuchKey') + else: + print status, headers, body_iter + return get_err_response('InvalidURI') + + resp = Response() + resp.status = 204 + return resp class Swift3Middleware(object): def __init__(self, app, conf, *args, **kwargs): @@ -267,8 +266,6 @@ class Swift3Middleware(object): def __call__(self, env, start_response): req = Request(env) -# print req.method -# print req.path 
if not'Authorization' in req.headers: return self.app(env, start_response) try: From 6c1bf02a620c6cb9ccc3a72fa4b4154b6731147e Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 5 Jan 2011 09:32:19 -0600 Subject: [PATCH 068/199] pep8 fo' realz --- bin/swift-bench | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/swift-bench b/bin/swift-bench index 4a6158614e..9cdeea340e 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -127,7 +127,7 @@ if __name__ == '__main__': logger.addHandler(loghandler) logger = LogAdapter(logger) logformat = NamedFormatter('swift-bench', logger, - fmt='%(server)s %(asctime)s %(levelname)s %(message)s') + fmt='%(server)s %(asctime)s %(levelname)s %(message)s') loghandler.setFormatter(logformat) controller = BenchController(logger, options) From 1b735e63434a080e234c1f99e627406a2512b210 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 5 Jan 2011 08:14:31 -0800 Subject: [PATCH 069/199] Fix to limit account DELETEs to just reseller admins --- swift/common/middleware/auth.py | 5 ++-- swift/common/middleware/swauth.py | 5 ++-- test/unit/common/middleware/test_auth.py | 34 +++++++++++++++++++++ test/unit/common/middleware/test_swauth.py | 35 ++++++++++++++++++++++ 4 files changed, 75 insertions(+), 4 deletions(-) diff --git a/swift/common/middleware/auth.py b/swift/common/middleware/auth.py index 71fc32dafa..1ce7bb20ca 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -159,9 +159,10 @@ class DevAuth(object): user_groups = (req.remote_user or '').split(',') if '.reseller_admin' in user_groups: return None - if account in user_groups and (req.method != 'PUT' or container): + if account in user_groups and \ + (req.method not in ('DELETE', 'PUT') or container): # If the user is admin for the account and is not trying to do an - # account PUT... + # account DELETE or PUT... 
return None referrers, groups = parse_acl(getattr(req, 'acl', None)) if referrer_allowed(req.referer, referrers): diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index ee81bce3b6..afb4ea89d3 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -208,9 +208,10 @@ class Swauth(object): if '.reseller_admin' in user_groups and \ account[len(self.reseller_prefix)].isalnum(): return None - if account in user_groups and (req.method != 'PUT' or container): + if account in user_groups and \ + (req.method not in ('DELETE', 'PUT') or container): # If the user is admin for the account and is not trying to do an - # account PUT... + # account DELETE or PUT... return None referrers, groups = parse_acl(getattr(req, 'acl', None)) if referrer_allowed(req.referer, referrers): diff --git a/test/unit/common/middleware/test_auth.py b/test/unit/common/middleware/test_auth.py index 737686f3d9..cabc7a9523 100644 --- a/test/unit/common/middleware/test_auth.py +++ b/test/unit/common/middleware/test_auth.py @@ -432,6 +432,40 @@ class TestAuth(unittest.TestCase): resp = self.test_auth.authorize(req) self.assertEquals(resp and resp.status_int, 403) + def test_account_delete_permissions(self): + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act' + resp = self.test_auth.authorize(req) + self.assertEquals(resp and resp.status_int, 403) + + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,AUTH_other' + resp = self.test_auth.authorize(req) + self.assertEquals(resp and resp.status_int, 403) + + # Even DELETEs to your own account as account admin should fail + req = Request.blank('/v1/AUTH_old', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,AUTH_old' + resp = self.test_auth.authorize(req) + self.assertEquals(resp and resp.status_int, 403) + + req = Request.blank('/v1/AUTH_new', + 
environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,.reseller_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp, None) + + # .super_admin is not something the middleware should ever see or care + # about + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,.super_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp and resp.status_int, 403) + if __name__ == '__main__': unittest.main() diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index f52a0d8d5c..43e0ed55f6 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -458,6 +458,41 @@ class TestAuth(unittest.TestCase): resp = self.test_auth.authorize(req) self.assertEquals(resp.status_int, 403) + def test_account_delete_permissions(self): + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act' + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,AUTH_other' + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + + # Even DELETEs to your own account as account admin should fail + req = Request.blank('/v1/AUTH_old', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,AUTH_old' + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,.reseller_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp, None) + + # .super_admin is not something the middleware should ever see or care + # about + req = Request.blank('/v1/AUTH_new', + environ={'REQUEST_METHOD': 'DELETE'}) + req.remote_user = 'act:usr,act,.super_admin' + 
resp = self.test_auth.authorize(req) + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + def test_get_token_fail(self): resp = Request.blank('/auth/v1.0').get_response(self.test_auth) self.assertEquals(resp.status_int, 401) From 28a7d69e559756cfffcb8b9a94573fe9333d1aa8 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 5 Jan 2011 08:37:55 -0800 Subject: [PATCH 070/199] Fix bug where trying to access an account name that exactly matched the reseller_prefix would raise an exception --- swift/common/middleware/swauth.py | 1 + test/unit/common/middleware/test_swauth.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index afb4ea89d3..a00d09996d 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -206,6 +206,7 @@ class Swauth(object): return self.denied_response(req) user_groups = (req.remote_user or '').split(',') if '.reseller_admin' in user_groups and \ + account != self.reseller_prefix and \ account[len(self.reseller_prefix)].isalnum(): return None if account in user_groups and \ diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 43e0ed55f6..f0ed42d7f0 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -3095,6 +3095,18 @@ class TestAuth(unittest.TestCase): headers={'X-Auth-Admin-User': 'act:rdm', 'X-Auth-Admin-Key': 'bad'}), 'act')) + def test_reseller_admin_but_account_is_internal_use_only(self): + req = Request.blank('/v1/AUTH_.auth', environ={'REQUEST_METHOD': 'GET'}) + req.remote_user = 'act:usr,act,.reseller_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + + def test_reseller_admin_but_account_is_exactly_reseller_prefix(self): + req = Request.blank('/v1/AUTH_', environ={'REQUEST_METHOD': 'GET'}) + req.remote_user = 
'act:usr,act,.reseller_admin' + resp = self.test_auth.authorize(req) + self.assertEquals(resp.status_int, 403) + if __name__ == '__main__': unittest.main() From 75c25810050fd8b9552c3863cc3360d6b54c6ec1 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 5 Jan 2011 08:40:40 -0800 Subject: [PATCH 071/199] Make Swauth return 404 on split_path exceptions --- swift/common/middleware/swauth.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index a00d09996d..d982d77b42 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -279,8 +279,11 @@ class Swauth(object): """ req.start_time = time() handler = None - version, account, user, _ = split_path(req.path_info, minsegs=1, - maxsegs=4, rest_with_last=True) + try: + version, account, user, _ = split_path(req.path_info, minsegs=1, + maxsegs=4, rest_with_last=True) + except ValueError: + return HTTPNotFound(request=req) if version in ('v1', 'v1.0', 'auth'): if req.method == 'GET': handler = self.handle_get_token @@ -917,8 +920,11 @@ class Swauth(object): above. 
""" # Validate the request info - pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, - rest_with_last=True) + try: + pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3, + rest_with_last=True) + except ValueError: + return HTTPNotFound(request=req) if pathsegs[0] == 'v1' and pathsegs[2] == 'auth': account = pathsegs[1] user = req.headers.get('x-storage-user') From 25bf6ebfc3b979a54a2f731a5d1dfd5abb51b91b Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 5 Jan 2011 11:15:21 -0600 Subject: [PATCH 072/199] moved the txn_id read into a utils helper --- swift/account/server.py | 8 ++++---- swift/common/utils.py | 10 ++++++++++ swift/container/server.py | 8 ++++---- swift/obj/server.py | 10 +++++----- swift/proxy/server.py | 11 ++++++----- test/unit/common/test_utils.py | 14 ++++++++++++++ 6 files changed, 43 insertions(+), 18 deletions(-) diff --git a/swift/account/server.py b/swift/account/server.py index c782bdc951..ec9ad343dc 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -29,7 +29,7 @@ import simplejson from swift.common.db import AccountBroker from swift.common.utils import get_logger, get_param, hash_path, \ - normalize_timestamp, split_path, storage_directory + normalize_timestamp, split_path, storage_directory, get_txn_id from swift.common.constraints import ACCOUNT_LISTING_LIMIT, \ check_mount, check_float, check_utf8 from swift.common.db_replicator import ReplicatorRpc @@ -86,7 +86,7 @@ class AccountController(object): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, part, account) if container: # put account container - if 'x-swift-txn-id' in req.headers: + if get_txn_id(req, None) is None: broker.pending_timeout = 3 if req.headers.get('x-account-override-deleted', 'no').lower() != \ 'yes' and broker.is_deleted(): @@ -296,7 +296,7 @@ class AccountController(object): def __call__(self, env, start_response): start_time = time.time() req = 
Request(env) - self.logger.txn_id = req.headers.get('x-swift-txn-id', None) + self.logger.txn_id = get_txn_id(req, None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -319,7 +319,7 @@ class AccountController(object): time.strftime('%d/%b/%Y:%H:%M:%S +0000', time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - req.headers.get('x-swift-txn-id', '-'), + get_txn_id(req, '-'), req.referer or '-', req.user_agent or '-', trans_time, additional_info) diff --git a/swift/common/utils.py b/swift/common/utils.py index c240b242cf..d7abac2ec8 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -107,6 +107,16 @@ def get_param(req, name, default=None): value.decode('utf8') # Ensure UTF8ness return value +def get_txn_id(req, default=None): + """ + Get the transaction id from a request + + :param req: Webob request object + :param default: value to return if no transaction id is found + """ + return req.headers.get('x-swift-txn-id', + req.headers.get('x-cf-trans-id', default)) + def fallocate(fd, size): """ diff --git a/swift/container/server.py b/swift/container/server.py index 58b09671ed..010e9e0af6 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -31,7 +31,7 @@ from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ from swift.common.db import ContainerBroker from swift.common.utils import get_logger, get_param, hash_path, \ - normalize_timestamp, storage_directory, split_path + normalize_timestamp, storage_directory, split_path, get_txn_id from swift.common.constraints import CONTAINER_LISTING_LIMIT, \ check_mount, check_float, check_utf8 from swift.common.bufferedhttp import http_connect @@ -95,7 +95,7 @@ class ContainerController(object): 'x-delete-timestamp': info['delete_timestamp'], 'x-object-count': info['object_count'], 'x-bytes-used': info['bytes_used'], - 'x-swift-txn-id': req.headers.get('x-swift-txn-id', '-')} + 'x-swift-txn-id': 
get_txn_id(req, '-')} if req.headers.get('x-account-override-deleted', 'no').lower() == \ 'yes': account_headers['x-account-override-deleted'] = 'yes' @@ -384,7 +384,7 @@ class ContainerController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) - self.logger.txn_id = req.headers.get('x-swift-txn-id', None) + self.logger.txn_id = get_txn_id(req, None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -404,7 +404,7 @@ class ContainerController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - req.headers.get('x-swift-txn-id', '-'), + get_txn_id(req, '-'), req.referer or '-', req.user_agent or '-', trans_time) if req.method.upper() == 'REPLICATE': diff --git a/swift/obj/server.py b/swift/obj/server.py index 280ddbf49a..6af3bdd844 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -37,7 +37,7 @@ from eventlet import sleep, Timeout from swift.common.utils import mkdirs, normalize_timestamp, \ storage_directory, hash_path, renamer, fallocate, \ - split_path, drop_buffer_cache, get_logger, write_pickle + split_path, drop_buffer_cache, get_logger, write_pickle, get_txn_id from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_object_creation, check_mount, \ check_float, check_utf8 @@ -409,7 +409,7 @@ class ObjectController(object): 'x-content-type': file.metadata['Content-Type'], 'x-timestamp': file.metadata['X-Timestamp'], 'x-etag': file.metadata['ETag'], - 'x-swift-txn-id': request.headers.get('x-swift-txn-id', '-')}, + 'x-swift-txn-id': get_txn_id(request, '-')}, device) resp = HTTPCreated(request=request, etag=etag) return resp @@ -531,7 +531,7 @@ class ObjectController(object): file.unlinkold(metadata['X-Timestamp']) self.container_update('DELETE', account, container, obj, request.headers, {'x-timestamp': metadata['X-Timestamp'], - 'x-swift-txn-id': 
request.headers.get('x-swift-txn-id', '-')}, + 'x-swift-txn-id': get_txn_id(request, '-')}, device) resp = response_class(request=request) return resp @@ -562,7 +562,7 @@ class ObjectController(object): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() req = Request(env) - self.logger.txn_id = req.headers.get('x-swift-txn-id', None) + self.logger.txn_id = get_txn_id(req, None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -583,7 +583,7 @@ class ObjectController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', req.referer or '-', - req.headers.get('x-swift-txn-id', '-'), + get_txn_id(req, '-'), req.user_agent or '-', trans_time) if req.method == 'REPLICATE': diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 04187cc82b..d26e626cd9 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -41,7 +41,7 @@ from webob import Request, Response from swift.common.ring import Ring from swift.common.utils import get_logger, normalize_timestamp, split_path, \ - cache_from_env + cache_from_env, get_txn_id from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_metadata, check_object_creation, \ check_utf8, CONTAINER_LISTING_LIMIT, MAX_ACCOUNT_NAME_LENGTH, \ @@ -1362,7 +1362,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'x-swift-txn-id': self.trans_id} statuses = [] reasons = [] bodies = [] @@ -1707,8 +1707,9 @@ class BaseApplication(object): return HTTPPreconditionFailed(request=req, body='Bad URL') controller = controller(self, **path_parts) - controller.trans_id = req.headers.get('x-swift-txn-id', '-') - self.logger.txn_id = req.headers.get('x-swift-txn-id', None) + txn_id = 
get_txn_id(req, None) + controller.trans_id = txn_id or '-' + self.logger.txn_id = txn_id try: handler = getattr(controller, req.method) if not getattr(handler, 'publicly_accessible'): @@ -1786,7 +1787,7 @@ class Application(BaseApplication): getattr(req, 'bytes_transferred', 0) or '-', getattr(response, 'bytes_transferred', 0) or '-', req.headers.get('etag', '-'), - req.headers.get('x-swift-txn-id', '-'), + get_txn_id(req, '-'), logged_headers or '-', trans_time, ))) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index b888686660..f8c44440c0 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -28,6 +28,7 @@ from StringIO import StringIO from functools import partial from tempfile import NamedTemporaryFile +from webob import Request from eventlet import sleep from swift.common import utils @@ -80,6 +81,19 @@ class TestUtils(unittest.TestCase): def setUp(self): utils.HASH_PATH_SUFFIX = 'endcap' + def test_get_txn_id(self): + req = Request.blank('') + req.headers['X-Swift-Txn-Id'] = 'tx12345' + self.assertEquals(utils.get_txn_id(req), 'tx12345') + environ = {'HTTP_X_CF_TRANS_ID': 'tx67890'} + req = Request.blank('', environ=environ) + self.assertEquals(utils.get_txn_id(req), 'tx67890') + req = Request.blank('') + self.assertEquals(utils.get_txn_id(req), None) + self.assertEquals(utils.get_txn_id(req, '-'), '-') + req.headers['X-Cf-Trans-Id'] = 'tx13579' + self.assertEquals(utils.get_txn_id(req, default='test'), 'tx13579') + def test_normalize_timestamp(self): """ Test swift.common.utils.normalize_timestamp """ self.assertEquals(utils.normalize_timestamp('1253327593.48174'), From 9786ab6687e018b3622722e379da68b64cc4af49 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 5 Jan 2011 11:48:58 -0600 Subject: [PATCH 073/199] this is to make gholt happy; well less unhappy --- swift/common/middleware/catch_errors.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff 
--git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index f1ab249b25..eee45b8e1f 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -26,10 +26,10 @@ class CatchErrorMiddleware(object): def __init__(self, app, conf): self.app = app - try: - # if the application already has a logger we should use that one - self.logger = app.logger - except AttributeError: + # if the application already has a logger we should use that one + self.logger = getattr(app, 'logger', None) + if not self.logger: + # and only call get_logger if we have to self.logger = get_logger(conf) def __call__(self, env, start_response): From 30fd2dd0f229d95b53d6efaff4b2792d5f49373b Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 5 Jan 2011 13:52:33 -0600 Subject: [PATCH 074/199] revert x-cf-trans-id rename --- swift/account/server.py | 8 ++++---- swift/common/utils.py | 10 ---------- swift/container/server.py | 8 ++++---- swift/obj/server.py | 10 +++++----- swift/proxy/server.py | 27 +++++++++++++-------------- test/unit/common/test_utils.py | 14 -------------- test/unit/proxy/test_server.py | 5 +---- 7 files changed, 27 insertions(+), 55 deletions(-) diff --git a/swift/account/server.py b/swift/account/server.py index ec9ad343dc..53d604ce93 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -29,7 +29,7 @@ import simplejson from swift.common.db import AccountBroker from swift.common.utils import get_logger, get_param, hash_path, \ - normalize_timestamp, split_path, storage_directory, get_txn_id + normalize_timestamp, split_path, storage_directory from swift.common.constraints import ACCOUNT_LISTING_LIMIT, \ check_mount, check_float, check_utf8 from swift.common.db_replicator import ReplicatorRpc @@ -86,7 +86,7 @@ class AccountController(object): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, part, account) if 
container: # put account container - if get_txn_id(req, None) is None: + if 'x-cf-trans-id' in req.headers: broker.pending_timeout = 3 if req.headers.get('x-account-override-deleted', 'no').lower() != \ 'yes' and broker.is_deleted(): @@ -296,7 +296,7 @@ class AccountController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) - self.logger.txn_id = get_txn_id(req, None) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -319,7 +319,7 @@ class AccountController(object): time.strftime('%d/%b/%Y:%H:%M:%S +0000', time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - get_txn_id(req, '-'), + req.headers.get('x-cf-trans-id', '-'), req.referer or '-', req.user_agent or '-', trans_time, additional_info) diff --git a/swift/common/utils.py b/swift/common/utils.py index d7abac2ec8..c240b242cf 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -107,16 +107,6 @@ def get_param(req, name, default=None): value.decode('utf8') # Ensure UTF8ness return value -def get_txn_id(req, default=None): - """ - Get the transaction id from a request - - :param req: Webob request object - :param default: value to return if no transaction id is found - """ - return req.headers.get('x-swift-txn-id', - req.headers.get('x-cf-trans-id', default)) - def fallocate(fd, size): """ diff --git a/swift/container/server.py b/swift/container/server.py index 010e9e0af6..fc06194de6 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -31,7 +31,7 @@ from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ from swift.common.db import ContainerBroker from swift.common.utils import get_logger, get_param, hash_path, \ - normalize_timestamp, storage_directory, split_path, get_txn_id + normalize_timestamp, storage_directory, split_path from swift.common.constraints import 
CONTAINER_LISTING_LIMIT, \ check_mount, check_float, check_utf8 from swift.common.bufferedhttp import http_connect @@ -95,7 +95,7 @@ class ContainerController(object): 'x-delete-timestamp': info['delete_timestamp'], 'x-object-count': info['object_count'], 'x-bytes-used': info['bytes_used'], - 'x-swift-txn-id': get_txn_id(req, '-')} + 'x-cf-trans-id': req.headers.get('X-Cf-Trans-Id', '-')} if req.headers.get('x-account-override-deleted', 'no').lower() == \ 'yes': account_headers['x-account-override-deleted'] = 'yes' @@ -384,7 +384,7 @@ class ContainerController(object): def __call__(self, env, start_response): start_time = time.time() req = Request(env) - self.logger.txn_id = get_txn_id(req, None) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -404,7 +404,7 @@ class ContainerController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', - get_txn_id(req, '-'), + req.headers.get('x-cf-trans-id', '-'), req.referer or '-', req.user_agent or '-', trans_time) if req.method.upper() == 'REPLICATE': diff --git a/swift/obj/server.py b/swift/obj/server.py index 6af3bdd844..7c139d7775 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -37,7 +37,7 @@ from eventlet import sleep, Timeout from swift.common.utils import mkdirs, normalize_timestamp, \ storage_directory, hash_path, renamer, fallocate, \ - split_path, drop_buffer_cache, get_logger, write_pickle, get_txn_id + split_path, drop_buffer_cache, get_logger, write_pickle from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_object_creation, check_mount, \ check_float, check_utf8 @@ -409,7 +409,7 @@ class ObjectController(object): 'x-content-type': file.metadata['Content-Type'], 'x-timestamp': file.metadata['X-Timestamp'], 'x-etag': file.metadata['ETag'], - 'x-swift-txn-id': get_txn_id(request, '-')}, + 'x-cf-trans-id': 
request.headers.get('x-cf-trans-id', '-')}, device) resp = HTTPCreated(request=request, etag=etag) return resp @@ -531,7 +531,7 @@ class ObjectController(object): file.unlinkold(metadata['X-Timestamp']) self.container_update('DELETE', account, container, obj, request.headers, {'x-timestamp': metadata['X-Timestamp'], - 'x-swift-txn-id': get_txn_id(request, '-')}, + 'x-cf-trans-id': request.headers.get('x-cf-trans-id', '-')}, device) resp = response_class(request=request) return resp @@ -562,7 +562,7 @@ class ObjectController(object): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() req = Request(env) - self.logger.txn_id = get_txn_id(req, None) + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: @@ -583,7 +583,7 @@ class ObjectController(object): time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', req.referer or '-', - get_txn_id(req, '-'), + req.headers.get('x-cf-trans-id', '-'), req.user_agent or '-', trans_time) if req.method == 'REPLICATE': diff --git a/swift/proxy/server.py b/swift/proxy/server.py index d26e626cd9..1cde24cfe6 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -41,7 +41,7 @@ from webob import Request, Response from swift.common.ring import Ring from swift.common.utils import get_logger, normalize_timestamp, split_path, \ - cache_from_env, get_txn_id + cache_from_env from swift.common.bufferedhttp import http_connect from swift.common.constraints import check_metadata, check_object_creation, \ check_utf8, CONTAINER_LISTING_LIMIT, MAX_ACCOUNT_NAME_LENGTH, \ @@ -356,7 +356,7 @@ class Controller(object): result_code = 0 attempts_left = self.app.account_ring.replica_count path = '/%s' % account - headers = {'x-swift-txn-id': self.trans_id} + headers = {'x-cf-trans-id': self.trans_id} for node in self.iter_nodes(partition, nodes, self.app.account_ring): if 
self.error_limited(node): continue @@ -430,7 +430,7 @@ class Controller(object): write_acl = None container_size = None attempts_left = self.app.container_ring.replica_count - headers = {'x-swift-txn-id': self.trans_id} + headers = {'x-cf-trans-id': self.trans_id} for node in self.iter_nodes(partition, nodes, self.app.container_ring): if self.error_limited(node): continue @@ -1247,7 +1247,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'x-cf-trans-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower() in self.pass_through_headers or value[0].lower().startswith('x-container-meta-')) @@ -1309,7 +1309,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'x-cf-trans-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower() in self.pass_through_headers or value[0].lower().startswith('x-container-meta-')) @@ -1362,7 +1362,7 @@ class ContainerController(Controller): container_partition, containers = self.app.container_ring.get_nodes( self.account_name, self.container_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'x-cf-trans-id': self.trans_id} statuses = [] reasons = [] bodies = [] @@ -1450,7 +1450,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'x-cf-trans-id': self.trans_id} headers.update(value for value in req.headers.iteritems() if 
value[0].lower().startswith('x-account-meta-')) statuses = [] @@ -1499,7 +1499,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'X-CF-Trans-Id': self.trans_id} headers.update(value for value in req.headers.iteritems() if value[0].lower().startswith('x-account-meta-')) statuses = [] @@ -1546,7 +1546,7 @@ class AccountController(Controller): account_partition, accounts = \ self.app.account_ring.get_nodes(self.account_name) headers = {'X-Timestamp': normalize_timestamp(time.time()), - 'x-swift-txn-id': self.trans_id} + 'X-CF-Trans-Id': self.trans_id} statuses = [] reasons = [] bodies = [] @@ -1683,7 +1683,7 @@ class BaseApplication(object): def update_request(self, req): req.bytes_transferred = '-' req.client_disconnect = False - req.headers['x-swift-txn-id'] = 'tx' + str(uuid.uuid4()) + req.headers['x-cf-trans-id'] = 'tx' + str(uuid.uuid4()) if 'x-storage-token' in req.headers and \ 'x-auth-token' not in req.headers: req.headers['x-auth-token'] = req.headers['x-storage-token'] @@ -1707,9 +1707,8 @@ class BaseApplication(object): return HTTPPreconditionFailed(request=req, body='Bad URL') controller = controller(self, **path_parts) - txn_id = get_txn_id(req, None) - controller.trans_id = txn_id or '-' - self.logger.txn_id = txn_id + controller.trans_id = req.headers.get('x-cf-trans-id', '-') + self.logger.txn_id = req.headers.get('x-cf-trans-id', None) try: handler = getattr(controller, req.method) if not getattr(handler, 'publicly_accessible'): @@ -1787,7 +1786,7 @@ class Application(BaseApplication): getattr(req, 'bytes_transferred', 0) or '-', getattr(response, 'bytes_transferred', 0) or '-', req.headers.get('etag', '-'), - get_txn_id(req, '-'), + req.headers.get('x-cf-trans-id', '-'), logged_headers or '-', trans_time, ))) diff --git a/test/unit/common/test_utils.py 
b/test/unit/common/test_utils.py index f8c44440c0..b888686660 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -28,7 +28,6 @@ from StringIO import StringIO from functools import partial from tempfile import NamedTemporaryFile -from webob import Request from eventlet import sleep from swift.common import utils @@ -81,19 +80,6 @@ class TestUtils(unittest.TestCase): def setUp(self): utils.HASH_PATH_SUFFIX = 'endcap' - def test_get_txn_id(self): - req = Request.blank('') - req.headers['X-Swift-Txn-Id'] = 'tx12345' - self.assertEquals(utils.get_txn_id(req), 'tx12345') - environ = {'HTTP_X_CF_TRANS_ID': 'tx67890'} - req = Request.blank('', environ=environ) - self.assertEquals(utils.get_txn_id(req), 'tx67890') - req = Request.blank('') - self.assertEquals(utils.get_txn_id(req), None) - self.assertEquals(utils.get_txn_id(req, '-'), '-') - req.headers['X-Cf-Trans-Id'] = 'tx13579' - self.assertEquals(utils.get_txn_id(req, default='test'), 'tx13579') - def test_normalize_timestamp(self): """ Test swift.common.utils.normalize_timestamp """ self.assertEquals(utils.normalize_timestamp('1253327593.48174'), diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index f7e2479a16..4577cd4dac 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -218,7 +218,6 @@ def save_globals(): # tests - class TestController(unittest.TestCase): def setUp(self): @@ -373,7 +372,6 @@ class TestController(unittest.TestCase): test(404, 507, 503) test(503, 503, 503) - class TestProxyServer(unittest.TestCase): def test_unhandled_exception(self): @@ -470,7 +468,6 @@ class TestObjectController(unittest.TestCase): 'text/html', 'text/html'])) test_content_type('test.css', iter(['', '', '', 'text/css', 'text/css', 'text/css'])) - def test_custom_mime_types_files(self): swift_dir = mkdtemp() try: @@ -1718,7 +1715,7 @@ class TestObjectController(unittest.TestCase): for node in nodes: conn = 
proxy_server.http_connect(node['ip'], node['port'], node['device'], partition, 'PUT', '/a', - {'X-Timestamp': ts, 'x-swift-txn-id': 'test'}) + {'X-Timestamp': ts, 'X-CF-Trans-Id': 'test'}) resp = conn.getresponse() self.assertEquals(resp.status, 201) # Head account, just a double check and really is here to test From 84d24240e5b55cdc4fa0c24509de847265593e9d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 5 Jan 2011 12:06:55 -0800 Subject: [PATCH 075/199] Test updates suggested by glange --- test/unit/common/middleware/test_swauth.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index f0ed42d7f0..08a1efb460 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -160,9 +160,9 @@ class TestAuth(unittest.TestCase): self.assertEquals(ath.default_swift_cluster, 'local:http://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:http://host/path/'})(app) + 'default_swift_cluster': 'local:https://host/path/'})(app) self.assertEquals(ath.default_swift_cluster, - 'local:http://host/path') + 'local:https://host/path') def test_top_level_ignore(self): resp = Request.blank('/').get_response(self.test_auth) @@ -455,7 +455,6 @@ class TestAuth(unittest.TestCase): req = Request.blank('/v1/AUTH_new', environ={'REQUEST_METHOD': 'PUT'}) req.remote_user = 'act:usr,act,.super_admin' resp = self.test_auth.authorize(req) - resp = self.test_auth.authorize(req) self.assertEquals(resp.status_int, 403) def test_account_delete_permissions(self): From 4338494174be0d2dc60b7dc3bf175873a6a02236 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Wed, 5 Jan 2011 21:54:45 +0000 Subject: [PATCH 076/199] GETorHEAD cleanup --- swift/common/middleware/s3.py | 49 ++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 21 
deletions(-) diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/s3.py index 3d7f5612bc..d09ce9af18 100644 --- a/swift/common/middleware/s3.py +++ b/swift/common/middleware/s3.py @@ -13,15 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from webob import Request, Response -from webob.exc import HTTPNotFound -from simplejson import loads -from swift.common.utils import split_path from urllib import unquote, quote import rfc822 import hmac import base64 import errno +import binascii + +from webob import Request, Response +from webob.exc import HTTPNotFound +from simplejson import loads + +from swift.common.utils import split_path + def get_err_response(code): error_table = {'AccessDenied': @@ -157,27 +161,30 @@ class ObjectController(Controller): self.container_name = unquote(container_name) env['HTTP_X_AUTH_TOKEN'] = token env['PATH_INFO'] = '/v1/%s/%s/%s' % (account_name, container_name, object_name) + def GETorHEAD(self, env, start_response): - body_iter = self.app(env, self.do_start_response) + app_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) - if status != 200: - if status == 401: - return get_err_response('AccessDenied') - elif status == 404: - return get_err_response('NoSuchKey') - else: - print status, headers, body_iter - return get_err_response('InvalidURI') - - resp = Response(content_type=headers['Content-Type']) - resp.etag = headers['etag'] - resp.status = 200 - req = Request(env) - if req.method == 'GET': - resp.body = ''.join(list(body_iter)) - return resp + if 200 <= status < 300: + new_hdrs = {} + for header in ('content-length', 'content-encoding', 'etag'): + if header in headers: + new_hdrs[header] = headers[header] + if 'etag' in headers: + new_hdrs['Content-MD5'] = headers['etag'].decode('hex') \ + .encode('base64') + for key, value in headers.iteritems(): + if 
key.startswith('x-object-meta-'): + new_hdrs['x-amz-meta-' + key[14:]] = value + return Response(status=status, headers=new_hdrs, app_iter=app_iter) + elif status == 401: + return get_err_response('AccessDenied') + elif status == 404: + return get_err_response('NoSuchKey') + else: + return get_err_response('InvalidURI') def HEAD(self, env, start_response): return self.GETorHEAD(env, start_response) From fe0d952374e92de5a4eb6bb0b9fb53d8b1c0bf00 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Wed, 5 Jan 2011 22:01:03 +0000 Subject: [PATCH 077/199] further cleanup --- swift/common/middleware/s3.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/s3.py index d09ce9af18..82d58642af 100644 --- a/swift/common/middleware/s3.py +++ b/swift/common/middleware/s3.py @@ -169,15 +169,13 @@ class ObjectController(Controller): if 200 <= status < 300: new_hdrs = {} - for header in ('content-length', 'content-encoding', 'etag'): - if header in headers: - new_hdrs[header] = headers[header] - if 'etag' in headers: - new_hdrs['Content-MD5'] = headers['etag'].decode('hex') \ - .encode('base64') - for key, value in headers.iteritems(): + for key, val in headers.iteritems(): if key.startswith('x-object-meta-'): - new_hdrs['x-amz-meta-' + key[14:]] = value + new_hdrs['x-amz-meta-' + key[14:]] = val + elif key in ('content-length', 'content-encoding', 'etag'): + new_hdrs[key] = val + if key == 'etag': + new_hdrs['Content-MD5'] = val.decode('hex').encode('base64') return Response(status=status, headers=new_hdrs, app_iter=app_iter) elif status == 401: return get_err_response('AccessDenied') From 26573e43a659a618d79b4b2a9b7d674d5b7e0812 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Wed, 5 Jan 2011 16:24:14 -0600 Subject: [PATCH 078/199] object copy now copies the content type --- swift/proxy/server.py | 5 +++++ 
test/unit/proxy/test_server.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 3c32ba96cc..a00a31f42b 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -911,12 +911,14 @@ class ObjectController(Controller): self.account_name, self.container_name, self.object_name) req.headers['X-Timestamp'] = normalize_timestamp(time.time()) # Sometimes the 'content-type' header exists, but is set to None. + content_type_manually_set = True if not req.headers.get('content-type'): guessed_type, _junk = mimetypes.guess_type(req.path_info) if not guessed_type: req.headers['Content-Type'] = 'application/octet-stream' else: req.headers['Content-Type'] = guessed_type + content_type_manually_set = False error_response = check_object_creation(req, self.object_name) if error_response: return error_response @@ -961,6 +963,9 @@ class ObjectController(Controller): new_req.etag = source_resp.etag # we no longer need the X-Copy-From header del new_req.headers['X-Copy-From'] + if not content_type_manually_set: + new_req.headers['Content-Type'] = \ + source_resp.headers['Content-Type'] for k, v in source_resp.headers.items(): if k.lower().startswith('x-object-meta-'): new_req.headers[k] = v diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 20fbaea460..52045777a9 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1264,6 +1264,7 @@ class TestObjectController(unittest.TestCase): with save_globals(): controller = proxy_server.ObjectController(self.app, 'account', 'container', 'object') + # initial source object PUT req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0'}) self.app.update_request(req) @@ -1273,6 +1274,7 @@ class TestObjectController(unittest.TestCase): resp = controller.PUT(req) self.assertEquals(resp.status_int, 201) + # basic copy req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, 
headers={'Content-Length': '0', 'X-Copy-From': 'c/o'}) @@ -1285,6 +1287,7 @@ self.assertEquals(resp.status_int, 201) self.assertEquals(resp.headers['x-copied-from'], 'c/o') + # non-zero content length req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '5', 'X-Copy-From': 'c/o'}) @@ -1296,6 +1299,7 @@ resp = controller.PUT(req) self.assertEquals(resp.status_int, 400) + # extra source path parsing req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o/o2'}) @@ -1308,6 +1312,7 @@ self.assertEquals(resp.status_int, 201) self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') + # space in source path req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '0', 'X-Copy-From': 'c/o%20o2'}) From 08523977f5ad78d138d116f22b5bb2003279ba52 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Wed, 5 Jan 2011 16:49:43 -0600 Subject: [PATCH 079/199] changed the order of setting the charset and content_type to ensure that charset isn't added to the object --- swift/proxy/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index a00a31f42b..8485a618da 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -632,8 +632,8 @@ class Controller(object): res.status = source.status res.content_length = source.getheader('Content-Length') if source.getheader('Content-Type'): - res.charset = None res.content_type = source.getheader('Content-Type') + res.charset = None return res elif 200 <= source.status <= 399: res = status_map[source.status](request=req) @@ -641,8 +641,8 @@ class Controller(object): if req.method == 'HEAD': res.content_length = source.getheader('Content-Length') if 
source.getheader('Content-Type'): - res.charset = None res.content_type = source.getheader('Content-Type') + res.charset = None return res statuses.append(source.status) reasons.append(source.reason) From f91070442e8677aa749ef8a61795cff7f7a29ee7 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Thu, 6 Jan 2011 18:08:48 +0900 Subject: [PATCH 080/199] s3api: fix GETorHEAD - replace 'content-length and 'content-encoding' with 'Content-Length and 'Content-Encoding' respectively. - return 'Content-Type' and 'last-modified' headers too. - remove 'Content-MD5' since seems that S3 doesn't use it for GET or HEAD response. --- swift/common/middleware/s3.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/s3.py index 82d58642af..de66a8d677 100644 --- a/swift/common/middleware/s3.py +++ b/swift/common/middleware/s3.py @@ -172,10 +172,8 @@ class ObjectController(Controller): for key, val in headers.iteritems(): if key.startswith('x-object-meta-'): new_hdrs['x-amz-meta-' + key[14:]] = val - elif key in ('content-length', 'content-encoding', 'etag'): + elif key in ('Content-Length', 'Content-Type', 'Content-Encoding', 'etag', 'last-modified'): new_hdrs[key] = val - if key == 'etag': - new_hdrs['Content-MD5'] = val.decode('hex').encode('base64') return Response(status=status, headers=new_hdrs, app_iter=app_iter) elif status == 401: return get_err_response('AccessDenied') From d17b7c9956a7e49db7d847bab4b20ce0c93de4a5 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Thu, 6 Jan 2011 18:51:26 +0900 Subject: [PATCH 081/199] rename the module from s3.py to swift3.py (suggested by Chuck Thier) --- setup.py | 2 +- swift/common/middleware/{s3.py => swift3.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename swift/common/middleware/{s3.py => swift3.py} (100%) diff --git a/setup.py b/setup.py index b29d02e54d..6ce9c7b6b5 100644 --- 
a/setup.py +++ b/setup.py @@ -96,7 +96,7 @@ setup( 'cname_lookup=swift.common.middleware.cname_lookup:filter_factory', 'catch_errors=swift.common.middleware.catch_errors:filter_factory', 'domain_remap=swift.common.middleware.domain_remap:filter_factory', - 's3=swift.common.middleware.s3:filter_factory', + 'swift3=swift.common.middleware.swift3:filter_factory', ], }, ) diff --git a/swift/common/middleware/s3.py b/swift/common/middleware/swift3.py similarity index 100% rename from swift/common/middleware/s3.py rename to swift/common/middleware/swift3.py From 8f799f577a021b7f649f7532ab3c174014ad45e7 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Fri, 7 Jan 2011 09:25:59 -0800 Subject: [PATCH 082/199] More error reporting --- swift/common/middleware/swauth.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index d982d77b42..bff8c64947 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -558,17 +558,25 @@ class Swauth(object): account_suffix = req.headers.get('x-account-suffix') if not account_suffix: account_suffix = str(uuid4()) - conn = self.get_conn() # Create the new account in the Swift cluster - path = quote('%s/%s%s' % (self.dsc_parsed.path, self.reseller_prefix, - account_suffix)) - conn.request('PUT', path, - headers={'X-Auth-Token': self.get_itoken(req.environ)}) - resp = conn.getresponse() - resp.read() - if resp.status // 100 != 2: - raise Exception('Could not create account on the Swift cluster: ' - '%s %s %s' % (path, resp.status, resp.reason)) + path = quote('%s/%s%s' % (self.dsc_parsed.path, + self.reseller_prefix, account_suffix)) + try: + conn = self.get_conn() + conn.request('PUT', path, + headers={'X-Auth-Token': self.get_itoken(req.environ)}) + resp = conn.getresponse() + resp.read() + if resp.status // 100 != 2: + raise Exception('Could not create account on the Swift ' + 
'cluster: %s %s %s' % (path, resp.status, resp.reason)) + except: + self.logger.error(_('ERROR: Exception while trying to communicate ' + 'with %(scheme)s://%(host)s:%(port)s/%(path)s'), + {'scheme': self.dsc_parsed.scheme, + 'host': self.dsc_parsed.hostname, + 'port': self.dsc_parsed.port, 'path': path}) + raise # Record the mapping from account id back to account name path = quote('/v1/%s/.account_id/%s%s' % (self.auth_account, self.reseller_prefix, account_suffix)) From 5604404d8dffd244a0be5a1d0dc6fe74f149a7df Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 10 Jan 2011 08:43:38 -0800 Subject: [PATCH 083/199] Added public/private urls for swauth default swift cluster setting --- etc/proxy-server.conf-sample | 6 ++++ swift/common/middleware/swauth.py | 34 +++++++++++++++++----- test/unit/common/middleware/test_swauth.py | 12 ++++++-- 3 files changed, 42 insertions(+), 10 deletions(-) diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index c8cb20bc87..fda7d0d034 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -69,6 +69,12 @@ use = egg:swift#swauth # Cluster strings are of the format name:url where name is a short name for the # Swift cluster and url is the url to the proxy server(s) for the cluster. # default_swift_cluster = local:http://127.0.0.1:8080/v1 +# You may also use the format name::url::url where the first url is the one +# given to users to access their account (public url) and the second is the one +# used by swauth itself to create and delete accounts (private url). This is +# useful when a load balancer url should be used by users, but swauth itself is +# behind the load balancer. Example: +# default_swift_cluster = local::https://public.com:8080/v1::http://private.com:8080/v1 # token_life = 86400 # node_timeout = 10 # Highly recommended to change this. 
diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index bff8c64947..c3bd782b9d 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -61,12 +61,32 @@ class Swauth(object): self.auth_prefix += '/' self.auth_account = '%s.auth' % self.reseller_prefix self.default_swift_cluster = conf.get('default_swift_cluster', - 'local:http://127.0.0.1:8080/v1').rstrip('/') - self.dsc_name, self.dsc_url = self.default_swift_cluster.split(':', 1) + 'local:http://127.0.0.1:8080/v1') + # This setting is a little messy because of the options it has to + # provide. The basic format is cluster_name:url, such as the default + # value of local:http://127.0.0.1:8080/v1. But, often the url given to + # the user needs to be different than the url used by Swauth to + # create/delete accounts. So there's a more complex format of + # cluster_name::url::url, such as + # local::https://public.com:8080/v1::http://private.com:8080/v1. + # The double colon is what sets the two apart. + if '::' in self.default_swift_cluster: + self.dsc_name, self.dsc_url, self.dsc_url2 = \ + self.default_swift_cluster.split('::', 2) + self.dsc_url = self.dsc_url.rstrip('/') + self.dsc_url2 = self.dsc_url2.rstrip('/') + else: + self.dsc_name, self.dsc_url = \ + self.default_swift_cluster.split(':', 1) + self.dsc_url = self.dsc_url2 = self.dsc_url.rstrip('/') self.dsc_parsed = urlparse(self.dsc_url) if self.dsc_parsed.scheme not in ('http', 'https'): raise Exception('Cannot handle protocol scheme %s for url %s' % (self.dsc_parsed.scheme, repr(self.dsc_url))) + self.dsc_parsed2 = urlparse(self.dsc_url2) + if self.dsc_parsed2.scheme not in ('http', 'https'): + raise Exception('Cannot handle protocol scheme %s for url %s' % + (self.dsc_parsed2.scheme, repr(self.dsc_url2))) self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key: msg = _('No super_admin_key set in conf file! 
Exiting.') @@ -559,12 +579,12 @@ class Swauth(object): if not account_suffix: account_suffix = str(uuid4()) # Create the new account in the Swift cluster - path = quote('%s/%s%s' % (self.dsc_parsed.path, + path = quote('%s/%s%s' % (self.dsc_parsed2.path, self.reseller_prefix, account_suffix)) try: conn = self.get_conn() conn.request('PUT', path, - headers={'X-Auth-Token': self.get_itoken(req.environ)}) + headers={'X-Auth-Token': self.get_itoken(req.environ)}) resp = conn.getresponse() resp.read() if resp.status // 100 != 2: @@ -573,9 +593,9 @@ class Swauth(object): except: self.logger.error(_('ERROR: Exception while trying to communicate ' 'with %(scheme)s://%(host)s:%(port)s/%(path)s'), - {'scheme': self.dsc_parsed.scheme, - 'host': self.dsc_parsed.hostname, - 'port': self.dsc_parsed.port, 'path': path}) + {'scheme': self.dsc_parsed2.scheme, + 'host': self.dsc_parsed2.hostname, + 'port': self.dsc_parsed2.port, 'path': path}) raise # Record the mapping from account id back to account name path = quote('/v1/%s/.account_id/%s%s' % diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 08a1efb460..00c010b9dc 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -161,8 +161,13 @@ class TestAuth(unittest.TestCase): 'local:http://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', 'default_swift_cluster': 'local:https://host/path/'})(app) - self.assertEquals(ath.default_swift_cluster, - 'local:https://host/path') + self.assertEquals(ath.dsc_url, 'https://host/path') + self.assertEquals(ath.dsc_url2, 'https://host/path') + ath = auth.filter_factory({'super_admin_key': 'supertest', + 'default_swift_cluster': + 'local::https://host/path/::http://host2/path2/'})(app) + self.assertEquals(ath.dsc_url, 'https://host/path') + self.assertEquals(ath.dsc_url2, 'http://host2/path2') def test_top_level_ignore(self): resp = 
Request.blank('/').get_response(self.test_auth) @@ -3095,7 +3100,8 @@ class TestAuth(unittest.TestCase): 'X-Auth-Admin-Key': 'bad'}), 'act')) def test_reseller_admin_but_account_is_internal_use_only(self): - req = Request.blank('/v1/AUTH_.auth', environ={'REQUEST_METHOD': 'GET'}) + req = Request.blank('/v1/AUTH_.auth', + environ={'REQUEST_METHOD': 'GET'}) req.remote_user = 'act:usr,act,.reseller_admin' resp = self.test_auth.authorize(req) self.assertEquals(resp.status_int, 403) From 902dc6a5c26acef92532158f224e2436aea15e6e Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 10 Jan 2011 11:33:19 -0800 Subject: [PATCH 084/199] Fixed bug with using new internal url --- swift/common/middleware/swauth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index c3bd782b9d..961f3a3ba4 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -1142,13 +1142,13 @@ class Swauth(object): def get_conn(self, urlparsed=None): """ Returns an HTTPConnection based on the urlparse result given or the - default Swift cluster urlparse result. + default Swift cluster (internal url) urlparse result. 
:param urlparsed: The result from urlparse.urlparse or None to use the default Swift cluster's value """ if not urlparsed: - urlparsed = self.dsc_parsed + urlparsed = self.dsc_parsed2 if urlparsed.scheme == 'http': return HTTPConnection(urlparsed.netloc) else: From fb5a58a061688faed337b3113f5821d9e449a8c3 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 10 Jan 2011 12:37:49 -0800 Subject: [PATCH 085/199] Fp leak fix --- swift/common/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/swift/common/utils.py b/swift/common/utils.py index b3f640c3d3..a27f5308d4 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -396,6 +396,7 @@ def get_logger(conf, name=None, log_to_console=False): root_logger = logging.getLogger() if hasattr(get_logger, 'handler') and get_logger.handler: root_logger.removeHandler(get_logger.handler) + get_logger.handler.close() get_logger.handler = None if log_to_console: # check if a previous call to get_logger already added a console logger From 2ec7e8705e7fcfa68ba39bc200c2c32b78a1dbde Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Mon, 10 Jan 2011 23:11:46 -0600 Subject: [PATCH 086/199] I did not know the wonders of test_chunked_put_and_a_bit_more --- test/unit/proxy/test_server.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 52045777a9..d6e2710ca7 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -2000,6 +2000,37 @@ class TestObjectController(unittest.TestCase): # will be sent in a single chunk. 
self.assertEquals(body, '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') + # Check copy content type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: text/jibberish' + '\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj2 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Copy-From: c/obj\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj2 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Content-Type: text/jibberish' in headers) finally: prospa.kill() acc1spa.kill() From 7dc0753f1e8ee5c8b02564bb1bd900edef0e01ff Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Tue, 11 Jan 2011 00:08:35 -0600 Subject: [PATCH 087/199] fixed and tested charset on content types --- swift/proxy/server.py | 4 +-- test/unit/proxy/test_server.py | 46 ++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 8485a618da..a00a31f42b 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -632,8 +632,8 @@ class Controller(object): res.status = source.status res.content_length = source.getheader('Content-Length') if 
source.getheader('Content-Type'): - res.content_type = source.getheader('Content-Type') res.charset = None + res.content_type = source.getheader('Content-Type') return res elif 200 <= source.status <= 399: res = status_map[source.status](request=req) @@ -641,8 +641,8 @@ class Controller(object): if req.method == 'HEAD': res.content_length = source.getheader('Content-Length') if source.getheader('Content-Type'): - res.content_type = source.getheader('Content-Type') res.charset = None + res.content_type = source.getheader('Content-Type') return res statuses.append(source.status) reasons.append(source.reason) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index d6e2710ca7..77963fe1b2 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -2031,6 +2031,52 @@ class TestObjectController(unittest.TestCase): exp = 'HTTP/1.1 200' self.assertEquals(headers[:len(exp)], exp) self.assert_('Content-Type: text/jibberish' in headers) + # Check set content type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj3 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' + '\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj3 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Content-Type: foo/bar' in + headers.split('\r\n'), repr(headers.split('\r\n'))) + # Check set content type with charset + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT 
/v1/a/c/obj4 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' + '; charset=UTF-8\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj4 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Content-Type: foo/bar; charset=UTF-8' in + headers.split('\r\n'), repr(headers.split('\r\n'))) finally: prospa.kill() acc1spa.kill() From 9746823e32dc0c9518312048c2c691d33805a19f Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Tue, 11 Jan 2011 01:24:05 -0800 Subject: [PATCH 088/199] fixing error log bug --- swift/obj/replicator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 1f047102e1..edd9fd5f3a 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -254,8 +254,8 @@ class ObjectReplicator(Daemon): continue self.logger.info(result) if ret_val: - self.logger.error(_('Bad rsync return code: %s -> %d'), - (str(args), ret_val)) + self.logger.error(_('Bad rsync return code: %(args)s -> %(ret)d'), + {'args': str(args), 'ret': ret_val}) elif results: self.logger.info( _("Successful rsync of %(src)s at %(dst)s (%(time).03f)"), From 41b1452ab70feaa400a3064cb2c9bae9617b63e9 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 12 Jan 2011 08:40:55 -0800 Subject: [PATCH 089/199] Fix for GETing a manifest that has an empty listing --- swift/proxy/server.py | 16 +++++++++++----- test/unit/proxy/test_server.py | 25 +++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 5 
deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index e1d5824b4e..9c2ecc12ee 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -814,11 +814,17 @@ class ObjectController(Controller): else: # For objects with a reasonable number of segments, we'll serve # them with a set content-length and computed etag. - content_length = sum(o['bytes'] for o in listing) - last_modified = max(o['last_modified'] for o in listing) - last_modified = \ - datetime(*map(int, re.split('[^\d]', last_modified)[:-1])) - etag = md5('"'.join(o['hash'] for o in listing)).hexdigest() + if listing: + content_length = sum(o['bytes'] for o in listing) + last_modified = max(o['last_modified'] for o in listing) + last_modified = datetime(*map(int, re.split('[^\d]', + last_modified)[:-1])) + etag = md5( + '"'.join(o['hash'] for o in listing)).hexdigest() + else: + content_length = 0 + last_modified = resp.last_modified + etag = md5().hexdigest() headers = { 'X-Object-Manifest': resp.headers['x-object-manifest'], 'Content-Type': resp.content_type, diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 4562652b87..e5a4e40652 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -2042,6 +2042,31 @@ class TestObjectController(unittest.TestCase): self.assert_('Content-Length: 25\r' in headers) body = fd.read() self.assertEquals(body, '1234 1234 1234 1234 1234 ') + # Create an object manifest file pointing to nothing + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/empty HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' + 'segmented/empty/\r\nContent-Type: text/jibberish\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure retrieving the manifest file gives a zero-byte file + sock = 
connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/empty HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/empty/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = fd.read() + self.assertEquals(body, '') # Check copy content type sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() From e5c5a3778ede8b75a1cebf3f2f96ba3dfbd90d1d Mon Sep 17 00:00:00 2001 From: Greg Lange <glange@rackspace.com> Date: Wed, 12 Jan 2011 21:09:39 +0000 Subject: [PATCH 090/199] Made older functional tests look for default config file when env variable is unset --- test/functional/swift.py | 5 +++++ test/functional/tests.py | 16 +++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/test/functional/swift.py b/test/functional/swift.py index f7d580b93d..b251bcc047 100644 --- a/test/functional/swift.py +++ b/test/functional/swift.py @@ -24,6 +24,7 @@ import urllib import simplejson as json +from nose import SkipTest from xml.dom import minidom class AuthenticationFailed(Exception): @@ -79,6 +80,10 @@ def listing_items(method): class Connection(object): def __init__(self, config): + for key in 'auth_host auth_port auth_ssl account username password'.split(): + if not config.has_key(key): + raise SkipTest + self.auth_host = config['auth_host'] self.auth_port = int(config['auth_port']) self.auth_ssl = config['auth_ssl'] in ('on', 'true', 'yes', '1') diff --git a/test/functional/tests.py b/test/functional/tests.py index 4cf090ae5d..482b507b52 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -19,8 +19,10 @@ import configobj from datetime import datetime import locale import os +import os.path import random import StringIO +import sys import time 
import threading import uuid @@ -30,10 +32,18 @@ import urllib from swift import Account, AuthenticationFailed, Connection, Container, \ File, ResponseError -config = configobj.ConfigObj(os.environ['SWIFT_TEST_CONFIG_FILE']) -locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C')) +config_file_env_var = 'SWIFT_TEST_CONFIG_FILE' +default_config_file = '/etc/swift/func_test.conf' -NoRun = object +if os.environ.has_key(config_file_env_var): + config_file = os.environ[config_file_env_var] +elif os.path.isfile(default_config_file): + config_file = default_config_file +else: + print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG' + +config = configobj.ConfigObj(config_file) +locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C')) class Base: pass From 1955cf780d4762c5d05b73d95e140f19a1effdfd Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Wed, 12 Jan 2011 23:46:03 +0000 Subject: [PATCH 091/199] object replicator logging fixes --- swift/common/utils.py | 6 ++++-- swift/obj/replicator.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 23837d0169..d132954c48 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -320,9 +320,11 @@ class LogAdapter(object): call = self.logger.exception elif isinstance(exc, socket.error): if exc.errno == errno.ECONNREFUSED: - emsg = 'Connection refused' + emsg = _('Connection refused') elif exc.errno == errno.EHOSTUNREACH: - emsg = 'Host unreachable' + emsg = _('Host unreachable') + elif exc.errno == errno.ETIMEDOUT: + emsg = _('Connection timeout') else: call = self.logger.exception elif isinstance(exc, eventlet.Timeout): diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index edd9fd5f3a..4b401494c8 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -407,7 +407,7 @@ class ObjectReplicator(Daemon): conn.getresponse().read() self.suffix_sync += len(suffixes) except 
(Exception, Timeout): - logging.exception("Error syncing with node: %s" % node) + self.logger.exception("Error syncing with node: %s" % node) self.suffix_count += len(local_hash) except (Exception, Timeout): self.logger.exception(_("Error syncing partition")) From 52f691bc6f2fda6e9bee474dbb38f7af54973b09 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 13 Jan 2011 00:59:42 +0000 Subject: [PATCH 092/199] i18n --- swift/obj/replicator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 4b401494c8..ec76fb384c 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -179,7 +179,7 @@ def get_hashes(partition_dir, do_listdir=True, reclaim_age=ONE_WEEK): hashes[suffix] = hash_suffix(suffix_dir, reclaim_age) hashed += 1 except OSError: - logging.exception('Error hashing suffix') + logging.exception(_('Error hashing suffix')) hashes[suffix] = None else: del hashes[suffix] @@ -407,7 +407,7 @@ class ObjectReplicator(Daemon): conn.getresponse().read() self.suffix_sync += len(suffixes) except (Exception, Timeout): - self.logger.exception("Error syncing with node: %s" % node) + self.logger.exception(_("Error syncing with node: %s") % node) self.suffix_count += len(local_hash) except (Exception, Timeout): self.logger.exception(_("Error syncing partition")) From db7111d00996ea430d5cfb7cff64586ae205f142 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 13 Jan 2011 01:32:38 +0000 Subject: [PATCH 093/199] fix --- swift/common/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index d132954c48..27125d2c25 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -310,7 +310,7 @@ class LogAdapter(object): return self.logger.getEffectiveLevel() def exception(self, msg, *args): - _, exc, _ = sys.exc_info() + _junk, exc, _junk = sys.exc_info() call 
= self.logger.error emsg = '' if isinstance(exc, OSError): From c806569373f53e5fa4a9ed3e75c660a6793ad6e2 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Thu, 13 Jan 2011 20:31:28 +0900 Subject: [PATCH 094/199] s3api: add unit tests --- test/unit/common/middleware/test_swift3.py | 397 +++++++++++++++++++++ 1 file changed, 397 insertions(+) create mode 100644 test/unit/common/middleware/test_swift3.py diff --git a/test/unit/common/middleware/test_swift3.py b/test/unit/common/middleware/test_swift3.py new file mode 100644 index 0000000000..a28af4f3bf --- /dev/null +++ b/test/unit/common/middleware/test_swift3.py @@ -0,0 +1,397 @@ +# Copyright (c) 2011 OpenStack, LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +from webob import Request, Response +from webob.exc import HTTPUnauthorized, HTTPCreated, HTTPNoContent,\ + HTTPAccepted, HTTPBadRequest, HTTPNotFound, HTTPConflict +import xml.dom.minidom +import simplejson + +from swift.common.middleware import swift3 + + +class FakeApp(object): + def __init__(self): + self.app = self + self.response_args = [] + + def __call__(self, env, start_response): + return "FAKE APP" + + def do_start_response(self, *args): + self.response_args.extend(args) + + +class FakeAppService(FakeApp): + def __init__(self, status=200): + FakeApp.__init__(self) + self.status = status + self.buckets = (('apple', 1, 200), ('orange', 3, 430)) + + def __call__(self, env, start_response): + if self.status == 200: + start_response(Response().status) + start_response({'Content-Type': 'text/xml'}) + json_pattern = ['"name":%s', '"count":%s', '"bytes":%s'] + json_pattern = '{' + ','.join(json_pattern) + '}' + json_out = [] + for b in self.buckets: + name = simplejson.dumps(b[0]) + json_out.append(json_pattern % + (name, b[1], b[2])) + account_list = '[' + ','.join(json_out) + ']' + return account_list + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + + +class FakeAppBucket(FakeApp): + def __init__(self, status=200): + FakeApp.__init__(self) + self.status = status + self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303), + ('viola', '2011-01-05T02:19:14.275290', 0, 3909), + ('lily', '2011-01-05T02:19:14.275290', 0, 3909)) + + def __call__(self, env, start_response): + if env['REQUEST_METHOD'] == 'GET': + if self.status == 200: + start_response(Response().status) + start_response({'Content-Type': 'text/xml'}) + json_pattern = ['"name":%s', '"last_modified":%s', '"hash":%s', + '"bytes":%s'] + json_pattern = '{' + ','.join(json_pattern) + '}' + json_out = [] + for b in self.objects: + name = simplejson.dumps(b[0]) + time 
= simplejson.dumps(b[1]) + json_out.append(json_pattern % + (name, time, b[2], b[3])) + account_list = '[' + ','.join(json_out) + ']' + return account_list + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + elif self.status == 404: + start_response(HTTPNotFound().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + elif env['REQUEST_METHOD'] == 'PUT': + if self.status == 201: + start_response(HTTPCreated().status) + start_response({}) + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + elif self.status == 202: + start_response(HTTPAccepted().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + elif env['REQUEST_METHOD'] == 'DELETE': + if self.status == 204: + start_response(HTTPNoContent().status) + start_response({}) + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + elif self.status == 404: + start_response(HTTPNotFound().status) + start_response({}) + elif self.status == 409: + start_response(HTTPConflict().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + + +class FakeAppObject(FakeApp): + def __init__(self, status=200): + FakeApp.__init__(self) + self.status = status + self.object_body = 'hello' + self.response_headers = {'Content-Type': 'text/html', + 'Content-Length': len(self.object_body), + 'x-object-meta-test': 'swift', + 'etag': '1b2cf535f27731c974343645a3985328', + 'last-modified': '2011-01-05T02:19:14.275290'} + + def __call__(self, env, start_response): + if env['REQUEST_METHOD'] == 'GET' or env['REQUEST_METHOD'] == 'HEAD': + if self.status == 200: + start_response(Response().status) + start_response(self.response_headers) + if env['REQUEST_METHOD'] == 'GET': + return self.object_body + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + 
elif self.status == 404: + start_response(HTTPNotFound().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + elif env['REQUEST_METHOD'] == 'PUT': + if self.status == 201: + start_response(HTTPCreated().status) + start_response({'etag': self.response_headers['etag']}) + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + elif self.status == 404: + start_response(HTTPNotFound().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + elif env['REQUEST_METHOD'] == 'DELETE': + if self.status == 204: + start_response(HTTPNoContent().status) + start_response({}) + elif self.status == 401: + start_response(HTTPUnauthorized().status) + start_response({}) + elif self.status == 404: + start_response(HTTPNotFound().status) + start_response({}) + else: + start_response(HTTPBadRequest().status) + start_response({}) + + +def start_response(*args): + pass + + +class TestSwift3(unittest.TestCase): + def setUp(self): + self.app = swift3.filter_factory({})(FakeApp()) + + def test_non_s3_request_passthrough(self): + req = Request.blank('/something') + resp = self.app(req.environ, start_response) + self.assertEquals(resp, 'FAKE APP') + + def test_bad_format_authorization(self): + req = Request.blank('/something', + headers={'Authorization': 'hoge'}) + resp = self.app(req.environ, start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'Error') + code = dom.getElementsByTagName('Code')[0].childNodes[0].nodeValue + self.assertEquals(code, 'InvalidArgument') + + def test_bad_path(self): + req = Request.blank('/bucket/object/bad', + environ={'REQUEST_METHOD': 'GET'}, + headers={'Authorization': 'AUTH_something:hoge'}) + resp = self.app(req.environ, start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'Error') + code = 
dom.getElementsByTagName('Code')[0].childNodes[0].nodeValue + self.assertEquals(code, 'InvalidURI') + + def _test_method_error(self, cl, method, path, status): + local_app = swift3.filter_factory({})(cl(status)) + req = Request.blank(path, + environ={'REQUEST_METHOD': method}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'Error') + return dom.getElementsByTagName('Code')[0].childNodes[0].nodeValue + + def test_service_GET_error(self): + code = self._test_method_error(FakeAppService, 'GET', '/', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppService, 'GET', '/', 0) + self.assertEquals(code, 'InvalidURI') + + def test_service_GET(self): + local_app = swift3.filter_factory({})(FakeAppService()) + req = Request.blank('/', + environ={'REQUEST_METHOD': 'GET'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '200') + + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'ListAllMyBucketsResult') + + buckets = [n for n in dom.getElementsByTagName('Bucket')] + listing = [n for n in buckets[0].childNodes if n.nodeName != '#text'] + self.assertEquals(len(listing), 2) + + names = [] + for b in buckets: + if b.childNodes[0].nodeName == 'Name': + names.append(b.childNodes[0].childNodes[0].nodeValue) + + self.assertEquals(len(names), len(FakeAppService().buckets)) + for i in FakeAppService().buckets: + self.assertTrue(i[0] in names) + + def test_bucket_GET_error(self): + code = self._test_method_error(FakeAppBucket, 'GET', '/bucket', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppBucket, 'GET', '/bucket', 404) + self.assertEquals(code, 'InvalidBucketName') + code = 
self._test_method_error(FakeAppBucket, 'GET', '/bucket', 0) + self.assertEquals(code, 'InvalidURI') + + def test_bucket_GET(self): + local_app = swift3.filter_factory({})(FakeAppBucket()) + bucket_name = 'junk' + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '200') + + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'ListBucketResult') + name = dom.getElementsByTagName('Name')[0].childNodes[0].nodeValue + self.assertEquals(name, bucket_name) + + objects = [n for n in dom.getElementsByTagName('Contents')] + listing = [n for n in objects[0].childNodes if n.nodeName != '#text'] + + names = [] + for o in objects: + if o.childNodes[0].nodeName == 'Key': + names.append(o.childNodes[0].childNodes[0].nodeValue) + + self.assertEquals(len(names), len(FakeAppBucket().objects)) + for i in FakeAppBucket().objects: + self.assertTrue(i[0] in names) + + def test_bucket_PUT_error(self): + code = self._test_method_error(FakeAppBucket, 'PUT', '/bucket', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppBucket, 'PUT', '/bucket', 202) + self.assertEquals(code, 'BucketAlreadyExists') + code = self._test_method_error(FakeAppBucket, 'PUT', '/bucket', 0) + self.assertEquals(code, 'InvalidURI') + + def test_bucket_PUT(self): + local_app = swift3.filter_factory({})(FakeAppBucket(201)) + req = Request.blank('/bucket', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '200') + + def test_bucket_DELETE_error(self): + code = self._test_method_error(FakeAppBucket, 'DELETE', '/bucket', 401) + self.assertEquals(code, 'AccessDenied') + code = 
self._test_method_error(FakeAppBucket, 'DELETE', '/bucket', 404) + self.assertEquals(code, 'InvalidBucketName') + code = self._test_method_error(FakeAppBucket, 'DELETE', '/bucket', 409) + self.assertEquals(code, 'BucketNotEmpty') + code = self._test_method_error(FakeAppBucket, 'DELETE', '/bucket', 0) + self.assertEquals(code, 'InvalidURI') + + def test_bucket_DELETE(self): + local_app = swift3.filter_factory({})(FakeAppBucket(204)) + req = Request.blank('/bucket', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '204') + + def _test_object_GETorHEAD(self, method): + local_app = swift3.filter_factory({})(FakeAppObject()) + req = Request.blank('/bucket/object', + environ={'REQUEST_METHOD': method}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '200') + + headers = dict(local_app.app.response_args[1]) + for key, val in local_app.app.response_headers.iteritems(): + if key in ('Content-Length', 'Content-Type', 'Content-Encoding', + 'etag', 'last-modified'): + self.assertTrue(key in headers) + self.assertEquals(headers[key], val) + + elif key.startswith('x-object-meta-'): + self.assertTrue('x-amz-meta-' + key[14:] in headers) + self.assertEquals(headers['x-amz-meta-' + key[14:]], val) + + if method == 'GET': + self.assertEquals(resp, local_app.app.object_body) + + def test_object_HEAD(self): + self._test_object_GETorHEAD('HEAD') + + def test_object_GET(self): + self._test_object_GETorHEAD('GET') + + def test_object_PUT_error(self): + code = self._test_method_error(FakeAppObject, 'PUT', + '/bucket/object', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppObject, 'PUT', + '/bucket/object', 404) + self.assertEquals(code, 'InvalidBucketName') 
+ code = self._test_method_error(FakeAppObject, 'PUT', + '/bucket/object', 0) + self.assertEquals(code, 'InvalidURI') + + def test_object_PUT(self): + local_app = swift3.filter_factory({})(FakeAppObject(201)) + req = Request.blank('/bucket/object', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '200') + + headers = dict(local_app.app.response_args[1]) + self.assertEquals(headers['ETag'], + "\"%s\"" % local_app.app.response_headers['etag']) + + def test_object_DELETE_error(self): + code = self._test_method_error(FakeAppObject, 'DELETE', + '/bucket/object', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppObject, 'DELETE', + '/bucket/object', 404) + self.assertEquals(code, 'NoSuchKey') + code = self._test_method_error(FakeAppObject, 'DELETE', + '/bucket/object', 0) + self.assertEquals(code, 'InvalidURI') + + def test_object_DELETE(self): + local_app = swift3.filter_factory({})(FakeAppObject(204)) + req = Request.blank('/bucket/object', + environ={'REQUEST_METHOD': 'DELETE'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + self.assertEquals(local_app.app.response_args[0].split()[0], '204') + + +if __name__ == '__main__': + unittest.main() From 72d6059bba14afb2ef771a6052aa415d09e27824 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Thu, 13 Jan 2011 21:05:17 +0900 Subject: [PATCH 095/199] s3api: add more unit tests --- test/unit/common/middleware/test_swift3.py | 28 +++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/test/unit/common/middleware/test_swift3.py b/test/unit/common/middleware/test_swift3.py index a28af4f3bf..d4e9aec46a 100644 --- a/test/unit/common/middleware/test_swift3.py +++ 
b/test/unit/common/middleware/test_swift3.py @@ -14,6 +14,7 @@ # limitations under the License. import unittest +from datetime import datetime from webob import Request, Response from webob.exc import HTTPUnauthorized, HTTPCreated, HTTPNoContent,\ @@ -214,6 +215,16 @@ class TestSwift3(unittest.TestCase): code = dom.getElementsByTagName('Code')[0].childNodes[0].nodeValue self.assertEquals(code, 'InvalidURI') + def test_bad_method(self): + req = Request.blank('/', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'Authorization': 'AUTH_something:hoge'}) + resp = self.app(req.environ, start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.firstChild.nodeName, 'Error') + code = dom.getElementsByTagName('Code')[0].childNodes[0].nodeValue + self.assertEquals(code, 'InvalidURI') + def _test_method_error(self, cl, method, path, status): local_app = swift3.filter_factory({})(cl(status)) req = Request.blank(path, @@ -347,6 +358,17 @@ class TestSwift3(unittest.TestCase): def test_object_HEAD(self): self._test_object_GETorHEAD('HEAD') + def test_object_GET_error(self): + code = self._test_method_error(FakeAppObject, 'GET', + '/bucket/object', 401) + self.assertEquals(code, 'AccessDenied') + code = self._test_method_error(FakeAppObject, 'GET', + '/bucket/object', 404) + self.assertEquals(code, 'NoSuchKey') + code = self._test_method_error(FakeAppObject, 'GET', + '/bucket/object', 0) + self.assertEquals(code, 'InvalidURI') + def test_object_GET(self): self._test_object_GETorHEAD('GET') @@ -365,7 +387,11 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppObject(201)) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AUTH_who:password', + 'x-amz-storage-class': 'REDUCED_REDUNDANCY', + 'Content-MD5': '1b2cf535f27731c974343645a3985328'}) + req.date = datetime.now() + req.content_type = 'text/plain' resp = 
local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '200') From d6aaba670bca6285cf1578e478df5d9f93b0ec68 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Thu, 13 Jan 2011 09:05:44 -0800 Subject: [PATCH 096/199] Shuffle the partitions to reassign on a ring rebalance. --- swift/common/ring/builder.py | 3 ++- test/unit/common/ring/test_builder.py | 35 +++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/swift/common/ring/builder.py b/swift/common/ring/builder.py index 4fb3bfb8b8..5b66b8a9bf 100644 --- a/swift/common/ring/builder.py +++ b/swift/common/ring/builder.py @@ -14,7 +14,7 @@ # limitations under the License. from array import array -from random import randint +from random import randint, shuffle from time import time from swift.common.ring import RingData @@ -413,6 +413,7 @@ class RingBuilder(object): dev['parts_wanted'] += 1 dev['parts'] -= 1 reassign_parts.append(part) + shuffle(reassign_parts) return reassign_parts def _reassign_parts(self, reassign_parts): diff --git a/test/unit/common/ring/test_builder.py b/test/unit/common/ring/test_builder.py index e26116d25c..77be1df634 100644 --- a/test/unit/common/ring/test_builder.py +++ b/test/unit/common/ring/test_builder.py @@ -125,6 +125,41 @@ class TestRingBuilder(unittest.TestCase): counts[dev_id] = counts.get(dev_id, 0) + 1 self.assertEquals(counts, {0: 256, 2: 256, 3: 256}) + def test_shuffled_gather(self): + if self._shuffled_gather_helper() and \ + self._shuffled_gather_helper(): + raise AssertionError('It is highly likely the ring is no ' + 'longer shuffling the set of partitions to reassign on a ' + 'rebalance.') + + def _shuffled_gather_helper(self): + rb = ring.RingBuilder(8, 3, 1) + rb.add_dev({'id': 0, 'zone': 0, 'weight': 1, 'ip': '127.0.0.1', + 'port': 10000, 'device': 'sda1'}) + rb.add_dev({'id': 1, 'zone': 1, 'weight': 1, 'ip': '127.0.0.1', + 'port': 10001, 'device': 'sda1'}) + 
rb.add_dev({'id': 2, 'zone': 2, 'weight': 1, 'ip': '127.0.0.1', + 'port': 10002, 'device': 'sda1'}) + rb.rebalance() + rb.add_dev({'id': 3, 'zone': 3, 'weight': 1, 'ip': '127.0.0.1', + 'port': 10003, 'device': 'sda1'}) + rb.pretend_min_part_hours_passed() + parts = rb._gather_reassign_parts() + max_run = 0 + run = 0 + last_part = 0 + for part in parts: + if part > last_part: + run += 1 + else: + if run > max_run: + max_run = run + run = 0 + last_part = part + if run > max_run: + max_run = run + return max_run > len(parts) / 2 + def test_rerebalance(self): rb = ring.RingBuilder(8, 3, 1) rb.add_dev({'id': 0, 'zone': 0, 'weight': 1, 'ip': '127.0.0.1', From e0987d609b6ee106907e0410222368eeb4abaed3 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 13 Jan 2011 21:00:51 +0000 Subject: [PATCH 097/199] netifaces --- swift/common/utils.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 27125d2c25..ce174e4cf5 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -38,6 +38,7 @@ import cPickle as pickle import eventlet from eventlet import greenio, GreenPool, sleep, Timeout, listen from eventlet.green import socket, subprocess, ssl, thread, threading +import netifaces from swift.common.exceptions import LockTimeout, MessageTimeout @@ -522,15 +523,19 @@ def parse_options(usage="%prog CONFIG [options]", once=False, test_args=None): def whataremyips(): """ - Get the machine's ip addresses using ifconfig + Get the machine's ip addresses - :returns: list of Strings of IPv4 ip addresses + :returns: list of Strings of ip addresses """ - proc = subprocess.Popen(['/sbin/ifconfig'], stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - ret_val = proc.wait() - results = proc.stdout.read().split('\n') - return [x.split(':')[1].split()[0] for x in results if 'inet addr' in x] + addresses = [] + for interface in netifaces.interfaces(): + iface_data = 
netifaces.ifaddresses(interface) + for family in iface_data: + if family not in (netifaces.AF_INET, netifaces.AF_INET6): + continue + for address in iface_data[family]: + addresses.append(address['addr']) + return addresses def storage_directory(datadir, partition, hash): From ebd703fff056554228a3b2749e9a5c01aa102e98 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 04:10:52 +0000 Subject: [PATCH 098/199] pep8 and xml escapes --- swift/common/middleware/swift3.py | 69 +++++++++++++++++++++++-------- 1 file changed, 52 insertions(+), 17 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index de66a8d677..768ca356c2 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -18,7 +18,7 @@ import rfc822 import hmac import base64 import errno -import binascii +from xml.sax.saxutils import escape as xml_escape from webob import Request, Response from webob.exc import HTTPNotFound @@ -43,16 +43,20 @@ def get_err_response(code): 'NoSuchBucket': (404, 'The specified bucket does not exist'), 'SignatureDoesNotMatch': - (403, 'The calculated request signature does not match your provided one'), + (403, 'The calculated request signature does not match '\ + 'your provided one'), 'NoSuchKey': (404, 'The resource you requested does not exist')} resp = Response(content_type='text/xml') resp.status = error_table[code][0] resp.body = error_table[code][1] - resp.body = """<?xml version="1.0" encoding="UTF-8"?>\r\n<Error>\r\n <Code>%s</Code>\r\n <Message>%s</Message>\r\n</Error>\r\n""" % (code, error_table[code][1]) + resp.body = '<?xml version="1.0" encoding="UTF-8"?>\r\n<Error>\r\n ' \ + '<Code>%s</Code>\r\n <Message>%s</Message>\r\n</Error>\r\n' \ + % (code, error_table[code][1]) return resp + class Controller(object): def __init__(self, app): self.app = app @@ -61,6 +65,7 @@ class Controller(object): def do_start_response(self, *args): 
self.response_args.extend(args) + class ServiceController(Controller): def __init__(self, env, app, account_name, token, **kwargs): Controller.__init__(self, app) @@ -85,16 +90,25 @@ class ServiceController(Controller): resp.status = 200 # we don't keep the creation time of a backet (s3cmd doesn't # work without that) so we use something bogus. - resp.body = """<?xml version="1.0" encoding="UTF-8"?><ListAllMyBucketsResult xmlns="http://doc.s3.amazonaws.com/2006-03-01"><Buckets>%s</Buckets></ListAllMyBucketsResult>""" % ("".join(['<Bucket><Name>%s</Name><CreationDate>2009-02-03T16:45:09.000Z</CreationDate></Bucket>' % i['name'] for i in containers])) + resp.body = '<?xml version="1.0" encoding="UTF-8"?>' \ + '<ListAllMyBucketsResult ' \ + 'xmlns="http://doc.s3.amazonaws.com/2006-03-01">' \ + '<Buckets>%s</Buckets>' \ + '</ListAllMyBucketsResult>' \ + % ("".join(['<Bucket><Name>%s</Name><CreationDate>' \ + '2009-02-03T16:45:09.000Z</CreationDate></Bucket>' % + xml_escape(i['name']) for i in containers])) return resp + class BucketController(Controller): - def __init__(self, env, app, account_name, token, container_name, **kwargs): + def __init__(self, env, app, account_name, token, container_name, + **kwargs): Controller.__init__(self, app) self.container_name = unquote(container_name) env['HTTP_X_AUTH_TOKEN'] = token env['PATH_INFO'] = '/v1/%s/%s' % (account_name, container_name) - + def GET(self, env, start_response): env['QUERY_STRING'] = 'format=json' body_iter = self.app(env, self.do_start_response) @@ -113,7 +127,18 @@ class BucketController(Controller): objects = loads(''.join(list(body_iter))) resp = Response(content_type='text/xml') resp.status = 200 - resp.body = """<?xml version="1.0" encoding="UTF-8"?><ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01"><Name>%s</Name>%s</ListBucketResult>""" % (self.container_name, 
"".join(['<Contents><Key>%s</Key><LastModified>%s</LastModified><ETag>%s</ETag><Size>%s</Size><StorageClass>STANDARD</StorageClass></Contents>' % (i['name'], i['last_modified'], i['hash'], i['bytes']) for i in objects])) + resp.body = '<?xml version="1.0" encoding="UTF-8"?>' \ + '<ListBucketResult ' \ + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01">' \ + '<Name>%s</Name>' \ + '%s' \ + '</ListBucketResult>' % \ + (self.container_name, + "".join(['<Contents><Key>%s</Key><LastModified>%s</LastModified>'\ + '<ETag>%s</ETag><Size>%s</Size><StorageClass>STANDARD'\ + '</StorageClass></Contents>' % + (xml_escape(i['name']), i['last_modified'], i['hash'], + i['bytes']) for i in objects])) return resp def PUT(self, env, start_response): @@ -155,12 +180,15 @@ class BucketController(Controller): resp.status = 204 return resp + class ObjectController(Controller): - def __init__(self, env, app, account_name, token, container_name, object_name, **kwargs): + def __init__(self, env, app, account_name, token, container_name, + object_name, **kwargs): Controller.__init__(self, app) self.container_name = unquote(container_name) env['HTTP_X_AUTH_TOKEN'] = token - env['PATH_INFO'] = '/v1/%s/%s/%s' % (account_name, container_name, object_name) + env['PATH_INFO'] = '/v1/%s/%s/%s' % (account_name, container_name, + object_name) def GETorHEAD(self, env, start_response): app_iter = self.app(env, self.do_start_response) @@ -170,9 +198,11 @@ class ObjectController(Controller): if 200 <= status < 300: new_hdrs = {} for key, val in headers.iteritems(): + key = key.lower() if key.startswith('x-object-meta-'): new_hdrs['x-amz-meta-' + key[14:]] = val - elif key in ('Content-Length', 'Content-Type', 'Content-Encoding', 'etag', 'last-modified'): + elif key in ('content-length', 'content-type', + 'content-encoding', 'etag', 'last-modified'): new_hdrs[key] = val return Response(status=status, headers=new_hdrs, app_iter=app_iter) elif status == 401: @@ -187,7 +217,7 @@ class 
ObjectController(Controller): def GET(self, env, start_response): return self.GETorHEAD(env, start_response) - + def PUT(self, env, start_response): body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) @@ -211,7 +241,7 @@ class ObjectController(Controller): body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) - + if status != 204: if status == 401: return get_err_response('AccessDenied') @@ -225,6 +255,7 @@ class ObjectController(Controller): resp.status = 204 return resp + class Swift3Middleware(object): def __init__(self, app, conf, *args, **kwargs): self.app = app @@ -238,7 +269,7 @@ class Swift3Middleware(object): elif container: return BucketController, d return ServiceController, d - + def get_account_info(self, env, req): if req.headers.get("content-md5"): md5 = req.headers.get("content-md5") @@ -258,7 +289,7 @@ class Swift3Middleware(object): h = req.method + "\n" + md5 + "\n" + content_type + "\n" + date + "\n" for header in req.headers: if header.startswith("X-Amz-"): - h += header.lower()+":"+str(req.headers[header])+"\n" + h += header.lower() + ":" + str(req.headers[header]) + "\n" h += req.path try: account, _ = req.headers['Authorization'].split(' ')[-1].split(':') @@ -279,18 +310,22 @@ class Swift3Middleware(object): account_name, token = self.get_account_info(env, req) if not account_name: return get_err_response('InvalidArgument')(env, start_response) - - controller = controller(env, self.app, account_name, token, **path_parts) + + controller = controller(env, self.app, account_name, token, + **path_parts) if hasattr(controller, req.method): res = getattr(controller, req.method)(env, start_response) else: return get_err_response('InvalidURI')(env, start_response) - + return res(env, start_response) + def filter_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) + def swift3_filter(app): 
return Swift3Middleware(app, conf) + return swift3_filter From 0f8089ccdfd56e2a7db3c5ae3f46081fa1e12e8f Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 04:14:25 +0000 Subject: [PATCH 099/199] unit tests --- swift/common/middleware/swift3.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 768ca356c2..979c007538 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -198,10 +198,10 @@ class ObjectController(Controller): if 200 <= status < 300: new_hdrs = {} for key, val in headers.iteritems(): - key = key.lower() - if key.startswith('x-object-meta-'): + _key = key.lower() + if _key.startswith('x-object-meta-'): new_hdrs['x-amz-meta-' + key[14:]] = val - elif key in ('content-length', 'content-type', + elif _key in ('content-length', 'content-type', 'content-encoding', 'etag', 'last-modified'): new_hdrs[key] = val return Response(status=status, headers=new_hdrs, app_iter=app_iter) From 0149e962d49e7b3b46c088796df509da47293da4 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 05:43:23 +0000 Subject: [PATCH 100/199] swift3 bucket listing args --- swift/common/middleware/swift3.py | 69 ++++++++++++------ test/unit/common/middleware/test_swift3.py | 81 ++++++++++++++++++++++ 2 files changed, 128 insertions(+), 22 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 979c007538..6f6b830a0f 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -19,6 +19,7 @@ import hmac import base64 import errno from xml.sax.saxutils import escape as xml_escape +import cgi from webob import Request, Response from webob.exc import HTTPNotFound @@ -27,6 +28,9 @@ from simplejson import loads from swift.common.utils import split_path +MAX_BUCKET_LISTING = 1000 + + def 
get_err_response(code): error_table = {'AccessDenied': (403, 'Access denied'), @@ -82,7 +86,6 @@ class ServiceController(Controller): if status == 401: return get_err_response('AccessDenied') else: - print status, headers, body_iter return get_err_response('InvalidURI') containers = loads(''.join(list(body_iter))) @@ -110,7 +113,19 @@ class BucketController(Controller): env['PATH_INFO'] = '/v1/%s/%s' % (account_name, container_name) def GET(self, env, start_response): - env['QUERY_STRING'] = 'format=json' + if 'QUERY_STRING' in env: + args = dict(cgi.parse_qsl(env['QUERY_STRING'])) + else: + args = {} + max_keys = min(int(args.get('max-keys', MAX_BUCKET_LISTING)), + MAX_BUCKET_LISTING) + env['QUERY_STRING'] = 'format=json&limit=%s' % (max_keys + 1) + if 'marker' in args: + env['QUERY_STRING'] += '&marker=%s' % quote(args['marker']) + if 'prefix' in args: + env['QUERY_STRING'] += '&prefix=%s' % quote(args['prefix']) + if 'delimiter' in args: + env['QUERY_STRING'] += '&delimiter=%s' % quote(args['delimiter']) body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) @@ -121,25 +136,39 @@ class BucketController(Controller): elif status == 404: return get_err_response('InvalidBucketName') else: - print status, headers, body_iter return get_err_response('InvalidURI') objects = loads(''.join(list(body_iter))) - resp = Response(content_type='text/xml') - resp.status = 200 - resp.body = '<?xml version="1.0" encoding="UTF-8"?>' \ - '<ListBucketResult ' \ - 'xmlns="http://s3.amazonaws.com/doc/2006-03-01">' \ - '<Name>%s</Name>' \ - '%s' \ - '</ListBucketResult>' % \ - (self.container_name, - "".join(['<Contents><Key>%s</Key><LastModified>%s</LastModified>'\ - '<ETag>%s</ETag><Size>%s</Size><StorageClass>STANDARD'\ - '</StorageClass></Contents>' % - (xml_escape(i['name']), i['last_modified'], i['hash'], - i['bytes']) for i in objects])) - return resp + body = ('<?xml version="1.0" encoding="UTF-8"?>' 
+ '<ListBucketResult ' + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01">' + '<Prefix>%s</Prefix>' + '<Marker>%s</Marker>' + '<Delimiter>%s</Delimiter>' + '<IsTruncated>%s</IsTruncated>' + '<MaxKeys>%s</MaxKeys>' + '<Name>%s</Name>' + '%s' + '%s' + '</ListBucketResult>' % + ( + xml_escape(args.get('prefix', '')), + xml_escape(args.get('marker', '')), + xml_escape(args.get('delimiter', '')), + 'true' if len(objects) == (max_keys + 1) else 'false', + max_keys, + xml_escape(self.container_name), + "".join(['<Contents><Key>%s</Key><LastModified>%s</LastModif'\ + 'ied><ETag>%s</ETag><Size>%s</Size><StorageClass>STA'\ + 'NDARD</StorageClass></Contents>' % + (xml_escape(i['name']), i['last_modified'], i['hash'], + i['bytes']) + for i in objects[:max_keys] if 'subdir' not in i]), + "".join(['<CommonPrefixes><Prefix>%s</Prefix><CommonPrefixes>' + % xml_escape(i['subdir']) + for i in objects[:max_keys] if 'subdir' in i]) + )) + return Response(body=body, content_type='text/xml') def PUT(self, env, start_response): body_iter = self.app(env, self.do_start_response) @@ -152,7 +181,6 @@ class BucketController(Controller): elif status == 202: return get_err_response('BucketAlreadyExists') else: - print status, headers, body_iter return get_err_response('InvalidURI') resp = Response() @@ -173,7 +201,6 @@ class BucketController(Controller): elif status == 409: return get_err_response('BucketNotEmpty') else: - print status, headers, body_iter return get_err_response('InvalidURI') resp = Response() @@ -229,7 +256,6 @@ class ObjectController(Controller): elif status == 404: return get_err_response('InvalidBucketName') else: - print status, headers, body_iter return get_err_response('InvalidURI') resp = Response() @@ -248,7 +274,6 @@ class ObjectController(Controller): elif status == 404: return get_err_response('NoSuchKey') else: - print status, headers, body_iter return get_err_response('InvalidURI') resp = Response() diff --git a/test/unit/common/middleware/test_swift3.py 
b/test/unit/common/middleware/test_swift3.py index d4e9aec46a..528c05f7f2 100644 --- a/test/unit/common/middleware/test_swift3.py +++ b/test/unit/common/middleware/test_swift3.py @@ -15,6 +15,7 @@ import unittest from datetime import datetime +import cgi from webob import Request, Response from webob.exc import HTTPUnauthorized, HTTPCreated, HTTPNoContent,\ @@ -299,6 +300,86 @@ class TestSwift3(unittest.TestCase): for i in FakeAppBucket().objects: self.assertTrue(i[0] in names) + def test_bucket_GET_is_truncated(self): + local_app = swift3.filter_factory({})(FakeAppBucket()) + bucket_name = 'junk' + + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET', + 'QUERY_STRING': 'max-keys=3'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.getElementsByTagName('IsTruncated')[0]. + childNodes[0].nodeValue, 'false') + + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET', + 'QUERY_STRING': 'max-keys=2'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, local_app.app.do_start_response) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.getElementsByTagName('IsTruncated')[0]. + childNodes[0].nodeValue, 'true') + + def test_bucket_GET_max_keys(self): + class FakeApp(object): + def __call__(self, env, start_response): + self.query_string = env['QUERY_STRING'] + start_response('200 OK', []) + return '[]' + fake_app = FakeApp() + local_app = swift3.filter_factory({})(fake_app) + bucket_name = 'junk' + + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET', + 'QUERY_STRING': 'max-keys=5'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, lambda *args: None) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.getElementsByTagName('MaxKeys')[0]. 
+ childNodes[0].nodeValue, '5') + args = dict(cgi.parse_qsl(fake_app.query_string)) + self.assert_(args['limit'] == '6') + + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET', + 'QUERY_STRING': 'max-keys=5000'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, lambda *args: None) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.getElementsByTagName('MaxKeys')[0]. + childNodes[0].nodeValue, '1000') + args = dict(cgi.parse_qsl(fake_app.query_string)) + self.assertEquals(args['limit'], '1001') + + def test_bucket_GET_passthroughs(self): + class FakeApp(object): + def __call__(self, env, start_response): + self.query_string = env['QUERY_STRING'] + start_response('200 OK', []) + return '[]' + fake_app = FakeApp() + local_app = swift3.filter_factory({})(fake_app) + bucket_name = 'junk' + req = Request.blank('/%s' % bucket_name, + environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': + 'delimiter=a&marker=b&prefix=c'}, + headers={'Authorization': 'AUTH_who:password'}) + resp = local_app(req.environ, lambda *args: None) + dom = xml.dom.minidom.parseString("".join(resp)) + self.assertEquals(dom.getElementsByTagName('Prefix')[0]. + childNodes[0].nodeValue, 'c') + self.assertEquals(dom.getElementsByTagName('Marker')[0]. + childNodes[0].nodeValue, 'b') + self.assertEquals(dom.getElementsByTagName('Delimiter')[0]. 
+ childNodes[0].nodeValue, 'a') + args = dict(cgi.parse_qsl(fake_app.query_string)) + self.assertEquals(args['delimiter'], 'a') + self.assertEquals(args['marker'], 'b') + self.assertEquals(args['prefix'], 'c') + def test_bucket_PUT_error(self): code = self._test_method_error(FakeAppBucket, 'PUT', '/bucket', 401) self.assertEquals(code, 'AccessDenied') From 15dabb6da0038db0cb0eb49f3c82242146a6ff29 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 05:47:44 +0000 Subject: [PATCH 101/199] typo --- swift/common/middleware/swift3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 6f6b830a0f..5f7159c47c 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -164,7 +164,7 @@ class BucketController(Controller): (xml_escape(i['name']), i['last_modified'], i['hash'], i['bytes']) for i in objects[:max_keys] if 'subdir' not in i]), - "".join(['<CommonPrefixes><Prefix>%s</Prefix><CommonPrefixes>' + "".join(['<CommonPrefixes><Prefix>%s</Prefix></CommonPrefixes>' % xml_escape(i['subdir']) for i in objects[:max_keys] if 'subdir' in i]) )) From e618dd567f44715e857dd49fc6a86965f83e0e96 Mon Sep 17 00:00:00 2001 From: Chris Wedgwood <cw@f00f.org> Date: Thu, 13 Jan 2011 23:17:36 -0800 Subject: [PATCH 102/199] Don't unnecessarily quote account, container or object values. This fixed the problem where containers or objects with characters that need quoting can't be audited because they aren't found. 
--- bin/swift-account-audit | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bin/swift-account-audit b/bin/swift-account-audit index 81823722bf..9fd1b13e80 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -72,7 +72,7 @@ class Auditor(object): self.in_progress = {} def audit_object(self, account, container, name): - path = '/%s/%s/%s' % (quote(account), quote(container), quote(name)) + path = '/%s/%s/%s' % (account, container, name) part, nodes = self.object_ring.get_nodes(account, container, name) container_listing = self.audit_container(account, container) consistent = True @@ -145,7 +145,7 @@ class Auditor(object): return self.list_cache[(account, name)] self.in_progress[(account, name)] = Event() print 'Auditing container "%s"...' % name - path = '/%s/%s' % (quote(account), quote(name)) + path = '/%s/%s' % (account, name) account_listing = self.audit_account(account) consistent = True if name not in account_listing: @@ -189,7 +189,7 @@ class Auditor(object): self.container_obj_mismatch += 1 consistent = False print " Different versions of %s/%s in container dbs." 
% \ - (quote(name), quote(obj['name'])) + name, obj['name'] if obj['last_modified'] > rec_d[obj_name]['last_modified']: rec_d[obj_name] = obj obj_counts = [int(header['x-container-object-count']) From bd2125c9de9423cfa06ce403d1214ce4e687dd38 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 08:26:50 +0000 Subject: [PATCH 103/199] PUT headers --- swift/common/middleware/swift3.py | 19 ++++++++++------ test/unit/common/middleware/test_swift3.py | 25 +++++++++++++++++++++- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 5f7159c47c..c3e1b7fb5c 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -166,8 +166,7 @@ class BucketController(Controller): for i in objects[:max_keys] if 'subdir' not in i]), "".join(['<CommonPrefixes><Prefix>%s</Prefix></CommonPrefixes>' % xml_escape(i['subdir']) - for i in objects[:max_keys] if 'subdir' in i]) - )) + for i in objects[:max_keys] if 'subdir' in i]))) return Response(body=body, content_type='text/xml') def PUT(self, env, start_response): @@ -246,6 +245,15 @@ class ObjectController(Controller): return self.GETorHEAD(env, start_response) def PUT(self, env, start_response): + for key, value in env.items(): + if key.startswith('HTTP_X_AMZ_META_'): + del env[key] + env['HTTP_X_OBJECT_META_' + key[16:]] = value + elif key == 'HTTP_CONTENT_MD5': + env['HTTP_ETAG'] = value.decode('base64').encode('hex') + elif key == 'HTTP_X_AMZ_COPY_SOURCE': + env['HTTP_X_OBJECT_COPY'] = value + body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) @@ -258,10 +266,9 @@ class ObjectController(Controller): else: return get_err_response('InvalidURI') - resp = Response() - resp.etag = headers['etag'] - resp.status = 200 - return resp + etag = headers['etag'] + del headers['etag'] + return 
Response(status=200, headers=headers, etag=etag) def DELETE(self, env, start_response): body_iter = self.app(env, self.do_start_response) diff --git a/test/unit/common/middleware/test_swift3.py b/test/unit/common/middleware/test_swift3.py index 528c05f7f2..c7c974c965 100644 --- a/test/unit/common/middleware/test_swift3.py +++ b/test/unit/common/middleware/test_swift3.py @@ -470,7 +470,7 @@ class TestSwift3(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}, headers={'Authorization': 'AUTH_who:password', 'x-amz-storage-class': 'REDUCED_REDUNDANCY', - 'Content-MD5': '1b2cf535f27731c974343645a3985328'}) + 'Content-MD5': 'Gyz1NfJ3Mcl0NDZFo5hTKA=='}) req.date = datetime.now() req.content_type = 'text/plain' resp = local_app(req.environ, local_app.app.do_start_response) @@ -480,6 +480,29 @@ class TestSwift3(unittest.TestCase): self.assertEquals(headers['ETag'], "\"%s\"" % local_app.app.response_headers['etag']) + def test_object_PUT_headers(self): + class FakeApp(object): + def __call__(self, env, start_response): + self.req = Request(env) + start_response('200 OK') + start_response([]) + app = FakeApp() + local_app = swift3.filter_factory({})(app) + req = Request.blank('/bucket/object', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'Authorization': 'AUTH_who:password', + 'X-Amz-Storage-Class': 'REDUCED_REDUNDANCY', + 'X-Amz-Meta-Something': 'oh hai', + 'X-Amz-Copy-Source': '/some/source', + 'Content-MD5': 'ffoHqOWd280dyE1MT4KuoQ=='}) + req.date = datetime.now() + req.content_type = 'text/plain' + resp = local_app(req.environ, lambda *args: None) + self.assertEquals(app.req.headers['ETag'], + '7dfa07a8e59ddbcd1dc84d4c4f82aea1') + self.assertEquals(app.req.headers['X-Object-Meta-Something'], 'oh hai') + self.assertEquals(app.req.headers['X-Object-Copy'], '/some/source') + def test_object_DELETE_error(self): code = self._test_method_error(FakeAppObject, 'DELETE', '/bucket/object', 401) From 5189a3e0b57659fa91b7a974e3ba10c8fc587fe3 Mon Sep 17 00:00:00 2001 From: 
Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 08:33:10 +0000 Subject: [PATCH 104/199] simplify put response --- swift/common/middleware/swift3.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index c3e1b7fb5c..41b7caf792 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -266,9 +266,7 @@ class ObjectController(Controller): else: return get_err_response('InvalidURI') - etag = headers['etag'] - del headers['etag'] - return Response(status=200, headers=headers, etag=etag) + return Response(status=200, etag=headers['etag']) def DELETE(self, env, start_response): body_iter = self.app(env, self.do_start_response) From 867a6ab0c381e7ca955ec0ee4a5a2ddb15a95b0a Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 08:45:39 +0000 Subject: [PATCH 105/199] i18n stuff --- swift/auth/server.py | 16 +++--- swift/common/middleware/cname_lookup.py | 12 +++-- swift/common/utils.py | 2 +- swift/proxy/server.py | 70 +++++++++++++------------ swift/stats/access_processor.py | 16 +++--- swift/stats/account_stats.py | 8 +-- swift/stats/log_processor.py | 26 +++++---- swift/stats/log_uploader.py | 20 +++---- swift/stats/stats_processor.py | 2 +- 9 files changed, 91 insertions(+), 81 deletions(-) diff --git a/swift/auth/server.py b/swift/auth/server.py index 1903035507..afd2164207 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -90,7 +90,7 @@ class AuthController(object): self.logger = get_logger(conf) self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key: - msg = 'No super_admin_key set in conf file! Exiting.' + msg = _('No super_admin_key set in conf file! 
Exiting.') try: self.logger.critical(msg) except: @@ -206,8 +206,9 @@ YOU HAVE A FEW OPTIONS: resp = conn.getresponse() resp.read() if resp.status // 100 != 2: - self.logger.error('ERROR attempting to create account %s: %s %s' % - (url, resp.status, resp.reason)) + self.logger.error(_('ERROR attempting to create account %(url)s:' \ + ' %(status)s %(reason)s') % + {'url': url, 'status': resp.status, 'reason': resp.reason}) return False return account_name @@ -320,7 +321,7 @@ YOU HAVE A FEW OPTIONS: (account, user)).fetchone() if row: self.logger.info( - 'ALREADY EXISTS create_user(%s, %s, _, %s, %s) [%.02f]' % + _('ALREADY EXISTS create_user(%s, %s, _, %s, %s) [%.02f]') % (repr(account), repr(user), repr(admin), repr(reseller_admin), time() - begin)) return 'already exists' @@ -334,7 +335,7 @@ YOU HAVE A FEW OPTIONS: account_hash = self.add_storage_account() if not account_hash: self.logger.info( - 'FAILED create_user(%s, %s, _, %s, %s) [%.02f]' % + _('FAILED create_user(%s, %s, _, %s, %s) [%.02f]') % (repr(account), repr(user), repr(admin), repr(reseller_admin), time() - begin)) return False @@ -347,7 +348,7 @@ YOU HAVE A FEW OPTIONS: admin and 't' or '', reseller_admin and 't' or '')) conn.commit() self.logger.info( - 'SUCCESS create_user(%s, %s, _, %s, %s) = %s [%.02f]' % + _('SUCCESS create_user(%s, %s, _, %s, %s) = %s [%.02f]') % (repr(account), repr(user), repr(admin), repr(reseller_admin), repr(url), time() - begin)) return url @@ -611,7 +612,8 @@ YOU HAVE A FEW OPTIONS: return HTTPBadRequest(request=env)(env, start_response) response = handler(req) except: - self.logger.exception('ERROR Unhandled exception in ReST request') + self.logger.exception( + _('ERROR Unhandled exception in ReST request')) return HTTPServiceUnavailable(request=req)(env, start_response) trans_time = '%.4f' % (time() - start_time) if not response.content_length and response.app_iter and \ diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py 
index 4690bf6c79..e48d209e54 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -86,8 +86,10 @@ class CNAMELookupMiddleware(object): break elif found_domain.endswith(self.storage_domain): # Found it! - self.logger.info('Mapped %s to %s' % (given_domain, - found_domain)) + self.logger.info( + _('Mapped %(given_domain)s to %(found_domain)s') % + {'given_domain': given_domain, + 'found_domain': found_domain}) if port: env['HTTP_HOST'] = ':'.join([found_domain, port]) else: @@ -96,8 +98,10 @@ class CNAMELookupMiddleware(object): break else: # try one more deep in the chain - self.logger.debug('Following CNAME chain for %s to %s' % - (given_domain, found_domain)) + self.logger.debug(_('Following CNAME chain for ' \ + '%(given_domain)s to %(found_domain)s') % + {'given_domain': given_domain, + 'found_domain': found_domain}) a_domain = found_domain if error: if found_domain: diff --git a/swift/common/utils.py b/swift/common/utils.py index ce174e4cf5..299980493a 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -453,7 +453,7 @@ def capture_stdio(logger, **kwargs): """ # log uncaught exceptions sys.excepthook = lambda * exc_info: \ - logger.critical('UNCAUGHT EXCEPTION', exc_info=exc_info) + logger.critical(_('UNCAUGHT EXCEPTION'), exc_info=exc_info) # collect stdio file desc not in use for logging stdio_fds = [0, 1, 2] diff --git a/swift/proxy/server.py b/swift/proxy/server.py index e1d5824b4e..3dd76ccdee 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -384,8 +384,8 @@ class Controller(object): if attempts_left <= 0: break except: - self.exception_occurred(node, 'Account', - 'Trying to get account info for %s' % path) + self.exception_occurred(node, _('Account'), + _('Trying to get account info for %s') % path) if self.app.memcache and result_code in (200, 404): if result_code == 200: cache_timeout = self.app.recheck_account_existence @@ -462,8 +462,8 @@ class Controller(object): if 
attempts_left <= 0: break except: - self.exception_occurred(node, 'Container', - 'Trying to get container info for %s' % path) + self.exception_occurred(node, _('Container'), + _('Trying to get container info for %s') % path) if self.app.memcache and result_code in (200, 404): if result_code == 200: cache_timeout = self.app.recheck_container_existence @@ -594,7 +594,8 @@ class Controller(object): source = conn.getresponse() except: self.exception_occurred(node, server_type, - 'Trying to %s %s' % (req.method, req.path)) + _('Trying to %(method)s %(path)s') % + {'method': req.method, 'path': req.path}) continue if source.status == 507: self.error_limit(node) @@ -624,8 +625,8 @@ class Controller(object): res.client_disconnect = True self.app.logger.info(_('Client disconnected on read')) except: - self.exception_occurred(node, 'Object', - 'Trying to read during GET of %s' % req.path) + self.exception_occurred(node, _('Object'), + _('Trying to read during GET of %s') % req.path) raise res.app_iter = file_iter() update_headers(res, source.getheaders()) @@ -648,8 +649,9 @@ class Controller(object): reasons.append(source.reason) bodies.append(source.read()) if source.status >= 500: - self.error_occurred(node, 'ERROR %d %s From %s Server' % - (source.status, bodies[-1][:1024], server_type)) + self.error_occurred(node, _('ERROR %(status)d %(body)s ' \ + 'From %(type)s Server') % {'status': source.status, + 'body': bodies[-1][:1024], 'type': server_type}) return self.best_response(req, statuses, reasons, bodies, '%s %s' % (server_type, req.method)) @@ -686,12 +688,13 @@ class ObjectController(Controller): self.error_limit(node) elif response.status >= 500: self.error_occurred(node, - 'ERROR %d %s From Object Server' % - (response.status, body[:1024])) + _('ERROR %(status)d %(body)s From Object Server') % + {'status': response.status, 'body': body[:1024]}) return response.status, response.reason, body except: - self.exception_occurred(node, 'Object', - 'Trying to %s %s' % 
(req.method, req.path)) + self.exception_occurred(node, _('Object'), + _('Trying to %(method)s %(path)s') % + {'method': req.method, 'path': req.path}) return 500, '', '' def GETorHEAD(self, req): @@ -990,8 +993,8 @@ class ObjectController(Controller): with Timeout(self.app.node_timeout): resp = conn.getexpect() except: - self.exception_occurred(node, 'Object', - 'Expect: 100-continue on %s' % req.path) + self.exception_occurred(node, _('Object'), + _('Expect: 100-continue on %s') % req.path) if conn and resp: if resp.status == 100: conns.append(conn) @@ -1030,8 +1033,8 @@ class ObjectController(Controller): else: conn.send(chunk) except: - self.exception_occurred(conn.node, 'Object', - 'Trying to write to %s' % req.path) + self.exception_occurred(conn.node, _('Object'), + _('Trying to write to %s') % req.path) conns.remove(conn) if len(conns) <= len(nodes) / 2: self.app.logger.error( @@ -1069,13 +1072,14 @@ class ObjectController(Controller): bodies.append(response.read()) if response.status >= 500: self.error_occurred(conn.node, - 'ERROR %d %s From Object Server re: %s' % - (response.status, bodies[-1][:1024], req.path)) + _('ERROR %(status)d %(body)s From Object Server ' \ + 're: %(path)s') % {'status': response.status, + 'body': bodies[-1][:1024], 'path': req.path}) elif 200 <= response.status < 300: etags.add(response.getheader('etag').strip('"')) except: - self.exception_occurred(conn.node, 'Object', - 'Trying to get final status of PUT to %s' % req.path) + self.exception_occurred(conn.node, _('Object'), + _('Trying to get final status of PUT to %s') % req.path) if len(etags) > 1: self.app.logger.error( _('Object servers returned %s mismatched etags'), len(etags)) @@ -1286,8 +1290,8 @@ class ContainerController(Controller): accounts.insert(0, account) except: accounts.insert(0, account) - self.exception_occurred(node, 'Container', - 'Trying to PUT to %s' % req.path) + self.exception_occurred(node, _('Container'), + _('Trying to PUT to %s') % req.path) if not 
accounts: break while len(statuses) < len(containers): @@ -1341,8 +1345,8 @@ class ContainerController(Controller): elif source.status == 507: self.error_limit(node) except: - self.exception_occurred(node, 'Container', - 'Trying to POST %s' % req.path) + self.exception_occurred(node, _('Container'), + _('Trying to POST %s') % req.path) if len(statuses) >= len(containers): break while len(statuses) < len(containers): @@ -1398,8 +1402,8 @@ class ContainerController(Controller): accounts.insert(0, account) except: accounts.insert(0, account) - self.exception_occurred(node, 'Container', - 'Trying to DELETE %s' % req.path) + self.exception_occurred(node, _('Container'), + _('Trying to DELETE %s') % req.path) if not accounts: break while len(statuses) < len(containers): @@ -1482,8 +1486,8 @@ class AccountController(Controller): if source.status == 507: self.error_limit(node) except: - self.exception_occurred(node, 'Account', - 'Trying to PUT to %s' % req.path) + self.exception_occurred(node, _('Account'), + _('Trying to PUT to %s') % req.path) if len(statuses) >= len(accounts): break while len(statuses) < len(accounts): @@ -1530,8 +1534,8 @@ class AccountController(Controller): elif source.status == 507: self.error_limit(node) except: - self.exception_occurred(node, 'Account', - 'Trying to POST %s' % req.path) + self.exception_occurred(node, _('Account'), + _('Trying to POST %s') % req.path) if len(statuses) >= len(accounts): break while len(statuses) < len(accounts): @@ -1575,8 +1579,8 @@ class AccountController(Controller): elif source.status == 507: self.error_limit(node) except: - self.exception_occurred(node, 'Account', - 'Trying to DELETE %s' % req.path) + self.exception_occurred(node, _('Account'), + _('Trying to DELETE %s') % req.path) if len(statuses) >= len(accounts): break while len(statuses) < len(accounts): diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index 558709dccf..08c3971a84 100644 --- a/swift/stats/access_processor.py 
+++ b/swift/stats/access_processor.py @@ -59,19 +59,20 @@ class AccessLogProcessor(object): headers, processing_time) = (unquote(x) for x in raw_log[16:].split(' ')) except ValueError: - self.logger.debug('Bad line data: %s' % repr(raw_log)) + self.logger.debug(_('Bad line data: %s') % repr(raw_log)) return {} if server != self.server_name: # incorrect server name in log line - self.logger.debug('Bad server name: found "%s" expected "%s"' \ - % (server, self.server_name)) + self.logger.debug(_('Bad server name: found "%(found)s" ' \ + 'expected "%(expected)s"') % + {'found': server, 'expected': self.server_name}) return {} try: (version, account, container_name, object_name) = \ split_path(request, 2, 4, True) except ValueError, e: - self.logger.debug( - 'Invalid path: %s from data: %s' % (e, repr(raw_log))) + self.logger.debug(_('Invalid path: %(error)s from data: %(log)s') % + {'error': e, 'log': repr(raw_log)}) return {} if container_name is not None: container_name = container_name.split('?', 1)[0] @@ -194,8 +195,9 @@ class AccessLogProcessor(object): if bad_lines > (total_lines * self.warn_percent): name = '/'.join([data_object_account, data_object_container, data_object_name]) - self.logger.warning('I found a bunch of bad lines in %s '\ - '(%d bad, %d total)' % (name, bad_lines, total_lines)) + self.logger.warning(_('I found a bunch of bad lines in %(name)s '\ + '(%(bad)d bad, %(total)d total)') % + {'name': name, 'bad': bad_lines, 'total': total_lines}) return hourly_aggr_info def keylist_mapping(self): diff --git a/swift/stats/account_stats.py b/swift/stats/account_stats.py index e402bd0bc8..91d31f39ad 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -52,10 +52,10 @@ class AccountStat(Daemon): self.logger = get_logger(stats_conf, 'swift-account-stats-logger') def run_once(self): - self.logger.info("Gathering account stats") + self.logger.info(_("Gathering account stats")) start = time.time() self.find_and_process() - 
self.logger.info("Gathering account stats complete (%0.2f minutes)" % + self.logger.info(_("Gathering account stats complete (%0.2f minutes)") % ((time.time() - start) / 60)) def find_and_process(self): @@ -70,14 +70,14 @@ class AccountStat(Daemon): # Account Name, Container Count, Object Count, Bytes Used for device in os.listdir(self.devices): if self.mount_check and not check_mount(self.devices, device): - self.logger.error("Device %s is not mounted, skipping." % + self.logger.error(_("Device %s is not mounted, skipping.") % device) continue accounts = os.path.join(self.devices, device, account_server_data_dir) if not os.path.exists(accounts): - self.logger.debug("Path %s does not exist, skipping." % + self.logger.debug(_("Path %s does not exist, skipping.") % accounts) continue for root, dirs, files in os.walk(accounts, topdown=False): diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index 60101b7ca2..f8938ddbc2 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -59,7 +59,7 @@ class LogProcessor(object): module = __import__(import_target, fromlist=[import_target]) klass = getattr(module, class_name) self.plugins[plugin_name]['instance'] = klass(plugin_conf) - self.logger.debug('Loaded plugin "%s"' % plugin_name) + self.logger.debug(_('Loaded plugin "%s"') % plugin_name) @property def internal_proxy(self): @@ -76,10 +76,9 @@ class LogProcessor(object): return self._internal_proxy def process_one_file(self, plugin_name, account, container, object_name): - self.logger.info('Processing %s/%s/%s with plugin "%s"' % (account, - container, - object_name, - plugin_name)) + self.logger.info(_('Processing %(obj)s with plugin "%(plugin)s"') % + {'obj': '/'.join((account, container, object_name)), + 'plugin': plugin_name}) # get an iter of the object data compressed = object_name.endswith('.gz') stream = self.get_object_data(account, container, object_name, @@ -177,10 +176,9 @@ class LogProcessor(object): try: chunk = 
d.decompress(chunk) except zlib.error: - self.logger.debug('Bad compressed data for %s/%s/%s' % - (swift_account, - container_name, - object_name)) + self.logger.debug(_('Bad compressed data for %s') + % '/'.join((swift_account, container_name, + object_name))) raise BadFileDownload() # bad compressed data parts = chunk.split('\n') parts[0] = last_part + parts[0] @@ -239,7 +237,7 @@ class LogProcessorDaemon(Daemon): self.worker_count = int(c.get('worker_count', '1')) def run_once(self): - self.logger.info("Beginning log processing") + self.logger.info(_("Beginning log processing")) start = time.time() if self.lookback_hours == 0: lookback_start = None @@ -277,14 +275,14 @@ class LogProcessorDaemon(Daemon): already_processed_files = set() except: already_processed_files = set() - self.logger.debug('found %d processed files' % \ + self.logger.debug(_('found %d processed files') % \ len(already_processed_files)) logs_to_process = self.log_processor.get_data_list(lookback_start, lookback_end, already_processed_files) - self.logger.info('loaded %d files to process' % len(logs_to_process)) + self.logger.info(_('loaded %d files to process') % len(logs_to_process)) if not logs_to_process: - self.logger.info("Log processing done (%0.2f minutes)" % + self.logger.info(_("Log processing done (%0.2f minutes)") % ((time.time() - start) / 60)) return @@ -358,7 +356,7 @@ class LogProcessorDaemon(Daemon): self.log_processor_container, 'processed_files.pickle.gz') - self.logger.info("Log processing done (%0.2f minutes)" % + self.logger.info(_("Log processing done (%0.2f minutes)") % ((time.time() - start) / 60)) diff --git a/swift/stats/log_uploader.py b/swift/stats/log_uploader.py index 160c948f7f..b425738938 100644 --- a/swift/stats/log_uploader.py +++ b/swift/stats/log_uploader.py @@ -68,10 +68,10 @@ class LogUploader(Daemon): self.logger = utils.get_logger(uploader_conf, plugin_name) def run_once(self): - self.logger.info("Uploading logs") + self.logger.info(_("Uploading logs")) 
start = time.time() self.upload_all_logs() - self.logger.info("Uploading logs complete (%0.2f minutes)" % + self.logger.info(_("Uploading logs complete (%0.2f minutes)") % ((time.time() - start) / 60)) def upload_all_logs(self): @@ -126,22 +126,22 @@ class LogUploader(Daemon): hour = filename[slice(*hour_offset)] except IndexError: # unexpected filename format, move on - self.logger.error("Unexpected log: %s" % filename) + self.logger.error(_("Unexpected log: %s") % filename) continue if ((time.time() - os.stat(filename).st_mtime) < self.new_log_cutoff): # don't process very new logs self.logger.debug( - "Skipping log: %s (< %d seconds old)" % (filename, - self.new_log_cutoff)) + _("Skipping log: %(file)s (< %(cutoff)d seconds old)") % + {'file': filename, 'cutoff': self.new_log_cutoff}) continue self.upload_one_log(filename, year, month, day, hour) def upload_one_log(self, filename, year, month, day, hour): if os.path.getsize(filename) == 0: - self.logger.debug("Log %s is 0 length, skipping" % filename) + self.logger.debug(_("Log %s is 0 length, skipping") % filename) return - self.logger.debug("Processing log: %s" % filename) + self.logger.debug(_("Processing log: %s") % filename) filehash = hashlib.md5() already_compressed = True if filename.endswith('.gz') else False opener = gzip.open if already_compressed else open @@ -162,9 +162,9 @@ class LogUploader(Daemon): self.container_name, target_filename, compress=(not already_compressed)): - self.logger.debug("Uploaded log %s to %s" % - (filename, target_filename)) + self.logger.debug(_("Uploaded log %(file)s to %(target)s") % + {'file': filename, 'target': target_filename}) if self.unlink_log: os.unlink(filename) else: - self.logger.error("ERROR: Upload of log %s failed!" 
% filename) + self.logger.error(_("ERROR: Upload of log %s failed!") % filename) diff --git a/swift/stats/stats_processor.py b/swift/stats/stats_processor.py index 7854c83572..dc07d85199 100644 --- a/swift/stats/stats_processor.py +++ b/swift/stats/stats_processor.py @@ -37,7 +37,7 @@ class StatsLogProcessor(object): bytes_used) = line.split(',') except (IndexError, ValueError): # bad line data - self.logger.debug('Bad line data: %s' % repr(line)) + self.logger.debug(_('Bad line data: %s') % repr(line)) continue account = account.strip('"') container_count = int(container_count.strip('"')) From fbb0241df493849b3d6fb148a664aa147724f9fd Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 11:17:33 +0000 Subject: [PATCH 106/199] few more from proxy --- swift/proxy/server.py | 46 +++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 3dd76ccdee..f426bea16c 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -161,13 +161,13 @@ class SegmentedIterable(object): if self.segment > 10: sleep(max(self.next_get_time - time.time(), 0)) self.next_get_time = time.time() + 1 - resp = self.controller.GETorHEAD_base(req, 'Object', partition, + resp = self.controller.GETorHEAD_base(req, _('Object'), partition, self.controller.iter_nodes(partition, nodes, self.controller.app.object_ring), path, self.controller.app.object_ring.replica_count) if resp.status_int // 100 != 2: - raise Exception('Could not load object segment %s: %s' % (path, - resp.status_int)) + raise Exception(_('Could not load object segment %(path)s:' \ + ' %(status)s') % {'path': path, 'status': resp.status_int}) self.segment_iter = resp.app_iter except StopIteration: raise @@ -707,7 +707,7 @@ class ObjectController(Controller): return aresp partition, nodes = self.app.object_ring.get_nodes( self.account_name, self.container_name, self.object_name) - resp 
= self.GETorHEAD_base(req, 'Object', partition, + resp = self.GETorHEAD_base(req, _('Object'), partition, self.iter_nodes(partition, nodes, self.app.object_ring), req.path_info, self.app.object_ring.replica_count) # If we get a 416 Requested Range Not Satisfiable we have to check if @@ -716,7 +716,7 @@ class ObjectController(Controller): if resp.status_int == 416: req_range = req.range req.range = None - resp2 = self.GETorHEAD_base(req, 'Object', partition, + resp2 = self.GETorHEAD_base(req, _('Object'), partition, self.iter_nodes(partition, nodes, self.app.object_ring), req.path_info, self.app.object_ring.replica_count) if 'x-object-manifest' not in resp2.headers: @@ -735,7 +735,7 @@ class ObjectController(Controller): lreq = Request.blank('/%s/%s?prefix=%s&format=json&marker=%s' % (quote(self.account_name), quote(lcontainer), quote(lprefix), quote(marker))) - lresp = self.GETorHEAD_base(lreq, 'Container', lpartition, + lresp = self.GETorHEAD_base(lreq, _('Container'), lpartition, lnodes, lreq.path_info, self.app.container_ring.replica_count) if lresp.status_int // 100 != 2: @@ -767,19 +767,19 @@ class ObjectController(Controller): '/%s/%s?prefix=%s&format=json&marker=%s' % (quote(self.account_name), quote(lcontainer), quote(lprefix), quote(marker))) - lresp = self.GETorHEAD_base(lreq, 'Container', + lresp = self.GETorHEAD_base(lreq, _('Container'), lpartition, lnodes, lreq.path_info, self.app.container_ring.replica_count) if lresp.status_int // 100 != 2: - raise Exception('Object manifest GET could not ' - 'continue listing: %s %s' % + raise Exception(_('Object manifest GET could not ' + 'continue listing: %s %s') % (req.path, lreq.path)) if 'swift.authorize' in req.environ: req.acl = lresp.headers.get('x-container-read') aresp = req.environ['swift.authorize'](req) if aresp: - raise Exception('Object manifest GET could ' - 'not continue listing: %s %s' % + raise Exception(_('Object manifest GET could ' + 'not continue listing: %s %s') % (req.path, aresp)) 
sublisting = json.loads(lresp.body) if not sublisting: @@ -894,7 +894,7 @@ class ObjectController(Controller): reasons.append('') bodies.append('') return self.best_response(req, statuses, reasons, - bodies, 'Object POST') + bodies, _('Object POST')) @public @delay_denial @@ -1089,8 +1089,8 @@ class ObjectController(Controller): statuses.append(503) reasons.append('') bodies.append('') - resp = self.best_response(req, statuses, reasons, bodies, 'Object PUT', - etag=etag) + resp = self.best_response(req, statuses, reasons, bodies, + _('Object PUT'), etag=etag) if source_header: resp.headers['X-Copied-From'] = quote( source_header.split('/', 2)[2]) @@ -1142,7 +1142,7 @@ class ObjectController(Controller): reasons.append('') bodies.append('') return self.best_response(req, statuses, reasons, bodies, - 'Object DELETE') + _('Object DELETE')) @public @delay_denial @@ -1203,7 +1203,7 @@ class ContainerController(Controller): return HTTPNotFound(request=req) part, nodes = self.app.container_ring.get_nodes( self.account_name, self.container_name) - resp = self.GETorHEAD_base(req, 'Container', part, nodes, + resp = self.GETorHEAD_base(req, _('Container'), part, nodes, req.path_info, self.app.container_ring.replica_count) if self.app.memcache: @@ -1303,7 +1303,7 @@ class ContainerController(Controller): self.container_name) self.app.memcache.delete(cache_key) return self.best_response(req, statuses, reasons, bodies, - 'Container PUT') + _('Container PUT')) @public def POST(self, req): @@ -1358,7 +1358,7 @@ class ContainerController(Controller): self.container_name) self.app.memcache.delete(cache_key) return self.best_response(req, statuses, reasons, bodies, - 'Container POST') + _('Container POST')) @public def DELETE(self, req): @@ -1415,7 +1415,7 @@ class ContainerController(Controller): self.container_name) self.app.memcache.delete(cache_key) resp = self.best_response(req, statuses, reasons, bodies, - 'Container DELETE') + _('Container DELETE')) if 200 <= resp.status_int 
<= 299: for status in statuses: if status < 200 or status > 299: @@ -1440,7 +1440,7 @@ class AccountController(Controller): def GETorHEAD(self, req): """Handler for HTTP GET/HEAD requests.""" partition, nodes = self.app.account_ring.get_nodes(self.account_name) - return self.GETorHEAD_base(req, 'Account', partition, nodes, + return self.GETorHEAD_base(req, _('Account'), partition, nodes, req.path_info.rstrip('/'), self.app.account_ring.replica_count) @public @@ -1497,7 +1497,7 @@ class AccountController(Controller): if self.app.memcache: self.app.memcache.delete('account%s' % req.path_info.rstrip('/')) return self.best_response(req, statuses, reasons, bodies, - 'Account PUT') + _('Account PUT')) @public def POST(self, req): @@ -1545,7 +1545,7 @@ class AccountController(Controller): if self.app.memcache: self.app.memcache.delete('account%s' % req.path_info.rstrip('/')) return self.best_response(req, statuses, reasons, bodies, - 'Account POST') + _('Account POST')) @public def DELETE(self, req): @@ -1590,7 +1590,7 @@ class AccountController(Controller): if self.app.memcache: self.app.memcache.delete('account%s' % req.path_info.rstrip('/')) return self.best_response(req, statuses, reasons, bodies, - 'Account DELETE') + _('Account DELETE')) class BaseApplication(object): From 4cf7ec25d752cac2bb21cc87fc5cf206a63face7 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 11:30:17 +0000 Subject: [PATCH 107/199] missed things --- swift/account/reaper.py | 18 +++++++++--------- swift/auth/server.py | 10 +++++----- swift/container/updater.py | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/swift/account/reaper.py b/swift/account/reaper.py index 02e32506f0..d31558b9c6 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -241,28 +241,28 @@ class AccountReaper(Daemon): except Exception: self.logger.exception( _('Exception with account %s'), account) - log = 'Incomplete pass on account %s' % 
account + log = _('Incomplete pass on account %s') % account if self.stats_containers_deleted: - log += ', %s containers deleted' % self.stats_containers_deleted + log += _(', %s containers deleted') % self.stats_containers_deleted if self.stats_objects_deleted: - log += ', %s objects deleted' % self.stats_objects_deleted + log += _(', %s objects deleted') % self.stats_objects_deleted if self.stats_containers_remaining: - log += ', %s containers remaining' % \ + log += _(', %s containers remaining') % \ self.stats_containers_remaining if self.stats_objects_remaining: - log += ', %s objects remaining' % self.stats_objects_remaining + log += _(', %s objects remaining') % self.stats_objects_remaining if self.stats_containers_possibly_remaining: - log += ', %s containers possibly remaining' % \ + log += _(', %s containers possibly remaining') % \ self.stats_containers_possibly_remaining if self.stats_objects_possibly_remaining: - log += ', %s objects possibly remaining' % \ + log += _(', %s objects possibly remaining') % \ self.stats_objects_possibly_remaining if self.stats_return_codes: - log += ', return codes: ' + log += _(', return codes: ') for code in sorted(self.stats_return_codes.keys()): log += '%s %sxxs, ' % (self.stats_return_codes[code], code) log = log[:-2] - log += ', elapsed: %.02fs' % (time() - begin) + log += _(', elapsed: %.02fs') % (time() - begin) self.logger.info(log) def reap_container(self, account, account_partition, account_nodes, diff --git a/swift/auth/server.py b/swift/auth/server.py index afd2164207..0582d9cfda 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -146,7 +146,7 @@ class AuthController(object): previous_prefix = '' if '_' in row[0]: previous_prefix = row[0].split('_', 1)[0] - msg = (''' + msg = _((''' THERE ARE ACCOUNTS IN YOUR auth.db THAT DO NOT BEGIN WITH YOUR NEW RESELLER PREFIX OF "%s". YOU HAVE A FEW OPTIONS: @@ -164,14 +164,14 @@ YOU HAVE A FEW OPTIONS: TO REVERT BACK TO YOUR PREVIOUS RESELLER PREFIX. 
%s - ''' % (self.reseller_prefix.rstrip('_'), self.db_file, + ''') % (self.reseller_prefix.rstrip('_'), self.db_file, self.reseller_prefix.rstrip('_'), self.db_file, - previous_prefix, previous_prefix and ' ' or ''' + previous_prefix, previous_prefix and ' ' or _(''' SINCE YOUR PREVIOUS RESELLER PREFIX WAS AN EMPTY STRING, IT IS NOT RECOMMENDED TO PERFORM OPTION 3 AS THAT WOULD MAKE SUPPORTING MULTIPLE RESELLERS MORE DIFFICULT. - '''.strip())).strip() - self.logger.critical('CRITICAL: ' + ' '.join(msg.split())) + ''').strip())).strip() + self.logger.critical(_('CRITICAL: ') + ' '.join(msg.split())) raise Exception('\n' + msg) def add_storage_account(self, account_name=''): diff --git a/swift/container/updater.py b/swift/container/updater.py index 9dacea32d1..d6b1beb2b1 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -127,7 +127,7 @@ class ContainerUpdater(Daemon): Run the updater once. """ patcher.monkey_patch(all=False, socket=True) - self.logger.info('Begin container update single threaded sweep') + self.logger.info(_('Begin container update single threaded sweep')) begin = time.time() self.no_changes = 0 self.successes = 0 From e2c0a238396233c1107b94ef54e397434ffe2f7d Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Fri, 14 Jan 2011 13:49:05 -0600 Subject: [PATCH 108/199] Added doc strings and pointers to docs for swift3 --- doc/source/development_saio.rst | 2 +- doc/source/getting_started.rst | 3 +- doc/source/misc.rst | 8 +++ swift/common/middleware/swift3.py | 115 +++++++++++++++++++++++++----- 4 files changed, 107 insertions(+), 21 deletions(-) diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index 072db9b327..a74e6df8c9 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -31,7 +31,7 @@ Installing dependencies and the core code #. 
`apt-get install curl gcc bzr memcached python-configobj python-coverage python-dev python-nose python-setuptools python-simplejson python-xattr sqlite3 xfsprogs python-webob python-eventlet - python-greenlet python-pastedeploy` + python-greenlet python-pastedeploy python-netifaces` #. Install anything else you want, like screen, ssh, vim, etc. #. Next, choose either :ref:`partition-section` or :ref:`loopback-section`. diff --git a/doc/source/getting_started.rst b/doc/source/getting_started.rst index 59087c7011..219adfb462 100644 --- a/doc/source/getting_started.rst +++ b/doc/source/getting_started.rst @@ -21,6 +21,7 @@ And the following python libraries: * Xattr * Nose * Sphinx +* netifaces ----------- Development @@ -38,4 +39,4 @@ Production If you want to set up and configure Swift for a production cluster, the following doc should be useful: -* :doc:`Multiple Server Swift Installation <howto_installmultinode>` \ No newline at end of file +* :doc:`Multiple Server Swift Installation <howto_installmultinode>` diff --git a/doc/source/misc.rst b/doc/source/misc.rst index eaea545a0f..6d6ae04dbd 100644 --- a/doc/source/misc.rst +++ b/doc/source/misc.rst @@ -122,3 +122,11 @@ Ratelimit .. automodule:: swift.common.middleware.ratelimit :members: :show-inheritance: + +Swift3 +====== + +.. 
automodule:: swift.common.middleware.swift3 + :members: + :show-inheritance: + diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 41b7caf792..fc8cd3cfc6 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -32,25 +32,32 @@ MAX_BUCKET_LISTING = 1000 def get_err_response(code): - error_table = {'AccessDenied': - (403, 'Access denied'), - 'BucketAlreadyExists': - (409, 'The requested bucket name is not available'), - 'BucketNotEmpty': - (409, 'The bucket you tried to delete is not empty'), - 'InvalidArgument': - (400, 'Invalid Argument'), - 'InvalidBucketName': - (400, 'The specified bucket is not valid'), - 'InvalidURI': - (400, 'Could not parse the specified URI'), - 'NoSuchBucket': - (404, 'The specified bucket does not exist'), - 'SignatureDoesNotMatch': - (403, 'The calculated request signature does not match '\ - 'your provided one'), - 'NoSuchKey': - (404, 'The resource you requested does not exist')} + """ + Given an HTTP response code, create a properly formatted xml error response + + :param code: error code + :returns: webob.response object + """ + error_table = { + 'AccessDenied': + (403, 'Access denied'), + 'BucketAlreadyExists': + (409, 'The requested bucket name is not available'), + 'BucketNotEmpty': + (409, 'The bucket you tried to delete is not empty'), + 'InvalidArgument': + (400, 'Invalid Argument'), + 'InvalidBucketName': + (400, 'The specified bucket is not valid'), + 'InvalidURI': + (400, 'Could not parse the specified URI'), + 'NoSuchBucket': + (404, 'The specified bucket does not exist'), + 'SignatureDoesNotMatch': + (403, 'The calculated request signature does not match '\ + 'your provided one'), + 'NoSuchKey': + (404, 'The resource you requested does not exist')} resp = Response(content_type='text/xml') resp.status = error_table[code][0] @@ -71,12 +78,18 @@ class Controller(object): class ServiceController(Controller): + """ + Handles account level requests. 
+ """ def __init__(self, env, app, account_name, token, **kwargs): Controller.__init__(self, app) env['HTTP_X_AUTH_TOKEN'] = token env['PATH_INFO'] = '/v1/%s' % account_name def GET(self, env, start_response): + """ + Handle GET Service request + """ env['QUERY_STRING'] = 'format=json' body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) @@ -105,6 +118,9 @@ class ServiceController(Controller): class BucketController(Controller): + """ + Handles bucket request. + """ def __init__(self, env, app, account_name, token, container_name, **kwargs): Controller.__init__(self, app) @@ -113,6 +129,9 @@ class BucketController(Controller): env['PATH_INFO'] = '/v1/%s/%s' % (account_name, container_name) def GET(self, env, start_response): + """ + Handle GET Bucket (List Objects) request + """ if 'QUERY_STRING' in env: args = dict(cgi.parse_qsl(env['QUERY_STRING'])) else: @@ -170,6 +189,9 @@ class BucketController(Controller): return Response(body=body, content_type='text/xml') def PUT(self, env, start_response): + """ + Handle PUT Bucket request + """ body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) @@ -188,6 +210,9 @@ class BucketController(Controller): return resp def DELETE(self, env, start_response): + """ + Handle DELETE Bucket request + """ body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) @@ -208,6 +233,9 @@ class BucketController(Controller): class ObjectController(Controller): + """ + Handles requests on objects + """ def __init__(self, env, app, account_name, token, container_name, object_name, **kwargs): Controller.__init__(self, app) @@ -239,12 +267,21 @@ class ObjectController(Controller): return get_err_response('InvalidURI') def HEAD(self, env, start_response): + """ + Handle HEAD Object request + """ return self.GETorHEAD(env, start_response) def 
GET(self, env, start_response): + """ + Handle GET Object request + """ return self.GETorHEAD(env, start_response) def PUT(self, env, start_response): + """ + Handle PUT Object and PUT Object (Copy) request + """ for key, value in env.items(): if key.startswith('HTTP_X_AMZ_META_'): del env[key] @@ -269,6 +306,9 @@ class ObjectController(Controller): return Response(status=200, etag=headers['etag']) def DELETE(self, env, start_response): + """ + Handle DELETE Object request + """ body_iter = self.app(env, self.do_start_response) status = int(self.response_args[0].split()[0]) headers = dict(self.response_args[1]) @@ -287,6 +327,42 @@ class ObjectController(Controller): class Swift3Middleware(object): + """ + The swift3 middleware will emulate the S3 REST api on top of swift. + + The following opperations are currently supported: + + * GET Service + * DELETE Bucket + * GET Bucket (List Objects) + * PUT Bucket + * DELETE Object + * GET Object + * HEAD Object + * PUT Object + * PUT Object (Copy) + + To add this middleware to your configuration, add the swift3 middleware + in front of the auth middleware, and before any other middleware that + look at swift requests (like rate limiting). + + To set up your client, the access key will be the account string that + should look like AUTH_d305e9dbedbc47df8b25ab46f3152f81, and the + secret access key is the account password. The host should also point + to the swift storage hostname. It also will have to use the old style + calling format, and not the hostname based container format. 
+ + An example client using the python boto library might look like the + following for an SAIO setup:: + + connection = boto.s3.Connection( + aws_access_key_id='AUTH_d305e9dbedbc47df8b25ab46f3152f81', + aws_secret_access_key='testing', + port=8080, + host='127.0.0.1', + is_secure=False, + calling_format=boto.s3.connection.OrdinaryCallingFormat()) + """ def __init__(self, app, conf, *args, **kwargs): self.app = app @@ -352,6 +428,7 @@ class Swift3Middleware(object): def filter_factory(global_conf, **local_conf): + """Standard filter factory to use the middleware with paste.deploy""" conf = global_conf.copy() conf.update(local_conf) From 782b98b5bb29f9555cb96fa5738b0d047d75ffb0 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Fri, 14 Jan 2011 14:30:05 -0600 Subject: [PATCH 109/199] Moved the main middleware docs to the module level --- swift/common/middleware/swift3.py | 74 ++++++++++++++++--------------- 1 file changed, 38 insertions(+), 36 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index fc8cd3cfc6..85f03902ac 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -13,6 +13,43 @@ # See the License for the specific language governing permissions and # limitations under the License. +""" +The swift3 middleware will emulate the S3 REST api on top of swift. + +The following opperations are currently supported: + + * GET Service + * DELETE Bucket + * GET Bucket (List Objects) + * PUT Bucket + * DELETE Object + * GET Object + * HEAD Object + * PUT Object + * PUT Object (Copy) + +To add this middleware to your configuration, add the swift3 middleware +in front of the auth middleware, and before any other middleware that +look at swift requests (like rate limiting). + +To set up your client, the access key will be the account string that +should look like AUTH_d305e9dbedbc47df8b25ab46f3152f81, and the +secret access key is the account password. 
The host should also point +to the swift storage hostname. It also will have to use the old style +calling format, and not the hostname based container format. + +An example client using the python boto library might look like the +following for an SAIO setup:: + + connection = boto.s3.Connection( + aws_access_key_id='AUTH_d305e9dbedbc47df8b25ab46f3152f81', + aws_secret_access_key='testing', + port=8080, + host='127.0.0.1', + is_secure=False, + calling_format=boto.s3.connection.OrdinaryCallingFormat()) +""" + from urllib import unquote, quote import rfc822 import hmac @@ -327,42 +364,7 @@ class ObjectController(Controller): class Swift3Middleware(object): - """ - The swift3 middleware will emulate the S3 REST api on top of swift. - - The following opperations are currently supported: - - * GET Service - * DELETE Bucket - * GET Bucket (List Objects) - * PUT Bucket - * DELETE Object - * GET Object - * HEAD Object - * PUT Object - * PUT Object (Copy) - - To add this middleware to your configuration, add the swift3 middleware - in front of the auth middleware, and before any other middleware that - look at swift requests (like rate limiting). - - To set up your client, the access key will be the account string that - should look like AUTH_d305e9dbedbc47df8b25ab46f3152f81, and the - secret access key is the account password. The host should also point - to the swift storage hostname. It also will have to use the old style - calling format, and not the hostname based container format. 
- - An example client using the python boto library might look like the - following for an SAIO setup:: - - connection = boto.s3.Connection( - aws_access_key_id='AUTH_d305e9dbedbc47df8b25ab46f3152f81', - aws_secret_access_key='testing', - port=8080, - host='127.0.0.1', - is_secure=False, - calling_format=boto.s3.connection.OrdinaryCallingFormat()) - """ + """Swift3 S3 compatibility midleware""" def __init__(self, app, conf, *args, **kwargs): self.app = app From a4de88c3b4e4cc47291cec147f8eae131a2abd04 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Fri, 14 Jan 2011 20:30:38 +0000 Subject: [PATCH 110/199] Execute fsyncs in a thread pool. --- swift/obj/server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/swift/obj/server.py b/swift/obj/server.py index c25d00b47b..4afc38057d 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -33,7 +33,7 @@ from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ HTTPNotModified, HTTPPreconditionFailed, \ HTTPRequestTimeout, HTTPUnprocessableEntity, HTTPMethodNotAllowed from xattr import getxattr, setxattr -from eventlet import sleep, Timeout +from eventlet import sleep, Timeout, tpool from swift.common.utils import mkdirs, normalize_timestamp, \ storage_directory, hash_path, renamer, fallocate, \ @@ -227,7 +227,7 @@ class DiskFile(object): write_metadata(fd, metadata) if 'Content-Length' in metadata: drop_buffer_cache(fd, 0, int(metadata['Content-Length'])) - os.fsync(fd) + tpool.execute(os.fsync, fd) invalidate_hash(os.path.dirname(self.datadir)) renamer(tmppath, os.path.join(self.datadir, timestamp + extension)) self.metadata = metadata @@ -387,7 +387,7 @@ class ObjectController(object): chunk = chunk[written:] # For large files sync every 512MB (by default) written if upload_size - last_sync >= self.bytes_per_sync: - os.fdatasync(fd) + tpool.execute(os.fdatasync, fd) drop_buffer_cache(fd, last_sync, upload_size - last_sync) last_sync = upload_size 
From 67de0c88f456a5bd8a812fc8cbfd7fad209a7ab4 Mon Sep 17 00:00:00 2001 From: Michael Barton <mike-launchpad@weirdlooking.com> Date: Sun, 16 Jan 2011 09:52:08 +0000 Subject: [PATCH 111/199] ipv6 support --- bin/swauth-add-account | 2 +- bin/swauth-add-user | 2 +- bin/swauth-delete-account | 2 +- bin/swauth-delete-user | 2 +- bin/swauth-list | 2 +- bin/swauth-prep | 2 +- bin/swauth-set-account-service | 2 +- bin/swift-ring-builder | 13 +++++++-- etc/proxy-server.conf-sample | 4 +-- swift/auth/server.py | 3 +- swift/common/bench.py | 3 +- swift/common/middleware/acl.py | 2 +- swift/common/middleware/swauth.py | 15 +++++----- swift/common/utils.py | 33 ++++++++++++++++++++++ swift/common/wsgi.py | 6 +++- swift/container/server.py | 2 +- swift/obj/server.py | 2 +- test/unit/common/middleware/test_swauth.py | 18 ++++++------ test/unit/common/test_utils.py | 21 ++++++++++++++ 19 files changed, 99 insertions(+), 37 deletions(-) diff --git a/bin/swauth-add-account b/bin/swauth-add-account index 32aceffc7b..fe18b5a72d 100755 --- a/bin/swauth-add-account +++ b/bin/swauth-add-account @@ -18,9 +18,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-add-user b/bin/swauth-add-user index a844ed2a37..045dc0a766 100755 --- a/bin/swauth-add-user +++ b/bin/swauth-add-user @@ -18,9 +18,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-delete-account b/bin/swauth-delete-account index c46e5e3b91..3d98f6ec4e 100755 --- a/bin/swauth-delete-account +++ b/bin/swauth-delete-account @@ -18,9 +18,9 
@@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-delete-user b/bin/swauth-delete-user index 5ee162437c..ede076dd5b 100755 --- a/bin/swauth-delete-user +++ b/bin/swauth-delete-user @@ -18,9 +18,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-list b/bin/swauth-list index 7433e3ddfd..85a7633966 100755 --- a/bin/swauth-list +++ b/bin/swauth-list @@ -22,9 +22,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-prep b/bin/swauth-prep index 5a931ae1d0..3d2cb7d3eb 100755 --- a/bin/swauth-prep +++ b/bin/swauth-prep @@ -18,9 +18,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swauth-set-account-service b/bin/swauth-set-account-service index 32eb06dc6b..054e4cfc4b 100755 --- a/bin/swauth-set-account-service +++ b/bin/swauth-set-account-service @@ -22,9 +22,9 @@ import gettext from optparse import OptionParser from os.path import basename from sys import argv, exit -from urlparse import urlparse from swift.common.bufferedhttp import http_connect_raw as http_connect +from 
swift.common.utils import urlparse if __name__ == '__main__': diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index c448bea5ca..41293f7d37 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -235,10 +235,17 @@ swift-ring-builder <builder_file> add z<zone>-<ip>:<port>/<device_name>_<meta> print 'Invalid add value: %s' % argv[3] exit(EXIT_ERROR) i = 1 - while i < len(rest) and rest[i] in '0123456789.': + if rest[i] == '[': + while i < len(rest) and rest[i] != ']': + i += 1 + ip = rest[2:i] i += 1 - ip = rest[1:i] - rest = rest[i:] + rest = rest[i:] + else: + while i < len(rest) and rest[i] in '0123456789.': + i += 1 + ip = rest[1:i] + rest = rest[i:] if not rest.startswith(':'): print 'Invalid add value: %s' % argv[3] diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index fda7d0d034..2d85f19508 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -68,13 +68,13 @@ use = egg:swift#swauth # auth_prefix = /auth/ # Cluster strings are of the format name:url where name is a short name for the # Swift cluster and url is the url to the proxy server(s) for the cluster. -# default_swift_cluster = local:http://127.0.0.1:8080/v1 +# default_swift_cluster = local#http://127.0.0.1:8080/v1 # You may also use the format name::url::url where the first url is the one # given to users to access their account (public url) and the second is the one # used by swauth itself to create and delete accounts (private url). This is # useful when a load balancer url should be used by users, but swauth itself is # behind the load balancer. Example: -# default_swift_cluster = local::https://public.com:8080/v1::http://private.com:8080/v1 +# default_swift_cluster = local##https://public.com:8080/v1##http://private.com:8080/v1 # token_life = 86400 # node_timeout = 10 # Highly recommended to change this. 
diff --git a/swift/auth/server.py b/swift/auth/server.py index a0bd31ccda..967f853291 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -20,7 +20,6 @@ from contextlib import contextmanager from time import gmtime, strftime, time from urllib import unquote, quote from uuid import uuid4 -from urlparse import urlparse from hashlib import md5, sha1 import hmac import base64 @@ -32,7 +31,7 @@ from webob.exc import HTTPBadRequest, HTTPConflict, HTTPForbidden, \ from swift.common.bufferedhttp import http_connect_raw as http_connect from swift.common.db import get_db_connection -from swift.common.utils import get_logger, split_path +from swift.common.utils import get_logger, split_path, urlparse class AuthController(object): diff --git a/swift/common/bench.py b/swift/common/bench.py index 4abafeb947..169497ef13 100644 --- a/swift/common/bench.py +++ b/swift/common/bench.py @@ -16,13 +16,12 @@ import uuid import time import random -from urlparse import urlparse from contextlib import contextmanager import eventlet.pools from eventlet.green.httplib import CannotSendRequest -from swift.common.utils import TRUE_VALUES +from swift.common.utils import TRUE_VALUES, urlparse from swift.common import client from swift.common import direct_client diff --git a/swift/common/middleware/acl.py b/swift/common/middleware/acl.py index f6784953ac..f08780eedb 100644 --- a/swift/common/middleware/acl.py +++ b/swift/common/middleware/acl.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from urlparse import urlparse +from swift.common.utils import urlparse def clean_acl(name, value): diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 961f3a3ba4..568b00fb35 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -21,7 +21,6 @@ from httplib import HTTPConnection, HTTPSConnection from time import gmtime, strftime, time from traceback import format_exc from urllib import quote, unquote -from urlparse import urlparse from uuid import uuid4 from eventlet.timeout import Timeout @@ -32,7 +31,7 @@ from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ from swift.common.bufferedhttp import http_connect_raw as http_connect from swift.common.middleware.acl import clean_acl, parse_acl, referrer_allowed -from swift.common.utils import cache_from_env, get_logger, split_path +from swift.common.utils import cache_from_env, get_logger, split_path, urlparse class Swauth(object): @@ -61,23 +60,23 @@ class Swauth(object): self.auth_prefix += '/' self.auth_account = '%s.auth' % self.reseller_prefix self.default_swift_cluster = conf.get('default_swift_cluster', - 'local:http://127.0.0.1:8080/v1') + 'local#http://127.0.0.1:8080/v1') # This setting is a little messy because of the options it has to # provide. The basic format is cluster_name:url, such as the default - # value of local:http://127.0.0.1:8080/v1. But, often the url given to + # value of local#http://127.0.0.1:8080/v1. But, often the url given to # the user needs to be different than the url used by Swauth to # create/delete accounts. So there's a more complex format of # cluster_name::url::url, such as - # local::https://public.com:8080/v1::http://private.com:8080/v1. + # local##https://public.com:8080/v1##http://private.com:8080/v1. # The double colon is what sets the two apart. 
- if '::' in self.default_swift_cluster: + if '##' in self.default_swift_cluster: self.dsc_name, self.dsc_url, self.dsc_url2 = \ - self.default_swift_cluster.split('::', 2) + self.default_swift_cluster.split('##', 2) self.dsc_url = self.dsc_url.rstrip('/') self.dsc_url2 = self.dsc_url2.rstrip('/') else: self.dsc_name, self.dsc_url = \ - self.default_swift_cluster.split(':', 1) + self.default_swift_cluster.split('#', 1) self.dsc_url = self.dsc_url2 = self.dsc_url.rstrip('/') self.dsc_parsed = urlparse(self.dsc_url) if self.dsc_parsed.scheme not in ('http', 'https'): diff --git a/swift/common/utils.py b/swift/common/utils.py index 299980493a..05b15e99fa 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -34,6 +34,7 @@ from ConfigParser import ConfigParser, NoSectionError, NoOptionError from optparse import OptionParser from tempfile import mkstemp import cPickle as pickle +from urlparse import urlparse as stdlib_urlparse, ParseResult import eventlet from eventlet import greenio, GreenPool, sleep, Timeout, listen @@ -845,3 +846,35 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1): elif running_time - now > time_per_request: eventlet.sleep((running_time - now) / clock_accuracy) return running_time + time_per_request + + +class ModifiedParseResult(ParseResult): + "Parse results class for urlparse." + + @property + def hostname(self): + netloc = self.netloc.split('@', 1)[-1] + if netloc.startswith('['): + return netloc[1:].split(']')[0] + elif ':' in netloc: + return netloc.rsplit(':')[0] + return netloc + + @property + def port(self): + netloc = self.netloc.split('@', 1)[-1] + if netloc.startswith('['): + netloc = netloc.rsplit(']')[1] + if ':' in netloc: + return int(netloc.rsplit(':')[1]) + return None + + +def urlparse(url): + """ + urlparse augmentation. + This is necessary because urlparse can't handle RFC 2732 URLs. + + :param url: URL to parse. 
+ """ + return ModifiedParseResult(*stdlib_urlparse(url)) diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index 9450bcf439..cedc4b2c8b 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -68,11 +68,15 @@ def get_socket(conf, default_port=8080): """ bind_addr = (conf.get('bind_ip', '0.0.0.0'), int(conf.get('bind_port', default_port))) + address_family = [addr[0] for addr in socket.getaddrinfo(bind_addr[0], + bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM) + if addr[0] in (socket.AF_INET, socket.AF_INET6)][0] sock = None retry_until = time.time() + 30 while not sock and time.time() < retry_until: try: - sock = listen(bind_addr, backlog=int(conf.get('backlog', 4096))) + sock = listen(bind_addr, backlog=int(conf.get('backlog', 4096)), + family=address_family) if 'cert_file' in conf: sock = ssl.wrap_socket(sock, certfile=conf['cert_file'], keyfile=conf['key_file']) diff --git a/swift/container/server.py b/swift/container/server.py index 7ba375ce33..1ffba8a909 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -88,7 +88,7 @@ class ContainerController(object): account_partition = req.headers.get('X-Account-Partition') account_device = req.headers.get('X-Account-Device') if all([account_host, account_partition, account_device]): - account_ip, account_port = account_host.split(':') + account_ip, account_port = account_host.rsplit(':', 1) new_path = '/' + '/'.join([account, container]) info = broker.get_info() account_headers = {'x-put-timestamp': info['put_timestamp'], diff --git a/swift/obj/server.py b/swift/obj/server.py index 4afc38057d..f20b40d57a 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -294,7 +294,7 @@ class ObjectController(object): full_path = '/%s/%s/%s' % (account, container, obj) try: with ConnectionTimeout(self.conn_timeout): - ip, port = host.split(':') + ip, port = host.rsplit(':', 1) conn = http_connect(ip, port, contdevice, partition, op, full_path, headers_out) with 
Timeout(self.node_timeout): diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 00c010b9dc..a6edab9c2c 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -151,21 +151,21 @@ class TestAuth(unittest.TestCase): app = FakeApp() self.assertRaises(Exception, auth.filter_factory({ 'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:badscheme://host/path'}), app) + 'default_swift_cluster': 'local#badscheme://host/path'}), app) ath = auth.filter_factory({'super_admin_key': 'supertest'})(app) self.assertEquals(ath.default_swift_cluster, - 'local:http://127.0.0.1:8080/v1') + 'local#http://127.0.0.1:8080/v1') ath = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:http://host/path'})(app) + 'default_swift_cluster': 'local#http://host/path'})(app) self.assertEquals(ath.default_swift_cluster, - 'local:http://host/path') + 'local#http://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://host/path/'})(app) + 'default_swift_cluster': 'local#https://host/path/'})(app) self.assertEquals(ath.dsc_url, 'https://host/path') self.assertEquals(ath.dsc_url2, 'https://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', 'default_swift_cluster': - 'local::https://host/path/::http://host2/path2/'})(app) + 'local##https://host/path/##http://host2/path2/'})(app) self.assertEquals(ath.dsc_url, 'https://host/path') self.assertEquals(ath.dsc_url2, 'http://host2/path2') @@ -2882,7 +2882,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_default_https(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) conn = local_auth.get_conn() self.assertEquals(conn.__class__, auth.HTTPSConnection) 
self.assertEquals(conn.host, '1.2.3.4') @@ -2890,7 +2890,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_overridden(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) conn = \ local_auth.get_conn(urlparsed=auth.urlparse('http://5.6.7.8/v1')) self.assertEquals(conn.__class__, auth.HTTPConnection) @@ -2899,7 +2899,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_overridden_https(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:http://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#http://1.2.3.4/v1'})(FakeApp()) conn = \ local_auth.get_conn(urlparsed=auth.urlparse('https://5.6.7.8/v1')) self.assertEquals(conn.__class__, auth.HTTPSConnection) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 1f5a94edd5..b9e8a3f81b 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -477,6 +477,27 @@ log_name = yarr''' total += i self.assertTrue(abs(50 - (time.time() - start) * 100) < 10) + def test_urlparse(self): + parsed = utils.urlparse('http://127.0.0.1/') + self.assertEquals(parsed.scheme, 'http') + self.assertEquals(parsed.hostname, '127.0.0.1') + self.assertEquals(parsed.path, '/') + + parsed = utils.urlparse('http://127.0.0.1:8080/') + self.assertEquals(parsed.port, 8080) + + parsed = utils.urlparse('https://127.0.0.1/') + self.assertEquals(parsed.scheme, 'https') + + parsed = utils.urlparse('http://[::1]/') + self.assertEquals(parsed.hostname, '::1') + + parsed = utils.urlparse('http://[::1]:8080/') + self.assertEquals(parsed.hostname, '::1') + self.assertEquals(parsed.port, 8080) + + parsed = utils.urlparse('www.example.com') + self.assertEquals(parsed.hostname, '') if __name__ == '__main__': unittest.main() From af1813ba4e9ad4362072e69d5841f9dff550bd1e Mon Sep 17 
00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Mon, 17 Jan 2011 15:42:20 +0900 Subject: [PATCH 112/199] s3api: fix AWSAccessKeyId We use cfaccount as AWSAccessKeyId (something like AUTH_89308df71f274e33af17779606f08fa0). However, users with the same account use the same cfaccount. In such case, we can't know which password should be used as a secret key to calculate the HMAC. This changes AWSAccessKeyId to the combination of account and user: Authorization: AWS test:tester:xQE0diMbLRepdf3YB+FIEXAMPLE= The auth validates the HMAC and sends a cfaccount back to the proxy. The proxy rewrites the path with the cfaccount. --- swift/auth/server.py | 20 +++++++++++--------- swift/common/middleware/auth.py | 10 ++++++++-- swift/common/middleware/swift3.py | 4 ++-- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/swift/auth/server.py b/swift/auth/server.py index a0bd31ccda..dac3a78a3e 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -243,18 +243,17 @@ YOU HAVE A FEW OPTIONS: raise err def validate_s3_sign(self, request, token): - cfaccount, sign = request.headers['Authorization'].split(' ')[-1].split(':') + account, user, sign = request.headers['Authorization'].split(' ')[-1].split(':') msg = base64.urlsafe_b64decode(unquote(token)) rv = False with self.get_conn() as conn: row = conn.execute(''' - SELECT account, user, password FROM account - WHERE cfaccount = ?''', - (cfaccount,)).fetchone() - rv = (84000, row[0], row[1], cfaccount) - + SELECT password, cfaccount FROM account + WHERE account = ? 
AND user = ?''', + (account, user)).fetchone() + rv = (84000, account, user, row[1]) if rv: - s = base64.encodestring(hmac.new(row[2], msg, sha1).digest()).strip() + s = base64.encodestring(hmac.new(row[0], msg, sha1).digest()).strip() self.logger.info("orig %s, calc %s" % (sign, s)) if sign != s: rv = False @@ -440,8 +439,10 @@ YOU HAVE A FEW OPTIONS: except ValueError: return HTTPBadRequest() # Retrieves (TTL, account, user, cfaccount) if valid, False otherwise + headers = {} if 'Authorization' in request.headers: validation = self.validate_s3_sign(request, token) + headers['X-Auth-Account-Suffix'] = validation[3] else: validation = self.validate_token(token) if not validation: @@ -451,8 +452,9 @@ YOU HAVE A FEW OPTIONS: # admin access to a cfaccount or ".reseller_admin" to access to all # accounts, including creating new ones. groups.append(validation[3]) - return HTTPNoContent(headers={'X-Auth-TTL': validation[0], - 'X-Auth-Groups': ','.join(groups)}) + headers['X-Auth-TTL'] = validation[0] + headers['X-Auth-Groups'] = ','.join(groups) + return HTTPNoContent(headers=headers) def handle_add_user(self, request): """ diff --git a/swift/common/middleware/auth.py b/swift/common/middleware/auth.py index 59cf83ddba..a2c71a3070 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -134,8 +134,7 @@ class DevAuth(object): headers = {} if env.get('HTTP_AUTHORIZATION'): groups = None - if env.get('HTTP_AUTHORIZATION'): - headers["Authorization"] = env.get('HTTP_AUTHORIZATION') + headers["Authorization"] = env.get('HTTP_AUTHORIZATION') if not groups: with Timeout(self.timeout): @@ -153,6 +152,13 @@ class DevAuth(object): if memcache_client: memcache_client.set(key, (time(), expiration, groups), timeout=expiration) + + if env.get('HTTP_AUTHORIZATION'): + account, user, sign = env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':') + cfaccount = resp.getheader('x-auth-account-suffix') + path = env['PATH_INFO'] + env['PATH_INFO'] = 
path.replace("%s:%s" % (account, user), cfaccount, 1) + return groups def authorize(self, req): diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 85f03902ac..f6a5126693 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -400,11 +400,11 @@ class Swift3Middleware(object): h += header.lower() + ":" + str(req.headers[header]) + "\n" h += req.path try: - account, _ = req.headers['Authorization'].split(' ')[-1].split(':') + account, user, _ = req.headers['Authorization'].split(' ')[-1].split(':') except: return None, None token = base64.urlsafe_b64encode(h) - return account, token + return '%s:%s' % (account, user), token def __call__(self, env, start_response): req = Request(env) From d7b59e0b94433ce928ccb7e5c7fd1a49b2d0ff62 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Mon, 17 Jan 2011 15:51:59 +0900 Subject: [PATCH 113/199] s3api: update unit tests for AWSAccessKeyId change --- test/unit/common/middleware/test_swift3.py | 32 +++++++++++----------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/test/unit/common/middleware/test_swift3.py b/test/unit/common/middleware/test_swift3.py index c7c974c965..f84a0ffe8a 100644 --- a/test/unit/common/middleware/test_swift3.py +++ b/test/unit/common/middleware/test_swift3.py @@ -209,7 +209,7 @@ class TestSwift3(unittest.TestCase): def test_bad_path(self): req = Request.blank('/bucket/object/bad', environ={'REQUEST_METHOD': 'GET'}, - headers={'Authorization': 'AUTH_something:hoge'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = self.app(req.environ, start_response) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.firstChild.nodeName, 'Error') @@ -219,7 +219,7 @@ class TestSwift3(unittest.TestCase): def test_bad_method(self): req = Request.blank('/', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Authorization': 'AUTH_something:hoge'}) + headers={'Authorization': 'AWS 
test:tester:hmac'}) resp = self.app(req.environ, start_response) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.firstChild.nodeName, 'Error') @@ -230,7 +230,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(cl(status)) req = Request.blank(path, environ={'REQUEST_METHOD': method}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, start_response) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.firstChild.nodeName, 'Error') @@ -246,7 +246,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppService()) req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '200') @@ -279,7 +279,7 @@ class TestSwift3(unittest.TestCase): bucket_name = 'junk' req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '200') @@ -307,7 +307,7 @@ class TestSwift3(unittest.TestCase): req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'max-keys=3'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.getElementsByTagName('IsTruncated')[0]. 
@@ -316,7 +316,7 @@ class TestSwift3(unittest.TestCase): req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'max-keys=2'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.getElementsByTagName('IsTruncated')[0]. @@ -335,7 +335,7 @@ class TestSwift3(unittest.TestCase): req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'max-keys=5'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, lambda *args: None) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.getElementsByTagName('MaxKeys')[0]. @@ -346,7 +346,7 @@ class TestSwift3(unittest.TestCase): req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'max-keys=5000'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, lambda *args: None) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.getElementsByTagName('MaxKeys')[0]. @@ -366,7 +366,7 @@ class TestSwift3(unittest.TestCase): req = Request.blank('/%s' % bucket_name, environ={'REQUEST_METHOD': 'GET', 'QUERY_STRING': 'delimiter=a&marker=b&prefix=c'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, lambda *args: None) dom = xml.dom.minidom.parseString("".join(resp)) self.assertEquals(dom.getElementsByTagName('Prefix')[0]. 
@@ -392,7 +392,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppBucket(201)) req = Request.blank('/bucket', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '200') @@ -410,7 +410,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppBucket(204)) req = Request.blank('/bucket', environ={'REQUEST_METHOD': 'DELETE'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '204') @@ -418,7 +418,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppObject()) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': method}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '200') @@ -468,7 +468,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppObject(201)) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Authorization': 'AUTH_who:password', + headers={'Authorization': 'AWS test:tester:hmac', 'x-amz-storage-class': 'REDUCED_REDUNDANCY', 'Content-MD5': 'Gyz1NfJ3Mcl0NDZFo5hTKA=='}) req.date = datetime.now() @@ -490,7 +490,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(app) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, - headers={'Authorization': 'AUTH_who:password', + headers={'Authorization': 'AWS test:tester:hmac', 'X-Amz-Storage-Class': 'REDUCED_REDUNDANCY', 'X-Amz-Meta-Something': 
'oh hai', 'X-Amz-Copy-Source': '/some/source', @@ -518,7 +518,7 @@ class TestSwift3(unittest.TestCase): local_app = swift3.filter_factory({})(FakeAppObject(204)) req = Request.blank('/bucket/object', environ={'REQUEST_METHOD': 'DELETE'}, - headers={'Authorization': 'AUTH_who:password'}) + headers={'Authorization': 'AWS test:tester:hmac'}) resp = local_app(req.environ, local_app.app.do_start_response) self.assertEquals(local_app.app.response_args[0].split()[0], '204') From d8810a1ed7e31a16ffc5e31a7c3e9445e463ce82 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Mon, 17 Jan 2011 11:07:58 -0600 Subject: [PATCH 114/199] access processor now handles extra fields in the log line --- swift/stats/access_processor.py | 5 +-- test/unit/stats/test_access_processor.py | 45 ++++++++++++++++++++++-- 2 files changed, 46 insertions(+), 4 deletions(-) diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index 08c3971a84..2aee505415 100644 --- a/swift/stats/access_processor.py +++ b/swift/stats/access_processor.py @@ -40,7 +40,7 @@ class AccessLogProcessor(object): '''given a raw access log line, return a dict of the good parts''' d = {} try: - (_, + (unused, server, client_ip, lb_ip, @@ -57,7 +57,8 @@ class AccessLogProcessor(object): etag, trans_id, headers, - processing_time) = (unquote(x) for x in raw_log[16:].split(' ')) + processing_time) = (unquote(x) for x in + raw_log[16:].split(' ')[:18]) except ValueError: self.logger.debug(_('Bad line data: %s') % repr(raw_log)) return {} diff --git a/test/unit/stats/test_access_processor.py b/test/unit/stats/test_access_processor.py index 7317c365aa..2e431ac334 100644 --- a/test/unit/stats/test_access_processor.py +++ b/test/unit/stats/test_access_processor.py @@ -21,8 +21,49 @@ from swift.stats import access_processor class TestAccessProcessor(unittest.TestCase): - def test_placeholder(self): - pass + def test_log_line_parser_field_count(self): + p = 
access_processor.AccessLogProcessor({}) + # too few fields + log_line = [str(x) for x in range(17)] + log_line[1] = 'proxy-server' + log_line[4] = '1/Jan/3/4/5/6' + log_line[6] = '/v1/a/c/o' + log_line = 'x'*16 + ' '.join(log_line) + res = p.log_line_parser(log_line) + expected = {} + self.assertEquals(res, expected) + # right amount of fields + log_line = [str(x) for x in range(18)] + log_line[1] = 'proxy-server' + log_line[4] = '1/Jan/3/4/5/6' + log_line[6] = '/v1/a/c/o' + log_line = 'x'*16 + ' '.join(log_line) + res = p.log_line_parser(log_line) + expected = {'code': 8, 'processing_time': '17', 'auth_token': '11', + 'month': '01', 'second': '6', 'year': '3', 'tz': '+0000', + 'http_version': '7', 'object_name': 'o', 'etag': '14', + 'method': '5', 'trans_id': '15', 'client_ip': '2', + 'bytes_out': 13, 'container_name': 'c', 'day': '1', + 'minute': '5', 'account': 'a', 'hour': '4', + 'referrer': '9', 'request': '/v1/a/c/o', + 'user_agent': '10', 'bytes_in': 12, 'lb_ip': '3'} + self.assertEquals(res, expected) + # too many fields + log_line = [str(x) for x in range(19)] + log_line[1] = 'proxy-server' + log_line[4] = '1/Jan/3/4/5/6' + log_line[6] = '/v1/a/c/o' + log_line = 'x'*16 + ' '.join(log_line) + res = p.log_line_parser(log_line) + expected = {'code': 8, 'processing_time': '17', 'auth_token': '11', + 'month': '01', 'second': '6', 'year': '3', 'tz': '+0000', + 'http_version': '7', 'object_name': 'o', 'etag': '14', + 'method': '5', 'trans_id': '15', 'client_ip': '2', + 'bytes_out': 13, 'container_name': 'c', 'day': '1', + 'minute': '5', 'account': 'a', 'hour': '4', + 'referrer': '9', 'request': '/v1/a/c/o', + 'user_agent': '10', 'bytes_in': 12, 'lb_ip': '3'} + self.assertEquals(res, expected) if __name__ == '__main__': From 2fac1d53efec34a2c51a80741f935c384fde5ec9 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Mon, 17 Jan 2011 21:49:48 +0000 Subject: [PATCH 115/199] modify default_swift_cluster option format --- 
doc/source/deployment_guide.rst | 2 +- doc/source/howto_installmultinode.rst | 4 +-- etc/proxy-server.conf-sample | 8 +++--- swift/common/middleware/swauth.py | 31 +++++++++++----------- test/unit/common/middleware/test_swauth.py | 18 ++++++------- 5 files changed, 31 insertions(+), 32 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 7af25ad631..40854b0a1f 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -515,7 +515,7 @@ auth_prefix /auth/ The HTTP request path reserves anything beginning with the letter `v`. -default_swift_cluster local:http://127.0.0.1:8080/v1 The default Swift +default_swift_cluster local#http://127.0.0.1:8080/v1 The default Swift cluster to place newly created accounts on. token_life 86400 The number of seconds a diff --git a/doc/source/howto_installmultinode.rst b/doc/source/howto_installmultinode.rst index 2a84357bb9..bec4f9510b 100644 --- a/doc/source/howto_installmultinode.rst +++ b/doc/source/howto_installmultinode.rst @@ -138,7 +138,7 @@ Configure the Proxy node # Only needed for Swauth [filter:swauth] use = egg:swift#swauth - default_swift_cluster = https://<PROXY_LOCAL_NET_IP>:8080/v1 + default_swift_cluster = local#https://<PROXY_LOCAL_NET_IP>:8080/v1 # Highly recommended to change this key to something else! super_admin_key = swauthkey @@ -437,7 +437,7 @@ See :ref:`config-proxy` for the initial setup, and then follow these additional # For Swauth, in /etc/swift/proxy-server.conf [filter:swauth] use = egg:swift#swauth - default_swift_cluster = local:http://<LOAD_BALANCER_HOSTNAME>/v1 + default_swift_cluster = local#http://<LOAD_BALANCER_HOSTNAME>/v1 # Highly recommended to change this key to something else! 
super_admin_key = swauthkey diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index fda7d0d034..a3f64f8415 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -66,15 +66,15 @@ use = egg:swift#swauth # The auth prefix will cause requests beginning with this prefix to be routed # to the auth subsystem, for granting tokens, creating accounts, users, etc. # auth_prefix = /auth/ -# Cluster strings are of the format name:url where name is a short name for the +# Cluster strings are of the format name#url where name is a short name for the # Swift cluster and url is the url to the proxy server(s) for the cluster. -# default_swift_cluster = local:http://127.0.0.1:8080/v1 -# You may also use the format name::url::url where the first url is the one +# default_swift_cluster = local#http://127.0.0.1:8080/v1 +# You may also use the format name#url#url where the first url is the one # given to users to access their account (public url) and the second is the one # used by swauth itself to create and delete accounts (private url). This is # useful when a load balancer url should be used by users, but swauth itself is # behind the load balancer. Example: -# default_swift_cluster = local::https://public.com:8080/v1::http://private.com:8080/v1 +# default_swift_cluster = local#https://public.com:8080/v1#http://private.com:8080/v1 # token_life = 86400 # node_timeout = 10 # Highly recommended to change this. diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 961f3a3ba4..105098c807 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -61,24 +61,23 @@ class Swauth(object): self.auth_prefix += '/' self.auth_account = '%s.auth' % self.reseller_prefix self.default_swift_cluster = conf.get('default_swift_cluster', - 'local:http://127.0.0.1:8080/v1') + 'local#http://127.0.0.1:8080/v1') # This setting is a little messy because of the options it has to - # provide. 
The basic format is cluster_name:url, such as the default - # value of local:http://127.0.0.1:8080/v1. But, often the url given to - # the user needs to be different than the url used by Swauth to - # create/delete accounts. So there's a more complex format of - # cluster_name::url::url, such as - # local::https://public.com:8080/v1::http://private.com:8080/v1. - # The double colon is what sets the two apart. - if '::' in self.default_swift_cluster: - self.dsc_name, self.dsc_url, self.dsc_url2 = \ - self.default_swift_cluster.split('::', 2) - self.dsc_url = self.dsc_url.rstrip('/') - self.dsc_url2 = self.dsc_url2.rstrip('/') + # provide. The basic format is cluster_name#url, such as the default + # value of local#http://127.0.0.1:8080/v1. + # If the URL given to the user needs to differ from the url used by + # Swauth to create/delete accounts, there's a more complex format: + # cluster_name#url#url, such as + # local#https://public.com:8080/v1#http://private.com:8080/v1. + cluster_parts = self.default_swift_cluster.split('#', 2) + self.dsc_name = cluster_parts[0] + if len(cluster_parts) == 3: + self.dsc_url = cluster_parts[1].rstrip('/') + self.dsc_url2 = cluster_parts[2].rstrip('/') + elif len(cluster_parts) == 2: + self.dsc_url = self.dsc_url2 = cluster_parts[1].rstrip('/') else: - self.dsc_name, self.dsc_url = \ - self.default_swift_cluster.split(':', 1) - self.dsc_url = self.dsc_url2 = self.dsc_url.rstrip('/') + raise Exception('Invalid cluster format') self.dsc_parsed = urlparse(self.dsc_url) if self.dsc_parsed.scheme not in ('http', 'https'): raise Exception('Cannot handle protocol scheme %s for url %s' % diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 00c010b9dc..2e4d958a44 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -151,21 +151,21 @@ class TestAuth(unittest.TestCase): app = FakeApp() self.assertRaises(Exception, auth.filter_factory({ 
'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:badscheme://host/path'}), app) + 'default_swift_cluster': 'local#badscheme://host/path'}), app) ath = auth.filter_factory({'super_admin_key': 'supertest'})(app) self.assertEquals(ath.default_swift_cluster, - 'local:http://127.0.0.1:8080/v1') + 'local#http://127.0.0.1:8080/v1') ath = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:http://host/path'})(app) + 'default_swift_cluster': 'local#http://host/path'})(app) self.assertEquals(ath.default_swift_cluster, - 'local:http://host/path') + 'local#http://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://host/path/'})(app) + 'default_swift_cluster': 'local#https://host/path/'})(app) self.assertEquals(ath.dsc_url, 'https://host/path') self.assertEquals(ath.dsc_url2, 'https://host/path') ath = auth.filter_factory({'super_admin_key': 'supertest', 'default_swift_cluster': - 'local::https://host/path/::http://host2/path2/'})(app) + 'local#https://host/path/#http://host2/path2/'})(app) self.assertEquals(ath.dsc_url, 'https://host/path') self.assertEquals(ath.dsc_url2, 'http://host2/path2') @@ -2882,7 +2882,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_default_https(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) conn = local_auth.get_conn() self.assertEquals(conn.__class__, auth.HTTPSConnection) self.assertEquals(conn.host, '1.2.3.4') @@ -2890,7 +2890,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_overridden(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:https://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#https://1.2.3.4/v1'})(FakeApp()) conn = \ 
local_auth.get_conn(urlparsed=auth.urlparse('http://5.6.7.8/v1')) self.assertEquals(conn.__class__, auth.HTTPConnection) @@ -2899,7 +2899,7 @@ class TestAuth(unittest.TestCase): def test_get_conn_overridden_https(self): local_auth = auth.filter_factory({'super_admin_key': 'supertest', - 'default_swift_cluster': 'local:http://1.2.3.4/v1'})(FakeApp()) + 'default_swift_cluster': 'local#http://1.2.3.4/v1'})(FakeApp()) conn = \ local_auth.get_conn(urlparsed=auth.urlparse('https://5.6.7.8/v1')) self.assertEquals(conn.__class__, auth.HTTPSConnection) From c63e6c7c9b5c9b7f184b66d74a1d74cbfde2efbd Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Mon, 17 Jan 2011 21:51:09 +0000 Subject: [PATCH 116/199] Fixes issue of HTTPS not being greened --- swift/common/client.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/swift/common/client.py b/swift/common/client.py index e3536e894f..0255bff708 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -18,12 +18,17 @@ Cloud Files client library used internally """ import socket from cStringIO import StringIO -from httplib import HTTPException, HTTPSConnection +from httplib import HTTPException from re import compile, DOTALL from tokenize import generate_tokens, STRING, NAME, OP from urllib import quote as _quote, unquote from urlparse import urlparse, urlunparse +try: + from eventlet.green.httplib import HTTPSConnection +except: + from httplib import HTTPSConnection + try: from eventlet import sleep except: @@ -33,7 +38,10 @@ try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection except: - from httplib import HTTPConnection + try: + from eventlet.green.httplib import HTTPConnection + except: + from httplib import HTTPConnection def quote(value, safe='/'): From 9b702c64df53f66ee46d4007c62333dab2d4bca9 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Mon, 17 Jan 2011 22:08:07 +0000 Subject: [PATCH 117/199] Removed the bare 
excepts --- swift/common/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/swift/common/client.py b/swift/common/client.py index 0255bff708..9e80e9cef1 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -26,21 +26,21 @@ from urlparse import urlparse, urlunparse try: from eventlet.green.httplib import HTTPSConnection -except: +except ImportError: from httplib import HTTPSConnection try: from eventlet import sleep -except: +except ImportError: from time import sleep try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection -except: +except ImportError: try: from eventlet.green.httplib import HTTPConnection - except: + except ImportError: from httplib import HTTPConnection From a623aa5be57b103b41bc20afb954a6a5f0effc78 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 17 Jan 2011 14:36:28 -0800 Subject: [PATCH 118/199] client.py: Reset of streams during upload retries --- swift/common/client.py | 53 ++++++++++++------ test/unit/common/test_client.py | 95 +++++++++++++++++++++++++++++++++ 2 files changed, 132 insertions(+), 16 deletions(-) diff --git a/swift/common/client.py b/swift/common/client.py index e3536e894f..b12742e4ae 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -688,7 +688,7 @@ class Connection(object): """Convenience class to make requests that will also retry the request""" def __init__(self, authurl, user, key, retries=5, preauthurl=None, - preauthtoken=None, snet=False): + preauthtoken=None, snet=False, starting_backoff=1): """ :param authurl: authenitcation URL :param user: user name to authenticate as @@ -708,6 +708,7 @@ class Connection(object): self.token = preauthtoken self.attempts = 0 self.snet = snet + self.starting_backoff = starting_backoff def get_auth(self): return get_auth(self.authurl, self.user, self.key, snet=self.snet) @@ -715,9 +716,9 @@ class Connection(object): def http_connection(self): return 
http_connection(self.url) - def _retry(self, func, *args, **kwargs): + def _retry(self, reset_func, func, *args, **kwargs): self.attempts = 0 - backoff = 1 + backoff = self.starting_backoff while self.attempts <= self.retries: self.attempts += 1 try: @@ -746,10 +747,12 @@ class Connection(object): raise sleep(backoff) backoff *= 2 + if reset_func: + reset_func(func, *args, **kwargs) def head_account(self): """Wrapper for :func:`head_account`""" - return self._retry(head_account) + return self._retry(None, head_account) def get_account(self, marker=None, limit=None, prefix=None, full_listing=False): @@ -757,16 +760,16 @@ class Connection(object): # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. - return self._retry(get_account, marker=marker, limit=limit, + return self._retry(None, get_account, marker=marker, limit=limit, prefix=prefix, full_listing=full_listing) def post_account(self, headers): """Wrapper for :func:`post_account`""" - return self._retry(post_account, headers) + return self._retry(None, post_account, headers) def head_container(self, container): """Wrapper for :func:`head_container`""" - return self._retry(head_container, container) + return self._retry(None, head_container, container) def get_container(self, container, marker=None, limit=None, prefix=None, delimiter=None, full_listing=False): @@ -774,43 +777,61 @@ class Connection(object): # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. 
- return self._retry(get_container, container, marker=marker, + return self._retry(None, get_container, container, marker=marker, limit=limit, prefix=prefix, delimiter=delimiter, full_listing=full_listing) def put_container(self, container, headers=None): """Wrapper for :func:`put_container`""" - return self._retry(put_container, container, headers=headers) + return self._retry(None, put_container, container, headers=headers) def post_container(self, container, headers): """Wrapper for :func:`post_container`""" - return self._retry(post_container, container, headers) + return self._retry(None, post_container, container, headers) def delete_container(self, container): """Wrapper for :func:`delete_container`""" - return self._retry(delete_container, container) + return self._retry(None, delete_container, container) def head_object(self, container, obj): """Wrapper for :func:`head_object`""" - return self._retry(head_object, container, obj) + return self._retry(None, head_object, container, obj) def get_object(self, container, obj, resp_chunk_size=None): """Wrapper for :func:`get_object`""" - return self._retry(get_object, container, obj, + return self._retry(None, get_object, container, obj, resp_chunk_size=resp_chunk_size) + def _put_object_reset(self, func, container, obj, contents, *args, + **kwargs): + seek = getattr(contents, 'seek', None) + if seek: + seek(0) + def put_object(self, container, obj, contents, content_length=None, etag=None, chunk_size=65536, content_type=None, headers=None): """Wrapper for :func:`put_object`""" - return self._retry(put_object, container, obj, contents, + + def _default_reset(*args, **kwargs): + raise ClientException('put_object(%r, %r, ...) failure and no ' + 'ability to reset contents for reupload.' 
% (container, obj)) + + reset_func = _default_reset + tell = getattr(contents, 'tell', None) + seek = getattr(contents, 'seek', None) + if tell and seek: + orig_pos = tell() + reset_func = lambda *a, **k: seek(orig_pos) + + return self._retry(reset_func, put_object, container, obj, contents, content_length=content_length, etag=etag, chunk_size=chunk_size, content_type=content_type, headers=headers) def post_object(self, container, obj, headers): """Wrapper for :func:`post_object`""" - return self._retry(post_object, container, obj, headers) + return self._retry(None, post_object, container, obj, headers) def delete_object(self, container, obj): """Wrapper for :func:`delete_object`""" - return self._retry(delete_object, container, obj) + return self._retry(None, delete_object, container, obj) diff --git a/test/unit/common/test_client.py b/test/unit/common/test_client.py index 739cba75e3..233ec429f7 100644 --- a/test/unit/common/test_client.py +++ b/test/unit/common/test_client.py @@ -14,7 +14,10 @@ # limitations under the License. 
# TODO: More tests +import socket import unittest +from StringIO import StringIO +from urlparse import urlparse # TODO: mock http connection class with more control over headers from test.unit.proxy.test_server import fake_http_connect @@ -377,5 +380,97 @@ class TestConnection(MockHttpTest): self.assertEquals(conn.url, 'http://www.new.com') self.assertEquals(conn.token, 'new') + def test_reset_stream(self): + + class LocalContents(object): + + def __init__(self, tell_value=0): + self.already_read = False + self.seeks = [] + self.tell_value = tell_value + + def tell(self): + return self.tell_value + + def seek(self, position): + self.seeks.append(position) + self.already_read = False + + def read(self, size=-1): + if self.already_read: + return '' + else: + self.already_read = True + return 'abcdef' + + class LocalConnection(object): + + def putrequest(self, *args, **kwargs): + return + + def putheader(self, *args, **kwargs): + return + + def endheaders(self, *args, **kwargs): + return + + def send(self, *args, **kwargs): + raise socket.error('oops') + + def request(self, *args, **kwargs): + return + + def getresponse(self, *args, **kwargs): + self.status = 200 + return self + + def getheader(self, *args, **kwargs): + return '' + + def read(self, *args, **kwargs): + return '' + + def local_http_connection(url): + parsed = urlparse(url) + return parsed, LocalConnection() + + orig_conn = c.http_connection + try: + c.http_connection = local_http_connection + conn = c.Connection('http://www.example.com', 'asdf', 'asdf', + retries=1, starting_backoff=.0001) + + contents = LocalContents() + exc = None + try: + conn.put_object('c', 'o', contents) + except socket.error, err: + exc = err + self.assertEquals(contents.seeks, [0]) + self.assertEquals(str(exc), 'oops') + + contents = LocalContents(tell_value=123) + exc = None + try: + conn.put_object('c', 'o', contents) + except socket.error, err: + exc = err + self.assertEquals(contents.seeks, [123]) + 
self.assertEquals(str(exc), 'oops') + + contents = LocalContents() + contents.tell = None + exc = None + try: + conn.put_object('c', 'o', contents) + except c.ClientException, err: + exc = err + self.assertEquals(contents.seeks, []) + self.assertEquals(str(exc), "put_object('c', 'o', ...) failure " + "and no ability to reset contents for reupload.") + finally: + c.http_connection = orig_conn + + if __name__ == '__main__': unittest.main() From 56738b161fe024c8f5725cb382dd61a29d77fa64 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 17 Jan 2011 14:44:55 -0800 Subject: [PATCH 119/199] client.py: Dead code removal --- swift/common/client.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/swift/common/client.py b/swift/common/client.py index bc9dfa1ed5..8d144735ee 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -810,12 +810,6 @@ class Connection(object): return self._retry(None, get_object, container, obj, resp_chunk_size=resp_chunk_size) - def _put_object_reset(self, func, container, obj, contents, *args, - **kwargs): - seek = getattr(contents, 'seek', None) - if seek: - seek(0) - def put_object(self, container, obj, contents, content_length=None, etag=None, chunk_size=65536, content_type=None, headers=None): From 48bade1a11bc61280e2889072299a7483a58f3b2 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Tue, 18 Jan 2011 10:55:26 +0900 Subject: [PATCH 120/199] s3api: fix unicode name container and object --- swift/common/middleware/swift3.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index f6a5126693..61cfa86eb6 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -139,11 +139,9 @@ class ServiceController(Controller): return get_err_response('InvalidURI') containers = loads(''.join(list(body_iter))) - resp = Response(content_type='text/xml') - resp.status 
= 200 # we don't keep the creation time of a backet (s3cmd doesn't # work without that) so we use something bogus. - resp.body = '<?xml version="1.0" encoding="UTF-8"?>' \ + body = '<?xml version="1.0" encoding="UTF-8"?>' \ '<ListAllMyBucketsResult ' \ 'xmlns="http://doc.s3.amazonaws.com/2006-03-01">' \ '<Buckets>%s</Buckets>' \ @@ -151,6 +149,7 @@ class ServiceController(Controller): % ("".join(['<Bucket><Name>%s</Name><CreationDate>' \ '2009-02-03T16:45:09.000Z</CreationDate></Bucket>' % xml_escape(i['name']) for i in containers])) + resp = Response(status=200, content_type='text/xml', body=body) return resp From ec32c55e4069d4ee16cfae1b058355d26b85f1aa Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Tue, 18 Jan 2011 14:44:00 +0900 Subject: [PATCH 121/199] s3api: fix devauth auth failure handling --- swift/auth/server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/swift/auth/server.py b/swift/auth/server.py index dac3a78a3e..32c768f212 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -442,7 +442,8 @@ YOU HAVE A FEW OPTIONS: headers = {} if 'Authorization' in request.headers: validation = self.validate_s3_sign(request, token) - headers['X-Auth-Account-Suffix'] = validation[3] + if validation: + headers['X-Auth-Account-Suffix'] = validation[3] else: validation = self.validate_token(token) if not validation: From 8a5f21b3ac3dfc6ab90cd932c1763822864e8339 Mon Sep 17 00:00:00 2001 From: Chris Wedgwood <cw@f00f.org> Date: Tue, 18 Jan 2011 12:08:22 -0800 Subject: [PATCH 122/199] Fix output formatting (missing tuple). Fix to be robust when container and object counts are None (this happens if accounts can't be located). 
--- bin/swift-account-audit | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bin/swift-account-audit b/bin/swift-account-audit index 9fd1b13e80..ec71a05f45 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -189,7 +189,7 @@ class Auditor(object): self.container_obj_mismatch += 1 consistent = False print " Different versions of %s/%s in container dbs." % \ - name, obj['name'] + (name, obj['name']) if obj['last_modified'] > rec_d[obj_name]['last_modified']: rec_d[obj_name] = obj obj_counts = [int(header['x-container-object-count']) @@ -259,14 +259,16 @@ class Auditor(object): self.account_container_mismatch += 1 consistent = False print " Account databases don't agree on number of containers." - print " Max: %s, Min: %s" % (max(cont_counts), min(cont_counts)) + if cont_counts: + print " Max: %s, Min: %s" % (max(cont_counts), min(cont_counts)) obj_counts = [int(header['x-account-object-count']) for header in headers] if len(set(obj_counts)) != 1: self.account_object_mismatch += 1 consistent = False print " Account databases don't agree on number of objects." 
- print " Max: %s, Min: %s" % (max(obj_counts), min(obj_counts)) + if obj_counts: + print " Max: %s, Min: %s" % (max(obj_counts), min(obj_counts)) containers = set() for resp in responses.values(): containers.update(container['name'] for container in resp[1]) From 9ec4cf00235e6db8ab163e627562aec1a73e7d04 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Tue, 18 Jan 2011 20:55:19 +0000 Subject: [PATCH 123/199] shuffle nodes and cache files for public access performance --- swift/obj/server.py | 17 ++++++++++++++--- swift/proxy/server.py | 2 ++ test/unit/proxy/test_server.py | 1 + 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/swift/obj/server.py b/swift/obj/server.py index 4afc38057d..aabece3176 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -51,6 +51,7 @@ ASYNCDIR = 'async_pending' PICKLE_PROTOCOL = 2 METADATA_KEY = 'user.swift.metadata' MAX_OBJECT_NAME_LENGTH = 1024 +KEEP_CACHE_SIZE = (5 * 1024 * 1024) def read_metadata(fd): @@ -113,6 +114,7 @@ class DiskFile(object): self.meta_file = None self.data_file = None self.fp = None + self.keep_cache = False if not os.path.exists(self.datadir): return files = sorted(os.listdir(self.datadir), reverse=True) @@ -150,12 +152,12 @@ class DiskFile(object): if chunk: read += len(chunk) if read - dropped_cache > (1024 * 1024): - drop_buffer_cache(self.fp.fileno(), dropped_cache, + self.drop_cache(self.fp.fileno(), dropped_cache, read - dropped_cache) dropped_cache = read yield chunk else: - drop_buffer_cache(self.fp.fileno(), dropped_cache, + self.drop_cache(self.fp.fileno(), dropped_cache, read - dropped_cache) break finally: @@ -226,7 +228,7 @@ class DiskFile(object): timestamp = normalize_timestamp(metadata['X-Timestamp']) write_metadata(fd, metadata) if 'Content-Length' in metadata: - drop_buffer_cache(fd, 0, int(metadata['Content-Length'])) + self.drop_cache(fd, 0, int(metadata['Content-Length'])) tpool.execute(os.fsync, fd) 
invalidate_hash(os.path.dirname(self.datadir)) renamer(tmppath, os.path.join(self.datadir, timestamp + extension)) @@ -248,6 +250,11 @@ class DiskFile(object): if err.errno != errno.ENOENT: raise + def drop_cache(self, fd, offset, length): + """Method for no-oping buffer cache drop method.""" + if not self.keep_cache: + drop_buffer_cache(fd, offset, length) + class ObjectController(object): """Implements the WSGI application for the Swift Object Server.""" @@ -482,6 +489,10 @@ class ObjectController(object): response.etag = file.metadata['ETag'] response.last_modified = float(file.metadata['X-Timestamp']) response.content_length = int(file.metadata['Content-Length']) + if response.content_length < KEEP_CACHE_SIZE and \ + 'X-Auth-Token' not in request.headers and \ + 'X-Storage-Token' not in request.headers: + file.keep_cache = True if 'Content-Encoding' in file.metadata: response.content_encoding = file.metadata['Content-Encoding'] return request.get_response(response) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 32c7ad9004..4892c42267 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -29,6 +29,7 @@ from urllib import unquote, quote import uuid import functools from hashlib import md5 +from random import shuffle from eventlet import sleep from eventlet.timeout import Timeout @@ -707,6 +708,7 @@ class ObjectController(Controller): return aresp partition, nodes = self.app.object_ring.get_nodes( self.account_name, self.container_name, self.object_name) + shuffle(nodes) resp = self.GETorHEAD_base(req, _('Object'), partition, self.iter_nodes(partition, nodes, self.app.object_ring), req.path_info, self.app.object_ring.replica_count) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index e5a4e40652..103ce2a665 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1044,6 +1044,7 @@ class TestObjectController(unittest.TestCase): def test_error_limiting(self): with save_globals(): 
+ proxy_server.shuffle = lambda l: None controller = proxy_server.ObjectController(self.app, 'account', 'container', 'object') self.assert_status_map(controller.HEAD, (503, 200, 200), 200) From 7e392b70e9a7c5f326714f313c24e780ca5efd27 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Wed, 19 Jan 2011 08:41:11 +0900 Subject: [PATCH 124/199] s3api: update the comment for the id scheme change --- swift/common/middleware/swift3.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index f6a5126693..f01c510dad 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -32,8 +32,8 @@ To add this middleware to your configuration, add the swift3 middleware in front of the auth middleware, and before any other middleware that look at swift requests (like rate limiting). -To set up your client, the access key will be the account string that -should look like AUTH_d305e9dbedbc47df8b25ab46f3152f81, and the +To set up your client, the access key will be the concatenation of the +account and user strings that should look like test:tester, and the secret access key is the account password. The host should also point to the swift storage hostname. It also will have to use the old style calling format, and not the hostname based container format. 
@@ -42,7 +42,7 @@ An example client using the python boto library might look like the following for an SAIO setup:: connection = boto.s3.Connection( - aws_access_key_id='AUTH_d305e9dbedbc47df8b25ab46f3152f81', + aws_access_key_id='test:tester', aws_secret_access_key='testing', port=8080, host='127.0.0.1', From 98c201043c52893f3ec23b990b14c3441a2f77b3 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Wed, 19 Jan 2011 09:56:58 +0900 Subject: [PATCH 125/199] swauth: add s3api support This changes handle_put_user() to put 'x-object-meta-account-id' to a user object. The metadata includes a cfaccount like x-container-meta-account-id. The above enables swauth to avoid issuing two HTTP requests per single S3 request, that is, swauth gets the password and cfaccount from the account and user by issuing 'GET /v1/(auth_account)/(account)/(user)' If swauth can't get 'x-object-meta-account-id' metadata from a user object (the existing user objects), it issues 'GET /v1/(auth_account)/(account)' to get the cfaccount.
--- swift/common/middleware/swauth.py | 50 +++++++++++++++++++++++++++++-- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 105098c807..29dfa6228c 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -23,6 +23,9 @@ from traceback import format_exc from urllib import quote, unquote from urlparse import urlparse from uuid import uuid4 +from hashlib import md5, sha1 +import hmac +import base64 from eventlet.timeout import Timeout from webob import Response, Request @@ -123,8 +126,9 @@ class Swauth(object): env['HTTP_X_CF_TRANS_ID'] = 'tx' + str(uuid4()) if env.get('PATH_INFO', '').startswith(self.auth_prefix): return self.handle(env, start_response) + s3 = env.get('HTTP_AUTHORIZATION') token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN')) - if token and token.startswith(self.reseller_prefix): + if s3 or (token and token.startswith(self.reseller_prefix)): # Note: Empty reseller_prefix will match all tokens. 
groups = self.get_groups(env, token) if groups: @@ -192,6 +196,40 @@ class Swauth(object): expires, groups = cached_auth_data if expires < time(): groups = None + + if env.get('HTTP_AUTHORIZATION'): + account, user, sign = env['HTTP_AUTHORIZATION'].split(' ')[1].split(':') + path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) + resp = self.make_request(env, 'GET', path).get_response(self.app) + if resp.status_int // 100 != 2: + return None + + if 'x-object-meta-account-id' in resp.headers: + account_id = resp.headers['x-object-meta-account-id'] + else: + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp2 = self.make_request(env, 'GET', path).get_response(self.app) + if resp2.status_int // 100 != 2: + return None + account_id = resp2.headers['x-container-meta-account-id'] + + path = env['PATH_INFO'] + env['PATH_INFO'] = path.replace("%s:%s" % (account, user), account_id, 1) + detail = json.loads(resp.body) + + password = detail['auth'].split(':')[-1] + msg = base64.urlsafe_b64decode(unquote(token)) + s = base64.encodestring(hmac.new(detail['auth'].split(':')[-1], msg, sha1).digest()).strip() + if s != sign: + return None + + groups = [g['name'] for g in detail['groups']] + if '.admin' in groups: + groups.remove('.admin') + groups.append(account_id) + groups = ','.join(groups) + return groups + if not groups: path = quote('/v1/%s/.token_%s/%s' % (self.auth_account, token[-1], token)) @@ -839,6 +877,13 @@ class Swauth(object): return HTTPForbidden(request=req) elif not self.is_account_admin(req, account): return HTTPForbidden(request=req) + + path = quote('/v1/%s/%s' % (self.auth_account, account)) + resp = self.make_request(req.environ, 'GET', path).get_response(self.app) + if resp.status_int // 100 != 2: + raise Exception('Could not create user object: %s %s' % + (path, resp.status)) + headers={'X-Object-Meta-Account-Id': '%s' % resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object 
represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) @@ -847,9 +892,10 @@ class Swauth(object): groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') + resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': 'plaintext:%s' % key, - 'groups': [{'name': g} for g in groups]})).get_response(self.app) + 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: From 2fc5c401fa3b90135232a20d34362dffffff336c Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Wed, 19 Jan 2011 03:56:13 +0000 Subject: [PATCH 126/199] break up that chunked put test --- test/unit/proxy/test_server.py | 1181 +++++++++++++++++--------------- 1 file changed, 619 insertions(+), 562 deletions(-) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 103ce2a665..9b5d85bc0f 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -47,11 +47,111 @@ from swift.common import ring from swift.common.constraints import MAX_META_NAME_LENGTH, \ MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, MAX_FILE_SIZE from swift.common.utils import mkdirs, normalize_timestamp, NullLogger +from swift.common.wsgi import monkey_patch_mimetools # mocks logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) +def setup(): + global _testdir, _test_servers, _test_sockets, \ + _orig_container_listing_limit, _test_coros + monkey_patch_mimetools() + # Since we're starting up a lot here, we're going to test more than + # just chunked puts; we're also going to test parts of + # proxy_server.Application we couldn't get to easily otherwise. 
+ path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') + if not path_to_test_xfs or not os.path.exists(path_to_test_xfs): + print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ + 'pointing to a valid directory.\n' \ + 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ + 'system for testing.' + raise SkipTest + _testdir = \ + os.path.join(path_to_test_xfs, 'tmp_test_proxy_server_chunked') + mkdirs(_testdir) + rmtree(_testdir) + mkdirs(os.path.join(_testdir, 'sda1')) + mkdirs(os.path.join(_testdir, 'sda1', 'tmp')) + mkdirs(os.path.join(_testdir, 'sdb1')) + mkdirs(os.path.join(_testdir, 'sdb1', 'tmp')) + _orig_container_listing_limit = proxy_server.CONTAINER_LISTING_LIMIT + conf = {'devices': _testdir, 'swift_dir': _testdir, + 'mount_check': 'false'} + prolis = listen(('localhost', 0)) + acc1lis = listen(('localhost', 0)) + acc2lis = listen(('localhost', 0)) + con1lis = listen(('localhost', 0)) + con2lis = listen(('localhost', 0)) + obj1lis = listen(('localhost', 0)) + obj2lis = listen(('localhost', 0)) + _test_sockets = \ + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': acc1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': acc2lis.getsockname()[1]}], 30), + GzipFile(os.path.join(_testdir, 'account.ring.gz'), 'wb')) + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': con1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': con2lis.getsockname()[1]}], 30), + GzipFile(os.path.join(_testdir, 'container.ring.gz'), 'wb')) + pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], + [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', + 'port': obj1lis.getsockname()[1]}, + {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', + 'port': 
obj2lis.getsockname()[1]}], 30), + GzipFile(os.path.join(_testdir, 'object.ring.gz'), 'wb')) + prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone()) + acc1srv = account_server.AccountController(conf) + acc2srv = account_server.AccountController(conf) + con1srv = container_server.ContainerController(conf) + con2srv = container_server.ContainerController(conf) + obj1srv = object_server.ObjectController(conf) + obj2srv = object_server.ObjectController(conf) + _test_servers = \ + (prosrv, acc1srv, acc2srv, con2srv, con2srv, obj1srv, obj2srv) + nl = NullLogger() + prospa = spawn(wsgi.server, prolis, prosrv, nl) + acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl) + acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl) + con1spa = spawn(wsgi.server, con1lis, con1srv, nl) + con2spa = spawn(wsgi.server, con2lis, con2srv, nl) + obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl) + obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl) + _test_coros = \ + (prospa, acc1spa, acc2spa, con2spa, con2spa, obj1spa, obj2spa) + # Create account + ts = normalize_timestamp(time()) + partition, nodes = prosrv.account_ring.get_nodes('a') + for node in nodes: + conn = proxy_server.http_connect(node['ip'], node['port'], + node['device'], partition, 'PUT', '/a', + {'X-Timestamp': ts, 'X-CF-Trans-Id': 'test'}) + resp = conn.getresponse() + assert(resp.status == 201) + # Create container + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + assert(headers[:len(exp)] == exp) + + +def teardown(): + for server in _test_coros: + server.kill() + proxy_server.CONTAINER_LISTING_LIMIT = _orig_container_listing_limit + rmtree(_testdir) + + def fake_http_connect(*code_iter, **kwargs): class FakeConn(object): @@ -427,6 +527,10 @@ class 
TestObjectController(unittest.TestCase): account_ring=FakeRing(), container_ring=FakeRing(), object_ring=FakeRing()) + + def tearDown(self): + proxy_server.CONTAINER_LISTING_LIMIT = _orig_container_listing_limit + def assert_status_map(self, method, statuses, expected, raise_exc=False): with save_globals(): kwargs = {} @@ -1586,576 +1690,527 @@ class TestObjectController(unittest.TestCase): finally: server.MAX_FILE_SIZE = MAX_FILE_SIZE - def test_chunked_put_and_a_bit_more(self): - # Since we're starting up a lot here, we're going to test more than - # just chunked puts; we're also going to test parts of - # proxy_server.Application we couldn't get to easily otherwise. - path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not path_to_test_xfs or not os.path.exists(path_to_test_xfs): - print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' - raise SkipTest - testdir = \ - os.path.join(path_to_test_xfs, 'tmp_test_proxy_server_chunked') - mkdirs(testdir) - rmtree(testdir) - mkdirs(os.path.join(testdir, 'sda1')) - mkdirs(os.path.join(testdir, 'sda1', 'tmp')) - mkdirs(os.path.join(testdir, 'sdb1')) - mkdirs(os.path.join(testdir, 'sdb1', 'tmp')) - try: - orig_container_listing_limit = proxy_server.CONTAINER_LISTING_LIMIT - conf = {'devices': testdir, 'swift_dir': testdir, - 'mount_check': 'false'} - prolis = listen(('localhost', 0)) - acc1lis = listen(('localhost', 0)) - acc2lis = listen(('localhost', 0)) - con1lis = listen(('localhost', 0)) - con2lis = listen(('localhost', 0)) - obj1lis = listen(('localhost', 0)) - obj2lis = listen(('localhost', 0)) - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': acc1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': acc2lis.getsockname()[1]}], 30), - 
GzipFile(os.path.join(testdir, 'account.ring.gz'), 'wb')) - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': con1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': con2lis.getsockname()[1]}], 30), - GzipFile(os.path.join(testdir, 'container.ring.gz'), 'wb')) - pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], - [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', - 'port': obj1lis.getsockname()[1]}, - {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1', - 'port': obj2lis.getsockname()[1]}], 30), - GzipFile(os.path.join(testdir, 'object.ring.gz'), 'wb')) - prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone()) - acc1srv = account_server.AccountController(conf) - acc2srv = account_server.AccountController(conf) - con1srv = container_server.ContainerController(conf) - con2srv = container_server.ContainerController(conf) - obj1srv = object_server.ObjectController(conf) - obj2srv = object_server.ObjectController(conf) - nl = NullLogger() - prospa = spawn(wsgi.server, prolis, prosrv, nl) - acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl) - acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl) - con1spa = spawn(wsgi.server, con1lis, con1srv, nl) - con2spa = spawn(wsgi.server, con2lis, con2srv, nl) - obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl) - obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl) - try: - # Check bad version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - # Check bad path - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nContent-Length: 
0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 404' - self.assertEquals(headers[:len(exp)], exp) - # Check invalid utf-8 - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - # Check bad path, no controller - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) - # Check bad method - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 405' - self.assertEquals(headers[:len(exp)], exp) - # Check unhandled exception - orig_update_request = prosrv.update_request + def test_chunked_put_bad_version(self): + # Check bad version + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nContent-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 412' + self.assertEquals(headers[:len(exp)], exp) - def broken_update_request(env, req): - raise Exception('fake') + def test_chunked_put_bad_path(self): + # Check bad path + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = 
sock.makefile() + fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nContent-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 404' + self.assertEquals(headers[:len(exp)], exp) - prosrv.update_request = broken_update_request - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 500' - self.assertEquals(headers[:len(exp)], exp) - prosrv.update_request = orig_update_request - # Okay, back to chunked put testing; Create account - ts = normalize_timestamp(time()) - partition, nodes = prosrv.account_ring.get_nodes('a') - for node in nodes: - conn = proxy_server.http_connect(node['ip'], node['port'], - node['device'], partition, 'PUT', '/a', - {'X-Timestamp': ts, 'X-CF-Trans-Id': 'test'}) - resp = conn.getresponse() - self.assertEquals(resp.status, 201) - # Head account, just a double check and really is here to test - # the part Application.log_request that 'enforces' a - # content_length on the response. 
- sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('\r\nContent-Length: 0\r\n' in headers) - # Create container - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # GET account with a query string to test that - # Application.log_request logs the query string. Also, throws - # in a test for logging x-forwarded-for (first entry only). + def test_chunked_put_bad_utf8(self): + # Check invalid utf-8 + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 412' + self.assertEquals(headers[:len(exp)], exp) - class Logger(object): + def test_chunked_put_bad_path_no_controller(self): + # Check bad path, no controller + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 412' + self.assertEquals(headers[:len(exp)], exp) - def info(self, msg): - self.msg = msg + def test_chunked_put_bad_method(self): + # 
Check bad method + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 405' + self.assertEquals(headers[:len(exp)], exp) - orig_logger = prosrv.logger - prosrv.logger = Logger() - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write( - 'GET /v1/a?format=json HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\nX-Forwarded-For: host1, host2\r\n' - '\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('/v1/a%3Fformat%3Djson' in prosrv.logger.msg, - prosrv.logger.msg) - exp = 'host1' - self.assertEquals(prosrv.logger.msg[:len(exp)], exp) - prosrv.logger = orig_logger - # Turn on header logging. 
+ def test_chunked_put_unhandled_exception(self): + # Check unhandled exception + (prosrv, acc1srv, acc2srv, con2srv, con2srv, obj1srv, obj2srv) = \ + _test_servers + (prolis, acc1lis, acc2lis, con2lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + orig_update_request = prosrv.update_request - class Logger(object): + def broken_update_request(env, req): + raise Exception('fake') - def info(self, msg): - self.msg = msg + prosrv.update_request = broken_update_request + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 500' + self.assertEquals(headers[:len(exp)], exp) + prosrv.update_request = orig_update_request - orig_logger = prosrv.logger - prosrv.logger = Logger() - prosrv.log_headers = True - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n' - 'Content-Length: 0\r\nGoofy-Header: True\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Goofy-Header%3A%20True' in prosrv.logger.msg, - prosrv.logger.msg) - prosrv.log_headers = False - prosrv.logger = orig_logger - # Test UTF-8 Unicode all the way through the system - ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \ - '\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \ - '\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \ - '\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \ - '\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \ - '\xbf\x86.Test' - ustr_short = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest' - # Create ustr container - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: 
localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # List account with ustr container (test plain) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - containers = fd.read().split('\n') - self.assert_(ustr in containers) - # List account with ustr container (test json) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a?format=json HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - listing = simplejson.loads(fd.read()) - self.assertEquals(listing[1]['name'], ustr.decode('utf8')) - # List account with ustr container (test xml) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a?format=xml HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('<name>%s</name>' % ustr in fd.read()) - # Create ustr object with ustr metadata in ustr container - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' % - (quote(ustr), quote(ustr), quote(ustr_short), - quote(ustr))) - fd.flush() - 
headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # List ustr container with ustr object (test plain) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - objects = fd.read().split('\n') - self.assert_(ustr in objects) - # List ustr container with ustr object (test json) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % - quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - listing = simplejson.loads(fd.read()) - self.assertEquals(listing[0]['name'], ustr.decode('utf8')) - # List ustr container with ustr object (test xml) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n' - 'Host: localhost\r\nConnection: close\r\n' - 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % - quote(ustr)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('<name>%s</name>' % ustr in fd.read()) - # Retrieve ustr object with ustr metadata - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % - (quote(ustr), quote(ustr))) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('\r\nX-Object-Meta-%s: 
%s\r\n' % - (quote(ustr_short).lower(), quote(ustr)) in headers) - # Do chunked object put - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - # Also happens to assert that x-storage-token is taken as a - # replacement for x-auth-token. - fd.write('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Transfer-Encoding: chunked\r\n\r\n' - '2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure we get what we put - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - self.assertEquals(body, 'oh hai123456789abcdef') - # Create a container for our segmented/manifest object testing - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/segmented HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create the object segments - for segment in xrange(5): - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/segmented/name/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n1234 ' % str(segment)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Create the object manifest file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT 
/v1/a/segmented/name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' - 'segmented/name/\r\nContent-Type: text/jibberish\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gets the whole object - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('X-Object-Manifest: segmented/name/' in headers) - self.assert_('Content-Type: text/jibberish' in headers) - body = fd.read() - self.assertEquals(body, '1234 1234 1234 1234 1234 ') - # Do it again but exceeding the container listing limit - proxy_server.CONTAINER_LISTING_LIMIT = 2 - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('X-Object-Manifest: segmented/name/' in headers) - self.assert_('Content-Type: text/jibberish' in headers) - body = fd.read() - # A bit fragile of a test; as it makes the assumption that all - # will be sent in a single chunk. - self.assertEquals(body, - '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') - # Make a copy of the manifested object, which should - # error since the number of segments exceeds - # CONTAINER_LISTING_LIMIT. 
- sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 413' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - # After adjusting the CONTAINER_LISTING_LIMIT, make a copy of - # the manifested object which should consolidate the segments. - proxy_server.CONTAINER_LISTING_LIMIT = 10000 - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' - '0\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - body = fd.read() - # Retrieve and validate the copy. 
- sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/segmented/copy HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('x-object-manifest:' not in headers.lower()) - self.assert_('Content-Length: 25\r' in headers) - body = fd.read() - self.assertEquals(body, '1234 1234 1234 1234 1234 ') - # Create an object manifest file pointing to nothing - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/segmented/empty HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' - 'segmented/empty/\r\nContent-Type: text/jibberish\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gives a zero-byte file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/segmented/empty HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('X-Object-Manifest: segmented/empty/' in headers) - self.assert_('Content-Type: text/jibberish' in headers) - body = fd.read() - self.assertEquals(body, '') - # Check copy content type - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/obj HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nContent-Type: text/jibberish' - '\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', 
prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/obj2 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nX-Copy-From: c/obj\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure getting the copied file gets original content-type - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/obj2 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Content-Type: text/jibberish' in headers) - # Check set content type - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/obj3 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' - '\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure getting the copied file gets original content-type - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/obj3 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Content-Type: foo/bar' in - headers.split('\r\n'), repr(headers.split('\r\n'))) - # Check set content type with charset - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/c/obj4 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' - '; charset=UTF-8\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 
201' - self.assertEquals(headers[:len(exp)], exp) - # Ensure getting the copied file gets original content-type - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/c/obj4 HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\n\r\n') - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) - self.assert_('Content-Type: foo/bar; charset=UTF-8' in - headers.split('\r\n'), repr(headers.split('\r\n'))) - finally: - prospa.kill() - acc1spa.kill() - acc2spa.kill() - con1spa.kill() - con2spa.kill() - obj1spa.kill() - obj2spa.kill() - finally: - proxy_server.CONTAINER_LISTING_LIMIT = orig_container_listing_limit - rmtree(testdir) + def test_chunked_put_head_account(self): + # Head account, just a double check and really is here to test + # the part Application.log_request that 'enforces' a + # content_length on the response. + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 204' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('\r\nContent-Length: 0\r\n' in headers) + + def test_chunked_put_logging(self): + # GET account with a query string to test that + # Application.log_request logs the query string. Also, throws + # in a test for logging x-forwarded-for (first entry only). 
+ (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv) = \ + _test_servers + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + + class Logger(object): + + def info(self, msg): + self.msg = msg + + orig_logger = prosrv.logger + prosrv.logger = Logger() + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write( + 'GET /v1/a?format=json HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\nX-Forwarded-For: host1, host2\r\n' + '\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('/v1/a%3Fformat%3Djson' in prosrv.logger.msg, + prosrv.logger.msg) + exp = 'host1' + self.assertEquals(prosrv.logger.msg[:len(exp)], exp) + prosrv.logger = orig_logger + # Turn on header logging. + + orig_logger = prosrv.logger + prosrv.logger = Logger() + prosrv.log_headers = True + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n' + 'Content-Length: 0\r\nGoofy-Header: True\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Goofy-Header%3A%20True' in prosrv.logger.msg, + prosrv.logger.msg) + prosrv.log_headers = False + prosrv.logger = orig_logger + + def test_chunked_put_utf8_all_the_way_down(self): + # Test UTF-8 Unicode all the way through the system + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \ + '\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \ + '\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \ + '\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \ + '\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \ + '\xbf\x86.Test' + ustr_short = 
'\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest' + # Create ustr container + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n' % quote(ustr)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # List account with ustr container (test plain) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + containers = fd.read().split('\n') + self.assert_(ustr in containers) + # List account with ustr container (test json) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a?format=json HTTP/1.1\r\n' + 'Host: localhost\r\nConnection: close\r\n' + 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + listing = simplejson.loads(fd.read()) + self.assert_(ustr.decode('utf8') in [l['name'] for l in listing]) + # List account with ustr container (test xml) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a?format=xml HTTP/1.1\r\n' + 'Host: localhost\r\nConnection: close\r\n' + 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('<name>%s</name>' % ustr in fd.read()) + # Create ustr object with ustr metadata in ustr container + sock = connect_tcp(('localhost', 
prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' % + (quote(ustr), quote(ustr), quote(ustr_short), + quote(ustr))) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # List ustr container with ustr object (test plain) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n' % quote(ustr)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + objects = fd.read().split('\n') + self.assert_(ustr in objects) + # List ustr container with ustr object (test json) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n' + 'Host: localhost\r\nConnection: close\r\n' + 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % + quote(ustr)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + listing = simplejson.loads(fd.read()) + self.assertEquals(listing[0]['name'], ustr.decode('utf8')) + # List ustr container with ustr object (test xml) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n' + 'Host: localhost\r\nConnection: close\r\n' + 'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' % + quote(ustr)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('<name>%s</name>' % ustr in fd.read()) + # Retrieve ustr object with ustr metadata + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s/%s 
HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n' % + (quote(ustr), quote(ustr))) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('\r\nX-Object-Meta-%s: %s\r\n' % + (quote(ustr_short).lower(), quote(ustr)) in headers) + + def test_chunked_put_chunked_put(self): + # Do chunked object put + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + # Also happens to assert that x-storage-token is taken as a + # replacement for x-auth-token. + fd.write('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Transfer-Encoding: chunked\r\n\r\n' + '2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n' + '0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure we get what we put + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + body = fd.read() + self.assertEquals(body, 'oh hai123456789abcdef') + + def test_chunked_put_lobjects(self): + # Create a container for our segmented/manifest object testing + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Create the object 
segments + for segment in xrange(5): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/name/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\n\r\n1234 ' % str(segment)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Create the object manifest file + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' + 'segmented/name/\r\nContent-Type: text/jibberish\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure retrieving the manifest file gets the whole object + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/name/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = fd.read() + self.assertEquals(body, '1234 1234 1234 1234 1234 ') + # Do it again but exceeding the container listing limit + proxy_server.CONTAINER_LISTING_LIMIT = 2 + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/name/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = 
fd.read() + # A bit fragile of a test; as it makes the assumption that all + # will be sent in a single chunk. + self.assertEquals(body, + '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n') + # Make a copy of the manifested object, which should + # error since the number of segments exceeds + # CONTAINER_LISTING_LIMIT. + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' + '0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 413' + self.assertEquals(headers[:len(exp)], exp) + body = fd.read() + # After adjusting the CONTAINER_LISTING_LIMIT, make a copy of + # the manifested object which should consolidate the segments. + proxy_server.CONTAINER_LISTING_LIMIT = 10000 + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nX-Copy-From: segmented/name\r\nContent-Length: ' + '0\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + body = fd.read() + # Retrieve and validate the copy. 
+ sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/copy HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('x-object-manifest:' not in headers.lower()) + self.assert_('Content-Length: 25\r' in headers) + body = fd.read() + self.assertEquals(body, '1234 1234 1234 1234 1234 ') + # Create an object manifest file pointing to nothing + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/segmented/empty HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Object-Manifest: ' + 'segmented/empty/\r\nContent-Type: text/jibberish\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure retrieving the manifest file gives a zero-byte file + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/segmented/empty HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('X-Object-Manifest: segmented/empty/' in headers) + self.assert_('Content-Type: text/jibberish' in headers) + body = fd.read() + self.assertEquals(body, '') + # Check copy content type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: text/jibberish' + '\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', 
prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj2 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nX-Copy-From: c/obj\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj2 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + print headers + self.assert_('Content-Type: text/jibberish' in headers) + # Check set content type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj3 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' + '\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj3 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Content-Type: foo/bar' in + headers.split('\r\n'), repr(headers.split('\r\n'))) + # Check set content type with charset + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/c/obj4 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\nContent-Type: foo/bar' + '; charset=UTF-8\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + 
exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) + # Ensure getting the copied file gets original content-type + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/c/obj4 HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\n\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEquals(headers[:len(exp)], exp) + self.assert_('Content-Type: foo/bar; charset=UTF-8' in + headers.split('\r\n'), repr(headers.split('\r\n'))) def test_mismatched_etags(self): with save_globals(): @@ -3377,4 +3432,6 @@ class TestSegmentedIterable(unittest.TestCase): if __name__ == '__main__': + setup() unittest.main() + teardown() From ba8255affcae6f8ed27071121ead1e755ed56ca5 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Wed, 19 Jan 2011 15:23:29 +0900 Subject: [PATCH 127/199] swauth: update the unit tests for s3api changes --- test/unit/common/middleware/test_swauth.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 2e4d958a44..c1dff69599 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -2561,6 +2561,7 @@ class TestAuth(unittest.TestCase): def test_put_user_regular_success(self): self.test_auth.app = FakeApp(iter([ + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of user object ('201 Created', {}, '')])) resp = Request.blank('/auth/v2/act/usr', @@ -2570,13 +2571,14 @@ class TestAuth(unittest.TestCase): 'X-Auth-User-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(self.test_auth.app.calls, 2) self.assertEquals(json.loads(self.test_auth.app.request.body), {"groups": [{"name": "act:usr"}, {"name": "act"}], "auth": 
"plaintext:key"}) def test_put_user_account_admin_success(self): self.test_auth.app = FakeApp(iter([ + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of user object ('201 Created', {}, '')])) resp = Request.blank('/auth/v2/act/usr', @@ -2587,7 +2589,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-User-Admin': 'true'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(self.test_auth.app.calls, 2) self.assertEquals(json.loads(self.test_auth.app.request.body), {"groups": [{"name": "act:usr"}, {"name": "act"}, {"name": ".admin"}], @@ -2595,6 +2597,7 @@ class TestAuth(unittest.TestCase): def test_put_user_reseller_admin_success(self): self.test_auth.app = FakeApp(iter([ + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of user object ('201 Created', {}, '')])) resp = Request.blank('/auth/v2/act/usr', @@ -2605,7 +2608,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-User-Reseller-Admin': 'true'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 201) - self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(self.test_auth.app.calls, 2) self.assertEquals(json.loads(self.test_auth.app.request.body), {"groups": [{"name": "act:usr"}, {"name": "act"}, {"name": ".admin"}, {"name": ".reseller_admin"}], @@ -2613,6 +2616,7 @@ class TestAuth(unittest.TestCase): def test_put_user_fail_not_found(self): self.test_auth.app = FakeApp(iter([ + ('200 Ok', {'X-Container-Meta-Account-Id': 'AUTH_cfa'}, ''), # PUT of user object ('404 Not Found', {}, '')])) resp = Request.blank('/auth/v2/act/usr', @@ -2622,7 +2626,7 @@ class TestAuth(unittest.TestCase): 'X-Auth-User-Key': 'key'} ).get_response(self.test_auth) self.assertEquals(resp.status_int, 404) - self.assertEquals(self.test_auth.app.calls, 1) + self.assertEquals(self.test_auth.app.calls, 2) def test_put_user_fail(self): self.test_auth.app = FakeApp(iter([ From 
ea9ccf33b87488d9660e57c64bde0f980fa8617b Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Wed, 19 Jan 2011 15:23:53 +0900 Subject: [PATCH 128/199] swauth: pep8 fixes --- swift/common/middleware/swauth.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 29dfa6228c..0989c30bd5 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -198,7 +198,8 @@ class Swauth(object): groups = None if env.get('HTTP_AUTHORIZATION'): - account, user, sign = env['HTTP_AUTHORIZATION'].split(' ')[1].split(':') + account = env['HTTP_AUTHORIZATION'].split(' ')[1] + account, user, sign = account.split(':') path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) resp = self.make_request(env, 'GET', path).get_response(self.app) if resp.status_int // 100 != 2: @@ -208,21 +209,23 @@ class Swauth(object): account_id = resp.headers['x-object-meta-account-id'] else: path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp2 = self.make_request(env, 'GET', path).get_response(self.app) + resp2 = self.make_request(env, 'GET', + path).get_response(self.app) if resp2.status_int // 100 != 2: return None account_id = resp2.headers['x-container-meta-account-id'] path = env['PATH_INFO'] - env['PATH_INFO'] = path.replace("%s:%s" % (account, user), account_id, 1) + env['PATH_INFO'] = path.replace("%s:%s" % (account, user), + account_id, 1) detail = json.loads(resp.body) - password = detail['auth'].split(':')[-1] + password = detail['auth'].split(':')[-1] msg = base64.urlsafe_b64decode(unquote(token)) - s = base64.encodestring(hmac.new(detail['auth'].split(':')[-1], msg, sha1).digest()).strip() + s = base64.encodestring(hmac.new(detail['auth'].split(':')[-1], + msg, sha1).digest()).strip() if s != sign: return None - groups = [g['name'] for g in detail['groups']] if '.admin' in groups: groups.remove('.admin') @@ 
-883,7 +886,7 @@ class Swauth(object): if resp.status_int // 100 != 2: raise Exception('Could not create user object: %s %s' % (path, resp.status)) - headers={'X-Object-Meta-Account-Id': '%s' % resp.headers['x-container-meta-account-id']} + headers = {'X-Object-Meta-Account-Id': '%s' % resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) @@ -892,7 +895,6 @@ class Swauth(object): groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') - resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': 'plaintext:%s' % key, 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) From efda6c0736323916f8403870fd734c2a4823d1cf Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 19 Jan 2011 11:05:42 -0600 Subject: [PATCH 129/199] mocked out xattr in test_proxy --- test/unit/proxy/test_server.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 9b5d85bc0f..5f44e258b0 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -60,15 +60,24 @@ def setup(): # Since we're starting up a lot here, we're going to test more than # just chunked puts; we're also going to test parts of # proxy_server.Application we couldn't get to easily otherwise. - path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not path_to_test_xfs or not os.path.exists(path_to_test_xfs): - print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' 
- raise SkipTest + xattr_data = {} + + def mock_setxattr(fd, k, v): + inode = os.fstat(fd).st_ino + data = xattr_data.get(inode, {}) + data[k] = v + xattr_data[inode] = data + + def mock_getxattr(fd, k): + inode = os.stat(fd.name).st_ino + data = xattr_data.get(inode, {}).get(k) + if not data: + raise IOError + return data + object_server.setxattr = mock_setxattr + object_server.getxattr = mock_getxattr _testdir = \ - os.path.join(path_to_test_xfs, 'tmp_test_proxy_server_chunked') + os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked') mkdirs(_testdir) rmtree(_testdir) mkdirs(os.path.join(_testdir, 'sda1')) From 105315dfc40468fb31ca1a2e56f5b84e3d84ee41 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 19 Jan 2011 14:18:37 -0600 Subject: [PATCH 130/199] obsolete PATH_TO_TEST_XFS --- doc/source/development_saio.rst | 1 - test/unit/__init__.py | 27 ++++++++++++++ test/unit/container/test_server.py | 13 +------ test/unit/container/test_updater.py | 13 +------ test/unit/obj/test_auditor.py | 16 ++------ test/unit/obj/test_server.py | 58 ++--------------------------- test/unit/proxy/test_server.py | 17 --------- 7 files changed, 38 insertions(+), 107 deletions(-) diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index a74e6df8c9..1967215d4b 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -204,7 +204,6 @@ Do these commands as you on guest: #. `cd ~/swift/trunk; sudo python setup.py develop` #. 
Edit `~/.bashrc` and add to the end:: - export PATH_TO_TEST_XFS=/mnt/sdb1/test export SWIFT_TEST_CONFIG_FILE=/etc/swift/func_test.conf export PATH=${PATH}:~/bin diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 1895098c2e..7c3b169cbb 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -37,6 +37,33 @@ def tmpfile(content): finally: os.unlink(file_name) +xattr_data = {} + +def _get_inode(fd): + if not isinstance(fd, int): + try: + fd = fd.fileno() + except AttributeError: + return os.stat(fd).st_ino + return os.fstat(fd).st_ino + +def _setxattr(fd, k, v): + inode = _get_inode(fd) + data = xattr_data.get(inode, {}) + data[k] = v + xattr_data[inode] = data + +def _getxattr(fd, k): + inode = _get_inode(fd) + data = xattr_data.get(inode, {}).get(k) + if not data: + raise IOError + return data + +import xattr +xattr.setxattr = _setxattr +xattr.getxattr = _getxattr + class MockTrue(object): """ diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py index 2f9d5badea..cbd44624ad 100644 --- a/test/unit/container/test_server.py +++ b/test/unit/container/test_server.py @@ -19,6 +19,7 @@ import unittest from shutil import rmtree from StringIO import StringIO from time import time +from tempfile import mkdtemp from eventlet import spawn, TimeoutError, listen from eventlet.timeout import Timeout @@ -33,17 +34,7 @@ class TestContainerController(unittest.TestCase): """ Test swift.container_server.ContainerController """ def setUp(self): """ Set up for testing swift.object_server.ObjectController """ - self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not self.path_to_test_xfs or \ - not os.path.exists(self.path_to_test_xfs): - print >>sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' 
- self.testdir = '/tmp/SWIFTUNITTEST' - else: - self.testdir = os.path.join(self.path_to_test_xfs, - 'tmp_test_object_server_ObjectController') + self.testdir = os.path.join(mkdtemp(), 'tmp_test_object_server_ObjectController') mkdirs(self.testdir) rmtree(self.testdir) mkdirs(os.path.join(self.testdir, 'sda1')) diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index 092944c4be..0302382b38 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -19,6 +19,7 @@ import sys import unittest from gzip import GzipFile from shutil import rmtree +from tempfile import mkdtemp from eventlet import spawn, TimeoutError, listen from eventlet.timeout import Timeout @@ -35,17 +36,7 @@ class TestContainerUpdater(unittest.TestCase): def setUp(self): utils.HASH_PATH_SUFFIX = 'endcap' - self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not self.path_to_test_xfs or \ - not os.path.exists(self.path_to_test_xfs): - print >>sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' 
- self.testdir = '/tmp/SWIFTUNITTEST' - else: - self.testdir = os.path.join(self.path_to_test_xfs, - 'tmp_test_container_updater') + self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater') rmtree(self.testdir, ignore_errors=1) os.mkdir(self.testdir) pickle.dump(RingData([[0, 1, 0, 1], [1, 0, 1, 0]], diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 9ee42cb3ec..41d4453ce8 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -20,7 +20,9 @@ import os import time from shutil import rmtree from hashlib import md5 +from tempfile import mkdtemp from swift.obj import auditor +from swift.obj import server as object_server from swift.obj.server import DiskFile, write_metadata from swift.common.utils import hash_path, mkdirs, normalize_timestamp, renamer from swift.obj.replicator import invalidate_hash @@ -30,18 +32,8 @@ from swift.common.exceptions import AuditException class TestAuditor(unittest.TestCase): def setUp(self): - self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not self.path_to_test_xfs or \ - not os.path.exists(self.path_to_test_xfs): - print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' 
- self.testdir = '/tmp/SWIFTUNITTEST' - else: - self.testdir = os.path.join(self.path_to_test_xfs, - 'tmp_test_object_auditor') - + self.testdir = \ + os.path.join(mkdtemp(), 'tmp_test_object_auditor') self.devices = os.path.join(self.testdir, 'node') rmtree(self.testdir, ignore_errors=1) os.mkdir(self.testdir) diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 64c58ff7ca..e9c186b6b3 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -23,6 +23,7 @@ from nose import SkipTest from shutil import rmtree from StringIO import StringIO from time import gmtime, sleep, strftime, time +from tempfile import mkdtemp from eventlet import sleep, spawn, wsgi, listen from webob import Request @@ -39,17 +40,8 @@ class TestObjectController(unittest.TestCase): def setUp(self): """ Set up for testing swift.object_server.ObjectController """ - self.path_to_test_xfs = os.environ.get('PATH_TO_TEST_XFS') - if not self.path_to_test_xfs or \ - not os.path.exists(self.path_to_test_xfs): - print >> sys.stderr, 'WARNING: PATH_TO_TEST_XFS not set or not ' \ - 'pointing to a valid directory.\n' \ - 'Please set PATH_TO_TEST_XFS to a directory on an XFS file ' \ - 'system for testing.' 
- self.testdir = '/tmp/SWIFTUNITTEST' - else: - self.testdir = os.path.join(self.path_to_test_xfs, - 'tmp_test_object_server_ObjectController') + self.testdir = \ + os.path.join(mkdtemp(), 'tmp_test_object_server_ObjectController') mkdirs(self.testdir) rmtree(self.testdir) mkdirs(os.path.join(self.testdir, 'sda1')) @@ -64,8 +56,6 @@ class TestObjectController(unittest.TestCase): def test_POST_update_meta(self): """ Test swift.object_server.ObjectController.POST """ - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': timestamp, @@ -93,8 +83,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.headers['Content-Type'], 'application/x-test') def test_POST_not_exist(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/fail', environ={'REQUEST_METHOD': 'POST'}, @@ -116,8 +104,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 400) def test_POST_container_connection(self): - if not self.path_to_test_xfs: - raise SkipTest def mock_http_connect(response, with_exc=False): @@ -222,8 +208,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 411) def test_PUT_common(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': timestamp, @@ -247,8 +231,6 @@ class TestObjectController(unittest.TestCase): 'name': '/a/c/o'}) def test_PUT_overwrite(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '6', @@ -281,8 +263,6 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip'}) def 
test_PUT_no_etag(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Type': 'text/plain'}) @@ -300,8 +280,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 422) def test_PUT_user_metadata(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': timestamp, @@ -329,8 +307,6 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) def test_PUT_container_connection(self): - if not self.path_to_test_xfs: - raise SkipTest def mock_http_connect(response, with_exc=False): @@ -399,8 +375,6 @@ class TestObjectController(unittest.TestCase): def test_HEAD(self): """ Test swift.object_server.ObjectController.HEAD """ - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c') resp = self.object_controller.HEAD(req) self.assertEquals(resp.status_int, 400) @@ -466,8 +440,6 @@ class TestObjectController(unittest.TestCase): def test_GET(self): """ Test swift.object_server.ObjectController.GET """ - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c') resp = self.object_controller.GET(req) self.assertEquals(resp.status_int, 400) @@ -555,8 +527,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 404) def test_GET_if_match(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={ 'X-Timestamp': normalize_timestamp(time()), @@ -610,8 +580,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 412) def test_GET_if_none_match(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={ 
'X-Timestamp': normalize_timestamp(time()), @@ -661,8 +629,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.etag, etag) def test_GET_if_modified_since(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={ @@ -698,8 +664,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 304) def test_GET_if_unmodified_since(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={ @@ -737,8 +701,6 @@ class TestObjectController(unittest.TestCase): def test_DELETE(self): """ Test swift.object_server.ObjectController.DELETE """ - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}) resp = self.object_controller.DELETE(req) @@ -865,8 +827,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(outbuf.getvalue()[:4], '405 ') def test_chunked_put(self): - if not self.path_to_test_xfs: - raise SkipTest listener = listen(('localhost', 0)) port = listener.getsockname()[1] killer = spawn(wsgi.server, listener, self.object_controller, @@ -891,8 +851,6 @@ class TestObjectController(unittest.TestCase): killer.kill() def test_max_object_name_length(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/' + ('1' * 1024), environ={'REQUEST_METHOD': 'PUT'}, @@ -912,8 +870,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 400) def test_disk_file_app_iter_corners(self): - if not self.path_to_test_xfs: - raise SkipTest df = object_server.DiskFile(self.testdir, 'sda1', '0', 'a', 'c', 'o') mkdirs(df.datadir) f = open(os.path.join(df.datadir, @@ -946,8 +902,6 @@ class TestObjectController(unittest.TestCase): 
self.assert_(os.path.exists(tmpdir)) def test_max_upload_time(self): - if not self.path_to_test_xfs: - raise SkipTest class SlowBody(): @@ -996,8 +950,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 499) def test_bad_sinces(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '4', 'Content-Type': 'text/plain'}, @@ -1022,8 +974,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.status_int, 412) def test_content_encoding(self): - if not self.path_to_test_xfs: - raise SkipTest req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '4', 'Content-Type': 'text/plain', @@ -1042,8 +992,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.headers['content-encoding'], 'gzip') def test_manifest_header(self): - if not self.path_to_test_xfs: - raise SkipTest timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': timestamp, diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 5f44e258b0..0183947f93 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -60,22 +60,6 @@ def setup(): # Since we're starting up a lot here, we're going to test more than # just chunked puts; we're also going to test parts of # proxy_server.Application we couldn't get to easily otherwise. 
- xattr_data = {} - - def mock_setxattr(fd, k, v): - inode = os.fstat(fd).st_ino - data = xattr_data.get(inode, {}) - data[k] = v - xattr_data[inode] = data - - def mock_getxattr(fd, k): - inode = os.stat(fd.name).st_ino - data = xattr_data.get(inode, {}).get(k) - if not data: - raise IOError - return data - object_server.setxattr = mock_setxattr - object_server.getxattr = mock_getxattr _testdir = \ os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked') mkdirs(_testdir) @@ -1648,7 +1632,6 @@ class TestObjectController(unittest.TestCase): self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') def test_chunked_put(self): - # quick test of chunked put w/o PATH_TO_TEST_XFS class ChunkedFile(): From b2673df12498781b5c6abc6faace73e588b83654 Mon Sep 17 00:00:00 2001 From: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp> Date: Thu, 20 Jan 2011 06:22:05 +0900 Subject: [PATCH 131/199] http_connect and http_connect_raw use the default http ports if no port is given --- swift/common/bufferedhttp.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/swift/common/bufferedhttp.py b/swift/common/bufferedhttp.py index 4fb090ca92..cf74dbe8fa 100644 --- a/swift/common/bufferedhttp.py +++ b/swift/common/bufferedhttp.py @@ -118,6 +118,8 @@ def http_connect(ipaddr, port, device, partition, method, path, :param ssl: set True if SSL should be used (default: False) :returns: HTTPConnection object """ + if not port: + port = 443 if ssl else 80 if ssl: conn = HTTPSConnection('%s:%s' % (ipaddr, port)) else: @@ -150,6 +152,8 @@ def http_connect_raw(ipaddr, port, method, path, headers=None, :param ssl: set True if SSL should be used (default: False) :returns: HTTPConnection object """ + if not port: + port = 443 if ssl else 80 if ssl: conn = HTTPSConnection('%s:%s' % (ipaddr, port)) else: From 29eddb8c24c1faf06e73c3ff6ba8d438645f66a2 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 19 Jan 2011 16:05:22 -0600 Subject: [PATCH 132/199] made tests 
play nice with standalone unittest, fixed some doc stuff --- doc/source/development_saio.rst | 6 +++--- test/unit/obj/test_auditor.py | 1 + test/unit/obj/test_server.py | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index 1967215d4b..bd0753794e 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -50,7 +50,7 @@ If you are going to use a separate partition for Swift data, be sure to add anot `/dev/sdb1 /mnt/sdb1 xfs noatime,nodiratime,nobarrier,logbufs=8 0 0` #. `mkdir /mnt/sdb1` #. `mount /mnt/sdb1` - #. `mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4 /mnt/sdb1/test` + #. `mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4` #. `chown <your-user-name>:<your-group-name> /mnt/sdb1/*` #. `mkdir /srv` #. `for x in {1..4}; do ln -s /mnt/sdb1/$x /srv/$x; done` @@ -77,7 +77,7 @@ If you want to use a loopback device instead of another partition, follow these `/srv/swift-disk /mnt/sdb1 xfs loop,noatime,nodiratime,nobarrier,logbufs=8 0 0` #. `mkdir /mnt/sdb1` #. `mount /mnt/sdb1` - #. `mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4 /mnt/sdb1/test` + #. `mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4` #. `chown <your-user-name>:<your-group-name> /mnt/sdb1/*` #. `mkdir /srv` #. 
`for x in {1..4}; do ln -s /mnt/sdb1/$x /srv/$x; done` @@ -535,7 +535,7 @@ Setting up scripts for running Swift sudo umount /mnt/sdb1 sudo mkfs.xfs -f -i size=1024 /dev/sdb1 sudo mount /mnt/sdb1 - sudo mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4 /mnt/sdb1/test + sudo mkdir /mnt/sdb1/1 /mnt/sdb1/2 /mnt/sdb1/3 /mnt/sdb1/4 sudo chown <your-user-name>:<your-group-name> /mnt/sdb1/* mkdir -p /srv/1/node/sdb1 /srv/2/node/sdb2 /srv/3/node/sdb3 /srv/4/node/sdb4 sudo rm -f /var/log/debug /var/log/messages /var/log/rsyncd.log /var/log/syslog diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 41d4453ce8..8c1d08e522 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -14,6 +14,7 @@ # limitations under the License. # TODO: Tests +from test import unit as _setup_mocks import unittest import tempfile import os diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index e9c186b6b3..735b944d2d 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -27,7 +27,8 @@ from tempfile import mkdtemp from eventlet import sleep, spawn, wsgi, listen from webob import Request -from xattr import getxattr, setxattr +from test.unit import _getxattr as getxattr +from test.unit import _setxattr as setxattr from test.unit import connect_tcp, readuntil2crlfs from swift.obj import server as object_server From e64e21dd01aee4fa20d2023e2b4cf7fb96d930bd Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Wed, 19 Jan 2011 16:19:43 -0600 Subject: [PATCH 133/199] pep8 --- test/unit/__init__.py | 3 +++ test/unit/container/test_server.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 7c3b169cbb..50b06766de 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -39,6 +39,7 @@ def tmpfile(content): xattr_data = {} + def _get_inode(fd): if not isinstance(fd, int): try: @@ -47,12 +48,14 @@ def 
_get_inode(fd): return os.stat(fd).st_ino return os.fstat(fd).st_ino + def _setxattr(fd, k, v): inode = _get_inode(fd) data = xattr_data.get(inode, {}) data[k] = v xattr_data[inode] = data + def _getxattr(fd, k): inode = _get_inode(fd) data = xattr_data.get(inode, {}).get(k) diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py index cbd44624ad..62d62a1f87 100644 --- a/test/unit/container/test_server.py +++ b/test/unit/container/test_server.py @@ -34,7 +34,8 @@ class TestContainerController(unittest.TestCase): """ Test swift.container_server.ContainerController """ def setUp(self): """ Set up for testing swift.object_server.ObjectController """ - self.testdir = os.path.join(mkdtemp(), 'tmp_test_object_server_ObjectController') + self.testdir = os.path.join(mkdtemp(), + 'tmp_test_object_server_ObjectController') mkdirs(self.testdir) rmtree(self.testdir) mkdirs(os.path.join(self.testdir, 'sda1')) From 9dd1e2ae8456e3624d332ce7c46c025fd0bada6c Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 19 Jan 2011 15:21:57 -0800 Subject: [PATCH 134/199] Updates to remove _ usage that is not i18n related --- bin/st | 19 ++++++++++--------- bin/swift-auth-to-swauth | 2 +- bin/swift-bench | 2 +- bin/swift-init | 4 ++-- bin/swift-stats-populate | 4 ++-- bin/swift-stats-report | 12 ++++++------ swift/account/reaper.py | 2 +- swift/auth/server.py | 5 +++-- swift/common/client.py | 2 +- swift/common/db.py | 4 ++-- swift/common/middleware/swauth.py | 4 ++-- swift/common/middleware/swift3.py | 3 ++- swift/common/ring/builder.py | 9 +++++---- swift/obj/server.py | 2 +- swift/stats/account_stats.py | 4 ++-- swift/stats/log_processor.py | 4 ++-- swift/stats/stats_processor.py | 2 +- test/unit/auth/test_server.py | 18 +++++++++--------- test/unit/common/middleware/test_auth.py | 2 +- test/unit/common/test_client.py | 4 ++-- test/unit/container/test_updater.py | 4 ++-- test/unit/proxy/test_server.py | 4 ++-- 22 files changed, 60 
insertions(+), 56 deletions(-) diff --git a/bin/st b/bin/st index cab398910e..83d19db09c 100755 --- a/bin/st +++ b/bin/st @@ -80,7 +80,7 @@ except ImportError: res = [] consts = {'true': True, 'false': False, 'null': None} string = '(' + comments.sub('', string) + ')' - for type, val, _, _, _ in \ + for type, val, _junk, _junk, _junk in \ generate_tokens(StringIO(string).readline): if (type == OP and val not in '[]{}:,()-') or \ (type == NAME and val not in consts): @@ -914,7 +914,7 @@ def st_delete(parser, args, print_queue, error_queue): segment_queue.put((scontainer, delobj['name'])) if not segment_queue.empty(): segment_threads = [QueueFunctionThread(segment_queue, - _delete_segment, create_connection()) for _ in + _delete_segment, create_connection()) for _junk in xrange(10)] for thread in segment_threads: thread.start() @@ -972,11 +972,11 @@ def st_delete(parser, args, print_queue, error_queue): create_connection = lambda: Connection(options.auth, options.user, options.key, preauthurl=url, preauthtoken=token, snet=options.snet) object_threads = [QueueFunctionThread(object_queue, _delete_object, - create_connection()) for _ in xrange(10)] + create_connection()) for _junk in xrange(10)] for thread in object_threads: thread.start() container_threads = [QueueFunctionThread(container_queue, - _delete_container, create_connection()) for _ in xrange(10)] + _delete_container, create_connection()) for _junk in xrange(10)] for thread in container_threads: thread.start() if not args: @@ -1142,11 +1142,11 @@ def st_download(options, args, print_queue, error_queue): create_connection = lambda: Connection(options.auth, options.user, options.key, preauthurl=url, preauthtoken=token, snet=options.snet) object_threads = [QueueFunctionThread(object_queue, _download_object, - create_connection()) for _ in xrange(10)] + create_connection()) for _junk in xrange(10)] for thread in object_threads: thread.start() container_threads = [QueueFunctionThread(container_queue, - 
_download_container, create_connection()) for _ in xrange(10)] + _download_container, create_connection()) for _junk in xrange(10)] for thread in container_threads: thread.start() if not args: @@ -1525,7 +1525,8 @@ def st_upload(options, args, print_queue, error_queue): full_size = getsize(path) segment_queue = Queue(10000) segment_threads = [QueueFunctionThread(segment_queue, - _segment_job, create_connection()) for _ in xrange(10)] + _segment_job, create_connection()) for _junk in + xrange(10)] for thread in segment_threads: thread.start() segment = 0 @@ -1569,7 +1570,7 @@ def st_upload(options, args, print_queue, error_queue): 'container': scontainer, 'obj': delobj['name']}) if not segment_queue.empty(): segment_threads = [QueueFunctionThread(segment_queue, - _segment_job, create_connection()) for _ in + _segment_job, create_connection()) for _junk in xrange(10)] for thread in segment_threads: thread.start() @@ -1603,7 +1604,7 @@ def st_upload(options, args, print_queue, error_queue): create_connection = lambda: Connection(options.auth, options.user, options.key, preauthurl=url, preauthtoken=token, snet=options.snet) object_threads = [QueueFunctionThread(object_queue, _object_job, - create_connection()) for _ in xrange(10)] + create_connection()) for _junk in xrange(10)] for thread in object_threads: thread.start() conn = create_connection() diff --git a/bin/swift-auth-to-swauth b/bin/swift-auth-to-swauth index a84c6cd1a5..1b2d80eda0 100755 --- a/bin/swift-auth-to-swauth +++ b/bin/swift-auth-to-swauth @@ -25,7 +25,7 @@ if __name__ == '__main__': gettext.install('swift', unicode=1) if len(argv) != 4 or argv[1] != '-K': exit('Syntax: %s -K <super_admin_key> <path to auth.db>' % argv[0]) - _, _, super_admin_key, auth_db = argv + _junk, _junk, super_admin_key, auth_db = argv call(['swauth-prep', '-K', super_admin_key]) conn = sqlite3.connect(auth_db) for account, cfaccount, user, password, admin, reseller_admin in \ diff --git a/bin/swift-bench b/bin/swift-bench 
index 447d82724d..3c167ee06f 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -105,7 +105,7 @@ if __name__ == '__main__': else: conf = CONF_DEFAULTS parser.set_defaults(**conf) - options, _ = parser.parse_args() + options, _junk = parser.parse_args() if options.concurrency is not '': options.put_concurrency = options.concurrency options.get_concurrency = options.concurrency diff --git a/bin/swift-init b/bin/swift-init index 1a6b272345..cdbde28d4d 100755 --- a/bin/swift-init +++ b/bin/swift-init @@ -32,7 +32,7 @@ GRACEFUL_SHUTDOWN_SERVERS = ['account-server', 'container-server', MAX_DESCRIPTORS = 32768 MAX_MEMORY = (1024 * 1024 * 1024) * 2 # 2 GB -_, server, command = sys.argv +_junk, server, command = sys.argv if server == 'all': servers = ALL_SERVERS else: @@ -155,7 +155,7 @@ def do_stop(server, graceful=False): except OSError: pass for pid_file, pid in pfiles: - for _ in xrange(150): # 15 seconds + for _junk in xrange(150): # 15 seconds if not os.path.exists('/proc/%s' % pid): break time.sleep(0.1) diff --git a/bin/swift-stats-populate b/bin/swift-stats-populate index ba531ddc87..d957853633 100755 --- a/bin/swift-stats-populate +++ b/bin/swift-stats-populate @@ -127,7 +127,7 @@ if __name__ == '__main__': next_report += 2 while need_to_queue >= 1: container = 'stats_container_dispersion_%s' % uuid4() - part, _ = container_ring.get_nodes(account, container) + part, _junk = container_ring.get_nodes(account, container) if part in parts_left: coropool.spawn(put_container, connpool, container, report) sleep() @@ -152,7 +152,7 @@ if __name__ == '__main__': next_report += 2 while need_to_queue >= 1: obj = 'stats_object_dispersion_%s' % uuid4() - part, _ = object_ring.get_nodes(account, container, obj) + part, _junk = object_ring.get_nodes(account, container, obj) if part in parts_left: coropool.spawn(put_object, connpool, container, obj, report) sleep() diff --git a/bin/swift-stats-report b/bin/swift-stats-report index f2504280ba..4c47b404de 100755 --- 
a/bin/swift-stats-report +++ b/bin/swift-stats-report @@ -107,7 +107,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options): found = False error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node) try: - attempts, _ = direct_client.retry( + attempts, _junk = direct_client.retry( direct_client.direct_head_object, node, part, account, container, obj, error_log=error_log, retries=options.retries) @@ -160,7 +160,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options): print 'Containers Missing' print '-' * 78 for container in sorted(containers_missing_replicas.keys()): - part, _ = container_ring.get_nodes(account, container) + part, _junk = container_ring.get_nodes(account, container) for node in containers_missing_replicas[container]: print 'http://%s:%s/%s/%s/%s/%s' % (node['ip'], node['port'], node['device'], part, account, container) @@ -170,8 +170,8 @@ def audit(coropool, connpool, account, container_ring, object_ring, options): print 'Objects Missing' print '-' * 78 for opath in sorted(objects_missing_replicas.keys()): - _, container, obj = opath.split('/', 2) - part, _ = object_ring.get_nodes(account, container, obj) + _junk, container, obj = opath.split('/', 2) + part, _junk = object_ring.get_nodes(account, container, obj) for node in objects_missing_replicas[opath]: print 'http://%s:%s/%s/%s/%s/%s/%s' % (node['ip'], node['port'], node['device'], part, account, container, @@ -200,7 +200,7 @@ def container_dispersion_report(coropool, connpool, account, container_ring, for node in nodes: error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node) try: - attempts, _ = direct_client.retry( + attempts, _junk = direct_client.retry( direct_client.direct_head_container, node, part, account, container, error_log=error_log, retries=options.retries) @@ -284,7 +284,7 @@ def object_dispersion_report(coropool, connpool, account, object_ring, options): for node in nodes: error_log = 
get_error_log('%(ip)s:%(port)s/%(device)s' % node) try: - attempts, _ = direct_client.retry( + attempts, _junk = direct_client.retry( direct_client.direct_head_object, node, part, account, container, obj, error_log=error_log, retries=options.retries) diff --git a/swift/account/reaper.py b/swift/account/reaper.py index d31558b9c6..dd0d4b3890 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -229,7 +229,7 @@ class AccountReaper(Daemon): if not containers: break try: - for (container, _, _, _) in containers: + for (container, _junk, _junk, _junk) in containers: self.container_pool.spawn(self.reap_container, account, partition, nodes, container) self.container_pool.waitall() diff --git a/swift/auth/server.py b/swift/auth/server.py index 32c768f212..413c03cce1 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -435,7 +435,7 @@ YOU HAVE A FEW OPTIONS: :param request: webob.Request object """ try: - _, token = split_path(request.path, minsegs=2) + _junk, token = split_path(request.path, minsegs=2) except ValueError: return HTTPBadRequest() # Retrieves (TTL, account, user, cfaccount) if valid, False otherwise @@ -478,7 +478,8 @@ YOU HAVE A FEW OPTIONS: :param request: webob.Request object """ try: - _, account_name, user_name = split_path(request.path, minsegs=3) + _junk, account_name, user_name = \ + split_path(request.path, minsegs=3) except ValueError: return HTTPBadRequest() create_reseller_admin = \ diff --git a/swift/common/client.py b/swift/common/client.py index 8d144735ee..fc4376abfa 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -76,7 +76,7 @@ except ImportError: res = [] consts = {'true': True, 'false': False, 'null': None} string = '(' + comments.sub('', string) + ')' - for type, val, _, _, _ in \ + for type, val, _junk, _junk, _junk in \ generate_tokens(StringIO(string).readline): if (type == OP and val not in '[]{}:,()-') or \ (type == NAME and val not in consts): diff --git a/swift/common/db.py 
b/swift/common/db.py index b3c80dbc8c..9cbcd63713 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -932,7 +932,7 @@ class ContainerBroker(DatabaseBroker): if not row: return [] max_rowid = row['ROWID'] - for _ in xrange(min(max_count, max_rowid)): + for _junk in xrange(min(max_count, max_rowid)): row = conn.execute(''' SELECT name FROM object WHERE ROWID >= ? AND +deleted = 0 LIMIT 1 @@ -1435,7 +1435,7 @@ class AccountBroker(DatabaseBroker): if not row: return [] max_rowid = row['ROWID'] - for _ in xrange(min(max_count, max_rowid)): + for _junk in xrange(min(max_count, max_rowid)): row = conn.execute(''' SELECT name FROM container WHERE ROWID >= ? AND +deleted = 0 diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 105098c807..7516bb785f 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -299,8 +299,8 @@ class Swauth(object): req.start_time = time() handler = None try: - version, account, user, _ = split_path(req.path_info, minsegs=1, - maxsegs=4, rest_with_last=True) + version, account, user, _junk = split_path(req.path_info, + minsegs=1, maxsegs=4, rest_with_last=True) except ValueError: return HTTPNotFound(request=req) if version in ('v1', 'v1.0', 'auth'): diff --git a/swift/common/middleware/swift3.py b/swift/common/middleware/swift3.py index 9a6f9c382d..00610420ad 100644 --- a/swift/common/middleware/swift3.py +++ b/swift/common/middleware/swift3.py @@ -399,7 +399,8 @@ class Swift3Middleware(object): h += header.lower() + ":" + str(req.headers[header]) + "\n" h += req.path try: - account, user, _ = req.headers['Authorization'].split(' ')[-1].split(':') + account, user, _junk = \ + req.headers['Authorization'].split(' ')[-1].split(':') except: return None, None token = base64.urlsafe_b64encode(h) diff --git a/swift/common/ring/builder.py b/swift/common/ring/builder.py index 5b66b8a9bf..3f728e307a 100644 --- a/swift/common/ring/builder.py +++ b/swift/common/ring/builder.py @@ 
-239,7 +239,7 @@ class RingBuilder(object): (sum(d['parts'] for d in self.devs if d is not None), self.parts * self.replicas)) if stats: - dev_usage = array('I', (0 for _ in xrange(len(self.devs)))) + dev_usage = array('I', (0 for _junk in xrange(len(self.devs)))) for part in xrange(self.parts): zones = {} for replica in xrange(self.replicas): @@ -342,8 +342,9 @@ class RingBuilder(object): '%08x.%04x' % (dev['parts_wanted'], randint(0, 0xffff)) available_devs = sorted((d for d in self.devs if d is not None), key=lambda x: x['sort_key']) - self._replica2part2dev = [array('H') for _ in xrange(self.replicas)] - for _ in xrange(self.parts): + self._replica2part2dev = \ + [array('H') for _junk in xrange(self.replicas)] + for _junk in xrange(self.parts): other_zones = array('H') for replica in xrange(self.replicas): index = len(available_devs) - 1 @@ -365,7 +366,7 @@ class RingBuilder(object): index = mid + 1 available_devs.insert(index, dev) other_zones.append(dev['zone']) - self._last_part_moves = array('B', (0 for _ in xrange(self.parts))) + self._last_part_moves = array('B', (0 for _junk in xrange(self.parts))) self._last_part_moves_epoch = int(time()) for dev in self.devs: del dev['sort_key'] diff --git a/swift/obj/server.py b/swift/obj/server.py index aabece3176..e3a6068c79 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -577,7 +577,7 @@ class ObjectController(object): if suffix: recalculate_hashes(path, suffix.split('-')) return Response() - _, hashes = get_hashes(path, do_listdir=False) + _junk, hashes = get_hashes(path, do_listdir=False) return Response(body=pickle.dumps(hashes)) def __call__(self, env, start_response): diff --git a/swift/stats/account_stats.py b/swift/stats/account_stats.py index 91d31f39ad..859151d238 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -87,11 +87,11 @@ class AccountStat(Daemon): broker = AccountBroker(db_path) if not broker.is_deleted(): (account_name, - _, _, _, + _junk, _junk, _junk, 
container_count, object_count, bytes_used, - _, _) = broker.get_info() + _junk, _junk) = broker.get_info() line_data = '"%s",%d,%d,%d\n' % ( account_name, container_count, object_count, bytes_used) diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index f8938ddbc2..8174fb265e 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -365,7 +365,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count): results = [] in_queue = multiprocessing.Queue() out_queue = multiprocessing.Queue() - for _ in range(worker_count): + for _junk in range(worker_count): p = multiprocessing.Process(target=collate_worker, args=(processor_args, in_queue, @@ -374,7 +374,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count): results.append(p) for x in logs_to_process: in_queue.put(x) - for _ in range(worker_count): + for _junk in range(worker_count): in_queue.put(None) count = 0 while True: diff --git a/swift/stats/stats_processor.py b/swift/stats/stats_processor.py index dc07d85199..95dba7604c 100644 --- a/swift/stats/stats_processor.py +++ b/swift/stats/stats_processor.py @@ -26,7 +26,7 @@ class StatsLogProcessor(object): data_object_name): '''generate hourly groupings of data from one stats log file''' account_totals = {} - year, month, day, hour, _ = data_object_name.split('/') + year, month, day, hour, _junk = data_object_name.split('/') for line in obj_stream: if not line: continue diff --git a/test/unit/auth/test_server.py b/test/unit/auth/test_server.py index bd63b44b12..4060766d65 100644 --- a/test/unit/auth/test_server.py +++ b/test/unit/auth/test_server.py @@ -119,7 +119,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Storage-User': 'tester', 'X-Storage-Pass': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) def 
test_validate_token_expired(self): @@ -134,7 +134,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Storage-User': 'tester', 'X-Storage-Pass': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) auth_server.time = lambda: 1 + self.controller.token_life self.assertEquals(self.controller.validate_token(token), False) @@ -318,7 +318,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Storage-User': 'tester', 'X-Storage-Pass': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) def test_auth_SOSO_good_Mosso_headers(self): @@ -330,7 +330,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) def test_auth_SOSO_bad_Mosso_headers(self): @@ -438,7 +438,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) def test_auth_Mosso_good_SOSO_header_names(self): @@ -450,7 +450,7 @@ class TestAuthServer(unittest.TestCase): headers={'X-Storage-User': 'test:tester', 'X-Storage-Pass': 'testing'})) token = res.headers['x-storage-token'] - ttl, _, _, _ = self.controller.validate_token(token) + ttl, _junk, _junk, _junk = self.controller.validate_token(token) self.assert_(ttl > 0, repr(ttl)) def test_basic_logging(self): @@ -712,7 +712,7 @@ class 
TestAuthServer(unittest.TestCase): res = self.controller.handle_auth(Request.blank('/v1.0', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'})) - _, _, _, stgact = \ + _junk, _junk, _junk, stgact = \ self.controller.validate_token(res.headers['x-auth-token']) self.assertEquals(stgact, '') @@ -723,7 +723,7 @@ class TestAuthServer(unittest.TestCase): res = self.controller.handle_auth(Request.blank('/v1.0', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'})) - _, _, _, vstgact = \ + _junk, _junk, _junk, vstgact = \ self.controller.validate_token(res.headers['x-auth-token']) self.assertEquals(stgact, vstgact) @@ -734,7 +734,7 @@ class TestAuthServer(unittest.TestCase): res = self.controller.handle_auth(Request.blank('/v1.0', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'})) - _, _, _, stgact = \ + _junk, _junk, _junk, stgact = \ self.controller.validate_token(res.headers['x-auth-token']) self.assertEquals(stgact, '.reseller_admin') diff --git a/test/unit/common/middleware/test_auth.py b/test/unit/common/middleware/test_auth.py index cabc7a9523..6565a0af69 100644 --- a/test/unit/common/middleware/test_auth.py +++ b/test/unit/common/middleware/test_auth.py @@ -95,7 +95,7 @@ class Logger(object): self.error_value = (msg, args, kwargs) def exception(self, msg, *args, **kwargs): - _, exc, _ = sys.exc_info() + _junk, exc, _junk = sys.exc_info() self.exception_value = (msg, '%s %s' % (exc.__class__.__name__, str(exc)), args, kwargs) diff --git a/test/unit/common/test_client.py b/test/unit/common/test_client.py index 233ec429f7..e6e1abb1dc 100644 --- a/test/unit/common/test_client.py +++ b/test/unit/common/test_client.py @@ -35,10 +35,10 @@ class TestHttpHelpers(unittest.TestCase): def test_http_connection(self): url = 'http://www.test.com' - _, conn = c.http_connection(url) + _junk, conn = c.http_connection(url) self.assertTrue(isinstance(conn, 
c.HTTPConnection)) url = 'https://www.test.com' - _, conn = c.http_connection(url) + _junk, conn = c.http_connection(url) self.assertTrue(isinstance(conn, c.HTTPSConnection)) url = 'ftp://www.test.com' self.assertRaises(c.ClientException, c.http_connection, url) diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index 092944c4be..a9b46760fd 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -142,7 +142,7 @@ class TestContainerUpdater(unittest.TestCase): bindsock = listen(('127.0.0.1', 0)) def spawn_accepts(): events = [] - for _ in xrange(2): + for _junk in xrange(2): sock, addr = bindsock.accept() events.append(spawn(accept, sock, addr, 201)) return events @@ -195,7 +195,7 @@ class TestContainerUpdater(unittest.TestCase): bindsock = listen(('127.0.0.1', 0)) def spawn_accepts(): events = [] - for _ in xrange(2): + for _junk in xrange(2): with Timeout(3): sock, addr = bindsock.accept() events.append(spawn(accept, sock, addr)) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 9b5d85bc0f..e237f8221a 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1154,7 +1154,7 @@ class TestObjectController(unittest.TestCase): self.assert_status_map(controller.HEAD, (503, 200, 200), 200) self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2) self.assert_('last_error' in controller.app.object_ring.devs[0]) - for _ in xrange(self.app.error_suppression_limit): + for _junk in xrange(self.app.error_suppression_limit): self.assert_status_map(controller.HEAD, (503, 503, 503), 503) self.assertEquals(controller.app.object_ring.devs[0]['errors'], self.app.error_suppression_limit + 1) @@ -2590,7 +2590,7 @@ class TestContainerController(unittest.TestCase): self.assertEquals( controller.app.container_ring.devs[0]['errors'], 2) self.assert_('last_error' in controller.app.container_ring.devs[0]) - for _ in 
xrange(self.app.error_suppression_limit): + for _junk in xrange(self.app.error_suppression_limit): self.assert_status_map(controller.HEAD, (200, 503, 503, 503), 503) self.assertEquals(controller.app.container_ring.devs[0]['errors'], From 727a84d0dc13a1f6eb552d99cbea8441c4210430 Mon Sep 17 00:00:00 2001 From: Colin Nicholson <colin.nicholson@iomart.com> Date: Thu, 20 Jan 2011 11:55:52 +0000 Subject: [PATCH 135/199] Check account starts with correct case version of reseller_prefix. --- etc/proxy-server.conf-sample | 1 + swift/common/middleware/domain_remap.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index a3f64f8415..09ed568292 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -118,6 +118,7 @@ use = egg:swift#ratelimit use = egg:swift#domain_remap # storage_domain = example.com # path_root = v1 +# reseller_prefix = AUTH [filter:catch_errors] use = egg:swift#catch_errors diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index 4812182587..d6163a40d8 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -27,6 +27,10 @@ class DomainRemapMiddleware(object): account.storageurl/path_root/container/object gets translated to account.storageurl/path_root/account/container/object + + Browsers can convert a url to lowercase, so check that reseller_prefix + is the correct case and fix if necessary + """ def __init__(self, app, conf): @@ -35,6 +39,7 @@ class DomainRemapMiddleware(object): if self.storage_domain and self.storage_domain[0] != '.': self.storage_domain = '.' 
+ self.storage_domain self.path_root = conf.get('path_root', 'v1').strip('/') + self.reseller_prefix = conf.get('reseller_prefix','AUTH'); def __call__(self, env, start_response): if not self.storage_domain: @@ -58,6 +63,9 @@ class DomainRemapMiddleware(object): return resp(env, start_response) if '_' not in account and '-' in account: account = account.replace('-', '_', 1) + if account.lower().startswith(self.reseller_prefix.lower()) and not account.startswith(self.reseller_prefix): + account_suffix = account[len(self.reseller_prefix):] + account = self.reseller_prefix + account_suffix path = env['PATH_INFO'].strip('/') new_path_parts = ['', self.path_root, account] if container: @@ -78,3 +86,4 @@ def filter_factory(global_conf, **local_conf): def domain_filter(app): return DomainRemapMiddleware(app, conf) return domain_filter + From e76598fa58773e7ce2a2cdc40b7586262f3bb0c9 Mon Sep 17 00:00:00 2001 From: Colin Nicholson <colin.nicholson@iomart.com> Date: Thu, 20 Jan 2011 15:50:55 +0000 Subject: [PATCH 136/199] Wrapped long line --- swift/common/middleware/domain_remap.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index d6163a40d8..de1cbc3cad 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -63,7 +63,8 @@ class DomainRemapMiddleware(object): return resp(env, start_response) if '_' not in account and '-' in account: account = account.replace('-', '_', 1) - if account.lower().startswith(self.reseller_prefix.lower()) and not account.startswith(self.reseller_prefix): + if account.lower().startswith(self.reseller_prefix.lower()) and \ + not account.startswith(self.reseller_prefix): account_suffix = account[len(self.reseller_prefix):] account = self.reseller_prefix + account_suffix path = env['PATH_INFO'].strip('/') From 66bef83cbc9c77bb4930ed2d54cb88ed9fa649d8 Mon Sep 17 00:00:00 2001 From: Colin Nicholson 
<colin.nicholson@iomart.com> Date: Thu, 20 Jan 2011 16:01:37 +0000 Subject: [PATCH 137/199] also catch NXDOMAIN exception --- swift/common/middleware/cname_lookup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py index e48d209e54..950562b834 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -17,6 +17,7 @@ from webob import Request from webob.exc import HTTPBadRequest import dns.resolver from dns.exception import DNSException +from dns.resolver import NXDOMAIN from swift.common.utils import cache_from_env, get_logger @@ -34,7 +35,7 @@ def lookup_cname(domain): # pragma: no cover result = answer.items[0].to_text() result = result.rstrip('.') return ttl, result - except DNSException: + except (DNSException, NXDOMAIN): return 0, None From 74a39c0f26ea5d1770958afd2a445ef4d6aecdf1 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Thu, 20 Jan 2011 14:15:05 -0600 Subject: [PATCH 138/199] added NoAnswer error catching --- swift/common/middleware/cname_lookup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py index 950562b834..f13155c1fe 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -17,7 +17,7 @@ from webob import Request from webob.exc import HTTPBadRequest import dns.resolver from dns.exception import DNSException -from dns.resolver import NXDOMAIN +from dns.resolver import NXDOMAIN, NoAnswer from swift.common.utils import cache_from_env, get_logger @@ -35,7 +35,7 @@ def lookup_cname(domain): # pragma: no cover result = answer.items[0].to_text() result = result.rstrip('.') return ttl, result - except (DNSException, NXDOMAIN): + except (DNSException, NXDOMAIN, NoAnswer): return 0, None From 92e336462259d404eb42f9db88ca691c413faaac 
Mon Sep 17 00:00:00 2001 From: Chris Wedgwood <cw@f00f.org> Date: Thu, 20 Jan 2011 14:57:06 -0800 Subject: [PATCH 139/199] Make sure we pass strings correctly to functions that will behave badly otherwise (fix suggested by Chuck Thier). Tweak account & container name output slightly (makes cut & paste easier). --- bin/swift-account-audit | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bin/swift-account-audit b/bin/swift-account-audit index ec71a05f45..c80a5ed49a 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -73,7 +73,7 @@ class Auditor(object): def audit_object(self, account, container, name): path = '/%s/%s/%s' % (account, container, name) - part, nodes = self.object_ring.get_nodes(account, container, name) + part, nodes = self.object_ring.get_nodes(account, container.encode('utf-8'), name.encode('utf-8')) container_listing = self.audit_container(account, container) consistent = True if name not in container_listing: @@ -109,7 +109,7 @@ class Auditor(object): etags.append(resp.getheader('ETag')) else: conn = http_connect(node['ip'], node['port'], - node['device'], part, 'HEAD', path, {}) + node['device'], part, 'HEAD', path.encode('utf-8'), {}) resp = conn.getresponse() if resp.status // 100 != 2: self.object_not_found += 1 @@ -144,7 +144,7 @@ class Auditor(object): if (account, name) in self.list_cache: return self.list_cache[(account, name)] self.in_progress[(account, name)] = Event() - print 'Auditing container "%s"...' 
% name + print 'Auditing container "%s"' % name path = '/%s/%s' % (account, name) account_listing = self.audit_account(account) consistent = True @@ -162,7 +162,7 @@ class Auditor(object): try: conn = http_connect(node['ip'], node['port'], node['device'], part, 'GET', path, {}, - 'format=json&marker=%s' % quote(marker)) + 'format=json&marker=%s' % quote(marker.encode('utf-8'))) resp = conn.getresponse() if resp.status // 100 != 2: self.container_not_found += 1 @@ -220,7 +220,7 @@ class Auditor(object): if account in self.list_cache: return self.list_cache[account] self.in_progress[account] = Event() - print "Auditing account %s..." % account + print 'Auditing account "%s"' % account consistent = True path = '/%s' % account part, nodes = self.account_ring.get_nodes(account) From 4b4f07a0398d00e81db8b6eac648ec25dbc7b7ee Mon Sep 17 00:00:00 2001 From: Chris Wedgwood <cw@f00f.org> Date: Thu, 20 Jan 2011 16:11:23 -0800 Subject: [PATCH 140/199] Additional utf-8 encoding fixes to deal with containers. --- bin/swift-account-audit | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bin/swift-account-audit b/bin/swift-account-audit index c80a5ed49a..ac0f18c2c7 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -151,7 +151,7 @@ class Auditor(object): if name not in account_listing: consistent = False print " Container %s not in account listing!" 
% path - part, nodes = self.container_ring.get_nodes(account, name) + part, nodes = self.container_ring.get_nodes(account, name.encode('utf-8')) rec_d = {} responses = {} for node in nodes: @@ -161,7 +161,7 @@ class Auditor(object): node_id = node['id'] try: conn = http_connect(node['ip'], node['port'], node['device'], - part, 'GET', path, {}, + part, 'GET', path.encode('utf-8'), {}, 'format=json&marker=%s' % quote(marker.encode('utf-8'))) resp = conn.getresponse() if resp.status // 100 != 2: @@ -233,7 +233,7 @@ class Auditor(object): try: conn = http_connect(node['ip'], node['port'], node['device'], part, 'GET', path, {}, - 'format=json&marker=%s' % quote(marker)) + 'format=json&marker=%s' % quote(marker.encode('utf-8'))) resp = conn.getresponse() if resp.status // 100 != 2: self.account_not_found += 1 From 59f996b552d9ef1c8fe0ee7486c7143e3ebd38ec Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Thu, 20 Jan 2011 17:05:44 -0800 Subject: [PATCH 141/199] fixing rate limiting to allow for catch up --- doc/source/deployment_guide.rst | 1 + doc/source/ratelimit.rst | 5 +++++ etc/object-server.conf-sample | 1 + etc/proxy-server.conf-sample | 2 ++ swift/common/middleware/ratelimit.py | 5 +++-- swift/common/utils.py | 8 ++++++-- swift/obj/auditor.py | 2 +- test/unit/common/test_utils.py | 20 +++++++++++--------- 8 files changed, 30 insertions(+), 14 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 40854b0a1f..133f8043df 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -229,6 +229,7 @@ Option Default Description log_name object-auditor Label used when logging log_facility LOG_LOCAL0 Syslog log facility log_level INFO Logging level +log_time 3600 Frequency of status logs in seconds. files_per_second 20 Maximum files audited per second. Should be tuned according to individual system specs. 0 is unlimited. 
diff --git a/doc/source/ratelimit.rst b/doc/source/ratelimit.rst index 80db870773..4924b71153 100644 --- a/doc/source/ratelimit.rst +++ b/doc/source/ratelimit.rst @@ -30,6 +30,11 @@ max_sleep_time_seconds 60 App will immediately return a 498 response log_sleep_time_seconds 0 To allow visibility into rate limiting set this value > 0 and all sleeps greater than the number will be logged. +rate_buffer_seconds 5 Number of seconds the rate counter can + drop and be allowed to catch up (at a + faster than listed rate). A larger number + will result in larger spikes in rate but + better average accuracy. account_ratelimit 0 If set, will limit all requests to /account_name and PUTs to /account_name/container_name. Number is in diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample index cc80c18c07..92e37ed58c 100644 --- a/etc/object-server.conf-sample +++ b/etc/object-server.conf-sample @@ -57,3 +57,4 @@ use = egg:swift#object # log_name = object-auditor # files_per_second = 20 # bytes_per_second = 10000000 +# log_time = 3600 \ No newline at end of file diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index a3f64f8415..6b31d9dafe 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -99,6 +99,8 @@ use = egg:swift#ratelimit # max_sleep_time_seconds = 60 # log_sleep_time_seconds of 0 means disabled # log_sleep_time_seconds = 0 +# allows for slow rates (e.g. running up to 5 sec's behind) to catch up. 
+# rate_buffer_seconds = 5 # account_ratelimit of 0 means disabled # account_ratelimit = 0 diff --git a/swift/common/middleware/ratelimit.py b/swift/common/middleware/ratelimit.py index c0827da88b..cc2c8b1417 100644 --- a/swift/common/middleware/ratelimit.py +++ b/swift/common/middleware/ratelimit.py @@ -44,6 +44,7 @@ class RateLimitMiddleware(object): self.log_sleep_time_seconds = float(conf.get('log_sleep_time_seconds', 0)) self.clock_accuracy = int(conf.get('clock_accuracy', 1000)) + self.rate_buffer_seconds = int(conf.get('rate_buffer_seconds', 5)) self.ratelimit_whitelist = [acc.strip() for acc in conf.get('account_whitelist', '').split(',') if acc.strip()] @@ -140,8 +141,8 @@ class RateLimitMiddleware(object): running_time_m = self.memcache_client.incr(key, delta=time_per_request_m) need_to_sleep_m = 0 - request_time_limit = now_m + (time_per_request_m * max_rate) - if running_time_m < now_m: + if (now_m - running_time_m > + self.rate_buffer_seconds * self.clock_accuracy): next_avail_time = int(now_m + time_per_request_m) self.memcache_client.set(key, str(next_avail_time), serialize=False) diff --git a/swift/common/utils.py b/swift/common/utils.py index 299980493a..80f8125e1c 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -820,7 +820,7 @@ def audit_location_generator(devices, datadir, mount_check=True, logger=None): yield path, device, partition -def ratelimit_sleep(running_time, max_rate, incr_by=1): +def ratelimit_sleep(running_time, max_rate, incr_by=1, rate_buffer=5): ''' Will eventlet.sleep() for the appropriate time so that the max_rate is never exceeded. If max_rate is 0, will not ratelimit. The @@ -834,13 +834,17 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1): :param incr_by: How much to increment the counter. Useful if you want to ratelimit 1024 bytes/sec and have differing sizes of requests. Must be >= 0. 
+ :param rate_buffer: Number of seconds the rate counter can drop and be + allowed to catch up (at a faster than listed rate). + A larger number will result in larger spikes in rate + but better average accuracy. ''' if not max_rate or incr_by <= 0: return running_time clock_accuracy = 1000.0 now = time.time() * clock_accuracy time_per_request = clock_accuracy * (float(incr_by) / max_rate) - if running_time < now: + if now - running_time > rate_buffer * clock_accuracy: running_time = now elif running_time - now > time_per_request: eventlet.sleep((running_time - now) / clock_accuracy) diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 62fc747e86..09fdd77774 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -38,6 +38,7 @@ class ObjectAuditor(Daemon): self.max_files_per_second = float(conf.get('files_per_second', 20)) self.max_bytes_per_second = float(conf.get('bytes_per_second', 10000000)) + self.log_time = int(conf.get('log_time', 3600)) self.files_running_time = 0 self.bytes_running_time = 0 self.bytes_processed = 0 @@ -46,7 +47,6 @@ class ObjectAuditor(Daemon): self.passes = 0 self.quarantines = 0 self.errors = 0 - self.log_time = 3600 # once an hour def run_forever(self): """Run the object audit until stopped.""" diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 1f5a94edd5..ee95f5f129 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -456,15 +456,6 @@ log_name = yarr''' # make sure its accurate to 10th of a second self.assertTrue(abs(25 - (time.time() - start) * 100) < 10) - def test_ratelimit_sleep_with_sleep(self): - running_time = 0 - start = time.time() - for i in range(25): - running_time = utils.ratelimit_sleep(running_time, 50) - time.sleep(1.0 / 75) - # make sure its accurate to 10th of a second - self.assertTrue(abs(50 - (time.time() - start) * 100) < 10) - def test_ratelimit_sleep_with_incr(self): running_time = 0 start = time.time() @@ -477,6 +468,17 @@ 
log_name = yarr''' total += i self.assertTrue(abs(50 - (time.time() - start) * 100) < 10) + def test_ratelimit_sleep_with_sleep(self): + running_time = 0 + start = time.time() + sleeps = [0] * 7 + [.2] * 3 + [0] * 30 + for i in sleeps: + running_time = utils.ratelimit_sleep(running_time, 40, + rate_buffer = 1) + time.sleep(i) + # make sure its accurate to 10th of a second + self.assertTrue(abs(100 - (time.time() - start) * 100) < 10) + if __name__ == '__main__': unittest.main() From 189a3584f0fdff35ffd38e8194ce2e3d7a8de87a Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Thu, 20 Jan 2011 17:07:01 -0800 Subject: [PATCH 142/199] pep8 --- test/unit/common/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index ee95f5f129..7b30ac768d 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -474,7 +474,7 @@ log_name = yarr''' sleeps = [0] * 7 + [.2] * 3 + [0] * 30 for i in sleeps: running_time = utils.ratelimit_sleep(running_time, 40, - rate_buffer = 1) + rate_buffer=1) time.sleep(i) # make sure its accurate to 10th of a second self.assertTrue(abs(100 - (time.time() - start) * 100) < 10) From 643476c3b082e1960db8ce25c4bbfcbcff9f388c Mon Sep 17 00:00:00 2001 From: Colin Nicholson <colin.nicholson@iomart.com> Date: Fri, 21 Jan 2011 10:11:38 +0000 Subject: [PATCH 143/199] changed domain_remap to handle multiple reseller prefixes --- swift/common/middleware/domain_remap.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index de1cbc3cad..6d0f423e36 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -39,7 +39,7 @@ class DomainRemapMiddleware(object): if self.storage_domain and self.storage_domain[0] != '.': self.storage_domain = '.' 
+ self.storage_domain self.path_root = conf.get('path_root', 'v1').strip('/') - self.reseller_prefix = conf.get('reseller_prefix','AUTH'); + self.reseller_prefixes = conf.get('reseller_prefixes','AUTH').split(','); def __call__(self, env, start_response): if not self.storage_domain: @@ -63,10 +63,12 @@ class DomainRemapMiddleware(object): return resp(env, start_response) if '_' not in account and '-' in account: account = account.replace('-', '_', 1) - if account.lower().startswith(self.reseller_prefix.lower()) and \ - not account.startswith(self.reseller_prefix): - account_suffix = account[len(self.reseller_prefix):] - account = self.reseller_prefix + account_suffix + for reseller_prefix in self.reseller_prefixes: + if account.lower().startswith(reseller_prefix.lower()): + if not account.startswith(reseller_prefix): + account_suffix = account[len(reseller_prefix):] + account = reseller_prefix + account_suffix + break path = env['PATH_INFO'].strip('/') new_path_parts = ['', self.path_root, account] if container: From 217198b83b69095724c8cb8aae6746fbbfbe8556 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Fri, 21 Jan 2011 12:43:50 -0800 Subject: [PATCH 144/199] container-updater: temporary account update suppression on errors --- etc/container-server.conf-sample | 2 ++ swift/container/updater.py | 44 +++++++++++++++++++++++++---- test/unit/container/test_updater.py | 1 + 3 files changed, 41 insertions(+), 6 deletions(-) diff --git a/etc/container-server.conf-sample b/etc/container-server.conf-sample index fb250708fe..96c1f7b3cb 100644 --- a/etc/container-server.conf-sample +++ b/etc/container-server.conf-sample @@ -38,6 +38,8 @@ use = egg:swift#container # conn_timeout = 0.5 # slowdown will sleep that amount between containers # slowdown = 0.01 +# Seconds to suppress updating an account that has generated an error +# account_suppression_time = 60 [container-auditor] # log_name = container-auditor diff --git a/swift/container/updater.py 
b/swift/container/updater.py index d6b1beb2b1..c5d927f2b9 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -19,6 +19,7 @@ import signal import sys import time from random import random, shuffle +from tempfile import mkstemp from eventlet import spawn, patcher, Timeout @@ -51,6 +52,10 @@ class ContainerUpdater(Daemon): self.no_changes = 0 self.successes = 0 self.failures = 0 + self.account_suppressions = {} + self.account_suppression_time = \ + float(conf.get('account_suppression_time', 60)) + self.new_account_suppressions = None def get_account_ring(self): """Get the account ring. Load it if it hasn't been yet.""" @@ -88,21 +93,41 @@ class ContainerUpdater(Daemon): while True: self.logger.info(_('Begin container update sweep')) begin = time.time() - pids = [] + now = time.time() + expired_suppressions = \ + [a for a, u in self.account_suppressions.iteritems() if u < now] + for account in expired_suppressions: + del self.account_suppressions[account] + pid2filename = {} # read from account ring to ensure it's fresh self.get_account_ring().get_nodes('') for path in self.get_paths(): - while len(pids) >= self.concurrency: - pids.remove(os.wait()[0]) + while len(pid2filename) >= self.concurrency: + pid = os.wait()[0] + try: + with open(pid2filename[pid], 'r') as tmpfile: + for line in tmpfile: + account, until = line.split() + until = float(until) + self.account_suppressions[account] = until + except: + self.logger.exception(_('ERROR with pid2filename ' + '%(pid)s %(filename)s: ') % {'pid': pid, + 'filename': pid2filename[pid]}) + os.unlink(pid2filename[pid]) + del pid2filename[pid] + fd, tmpfilename = mkstemp() + os.close(fd) pid = os.fork() if pid: - pids.append(pid) + pid2filename[pid] = tmpfilename else: signal.signal(signal.SIGTERM, signal.SIG_DFL) patcher.monkey_patch(all=False, socket=True) self.no_changes = 0 self.successes = 0 self.failures = 0 + self.new_account_suppressions = open(tmpfilename, 'w') forkbegin = time.time() 
self.container_sweep(path) elapsed = time.time() - forkbegin @@ -114,8 +139,8 @@ class ContainerUpdater(Daemon): 'success': self.successes, 'fail': self.failures, 'no_change': self.no_changes}) sys.exit() - while pids: - pids.remove(os.wait()[0]) + while pid2filename: + del pid2filename[os.wait()[0]] elapsed = time.time() - begin self.logger.info(_('Container update sweep completed: %.02fs'), elapsed) @@ -165,6 +190,8 @@ class ContainerUpdater(Daemon): # definitely doesn't have up to date statistics. if float(info['put_timestamp']) <= 0: return + if self.account_suppressions.get(info['account'], 0) > time.time(): + return if info['put_timestamp'] > info['reported_put_timestamp'] or \ info['delete_timestamp'] > info['reported_delete_timestamp'] \ or info['object_count'] != info['reported_object_count'] or \ @@ -195,6 +222,11 @@ class ContainerUpdater(Daemon): self.logger.debug( _('Update report failed for %(container)s %(dbfile)s'), {'container': container, 'dbfile': dbfile}) + self.account_suppressions[info['account']] = until = \ + time.time() + self.account_suppression_time + if self.new_account_suppressions: + print >>self.new_account_suppressions, \ + info['account'], until else: self.no_changes += 1 diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index b7dbe6dd6d..9ee265a566 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -78,6 +78,7 @@ class TestContainerUpdater(unittest.TestCase): 'interval': '1', 'concurrency': '1', 'node_timeout': '15', + 'account_suppression_time': 0 }) cu.run_once() containers_dir = os.path.join(self.sda1, container_server.DATADIR) From 7964eee5b3ff9e3e762c846cf44e5b8570e3763d Mon Sep 17 00:00:00 2001 From: Colin Nicholson <colin.nicholson@iomart.com> Date: Fri, 21 Jan 2011 22:32:43 +0000 Subject: [PATCH 145/199] update proxy-server.conf-sample for domain_remap reseller prefix list --- etc/proxy-server.conf-sample | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index 09ed568292..7cb44fca8f 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -118,7 +118,7 @@ use = egg:swift#ratelimit use = egg:swift#domain_remap # storage_domain = example.com # path_root = v1 -# reseller_prefix = AUTH +# reseller_prefixes = AUTH [filter:catch_errors] use = egg:swift#catch_errors From d3f6e88baad176a5b6ad521ecb93b133eb13a04f Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Fri, 21 Jan 2011 16:28:58 -0800 Subject: [PATCH 146/199] fixes after code walk through --- swift/common/middleware/ratelimit.py | 58 +++++++++---------- test/unit/common/middleware/test_ratelimit.py | 3 +- 2 files changed, 30 insertions(+), 31 deletions(-) diff --git a/swift/common/middleware/ratelimit.py b/swift/common/middleware/ratelimit.py index cc2c8b1417..4657b6abcd 100644 --- a/swift/common/middleware/ratelimit.py +++ b/swift/common/middleware/ratelimit.py @@ -20,7 +20,7 @@ from swift.common.utils import split_path, cache_from_env, get_logger from swift.proxy.server import get_container_memcache_key -class MaxSleepTimeHit(Exception): +class MaxSleepTimeHitError(Exception): pass @@ -32,6 +32,8 @@ class RateLimitMiddleware(object): configurable. 
""" + BLACK_LIST_SLEEP = 1 + def __init__(self, app, conf, logger=None): self.app = app if logger: @@ -39,18 +41,16 @@ class RateLimitMiddleware(object): else: self.logger = get_logger(conf) self.account_ratelimit = float(conf.get('account_ratelimit', 0)) - self.max_sleep_time_seconds = float(conf.get('max_sleep_time_seconds', - 60)) - self.log_sleep_time_seconds = float(conf.get('log_sleep_time_seconds', - 0)) + self.max_sleep_time_seconds = \ + float(conf.get('max_sleep_time_seconds', 60)) + self.log_sleep_time_seconds = \ + float(conf.get('log_sleep_time_seconds', 0)) self.clock_accuracy = int(conf.get('clock_accuracy', 1000)) self.rate_buffer_seconds = int(conf.get('rate_buffer_seconds', 5)) self.ratelimit_whitelist = [acc.strip() for acc in - conf.get('account_whitelist', '').split(',') - if acc.strip()] + conf.get('account_whitelist', '').split(',') if acc.strip()] self.ratelimit_blacklist = [acc.strip() for acc in - conf.get('account_blacklist', '').split(',') - if acc.strip()] + conf.get('account_blacklist', '').split(',') if acc.strip()] self.memcache_client = None conf_limits = [] for conf_key in conf.keys(): @@ -93,8 +93,7 @@ class RateLimitMiddleware(object): return None def get_ratelimitable_key_tuples(self, req_method, account_name, - container_name=None, - obj_name=None): + container_name=None, obj_name=None): """ Returns a list of key (used in memcache), ratelimit tuples. Keys should be checked in order. 
@@ -106,19 +105,20 @@ class RateLimitMiddleware(object): """ keys = [] if self.account_ratelimit and account_name and ( - not (container_name or obj_name) or - (container_name and not obj_name and req_method == 'PUT')): + not (container_name or obj_name) or + (container_name and not obj_name and + req_method in ('PUT', 'DELETE'))): keys.append(("ratelimit/%s" % account_name, self.account_ratelimit)) if account_name and container_name and ( - (not obj_name and req_method in ('GET', 'HEAD')) or - (obj_name and req_method in ('PUT', 'DELETE'))): + (not obj_name and req_method in ('GET', 'HEAD')) or + (obj_name and req_method in ('PUT', 'DELETE'))): container_size = None memcache_key = get_container_memcache_key(account_name, container_name) container_info = self.memcache_client.get(memcache_key) - if type(container_info) == dict: + if isinstance(container_info, dict): container_size = container_info.get('container_size', 0) container_rate = self.get_container_maxrate(container_size) if container_rate: @@ -130,11 +130,11 @@ class RateLimitMiddleware(object): def _get_sleep_time(self, key, max_rate): ''' Returns the amount of time (a float in seconds) that the app - should sleep. Throws a MaxSleepTimeHit exception if maximum - sleep time is exceeded. + should sleep. :param key: a memcache key :param max_rate: maximum rate allowed in requests per second + :raises: MaxSleepTimeHitError if max sleep time is exceeded. 
''' now_m = int(round(time.time() * self.clock_accuracy)) time_per_request_m = int(round(self.clock_accuracy / max_rate)) @@ -142,19 +142,20 @@ class RateLimitMiddleware(object): delta=time_per_request_m) need_to_sleep_m = 0 if (now_m - running_time_m > - self.rate_buffer_seconds * self.clock_accuracy): + self.rate_buffer_seconds * self.clock_accuracy): next_avail_time = int(now_m + time_per_request_m) self.memcache_client.set(key, str(next_avail_time), serialize=False) - elif running_time_m - now_m - time_per_request_m > 0: - need_to_sleep_m = running_time_m - now_m - time_per_request_m + else: + need_to_sleep_m = \ + max(running_time_m - now_m - time_per_request_m, 0) max_sleep_m = self.max_sleep_time_seconds * self.clock_accuracy if max_sleep_m - need_to_sleep_m <= self.clock_accuracy * 0.01: # treat as no-op decrement time self.memcache_client.decr(key, delta=time_per_request_m) - raise MaxSleepTimeHit("Max Sleep Time Exceeded: %s" % - need_to_sleep_m) + raise MaxSleepTimeHitError("Max Sleep Time Exceeded: %s" % + need_to_sleep_m) return float(need_to_sleep_m) / self.clock_accuracy @@ -169,26 +170,25 @@ class RateLimitMiddleware(object): ''' if account_name in self.ratelimit_blacklist: self.logger.error(_('Returning 497 because of blacklisting')) + eventlet.sleep(self.BLACK_LIST_SLEEP) return Response(status='497 Blacklisted', body='Your account has been blacklisted', request=req) if account_name in self.ratelimit_whitelist: return None for key, max_rate in self.get_ratelimitable_key_tuples( - req.method, - account_name, - container_name=container_name, - obj_name=obj_name): + req.method, account_name, container_name=container_name, + obj_name=obj_name): try: need_to_sleep = self._get_sleep_time(key, max_rate) if self.log_sleep_time_seconds and \ need_to_sleep > self.log_sleep_time_seconds: - self.logger.info(_("Ratelimit sleep log: %(sleep)s for " + self.logger.warning(_("Ratelimit sleep log: %(sleep)s for " "%(account)s/%(container)s/%(object)s"), {'sleep': 
need_to_sleep, 'account': account_name, 'container': container_name, 'object': obj_name}) if need_to_sleep > 0: eventlet.sleep(need_to_sleep) - except MaxSleepTimeHit, e: + except MaxSleepTimeHitError, e: self.logger.error(_('Returning 498 because of ops rate ' 'limiting (Max Sleep) %s') % str(e)) error_resp = Response(status='498 Rate Limited', diff --git a/test/unit/common/middleware/test_ratelimit.py b/test/unit/common/middleware/test_ratelimit.py index 3f993a0402..497179cad1 100644 --- a/test/unit/common/middleware/test_ratelimit.py +++ b/test/unit/common/middleware/test_ratelimit.py @@ -224,6 +224,7 @@ class TestRateLimit(unittest.TestCase): 'account_whitelist': 'a', 'account_blacklist': 'b'} self.test_ratelimit = dummy_filter_factory(conf_dict)(FakeApp()) + self.test_ratelimit.BLACK_LIST_SLEEP = 0 ratelimit.http_connect = mock_http_connect(204) req = Request.blank('/v/b/c') req.environ['swift.cache'] = FakeMemcache() @@ -402,7 +403,5 @@ class TestRateLimit(unittest.TestCase): self._run(make_app_call, num_calls, current_rate) - - if __name__ == '__main__': unittest.main() From eeade255f37511b194c0b4730d74a36be66843f6 Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Fri, 21 Jan 2011 16:51:19 -0800 Subject: [PATCH 147/199] fix unit test --- test/unit/common/middleware/test_ratelimit.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/unit/common/middleware/test_ratelimit.py b/test/unit/common/middleware/test_ratelimit.py index 497179cad1..b930180e5a 100644 --- a/test/unit/common/middleware/test_ratelimit.py +++ b/test/unit/common/middleware/test_ratelimit.py @@ -95,13 +95,13 @@ class FakeApp(object): class FakeLogger(object): # a thread safe logger - def error(self, msg): + def error(self, *args, **kwargs): pass - def info(self, msg): + def info(self, *args, **kwargs): pass - def warning(self, msg): + def warning(self, *args, **kwargs): pass @@ -261,6 +261,7 @@ class TestRateLimit(unittest.TestCase): # 
making clock less accurate for nosetests running slow self.test_ratelimit = dummy_filter_factory(conf_dict)(FakeApp()) ratelimit.http_connect = mock_http_connect(204) + self.test_ratelimit.log_sleep_time_seconds = .00001 req = Request.blank('/v/a') req.environ['swift.cache'] = FakeMemcache() begin = time.time() From fe1befe91e5d178e0cf8303857edb43175fd2f3b Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sat, 22 Jan 2011 10:01:43 -0800 Subject: [PATCH 148/199] Doc update --- doc/source/deployment_guide.rst | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 40854b0a1f..ab84ae1550 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -299,19 +299,25 @@ reclaim_age 604800 Time elapsed in seconds before a [container-updater] -================== ================= ======================================= -Option Default Description ------------------- ----------------- --------------------------------------- -log_name container-updater Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Logging level -interval 300 Minimum time for a pass to take -concurrency 4 Number of updater workers to spawn -node_timeout 3 Request timeout to external services -conn_timeout 0.5 Connection timeout to external services -slowdown 0.01 Time in seconds to wait between - containers -================== ================= ======================================= +======================== ================= ================================== +Option Default Description +------------------------ ----------------- ---------------------------------- +log_name container-updater Label used when logging +log_facility LOG_LOCAL0 Syslog log facility +log_level INFO Logging level +interval 300 Minimum time for a pass to take +concurrency 4 Number of updater workers to spawn +node_timeout 3 Request timeout 
to external + services +conn_timeout 0.5 Connection timeout to external + services +slowdown 0.01 Time in seconds to wait between + containers +account_suppression_time 60 Seconds to suppress updating an + account that has generated an + error (timeout, not yet found, + etc.) +======================== ================= ================================== [container-auditor] From 778cb9dedc96f774242e38b301f35c89e002c0a0 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sun, 23 Jan 2011 10:42:31 -0800 Subject: [PATCH 149/199] In-depth documentation on paste.deploy configuration files --- doc/source/deployment_guide.rst | 77 +++++++++++++++++++++++++++++++-- 1 file changed, 74 insertions(+), 3 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 40854b0a1f..c0047847e5 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -134,9 +134,80 @@ can be found in the :doc:`Ring Overview <overview_ring>`. General Server Configuration ---------------------------- -Swift uses paste.deploy to manage server configurations. Default configuration -options are set in the `[DEFAULT]` section, and any options specified there -can be overridden in any of the other sections. +Swift uses paste.deploy (http://pythonpaste.org/deploy/) to manage server +configurations. Default configuration options are set in the `[DEFAULT]` +section, and any options specified there can be overridden in any of the other +sections BUT ONLY BY USING THE SYNTAX ``set option_name = value``. This is the +unfortunate way paste.deploy works and I'll try to explain it in full. 
+ +First, here's an example paste.deploy configuration file:: + + [DEFAULT] + name1 = globalvalue + name2 = globalvalue + name3 = globalvalue + set name4 = globalvalue + + [pipeline:main] + pipeline = myapp + + [app:myapp] + use = egg:mypkg#myapp + name2 = localvalue + set name3 = localvalue + set name5 = localvalue + name6 = localvalue + +The resulting configuration that myapp receives is:: + + global {'__file__': '/etc/mypkg/wsgi.conf', 'here': '/etc/mypkg', + 'name1': 'globalvalue', + 'name2': 'globalvalue', + 'name3': 'localvalue', + 'name4': 'globalvalue', + 'name5': 'localvalue', + 'set name4': 'globalvalue'} + local {'name6': 'localvalue'} + +So, `name1` got the global value which is fine since it's only in the `DEFAULT` +section anyway. + +`name2` got the global value from `DEFAULT` even though it's seemingly +overridden in the `app:myapp` subsection. This is just the unfortunate way +paste.deploy works (at least at the time of this writing.) + +`name3` got the local value from the `app:myapp` subsection because it using +the special paste.deploy syntax of ``set option_name = value``. So, if you want +a default value for most app/filters but want to overridde it in one +subsection, this is how you do it. + +`name4` got the global value from `DEFAULT` since it's only in that section +anyway. But, since we used the ``set`` syntax in the `DEFAULT` section even +though we shouldn't, notice we also got a ``set name4`` variable. Weird, but +probably not harmful. + +`name5` got the local value from the `app:myapp` subsection since it's only +there anyway, but notice that it is in the global configuration and not the +local configuration. This is because we used the ``set`` syntax to set the +value. Again, weird, but not harmful since Swift just treats the two sets of +configuration values as one set anyway. 
+ +`name6` got the local value from `app:myapp` subsection since it's only there, +and since we didn't use the ``set`` syntax, it's only in the local +configuration and not the global one. Though, as indicated above, there is no +special distinction with Swift. + +That's quite an explanation for something that should be so much simpler, but +it might be important to know how paste.deploy interprets configuration files. +The main rule to remember when working with Swift configuration files is: + +.. note:: + + Always use the ``set option_name = value`` syntax in subsections if the + option is also set in the ``[DEFAULT]`` section. Don't get in the habit of + always using the ``set`` syntax or you'll probably mess up your + non-paste.deploy configuration files. + --------------------------- Object Server Configuration From e274741f25d7da02199398d287a58961bba2190d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sun, 23 Jan 2011 10:50:55 -0800 Subject: [PATCH 150/199] Remove the misleading "Always" --- doc/source/deployment_guide.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index c0047847e5..900cb8577b 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -203,10 +203,10 @@ The main rule to remember when working with Swift configuration files is: .. note:: - Always use the ``set option_name = value`` syntax in subsections if the - option is also set in the ``[DEFAULT]`` section. Don't get in the habit of - always using the ``set`` syntax or you'll probably mess up your - non-paste.deploy configuration files. + Use the ``set option_name = value`` syntax in subsections if the option is + also set in the ``[DEFAULT]`` section. Don't get in the habit of always + using the ``set`` syntax or you'll probably mess up your non-paste.deploy + configuration files. 
--------------------------- From cc638e7ed5243496e1cde80cdf9dfa5af139678e Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sun, 23 Jan 2011 12:32:10 -0800 Subject: [PATCH 151/199] get_logger now uses python's log routing to separate multiple log_names in the same process --- swift/common/utils.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 299980493a..661e23eeb3 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -382,7 +382,7 @@ class NamedFormatter(logging.Formatter): return msg -def get_logger(conf, name=None, log_to_console=False): +def get_logger(conf, name=None, log_to_console=False, log_route=None): """ Get the current system logger using config settings. @@ -396,31 +396,39 @@ def get_logger(conf, name=None, log_to_console=False): :param name: Name of the logger :param log_to_console: Add handler which writes to console on stderr """ - root_logger = logging.getLogger() + if not hasattr(get_logger, 'root_logger_configured'): + get_logger.root_logger_configured = True + get_logger(conf, name, log_to_console, log_route='root') + if name is None: + name = conf.get('log_name', 'swift') if conf else 'swift' + if not log_route: + log_route = name + if log_route == 'root': + logger = logging.getLogger() + else: + logger = logging.getLogger(log_route) if hasattr(get_logger, 'handler') and get_logger.handler: - root_logger.removeHandler(get_logger.handler) + logger.removeHandler(get_logger.handler) get_logger.handler.close() get_logger.handler = None if log_to_console: # check if a previous call to get_logger already added a console logger if hasattr(get_logger, 'console') and get_logger.console: - root_logger.removeHandler(get_logger.console) + logger.removeHandler(get_logger.console) get_logger.console = logging.StreamHandler(sys.__stderr__) - root_logger.addHandler(get_logger.console) + logger.addHandler(get_logger.console) if 
conf is None: - root_logger.setLevel(logging.INFO) - adapted_logger = LogAdapter(root_logger) + logger.setLevel(logging.INFO) + adapted_logger = LogAdapter(logger) return adapted_logger - if name is None: - name = conf.get('log_name', 'swift') get_logger.handler = SysLogHandler(address='/dev/log', facility=getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), SysLogHandler.LOG_LOCAL0)) - root_logger.addHandler(get_logger.handler) - root_logger.setLevel( + logger.addHandler(get_logger.handler) + logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) - adapted_logger = LogAdapter(root_logger) + adapted_logger = LogAdapter(logger) formatter = NamedFormatter(name, adapted_logger) get_logger.handler.setFormatter(formatter) if hasattr(get_logger, 'console'): From 9c3f4a17cf94771b98364e82cd230c3dc1f2afb0 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sun, 23 Jan 2011 12:58:54 -0800 Subject: [PATCH 152/199] get_logger now separates different log_levels --- swift/common/utils.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 661e23eeb3..8524ffb711 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -396,41 +396,41 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None): :param name: Name of the logger :param log_to_console: Add handler which writes to console on stderr """ + if not conf: + conf = {} if not hasattr(get_logger, 'root_logger_configured'): get_logger.root_logger_configured = True get_logger(conf, name, log_to_console, log_route='root') if name is None: - name = conf.get('log_name', 'swift') if conf else 'swift' + name = conf.get('log_name', 'swift') if not log_route: log_route = name if log_route == 'root': logger = logging.getLogger() else: logger = logging.getLogger(log_route) - if hasattr(get_logger, 'handler') and get_logger.handler: - 
logger.removeHandler(get_logger.handler) - get_logger.handler.close() - get_logger.handler = None + if not hasattr(get_logger, 'handlers'): + get_logger.handlers = {} + facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), + SysLogHandler.LOG_LOCAL0) + if facility in get_logger.handlers: + logger.removeHandler(get_logger.handlers[facility]) + get_logger.handlers[facility].close() + del get_logger.handlers[facility] if log_to_console: # check if a previous call to get_logger already added a console logger if hasattr(get_logger, 'console') and get_logger.console: logger.removeHandler(get_logger.console) get_logger.console = logging.StreamHandler(sys.__stderr__) logger.addHandler(get_logger.console) - if conf is None: - logger.setLevel(logging.INFO) - adapted_logger = LogAdapter(logger) - return adapted_logger - get_logger.handler = SysLogHandler(address='/dev/log', - facility=getattr(SysLogHandler, - conf.get('log_facility', 'LOG_LOCAL0'), - SysLogHandler.LOG_LOCAL0)) - logger.addHandler(get_logger.handler) + get_logger.handlers[facility] = \ + SysLogHandler(address='/dev/log', facility=facility) + logger.addHandler(get_logger.handlers[facility]) logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) adapted_logger = LogAdapter(logger) formatter = NamedFormatter(name, adapted_logger) - get_logger.handler.setFormatter(formatter) + get_logger.handlers[facility].setFormatter(formatter) if hasattr(get_logger, 'console'): get_logger.console.setFormatter(formatter) return adapted_logger From 4905c71669b270654aa02b69fdb4bc4d15ade7a4 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Sun, 23 Jan 2011 13:18:28 -0800 Subject: [PATCH 153/199] More doc updates for logger stuff --- doc/source/deployment_guide.rst | 36 ++++++++++---------- etc/account-server.conf-sample | 24 ++++++++++++-- etc/auth-server.conf-sample | 13 +++++--- etc/container-server.conf-sample | 21 ++++++++++-- etc/object-server.conf-sample | 22 
+++++++++--- etc/proxy-server.conf-sample | 57 +++++++++++++++++++++++++++----- 6 files changed, 133 insertions(+), 40 deletions(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 900cb8577b..59a6e8d76c 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -241,10 +241,10 @@ Option Default Description use paste.deploy entry point for the object server. For most cases, this should be `egg:swift#object`. -log_name object-server Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Logging level -log_requests True Whether or not to log each request +set log_name object-server Label used when logging +set log_facility LOG_LOCAL0 Syslog log facility +set log_level INFO Logging level +set log_requests True Whether or not to log each request user swift User to run as node_timeout 3 Request timeout to external services conn_timeout 0.5 Connection timeout to external services @@ -341,9 +341,9 @@ Option Default Description use paste.deploy entry point for the container server. For most cases, this should be `egg:swift#container`. -log_name container-server Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Logging level +set log_name container-server Label used when logging +set log_facility LOG_LOCAL0 Syslog log facility +set log_level INFO Logging level node_timeout 3 Request timeout to external services conn_timeout 0.5 Connection timeout to external services ================== ================ ======================================== @@ -428,9 +428,9 @@ Option Default Description use Entry point for paste.deploy for the account server. For most cases, this should be `egg:swift#account`. 
-log_name account-server Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Logging level +set log_name account-server Label used when logging +set log_facility LOG_LOCAL0 Syslog log facility +set log_level INFO Logging level ================== ============== ========================================== [account-replicator] @@ -509,10 +509,10 @@ use Entry point for paste.deploy for the proxy server. For most cases, this should be `egg:swift#proxy`. -log_name proxy-server Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Log level -log_headers True If True, log headers in each +set log_name proxy-server Label used when logging +set log_facility LOG_LOCAL0 Syslog log facility +set log_level INFO Log level +set log_headers True If True, log headers in each request recheck_account_existence 60 Cache timeout in seconds to send memcached for account @@ -570,10 +570,10 @@ use Entry point for auth. To use the swauth set to: `egg:swift#swauth` -log_name auth-server Label used when logging -log_facility LOG_LOCAL0 Syslog log facility -log_level INFO Log level -log_headers True If True, log headers in +set log_name auth-server Label used when logging +set log_facility LOG_LOCAL0 Syslog log facility +set log_level INFO Log level +set log_headers True If True, log headers in each request reseller_prefix AUTH The naming scope for the auth service. 
Swift diff --git a/etc/account-server.conf-sample b/etc/account-server.conf-sample index e48650f4a5..1fac948619 100644 --- a/etc/account-server.conf-sample +++ b/etc/account-server.conf-sample @@ -7,18 +7,27 @@ # swift_dir = /etc/swift # devices = /srv/node # mount_check = true +# You can specify default log routing here if you want: +# log_name = swift +# log_facility = LOG_LOCAL0 +# log_level = INFO [pipeline:main] pipeline = account-server [app:account-server] use = egg:swift#account -# log_name = account-server -# log_facility = LOG_LOCAL0 -# log_level = INFO +# You can override the default log routing for this app here: +# set log_name = account-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_requests = True [account-replicator] +# You can override the default log routing for this app here (don't use set!): # log_name = account-replicator +# log_facility = LOG_LOCAL0 +# log_level = INFO # vm_test_mode = no # log_facility = LOG_LOCAL0 # log_level = INFO @@ -36,7 +45,10 @@ use = egg:swift#account # reclaim_age = 86400 [account-stats] +# You can override the default log routing for this app here (don't use set!): # log_name = account-stats +# log_facility = LOG_LOCAL0 +# log_level = INFO # cf_account = AUTH_7abbc116-8a07-4b63-819d-02715d3e0f31 # container_name = account_stats # proxy_server_conf = /etc/swift/proxy-server.conf @@ -44,14 +56,20 @@ use = egg:swift#account # log_level = INFO [account-auditor] +# You can override the default log routing for this app here (don't use set!): # log_name = account-auditor +# log_facility = LOG_LOCAL0 +# log_level = INFO # Will audit, at most, 1 account per device per interval # interval = 1800 # log_facility = LOG_LOCAL0 # log_level = INFO [account-reaper] +# You can override the default log routing for this app here (don't use set!): # log_name = account-reaper +# log_facility = LOG_LOCAL0 +# log_level = INFO # concurrency = 25 # interval = 3600 # node_timeout = 10 diff --git 
a/etc/auth-server.conf-sample b/etc/auth-server.conf-sample index 27b6cf3e14..711f48d564 100644 --- a/etc/auth-server.conf-sample +++ b/etc/auth-server.conf-sample @@ -7,6 +7,10 @@ # swift_dir = /etc/swift # cert_file = Default is no cert; format is path like /etc/swift/auth.crt # key_file = Default is no key; format is path like /etc/swift/auth.key +# You can specify default log routing here if you want: +# log_name = swift +# log_facility = LOG_LOCAL0 +# log_level = INFO [pipeline:main] pipeline = auth-server @@ -15,11 +19,12 @@ pipeline = auth-server use = egg:swift#auth # Highly recommended to change this. super_admin_key = devauth -# log_name = auth-server -# log_facility = LOG_LOCAL0 -# log_level = INFO +# You can override the default log routing for this app here: +# set log_name = proxy-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # reseller_prefix = AUTH # default_cluster_url = http://127.0.0.1:8080/v1 # token_life = 86400 -# log_headers = False # node_timeout = 10 diff --git a/etc/container-server.conf-sample b/etc/container-server.conf-sample index fb250708fe..183f20c6a0 100644 --- a/etc/container-server.conf-sample +++ b/etc/container-server.conf-sample @@ -7,20 +7,29 @@ # swift_dir = /etc/swift # devices = /srv/node # mount_check = true +# You can specify default log routing here if you want: +# log_name = swift +# log_facility = LOG_LOCAL0 +# log_level = INFO [pipeline:main] pipeline = container-server [app:container-server] use = egg:swift#container -# log_name = container-server -# log_facility = LOG_LOCAL0 -# log_level = INFO +# You can override the default log routing for this app here: +# set log_name = container-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_requests = True # node_timeout = 3 # conn_timeout = 0.5 [container-replicator] +# You can override the default log routing for this app here (don't use set!): # log_name = container-replicator +# log_facility = 
LOG_LOCAL0 +# log_level = INFO # vm_test_mode = no # per_diff = 1000 # concurrency = 8 @@ -31,7 +40,10 @@ use = egg:swift#container # reclaim_age = 604800 [container-updater] +# You can override the default log routing for this app here (don't use set!): # log_name = container-updater +# log_facility = LOG_LOCAL0 +# log_level = INFO # interval = 300 # concurrency = 4 # node_timeout = 3 @@ -40,6 +52,9 @@ use = egg:swift#container # slowdown = 0.01 [container-auditor] +# You can override the default log routing for this app here (don't use set!): # log_name = container-auditor +# log_facility = LOG_LOCAL0 +# log_level = INFO # Will audit, at most, 1 container per device per interval # interval = 1800 diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample index cc80c18c07..1fadffb941 100644 --- a/etc/object-server.conf-sample +++ b/etc/object-server.conf-sample @@ -7,16 +7,21 @@ # swift_dir = /etc/swift # devices = /srv/node # mount_check = true +# You can specify default log routing here if you want: +# log_name = swift +# log_facility = LOG_LOCAL0 +# log_level = INFO [pipeline:main] pipeline = object-server [app:object-server] use = egg:swift#object -# log_name = object-server -# log_facility = LOG_LOCAL0 -# log_level = INFO -# log_requests = True +# You can override the default log routing for this app here: +# set log_name = object-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_requests = True # node_timeout = 3 # conn_timeout = 0.5 # network_chunk_size = 65536 @@ -27,7 +32,10 @@ use = egg:swift#object # mb_per_sync = 512 [object-replicator] +# You can override the default log routing for this app here (don't use set!): # log_name = object-replicator +# log_facility = LOG_LOCAL0 +# log_level = INFO # vm_test_mode = no # daemonize = on # run_pause = 30 @@ -45,7 +53,10 @@ use = egg:swift#object # reclaim_age = 604800 [object-updater] +# You can override the default log routing for this app here (don't use set!): # 
log_name = object-updater +# log_facility = LOG_LOCAL0 +# log_level = INFO # interval = 300 # concurrency = 1 # node_timeout = 10 @@ -54,6 +65,9 @@ use = egg:swift#object # slowdown = 0.01 [object-auditor] +# You can override the default log routing for this app here (don't use set!): # log_name = object-auditor +# log_facility = LOG_LOCAL0 +# log_level = INFO # files_per_second = 20 # bytes_per_second = 10000000 diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index a3f64f8415..cca2f8c620 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -7,6 +7,10 @@ # user = swift # cert_file = /etc/swift/proxy.crt # key_file = /etc/swift/proxy.key +# You can specify default log routing here if you want: +# log_name = swift +# log_facility = LOG_LOCAL0 +# log_level = INFO [pipeline:main] # For DevAuth: @@ -16,10 +20,11 @@ pipeline = catch_errors healthcheck cache ratelimit auth proxy-server [app:proxy-server] use = egg:swift#proxy -# log_name = proxy-server -# log_facility = LOG_LOCAL0 -# log_level = INFO -# log_headers = False +# You can override the default log routing for this app here: +# set log_name = proxy-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # recheck_account_existence = 60 # recheck_container_existence = 60 # object_chunk_size = 8192 @@ -39,6 +44,11 @@ use = egg:swift#proxy # Only needed for DevAuth [filter:auth] use = egg:swift#auth +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # The reseller prefix will verify a token begins with this prefix before even # attempting to validate it with the external authentication server. 
Also, with # authorization, only Swift storage accounts with this prefix will be @@ -54,10 +64,11 @@ use = egg:swift#auth # Only needed for Swauth [filter:swauth] use = egg:swift#swauth -# log_name = auth-server -# log_facility = LOG_LOCAL0 -# log_level = INFO -# log_headers = False +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # The reseller prefix will verify a token begins with this prefix before even # attempting to validate it. Also, with authorization, only Swift storage # accounts with this prefix will be authorized by this middleware. Useful if @@ -82,15 +93,30 @@ super_admin_key = swauthkey [filter:healthcheck] use = egg:swift#healthcheck +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False [filter:cache] use = egg:swift#memcache +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # Default for memcache_servers is below, but you can specify multiple servers # with the format: 10.1.2.3:11211,10.1.2.4:11211 # memcache_servers = 127.0.0.1:11211 [filter:ratelimit] use = egg:swift#ratelimit +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # clock_accuracy should represent how accurate the proxy servers' system clocks # are with each other. 1000 means that all the proxies' clock are accurate to # each other within 1 millisecond. 
No ratelimit should be higher than the @@ -116,14 +142,29 @@ use = egg:swift#ratelimit [filter:domain_remap] use = egg:swift#domain_remap +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # storage_domain = example.com # path_root = v1 [filter:catch_errors] use = egg:swift#catch_errors +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False [filter:cname_lookup] # Note: this middleware requires python-dnspython use = egg:swift#cname_lookup +# You can override the default log routing for this filter here: +# set log_name = auth-server +# set log_facility = LOG_LOCAL0 +# set log_level = INFO +# set log_headers = False # storage_domain = example.com # lookup_depth = 1 From 918d5feaa87aafce756bd5fdc8d64de38f74adc2 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Mon, 24 Jan 2011 13:24:47 -0600 Subject: [PATCH 154/199] changed domain_remap to use an exclusive list in reseller_prefixes. 
added tests --- swift/common/middleware/domain_remap.py | 42 ++++++++++++---- .../common/middleware/test_domain_remap.py | 48 ++++++++++++------- 2 files changed, 64 insertions(+), 26 deletions(-) diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index 6d0f423e36..081aa67b9a 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -28,9 +28,23 @@ class DomainRemapMiddleware(object): account.storageurl/path_root/container/object gets translated to account.storageurl/path_root/account/container/object - Browsers can convert a url to lowercase, so check that reseller_prefix - is the correct case and fix if necessary - + Browsers can convert a host header to lowercase, so check that reseller + prefix on the account is the correct case. This is done by comparing the + items in the reseller_prefixes config option to the found prefix. If they + match except for case, the item from reseller_prefixes will be used + instead of the found reseller prefix. The reseller_prefixes list is + exclusive. If defined, any request with an account prefix not in that list + will be ignored by this middleware. reseller_prefixes defaults to 'AUTH'. + + Note that this middleware requires that container names and account names + (except as described above) must be DNS-compatible. This means that the + account name created in the system and the containers created by users + cannot exceede 63 characters or have UTF-8 characters. These are + restrictions over and above what swift requires and are not explicitly + checked. Simply put, the this middleware will do a best-effort attempt to + derive account and container names from elements in the domain name and + put those derived values into the URL path (leaving the Host header + unchanged). 
""" def __init__(self, app, conf): @@ -39,7 +53,11 @@ class DomainRemapMiddleware(object): if self.storage_domain and self.storage_domain[0] != '.': self.storage_domain = '.' + self.storage_domain self.path_root = conf.get('path_root', 'v1').strip('/') - self.reseller_prefixes = conf.get('reseller_prefixes','AUTH').split(','); + prefixes = conf.get('reseller_prefixes', 'AUTH') + self.reseller_prefixes = [x.strip() for x in prefixes.split(',') + if x.strip()] + self.reseller_prefixes_lower = [x.lower() + for x in self.reseller_prefixes] def __call__(self, env, start_response): if not self.storage_domain: @@ -63,12 +81,16 @@ class DomainRemapMiddleware(object): return resp(env, start_response) if '_' not in account and '-' in account: account = account.replace('-', '_', 1) - for reseller_prefix in self.reseller_prefixes: - if account.lower().startswith(reseller_prefix.lower()): - if not account.startswith(reseller_prefix): - account_suffix = account[len(reseller_prefix):] - account = reseller_prefix + account_suffix - break + account_reseller_prefix = account.split('_', 1)[0].lower() + if account_reseller_prefix not in self.reseller_prefixes_lower: + # account prefix is not in config list. bail. 
+ return self.app(env, start_response) + prefix_index = self.reseller_prefixes_lower.index( + account_reseller_prefix) + real_prefix = self.reseller_prefixes[prefix_index] + if not account.startswith(real_prefix): + account_suffix = account[len(real_prefix):] + account = real_prefix + account_suffix path = env['PATH_INFO'].strip('/') new_path_parts = ['', self.path_root, account] if container: diff --git a/test/unit/common/middleware/test_domain_remap.py b/test/unit/common/middleware/test_domain_remap.py index fe079cbeda..b7b000a053 100644 --- a/test/unit/common/middleware/test_domain_remap.py +++ b/test/unit/common/middleware/test_domain_remap.py @@ -47,49 +47,49 @@ class TestDomainRemap(unittest.TestCase): def test_domain_remap_account(self): req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'a.example.com'}) + headers={'Host': 'AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a') + self.assertEquals(resp, '/v1/AUTH_a') req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'a-uuid.example.com'}) + headers={'Host': 'AUTH-uuid.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a_uuid') + self.assertEquals(resp, '/v1/AUTH_uuid') def test_domain_remap_account_container(self): req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'c.a.example.com'}) + headers={'Host': 'c.AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a/c') + self.assertEquals(resp, '/v1/AUTH_a/c') def test_domain_remap_extra_subdomains(self): req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'x.y.c.a.example.com'}) + headers={'Host': 'x.y.c.AUTH_a.example.com'}) resp = self.app(req.environ, start_response) self.assertEquals(resp, ['Bad domain in host header']) def test_domain_remap_account_with_path_root(self): req = Request.blank('/v1', 
environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'a.example.com'}) + headers={'Host': 'AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a') + self.assertEquals(resp, '/v1/AUTH_a') def test_domain_remap_account_container_with_path_root(self): req = Request.blank('/v1', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'c.a.example.com'}) + headers={'Host': 'c.AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a/c') + self.assertEquals(resp, '/v1/AUTH_a/c') def test_domain_remap_account_container_with_path(self): req = Request.blank('/obj', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'c.a.example.com'}) + headers={'Host': 'c.AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a/c/obj') + self.assertEquals(resp, '/v1/AUTH_a/c/obj') def test_domain_remap_account_container_with_path_root_and_path(self): req = Request.blank('/v1/obj', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'c.a.example.com'}) + headers={'Host': 'c.AUTH_a.example.com'}) resp = self.app(req.environ, start_response) - self.assertEquals(resp, '/v1/a/c/obj') + self.assertEquals(resp, '/v1/AUTH_a/c/obj') def test_domain_remap_account_matching_ending_not_domain(self): req = Request.blank('/dontchange', environ={'REQUEST_METHOD': 'GET'}, @@ -101,7 +101,23 @@ class TestDomainRemap(unittest.TestCase): self.app = domain_remap.DomainRemapMiddleware(FakeApp(), {'storage_domain': ''}) req = Request.blank('/test', environ={'REQUEST_METHOD': 'GET'}, - headers={'Host': 'c.a.example.com'}) + headers={'Host': 'c.AUTH_a.example.com'}) + resp = self.app(req.environ, start_response) + self.assertEquals(resp, '/test') + + def test_domain_remap_configured_with_prefixes(self): + conf = {'reseller_prefixes': 'PREFIX'} + self.app = domain_remap.DomainRemapMiddleware(FakeApp(), conf) + req = Request.blank('/test', environ={'REQUEST_METHOD': 'GET'}, + 
headers={'Host': 'c.prefix_uuid.example.com'}) + resp = self.app(req.environ, start_response) + self.assertEquals(resp, '/v1/PREFIX_uuid/c/test') + + def test_domain_remap_configured_with_bad_prefixes(self): + conf = {'reseller_prefixes': 'UNKNOWN'} + self.app = domain_remap.DomainRemapMiddleware(FakeApp(), conf) + req = Request.blank('/test', environ={'REQUEST_METHOD': 'GET'}, + headers={'Host': 'c.prefix_uuid.example.com'}) resp = self.app(req.environ, start_response) self.assertEquals(resp, '/test') From 2ccc0028518de26c4e8ce2b16d278ee5c7fc8b17 Mon Sep 17 00:00:00 2001 From: Colin Nicholson <colin.nicholson@iomart.com> Date: Mon, 24 Jan 2011 19:37:50 +0000 Subject: [PATCH 155/199] fix small typo in domain_remap docs --- swift/common/middleware/domain_remap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index 081aa67b9a..e959276733 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -39,7 +39,7 @@ class DomainRemapMiddleware(object): Note that this middleware requires that container names and account names (except as described above) must be DNS-compatible. This means that the account name created in the system and the containers created by users - cannot exceede 63 characters or have UTF-8 characters. These are + cannot exceed 63 characters or have UTF-8 characters. These are restrictions over and above what swift requires and are not explicitly checked. 
Simply put, the this middleware will do a best-effort attempt to derive account and container names from elements in the domain name and From 264fc584b2979e4d6269c51e480efc4399c2f195 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 24 Jan 2011 13:09:06 -0800 Subject: [PATCH 156/199] swauth: log s3 in place of token when in use; changed a couple GETs to HEADs; pep8 --- swift/common/middleware/swauth.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 0989c30bd5..dd846898bb 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -136,7 +136,8 @@ class Swauth(object): user = groups and groups.split(',', 1)[0] or '' # We know the proxy logs the token, so we augment it just a bit # to also log the authenticated user. - env['HTTP_X_AUTH_TOKEN'] = '%s,%s' % (user, token) + env['HTTP_X_AUTH_TOKEN'] = \ + '%s,%s' % (user, 's3' if s3 else token) env['swift.authorize'] = self.authorize env['swift.clean_acl'] = clean_acl else: @@ -209,7 +210,7 @@ class Swauth(object): account_id = resp.headers['x-object-meta-account-id'] else: path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp2 = self.make_request(env, 'GET', + resp2 = self.make_request(env, 'HEAD', path).get_response(self.app) if resp2.status_int // 100 != 2: return None @@ -882,11 +883,13 @@ class Swauth(object): return HTTPForbidden(request=req) path = quote('/v1/%s/%s' % (self.auth_account, account)) - resp = self.make_request(req.environ, 'GET', path).get_response(self.app) + resp = self.make_request(req.environ, 'HEAD', + path).get_response(self.app) if resp.status_int // 100 != 2: - raise Exception('Could not create user object: %s %s' % + raise Exception('Could not retrieve account id value: %s %s' % (path, resp.status)) - headers = {'X-Object-Meta-Account-Id': '%s' % resp.headers['x-container-meta-account-id']} + headers = 
{'X-Object-Meta-Account-Id': + resp.headers['x-container-meta-account-id']} # Create the object in the main auth account (this object represents # the user) path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user)) @@ -895,9 +898,10 @@ class Swauth(object): groups.append('.admin') if reseller_admin: groups.append('.reseller_admin') - resp = self.make_request(req.environ, 'PUT', path, json.dumps({'auth': - 'plaintext:%s' % key, - 'groups': [{'name': g} for g in groups]}), headers=headers).get_response(self.app) + resp = self.make_request(req.environ, 'PUT', path, + json.dumps({'auth': 'plaintext:%s' % key, + 'groups': [{'name': g} for g in groups]}), + headers=headers).get_response(self.app) if resp.status_int == 404: return HTTPNotFound(request=req) if resp.status_int // 100 != 2: From 615d90b80d9c5908a860519864eef4d75e492acf Mon Sep 17 00:00:00 2001 From: Chris Wedgwood <cw@f00f.org> Date: Mon, 24 Jan 2011 13:56:39 -0800 Subject: [PATCH 157/199] Show account names in output strings. This helps when the output comes from concurrent workers doing the checking as it's not strictly ordered. 
--- bin/swift-account-audit | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/bin/swift-account-audit b/bin/swift-account-audit index ac0f18c2c7..9a19198853 100755 --- a/bin/swift-account-audit +++ b/bin/swift-account-audit @@ -238,14 +238,13 @@ class Auditor(object): if resp.status // 100 != 2: self.account_not_found += 1 consistent = False - print " Bad status GETting account %(ip)s:%(device)s" \ - % node + print " Bad status GETting account '%s' from %ss:%ss" % (account, node['ip'], node['device']) break results = simplejson.loads(resp.read()) except Exception: self.account_exceptions += 1 consistent = False - print " Exception GETting account %(ip)s:%(device)s" % node + print " Exception GETting account '%s' on %ss:%ss" % (account, node['ip'], node['device']) break if node_id not in responses: responses[node_id] = [dict(resp.getheaders()), []] @@ -258,7 +257,7 @@ class Auditor(object): if len(set(cont_counts)) != 1: self.account_container_mismatch += 1 consistent = False - print " Account databases don't agree on number of containers." + print " Account databases for '%s' don't agree on number of containers." % account if cont_counts: print " Max: %s, Min: %s" % (max(cont_counts), min(cont_counts)) obj_counts = [int(header['x-account-object-count']) @@ -266,7 +265,7 @@ class Auditor(object): if len(set(obj_counts)) != 1: self.account_object_mismatch += 1 consistent = False - print " Account databases don't agree on number of objects." + print " Account databases for '%s' don't agree on number of objects." 
% account if obj_counts: print " Max: %s, Min: %s" % (max(obj_counts), min(obj_counts)) containers = set() From cc00bd40e0726b89ff460053e7bfb54e5055c038 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 24 Jan 2011 17:12:38 -0800 Subject: [PATCH 158/199] Fix tests to cleanup their /tmp dirs --- test/unit/container/test_server.py | 2 +- test/unit/container/test_updater.py | 2 +- test/unit/obj/test_auditor.py | 30 +++++++++++++---------------- test/unit/obj/test_server.py | 2 +- test/unit/proxy/test_server.py | 8 +++++--- 5 files changed, 21 insertions(+), 23 deletions(-) diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py index 62d62a1f87..194ffff83d 100644 --- a/test/unit/container/test_server.py +++ b/test/unit/container/test_server.py @@ -45,7 +45,7 @@ class TestContainerController(unittest.TestCase): def tearDown(self): """ Tear down for testing swift.object_server.ObjectController """ - rmtree(self.testdir, ignore_errors=1) + rmtree(os.path.dirname(self.testdir), ignore_errors=1) def test_acl_container(self): # Ensure no acl by default diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index b7dbe6dd6d..a7a2094824 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -51,7 +51,7 @@ class TestContainerUpdater(unittest.TestCase): os.mkdir(self.sda1) def tearDown(self): - rmtree(self.testdir, ignore_errors=1) + rmtree(os.path.dirname(self.testdir), ignore_errors=1) def test_creation(self): cu = container_updater.ContainerUpdater({ diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 8c1d08e522..66540a3693 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -56,7 +56,7 @@ class TestAuditor(unittest.TestCase): mount_check='false') def tearDown(self): - rmtree(self.testdir, ignore_errors=1) + rmtree(os.path.dirname(self.testdir), ignore_errors=1) def 
test_object_audit_extra_data(self): self.auditor = auditor.ObjectAuditor(self.conf) @@ -123,25 +123,21 @@ class TestAuditor(unittest.TestCase): self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_audit_no_meta(self): - self.auditor = auditor.ObjectAuditor(self.conf) cur_part = '0' disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o') - data = '0' * 1024 - etag = md5() + timestamp = str(normalize_timestamp(time.time())) + path = os.path.join(disk_file.datadir, timestamp + '.data') + mkdirs(disk_file.datadir) + fp = open(path, 'w') + fp.write('0' * 1024) + fp.close() + invalidate_hash(os.path.dirname(disk_file.datadir)) + self.auditor = auditor.ObjectAuditor(self.conf) pre_quarantines = self.auditor.quarantines - with disk_file.mkstemp() as (fd, tmppath): - os.write(fd, data) - etag.update(data) - etag = etag.hexdigest() - timestamp = str(normalize_timestamp(time.time())) - os.fsync(fd) - invalidate_hash(os.path.dirname(disk_file.datadir)) - renamer(tmppath, os.path.join(disk_file.datadir, - timestamp + '.data')) - self.auditor.object_audit( - os.path.join(disk_file.datadir, timestamp + '.data'), - 'sda', cur_part) - self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) + self.auditor.object_audit( + os.path.join(disk_file.datadir, timestamp + '.data'), + 'sda', cur_part) + self.assertEquals(self.auditor.quarantines, pre_quarantines + 1) def test_object_audit_bad_args(self): self.auditor = auditor.ObjectAuditor(self.conf) diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 735b944d2d..22d2fe20a4 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -53,7 +53,7 @@ class TestObjectController(unittest.TestCase): def tearDown(self): """ Tear down for testing swift.object_server.ObjectController """ - rmtree(self.testdir) + rmtree(os.path.dirname(self.testdir)) def test_POST_update_meta(self): """ Test swift.object_server.ObjectController.POST """ diff --git 
a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index b327b99a46..1a3a442bea 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -142,7 +142,7 @@ def teardown(): for server in _test_coros: server.kill() proxy_server.CONTAINER_LISTING_LIMIT = _orig_container_listing_limit - rmtree(_testdir) + rmtree(os.path.dirname(_testdir)) def fake_http_connect(*code_iter, **kwargs): @@ -3425,5 +3425,7 @@ class TestSegmentedIterable(unittest.TestCase): if __name__ == '__main__': setup() - unittest.main() - teardown() + try: + unittest.main() + finally: + teardown() From f57250ae11314424b393c99e1bd9e9d0049f6434 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Mon, 24 Jan 2011 21:39:00 -0800 Subject: [PATCH 159/199] pep8 and i18n fixes --- swift/auth/server.py | 60 +++++++++++++++---------- swift/common/middleware/auth.py | 10 +++-- swift/common/middleware/domain_remap.py | 3 +- swift/obj/replicator.py | 3 +- swift/stats/account_stats.py | 7 +-- swift/stats/log_processor.py | 3 +- 6 files changed, 51 insertions(+), 35 deletions(-) diff --git a/swift/auth/server.py b/swift/auth/server.py index 413c03cce1..8f1483ab27 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -149,31 +149,32 @@ class AuthController(object): previous_prefix = '' if '_' in row[0]: previous_prefix = row[0].split('_', 1)[0] - msg = _((''' + msg = (_(''' THERE ARE ACCOUNTS IN YOUR auth.db THAT DO NOT BEGIN WITH YOUR NEW RESELLER -PREFIX OF "%s". +PREFIX OF "%(reseller)s". YOU HAVE A FEW OPTIONS: - 1) RUN "swift-auth-update-reseller-prefixes %s %s", + 1. RUN "swift-auth-update-reseller-prefixes %(db_file)s %(reseller)s", "swift-init auth-server restart", AND "swift-auth-recreate-accounts -K ..." TO CREATE FRESH ACCOUNTS. OR - 2) REMOVE %s, RUN "swift-init auth-server restart", AND RUN + 2. REMOVE %(db_file)s, RUN "swift-init auth-server restart", AND RUN "swift-auth-add-user ..." TO CREATE BRAND NEW ACCOUNTS THAT WAY. 
OR - 3) ADD "reseller_prefix = %s" (WITHOUT THE QUOTES) TO YOUR + 3. ADD "reseller_prefix = %(previous)s" (WITHOUT THE QUOTES) TO YOUR proxy-server.conf IN THE [filter:auth] SECTION AND TO YOUR auth-server.conf IN THE [app:auth-server] SECTION AND RUN "swift-init proxy-server restart" AND "swift-init auth-server restart" TO REVERT BACK TO YOUR PREVIOUS RESELLER PREFIX. - %s - ''') % (self.reseller_prefix.rstrip('_'), self.db_file, - self.reseller_prefix.rstrip('_'), self.db_file, - previous_prefix, previous_prefix and ' ' or _(''' + %(note)s + ''') % {'reseller': self.reseller_prefix.rstrip('_'), + 'db_file': self.db_file, + 'previous': previous_prefix, + 'note': previous_prefix and ' ' or _(''' SINCE YOUR PREVIOUS RESELLER PREFIX WAS AN EMPTY STRING, IT IS NOT RECOMMENDED TO PERFORM OPTION 3 AS THAT WOULD MAKE SUPPORTING MULTIPLE RESELLERS MORE DIFFICULT. - ''').strip())).strip() + ''').strip()}).strip() self.logger.critical(_('CRITICAL: ') + ' '.join(msg.split())) raise Exception('\n' + msg) @@ -243,7 +244,8 @@ YOU HAVE A FEW OPTIONS: raise err def validate_s3_sign(self, request, token): - account, user, sign = request.headers['Authorization'].split(' ')[-1].split(':') + account, user, sign = \ + request.headers['Authorization'].split(' ')[-1].split(':') msg = base64.urlsafe_b64decode(unquote(token)) rv = False with self.get_conn() as conn: @@ -253,7 +255,8 @@ YOU HAVE A FEW OPTIONS: (account, user)).fetchone() rv = (84000, account, user, row[1]) if rv: - s = base64.encodestring(hmac.new(row[0], msg, sha1).digest()).strip() + s = base64.encodestring(hmac.new(row[0], msg, + sha1).digest()).strip() self.logger.info("orig %s, calc %s" % (sign, s)) if sign != s: rv = False @@ -340,10 +343,14 @@ YOU HAVE A FEW OPTIONS: 'SELECT url FROM account WHERE account = ? 
AND user = ?', (account, user)).fetchone() if row: - self.logger.info( - _('ALREADY EXISTS create_user(%s, %s, _, %s, %s) [%.02f]') % - (repr(account), repr(user), repr(admin), - repr(reseller_admin), time() - begin)) + self.logger.info(_('ALREADY EXISTS create_user(%(account)s, ' + '%(user)s, _, %(admin)s, %(reseller_admin)s) ' + '[%(elapsed).02f]') % + {'account': repr(account), + 'user': repr(user), + 'admin': repr(admin), + 'reseller_admin': repr(reseller_admin), + 'elapsed': time() - begin}) return 'already exists' row = conn.execute( 'SELECT url, cfaccount FROM account WHERE account = ?', @@ -354,10 +361,14 @@ YOU HAVE A FEW OPTIONS: else: account_hash = self.add_storage_account() if not account_hash: - self.logger.info( - _('FAILED create_user(%s, %s, _, %s, %s) [%.02f]') % - (repr(account), repr(user), repr(admin), - repr(reseller_admin), time() - begin)) + self.logger.info(_('FAILED create_user(%(account)s, ' + '%(user)s, _, %(admin)s, %(reseller_admin)s) ' + '[%(elapsed).02f]') % + {'account': repr(account), + 'user': repr(user), + 'admin': repr(admin), + 'reseller_admin': repr(reseller_admin), + 'elapsed': time() - begin}) return False url = self.default_cluster_url.rstrip('/') + '/' + account_hash conn.execute('''INSERT INTO account @@ -367,10 +378,11 @@ YOU HAVE A FEW OPTIONS: (account, url, account_hash, user, password, admin and 't' or '', reseller_admin and 't' or '')) conn.commit() - self.logger.info( - _('SUCCESS create_user(%s, %s, _, %s, %s) = %s [%.02f]') % - (repr(account), repr(user), repr(admin), repr(reseller_admin), - repr(url), time() - begin)) + self.logger.info(_('SUCCESS create_user(%(account)s, %(user)s, _, ' + '%(admin)s, %(reseller_admin)s) = %(url)s [%(elapsed).02f]') % + {'account': repr(account), 'user': repr(user), + 'admin': repr(admin), 'reseller_admin': repr(reseller_admin), + 'url': repr(url), 'elapsed': time() - begin}) return url def recreate_accounts(self): diff --git a/swift/common/middleware/auth.py 
b/swift/common/middleware/auth.py index a2c71a3070..a51788f7b7 100644 --- a/swift/common/middleware/auth.py +++ b/swift/common/middleware/auth.py @@ -59,8 +59,8 @@ class DevAuth(object): if s3 or (token and token.startswith(self.reseller_prefix)): # Note: Empty reseller_prefix will match all tokens. # Attempt to auth my token with my auth server - groups = \ - self.get_groups(env, token, memcache_client=cache_from_env(env)) + groups = self.get_groups(env, token, + memcache_client=cache_from_env(env)) if groups: env['REMOTE_USER'] = groups user = groups and groups.split(',', 1)[0] or '' @@ -154,10 +154,12 @@ class DevAuth(object): timeout=expiration) if env.get('HTTP_AUTHORIZATION'): - account, user, sign = env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':') + account, user, sign = \ + env['HTTP_AUTHORIZATION'].split(' ')[-1].split(':') cfaccount = resp.getheader('x-auth-account-suffix') path = env['PATH_INFO'] - env['PATH_INFO'] = path.replace("%s:%s" % (account, user), cfaccount, 1) + env['PATH_INFO'] = \ + path.replace("%s:%s" % (account, user), cfaccount, 1) return groups diff --git a/swift/common/middleware/domain_remap.py b/swift/common/middleware/domain_remap.py index e959276733..a6ed943bb2 100644 --- a/swift/common/middleware/domain_remap.py +++ b/swift/common/middleware/domain_remap.py @@ -35,7 +35,7 @@ class DomainRemapMiddleware(object): instead of the found reseller prefix. The reseller_prefixes list is exclusive. If defined, any request with an account prefix not in that list will be ignored by this middleware. reseller_prefixes defaults to 'AUTH'. - + Note that this middleware requires that container names and account names (except as described above) must be DNS-compatible. 
This means that the account name created in the system and the containers created by users @@ -111,4 +111,3 @@ def filter_factory(global_conf, **local_conf): def domain_filter(app): return DomainRemapMiddleware(app, conf) return domain_filter - diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index ec76fb384c..fc7e7cbd07 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -407,7 +407,8 @@ class ObjectReplicator(Daemon): conn.getresponse().read() self.suffix_sync += len(suffixes) except (Exception, Timeout): - self.logger.exception(_("Error syncing with node: %s") % node) + self.logger.exception(_("Error syncing with node: %s") % + node) self.suffix_count += len(local_hash) except (Exception, Timeout): self.logger.exception(_("Error syncing partition")) diff --git a/swift/stats/account_stats.py b/swift/stats/account_stats.py index 859151d238..6a9688831f 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -55,7 +55,8 @@ class AccountStat(Daemon): self.logger.info(_("Gathering account stats")) start = time.time() self.find_and_process() - self.logger.info(_("Gathering account stats complete (%0.2f minutes)") % + self.logger.info( + _("Gathering account stats complete (%0.2f minutes)") % ((time.time() - start) / 60)) def find_and_process(self): @@ -70,8 +71,8 @@ class AccountStat(Daemon): # Account Name, Container Count, Object Count, Bytes Used for device in os.listdir(self.devices): if self.mount_check and not check_mount(self.devices, device): - self.logger.error(_("Device %s is not mounted, skipping.") % - device) + self.logger.error( + _("Device %s is not mounted, skipping.") % device) continue accounts = os.path.join(self.devices, device, diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index 8174fb265e..531ec3e0ac 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -280,7 +280,8 @@ class LogProcessorDaemon(Daemon): logs_to_process = 
self.log_processor.get_data_list(lookback_start, lookback_end, already_processed_files) - self.logger.info(_('loaded %d files to process') % len(logs_to_process)) + self.logger.info(_('loaded %d files to process') % + len(logs_to_process)) if not logs_to_process: self.logger.info(_("Log processing done (%0.2f minutes)") % ((time.time() - start) / 60)) From 1eaae6a074d934299cfa691105e4b974733397c5 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Tue, 25 Jan 2011 16:19:38 +0000 Subject: [PATCH 160/199] Bumped version to 1.2 to get ready for release, and updated the AUTHORS file --- AUTHORS | 4 ++++ swift/__init__.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index e7bc59b72e..bf834db788 100644 --- a/AUTHORS +++ b/AUTHORS @@ -24,9 +24,13 @@ Paul Jimenez Brian K. Jones Ed Leafe Stephen Milton +Russ Nelson +Colin Nicholson Andrew Clay Shafer Monty Taylor Caleb Tennis +FUJITA Tomonori Kapil Thangavelu Conrad Weidenkeller +Chris Wedgwood Cory Wright diff --git a/swift/__init__.py b/swift/__init__.py index 0bd0062056..899047889e 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,5 +1,5 @@ import gettext -__version__ = '1.1.0' +__version__ = '1.2.0' gettext.install('swift') From 9b8a8b1791d900db79a529383d9a4544c4a05dfd Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Tue, 25 Jan 2011 15:21:49 -0800 Subject: [PATCH 161/199] Update to load suppressions from both os.wait points --- swift/container/updater.py | 33 +++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/swift/container/updater.py b/swift/container/updater.py index c5d927f2b9..928a53be92 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -85,6 +85,19 @@ class ContainerUpdater(Daemon): shuffle(paths) return paths + def _load_suppressions(self, filename): + try: + with open(filename, 'r') as tmpfile: + for line in tmpfile: + account, until = line.split() + until = 
float(until) + self.account_suppressions[account] = until + except: + self.logger.exception( + _('ERROR with loading suppressions from %s: ') % filename) + finally: + os.unlink(filename) + def run_forever(self): # pragma: no cover """ Run the updator continuously. @@ -105,17 +118,9 @@ class ContainerUpdater(Daemon): while len(pid2filename) >= self.concurrency: pid = os.wait()[0] try: - with open(pid2filename[pid], 'r') as tmpfile: - for line in tmpfile: - account, until = line.split() - until = float(until) - self.account_suppressions[account] = until - except: - self.logger.exception(_('ERROR with pid2filename ' - '%(pid)s %(filename)s: ') % {'pid': pid, - 'filename': pid2filename[pid]}) - os.unlink(pid2filename[pid]) - del pid2filename[pid] + self._load_suppressions(pid2filename[pid]) + finally: + del pid2filename[pid] fd, tmpfilename = mkstemp() os.close(fd) pid = os.fork() @@ -140,7 +145,11 @@ class ContainerUpdater(Daemon): 'no_change': self.no_changes}) sys.exit() while pid2filename: - del pid2filename[os.wait()[0]] + pid = os.wait()[0] + try: + self._load_suppressions(pid2filename[pid]) + finally: + del pid2filename[pid] elapsed = time.time() - begin self.logger.info(_('Container update sweep completed: %.02fs'), elapsed) From 86cb12036b4ee9554b0d011431ce0c928a6e9b4a Mon Sep 17 00:00:00 2001 From: David Goetz <david.goetz@rackspace.com> Date: Wed, 26 Jan 2011 14:31:33 -0800 Subject: [PATCH 162/199] removing blank excepts --- bin/st | 10 +++--- bin/swift-drive-audit | 2 +- bin/swift-object-info | 2 +- bin/swift-stats-populate | 4 +-- swift/account/server.py | 2 +- swift/auth/server.py | 6 ++-- swift/common/bench.py | 2 +- swift/common/client.py | 2 +- swift/common/db.py | 10 +++--- swift/common/db_replicator.py | 8 ++--- swift/common/middleware/swauth.py | 5 +-- swift/container/server.py | 5 +-- swift/container/updater.py | 6 ++-- swift/obj/server.py | 6 ++-- swift/obj/updater.py | 4 +-- swift/proxy/server.py | 34 +++++++++---------- 
swift/stats/log_processor.py | 2 +- test/probe/common.py | 2 +- test/probe/test_object_handoff.py | 10 +++--- test/unit/common/middleware/test_auth.py | 2 +- test/unit/common/middleware/test_ratelimit.py | 2 +- test/unit/common/middleware/test_swauth.py | 2 +- test/unit/common/test_db.py | 14 ++++---- test/unit/common/test_utils.py | 6 ++-- test/unit/proxy/test_server.py | 3 +- 25 files changed, 77 insertions(+), 74 deletions(-) diff --git a/bin/st b/bin/st index cab398910e..39872e8bd0 100755 --- a/bin/st +++ b/bin/st @@ -38,13 +38,13 @@ from urlparse import urlparse, urlunparse try: from eventlet import sleep -except: +except Exception: from time import sleep try: from swift.common.bufferedhttp \ import BufferedHTTPConnection as HTTPConnection -except: +except Exception: from httplib import HTTPConnection @@ -91,7 +91,7 @@ except ImportError: else: res.append(val) return eval(''.join(res), {}, consts) - except: + except Exception: raise AttributeError() @@ -1615,7 +1615,7 @@ def st_upload(options, args, print_queue, error_queue): conn.put_container(args[0]) if options.segment_size is not None: conn.put_container(args[0] + '_segments') - except: + except Exception: pass try: for arg in args[1:]: @@ -1722,7 +1722,7 @@ Example: error_thread.abort = True while error_thread.isAlive(): error_thread.join(0.01) - except: + except Exception: for thread in threading_enumerate(): thread.abort = True raise diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index 95143e8b56..e92d1e3c12 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ -89,7 +89,7 @@ if __name__ == '__main__': c = ConfigParser() try: conf_path = sys.argv[1] - except: + except Exception: print "Usage: %s CONF_FILE" % sys.argv[0].split('/')[-1] sys.exit(1) if not c.read(conf_path): diff --git a/bin/swift-object-info b/bin/swift-object-info index e7befddf8c..278a7de0f2 100755 --- a/bin/swift-object-info +++ b/bin/swift-object-info @@ -29,7 +29,7 @@ if __name__ == '__main__': sys.exit(1) 
try: ring = Ring('/etc/swift/object.ring.gz') - except: + except Exception: ring = None datafile = sys.argv[1] fp = open(datafile, 'rb') diff --git a/bin/swift-stats-populate b/bin/swift-stats-populate index ba531ddc87..483f22dbb6 100755 --- a/bin/swift-stats-populate +++ b/bin/swift-stats-populate @@ -38,7 +38,7 @@ def put_container(connpool, container, report): retries_done += conn.attempts - 1 if report: report(True) - except: + except Exception: if report: report(False) raise @@ -53,7 +53,7 @@ def put_object(connpool, container, obj, report): retries_done += conn.attempts - 1 if report: report(True) - except: + except Exception: if report: report(False) raise diff --git a/swift/account/server.py b/swift/account/server.py index a1e20c1f4f..2c83f51cc6 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -305,7 +305,7 @@ class AccountController(object): res = getattr(self, req.method)(req) else: res = HTTPMethodNotAllowed() - except: + except Exception: self.logger.exception(_('ERROR __call__ error with %(method)s' ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) diff --git a/swift/auth/server.py b/swift/auth/server.py index dac3a78a3e..a51ccdeb72 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -96,7 +96,7 @@ class AuthController(object): msg = _('No super_admin_key set in conf file! 
Exiting.') try: self.logger.critical(msg) - except: + except Exception: pass raise ValueError(msg) self.swift_dir = conf.get('swift_dir', '/etc/swift') @@ -237,7 +237,7 @@ YOU HAVE A FEW OPTIONS: except Exception, err: try: conn.close() - except: + except Exception: pass self.conn = get_db_connection(self.db_file) raise err @@ -637,7 +637,7 @@ YOU HAVE A FEW OPTIONS: else: return HTTPBadRequest(request=env)(env, start_response) response = handler(req) - except: + except Exception: self.logger.exception( _('ERROR Unhandled exception in ReST request')) return HTTPServiceUnavailable(request=req)(env, start_response) diff --git a/swift/common/bench.py b/swift/common/bench.py index 4abafeb947..943f9ebe4d 100644 --- a/swift/common/bench.py +++ b/swift/common/bench.py @@ -97,7 +97,7 @@ class Bench(object): self.logger.info(_("CannotSendRequest. Skipping...")) try: hc.close() - except: + except Exception: pass self.failures += 1 hc = self.conn_pool.create() diff --git a/swift/common/client.py b/swift/common/client.py index 9e80e9cef1..ebef5154dd 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -87,7 +87,7 @@ except ImportError: else: res.append(val) return eval(''.join(res), {}, consts) - except: + except Exception: raise AttributeError() diff --git a/swift/common/db.py b/swift/common/db.py index b3c80dbc8c..82648f9e49 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -269,7 +269,7 @@ class DatabaseBroker(object): yield conn conn.rollback() self.conn = conn - except: + except Exception: conn.close() raise @@ -288,13 +288,13 @@ class DatabaseBroker(object): conn.execute('BEGIN IMMEDIATE') try: yield True - except: + except Exception: pass try: conn.execute('ROLLBACK') conn.isolation_level = orig_isolation_level self.conn = conn - except: # pragma: no cover + except Exception: logging.exception( _('Broker error trying to rollback locked connection')) conn.close() @@ -749,7 +749,7 @@ class ContainerBroker(DatabaseBroker): timestamp, 'size': size, 
'content_type': content_type, 'etag': etag, 'deleted': deleted}) - except: + except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), {'file': self.pending_file, 'entry': entry}) @@ -1216,7 +1216,7 @@ class AccountBroker(DatabaseBroker): 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted}) - except: + except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), {'file': self.pending_file, 'entry': entry}) diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 4c479a0ed0..49756f1f7b 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -21,7 +21,7 @@ import math import time import shutil -from eventlet import GreenPool, sleep, Timeout +from eventlet import GreenPool, sleep, Timeout, TimeoutError from eventlet.green import subprocess import simplejson from webob import Response @@ -79,7 +79,7 @@ class ReplConnection(BufferedHTTPConnection): response = self.getresponse() response.data = response.read() return response - except: + except Exception: self.logger.exception( _('ERROR reading HTTP response from %s'), self.node) return None @@ -359,7 +359,7 @@ class Replicator(Daemon): except DriveNotMounted: repl_nodes.append(more_nodes.next()) self.logger.error(_('ERROR Remote drive not mounted %s'), node) - except: + except (Exception, TimeoutError): self.logger.exception(_('ERROR syncing %(file)s with node' ' %(node)s'), {'file': object_file, 'node': node}) self.stats['success' if success else 'failure'] += 1 @@ -432,7 +432,7 @@ class Replicator(Daemon): while True: try: self.run_once() - except: + except (Exception, TimeoutError): self.logger.exception(_('ERROR trying to replicate')) sleep(self.run_pause) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 105098c807..0c8440aa86 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -25,6 +25,7 @@ from urlparse 
import urlparse from uuid import uuid4 from eventlet.timeout import Timeout +from eventlet import TimeoutError from webob import Response, Request from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ HTTPCreated, HTTPForbidden, HTTPNoContent, HTTPNotFound, \ @@ -283,7 +284,7 @@ class Swauth(object): response = self.handle_request(req)(env, start_response) self.posthooklogger(env, req) return response - except: + except (Exception, TimeoutError): print "EXCEPTION IN handle: %s: %s" % (format_exc(), env) start_response('500 Server Error', [('Content-Type', 'text/plain')]) @@ -589,7 +590,7 @@ class Swauth(object): if resp.status // 100 != 2: raise Exception('Could not create account on the Swift ' 'cluster: %s %s %s' % (path, resp.status, resp.reason)) - except: + except (Exception, TimeoutError): self.logger.error(_('ERROR: Exception while trying to communicate ' 'with %(scheme)s://%(host)s:%(port)s/%(path)s'), {'scheme': self.dsc_parsed2.scheme, diff --git a/swift/container/server.py b/swift/container/server.py index 7ba375ce33..cfcdded1e4 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -24,6 +24,7 @@ from datetime import datetime import simplejson from eventlet.timeout import Timeout +from eventlet import TimeoutError from webob import Request, Response from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPConflict, \ HTTPCreated, HTTPInternalServerError, HTTPNoContent, \ @@ -118,7 +119,7 @@ class ContainerController(object): 'device': account_device, 'status': account_response.status, 'reason': account_response.reason}) - except: + except (Exception, TimeoutError): self.logger.exception(_('ERROR account update failed with ' '%(ip)s:%(port)s/%(device)s (will retry later)'), {'ip': account_ip, 'port': account_port, @@ -393,7 +394,7 @@ class ContainerController(object): res = getattr(self, req.method)(req) else: res = HTTPMethodNotAllowed() - except: + except Exception: self.logger.exception(_('ERROR __call__ error with 
%(method)s' ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) diff --git a/swift/container/updater.py b/swift/container/updater.py index d6b1beb2b1..aea65e2425 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -20,7 +20,7 @@ import sys import time from random import random, shuffle -from eventlet import spawn, patcher, Timeout +from eventlet import spawn, patcher, Timeout, TimeoutError from swift.container.server import DATADIR from swift.common.bufferedhttp import http_connect @@ -221,7 +221,7 @@ class ContainerUpdater(Daemon): 'X-Object-Count': count, 'X-Bytes-Used': bytes, 'X-Account-Override-Deleted': 'yes'}) - except: + except (Exception, TimeoutError): self.logger.exception(_('ERROR account update failed with ' '%(ip)s:%(port)s/%(device)s (will retry later): '), node) return 500 @@ -230,7 +230,7 @@ class ContainerUpdater(Daemon): resp = conn.getresponse() resp.read() return resp.status - except: + except (Exception, TimeoutError): if self.logger.getEffectiveLevel() <= logging.DEBUG: self.logger.exception( _('Exception with %(ip)s:%(port)s/%(device)s'), node) diff --git a/swift/obj/server.py b/swift/obj/server.py index 4afc38057d..c1ddd63310 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -33,7 +33,7 @@ from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ HTTPNotModified, HTTPPreconditionFailed, \ HTTPRequestTimeout, HTTPUnprocessableEntity, HTTPMethodNotAllowed from xattr import getxattr, setxattr -from eventlet import sleep, Timeout, tpool +from eventlet import sleep, Timeout, TimeoutError, tpool from swift.common.utils import mkdirs, normalize_timestamp, \ storage_directory, hash_path, renamer, fallocate, \ @@ -308,7 +308,7 @@ class ObjectController(object): 'response from %(ip)s:%(port)s/%(dev)s'), {'status': response.status, 'ip': ip, 'port': port, 'dev': contdevice}) - except: + except (Exception, TimeoutError): 
self.logger.exception(_('ERROR container update failed with ' '%(ip)s:%(port)s/%(dev)s (saving for async update later)'), {'ip': ip, 'port': port, 'dev': contdevice}) @@ -582,7 +582,7 @@ class ObjectController(object): res = getattr(self, req.method)(req) else: res = HTTPMethodNotAllowed() - except: + except Exception: self.logger.exception(_('ERROR __call__ error with %(method)s' ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) diff --git a/swift/obj/updater.py b/swift/obj/updater.py index a226d4523e..2b28ff08c5 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -20,7 +20,7 @@ import sys import time from random import random -from eventlet import patcher, Timeout +from eventlet import patcher, Timeout, TimeoutError from swift.common.bufferedhttp import http_connect from swift.common.exceptions import ConnectionTimeout @@ -202,7 +202,7 @@ class ObjectUpdater(Daemon): resp = conn.getresponse() resp.read() return resp.status - except: + except (Exception, TimeoutError): self.logger.exception(_('ERROR with remote server ' '%(ip)s:%(port)s/%(device)s'), node) return 500 diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 32c7ad9004..f872199335 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -30,7 +30,7 @@ import uuid import functools from hashlib import md5 -from eventlet import sleep +from eventlet import sleep, TimeoutError from eventlet.timeout import Timeout from webob.exc import HTTPBadRequest, HTTPMethodNotAllowed, \ HTTPNotFound, HTTPPreconditionFailed, \ @@ -383,7 +383,7 @@ class Controller(object): attempts_left -= 1 if attempts_left <= 0: break - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Account'), _('Trying to get account info for %s') % path) if self.app.memcache and result_code in (200, 404): @@ -461,7 +461,7 @@ class Controller(object): attempts_left -= 1 if attempts_left <= 0: break - except: + except 
(Exception, TimeoutError): self.exception_occurred(node, _('Container'), _('Trying to get container info for %s') % path) if self.app.memcache and result_code in (200, 404): @@ -592,7 +592,7 @@ class Controller(object): query_string=req.query_string) with Timeout(self.app.node_timeout): source = conn.getresponse() - except: + except (Exception, TimeoutError): self.exception_occurred(node, server_type, _('Trying to %(method)s %(path)s') % {'method': req.method, 'path': req.path}) @@ -624,7 +624,7 @@ class Controller(object): except GeneratorExit: res.client_disconnect = True self.app.logger.info(_('Client disconnected on read')) - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Object'), _('Trying to read during GET of %s') % req.path) raise @@ -691,7 +691,7 @@ class ObjectController(Controller): _('ERROR %(status)d %(body)s From Object Server') % {'status': response.status, 'body': body[:1024]}) return response.status, response.reason, body - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Object'), _('Trying to %(method)s %(path)s') % {'method': req.method, 'path': req.path}) @@ -998,7 +998,7 @@ class ObjectController(Controller): conn.node = node with Timeout(self.app.node_timeout): resp = conn.getexpect() - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Object'), _('Expect: 100-continue on %s') % req.path) if conn and resp: @@ -1038,7 +1038,7 @@ class ObjectController(Controller): conn.send('%x\r\n%s\r\n' % (len_chunk, chunk)) else: conn.send(chunk) - except: + except (Exception, TimeoutError): self.exception_occurred(conn.node, _('Object'), _('Trying to write to %s') % req.path) conns.remove(conn) @@ -1055,7 +1055,7 @@ class ObjectController(Controller): self.app.logger.info( _('ERROR Client read timeout (%ss)'), err.seconds) return HTTPRequestTimeout(request=req) - except: + except Exception: req.client_disconnect = True self.app.logger.exception( _('ERROR Exception causing 
client disconnect')) @@ -1083,7 +1083,7 @@ class ObjectController(Controller): 'body': bodies[-1][:1024], 'path': req.path}) elif 200 <= response.status < 300: etags.add(response.getheader('etag').strip('"')) - except: + except (Exception, TimeoutError): self.exception_occurred(conn.node, _('Object'), _('Trying to get final status of PUT to %s') % req.path) if len(etags) > 1: @@ -1294,7 +1294,7 @@ class ContainerController(Controller): if source.status == 507: self.error_limit(node) accounts.insert(0, account) - except: + except (Exception, TimeoutError): accounts.insert(0, account) self.exception_occurred(node, _('Container'), _('Trying to PUT to %s') % req.path) @@ -1350,7 +1350,7 @@ class ContainerController(Controller): bodies.append(body) elif source.status == 507: self.error_limit(node) - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Container'), _('Trying to POST %s') % req.path) if len(statuses) >= len(containers): @@ -1406,7 +1406,7 @@ class ContainerController(Controller): if source.status == 507: self.error_limit(node) accounts.insert(0, account) - except: + except (Exception, TimeoutError): accounts.insert(0, account) self.exception_occurred(node, _('Container'), _('Trying to DELETE %s') % req.path) @@ -1491,7 +1491,7 @@ class AccountController(Controller): else: if source.status == 507: self.error_limit(node) - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Account'), _('Trying to PUT to %s') % req.path) if len(statuses) >= len(accounts): @@ -1539,7 +1539,7 @@ class AccountController(Controller): bodies.append(body) elif source.status == 507: self.error_limit(node) - except: + except (Exception, TimeoutError): self.exception_occurred(node, _('Account'), _('Trying to POST %s') % req.path) if len(statuses) >= len(accounts): @@ -1584,7 +1584,7 @@ class AccountController(Controller): bodies.append(body) elif source.status == 507: self.error_limit(node) - except: + except (Exception, 
TimeoutError): self.exception_occurred(node, _('Account'), _('Trying to DELETE %s') % req.path) if len(statuses) >= len(accounts): @@ -1685,7 +1685,7 @@ class BaseApplication(object): response = self.handle_request(req)(env, start_response) self.posthooklogger(env, req) return response - except: + except Exception: print "EXCEPTION IN __call__: %s: %s" % \ (traceback.format_exc(), env) start_response('500 Server Error', diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index f8938ddbc2..884e949842 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -273,7 +273,7 @@ class LogProcessorDaemon(Daemon): already_processed_files = cPickle.loads(buf) else: already_processed_files = set() - except: + except Exception: already_processed_files = set() self.logger.debug(_('found %d processed files') % \ len(already_processed_files)) diff --git a/test/probe/common.py b/test/probe/common.py index c29f142713..08e8309a4b 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -49,7 +49,7 @@ def kill_pids(pids): for pid in pids.values(): try: kill(pid, SIGTERM) - except: + except Exception: pass diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index ba81e4c559..212fcc2c5e 100755 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -95,7 +95,7 @@ class TestObjectHandoff(unittest.TestCase): try: direct_client.direct_get_object(onode, opart, self.account, container, obj) - except: + except Exception: exc = True if not exc: raise Exception('Previously downed object server had test object') @@ -119,7 +119,7 @@ class TestObjectHandoff(unittest.TestCase): try: direct_client.direct_get_object(another_onode, opart, self.account, container, obj) - except: + except Exception: exc = True if not exc: raise Exception('Handoff object server still had test object') @@ -134,7 +134,7 @@ class TestObjectHandoff(unittest.TestCase): try: 
direct_client.direct_get_object(another_onode, opart, self.account, container, obj) - except: + except Exception: exc = True if not exc: raise Exception('Handoff server claimed it had the object when ' @@ -171,7 +171,7 @@ class TestObjectHandoff(unittest.TestCase): exc = False try: client.head_object(self.url, self.token, container, obj) - except: + except Exception: exc = True if not exc: raise Exception('Regular object HEAD was still successful') @@ -209,7 +209,7 @@ class TestObjectHandoff(unittest.TestCase): try: direct_client.direct_get_object(another_onode, opart, self.account, container, obj) - except: + except Exception: exc = True if not exc: raise Exception('Handoff object server still had the object') diff --git a/test/unit/common/middleware/test_auth.py b/test/unit/common/middleware/test_auth.py index cabc7a9523..d3a73bb50a 100644 --- a/test/unit/common/middleware/test_auth.py +++ b/test/unit/common/middleware/test_auth.py @@ -51,7 +51,7 @@ class FakeMemcache(object): def delete(self, key): try: del self.store[key] - except: + except Exception: pass return True diff --git a/test/unit/common/middleware/test_ratelimit.py b/test/unit/common/middleware/test_ratelimit.py index 3f993a0402..48c07f1f65 100644 --- a/test/unit/common/middleware/test_ratelimit.py +++ b/test/unit/common/middleware/test_ratelimit.py @@ -51,7 +51,7 @@ class FakeMemcache(object): def delete(self, key): try: del self.store[key] - except: + except Exception: pass return True diff --git a/test/unit/common/middleware/test_swauth.py b/test/unit/common/middleware/test_swauth.py index 2e4d958a44..a69f272788 100644 --- a/test/unit/common/middleware/test_swauth.py +++ b/test/unit/common/middleware/test_swauth.py @@ -49,7 +49,7 @@ class FakeMemcache(object): def delete(self, key): try: del self.store[key] - except: + except Exception: pass return True diff --git a/test/unit/common/test_db.py b/test/unit/common/test_db.py index 49bc8a9229..e63397c954 100644 --- a/test/unit/common/test_db.py +++ 
b/test/unit/common/test_db.py @@ -165,14 +165,14 @@ class TestDatabaseBroker(unittest.TestCase): try: with broker.get() as conn: conn.execute('SELECT 1') - except: + except Exception: got_exc = True broker = DatabaseBroker(os.path.join(self.testdir, '1.db')) got_exc = False try: with broker.get() as conn: conn.execute('SELECT 1') - except: + except Exception: got_exc = True self.assert_(got_exc) def stub(*args, **kwargs): @@ -186,7 +186,7 @@ class TestDatabaseBroker(unittest.TestCase): conn.execute('INSERT INTO test (one) VALUES ("1")') raise Exception('test') conn.commit() - except: + except Exception: pass broker = DatabaseBroker(os.path.join(self.testdir, '1.db')) with broker.get() as conn: @@ -230,7 +230,7 @@ class TestDatabaseBroker(unittest.TestCase): try: with broker.lock(): raise Exception('test') - except: + except Exception: pass with broker.lock(): pass @@ -548,7 +548,7 @@ class TestContainerBroker(unittest.TestCase): with broker.get() as conn: self.assertEquals(first_conn, conn) raise Exception('OMG') - except: + except Exception: pass self.assert_(broker.conn == None) @@ -1363,7 +1363,7 @@ class TestAccountBroker(unittest.TestCase): try: with broker.get() as conn: pass - except: + except Exception: got_exc = True self.assert_(got_exc) broker.initialize(normalize_timestamp('1')) @@ -1384,7 +1384,7 @@ class TestAccountBroker(unittest.TestCase): with broker.get() as conn: self.assertEquals(first_conn, conn) raise Exception('OMG') - except: + except Exception: pass self.assert_(broker.conn == None) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 1f5a94edd5..60ca38c2ca 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -107,7 +107,7 @@ class TestUtils(unittest.TestCase): testroot = os.path.join(os.path.dirname(__file__), 'mkdirs') try: os.unlink(testroot) - except: + except Exception: pass rmtree(testroot, ignore_errors=1) self.assert_(not os.path.exists(testroot)) @@ -211,14 +211,14 @@ 
class TestUtils(unittest.TestCase): try: for line in lfo: pass - except: + except Exception: got_exc = True self.assert_(got_exc) got_exc = False try: for line in lfo.xreadlines(): pass - except: + except Exception: got_exc = True self.assert_(got_exc) self.assertRaises(IOError, lfo.read) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index e5a4e40652..7e69502364 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -47,7 +47,7 @@ from swift.common import ring from swift.common.constraints import MAX_META_NAME_LENGTH, \ MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, MAX_FILE_SIZE from swift.common.utils import mkdirs, normalize_timestamp, NullLogger - +from swift.common.wsgi import monkey_patch_mimetools # mocks logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) @@ -426,6 +426,7 @@ class TestObjectController(unittest.TestCase): self.app = proxy_server.Application(None, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), object_ring=FakeRing()) + monkey_patch_mimetools() def assert_status_map(self, method, statuses, expected, raise_exc=False): with save_globals(): From 9fcb3ad4f645e0c1c6f264b8ccc9bc4c5a9ed339 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 27 Jan 2011 00:01:24 +0000 Subject: [PATCH 163/199] add pybabel setup.py commands and initial .pot --- babel.cfg | 2 + locale/swift.pot | 1030 ++++++++++++++++++++++++++++++++++++++++++++++ setup.cfg | 14 + setup.py | 9 +- 4 files changed, 1054 insertions(+), 1 deletion(-) create mode 100644 babel.cfg create mode 100644 locale/swift.pot diff --git a/babel.cfg b/babel.cfg new file mode 100644 index 0000000000..15cd6cb76b --- /dev/null +++ b/babel.cfg @@ -0,0 +1,2 @@ +[python: **.py] + diff --git a/locale/swift.pot b/locale/swift.pot new file mode 100644 index 0000000000..7f905f2940 --- /dev/null +++ b/locale/swift.pot @@ -0,0 +1,1030 @@ +# Translations template for swift. 
+# Copyright (C) 2011 ORGANIZATION +# This file is distributed under the same license as the swift project. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2011. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: swift 1.2.0\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2011-01-26 23:59+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 0.9.4\n" + +#: swift/account/auditor.py:52 swift/account/auditor.py:75 +#, python-format +msgid "" +"Since %(time)s: Account audits: %(passed)s passed audit, %(failed)s " +"failed audit" +msgstr "" + +#: swift/account/auditor.py:100 swift/container/auditor.py:103 +#, python-format +msgid "Audit passed for %s" +msgstr "" + +#: swift/account/auditor.py:103 +#, python-format +msgid "ERROR Could not get account info %s" +msgstr "" + +#: swift/account/reaper.py:80 swift/container/updater.py:64 +#, python-format +msgid "Loading account ring from %s" +msgstr "" + +#: swift/account/reaper.py:88 swift/obj/updater.py:57 +#, python-format +msgid "Loading container ring from %s" +msgstr "" + +#: swift/account/reaper.py:96 +#, python-format +msgid "Loading object ring from %s" +msgstr "" + +#: swift/account/reaper.py:106 +msgid "Daemon started." 
+msgstr "" + +#: swift/account/reaper.py:122 +#, python-format +msgid "Begin devices pass: %s" +msgstr "" + +#: swift/account/reaper.py:128 swift/common/utils.py:805 +#: swift/obj/updater.py:74 swift/obj/updater.py:113 +#, python-format +msgid "Skipping %s as it is not mounted" +msgstr "" + +#: swift/account/reaper.py:132 +#, python-format +msgid "Devices pass completed: %.02fs" +msgstr "" + +#: swift/account/reaper.py:215 +#, python-format +msgid "Beginning pass on account %s" +msgstr "" + +#: swift/account/reaper.py:238 +#, python-format +msgid "Exception with containers for account %s" +msgstr "" + +#: swift/account/reaper.py:243 +#, python-format +msgid "Exception with account %s" +msgstr "" + +#: swift/account/reaper.py:244 +#, python-format +msgid "Incomplete pass on account %s" +msgstr "" + +#: swift/account/reaper.py:246 +#, python-format +msgid ", %s containers deleted" +msgstr "" + +#: swift/account/reaper.py:248 +#, python-format +msgid ", %s objects deleted" +msgstr "" + +#: swift/account/reaper.py:250 +#, python-format +msgid ", %s containers remaining" +msgstr "" + +#: swift/account/reaper.py:253 +#, python-format +msgid ", %s objects remaining" +msgstr "" + +#: swift/account/reaper.py:255 +#, python-format +msgid ", %s containers possibly remaining" +msgstr "" + +#: swift/account/reaper.py:258 +#, python-format +msgid ", %s objects possibly remaining" +msgstr "" + +#: swift/account/reaper.py:261 +msgid ", return codes: " +msgstr "" + +#: swift/account/reaper.py:265 +#, python-format +msgid ", elapsed: %.02fs" +msgstr "" + +#: swift/account/reaper.py:320 swift/account/reaper.py:355 +#: swift/account/reaper.py:406 swift/container/updater.py:277 +#, python-format +msgid "Exception with %(ip)s:%(port)s/%(device)s" +msgstr "" + +#: swift/account/reaper.py:333 +#, python-format +msgid "Exception with objects for container %(container)s for account %(account)s" +msgstr "" + +#: swift/account/server.py:309 swift/container/server.py:397 +#: 
swift/obj/server.py:597 +#, python-format +msgid "ERROR __call__ error with %(method)s %(path)s " +msgstr "" + +#: swift/auth/server.py:96 swift/common/middleware/swauth.py:94 +msgid "No super_admin_key set in conf file! Exiting." +msgstr "" + +#: swift/auth/server.py:152 +#, python-format +msgid "" +"\n" +"THERE ARE ACCOUNTS IN YOUR auth.db THAT DO NOT BEGIN WITH YOUR NEW " +"RESELLER\n" +"PREFIX OF \"%(reseller)s\".\n" +"YOU HAVE A FEW OPTIONS:\n" +" 1. RUN \"swift-auth-update-reseller-prefixes %(db_file)s " +"%(reseller)s\",\n" +" \"swift-init auth-server restart\", AND\n" +" \"swift-auth-recreate-accounts -K ...\" TO CREATE FRESH ACCOUNTS.\n" +" OR\n" +" 2. REMOVE %(db_file)s, RUN \"swift-init auth-server restart\", AND " +"RUN\n" +" \"swift-auth-add-user ...\" TO CREATE BRAND NEW ACCOUNTS THAT WAY." +"\n" +" OR\n" +" 3. ADD \"reseller_prefix = %(previous)s\" (WITHOUT THE QUOTES) TO " +"YOUR\n" +" proxy-server.conf IN THE [filter:auth] SECTION AND TO YOUR\n" +" auth-server.conf IN THE [app:auth-server] SECTION AND RUN\n" +" \"swift-init proxy-server restart\" AND \"swift-init auth-server " +"restart\"\n" +" TO REVERT BACK TO YOUR PREVIOUS RESELLER PREFIX.\n" +"\n" +" %(note)s\n" +" " +msgstr "" + +#: swift/auth/server.py:173 +msgid "" +"\n" +" SINCE YOUR PREVIOUS RESELLER PREFIX WAS AN EMPTY STRING, IT IS NOT\n" +" RECOMMENDED TO PERFORM OPTION 3 AS THAT WOULD MAKE SUPPORTING " +"MULTIPLE\n" +" RESELLERS MORE DIFFICULT.\n" +" " +msgstr "" + +#: swift/auth/server.py:178 +msgid "CRITICAL: " +msgstr "" + +#: swift/auth/server.py:213 +#, python-format +msgid "ERROR attempting to create account %(url)s: %(status)s %(reason)s" +msgstr "" + +#: swift/auth/server.py:346 +#, python-format +msgid "" +"ALREADY EXISTS create_user(%(account)s, %(user)s, _, %(admin)s, " +"%(reseller_admin)s) [%(elapsed).02f]" +msgstr "" + +#: swift/auth/server.py:364 +#, python-format +msgid "" +"FAILED create_user(%(account)s, %(user)s, _, %(admin)s, " +"%(reseller_admin)s) 
[%(elapsed).02f]" +msgstr "" + +#: swift/auth/server.py:381 +#, python-format +msgid "" +"SUCCESS create_user(%(account)s, %(user)s, _, %(admin)s, " +"%(reseller_admin)s) = %(url)s [%(elapsed).02f]" +msgstr "" + +#: swift/auth/server.py:656 +msgid "ERROR Unhandled exception in ReST request" +msgstr "" + +#: swift/common/bench.py:85 +#, python-format +msgid "%(complete)s %(title)s [%(fail)s failures], %(rate).01f/s" +msgstr "" + +#: swift/common/bench.py:97 +msgid "CannotSendRequest. Skipping..." +msgstr "" + +#: swift/common/bufferedhttp.py:96 +#, python-format +msgid "HTTP PERF: %(time).5f seconds to %(method)s %(host)s:%(port)s %(path)s)" +msgstr "" + +#: swift/common/db.py:299 +msgid "Broker error trying to rollback locked connection" +msgstr "" + +#: swift/common/db.py:754 swift/common/db.py:1221 +#, python-format +msgid "Invalid pending entry %(file)s: %(entry)s" +msgstr "" + +#: swift/common/db_replicator.py:84 +#, python-format +msgid "ERROR reading HTTP response from %s" +msgstr "" + +#: swift/common/db_replicator.py:123 +#, python-format +msgid "Attempted to replicate %(count)d dbs in %(time).5f seconds (%(rate).5f/s)" +msgstr "" + +#: swift/common/db_replicator.py:129 +#, python-format +msgid "Removed %(remove)d dbs" +msgstr "" + +#: swift/common/db_replicator.py:130 +#, python-format +msgid "%(success)s successes, %(failure)s failures" +msgstr "" + +#: swift/common/db_replicator.py:155 +#, python-format +msgid "ERROR rsync failed with %(code)s: %(args)s" +msgstr "" + +#: swift/common/db_replicator.py:205 +#, python-format +msgid "Syncing chunks with %s" +msgstr "" + +#: swift/common/db_replicator.py:213 +#, python-format +msgid "ERROR Bad response %(status)s from %(host)s" +msgstr "" + +#: swift/common/db_replicator.py:278 +#, python-format +msgid "ERROR Unable to connect to remote server: %s" +msgstr "" + +#: swift/common/db_replicator.py:316 +#, python-format +msgid "Replicating db %s" +msgstr "" + +#: swift/common/db_replicator.py:325 
swift/common/db_replicator.py:479 +#, python-format +msgid "Quarantining DB %s" +msgstr "" + +#: swift/common/db_replicator.py:328 +#, python-format +msgid "ERROR reading db %s" +msgstr "" + +#: swift/common/db_replicator.py:361 +#, python-format +msgid "ERROR Remote drive not mounted %s" +msgstr "" + +#: swift/common/db_replicator.py:363 +#, python-format +msgid "ERROR syncing %(file)s with node %(node)s" +msgstr "" + +#: swift/common/db_replicator.py:405 +msgid "ERROR Failed to get my own IPs?" +msgstr "" + +#: swift/common/db_replicator.py:412 +#, python-format +msgid "Skipping %(device)s as it is not mounted" +msgstr "" + +#: swift/common/db_replicator.py:420 +msgid "Beginning replication run" +msgstr "" + +#: swift/common/db_replicator.py:425 +msgid "Replication run OVER" +msgstr "" + +#: swift/common/db_replicator.py:436 +msgid "ERROR trying to replicate" +msgstr "" + +#: swift/common/memcached.py:69 +#, python-format +msgid "Timeout %(action)s to memcached: %(server)s" +msgstr "" + +#: swift/common/memcached.py:72 +#, python-format +msgid "Error %(action)s to memcached: %(server)s" +msgstr "" + +#: swift/common/memcached.py:81 +#, python-format +msgid "Error limiting server %s" +msgstr "" + +#: swift/common/utils.py:88 +#, python-format +msgid "Unable to locate %s in libc. Leaving as a no-op." 
+msgstr "" + +#: swift/common/utils.py:255 +msgid "STDOUT: Connection reset by peer" +msgstr "" + +#: swift/common/utils.py:257 swift/common/utils.py:260 +#, python-format +msgid "STDOUT: %s" +msgstr "" + +#: swift/common/utils.py:324 +msgid "Connection refused" +msgstr "" + +#: swift/common/utils.py:326 +msgid "Host unreachable" +msgstr "" + +#: swift/common/utils.py:328 +msgid "Connection timeout" +msgstr "" + +#: swift/common/utils.py:464 +msgid "UNCAUGHT EXCEPTION" +msgstr "" + +#: swift/common/utils.py:511 +msgid "Error: missing config file argument" +msgstr "" + +#: swift/common/utils.py:516 +#, python-format +msgid "Error: unable to locate %s" +msgstr "" + +#: swift/common/utils.py:743 +#, python-format +msgid "Unable to read config file %s" +msgstr "" + +#: swift/common/utils.py:749 +#, python-format +msgid "Unable to find %s config section in %s" +msgstr "" + +#: swift/common/middleware/catch_errors.py:39 +#, python-format +msgid "Error: %s" +msgstr "" + +#: swift/common/middleware/cname_lookup.py:91 +#, python-format +msgid "Mapped %(given_domain)s to %(found_domain)s" +msgstr "" + +#: swift/common/middleware/cname_lookup.py:102 +#, python-format +msgid "Following CNAME chain for %(given_domain)s to %(found_domain)s" +msgstr "" + +#: swift/common/middleware/ratelimit.py:172 +msgid "Returning 497 because of blacklisting" +msgstr "" + +#: swift/common/middleware/ratelimit.py:185 +#, python-format +msgid "Ratelimit sleep log: %(sleep)s for %(account)s/%(container)s/%(object)s" +msgstr "" + +#: swift/common/middleware/ratelimit.py:192 +#, python-format +msgid "Returning 498 because of ops rate limiting (Max Sleep) %s" +msgstr "" + +#: swift/common/middleware/ratelimit.py:212 +msgid "Warning: Cannot ratelimit without a memcached client" +msgstr "" + +#: swift/common/middleware/swauth.py:635 +#, python-format +msgid "" +"ERROR: Exception while trying to communicate with " +"%(scheme)s://%(host)s:%(port)s/%(path)s" +msgstr "" + +#: swift/container/auditor.py:54 
swift/container/auditor.py:78 +#, python-format +msgid "" +"Since %(time)s: Container audits: %(pass)s passed audit, %(fail)s failed " +"audit" +msgstr "" + +#: swift/container/auditor.py:68 +msgid "Begin container audit \"once\" mode" +msgstr "" + +#: swift/container/auditor.py:88 +#, python-format +msgid "Container audit \"once\" mode completed: %.02fs" +msgstr "" + +#: swift/container/auditor.py:106 +#, python-format +msgid "ERROR Could not get container info %s" +msgstr "" + +#: swift/container/server.py:114 +#, python-format +msgid "" +"ERROR Account update failed with %(ip)s:%(port)s/%(device)s (will retry " +"later): Response %(status)s %(reason)s" +msgstr "" + +#: swift/container/server.py:122 +#, python-format +msgid "" +"ERROR account update failed with %(ip)s:%(port)s/%(device)s (will retry " +"later)" +msgstr "" + +#: swift/container/updater.py:78 swift/obj/replicator.py:492 +#, python-format +msgid "%s is not mounted" +msgstr "" + +#: swift/container/updater.py:97 +#, python-format +msgid "ERROR with loading suppressions from %s: " +msgstr "" + +#: swift/container/updater.py:107 +msgid "Begin container update sweep" +msgstr "" + +#: swift/container/updater.py:140 +#, python-format +msgid "" +"Container update sweep of %(path)s completed: %(elapsed).02fs, " +"%(success)s successes, %(fail)s failures, %(no_change)s with no changes" +msgstr "" + +#: swift/container/updater.py:154 +#, python-format +msgid "Container update sweep completed: %.02fs" +msgstr "" + +#: swift/container/updater.py:164 +msgid "Begin container update single threaded sweep" +msgstr "" + +#: swift/container/updater.py:172 +#, python-format +msgid "" +"Container update single threaded sweep completed: %(elapsed).02fs, " +"%(success)s successes, %(fail)s failures, %(no_change)s with no changes" +msgstr "" + +#: swift/container/updater.py:224 +#, python-format +msgid "Update report sent for %(container)s %(dbfile)s" +msgstr "" + +#: swift/container/updater.py:232 +#, python-format 
+msgid "Update report failed for %(container)s %(dbfile)s" +msgstr "" + +#: swift/container/updater.py:266 +#, python-format +msgid "" +"ERROR account update failed with %(ip)s:%(port)s/%(device)s (will retry " +"later): " +msgstr "" + +#: swift/obj/auditor.py:61 +#, python-format +msgid "Begin object audit \"%s\" mode" +msgstr "" + +#: swift/obj/auditor.py:73 +#, python-format +msgid "" +"Since %(start_time)s: Locally: %(passes)d passed audit, %(quars)d " +"quarantined, %(errors)d errors files/sec: %(frate).2f , bytes/sec: " +"%(brate).2f" +msgstr "" + +#: swift/obj/auditor.py:90 +#, python-format +msgid "" +"Object audit \"%(mode)s\" mode completed: %(elapsed).02fs. Total " +"files/sec: %(frate).2f , Total bytes/sec: %(brate).2f " +msgstr "" + +#: swift/obj/auditor.py:141 +#, python-format +msgid "ERROR Object %(obj)s failed audit and will be quarantined: %(err)s" +msgstr "" + +#: swift/obj/auditor.py:150 +#, python-format +msgid "ERROR Trying to audit %s" +msgstr "" + +#: swift/obj/replicator.py:182 +msgid "Error hashing suffix" +msgstr "" + +#: swift/obj/replicator.py:246 +#, python-format +msgid "Killing long-running rsync: %s" +msgstr "" + +#: swift/obj/replicator.py:257 +#, python-format +msgid "Bad rsync return code: %(args)s -> %(ret)d" +msgstr "" + +#: swift/obj/replicator.py:261 swift/obj/replicator.py:265 +#, python-format +msgid "Successful rsync of %(src)s at %(dst)s (%(time).03f)" +msgstr "" + +#: swift/obj/replicator.py:350 +#, python-format +msgid "Removing partition: %s" +msgstr "" + +#: swift/obj/replicator.py:353 +msgid "Error syncing handoff partition" +msgstr "" + +#: swift/obj/replicator.py:383 +#, python-format +msgid "%(ip)s/%(device)s responded as unmounted" +msgstr "" + +#: swift/obj/replicator.py:388 +#, python-format +msgid "Invalid response %(resp)s from %(ip)s" +msgstr "" + +#: swift/obj/replicator.py:410 +#, python-format +msgid "Error syncing with node: %s" +msgstr "" + +#: swift/obj/replicator.py:414 +msgid "Error syncing 
partition" +msgstr "" + +#: swift/obj/replicator.py:424 +#, python-format +msgid "" +"%(replicated)d/%(total)d (%(percentage).2f%%) partitions replicated in " +"%(time).2fs (%(rate).2f/sec, %(remaining)s remaining)" +msgstr "" + +#: swift/obj/replicator.py:433 +#, python-format +msgid "" +"%(checked)d suffixes checked - %(hashed).2f%% hashed, %(synced).2f%% " +"synced" +msgstr "" + +#: swift/obj/replicator.py:439 +#, python-format +msgid "Partition times: max %(max).4fs, min %(min).4fs, med %(med).4fs" +msgstr "" + +#: swift/obj/replicator.py:446 +#, python-format +msgid "Nothing replicated for %s seconds." +msgstr "" + +#: swift/obj/replicator.py:475 +msgid "Lockup detected.. killing live coros." +msgstr "" + +#: swift/obj/replicator.py:530 +msgid "Ring change detected. Aborting current replication pass." +msgstr "" + +#: swift/obj/replicator.py:540 +msgid "Exception in top-level replication loop" +msgstr "" + +#: swift/obj/replicator.py:549 +msgid "Running object replicator in script mode." +msgstr "" + +#: swift/obj/replicator.py:553 swift/obj/replicator.py:565 +#, python-format +msgid "Object replication complete. (%.02f minutes)" +msgstr "" + +#: swift/obj/replicator.py:560 +msgid "Starting object replication pass." +msgstr "" + +#: swift/obj/replicator.py:566 +#, python-format +msgid "Replication sleeping for %s seconds." 
+msgstr "" + +#: swift/obj/server.py:313 +#, python-format +msgid "" +"ERROR Container update failed (saving for async update later): %(status)d" +" response from %(ip)s:%(port)s/%(dev)s" +msgstr "" + +#: swift/obj/server.py:319 +#, python-format +msgid "" +"ERROR container update failed with %(ip)s:%(port)s/%(dev)s (saving for " +"async update later)" +msgstr "" + +#: swift/obj/updater.py:65 +msgid "Begin object update sweep" +msgstr "" + +#: swift/obj/updater.py:89 +#, python-format +msgid "" +"Object update sweep of %(device)s completed: %(elapsed).02fs, %(success)s" +" successes, %(fail)s failures" +msgstr "" + +#: swift/obj/updater.py:98 +#, python-format +msgid "Object update sweep completed: %.02fs" +msgstr "" + +#: swift/obj/updater.py:105 +msgid "Begin object update single threaded sweep" +msgstr "" + +#: swift/obj/updater.py:117 +#, python-format +msgid "" +"Object update single threaded sweep completed: %(elapsed).02fs, " +"%(success)s successes, %(fail)s failures" +msgstr "" + +#: swift/obj/updater.py:157 +#, python-format +msgid "ERROR Pickle problem, quarantining %s" +msgstr "" + +#: swift/obj/updater.py:177 +#, python-format +msgid "Update sent for %(obj)s %(path)s" +msgstr "" + +#: swift/obj/updater.py:182 +#, python-format +msgid "Update failed for %(obj)s %(path)s" +msgstr "" + +#: swift/obj/updater.py:206 +#, python-format +msgid "ERROR with remote server %(ip)s:%(port)s/%(device)s" +msgstr "" + +#: swift/proxy/server.py:165 swift/proxy/server.py:629 +#: swift/proxy/server.py:696 swift/proxy/server.py:712 +#: swift/proxy/server.py:721 swift/proxy/server.py:1004 +#: swift/proxy/server.py:1044 swift/proxy/server.py:1089 +msgid "Object" +msgstr "" + +#: swift/proxy/server.py:170 +#, python-format +msgid "Could not load object segment %(path)s: %(status)s" +msgstr "" + +#: swift/proxy/server.py:177 swift/proxy/server.py:210 +#: swift/proxy/server.py:257 +#, python-format +msgid "ERROR: While processing manifest /%(acc)s/%(cont)s/%(obj)s" +msgstr "" + 
+#: swift/proxy/server.py:292 +#, python-format +msgid "%(msg)s %(ip)s:%(port)s" +msgstr "" + +#: swift/proxy/server.py:304 +#, python-format +msgid "ERROR with %(type)s server %(ip)s:%(port)s/%(device)s re: %(info)s" +msgstr "" + +#: swift/proxy/server.py:328 +#, python-format +msgid "Node error limited %(ip)s:%(port)s (%(device)s)" +msgstr "" + +#: swift/proxy/server.py:388 swift/proxy/server.py:1451 +#: swift/proxy/server.py:1497 swift/proxy/server.py:1545 +#: swift/proxy/server.py:1590 +msgid "Account" +msgstr "" + +#: swift/proxy/server.py:389 +#, python-format +msgid "Trying to get account info for %s" +msgstr "" + +#: swift/proxy/server.py:466 swift/proxy/server.py:740 +#: swift/proxy/server.py:772 swift/proxy/server.py:1214 +#: swift/proxy/server.py:1301 swift/proxy/server.py:1356 +#: swift/proxy/server.py:1413 +msgid "Container" +msgstr "" + +#: swift/proxy/server.py:467 +#, python-format +msgid "Trying to get container info for %s" +msgstr "" + +#: swift/proxy/server.py:552 +#, python-format +msgid "%(type)s returning 503 for %(statuses)s" +msgstr "" + +#: swift/proxy/server.py:598 swift/proxy/server.py:697 +#, python-format +msgid "Trying to %(method)s %(path)s" +msgstr "" + +#: swift/proxy/server.py:627 +msgid "Client disconnected on read" +msgstr "" + +#: swift/proxy/server.py:630 +#, python-format +msgid "Trying to read during GET of %s" +msgstr "" + +#: swift/proxy/server.py:653 +#, python-format +msgid "ERROR %(status)d %(body)s From %(type)s Server" +msgstr "" + +#: swift/proxy/server.py:692 +#, python-format +msgid "ERROR %(status)d %(body)s From Object Server" +msgstr "" + +#: swift/proxy/server.py:776 swift/proxy/server.py:783 +#, python-format +msgid "Object manifest GET could not continue listing: %s %s" +msgstr "" + +#: swift/proxy/server.py:905 +msgid "Object POST" +msgstr "" + +#: swift/proxy/server.py:1005 +#, python-format +msgid "Expect: 100-continue on %s" +msgstr "" + +#: swift/proxy/server.py:1017 +#, python-format +msgid "Object PUT 
returning 503, %(conns)s/%(nodes)s required connections" +msgstr "" + +#: swift/proxy/server.py:1045 +#, python-format +msgid "Trying to write to %s" +msgstr "" + +#: swift/proxy/server.py:1049 +#, python-format +msgid "" +"Object PUT exceptions during send, %(conns)s/%(nodes)s required " +"connections" +msgstr "" + +#: swift/proxy/server.py:1058 +#, python-format +msgid "ERROR Client read timeout (%ss)" +msgstr "" + +#: swift/proxy/server.py:1063 +msgid "ERROR Exception causing client disconnect" +msgstr "" + +#: swift/proxy/server.py:1068 +msgid "Client disconnected without sending enough data" +msgstr "" + +#: swift/proxy/server.py:1083 +#, python-format +msgid "ERROR %(status)d %(body)s From Object Server re: %(path)s" +msgstr "" + +#: swift/proxy/server.py:1090 +#, python-format +msgid "Trying to get final status of PUT to %s" +msgstr "" + +#: swift/proxy/server.py:1093 +#, python-format +msgid "Object servers returned %s mismatched etags" +msgstr "" + +#: swift/proxy/server.py:1101 +msgid "Object PUT" +msgstr "" + +#: swift/proxy/server.py:1153 +msgid "Object DELETE" +msgstr "" + +#: swift/proxy/server.py:1302 swift/proxy/server.py:1498 +#, python-format +msgid "Trying to PUT to %s" +msgstr "" + +#: swift/proxy/server.py:1314 +msgid "Container PUT" +msgstr "" + +#: swift/proxy/server.py:1357 swift/proxy/server.py:1546 +#, python-format +msgid "Trying to POST %s" +msgstr "" + +#: swift/proxy/server.py:1369 +msgid "Container POST" +msgstr "" + +#: swift/proxy/server.py:1414 swift/proxy/server.py:1591 +#, python-format +msgid "Trying to DELETE %s" +msgstr "" + +#: swift/proxy/server.py:1426 +msgid "Container DELETE" +msgstr "" + +#: swift/proxy/server.py:1433 +msgid "Returning 503 because not all container nodes confirmed DELETE" +msgstr "" + +#: swift/proxy/server.py:1508 +msgid "Account PUT" +msgstr "" + +#: swift/proxy/server.py:1556 +msgid "Account POST" +msgstr "" + +#: swift/proxy/server.py:1601 +msgid "Account DELETE" +msgstr "" + +#: 
swift/proxy/server.py:1757 +msgid "ERROR Unhandled exception in request" +msgstr "" + +#: swift/stats/access_processor.py:63 swift/stats/stats_processor.py:40 +#, python-format +msgid "Bad line data: %s" +msgstr "" + +#: swift/stats/access_processor.py:67 +#, python-format +msgid "Bad server name: found \"%(found)s\" expected \"%(expected)s\"" +msgstr "" + +#: swift/stats/access_processor.py:75 +#, python-format +msgid "Invalid path: %(error)s from data: %(log)s" +msgstr "" + +#: swift/stats/access_processor.py:199 +#, python-format +msgid "I found a bunch of bad lines in %(name)s (%(bad)d bad, %(total)d total)" +msgstr "" + +#: swift/stats/account_stats.py:55 +msgid "Gathering account stats" +msgstr "" + +#: swift/stats/account_stats.py:59 +#, python-format +msgid "Gathering account stats complete (%0.2f minutes)" +msgstr "" + +#: swift/stats/account_stats.py:75 +#, python-format +msgid "Device %s is not mounted, skipping." +msgstr "" + +#: swift/stats/account_stats.py:81 +#, python-format +msgid "Path %s does not exist, skipping." 
+msgstr "" + +#: swift/stats/log_processor.py:62 +#, python-format +msgid "Loaded plugin \"%s\"" +msgstr "" + +#: swift/stats/log_processor.py:79 +#, python-format +msgid "Processing %(obj)s with plugin \"%(plugin)s\"" +msgstr "" + +#: swift/stats/log_processor.py:179 +#, python-format +msgid "Bad compressed data for %s" +msgstr "" + +#: swift/stats/log_processor.py:240 +msgid "Beginning log processing" +msgstr "" + +#: swift/stats/log_processor.py:278 +#, python-format +msgid "found %d processed files" +msgstr "" + +#: swift/stats/log_processor.py:283 +#, python-format +msgid "loaded %d files to process" +msgstr "" + +#: swift/stats/log_processor.py:286 swift/stats/log_processor.py:360 +#, python-format +msgid "Log processing done (%0.2f minutes)" +msgstr "" + +#: swift/stats/log_uploader.py:71 +msgid "Uploading logs" +msgstr "" + +#: swift/stats/log_uploader.py:74 +#, python-format +msgid "Uploading logs complete (%0.2f minutes)" +msgstr "" + +#: swift/stats/log_uploader.py:129 +#, python-format +msgid "Unexpected log: %s" +msgstr "" + +#: swift/stats/log_uploader.py:135 +#, python-format +msgid "Skipping log: %(file)s (< %(cutoff)d seconds old)" +msgstr "" + +#: swift/stats/log_uploader.py:142 +#, python-format +msgid "Log %s is 0 length, skipping" +msgstr "" + +#: swift/stats/log_uploader.py:144 +#, python-format +msgid "Processing log: %s" +msgstr "" + +#: swift/stats/log_uploader.py:165 +#, python-format +msgid "Uploaded log %(file)s to %(target)s" +msgstr "" + +#: swift/stats/log_uploader.py:170 +#, python-format +msgid "ERROR: Upload of log %s failed!" 
+msgstr "" + diff --git a/setup.cfg b/setup.cfg index d53addcbf3..50cfaf10fc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,3 +7,17 @@ source-dir = doc/source tag_build = tag_date = 0 tag_svn_revision = 0 + +[compile_catalog] +directory = locale +domain = swift + +[update_catalog] +domain = swift +output_dir = locale +input_file = locale/swift.pot + +[extract_messages] +keywords = _ l_ lazy_gettext +mapping_file = babel.cfg +output_file = locale/swift.pot diff --git a/setup.py b/setup.py index 912213c1c9..1c410ec988 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ from setuptools import setup, find_packages from setuptools.command.sdist import sdist import os import subprocess +from babel.messages import frontend from swift import __version__ as version @@ -49,7 +50,13 @@ setup( url='https://launchpad.net/swift', packages=find_packages(exclude=['test', 'bin']), test_suite='nose.collector', - cmdclass={'sdist': local_sdist}, + cmdclass={ + 'sdist': local_sdist, + 'compile_catalog': frontend.compile_catalog, + 'extract_messages': frontend.extract_messages, + 'init_catalog': frontend.init_catalog, + 'update_catalog': frontend.update_catalog, + }, classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', From ccaaa55b0e41b1602131eb9b889c4d62f1146ae0 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 27 Jan 2011 00:06:20 +0000 Subject: [PATCH 164/199] refactor setup.py --- setup.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py index 1c410ec988..c80d62ddc8 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,10 @@ from setuptools import setup, find_packages from setuptools.command.sdist import sdist import os import subprocess -from babel.messages import frontend +try: + from babel.messages import frontend +except ImportError: + frontend = None from swift import __version__ as version @@ -40,6 +43,19 @@ class 
local_sdist(sdist): name = 'swift' + +cmdclass = {'sdist': local_sdist} + + +if frontend: + cmdclass.update({ + 'compile_catalog': frontend.compile_catalog, + 'extract_messages': frontend.extract_messages, + 'init_catalog': frontend.init_catalog, + 'update_catalog': frontend.update_catalog, + }) + + setup( name=name, version=version, @@ -50,13 +66,7 @@ setup( url='https://launchpad.net/swift', packages=find_packages(exclude=['test', 'bin']), test_suite='nose.collector', - cmdclass={ - 'sdist': local_sdist, - 'compile_catalog': frontend.compile_catalog, - 'extract_messages': frontend.extract_messages, - 'init_catalog': frontend.init_catalog, - 'update_catalog': frontend.update_catalog, - }, + cmdclass=cmdclass, classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', From c3c5e5a3975cb8fb4d9181c052c11eb19f06412a Mon Sep 17 00:00:00 2001 From: Jay Payne <letterj@racklabs.com> Date: Thu, 27 Jan 2011 20:23:14 +0000 Subject: [PATCH 165/199] Commit out the both calls --- bin/swift-auth-to-swauth | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bin/swift-auth-to-swauth b/bin/swift-auth-to-swauth index 1b2d80eda0..93cb4fe199 100755 --- a/bin/swift-auth-to-swauth +++ b/bin/swift-auth-to-swauth @@ -26,7 +26,8 @@ if __name__ == '__main__': if len(argv) != 4 or argv[1] != '-K': exit('Syntax: %s -K <super_admin_key> <path to auth.db>' % argv[0]) _junk, _junk, super_admin_key, auth_db = argv - call(['swauth-prep', '-K', super_admin_key]) + # This version will not attempt to prep swauth + # call(['swauth-prep', '-K', super_admin_key]) conn = sqlite3.connect(auth_db) for account, cfaccount, user, password, admin, reseller_admin in \ conn.execute('SELECT account, cfaccount, user, password, admin, ' @@ -39,7 +40,8 @@ if __name__ == '__main__': cmd.append('-r') cmd.extend([account, user, password]) print ' '.join(cmd) - call(cmd) + # For this version, the script will only print out the commands + # call(cmd) print 
'----------------------------------------------------------------' print ' Assuming the above worked perfectly, you should copy and paste ' print ' those lines into your ~/bin/recreateaccounts script.' From 304ea48213f0251d9768aa3e6b7e64e15329103e Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 27 Jan 2011 21:02:53 +0000 Subject: [PATCH 166/199] object replicator logging and increase rsync timeouts --- etc/object-server.conf-sample | 8 ++++---- swift/obj/replicator.py | 11 +++++++---- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample index 6c696a5872..f3f7f07346 100644 --- a/etc/object-server.conf-sample +++ b/etc/object-server.conf-sample @@ -42,13 +42,13 @@ use = egg:swift#object # concurrency = 1 # stats_interval = 300 # max duration of a partition rsync -# rsync_timeout = 600 +# rsync_timeout = 900 # passed to rsync for io op timeout -# rsync_io_timeout = 10 +# rsync_io_timeout = 30 # max duration of an http request # http_timeout = 60 # attempts to kill all workers if nothing replicates for lockup_timeout seconds -# lockup_timeout = 900 +# lockup_timeout = 1800 # The replicator also performs reclamation # reclaim_age = 604800 @@ -71,4 +71,4 @@ use = egg:swift#object # log_level = INFO # files_per_second = 20 # bytes_per_second = 10000000 -# log_time = 3600 \ No newline at end of file +# log_time = 3600 diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index fc7e7cbd07..2c04829138 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -223,10 +223,10 @@ class ObjectReplicator(Daemon): self.reclaim_age = int(conf.get('reclaim_age', 86400 * 7)) self.partition_times = [] self.run_pause = int(conf.get('run_pause', 30)) - self.rsync_timeout = int(conf.get('rsync_timeout', 300)) - self.rsync_io_timeout = conf.get('rsync_io_timeout', '10') + self.rsync_timeout = int(conf.get('rsync_timeout', 900)) + self.rsync_io_timeout = 
conf.get('rsync_io_timeout', '30') self.http_timeout = int(conf.get('http_timeout', 60)) - self.lockup_timeout = int(conf.get('lockup_timeout', 900)) + self.lockup_timeout = int(conf.get('lockup_timeout', 1800)) def _rsync(self, args): """ @@ -252,7 +252,10 @@ class ObjectReplicator(Daemon): continue if result.startswith('cd+'): continue - self.logger.info(result) + if not retval: + self.logger.info(result) + else: + self.logger.error(result) if ret_val: self.logger.error(_('Bad rsync return code: %(args)s -> %(ret)d'), {'args': str(args), 'ret': ret_val}) From 148459abf15fe0982022365c6ee3065d5eb901a3 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 27 Jan 2011 22:42:36 +0000 Subject: [PATCH 167/199] fix typo --- swift/obj/replicator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 2c04829138..dcfcb926f9 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -252,7 +252,7 @@ class ObjectReplicator(Daemon): continue if result.startswith('cd+'): continue - if not retval: + if not ret_val: self.logger.info(result) else: self.logger.error(result) From 988b20a302acb4572bd81e3faef3cbd2de31b9f5 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Fri, 28 Jan 2011 10:00:10 -0600 Subject: [PATCH 168/199] Changing version to add gamma --- swift/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/__init__.py b/swift/__init__.py index 899047889e..316208f929 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,5 +1,5 @@ import gettext -__version__ = '1.2.0' +__version__ = '1.2-gamma' gettext.install('swift') From a44635ca9767d8ee737e0189063cf3b5b3842285 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 00:54:12 +0000 Subject: [PATCH 169/199] support WAL journaling instead of .pending files --- swift/common/db.py | 195 
+++++++-------------------------------------- 1 file changed, 29 insertions(+), 166 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index be96411619..36ef1f3c91 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -33,7 +33,7 @@ import simplejson as json import sqlite3 from swift.common.utils import normalize_timestamp, renamer, \ - mkdirs, lock_parent_directory, fallocate + mkdirs, lock_parent_directory from swift.common.exceptions import LockTimeout @@ -41,8 +41,7 @@ from swift.common.exceptions import LockTimeout BROKER_TIMEOUT = 25 #: Pickle protocol to use PICKLE_PROTOCOL = 2 -#: Max number of pending entries -PENDING_CAP = 131072 +PENDING_COMMIT_TIMEOUT = 900 class DatabaseConnectionError(sqlite3.DatabaseError): @@ -139,7 +138,7 @@ def get_db_connection(path, timeout=30, okay_to_create=False): conn.execute('PRAGMA synchronous = NORMAL') conn.execute('PRAGMA count_changes = OFF') conn.execute('PRAGMA temp_store = MEMORY') - conn.execute('PRAGMA journal_mode = DELETE') + conn.execute('PRAGMA journal_mode = WAL') conn.create_function('chexor', 3, chexor) except sqlite3.DatabaseError: import traceback @@ -152,13 +151,10 @@ class DatabaseBroker(object): """Encapsulates working with a database.""" def __init__(self, db_file, timeout=BROKER_TIMEOUT, logger=None, - account=None, container=None, pending_timeout=10, - stale_reads_ok=False): + account=None, container=None, stale_reads_ok=False): """ Encapsulates working with a database. 
""" self.conn = None self.db_file = db_file - self.pending_file = self.db_file + '.pending' - self.pending_timeout = pending_timeout self.stale_reads_ok = stale_reads_ok self.db_dir = os.path.dirname(db_file) self.timeout = timeout @@ -233,7 +229,7 @@ class DatabaseBroker(object): conn.close() with open(tmp_db_file, 'r+b') as fp: os.fsync(fp.fileno()) - with lock_parent_directory(self.db_file, self.pending_timeout): + with lock_parent_directory(self.db_file, self.timeout): if os.path.exists(self.db_file): # It's as if there was a "condition" where different parts # of the system were "racing" each other. @@ -348,11 +344,6 @@ class DatabaseBroker(object): :param count: number to get :returns: list of objects between start and end """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: curs = conn.execute(''' SELECT * FROM %s WHERE ROWID > ? ORDER BY ROWID ASC LIMIT ? @@ -401,11 +392,7 @@ class DatabaseBroker(object): :returns: dict containing keys: hash, id, created_at, put_timestamp, delete_timestamp, count, max_row, and metadata """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise + self._commit_puts() query_part1 = ''' SELECT hash, id, created_at, put_timestamp, delete_timestamp, %s_count AS count, @@ -455,34 +442,6 @@ class DatabaseBroker(object): (rec['sync_point'], rec['remote_id'])) conn.commit() - def _preallocate(self): - """ - The idea is to allocate space in front of an expanding db. If it gets - within 512k of a boundary, it allocates to the next boundary. - Boundaries are 2m, 5m, 10m, 25m, 50m, then every 50m after. 
- """ - if self.db_file == ':memory:': - return - MB = (1024 * 1024) - - def prealloc_points(): - for pm in (1, 2, 5, 10, 25, 50): - yield pm * MB - while True: - pm += 50 - yield pm * MB - - stat = os.stat(self.db_file) - file_size = stat.st_size - allocated_size = stat.st_blocks * 512 - for point in prealloc_points(): - if file_size <= point - MB / 2: - prealloc_size = point - break - if allocated_size < prealloc_size: - with open(self.db_file, 'rb+') as fp: - fallocate(fp.fileno(), int(prealloc_size)) - @property def metadata(self): """ @@ -717,11 +676,6 @@ class ContainerBroker(DatabaseBroker): :returns: True if the database has no active objects, False otherwise """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: row = conn.execute( 'SELECT object_count from container_stat').fetchone() @@ -729,17 +683,16 @@ class ContainerBroker(DatabaseBroker): def _commit_puts(self, item_list=None): """Handles commiting rows in .pending files.""" - if self.db_file == ':memory:' or not os.path.exists(self.pending_file): + pending_file = self.db_file + '.pending' + if self.db_file == ':memory:' or not os.path.exists(pending_file): + return + if not os.path.getsize(pending_file): + os.unlink(pending_file) return if item_list is None: item_list = [] - with lock_parent_directory(self.pending_file, self.pending_timeout): - self._preallocate() - if not os.path.getsize(self.pending_file): - if item_list: - self.merge_items(item_list) - return - with open(self.pending_file, 'r+b') as fp: + with lock_parent_directory(pending_file, PENDING_COMMIT_TIMEOUT): + with open(pending_file, 'r+b') as fp: for entry in fp.read().split(':'): if entry: try: @@ -752,11 +705,11 @@ class ContainerBroker(DatabaseBroker): except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), - {'file': self.pending_file, 'entry': entry}) + {'file': pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) 
try: - os.ftruncate(fp.fileno(), 0) + os.unlink(pending_file) except OSError, err: if err.errno != errno.ENOENT: raise @@ -774,7 +727,6 @@ class ContainerBroker(DatabaseBroker): delete :param sync_timestamp: max update_at timestamp of sync rows to delete """ - self._commit_puts() with self.get() as conn: conn.execute(""" DELETE FROM object @@ -818,30 +770,9 @@ class ContainerBroker(DatabaseBroker): record = {'name': name, 'created_at': timestamp, 'size': size, 'content_type': content_type, 'etag': etag, 'deleted': deleted} - if self.db_file == ':memory:': - self.merge_items([record]) - return - if not os.path.exists(self.db_file): + if self.db_file != ':memory:' and not os.path.exists(self.db_file): raise DatabaseConnectionError(self.db_file, "DB doesn't exist") - pending_size = 0 - try: - pending_size = os.path.getsize(self.pending_file) - except OSError, err: - if err.errno != errno.ENOENT: - raise - if pending_size > PENDING_CAP: - self._commit_puts([record]) - else: - with lock_parent_directory( - self.pending_file, self.pending_timeout): - with open(self.pending_file, 'a+b') as fp: - # Colons aren't used in base64 encoding; so they are our - # delimiter - fp.write(':') - fp.write(pickle.dumps( - (name, timestamp, size, content_type, etag, deleted), - protocol=PICKLE_PROTOCOL).encode('base64')) - fp.flush() + self.merge_items([record]) def is_deleted(self, timestamp=None): """ @@ -851,11 +782,6 @@ class ContainerBroker(DatabaseBroker): """ if self.db_file != ':memory:' and not os.path.exists(self.db_file): return True - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: row = conn.execute(''' SELECT put_timestamp, delete_timestamp, object_count @@ -878,11 +804,6 @@ class ContainerBroker(DatabaseBroker): reported_put_timestamp, reported_delete_timestamp, reported_object_count, reported_bytes_used, hash, id) """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise 
with self.get() as conn: return conn.execute(''' SELECT account, container, created_at, put_timestamp, @@ -919,11 +840,6 @@ class ContainerBroker(DatabaseBroker): :returns: list of object names """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise rv = [] with self.get() as conn: row = conn.execute(''' @@ -960,11 +876,6 @@ class ContainerBroker(DatabaseBroker): :returns: list of tuples of (name, created_at, size, content_type, etag) """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise if path is not None: prefix = path if path: @@ -1193,17 +1104,16 @@ class AccountBroker(DatabaseBroker): def _commit_puts(self, item_list=None): """Handles commiting rows in .pending files.""" - if self.db_file == ':memory:' or not os.path.exists(self.pending_file): + pending_file = self.db_file + '.pending' + if self.db_file == ':memory:' or not os.path.exists(pending_file): + return + if not os.path.getsize(pending_file): + os.unlink(pending_file) return if item_list is None: item_list = [] - with lock_parent_directory(self.pending_file, self.pending_timeout): - self._preallocate() - if not os.path.getsize(self.pending_file): - if item_list: - self.merge_items(item_list) - return - with open(self.pending_file, 'r+b') as fp: + with lock_parent_directory(pending_file, PENDING_COMMIT_TIMEOUT): + with open(pending_file, 'r+b') as fp: for entry in fp.read().split(':'): if entry: try: @@ -1219,11 +1129,11 @@ class AccountBroker(DatabaseBroker): except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), - {'file': self.pending_file, 'entry': entry}) + {'file': pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) try: - os.ftruncate(fp.fileno(), 0) + os.unlink(pending_file) except OSError, err: if err.errno != errno.ENOENT: raise @@ -1234,11 +1144,6 @@ class AccountBroker(DatabaseBroker): :returns: True if the database has no active containers. 
""" - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: row = conn.execute( 'SELECT container_count from account_stat').fetchone() @@ -1258,7 +1163,6 @@ class AccountBroker(DatabaseBroker): :param sync_timestamp: max update_at timestamp of sync rows to delete """ - self._commit_puts() with self.get() as conn: conn.execute(''' DELETE FROM container WHERE @@ -1286,11 +1190,6 @@ class AccountBroker(DatabaseBroker): :returns: put_timestamp of the container """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: ret = conn.execute(''' SELECT put_timestamp FROM container @@ -1311,6 +1210,8 @@ class AccountBroker(DatabaseBroker): :param object_count: number of objects in the container :param bytes_used: number of bytes used by the container """ + if self.db_file != ':memory:' and not os.path.exists(self.db_file): + raise DatabaseConnectionError(self.db_file, "DB doesn't exist") if delete_timestamp > put_timestamp and \ object_count in (None, '', 0, '0'): deleted = 1 @@ -1321,24 +1222,7 @@ class AccountBroker(DatabaseBroker): 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted} - if self.db_file == ':memory:': - self.merge_items([record]) - return - commit = False - with lock_parent_directory(self.pending_file, self.pending_timeout): - with open(self.pending_file, 'a+b') as fp: - # Colons aren't used in base64 encoding; so they are our - # delimiter - fp.write(':') - fp.write(pickle.dumps( - (name, put_timestamp, delete_timestamp, object_count, - bytes_used, deleted), - protocol=PICKLE_PROTOCOL).encode('base64')) - fp.flush() - if fp.tell() > PENDING_CAP: - commit = True - if commit: - self._commit_puts() + self.merge_items([record]) def can_delete_db(self, cutoff): """ @@ -1346,7 +1230,6 @@ class AccountBroker(DatabaseBroker): :returns: True if the account can be deleted, False otherwise """ - self._commit_puts() with 
self.get() as conn: row = conn.execute(''' SELECT status, put_timestamp, delete_timestamp, container_count @@ -1372,11 +1255,6 @@ class AccountBroker(DatabaseBroker): """ if self.db_file != ':memory:' and not os.path.exists(self.db_file): return True - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: row = conn.execute(''' SELECT put_timestamp, delete_timestamp, container_count, status @@ -1401,11 +1279,6 @@ class AccountBroker(DatabaseBroker): delete_timestamp, container_count, object_count, bytes_used, hash, id) """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise with self.get() as conn: return conn.execute(''' SELECT account, created_at, put_timestamp, delete_timestamp, @@ -1422,11 +1295,6 @@ class AccountBroker(DatabaseBroker): :returns: list of container names """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise rv = [] with self.get() as conn: row = conn.execute(''' @@ -1460,11 +1328,6 @@ class AccountBroker(DatabaseBroker): :returns: list of tuples of (name, object_count, bytes_used, 0) """ - try: - self._commit_puts() - except LockTimeout: - if not self.stale_reads_ok: - raise if delimiter and not prefix: prefix = '' orig_marker = marker From 4e100f6b325cbc5b2d83b4f3b622636ca25b069d Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 01:23:18 +0000 Subject: [PATCH 170/199] retry connect refactor --- swift/common/db.py | 49 +++++++++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 36ef1f3c91..ca667edf1a 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -27,6 +27,7 @@ import cPickle as pickle import errno from random import randint from tempfile import mkstemp +import traceback from eventlet import sleep import simplejson as json @@ -41,6 +42,7 @@ from 
swift.common.exceptions import LockTimeout BROKER_TIMEOUT = 25 #: Pickle protocol to use PICKLE_PROTOCOL = 2 +CONNECT_ATTEMPTS = 4 PENDING_COMMIT_TIMEOUT = 900 @@ -122,29 +124,32 @@ def get_db_connection(path, timeout=30, okay_to_create=False): :param okay_to_create: if True, create the DB if it doesn't exist :returns: DB connection object """ - try: - connect_time = time.time() - conn = sqlite3.connect(path, check_same_thread=False, - factory=GreenDBConnection, timeout=timeout) - if path != ':memory:' and not okay_to_create: + # retry logic to address: + # http://www.mail-archive.com/sqlite-users@sqlite.org/msg57092.html + for tries in xrange(1, CONNECT_ATTEMPTS + 1): + try: + connect_time = time.time() + conn = sqlite3.connect(path, check_same_thread=False, + factory=GreenDBConnection, timeout=timeout) # attempt to detect and fail when connect creates the db file - stat = os.stat(path) - if stat.st_size == 0 and stat.st_ctime >= connect_time: - os.unlink(path) - raise DatabaseConnectionError(path, - 'DB file created by connect?') - conn.row_factory = sqlite3.Row - conn.text_factory = str - conn.execute('PRAGMA synchronous = NORMAL') - conn.execute('PRAGMA count_changes = OFF') - conn.execute('PRAGMA temp_store = MEMORY') - conn.execute('PRAGMA journal_mode = WAL') - conn.create_function('chexor', 3, chexor) - except sqlite3.DatabaseError: - import traceback - raise DatabaseConnectionError(path, traceback.format_exc(), - timeout=timeout) - return conn + if path != ':memory:' and not okay_to_create: + stat = os.stat(path) + if stat.st_size == 0 and stat.st_ctime >= connect_time: + os.unlink(path) + raise DatabaseConnectionError(path, + 'DB file created by connect?') + conn.execute('PRAGMA synchronous = NORMAL') + conn.execute('PRAGMA count_changes = OFF') + conn.execute('PRAGMA temp_store = MEMORY') + conn.execute('PRAGMA journal_mode = WAL') + conn.create_function('chexor', 3, chexor) + conn.row_factory = sqlite3.Row + conn.text_factory = str + return conn + 
except sqlite3.DatabaseError, e: + if tries == CONNECT_ATTEMPTS or 'locking protocol' not in str(e): + raise DatabaseConnectionError(path, traceback.format_exc(), + timeout=timeout) class DatabaseBroker(object): From d83ce428afec5af180e5f85104da6242f8801fc1 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 01:40:55 +0000 Subject: [PATCH 171/199] increase WAL autocheckpoint --- swift/common/db.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/swift/common/db.py b/swift/common/db.py index ca667edf1a..e06739d85f 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -44,6 +44,7 @@ BROKER_TIMEOUT = 25 PICKLE_PROTOCOL = 2 CONNECT_ATTEMPTS = 4 PENDING_COMMIT_TIMEOUT = 900 +AUTOCHECKPOINT = 8192 class DatabaseConnectionError(sqlite3.DatabaseError): @@ -142,6 +143,7 @@ def get_db_connection(path, timeout=30, okay_to_create=False): conn.execute('PRAGMA count_changes = OFF') conn.execute('PRAGMA temp_store = MEMORY') conn.execute('PRAGMA journal_mode = WAL') + conn.execute('PRAGMA wal_autocheckpoint = %s' % AUTOCHECKPOINT) conn.create_function('chexor', 3, chexor) conn.row_factory = sqlite3.Row conn.text_factory = str From 0649d9cc602baaacdd428e3455df4f6a9254e681 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 03:26:26 +0000 Subject: [PATCH 172/199] replication fixes for WAL --- swift/common/db.py | 2 ++ swift/common/db_replicator.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/swift/common/db.py b/swift/common/db.py index e06739d85f..0f288b74a1 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -288,6 +288,7 @@ class DatabaseBroker(object): self.conn = None orig_isolation_level = conn.isolation_level conn.isolation_level = None + conn.execute('PRAGMA journal_mode = DELETE') # remove any journal files conn.execute('BEGIN IMMEDIATE') try: yield True @@ -295,6 +296,7 @@ class DatabaseBroker(object): pass try: conn.execute('ROLLBACK') 
+ conn.execute('PRAGMA journal_mode = WAL') # back to WAL mode conn.isolation_level = orig_isolation_level self.conn = conn except Exception: diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 49756f1f7b..01a7d202de 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -180,7 +180,9 @@ class Replicator(Daemon): return False # perform block-level sync if the db was modified during the first sync if os.path.exists(broker.db_file + '-journal') or \ - os.path.getmtime(broker.db_file) > mtime: + os.path.exists(broker.db_file + '-wal') or \ + os.path.exists(broker.db_file + '-shm') or \ + os.path.getmtime(broker.db_file) > mtime: # grab a lock so nobody else can modify it with broker.lock(): if not self._rsync_file(broker.db_file, remote_file, False): From 6f5d69e7b5b52f9c407e0ca83231778a42a48722 Mon Sep 17 00:00:00 2001 From: Jay Payne <letterj@racklabs.com> Date: Sat, 29 Jan 2011 16:43:02 +0000 Subject: [PATCH 173/199] listing is a tuple correcting reference rv[1] --- swift/common/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/common/client.py b/swift/common/client.py index bf402adb76..1fffaa493d 100644 --- a/swift/common/client.py +++ b/swift/common/client.py @@ -222,7 +222,7 @@ def get_account(url, token, marker=None, limit=None, prefix=None, listing = \ get_account(url, token, marker, limit, prefix, http_conn)[1] if listing: - rv.extend(listing) + rv[1].extend(listing) return rv parsed, conn = http_conn qs = 'format=json' From 68cda9b72446df358120bd9fed00a8804e960375 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 18:22:16 +0000 Subject: [PATCH 174/199] refactor db open retry loop slightly --- swift/common/db.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 0f288b74a1..1e0057908c 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -127,7 
+127,7 @@ def get_db_connection(path, timeout=30, okay_to_create=False): """ # retry logic to address: # http://www.mail-archive.com/sqlite-users@sqlite.org/msg57092.html - for tries in xrange(1, CONNECT_ATTEMPTS + 1): + for attempt in xrange(CONNECT_ATTEMPTS): try: connect_time = time.time() conn = sqlite3.connect(path, check_same_thread=False, @@ -139,19 +139,18 @@ def get_db_connection(path, timeout=30, okay_to_create=False): os.unlink(path) raise DatabaseConnectionError(path, 'DB file created by connect?') + conn.execute('PRAGMA journal_mode = WAL') conn.execute('PRAGMA synchronous = NORMAL') + conn.execute('PRAGMA wal_autocheckpoint = %s' % AUTOCHECKPOINT) conn.execute('PRAGMA count_changes = OFF') conn.execute('PRAGMA temp_store = MEMORY') - conn.execute('PRAGMA journal_mode = WAL') - conn.execute('PRAGMA wal_autocheckpoint = %s' % AUTOCHECKPOINT) conn.create_function('chexor', 3, chexor) conn.row_factory = sqlite3.Row conn.text_factory = str return conn except sqlite3.DatabaseError, e: - if tries == CONNECT_ATTEMPTS or 'locking protocol' not in str(e): - raise DatabaseConnectionError(path, traceback.format_exc(), - timeout=timeout) + errstr = traceback.format_exc() + raise DatabaseConnectionError(path, errstr, timeout=timeout) class DatabaseBroker(object): From 625255da39d0dda986c47f4390343513a34e5943 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Sat, 29 Jan 2011 19:26:06 +0000 Subject: [PATCH 175/199] remove pending_timeout references --- swift/account/server.py | 7 ------- swift/common/db.py | 3 +-- swift/common/db_replicator.py | 2 +- swift/container/server.py | 4 ---- 4 files changed, 2 insertions(+), 14 deletions(-) diff --git a/swift/account/server.py b/swift/account/server.py index 2c83f51cc6..94399eec22 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -86,8 +86,6 @@ class AccountController(object): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, 
part, account) if container: # put account container - if 'x-cf-trans-id' in req.headers: - broker.pending_timeout = 3 if req.headers.get('x-account-override-deleted', 'no').lower() != \ 'yes' and broker.is_deleted(): return HTTPNotFound(request=req) @@ -140,9 +138,6 @@ class AccountController(object): if self.mount_check and not check_mount(self.root, drive): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, part, account) - if not container: - broker.pending_timeout = 0.1 - broker.stale_reads_ok = True if broker.is_deleted(): return HTTPNotFound(request=req) info = broker.get_info() @@ -171,8 +166,6 @@ class AccountController(object): if self.mount_check and not check_mount(self.root, drive): return Response(status='507 %s is not mounted' % drive) broker = self._get_account_broker(drive, part, account) - broker.pending_timeout = 0.1 - broker.stale_reads_ok = True if broker.is_deleted(): return HTTPNotFound(request=req) info = broker.get_info() diff --git a/swift/common/db.py b/swift/common/db.py index 1e0057908c..7040b2446a 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -157,11 +157,10 @@ class DatabaseBroker(object): """Encapsulates working with a database.""" def __init__(self, db_file, timeout=BROKER_TIMEOUT, logger=None, - account=None, container=None, stale_reads_ok=False): + account=None, container=None): """ Encapsulates working with a database. 
""" self.conn = None self.db_file = db_file - self.stale_reads_ok = stale_reads_ok self.db_dir = os.path.dirname(db_file) self.timeout = timeout self.logger = logger or logging.getLogger() diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 01a7d202de..5c4d4ebd8e 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -318,7 +318,7 @@ class Replicator(Daemon): self.logger.debug(_('Replicating db %s'), object_file) self.stats['attempted'] += 1 try: - broker = self.brokerclass(object_file, pending_timeout=30) + broker = self.brokerclass(object_file) broker.reclaim(time.time() - self.reclaim_age, time.time() - (self.reclaim_age * 2)) info = broker.get_replication_info() diff --git a/swift/container/server.py b/swift/container/server.py index cfcdded1e4..9a6b4aa210 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -219,8 +219,6 @@ class ContainerController(object): if self.mount_check and not check_mount(self.root, drive): return Response(status='507 %s is not mounted' % drive) broker = self._get_container_broker(drive, part, account, container) - broker.pending_timeout = 0.1 - broker.stale_reads_ok = True if broker.is_deleted(): return HTTPNotFound(request=req) info = broker.get_info() @@ -246,8 +244,6 @@ class ContainerController(object): if self.mount_check and not check_mount(self.root, drive): return Response(status='507 %s is not mounted' % drive) broker = self._get_container_broker(drive, part, account, container) - broker.pending_timeout = 0.1 - broker.stale_reads_ok = True if broker.is_deleted(): return HTTPNotFound(request=req) info = broker.get_info() From a5d31b0d3a6206e14d3e55f37c43815fd309ad51 Mon Sep 17 00:00:00 2001 From: John Dickinson <john.dickinson@rackspace.com> Date: Sun, 30 Jan 2011 08:59:35 -0600 Subject: [PATCH 176/199] removed extra import in account stats logger --- swift/stats/account_stats.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/swift/stats/account_stats.py b/swift/stats/account_stats.py index 6a9688831f..325746386c 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -21,7 +21,6 @@ import hashlib from swift.account.server import DATADIR as account_server_data_dir from swift.common.db import AccountBroker -from swift.common.internal_proxy import InternalProxy from swift.common.utils import renamer, get_logger, readconf, mkdirs from swift.common.constraints import check_mount from swift.common.daemon import Daemon From f457d772fbe3c9b9603eb809a5c1c4e6f7636514 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Tue, 1 Feb 2011 15:48:46 -0600 Subject: [PATCH 177/199] Bumping version to 1.2-rc --- swift/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/__init__.py b/swift/__init__.py index 316208f929..9e4b1ee0cd 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,5 +1,5 @@ import gettext -__version__ = '1.2-gamma' +__version__ = '1.2-rc' gettext.install('swift') From fdf20184e47383505865580560235af83ef9c35f Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 2 Feb 2011 09:38:17 -0800 Subject: [PATCH 178/199] Fix duplicate logging --- swift/common/utils.py | 1 + test/unit/common/test_daemon.py | 4 ++-- test/unit/common/test_utils.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 479c4ecc30..a207153a74 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -409,6 +409,7 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None): logger = logging.getLogger() else: logger = logging.getLogger(log_route) + logger.propagate = False if not hasattr(get_logger, 'handlers'): get_logger.handlers = {} facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), diff --git a/test/unit/common/test_daemon.py b/test/unit/common/test_daemon.py index 015928f670..a4addcee51 100644 --- 
a/test/unit/common/test_daemon.py +++ b/test/unit/common/test_daemon.py @@ -28,7 +28,7 @@ class MyDaemon(daemon.Daemon): def __init__(self, conf): self.conf = conf - self.logger = utils.get_logger(None) + self.logger = utils.get_logger(None, 'server') MyDaemon.forever_called = False MyDaemon.once_called = False @@ -97,7 +97,7 @@ user = %s # test user quit MyDaemon.run_forever = MyDaemon.run_quit sio = StringIO() - logger = logging.getLogger() + logger = logging.getLogger('server') logger.addHandler(logging.StreamHandler(sio)) logger = utils.get_logger(None, 'server') daemon.run_daemon(MyDaemon, conf_file, logger=logger) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 94257415f1..0c81b15698 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -287,7 +287,7 @@ Error: unable to locate %s def test_get_logger(self): sio = StringIO() - logger = logging.getLogger() + logger = logging.getLogger('server') logger.addHandler(logging.StreamHandler(sio)) logger = utils.get_logger(None, 'server') logger.warn('test1') From 812fe86ea8108231039a5dbb06901f3de6bd71fb Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 2 Feb 2011 10:23:01 -0800 Subject: [PATCH 179/199] Make swauth only log requests it handles --- swift/common/middleware/swauth.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 3399fd06a4..5965e710ac 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -1321,6 +1321,8 @@ class Swauth(object): return False def posthooklogger(self, env, req): + if not req.path.startswith(self.auth_prefix): + return response = getattr(req, 'response', None) if not response: return From ee794fa79c95d3c9ee6eb8dd1ff0d00dd8165e8d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 2 Feb 2011 10:47:56 -0800 Subject: [PATCH 180/199] logging: Remove old handler before installing a 
new handler --- swift/common/utils.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index a207153a74..8da3d1f8f4 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -410,28 +410,34 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None): else: logger = logging.getLogger(log_route) logger.propagate = False - if not hasattr(get_logger, 'handlers'): - get_logger.handlers = {} + if not hasattr(get_logger, 'handler4facility'): + get_logger.handler4facility = {} facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), SysLogHandler.LOG_LOCAL0) - if facility in get_logger.handlers: - logger.removeHandler(get_logger.handlers[facility]) - get_logger.handlers[facility].close() - del get_logger.handlers[facility] + if facility in get_logger.handler4facility: + logger.removeHandler(get_logger.handler4facility[facility]) + get_logger.handler4facility[facility].close() + del get_logger.handler4facility[facility] if log_to_console: # check if a previous call to get_logger already added a console logger if hasattr(get_logger, 'console') and get_logger.console: logger.removeHandler(get_logger.console) get_logger.console = logging.StreamHandler(sys.__stderr__) logger.addHandler(get_logger.console) - get_logger.handlers[facility] = \ + get_logger.handler4facility[facility] = \ SysLogHandler(address='/dev/log', facility=facility) - logger.addHandler(get_logger.handlers[facility]) + if not hasattr(get_logger, 'handler4logger'): + get_logger.handler4logger = {} + if logger in get_logger.handler4logger: + logger.removeHandler(get_logger.handler4logger[logger]) + get_logger.handler4logger[logger] = \ + get_logger.handler4facility[facility] + logger.addHandler(get_logger.handler4facility[facility]) logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) adapted_logger = LogAdapter(logger) formatter = NamedFormatter(name, 
adapted_logger) - get_logger.handlers[facility].setFormatter(formatter) + get_logger.handler4facility[facility].setFormatter(formatter) if hasattr(get_logger, 'console'): get_logger.console.setFormatter(formatter) return adapted_logger From cb584303218805d1c1374d732caac26457a6ffe8 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Wed, 2 Feb 2011 13:39:08 -0800 Subject: [PATCH 181/199] logging: use routes to separate logging configurations --- bin/swift-drive-audit | 2 +- bin/swift-log-uploader | 2 +- swift/account/auditor.py | 2 +- swift/account/reaper.py | 2 +- swift/account/server.py | 2 +- swift/auth/server.py | 2 +- swift/common/daemon.py | 4 ++-- swift/common/db_replicator.py | 2 +- swift/common/middleware/catch_errors.py | 2 +- swift/common/middleware/cname_lookup.py | 2 +- swift/common/middleware/ratelimit.py | 2 +- swift/common/middleware/swauth.py | 2 +- swift/common/utils.py | 2 ++ swift/common/wsgi.py | 2 +- swift/container/auditor.py | 2 +- swift/container/server.py | 2 +- swift/container/updater.py | 2 +- swift/obj/auditor.py | 2 +- swift/obj/replicator.py | 2 +- swift/obj/server.py | 2 +- swift/obj/updater.py | 2 +- swift/proxy/server.py | 2 +- swift/stats/access_processor.py | 2 +- swift/stats/account_stats.py | 3 ++- swift/stats/log_processor.py | 4 ++-- swift/stats/log_uploader.py | 3 ++- swift/stats/stats_processor.py | 2 +- test/unit/auth/test_server.py | 4 ++-- test/unit/common/test_daemon.py | 4 ++-- test/unit/common/test_utils.py | 8 +++++--- 30 files changed, 41 insertions(+), 35 deletions(-) diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index e92d1e3c12..5203f54b6b 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ -99,7 +99,7 @@ if __name__ == '__main__': device_dir = conf.get('device_dir', '/srv/node') minutes = int(conf.get('minutes', 60)) error_limit = int(conf.get('error_limit', 1)) - logger = get_logger(conf, 'drive-audit') + logger = get_logger(conf, log_route='drive-audit') devices = 
get_devices(device_dir, logger) logger.debug("Devices found: %s" % str(devices)) if not devices: diff --git a/bin/swift-log-uploader b/bin/swift-log-uploader index 9d0e27836c..93cb8f6f97 100755 --- a/bin/swift-log-uploader +++ b/bin/swift-log-uploader @@ -34,7 +34,7 @@ if __name__ == '__main__': uploader_conf.update(plugin_conf) # pre-configure logger - logger = utils.get_logger(uploader_conf, plugin, + logger = utils.get_logger(uploader_conf, plugin, log_route='log-uploader', log_to_console=options.get('verbose', False)) # currently LogUploader only supports run_once options['once'] = True diff --git a/swift/account/auditor.py b/swift/account/auditor.py index 1f24f93acc..63551354d8 100644 --- a/swift/account/auditor.py +++ b/swift/account/auditor.py @@ -28,7 +28,7 @@ class AccountAuditor(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf, 'account-auditor') + self.logger = get_logger(conf, log_route='account-auditor') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/account/reaper.py b/swift/account/reaper.py index dd0d4b3890..ba78db8d98 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -53,7 +53,7 @@ class AccountReaper(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='account-reaper') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/account/server.py b/swift/account/server.py index 2c83f51cc6..f15ac38c11 100644 --- a/swift/account/server.py +++ b/swift/account/server.py @@ -42,7 +42,7 @@ class AccountController(object): """WSGI controller for the account server.""" def __init__(self, conf): - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='account-server') self.root = 
conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/auth/server.py b/swift/auth/server.py index f9cd56dd0e..1258a706e7 100644 --- a/swift/auth/server.py +++ b/swift/auth/server.py @@ -90,7 +90,7 @@ class AuthController(object): """ def __init__(self, conf): - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='auth-server') self.super_admin_key = conf.get('super_admin_key') if not self.super_admin_key: msg = _('No super_admin_key set in conf file! Exiting.') diff --git a/swift/common/daemon.py b/swift/common/daemon.py index eee3428679..91230e4d2b 100644 --- a/swift/common/daemon.py +++ b/swift/common/daemon.py @@ -26,7 +26,7 @@ class Daemon(object): def __init__(self, conf): self.conf = conf - self.logger = utils.get_logger(conf, 'swift-daemon') + self.logger = utils.get_logger(conf, log_route='daemon') def run_once(self): """Override this to run the script once""" @@ -84,7 +84,7 @@ def run_daemon(klass, conf_file, section_name='', logger = kwargs.pop('logger') else: logger = utils.get_logger(conf, conf.get('log_name', section_name), - log_to_console=kwargs.pop('verbose', False)) + log_to_console=kwargs.pop('verbose', False), log_route=section_name) try: klass(conf).run(once=once, **kwargs) except KeyboardInterrupt: diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 49756f1f7b..3c3731d45a 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -92,7 +92,7 @@ class Replicator(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='replicator') self.root = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index 10d8614194..16ade84689 100644 
--- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -30,7 +30,7 @@ class CatchErrorMiddleware(object): self.logger = getattr(app, 'logger', None) if not self.logger: # and only call get_logger if we have to - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='catch-errors') def __call__(self, env, start_response): try: diff --git a/swift/common/middleware/cname_lookup.py b/swift/common/middleware/cname_lookup.py index f13155c1fe..8ea9f88071 100644 --- a/swift/common/middleware/cname_lookup.py +++ b/swift/common/middleware/cname_lookup.py @@ -53,7 +53,7 @@ class CNAMELookupMiddleware(object): self.storage_domain = '.' + self.storage_domain self.lookup_depth = int(conf.get('lookup_depth', '1')) self.memcache = None - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='cname-lookup') def __call__(self, env, start_response): if not self.storage_domain: diff --git a/swift/common/middleware/ratelimit.py b/swift/common/middleware/ratelimit.py index 4657b6abcd..485b1db26e 100644 --- a/swift/common/middleware/ratelimit.py +++ b/swift/common/middleware/ratelimit.py @@ -39,7 +39,7 @@ class RateLimitMiddleware(object): if logger: self.logger = logger else: - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='ratelimit') self.account_ratelimit = float(conf.get('account_ratelimit', 0)) self.max_sleep_time_seconds = \ float(conf.get('max_sleep_time_seconds', 60)) diff --git a/swift/common/middleware/swauth.py b/swift/common/middleware/swauth.py index 5965e710ac..32328e8eb6 100644 --- a/swift/common/middleware/swauth.py +++ b/swift/common/middleware/swauth.py @@ -51,7 +51,7 @@ class Swauth(object): def __init__(self, app, conf): self.app = app self.conf = conf - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='swauth') self.log_headers = conf.get('log_headers') == 'True' self.reseller_prefix = conf.get('reseller_prefix', 'AUTH').strip() if 
self.reseller_prefix and self.reseller_prefix[-1] != '_': diff --git a/swift/common/utils.py b/swift/common/utils.py index 8da3d1f8f4..698fae2cc1 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -395,6 +395,8 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None): :param conf: Configuration dict to read settings from :param name: Name of the logger :param log_to_console: Add handler which writes to console on stderr + :param log_route: Route for the logging, not emitted to the log, just used + to separate logging configurations """ if not conf: conf = {} diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index 9450bcf439..46207fe8e1 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -113,7 +113,7 @@ def run_wsgi(conf_file, app_section, *args, **kwargs): logger = kwargs.pop('logger') else: logger = get_logger(conf, log_name, - log_to_console=kwargs.pop('verbose', False)) + log_to_console=kwargs.pop('verbose', False), log_route='wsgi') # redirect errors to logger and close stdio capture_stdio(logger) diff --git a/swift/container/auditor.py b/swift/container/auditor.py index d1ceb4f98a..0b1c10e03e 100644 --- a/swift/container/auditor.py +++ b/swift/container/auditor.py @@ -28,7 +28,7 @@ class ContainerAuditor(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf, 'container-auditor') + self.logger = get_logger(conf, log_route='container-auditor') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/container/server.py b/swift/container/server.py index cfcdded1e4..561dad2ea9 100644 --- a/swift/container/server.py +++ b/swift/container/server.py @@ -49,7 +49,7 @@ class ContainerController(object): save_headers = ['x-container-read', 'x-container-write'] def __init__(self, conf): - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='container-server') 
self.root = conf.get('devices', '/srv/node/') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/container/updater.py b/swift/container/updater.py index 883dd17101..0bd000f3f2 100644 --- a/swift/container/updater.py +++ b/swift/container/updater.py @@ -37,7 +37,7 @@ class ContainerUpdater(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf, 'container-updater') + self.logger = get_logger(conf, log_route='container-updater') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 09fdd77774..8ed05049f3 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -31,7 +31,7 @@ class ObjectAuditor(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf, 'object-auditor') + self.logger = get_logger(conf, log_route='object-auditor') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index dcfcb926f9..8dec8aa801 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -207,7 +207,7 @@ class ObjectReplicator(Daemon): :param logger: logging object """ self.conf = conf - self.logger = get_logger(conf, 'object-replicator') + self.logger = get_logger(conf, log_route='object-replicator') self.devices_dir = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/obj/server.py b/swift/obj/server.py index f2e2b31314..e3626bf692 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -266,7 +266,7 @@ class ObjectController(object): <source-dir>/etc/object-server.conf-sample or /etc/swift/object-server.conf-sample. 
""" - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='object-server') self.devices = conf.get('devices', '/srv/node/') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/obj/updater.py b/swift/obj/updater.py index 2b28ff08c5..356be64da4 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -35,7 +35,7 @@ class ObjectUpdater(Daemon): def __init__(self, conf): self.conf = conf - self.logger = get_logger(conf, 'object-updater') + self.logger = get_logger(conf, log_route='object-updater') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 1eae0dfc30..a66f643a68 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1607,7 +1607,7 @@ class BaseApplication(object): def __init__(self, conf, memcache=None, logger=None, account_ring=None, container_ring=None, object_ring=None): if logger is None: - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='proxy-server') else: self.logger = logger if conf is None: diff --git a/swift/stats/access_processor.py b/swift/stats/access_processor.py index 2aee505415..6965ef2b4a 100644 --- a/swift/stats/access_processor.py +++ b/swift/stats/access_processor.py @@ -34,7 +34,7 @@ class AccessLogProcessor(object): conf.get('service_ips', '').split(',')\ if x.strip()] self.warn_percent = float(conf.get('warn_percent', '0.8')) - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='access-processor') def log_line_parser(self, raw_log): '''given a raw access log line, return a dict of the good parts''' diff --git a/swift/stats/account_stats.py b/swift/stats/account_stats.py index 325746386c..34b024d2c2 100644 --- a/swift/stats/account_stats.py +++ b/swift/stats/account_stats.py @@ -48,7 +48,8 @@ class AccountStat(Daemon): 
self.devices = server_conf.get('devices', '/srv/node') self.mount_check = server_conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') - self.logger = get_logger(stats_conf, 'swift-account-stats-logger') + self.logger = \ + get_logger(stats_conf, log_route='account-stats') def run_once(self): self.logger.info(_("Gathering account stats")) diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py index 5dbc92afbe..727e687f38 100644 --- a/swift/stats/log_processor.py +++ b/swift/stats/log_processor.py @@ -40,7 +40,7 @@ class LogProcessor(object): def __init__(self, conf, logger): if isinstance(logger, tuple): - self.logger = get_logger(*logger) + self.logger = get_logger(*logger, log_route='log-processor') else: self.logger = logger @@ -226,7 +226,7 @@ class LogProcessorDaemon(Daemon): c = conf.get('log-processor') super(LogProcessorDaemon, self).__init__(c) self.total_conf = conf - self.logger = get_logger(c) + self.logger = get_logger(c, log_route='log-processor') self.log_processor = LogProcessor(conf, self.logger) self.lookback_hours = int(c.get('lookback_hours', '120')) self.lookback_window = int(c.get('lookback_window', diff --git a/swift/stats/log_uploader.py b/swift/stats/log_uploader.py index b425738938..a828188eb7 100644 --- a/swift/stats/log_uploader.py +++ b/swift/stats/log_uploader.py @@ -65,7 +65,8 @@ class LogUploader(Daemon): self.filename_format = source_filename_format self.internal_proxy = InternalProxy(proxy_server_conf) log_name = 'swift-log-uploader-%s' % plugin_name - self.logger = utils.get_logger(uploader_conf, plugin_name) + self.logger = \ + utils.get_logger(uploader_conf, plugin_name, log_route=plugin_name) def run_once(self): self.logger.info(_("Uploading logs")) diff --git a/swift/stats/stats_processor.py b/swift/stats/stats_processor.py index 95dba7604c..f9496c1df9 100644 --- a/swift/stats/stats_processor.py +++ b/swift/stats/stats_processor.py @@ -20,7 +20,7 @@ class StatsLogProcessor(object): 
"""Transform account storage stat logs""" def __init__(self, conf): - self.logger = get_logger(conf) + self.logger = get_logger(conf, log_route='stats-processor') def process(self, obj_stream, data_object_account, data_object_container, data_object_name): diff --git a/test/unit/auth/test_server.py b/test/unit/auth/test_server.py index 4060766d65..d58556ab22 100644 --- a/test/unit/auth/test_server.py +++ b/test/unit/auth/test_server.py @@ -456,7 +456,7 @@ class TestAuthServer(unittest.TestCase): def test_basic_logging(self): log = StringIO() log_handler = StreamHandler(log) - logger = get_logger(self.conf, 'auth') + logger = get_logger(self.conf, 'auth-server', log_route='auth-server') logger.logger.addHandler(log_handler) try: auth_server.http_connect = fake_http_connect(201) @@ -534,7 +534,7 @@ class TestAuthServer(unittest.TestCase): orig_Request = auth_server.Request log = StringIO() log_handler = StreamHandler(log) - logger = get_logger(self.conf, 'auth') + logger = get_logger(self.conf, 'auth-server', log_route='auth-server') logger.logger.addHandler(log_handler) try: auth_server.Request = request_causing_exception diff --git a/test/unit/common/test_daemon.py b/test/unit/common/test_daemon.py index a4addcee51..1d54e78c3e 100644 --- a/test/unit/common/test_daemon.py +++ b/test/unit/common/test_daemon.py @@ -28,7 +28,7 @@ class MyDaemon(daemon.Daemon): def __init__(self, conf): self.conf = conf - self.logger = utils.get_logger(None, 'server') + self.logger = utils.get_logger(None, 'server', log_route='server') MyDaemon.forever_called = False MyDaemon.once_called = False @@ -99,7 +99,7 @@ user = %s sio = StringIO() logger = logging.getLogger('server') logger.addHandler(logging.StreamHandler(sio)) - logger = utils.get_logger(None, 'server') + logger = utils.get_logger(None, 'server', log_route='server') daemon.run_daemon(MyDaemon, conf_file, logger=logger) self.assert_('user quit' in sio.getvalue().lower()) diff --git a/test/unit/common/test_utils.py 
b/test/unit/common/test_utils.py index 0c81b15698..8da913c489 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -289,17 +289,19 @@ Error: unable to locate %s sio = StringIO() logger = logging.getLogger('server') logger.addHandler(logging.StreamHandler(sio)) - logger = utils.get_logger(None, 'server') + logger = utils.get_logger(None, 'server', log_route='server') logger.warn('test1') self.assertEquals(sio.getvalue(), 'test1\n') logger.debug('test2') self.assertEquals(sio.getvalue(), 'test1\n') - logger = utils.get_logger({'log_level': 'DEBUG'}, 'server') + logger = utils.get_logger({'log_level': 'DEBUG'}, 'server', + log_route='server') logger.debug('test3') self.assertEquals(sio.getvalue(), 'test1\ntest3\n') # Doesn't really test that the log facility is truly being used all the # way to syslog; but exercises the code. - logger = utils.get_logger({'log_facility': 'LOG_LOCAL3'}, 'server') + logger = utils.get_logger({'log_facility': 'LOG_LOCAL3'}, 'server', + log_route='server') logger.warn('test4') self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\n') From bf34239d79cc78cafa138f02fcde327ea4db69c1 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Wed, 2 Feb 2011 22:37:55 +0000 Subject: [PATCH 182/199] Bumping version to 1.2.0 in preparation for release --- swift/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/__init__.py b/swift/__init__.py index 9e4b1ee0cd..899047889e 100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,5 +1,5 @@ import gettext -__version__ = '1.2-rc' +__version__ = '1.2.0' gettext.install('swift') From 4bee91515e00bc32b487df767c629f62d77aec8e Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Thu, 3 Feb 2011 19:27:25 +0000 Subject: [PATCH 183/199] Updating version to 1.3-dev --- swift/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/__init__.py b/swift/__init__.py index 899047889e..25a1c6b8c7 
100644 --- a/swift/__init__.py +++ b/swift/__init__.py @@ -1,5 +1,5 @@ import gettext -__version__ = '1.2.0' +__version__ = '1.3-dev' gettext.install('swift') From ee3886f2ca45e8983b13af04a06fee000aab61bb Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 3 Feb 2011 13:46:28 -0600 Subject: [PATCH 184/199] moved warning messages out of proxy.logger.info A few warning/client error messages were useing the .info log level which we reserve for access logs. They were changed to warnings. --- swift/proxy/server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 1eae0dfc30..14f79a2c61 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -624,7 +624,7 @@ class Controller(object): res.bytes_transferred += len(chunk) except GeneratorExit: res.client_disconnect = True - self.app.logger.info(_('Client disconnected on read')) + self.app.logger.warn(_('Client disconnected on read')) except (Exception, TimeoutError): self.exception_occurred(node, _('Object'), _('Trying to read during GET of %s') % req.path) @@ -1054,7 +1054,7 @@ class ObjectController(Controller): if req.headers.get('transfer-encoding') and chunk == '': break except ChunkReadTimeout, err: - self.app.logger.info( + self.app.logger.warn( _('ERROR Client read timeout (%ss)'), err.seconds) return HTTPRequestTimeout(request=req) except Exception: @@ -1064,7 +1064,7 @@ class ObjectController(Controller): return Response(status='499 Client Disconnect') if req.content_length and req.bytes_transferred < req.content_length: req.client_disconnect = True - self.app.logger.info( + self.app.logger.warn( _('Client disconnected without sending enough data')) return Response(status='499 Client Disconnect') statuses = [] From c62842bfd111c9f2ac4dbd343582332452ec13b3 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 3 Feb 2011 19:50:16 +0000 Subject: [PATCH 185/199] update 
all ring-builder grammars --- bin/swift-ring-builder | 47 +++++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index 41293f7d37..c6d91f92b4 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -48,6 +48,8 @@ The <search-value> can be of the form: /sdb1 Matches devices with the device name sdb1 _shiny Matches devices with shiny in the meta data _"snet: 5.6.7.8" Matches devices with snet: 5.6.7.8 in the meta data + [::1] Matches devices in any zone with the ip ::1 + z1-[::1]:5678 Matches devices in zone 1 with the ip ::1 and port 5678 Most specific example: d74z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8" Nerd explanation: @@ -76,6 +78,13 @@ The <search-value> can be of the form: i += 1 match.append(('ip', search_value[:i])) search_value = search_value[i:] + elif len(search_value) and search_value[0] == '[': + i = 1 + while i < len(search_value) and search_value[i] != ']': + i += 1 + i += 1 + match.append(('ip', search_value[:i].lstrip('[').rstrip(']'))) + search_value = search_value[i:] if search_value.startswith(':'): i = 1 while i < len(search_value) and search_value[i].isdigit(): @@ -110,6 +119,16 @@ The <search-value> can be of the form: return devs +def format_device(dev): + """ + Format a device for display. 
+ """ + if ':' in dev['ip']: + return 'd%(id)sz%(zone)s-[%(ip)s]:%(port)s/%(device)s_"%(meta)s"' % dev + else: + return 'd%(id)sz%(zone)s-%(ip)s:%(port)s/%(device)s_"%(meta)s"' % dev + + class Commands: def unknown(): @@ -236,10 +255,11 @@ swift-ring-builder <builder_file> add z<zone>-<ip>:<port>/<device_name>_<meta> exit(EXIT_ERROR) i = 1 if rest[i] == '[': + i += 1 while i < len(rest) and rest[i] != ']': i += 1 - ip = rest[2:i] i += 1 + ip = rest[1:i].lstrip('[').rstrip(']') rest = rest[i:] else: while i < len(rest) and rest[i] in '0123456789.': @@ -286,8 +306,12 @@ swift-ring-builder <builder_file> add z<zone>-<ip>:<port>/<device_name>_<meta> builder.add_dev({'id': next_dev_id, 'zone': zone, 'ip': ip, 'port': port, 'device': device_name, 'weight': weight, 'meta': meta}) - print 'Device z%s-%s:%s/%s_"%s" with %s weight got id %s' % \ - (zone, ip, port, device_name, meta, weight, next_dev_id) + if ':' in ip: + print 'Device z%s-[%s]:%s/%s_"%s" with %s weight got id %s' % \ + (zone, ip, port, device_name, meta, weight, next_dev_id) + else: + print 'Device z%s-%s:%s/%s_"%s" with %s weight got id %s' % \ + (zone, ip, port, device_name, meta, weight, next_dev_id) pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) @@ -349,6 +373,13 @@ swift-ring-builder <builder_file> set_info <search-value> i += 1 change.append(('ip', change_value[:i])) change_value = change_value[i:] + elif len(change_value) and change_value[0] == '[': + i = 1 + while i < len(change_value) and change_value[i] != ']': + i += 1 + i += 1 + change.append(('ip', change_value[:i].lstrip('[').rstrip(']'))) + change_value = change_value[i:] if change_value.startswith(':'): i = 1 while i < len(change_value) and change_value[i].isdigit(): @@ -373,15 +404,13 @@ swift-ring-builder <builder_file> set_info <search-value> if len(devs) > 1: print 'Matched more than one device:' for dev in devs: - print ' d%(id)sz%(zone)s-%(ip)s:%(port)s/%(device)s_' \ - '"%(meta)s"' % dev + print ' %s' % 
format_device(dev) if raw_input('Are you sure you want to update the info for ' 'these %s devices? (y/N) ' % len(devs)) != 'y': print 'Aborting device modifications' exit(EXIT_ERROR) for dev in devs: - orig_dev_string = \ - 'd%(id)sz%(zone)s-%(ip)s:%(port)s/%(device)s_"%(meta)s"' % dev + orig_dev_string = format_device(dev) test_dev = dict(dev) for key, value in change: test_dev[key] = value @@ -397,9 +426,7 @@ swift-ring-builder <builder_file> set_info <search-value> exit(EXIT_ERROR) for key, value in change: dev[key] = value - new_dev_string = \ - 'd%(id)sz%(zone)s-%(ip)s:%(port)s/%(device)s_"%(meta)s"' % dev - print 'Device %s is now %s' % (orig_dev_string, new_dev_string) + print 'Device %s is now %s' % (orig_dev_string, format_device(dev)) pickle.dump(builder, open(argv[1], 'wb'), protocol=2) exit(EXIT_RING_UNCHANGED) From c2931e157c4db364e4ce9d971920be2a83641431 Mon Sep 17 00:00:00 2001 From: Michael Barton <michael.barton@rackspace.com> Date: Thu, 3 Feb 2011 19:53:47 +0000 Subject: [PATCH 186/199] random newline --- test/unit/common/test_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 69d24759b7..af38b18208 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -501,5 +501,6 @@ log_name = yarr''' # make sure its accurate to 10th of a second self.assertTrue(abs(100 - (time.time() - start) * 100) < 10) + if __name__ == '__main__': unittest.main() From f9fa63686c802ce8d3f2e4e29ecc7fb686835ba9 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 3 Feb 2011 15:23:07 -0600 Subject: [PATCH 187/199] Moved proxy server access log messages into their own log level Added new "access" log level available on swift loggers that will be routed to the LOG_NOTICE priority in syslog for easy redirection of access log messages via rsyslog and syslog-ng. 
--- doc/source/overview_stats.rst | 15 ++++++++------- swift/common/utils.py | 15 +++++++++++++++ swift/proxy/server.py | 2 +- test/unit/common/test_utils.py | 6 ++++++ test/unit/proxy/test_server.py | 2 +- 5 files changed, 31 insertions(+), 9 deletions(-) diff --git a/doc/source/overview_stats.rst b/doc/source/overview_stats.rst index 6364de4611..40a5dd01af 100644 --- a/doc/source/overview_stats.rst +++ b/doc/source/overview_stats.rst @@ -15,9 +15,10 @@ Access logs *********** Access logs are the proxy server logs. Rackspace uses syslog-ng to redirect -the proxy log output to an hourly log file. For example, a proxy request that -is made on August 4, 2010 at 12:37 gets logged in a file named 2010080412. -This allows easy log rotation and easy per-hour log processing. +proxy log messages with the syslog priority LOG_NOTICE to an hourly log +file. For example, a proxy request that is made on August 4, 2010 at 12:37 gets +logged in a file named 2010080412. This allows easy log rotation and easy +per-hour log processing. ****************** Account stats logs @@ -99,11 +100,11 @@ Running the stats system on SAIO destination df_local1 { file("/var/log/swift/proxy.log" owner(<username>) group(<groupname>)); }; destination df_local1_err { file("/var/log/swift/proxy.error" owner(<username>) group(<groupname>)); }; destination df_local1_hourly { file("/var/log/swift/hourly/$YEAR$MONTH$DAY$HOUR" owner(<username>) group(<groupname>)); }; - filter f_local1 { facility(local1) and level(info); }; + filter f_local1 { facility(local1) and level(notice); }; - filter f_local1_err { facility(local1) and not level(info); }; + filter f_local1_err { facility(local1) and not level(notice); }; - # local1.info -/var/log/swift/proxy.log + # local1.notice -/var/log/swift/proxy.log # write to local file and to remove log server log { source(s_all); @@ -181,4 +182,4 @@ earlier. This file will have one entry per account per hour for each account with activity in that hour. 
One .csv file should be produced per hour. Note that the stats will be delayed by at least two hours by default. This can be changed with the new_log_cutoff variable in the config file. See -`log-processing.conf-sample` for more details. \ No newline at end of file +`log-processing.conf-sample` for more details. diff --git a/swift/common/utils.py b/swift/common/utils.py index 8da3d1f8f4..5c462b5d08 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -48,6 +48,11 @@ import logging logging.thread = eventlet.green.thread logging.threading = eventlet.green.threading logging._lock = logging.threading.RLock() +# setup access level logging +ACCESS = 25 +logging._levelNames[ACCESS] = 'ACCESS' +# syslog priority "notice" is used for proxy access log lines +SysLogHandler.priority_map['ACCESS'] = 'notice' # These are lazily pulled from libc elsewhere _sys_fallocate = None @@ -310,6 +315,16 @@ class LogAdapter(object): def getEffectiveLevel(self): return self.logger.getEffectiveLevel() + def access(self, msg, *args): + """ + Convenience function for proxy access request log level. Only + proxy access log messages should use this method. The python + logging lvl is set to 25, just above info. SysLogHandler is + monkey patched to map this log lvl to the LOG_NOTICE syslog + priority. 
+ """ + self.logger.log(ACCESS, msg, *args) + def exception(self, msg, *args): _junk, exc, _junk = sys.exc_info() call = self.logger.error diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 14f79a2c61..dc501faba5 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1790,7 +1790,7 @@ class Application(BaseApplication): if getattr(req, 'client_disconnect', False) or \ getattr(response, 'client_disconnect', False): status_int = 499 - self.logger.info(' '.join(quote(str(x)) for x in ( + self.logger.access(' '.join(quote(str(x)) for x in ( client or '-', req.remote_addr or '-', time.strftime('%d/%b/%Y/%H/%M/%S', time.gmtime()), diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 0c81b15698..1fd18b6ee6 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -303,9 +303,15 @@ Error: unable to locate %s logger.warn('test4') self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\n') + # make sure debug doesn't log by default logger.debug('test5') self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\n') + # make sure access lvl logs by default + logger.access('test6') + self.assertEquals(sio.getvalue(), + 'test1\ntest3\ntest4\ntest6\n') + def test_storage_directory(self): self.assertEquals(utils.storage_directory('objects', '1', 'ABCDEF'), diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index e991d84084..4b0404454f 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1802,7 +1802,7 @@ class TestObjectController(unittest.TestCase): class Logger(object): - def info(self, msg): + def access(self, msg): self.msg = msg orig_logger = prosrv.logger From ee4a9a85ac8763b14deb9c55e6c9be2a163bb5a8 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Fri, 4 Feb 2011 11:16:21 -0800 Subject: [PATCH 188/199] Indexing and integrity changes in dbs. 
--- swift/common/db.py | 125 +++++++++++++++++++++++++-------------------- 1 file changed, 69 insertions(+), 56 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 7040b2446a..4327ffa311 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -166,6 +166,7 @@ class DatabaseBroker(object): self.logger = logger or logging.getLogger() self.account = account self.container = container + self.db_version = -1 def initialize(self, put_timestamp=None): """ @@ -573,7 +574,7 @@ class ContainerBroker(DatabaseBroker): conn.executescript(""" CREATE TABLE object ( ROWID INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT UNIQUE, + name TEXT, created_at TEXT, size INTEGER, content_type TEXT, @@ -581,7 +582,7 @@ class ContainerBroker(DatabaseBroker): deleted INTEGER DEFAULT 0 ); - CREATE INDEX ix_object_deleted ON object (deleted); + CREATE INDEX ix_object_deleted_name ON object (deleted, name); CREATE TRIGGER object_insert AFTER INSERT ON object BEGIN @@ -812,6 +813,12 @@ class ContainerBroker(DatabaseBroker): reported_object_count, reported_bytes_used, hash, id) """ with self.get() as conn: + if self.db_version == -1: + self.db_version = 0 + for row in conn.execute(''' + SELECT name FROM sqlite_master + WHERE name = 'ix_object_deleted_name' '''): + self.db_version = 1 return conn.execute(''' SELECT account, container, created_at, put_timestamp, delete_timestamp, object_count, bytes_used, @@ -906,7 +913,10 @@ class ContainerBroker(DatabaseBroker): elif prefix: query += ' name >= ? AND' query_args.append(prefix) - query += ' +deleted = 0 ORDER BY name LIMIT ?' + if self.db_version < 1: + query += ' +deleted = 0 ORDER BY name LIMIT ?' + else: + query += ' deleted = 0 ORDER BY name LIMIT ?' query_args.append(limit - len(results)) curs = conn.execute(query, query_args) curs.row_factory = None @@ -954,18 +964,19 @@ class ContainerBroker(DatabaseBroker): max_rowid = -1 for rec in item_list: conn.execute(''' - DELETE FROM object WHERE name = ? AND - (created_at < ?) 
+ DELETE FROM object WHERE name = ? AND created_at < ? AND + deleted IN (0, 1) ''', (rec['name'], rec['created_at'])) - try: + if not conn.execute(''' + SELECT name FROM object WHERE name = ? AND + deleted IN (0, 1) + ''', (rec['name'],)).fetchall(): conn.execute(''' INSERT INTO object (name, created_at, size, content_type, etag, deleted) VALUES (?, ?, ?, ?, ?, ?) ''', ([rec['name'], rec['created_at'], rec['size'], rec['content_type'], rec['etag'], rec['deleted']])) - except sqlite3.IntegrityError: - pass if source: max_rowid = max(max_rowid, rec['ROWID']) if source: @@ -1009,7 +1020,7 @@ class AccountBroker(DatabaseBroker): conn.executescript(""" CREATE TABLE container ( ROWID INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT UNIQUE, + name TEXT, put_timestamp TEXT, delete_timestamp TEXT, object_count INTEGER, @@ -1017,8 +1028,9 @@ class AccountBroker(DatabaseBroker): deleted INTEGER DEFAULT 0 ); - CREATE INDEX ix_container_deleted ON container (deleted); - CREATE INDEX ix_container_name ON container (name); + CREATE INDEX ix_container_deleted_name ON + container (deleted, name); + CREATE TRIGGER container_insert AFTER INSERT ON container BEGIN UPDATE account_stat @@ -1287,6 +1299,12 @@ class AccountBroker(DatabaseBroker): bytes_used, hash, id) """ with self.get() as conn: + if self.db_version == -1: + self.db_version = 0 + for row in conn.execute(''' + SELECT name FROM sqlite_master + WHERE name = 'ix_container_deleted_name' '''): + self.db_version = 1 return conn.execute(''' SELECT account, created_at, put_timestamp, delete_timestamp, container_count, object_count, bytes_used, hash, id @@ -1355,7 +1373,10 @@ class AccountBroker(DatabaseBroker): elif prefix: query += ' name >= ? AND' query_args.append(prefix) - query += ' +deleted = 0 ORDER BY name LIMIT ?' + if self.db_version < 1: + query += ' +deleted = 0 ORDER BY name LIMIT ?' + else: + query += ' deleted = 0 ORDER BY name LIMIT ?' 
query_args.append(limit - len(results)) curs = conn.execute(query, query_args) curs.row_factory = None @@ -1399,51 +1420,43 @@ class AccountBroker(DatabaseBroker): record = [rec['name'], rec['put_timestamp'], rec['delete_timestamp'], rec['object_count'], rec['bytes_used'], rec['deleted']] - try: + curs = conn.execute(''' + SELECT name, put_timestamp, delete_timestamp, + object_count, bytes_used, deleted + FROM container WHERE name = ? AND + (put_timestamp < ? OR delete_timestamp < ? OR + object_count != ? OR bytes_used != ?) AND + deleted IN (0, 1)''', + (rec['name'], rec['put_timestamp'], + rec['delete_timestamp'], rec['object_count'], + rec['bytes_used'])) + curs.row_factory = None + row = curs.fetchone() + if row: + row = list(row) + for i in xrange(5): + if record[i] is None and row[i] is not None: + record[i] = row[i] + if row[1] > record[1]: # Keep newest put_timestamp + record[1] = row[1] + if row[2] > record[2]: # Keep newest delete_timestamp + record[2] = row[2] conn.execute(''' - INSERT INTO container (name, put_timestamp, - delete_timestamp, object_count, bytes_used, - deleted) - VALUES (?, ?, ?, ?, ?, ?) - ''', record) - except sqlite3.IntegrityError: - curs = conn.execute(''' - SELECT name, put_timestamp, delete_timestamp, - object_count, bytes_used, deleted - FROM container WHERE name = ? AND - (put_timestamp < ? OR delete_timestamp < ? OR - object_count != ? 
OR bytes_used != ?)''', - (rec['name'], rec['put_timestamp'], - rec['delete_timestamp'], rec['object_count'], - rec['bytes_used'])) - curs.row_factory = None - row = curs.fetchone() - if row: - row = list(row) - for i in xrange(5): - if record[i] is None and row[i] is not None: - record[i] = row[i] - if row[1] > record[1]: # Keep newest put_timestamp - record[1] = row[1] - if row[2] > record[2]: # Keep newest delete_timestamp - record[2] = row[2] - conn.execute('DELETE FROM container WHERE name = ?', - (record[0],)) - # If deleted, mark as such - if record[2] > record[1] and \ - record[3] in (None, '', 0, '0'): - record[5] = 1 - else: - record[5] = 0 - try: - conn.execute(''' - INSERT INTO container (name, put_timestamp, - delete_timestamp, object_count, bytes_used, - deleted) - VALUES (?, ?, ?, ?, ?, ?) - ''', record) - except sqlite3.IntegrityError: - continue + DELETE FROM container WHERE name = ? AND + deleted IN (0, 1) + ''', (record[0],)) + # If deleted, mark as such + if record[2] > record[1] and \ + record[3] in (None, '', 0, '0'): + record[5] = 1 + else: + record[5] = 0 + conn.execute(''' + INSERT INTO container (name, put_timestamp, + delete_timestamp, object_count, bytes_used, + deleted) + VALUES (?, ?, ?, ?, ?, ?) 
+ ''', record) if source: max_rowid = max(max_rowid, rec['ROWID']) if source: From 2fffdfede24f79df757faffd197afff32ca432eb Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Fri, 4 Feb 2011 11:37:35 -0800 Subject: [PATCH 189/199] Move db version resolution to its own function --- swift/common/db.py | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 4327ffa311..2341f8141f 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -166,7 +166,7 @@ class DatabaseBroker(object): self.logger = logger or logging.getLogger() self.account = account self.container = container - self.db_version = -1 + self._db_version = -1 def initialize(self, put_timestamp=None): """ @@ -645,6 +645,15 @@ class ContainerBroker(DatabaseBroker): ''', (self.account, self.container, normalize_timestamp(time.time()), str(uuid4()), put_timestamp)) + def _get_db_version(self, conn): + if self._db_version == -1: + self._db_version = 0 + for row in conn.execute(''' + SELECT name FROM sqlite_master + WHERE name = 'ix_object_deleted_name' '''): + self._db_version = 1 + return self._db_version + def _newid(self, conn): conn.execute(''' UPDATE container_stat @@ -813,12 +822,6 @@ class ContainerBroker(DatabaseBroker): reported_object_count, reported_bytes_used, hash, id) """ with self.get() as conn: - if self.db_version == -1: - self.db_version = 0 - for row in conn.execute(''' - SELECT name FROM sqlite_master - WHERE name = 'ix_object_deleted_name' '''): - self.db_version = 1 return conn.execute(''' SELECT account, container, created_at, put_timestamp, delete_timestamp, object_count, bytes_used, @@ -913,7 +916,7 @@ class ContainerBroker(DatabaseBroker): elif prefix: query += ' name >= ? AND' query_args.append(prefix) - if self.db_version < 1: + if self._get_db_version(conn) < 1: query += ' +deleted = 0 ORDER BY name LIMIT ?' else: query += ' deleted = 0 ORDER BY name LIMIT ?' 
@@ -1094,6 +1097,15 @@ class AccountBroker(DatabaseBroker): ''', (self.account, normalize_timestamp(time.time()), str(uuid4()), put_timestamp)) + def _get_db_version(self, conn): + if self._db_version == -1: + self._db_version = 0 + for row in conn.execute(''' + SELECT name FROM sqlite_master + WHERE name = 'ix_container_deleted_name' '''): + self._db_version = 1 + return self._db_version + def update_put_timestamp(self, timestamp): """ Update the put_timestamp. Only modifies it if it is greater than @@ -1299,12 +1311,6 @@ class AccountBroker(DatabaseBroker): bytes_used, hash, id) """ with self.get() as conn: - if self.db_version == -1: - self.db_version = 0 - for row in conn.execute(''' - SELECT name FROM sqlite_master - WHERE name = 'ix_container_deleted_name' '''): - self.db_version = 1 return conn.execute(''' SELECT account, created_at, put_timestamp, delete_timestamp, container_count, object_count, bytes_used, hash, id @@ -1373,7 +1379,7 @@ class AccountBroker(DatabaseBroker): elif prefix: query += ' name >= ? AND' query_args.append(prefix) - if self.db_version < 1: + if self._get_db_version(conn) < 1: query += ' +deleted = 0 ORDER BY name LIMIT ?' else: query += ' deleted = 0 ORDER BY name LIMIT ?' From 98090b7217c69bba06b2f9ecb1dfaceb29de877d Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Fri, 4 Feb 2011 11:50:30 -0800 Subject: [PATCH 190/199] Fix account db change --- swift/common/db.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 2341f8141f..83cd0e8188 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -1430,12 +1430,8 @@ class AccountBroker(DatabaseBroker): SELECT name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted FROM container WHERE name = ? AND - (put_timestamp < ? OR delete_timestamp < ? OR - object_count != ? OR bytes_used != ?) 
AND - deleted IN (0, 1)''', - (rec['name'], rec['put_timestamp'], - rec['delete_timestamp'], rec['object_count'], - rec['bytes_used'])) + deleted IN (0, 1) + ''', (rec['name'],)) curs.row_factory = None row = curs.fetchone() if row: @@ -1447,16 +1443,16 @@ class AccountBroker(DatabaseBroker): record[1] = row[1] if row[2] > record[2]: # Keep newest delete_timestamp record[2] = row[2] - conn.execute(''' - DELETE FROM container WHERE name = ? AND - deleted IN (0, 1) - ''', (record[0],)) # If deleted, mark as such if record[2] > record[1] and \ record[3] in (None, '', 0, '0'): record[5] = 1 else: record[5] = 0 + conn.execute(''' + DELETE FROM container WHERE name = ? AND + deleted IN (0, 1) + ''', (record[0],)) conn.execute(''' INSERT INTO container (name, put_timestamp, delete_timestamp, object_count, bytes_used, From 0f0e093972dccad7ec00f7e3fd73573aa09f4f46 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Fri, 4 Feb 2011 20:58:31 -0600 Subject: [PATCH 191/199] fix st command help hangs Before running the command function global the main func would start the print and error queues. Inside the command function the the option parser would see the the help option, print the help text, and raise SystemExit, which wasn't getting caught. 
--- bin/st | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/st b/bin/st index 58285423bd..4e6024f84f 100755 --- a/bin/st +++ b/bin/st @@ -1723,7 +1723,7 @@ Example: error_thread.abort = True while error_thread.isAlive(): error_thread.join(0.01) - except Exception: + except (SystemExit, Exception): for thread in threading_enumerate(): thread.abort = True raise From 461bf8df712f1b03ed547bdf0a068aae434d50ef Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Sat, 5 Feb 2011 15:38:49 -0600 Subject: [PATCH 192/199] added new proxy-server configuration options for access_log_facility and access_log_name --- swift/common/utils.py | 15 +++++++-------- swift/common/wsgi.py | 8 ++++---- swift/proxy/server.py | 12 +++++++++++- test/unit/common/test_utils.py | 4 ++-- test/unit/proxy/test_server.py | 10 +++++++--- 5 files changed, 31 insertions(+), 18 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 5c462b5d08..595ad3ac03 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -48,11 +48,11 @@ import logging logging.thread = eventlet.green.thread logging.threading = eventlet.green.threading logging._lock = logging.threading.RLock() -# setup access level logging -ACCESS = 25 -logging._levelNames[ACCESS] = 'ACCESS' +# setup notice level logging +NOTICE = 25 +logging._levelNames[NOTICE] = 'NOTICE' # syslog priority "notice" is used for proxy access log lines -SysLogHandler.priority_map['ACCESS'] = 'notice' +SysLogHandler.priority_map['NOTICE'] = 'notice' # These are lazily pulled from libc elsewhere _sys_fallocate = None @@ -315,15 +315,14 @@ class LogAdapter(object): def getEffectiveLevel(self): return self.logger.getEffectiveLevel() - def access(self, msg, *args): + def notice(self, msg, *args): """ - Convenience function for proxy access request log level. Only - proxy access log messages should use this method. The python + Convenience function for syslog priority LOG_NOTICE. 
The python logging lvl is set to 25, just above info. SysLogHandler is monkey patched to map this log lvl to the LOG_NOTICE syslog priority. """ - self.logger.log(ACCESS, msg, *args) + self.logger.log(NOTICE, msg, *args) def exception(self, msg, *args): _junk, exc, _junk = sys.exc_info() diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index 9450bcf439..e1e6e0c8f1 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -168,10 +168,10 @@ def run_wsgi(conf_file, app_section, *args, **kwargs): signal.signal(signal.SIGHUP, signal.SIG_DFL) signal.signal(signal.SIGTERM, signal.SIG_DFL) run_server() - logger.info('Child %d exiting normally' % os.getpid()) + logger.notice('Child %d exiting normally' % os.getpid()) return else: - logger.info('Started child %s' % pid) + logger.notice('Started child %s' % pid) children.append(pid) try: pid, status = os.wait() @@ -182,8 +182,8 @@ def run_wsgi(conf_file, app_section, *args, **kwargs): if err.errno not in (errno.EINTR, errno.ECHILD): raise except KeyboardInterrupt: - logger.info('User quit') + logger.notice('User quit') break greenio.shutdown_safe(sock) sock.close() - logger.info('Exited') + logger.notice('Exited') diff --git a/swift/proxy/server.py b/swift/proxy/server.py index dc501faba5..c4d8178c61 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1612,6 +1612,16 @@ class BaseApplication(object): self.logger = logger if conf is None: conf = {} + if 'access_log_name' in conf or 'access_log_facility' in conf: + access_log_conf = { + 'log_name': conf.get('access_log_name', conf.get('log_name', + 'proxy-server')), + 'log_facility': conf.get('access_log_facility', + conf.get('log_facility', 'LOG_LOCAL0')), + } + self.access_logger = get_logger(access_log_conf) + else: + self.access_logger = self.logger swift_dir = conf.get('swift_dir', '/etc/swift') self.node_timeout = int(conf.get('node_timeout', 10)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) @@ -1790,7 +1800,7 @@ class 
Application(BaseApplication): if getattr(req, 'client_disconnect', False) or \ getattr(response, 'client_disconnect', False): status_int = 499 - self.logger.access(' '.join(quote(str(x)) for x in ( + self.access_logger.info(' '.join(quote(str(x)) for x in ( client or '-', req.remote_addr or '-', time.strftime('%d/%b/%Y/%H/%M/%S', time.gmtime()), diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 1fd18b6ee6..d709c65d3e 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -307,8 +307,8 @@ Error: unable to locate %s logger.debug('test5') self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\n') - # make sure access lvl logs by default - logger.access('test6') + # make sure notice lvl logs by default + logger.notice('test7') self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\ntest6\n') diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 4b0404454f..9e49b09e74 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1802,11 +1802,12 @@ class TestObjectController(unittest.TestCase): class Logger(object): - def access(self, msg): + def info(self, msg): self.msg = msg orig_logger = prosrv.logger - prosrv.logger = Logger() + orig_access_logger = prosrv.access_logger + prosrv.logger = prosrv.access_logger = Logger() sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write( @@ -1822,11 +1823,13 @@ class TestObjectController(unittest.TestCase): prosrv.logger.msg) exp = 'host1' self.assertEquals(prosrv.logger.msg[:len(exp)], exp) + prosrv.access_logger = orig_access_logger prosrv.logger = orig_logger # Turn on header logging. 
orig_logger = prosrv.logger - prosrv.logger = Logger() + orig_access_logger = prosrv.access_logger + prosrv.logger = prosrv.access_logger = Logger() prosrv.log_headers = True sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -1840,6 +1843,7 @@ class TestObjectController(unittest.TestCase): self.assert_('Goofy-Header%3A%20True' in prosrv.logger.msg, prosrv.logger.msg) prosrv.log_headers = False + prosrv.access_logger = orig_access_logger prosrv.logger = orig_logger def test_chunked_put_utf8_all_the_way_down(self): From bb57e753b02cdd115aa9a440aed4597bc2ab20de Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Thu, 10 Feb 2011 00:01:07 -0800 Subject: [PATCH 193/199] Fix drive-audit's default log_name --- bin/swift-drive-audit | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/swift-drive-audit b/bin/swift-drive-audit index 5203f54b6b..77912e720e 100755 --- a/bin/swift-drive-audit +++ b/bin/swift-drive-audit @@ -99,6 +99,7 @@ if __name__ == '__main__': device_dir = conf.get('device_dir', '/srv/node') minutes = int(conf.get('minutes', 60)) error_limit = int(conf.get('error_limit', 1)) + conf['log_name'] = conf.get('log_name', 'drive-audit') logger = get_logger(conf, log_route='drive-audit') devices = get_devices(device_dir, logger) logger.debug("Devices found: %s" % str(devices)) From f73d7ad52fafe95fbd06771c0bde8131e004b7d1 Mon Sep 17 00:00:00 2001 From: Chuck Thier <cthier@gmail.com> Date: Thu, 10 Feb 2011 10:09:31 -0600 Subject: [PATCH 194/199] Adding python-netifaces to dependencies for packaging docs --- doc/source/debian_package_guide.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/debian_package_guide.rst b/doc/source/debian_package_guide.rst index 4f82f97858..e8086adc16 100644 --- a/doc/source/debian_package_guide.rst +++ b/doc/source/debian_package_guide.rst @@ -107,6 +107,7 @@ Instructions for Deploying Debian Packages for Swift apt-get install rsync python-openssl python-setuptools 
python-webob python-simplejson python-xattr python-greenlet python-eventlet + python-netifaces #. Install base packages:: From 8193e517af7be246da9e2d3a2aca7b85d3242191 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 10 Feb 2011 11:57:51 -0600 Subject: [PATCH 195/199] slightly more consistant stats process log names Also a quick fix to the auditor tests xattr mock --- bin/swift-account-stats-logger | 2 +- bin/swift-log-uploader | 2 +- swift/common/middleware/catch_errors.py | 6 +----- swift/stats/log_uploader.py | 6 +++--- test/unit/obj/test_auditor.py | 3 ++- 5 files changed, 8 insertions(+), 11 deletions(-) diff --git a/bin/swift-account-stats-logger b/bin/swift-account-stats-logger index 7b95b20249..6256b690b5 100755 --- a/bin/swift-account-stats-logger +++ b/bin/swift-account-stats-logger @@ -23,4 +23,4 @@ if __name__ == '__main__': # currently AccountStat only supports run_once options['once'] = True run_daemon(AccountStat, conf_file, section_name='log-processor-stats', - **options) + log_name="account-stats", **options) diff --git a/bin/swift-log-uploader b/bin/swift-log-uploader index 93cb8f6f97..7c36e2c2cc 100755 --- a/bin/swift-log-uploader +++ b/bin/swift-log-uploader @@ -34,7 +34,7 @@ if __name__ == '__main__': uploader_conf.update(plugin_conf) # pre-configure logger - logger = utils.get_logger(uploader_conf, plugin, log_route='log-uploader', + logger = utils.get_logger(uploader_conf, log_route='log-uploader', log_to_console=options.get('verbose', False)) # currently LogUploader only supports run_once options['once'] = True diff --git a/swift/common/middleware/catch_errors.py b/swift/common/middleware/catch_errors.py index 16ade84689..716bda4da1 100644 --- a/swift/common/middleware/catch_errors.py +++ b/swift/common/middleware/catch_errors.py @@ -26,11 +26,7 @@ class CatchErrorMiddleware(object): def __init__(self, app, conf): self.app = app - # if the application already has a logger we should use that one - 
self.logger = getattr(app, 'logger', None) - if not self.logger: - # and only call get_logger if we have to - self.logger = get_logger(conf, log_route='catch-errors') + self.logger = get_logger(conf, log_route='catch-errors') def __call__(self, env, start_response): try: diff --git a/swift/stats/log_uploader.py b/swift/stats/log_uploader.py index a828188eb7..d87d799324 100644 --- a/swift/stats/log_uploader.py +++ b/swift/stats/log_uploader.py @@ -64,9 +64,9 @@ class LogUploader(Daemon): self.container_name = container_name self.filename_format = source_filename_format self.internal_proxy = InternalProxy(proxy_server_conf) - log_name = 'swift-log-uploader-%s' % plugin_name - self.logger = \ - utils.get_logger(uploader_conf, plugin_name, log_route=plugin_name) + log_name = '%s-log-uploader' % plugin_name + self.logger = utils.get_logger(uploader_conf, log_name, + log_route=plugin_name) def run_once(self): self.logger.info(_("Uploading logs")) diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 66540a3693..14d58480dd 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -14,7 +14,7 @@ # limitations under the License. # TODO: Tests -from test import unit as _setup_mocks +from test import unit import unittest import tempfile import os @@ -57,6 +57,7 @@ class TestAuditor(unittest.TestCase): def tearDown(self): rmtree(os.path.dirname(self.testdir), ignore_errors=1) + unit.xattr_data = {} def test_object_audit_extra_data(self): self.auditor = auditor.ObjectAuditor(self.conf) From 5d0bc6b9c76756a07648f04b4a309677fbec3635 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 10 Feb 2011 14:59:52 -0600 Subject: [PATCH 196/199] logging refactor to support proxy access logs New log level "notice" set to python log level 25 maps to syslog priority LOG_NOTICE. Used for some messages in the proxy server, but will be available to all apps using the LogAdapter returned from get_logger. 
Cleaned up some code in get_logger so that console logging works with log_routes and removed some unneeded bits. NamedFormatter functionality was split between LogAdapter (which now inherits from logging.LoggerAdapter) and TxnFormatter (which now is only responsible for adding the log records txn_id). The proxy server app now configures a separate logger for access line logging. By default it will use the same settings as the regular proxy logger. --- bin/swift-bench | 8 +- doc/source/overview_stats.rst | 15 +-- swift/common/utils.py | 149 +++++++++------------ swift/proxy/server.py | 24 ++-- test/unit/common/test_utils.py | 234 +++++++++++++++++++++++++++------ test/unit/proxy/test_server.py | 141 ++++++++++++++++++-- 6 files changed, 412 insertions(+), 159 deletions(-) diff --git a/bin/swift-bench b/bin/swift-bench index 3c167ee06f..0554782a06 100755 --- a/bin/swift-bench +++ b/bin/swift-bench @@ -22,7 +22,7 @@ import uuid from optparse import OptionParser from swift.common.bench import BenchController -from swift.common.utils import readconf, LogAdapter, NamedFormatter +from swift.common.utils import readconf, LogAdapter # The defaults should be sufficient to run swift-bench on a SAIO CONF_DEFAULTS = { @@ -125,9 +125,9 @@ if __name__ == '__main__': options.log_level.lower(), logging.INFO)) loghandler = logging.StreamHandler() logger.addHandler(loghandler) - logger = LogAdapter(logger) - logformat = NamedFormatter('swift-bench', logger, - fmt='%(server)s %(asctime)s %(levelname)s %(message)s') + logger = LogAdapter(logger, 'swift-bench') + logformat = logging.Formatter('%(server)s %(asctime)s %(levelname)s ' + '%(message)s') loghandler.setFormatter(logformat) controller = BenchController(logger, options) diff --git a/doc/source/overview_stats.rst b/doc/source/overview_stats.rst index 40a5dd01af..6364de4611 100644 --- a/doc/source/overview_stats.rst +++ b/doc/source/overview_stats.rst @@ -15,10 +15,9 @@ Access logs *********** Access logs are the proxy server 
logs. Rackspace uses syslog-ng to redirect -proxy log messages with the syslog priority LOG_NOTICE to an hourly log -file. For example, a proxy request that is made on August 4, 2010 at 12:37 gets -logged in a file named 2010080412. This allows easy log rotation and easy -per-hour log processing. +the proxy log output to an hourly log file. For example, a proxy request that +is made on August 4, 2010 at 12:37 gets logged in a file named 2010080412. +This allows easy log rotation and easy per-hour log processing. ****************** Account stats logs @@ -100,11 +99,11 @@ Running the stats system on SAIO destination df_local1 { file("/var/log/swift/proxy.log" owner(<username>) group(<groupname>)); }; destination df_local1_err { file("/var/log/swift/proxy.error" owner(<username>) group(<groupname>)); }; destination df_local1_hourly { file("/var/log/swift/hourly/$YEAR$MONTH$DAY$HOUR" owner(<username>) group(<groupname>)); }; - filter f_local1 { facility(local1) and level(notice); }; + filter f_local1 { facility(local1) and level(info); }; - filter f_local1_err { facility(local1) and not level(notice); }; + filter f_local1_err { facility(local1) and not level(info); }; - # local1.notice -/var/log/swift/proxy.log + # local1.info -/var/log/swift/proxy.log # write to local file and to remove log server log { source(s_all); @@ -182,4 +181,4 @@ earlier. This file will have one entry per account per hour for each account with activity in that hour. One .csv file should be produced per hour. Note that the stats will be delayed by at least two hours by default. This can be changed with the new_log_cutoff variable in the config file. See -`log-processing.conf-sample` for more details. +`log-processing.conf-sample` for more details. 
\ No newline at end of file diff --git a/swift/common/utils.py b/swift/common/utils.py index 595ad3ac03..3ba291e266 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -289,7 +289,8 @@ class LoggerFileObject(object): return self -class LogAdapter(object): +# double inhereitence to support property with setter +class LogAdapter(logging.LoggerAdapter, object): """ A Logger like object which performs some reformatting on calls to :meth:`exception`. Can be used to store a threadlocal transaction id. @@ -297,11 +298,10 @@ class LogAdapter(object): _txn_id = threading.local() - def __init__(self, logger): - self.logger = logger - for proxied_method in ('debug', 'log', 'warn', 'warning', 'error', - 'critical', 'info'): - setattr(self, proxied_method, getattr(logger, proxied_method)) + def __init__(self, logger, server): + logging.LoggerAdapter.__init__(self, logger, {}) + self.server = server + setattr(self, 'warn', self.warning) @property def txn_id(self): @@ -315,24 +315,34 @@ class LogAdapter(object): def getEffectiveLevel(self): return self.logger.getEffectiveLevel() - def notice(self, msg, *args): + def process(self, msg, kwargs): + """ + Add extra info to message + """ + kwargs['extra'] = {'server': self.server, 'txn_id': self.txn_id} + return msg, kwargs + + def notice(self, msg, *args, **kwargs): """ Convenience function for syslog priority LOG_NOTICE. The python logging lvl is set to 25, just above info. SysLogHandler is monkey patched to map this log lvl to the LOG_NOTICE syslog priority. 
""" - self.logger.log(NOTICE, msg, *args) + self.log(NOTICE, msg, *args, **kwargs) - def exception(self, msg, *args): + def _exception(self, msg, *args, **kwargs): + logging.LoggerAdapter.exception(self, msg, *args, **kwargs) + + def exception(self, msg, *args, **kwargs): _junk, exc, _junk = sys.exc_info() - call = self.logger.error + call = self.error emsg = '' if isinstance(exc, OSError): if exc.errno in (errno.EIO, errno.ENOSPC): emsg = str(exc) else: - call = self.logger.exception + call = self._exception elif isinstance(exc, socket.error): if exc.errno == errno.ECONNREFUSED: emsg = _('Connection refused') @@ -341,7 +351,7 @@ class LogAdapter(object): elif exc.errno == errno.ETIMEDOUT: emsg = _('Connection timeout') else: - call = self.logger.exception + call = self._exception elif isinstance(exc, eventlet.Timeout): emsg = exc.__class__.__name__ if hasattr(exc, 'seconds'): @@ -350,53 +360,25 @@ class LogAdapter(object): if exc.msg: emsg += ' %s' % exc.msg else: - call = self.logger.exception - call('%s: %s' % (msg, emsg), *args) + call = self._exception + call('%s: %s' % (msg, emsg), *args, **kwargs) -class NamedFormatter(logging.Formatter): +class TxnFormatter(logging.Formatter): """ - NamedFormatter is used to add additional information to log messages. - Normally it will simply add the server name as an attribute on the - LogRecord and the default format string will include it at the - begining of the log message. Additionally, if the transaction id is - available and not already included in the message, NamedFormatter will - add it. - - NamedFormatter may be initialized with a format string which makes use - of the standard LogRecord attributes. 
In addition the format string - may include the following mapping key: - - +----------------+---------------------------------------------+ - | Format | Description | - +================+=============================================+ - | %(server)s | Name of the swift server doing logging | - +----------------+---------------------------------------------+ - - :param server: the swift server name, a string. - :param logger: a Logger or :class:`LogAdapter` instance, additional - context may be pulled from attributes on this logger if - available. - :param fmt: the format string used to construct the message, if none is - supplied it defaults to ``"%(server)s %(message)s"`` + Custom logging.Formatter will append txn_id to a log message if the record + has one and the message does not. """ - - def __init__(self, server, logger, - fmt="%(server)s %(message)s"): - logging.Formatter.__init__(self, fmt) - self.server = server - self.logger = logger - def format(self, record): - record.server = self.server msg = logging.Formatter.format(self, record) - if self.logger.txn_id and (record.levelno != logging.INFO or - self.logger.txn_id not in msg): - msg = "%s (txn: %s)" % (msg, self.logger.txn_id) + if (record.txn_id and record.levelno != logging.INFO and + record.txn_id not in msg): + msg = "%s (txn: %s)" % (msg, record.txn_id) return msg -def get_logger(conf, name=None, log_to_console=False, log_route=None): +def get_logger(conf, name=None, log_to_console=False, log_route=None, + fmt="%(server)s %(message)s"): """ Get the current system logger using config settings. 
@@ -412,48 +394,46 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None): """ if not conf: conf = {} - if not hasattr(get_logger, 'root_logger_configured'): - get_logger.root_logger_configured = True - get_logger(conf, name, log_to_console, log_route='root') if name is None: name = conf.get('log_name', 'swift') if not log_route: log_route = name - if log_route == 'root': - logger = logging.getLogger() - else: - logger = logging.getLogger(log_route) - logger.propagate = False - if not hasattr(get_logger, 'handler4facility'): - get_logger.handler4facility = {} - facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), - SysLogHandler.LOG_LOCAL0) - if facility in get_logger.handler4facility: - logger.removeHandler(get_logger.handler4facility[facility]) - get_logger.handler4facility[facility].close() - del get_logger.handler4facility[facility] - if log_to_console: - # check if a previous call to get_logger already added a console logger - if hasattr(get_logger, 'console') and get_logger.console: - logger.removeHandler(get_logger.console) - get_logger.console = logging.StreamHandler(sys.__stderr__) - logger.addHandler(get_logger.console) - get_logger.handler4facility[facility] = \ - SysLogHandler(address='/dev/log', facility=facility) + logger = logging.getLogger(log_route) + logger.propagate = False + # all swift new handlers will get the same formatter + formatter = TxnFormatter(fmt) + + # a single swift logger will only get one SysLog Handler if not hasattr(get_logger, 'handler4logger'): get_logger.handler4logger = {} if logger in get_logger.handler4logger: logger.removeHandler(get_logger.handler4logger[logger]) - get_logger.handler4logger[logger] = \ - get_logger.handler4facility[facility] - logger.addHandler(get_logger.handler4facility[facility]) + + # facility for this logger will be set by last call wins + facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'), + SysLogHandler.LOG_LOCAL0) + handler = 
SysLogHandler(address='/dev/log', facility=facility) + handler.setFormatter(formatter) + logger.addHandler(handler) + get_logger.handler4logger[logger] = handler + + # setup console logging + if log_to_console or hasattr(get_logger, 'console_handler4logger'): + # remove pre-existing console handler for this logger + if not hasattr(get_logger, 'console_handler4logger'): + get_logger.console_handler4logger = {} + if logger in get_logger.console_handler4logger: + logger.removeHandler(get_logger.console_handler4logger[logger]) + + console_handler = logging.StreamHandler(sys.__stderr__) + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + get_logger.console_handler4logger[logger] = console_handler + + # set the level for the logger logger.setLevel( getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO)) - adapted_logger = LogAdapter(logger) - formatter = NamedFormatter(name, adapted_logger) - get_logger.handler4facility[facility].setFormatter(formatter) - if hasattr(get_logger, 'console'): - get_logger.console.setFormatter(formatter) + adapted_logger = LogAdapter(logger, name) return adapted_logger @@ -486,8 +466,9 @@ def capture_stdio(logger, **kwargs): # collect stdio file desc not in use for logging stdio_fds = [0, 1, 2] - if hasattr(get_logger, 'console'): - stdio_fds.remove(get_logger.console.stream.fileno()) + for _junk, handler in getattr(get_logger, + 'console_handler4logger', {}).items(): + stdio_fds.remove(handler.stream.fileno()) with open(os.devnull, 'r+b') as nullfile: # close stdio (excludes fds open for logging) diff --git a/swift/proxy/server.py b/swift/proxy/server.py index c4d8178c61..b2b41d6434 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -1606,22 +1606,20 @@ class BaseApplication(object): def __init__(self, conf, memcache=None, logger=None, account_ring=None, container_ring=None, object_ring=None): - if logger is None: - self.logger = get_logger(conf) - else: - self.logger = logger if 
conf is None: conf = {} - if 'access_log_name' in conf or 'access_log_facility' in conf: - access_log_conf = { - 'log_name': conf.get('access_log_name', conf.get('log_name', - 'proxy-server')), - 'log_facility': conf.get('access_log_facility', - conf.get('log_facility', 'LOG_LOCAL0')), - } - self.access_logger = get_logger(access_log_conf) + if logger is None: + self.logger = get_logger(conf) + access_log_conf = {} + for key in ('log_facility', 'log_name', 'log_level'): + value = conf.get('access_' + key, conf.get(key, None)) + if value: + access_log_conf[key] = value + self.access_logger = get_logger(access_log_conf, + log_route='proxy-access') else: - self.access_logger = self.logger + self.logger = self.access_logger = logger + swift_dir = conf.get('swift_dir', '/etc/swift') self.node_timeout = int(conf.get('node_timeout', 10)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index d709c65d3e..959caa8919 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -19,6 +19,7 @@ from __future__ import with_statement import logging import mimetools import os +import errno import socket import sys import time @@ -31,6 +32,8 @@ from tempfile import NamedTemporaryFile from eventlet import sleep +from swift.common.exceptions import TimeoutError, MessageTimeout, \ + ConnectionTimeout from swift.common import utils @@ -76,6 +79,17 @@ class MockSys(): __stderr__ = sys.__stderr__ +def reset_loggers(): + if hasattr(utils.get_logger, 'handler4logger'): + for logger, handler in utils.get_logger.handler4logger.items(): + logger.removeHandler(handler) + delattr(utils.get_logger, 'handler4logger') + if hasattr(utils.get_logger, 'console_handler4logger'): + for logger, h in utils.get_logger.console_handler4logger.items(): + logger.removeHandler(h) + delattr(utils.get_logger, 'console_handler4logger') + + class TestUtils(unittest.TestCase): """ Tests for swift.common.utils 
""" @@ -308,10 +322,131 @@ Error: unable to locate %s self.assertEquals(sio.getvalue(), 'test1\ntest3\ntest4\n') # make sure notice lvl logs by default - logger.notice('test7') - self.assertEquals(sio.getvalue(), - 'test1\ntest3\ntest4\ntest6\n') + logger.notice('test6') + def test_clean_logger_exception(self): + # setup stream logging + sio = StringIO() + logger = utils.get_logger(None) + handler = logging.StreamHandler(sio) + logger.logger.addHandler(handler) + + def strip_value(sio): + v = sio.getvalue() + sio.truncate(0) + return v + + def log_exception(exc): + try: + raise exc + except (Exception, TimeoutError): + logger.exception('blah') + try: + # establish base case + self.assertEquals(strip_value(sio), '') + logger.info('test') + self.assertEquals(strip_value(sio), 'test\n') + self.assertEquals(strip_value(sio), '') + logger.info('test') + logger.info('test') + self.assertEquals(strip_value(sio), 'test\ntest\n') + self.assertEquals(strip_value(sio), '') + + # test OSError + for en in (errno.EIO, errno.ENOSPC): + log_exception(OSError(en, 'my %s error message' % en)) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('my %s error message' % en in log_msg) + # unfiltered + log_exception(OSError()) + self.assert_('Traceback' in strip_value(sio)) + + # test socket.error + log_exception(socket.error(errno.ECONNREFUSED, + 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('errno.ECONNREFUSED message test' not in log_msg) + self.assert_('Connection refused' in log_msg) + log_exception(socket.error(errno.EHOSTUNREACH, + 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('my error message' not in log_msg) + self.assert_('Host unreachable' in log_msg) + log_exception(socket.error(errno.ETIMEDOUT, 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('my error 
message' not in log_msg) + self.assert_('Connection timeout' in log_msg) + # unfiltered + log_exception(socket.error(0, 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' in log_msg) + self.assert_('my error message' in log_msg) + + # test eventlet.Timeout + log_exception(ConnectionTimeout(42, 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('ConnectionTimeout' in log_msg) + self.assert_('(42s)' in log_msg) + self.assert_('my error message' not in log_msg) + log_exception(MessageTimeout(42, 'my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' not in log_msg) + self.assert_('MessageTimeout' in log_msg) + self.assert_('(42s)' in log_msg) + self.assert_('my error message' in log_msg) + + # test unhandled + log_exception(Exception('my error message')) + log_msg = strip_value(sio) + self.assert_('Traceback' in log_msg) + self.assert_('my error message' in log_msg) + + finally: + logger.logger.removeHandler(handler) + reset_loggers() + + def test_txn_formatter(self): + # setup stream logging + sio = StringIO() + logger = utils.get_logger(None) + handler = logging.StreamHandler(sio) + handler.setFormatter(utils.TxnFormatter()) + logger.logger.addHandler(handler) + + def strip_value(sio): + v = sio.getvalue() + sio.truncate(0) + return v + + try: + self.assertFalse(logger.txn_id) + logger.error('my error message') + log_msg = strip_value(sio) + self.assert_('my error message' in log_msg) + self.assert_('txn' not in log_msg) + logger.txn_id = '12345' + logger.error('test') + log_msg = strip_value(sio) + self.assert_('txn' in log_msg) + self.assert_('12345' in log_msg) + # test no txn on info message + self.assertEquals(logger.txn_id, '12345') + logger.info('test') + log_msg = strip_value(sio) + self.assert_('txn' not in log_msg) + self.assert_('12345' not in log_msg) + # test txn already in message + self.assertEquals(logger.txn_id, '12345') + logger.warn('test 12345 
test') + self.assertEquals(strip_value(sio), 'test 12345 test\n') + finally: + logger.logger.removeHandler(handler) + reset_loggers() def test_storage_directory(self): self.assertEquals(utils.storage_directory('objects', '1', 'ABCDEF'), @@ -397,56 +532,71 @@ log_name = yarr''' logger = utils.get_logger(None, 'dummy') # mock utils system modules - utils.sys = MockSys() - utils.os = MockOs() + _orig_sys = utils.sys + _orig_os = utils.os + try: + utils.sys = MockSys() + utils.os = MockOs() - # basic test - utils.capture_stdio(logger) - self.assert_(utils.sys.excepthook is not None) - self.assertEquals(utils.os.closed_fds, [0, 1, 2]) - self.assert_(utils.sys.stdout is not None) - self.assert_(utils.sys.stderr is not None) + # basic test + utils.capture_stdio(logger) + self.assert_(utils.sys.excepthook is not None) + self.assertEquals(utils.os.closed_fds, [0, 1, 2]) + self.assert_(utils.sys.stdout is not None) + self.assert_(utils.sys.stderr is not None) - # reset; test same args, but exc when trying to close stdio - utils.os = MockOs(raise_funcs=('dup2',)) - utils.sys = MockSys() + # reset; test same args, but exc when trying to close stdio + utils.os = MockOs(raise_funcs=('dup2',)) + utils.sys = MockSys() - # test unable to close stdio - utils.capture_stdio(logger) - self.assert_(utils.sys.excepthook is not None) - self.assertEquals(utils.os.closed_fds, []) - self.assert_(utils.sys.stdout is not None) - self.assert_(utils.sys.stderr is not None) + # test unable to close stdio + utils.capture_stdio(logger) + self.assert_(utils.sys.excepthook is not None) + self.assertEquals(utils.os.closed_fds, []) + self.assert_(utils.sys.stdout is not None) + self.assert_(utils.sys.stderr is not None) - # reset; test some other args - logger = utils.get_logger(None, log_to_console=True) - utils.os = MockOs() - utils.sys = MockSys() + # reset; test some other args + logger = utils.get_logger(None, log_to_console=True) + utils.os = MockOs() + utils.sys = MockSys() - # test console log 
- utils.capture_stdio(logger, capture_stdout=False, - capture_stderr=False) - self.assert_(utils.sys.excepthook is not None) - # when logging to console, stderr remains open - self.assertEquals(utils.os.closed_fds, [0, 1]) - logger.logger.removeHandler(utils.get_logger.console) - # stdio not captured - self.assertFalse(hasattr(utils.sys, 'stdout')) - self.assertFalse(hasattr(utils.sys, 'stderr')) + # test console log + utils.capture_stdio(logger, capture_stdout=False, + capture_stderr=False) + self.assert_(utils.sys.excepthook is not None) + # when logging to console, stderr remains open + self.assertEquals(utils.os.closed_fds, [0, 1]) + reset_loggers() + + # stdio not captured + self.assertFalse(hasattr(utils.sys, 'stdout')) + self.assertFalse(hasattr(utils.sys, 'stderr')) + reset_loggers() + finally: + utils.sys = _orig_sys + utils.os = _orig_os def test_get_logger_console(self): - reload(utils) # reset get_logger attrs + reset_loggers() logger = utils.get_logger(None) - self.assertFalse(hasattr(utils.get_logger, 'console')) + console_handlers = [h for h in logger.logger.handlers if + isinstance(h, logging.StreamHandler)] + self.assertFalse(console_handlers) logger = utils.get_logger(None, log_to_console=True) - self.assert_(hasattr(utils.get_logger, 'console')) - self.assert_(isinstance(utils.get_logger.console, - logging.StreamHandler)) + console_handlers = [h for h in logger.logger.handlers if + isinstance(h, logging.StreamHandler)] + self.assert_(console_handlers) # make sure you can't have two console handlers - old_handler = utils.get_logger.console + self.assertEquals(len(console_handlers), 1) + old_handler = console_handlers[0] logger = utils.get_logger(None, log_to_console=True) - self.assertNotEquals(utils.get_logger.console, old_handler) - logger.logger.removeHandler(utils.get_logger.console) + console_handlers = [h for h in logger.logger.handlers if + isinstance(h, logging.StreamHandler)] + self.assertEquals(len(console_handlers), 1) + new_handler = 
console_handlers[0] + self.assertNotEquals(new_handler, old_handler) + reset_loggers() def test_ratelimit_sleep(self): running_time = 0 diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 9e49b09e74..71b61e1e2c 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -16,6 +16,7 @@ from __future__ import with_statement import cPickle as pickle import logging +from logging.handlers import SysLogHandler import os import sys import unittest @@ -465,8 +466,138 @@ class TestController(unittest.TestCase): test(404, 507, 503) test(503, 503, 503) + class TestProxyServer(unittest.TestCase): + def test_access_log(self): + + class MyApp(proxy_server.Application): + + def handle_request(self, req): + resp = Response(request=req) + req.response = resp + req.start_time = time() + return resp + + def start_response(*args): + pass + + class MockLogger(): + + def __init__(self): + self.buffer = StringIO() + + def info(self, msg, args=None): + if args: + msg = msg % args + self.buffer.write(msg) + + def strip_value(self): + rv = self.buffer.getvalue() + self.buffer.truncate(0) + return rv + + class SnarfStream(object): + # i can't seem to subclass cStringIO + + def __init__(self, *args, **kwargs): + self.sio = StringIO() + + def strip_value(self): + rv = self.getvalue().strip() + self.truncate(0) + return rv + + def __getattr__(self, name): + try: + return object.__getattr__(self, name) + except AttributeError: + try: + return getattr(self.sio, name) + except AttributeError: + return self.__getattribute__(name) + + snarf = SnarfStream() + _orig_get_logger = proxy_server.get_logger + + def mock_get_logger(*args, **kwargs): + if kwargs.get('log_route') != 'proxy-access': + return _orig_get_logger(*args, **kwargs) + kwargs['log_route'] = 'snarf' + logger = _orig_get_logger(*args, **kwargs) + if [h for h in logger.logger.handlers if + isinstance(h, logging.StreamHandler) and h.stream is snarf]: + # snarf handler already setup! 
+ return logger + formatter = logger.logger.handlers[0].formatter + formatter._fmt += ' %(levelname)s' + snarf_handler = logging.StreamHandler(snarf) + snarf_handler.setFormatter(formatter) + logger.logger.addHandler(snarf_handler) + return logger + + def test_conf(conf): + app = MyApp(conf, memcache=FakeMemcache(), account_ring=FakeRing(), + container_ring=FakeRing(), object_ring=FakeRing()) + req = Request.blank('') + app(req.environ, start_response) + + try: + proxy_server.get_logger = mock_get_logger + test_conf({}) + line = snarf.strip_value() + print line + self.assert_(line.startswith('swift')) + self.assert_(line.endswith('INFO')) + test_conf({'log_name': 'snarf-test'}) + line = snarf.strip_value() + print line + self.assert_(line.startswith('snarf-test')) + self.assert_(line.endswith('INFO')) + test_conf({'log_name': 'snarf-test', 'log_level': 'ERROR'}) + line = snarf.strip_value() + print line + self.assertFalse(line) + test_conf({'log_name': 'snarf-test', 'log_level': 'ERROR', + 'access_log_name': 'access-test', + 'access_log_level': 'INFO'}) + line = snarf.strip_value() + print line + self.assert_(line.startswith('access-test')) + self.assert_(line.endswith('INFO')) + + # test facility + def get_facility(logger): + h = [h for h in logger.logger.handlers if + isinstance(h, SysLogHandler)][0] + return h.facility + + conf = {'log_facility': 'LOG_LOCAL0'} + app = MyApp(conf, memcache=FakeMemcache(), account_ring=FakeRing(), + container_ring=FakeRing(), object_ring=FakeRing()) + self.assertEquals(get_facility(app.logger), + SysLogHandler.LOG_LOCAL0) + self.assertEquals(get_facility(app.access_logger), + SysLogHandler.LOG_LOCAL0) + conf = {'log_facility': 'LOG_LOCAL0', + 'access_log_facility': 'LOG_LOCAL1'} + app = MyApp(conf, memcache=FakeMemcache(), account_ring=FakeRing(), + container_ring=FakeRing(), object_ring=FakeRing()) + self.assertEquals(get_facility(app.logger), + SysLogHandler.LOG_LOCAL0) + self.assertEquals(get_facility(app.access_logger), + 
SysLogHandler.LOG_LOCAL1) + conf = {'access_log_facility': 'LOG_LOCAL1'} + app = MyApp(conf, memcache=FakeMemcache(), account_ring=FakeRing(), + container_ring=FakeRing(), object_ring=FakeRing()) + self.assertEquals(get_facility(app.logger), + SysLogHandler.LOG_LOCAL0) + self.assertEquals(get_facility(app.access_logger), + SysLogHandler.LOG_LOCAL1) + + finally: + proxy_server.get_logger = _orig_get_logger + def test_unhandled_exception(self): class MyApp(proxy_server.Application): @@ -1805,8 +1936,7 @@ class TestObjectController(unittest.TestCase): def info(self, msg): self.msg = msg - orig_logger = prosrv.logger - orig_access_logger = prosrv.access_logger + orig_logger, orig_access_logger = prosrv.logger, prosrv.access_logger prosrv.logger = prosrv.access_logger = Logger() sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -1823,12 +1953,8 @@ class TestObjectController(unittest.TestCase): prosrv.logger.msg) exp = 'host1' self.assertEquals(prosrv.logger.msg[:len(exp)], exp) - prosrv.access_logger = orig_access_logger - prosrv.logger = orig_logger # Turn on header logging. 
- orig_logger = prosrv.logger - orig_access_logger = prosrv.access_logger prosrv.logger = prosrv.access_logger = Logger() prosrv.log_headers = True sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -1843,8 +1969,7 @@ class TestObjectController(unittest.TestCase): self.assert_('Goofy-Header%3A%20True' in prosrv.logger.msg, prosrv.logger.msg) prosrv.log_headers = False - prosrv.access_logger = orig_access_logger - prosrv.logger = orig_logger + prosrv.logger, prosrv.access_logger = orig_logger, orig_access_logger def test_chunked_put_utf8_all_the_way_down(self): # Test UTF-8 Unicode all the way through the system From 5082b6d38958234351756757385f0301ff704087 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 10 Feb 2011 15:05:53 -0600 Subject: [PATCH 197/199] updated proxy-server.conf-sample to include access_log_* defaults --- etc/proxy-server.conf-sample | 3 +++ 1 file changed, 3 insertions(+) diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index fad511ca30..3af7db0f8a 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -24,6 +24,9 @@ use = egg:swift#proxy # set log_name = proxy-server # set log_facility = LOG_LOCAL0 # set log_level = INFO +# set access_log_name = proxy-server +# set access_log_facility = LOG_LOCAL0 +# set access_log_level = INFO # set log_headers = False # recheck_account_existence = 60 # recheck_container_existence = 60 From c973bf53fb08efd93943511f367b1fb5075fde41 Mon Sep 17 00:00:00 2001 From: Clay Gerrard <clay.gerrard@rackspace.com> Date: Thu, 10 Feb 2011 15:23:59 -0600 Subject: [PATCH 198/199] cleaned up some comments --- swift/common/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 3ba291e266..4df8b624bc 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -51,7 +51,6 @@ logging._lock = logging.threading.RLock() # setup notice level logging NOTICE 
= 25 logging._levelNames[NOTICE] = 'NOTICE' -# syslog priority "notice" is used for proxy access log lines SysLogHandler.priority_map['NOTICE'] = 'notice' # These are lazily pulled from libc elsewhere @@ -289,7 +288,7 @@ class LoggerFileObject(object): return self -# double inhereitence to support property with setter +# double inheritance to support property with setter class LogAdapter(logging.LoggerAdapter, object): """ A Logger like object which performs some reformatting on calls to @@ -391,6 +390,7 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None, :param conf: Configuration dict to read settings from :param name: Name of the logger :param log_to_console: Add handler which writes to console on stderr + :param fmt: Override log format """ if not conf: conf = {} @@ -400,10 +400,10 @@ def get_logger(conf, name=None, log_to_console=False, log_route=None, log_route = name logger = logging.getLogger(log_route) logger.propagate = False - # all swift new handlers will get the same formatter + # all new handlers will get the same formatter formatter = TxnFormatter(fmt) - # a single swift logger will only get one SysLog Handler + # get_logger will only ever add one SysLog Handler to a logger if not hasattr(get_logger, 'handler4logger'): get_logger.handler4logger = {} if logger in get_logger.handler4logger: From 0c0920701a95d0b96c3e50dff31f7b1acc81b905 Mon Sep 17 00:00:00 2001 From: gholt <gholt@rackspace.com> Date: Thu, 10 Feb 2011 15:10:53 -0800 Subject: [PATCH 199/199] PEP8 Fixes --- swift/common/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/common/db.py b/swift/common/db.py index 83cd0e8188..9f322e7b7d 100644 --- a/swift/common/db.py +++ b/swift/common/db.py @@ -287,7 +287,7 @@ class DatabaseBroker(object): self.conn = None orig_isolation_level = conn.isolation_level conn.isolation_level = None - conn.execute('PRAGMA journal_mode = DELETE') # remove any journal files + conn.execute('PRAGMA journal_mode = 
DELETE') # remove journal files conn.execute('BEGIN IMMEDIATE') try: yield True @@ -295,7 +295,7 @@ class DatabaseBroker(object): pass try: conn.execute('ROLLBACK') - conn.execute('PRAGMA journal_mode = WAL') # back to WAL mode + conn.execute('PRAGMA journal_mode = WAL') # back to WAL mode conn.isolation_level = orig_isolation_level self.conn = conn except Exception: