Added some de-facto style guidelines to HACKING and fixed violations

This patch adds several guidelines (illustrated briefly below):

* Global constants should be ALL_CAPS (cfg => CFG)
* Prefer single-quotes over double-quotes ("foo" => 'foo')
* Place a space before TODO in comments ("#TODO" => "# TODO")
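For illustration, a minimal, hypothetical before/after sketch of the three
conventions (the names and values are made up, not taken from the tree):

    # Before: lowercase global used as a constant, double quotes, cramped TODO
    timeout = 30
    greeting = "hello"  #TODO(kgriffs): localize this string

    # After: ALL_CAPS constant, single quotes, a space before TODO
    TIMEOUT = 30
    greeting = 'hello'  # TODO(kgriffs): localize this string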

Change-Id: Ib5b5c5916744856eca2ecaa37e949a3cdc4b3bd7
Author: kgriffs, 2013-06-14 14:38:26 -04:00
Parent: 38370ee30d
Commit: baf3d2e372
39 changed files with 561 additions and 539 deletions

View File

@@ -12,9 +12,27 @@ General
 - Put two newlines between top-level code (funcs, classes, etc.)
 - Put one newline between methods in classes and anywhere else.
 - Use blank lines to group related logic.
-- Do not write "except:", use "except Exception:" at the very least.
-- Include your name with TODOs as in "#TODO(termie)".
-- All classes must inherit from "object" (explicitly).
+- Never write ``except:`` (use ``except Exception:`` instead, at
+  the very least).
+- All classes must inherit from ``object`` (explicitly).
+- Use single-quotes for strings unless the string contains a
+  single-quote.
+- Use the double-quote character for blockquotes (``"""``, not ``'''``)
+- USE_ALL_CAPS_FOR_GLOBAL_CONSTANTS
+
+Comments
+--------
+- In general, use comments as "memory pegs" for those coming after you up
+  the trail.
+- Guide the reader through long functions with comments introducing
+  different sections of the code.
+- Choose clean, descriptive names for functions and variables to make
+  them self-documenting.
+- Include your name with TODOs as in ``# TODO(termie): blah blah...``.
+- Add ``# NOTE(termie): blah blah...`` comments to clarify your intent, or
+  to explain a tricky algorithm, when it isn't obvious from just reading
+  the code.
 
 Identifiers
 -----------
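A short, hypothetical example of the comment style described in the new
Comments section (the function and names are illustrative only):

    def summarize(records):
        # NOTE(kgriffs): Records are assumed to be pre-sorted, so a
        # single pass is sufficient here.
        total = 0
        for value in records:
            # TODO(kgriffs): Consider batching once the driver
            # supports bulk reads.
            total += value
        return total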

View File

@@ -15,13 +15,13 @@
 # Import guard. No module level import during the setup procedure.
 try:
-    if __MARCONI_SETUP__:
+    if __MARCONI_SETUP__:  # NOQA
         import sys as _sys
         _sys.stderr.write('Running from marconi source directory.\n')
         del _sys
 except NameError:
     import gettext
-    gettext.install("marconi", unicode=1)
+    gettext.install('marconi', unicode=1)
 import marconi.bootstrap
 Bootstrap = marconi.bootstrap.Bootstrap

View File

@@ -19,11 +19,11 @@ from marconi.common import config
 from marconi.common import decorators
 from marconi.common import exceptions
 from marconi.openstack.common import log
-from marconi import transport  # NOQA.
+from marconi import transport  # NOQA
-cfg_handle = config.project('marconi')
-cfg = config.namespace('drivers').from_options(
+PROJECT_CFG = config.project('marconi')
+CFG = config.namespace('drivers').from_options(
     transport='wsgi',
     storage='sqlite')
@@ -38,15 +38,15 @@ class Bootstrap(object):
     """
     def __init__(self, config_file=None, cli_args=None):
-        cfg_handle.load(filename=config_file, args=cli_args)
-        log.setup("marconi")
+        PROJECT_CFG.load(filename=config_file, args=cli_args)
+        log.setup('marconi')
     @decorators.lazy_property(write=False)
     def storage(self):
-        LOG.debug(_("Loading Storage Driver"))
+        LOG.debug(_('Loading Storage Driver'))
         try:
             mgr = driver.DriverManager('marconi.storage',
-                                       cfg.storage,
+                                       CFG.storage,
                                        invoke_on_load=True)
             return mgr.driver
         except RuntimeError as exc:
@@ -54,10 +54,10 @@ class Bootstrap(object):
     @decorators.lazy_property(write=False)
     def transport(self):
-        LOG.debug(_("Loading Transport Driver"))
+        LOG.debug(_('Loading Transport Driver'))
         try:
             mgr = driver.DriverManager('marconi.transport',
-                                       cfg.transport,
+                                       CFG.transport,
                                        invoke_on_load=True,
                                        invoke_args=[self.storage])
             return mgr.driver
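For context, the storage and transport properties above resolve driver names
from CFG through a DriverManager. A minimal sketch of that lookup, assuming
the `driver` module imported by bootstrap.py is stevedore's and that an
'sqlite' plugin is registered under the 'marconi.storage' entry-point
namespace:

    from stevedore import driver

    mgr = driver.DriverManager('marconi.storage',  # entry-point namespace
                               'sqlite',           # e.g., CFG.storage
                               invoke_on_load=True)
    storage_driver = mgr.driver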

View File

@@ -19,7 +19,7 @@ from marconi import bootstrap
 def fail(returncode, e):
-    sys.stderr.write("ERROR: %s\n" % e)
+    sys.stderr.write('ERROR: %s\n' % e)
     sys.exit(returncode)

View File

@@ -22,7 +22,7 @@ A config variable `foo` is a read-only property accessible through
 , where `CFG` is either a global configuration accessible through
     CFG = config.project('marconi').from_options(
-        foo=("bar", "usage"),
+        foo=('bar', 'usage'),
         ...)
 , or a local configuration associated with a namespace
@@ -44,7 +44,7 @@ sections named by their associated namespaces.
 To load the configurations from a file:
     PROJECT_CFG = config.project('marconi')
-    PROJECT_CFG.load(filename="/path/to/example.conf")
+    PROJECT_CFG.load(filename='/path/to/example.conf')
 A call to `.load` without a filename looks up for the default ones:
@@ -54,7 +54,7 @@ A call to `.load` without a filename looks up for the default ones:
 Global config variables, if any, can also be read from the command line
 arguments:
-    PROJECT_CFG.load(filename="example.conf", args=sys.argv[1:])
+    PROJECT_CFG.load(filename='example.conf', args=sys.argv[1:])
 """
 from oslo.config import cfg
@@ -197,4 +197,4 @@ def _make_opt(name, default):
     try:
         return deduction[type(default)](name, help=help, default=default)
     except KeyError:
-        raise cfg.Error("unrecognized option type")
+        raise cfg.Error('unrecognized option type')
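Pulling the docstring fragments above together, a minimal usage sketch of
these config helpers (the file path and option values are illustrative; the
behavior follows the docstring, not a verified run):

    import sys

    from marconi.common import config

    PROJECT_CFG = config.project('marconi')
    LOCAL_CFG = config.namespace('drivers').from_options(
        transport='wsgi',
        storage='sqlite')

    # Load the project config file; global options may also be
    # overridden from the command line.
    PROJECT_CFG.load(filename='/path/to/example.conf', args=sys.argv[1:])

    # Each option is exposed as a read-only property on its config object.
    print(LOCAL_CFG.transport)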

View File

@@ -120,7 +120,7 @@ class QueueBase(ControllerBase):
         :returns: True if a queue was created and False
                   if it was updated.
         """
-        msg = _("Metadata should be an instance of dict")
+        msg = _('Metadata should be an instance of dict')
         assert isinstance(metadata, dict), msg
     @abc.abstractmethod

View File

@@ -47,9 +47,9 @@ class MessageConflict(Conflict):
             posted. Note that these must be in the same order as the
             list of messages originally submitted to be enqueued.
         """
-        msg = (_("Message could not be enqueued due to a conflict "
-                 "with another message that is already in "
-                 "queue %(queue)s for project %(project)s") %
+        msg = (_('Message could not be enqueued due to a conflict '
+                 'with another message that is already in '
+                 'queue %(queue)s for project %(project)s') %
               dict(queue=queue, project=project))
        super(MessageConflict, self).__init__(msg)
@@ -64,7 +64,7 @@ class MessageConflict(Conflict):
 class QueueDoesNotExist(DoesNotExist):
     def __init__(self, name, project):
-        msg = (_("Queue %(name)s does not exist for project %(project)s") %
+        msg = (_('Queue %(name)s does not exist for project %(project)s') %
               dict(name=name, project=project))
        super(QueueDoesNotExist, self).__init__(msg)
@@ -72,8 +72,8 @@ class QueueDoesNotExist(DoesNotExist):
 class MessageDoesNotExist(DoesNotExist):
     def __init__(self, mid, queue, project):
-        msg = (_("Message %(mid)s does not exist in "
-                 "queue %(queue)s for project %(project)s") %
+        msg = (_('Message %(mid)s does not exist in '
+                 'queue %(queue)s for project %(project)s') %
               dict(mid=mid, queue=queue, project=project))
        super(MessageDoesNotExist, self).__init__(msg)
@@ -81,8 +81,8 @@ class MessageDoesNotExist(DoesNotExist):
 class ClaimDoesNotExist(DoesNotExist):
     def __init__(self, cid, queue, project):
-        msg = (_("Claim %(cid)s does not exist in "
-                 "queue %(queue)s for project %(project)s") %
+        msg = (_('Claim %(cid)s does not exist in '
+                 'queue %(queue)s for project %(project)s') %
               dict(cid=cid, queue=queue, project=project))
        super(ClaimDoesNotExist, self).__init__(msg)
@@ -90,6 +90,6 @@ class ClaimDoesNotExist(DoesNotExist):
 class ClaimNotPermitted(NotPermitted):
     def __init__(self, mid, cid):
-        msg = (_("Message %(mid)s is not claimed by %(cid)s") %
+        msg = (_('Message %(mid)s is not claimed by %(cid)s') %
               dict(cid=cid, mid=mid))
        super(ClaimNotPermitted, self).__init__(msg)

View File

@@ -56,20 +56,20 @@ class QueueController(storage.QueueBase):
     def __init__(self, *args, **kwargs):
         super(QueueController, self).__init__(*args, **kwargs)
-        self._col = self.driver.db["queues"]
+        self._col = self.driver.db['queues']
         # NOTE(flaper87): This creates a unique compound index for
         # project and name. Using project as the first field of the
         # index allows for querying by project and project+name.
         # This is also useful for retrieving the queues list for
         # as specific project, for example. Order Matters!
-        self._col.ensure_index([("p", 1), ("n", 1)], unique=True)
+        self._col.ensure_index([('p', 1), ('n', 1)], unique=True)
     #-----------------------------------------------------------------------
     # Helpers
     #-----------------------------------------------------------------------
-    def _get(self, name, project=None, fields={"m": 1, "_id": 0}):
-        queue = self._col.find_one({"p": project, "n": name}, fields=fields)
+    def _get(self, name, project=None, fields={'m': 1, '_id': 0}):
+        queue = self._col.find_one({'p': project, 'n': name}, fields=fields)
         if queue is None:
             raise exceptions.QueueDoesNotExist(name, project)
@@ -80,13 +80,13 @@ class QueueController(storage.QueueBase):
         :returns: Queue's `ObjectId`
         """
-        queue = self._get(name, project, fields=["_id"])
-        return queue.get("_id")
+        queue = self._get(name, project, fields=['_id'])
+        return queue.get('_id')
     def _get_ids(self):
         """Returns a generator producing a list of all queue IDs."""
-        cursor = self._col.find({}, fields={"_id": 1})
-        return (doc["_id"] for doc in cursor)
+        cursor = self._col.find({}, fields={'_id': 1})
+        return (doc['_id'] for doc in cursor)
     #-----------------------------------------------------------------------
     # Interface
@@ -94,47 +94,47 @@ class QueueController(storage.QueueBase):
     def list(self, project=None, marker=None,
              limit=10, detailed=False):
-        query = {"p": project}
+        query = {'p': project}
         if marker:
-            query["n"] = {"$gt": marker}
-        fields = {"n": 1, "_id": 0}
+            query['n'] = {'$gt': marker}
+        fields = {'n': 1, '_id': 0}
         if detailed:
-            fields["m"] = 1
+            fields['m'] = 1
         cursor = self._col.find(query, fields=fields)
-        cursor = cursor.limit(limit).sort("n")
+        cursor = cursor.limit(limit).sort('n')
         marker_name = {}
         def normalizer(records):
             for rec in records:
-                queue = {"name": rec["n"]}
-                marker_name["next"] = queue["name"]
+                queue = {'name': rec['n']}
+                marker_name['next'] = queue['name']
                 if detailed:
-                    queue["metadata"] = rec["m"]
+                    queue['metadata'] = rec['m']
                 yield queue
         yield normalizer(cursor)
-        yield marker_name["next"]
+        yield marker_name['next']
     def get(self, name, project=None):
         queue = self._get(name, project)
-        return queue.get("m", {})
+        return queue.get('m', {})
     def upsert(self, name, metadata, project=None):
         super(QueueController, self).upsert(name, metadata, project)
-        rst = self._col.update({"p": project, "n": name},
-                               {"$set": {"m": metadata, "c": 1}},
+        rst = self._col.update({'p': project, 'n': name},
+                               {'$set': {'m': metadata, 'c': 1}},
                                multi=False,
                                upsert=True,
                               manipulate=False)
-        return not rst["updatedExisting"]
+        return not rst['updatedExisting']
     def delete(self, name, project=None):
         self.driver.message_controller._purge_queue(name, project)
-        self._col.remove({"p": project, "n": name})
+        self._col.remove({'p': project, 'n': name})
     def stats(self, name, project=None):
         queue_id = self._get_id(name, project)
@@ -143,10 +143,10 @@ class QueueController(storage.QueueBase):
         claimed = controller.claimed(queue_id)
         return {
-            "actions": 0,
-            "messages": {
-                "claimed": claimed.count(),
-                "free": active.count(),
+            'actions': 0,
+            'messages': {
+                'claimed': claimed.count(),
+                'free': active.count(),
             }
         }
@@ -179,7 +179,7 @@ class MessageController(storage.MessageBase):
         # Make sure indexes exist before,
         # doing anything.
-        self._col = self._db["messages"]
+        self._col = self._db['messages']
         # NOTE(flaper87): This index is used mostly in the
         # active method but some parts of it are used in
@@ -189,27 +189,27 @@ class MessageController(storage.MessageBase):
         #   * e: Together with q is used for getting a
         #      specific message. (see `get`)
         active_fields = [
-            ("q", 1),
-            ("e", 1),
-            ("c.e", 1),
-            ("k", 1),
-            ("_id", -1),
+            ('q', 1),
+            ('e', 1),
+            ('c.e', 1),
+            ('k', 1),
+            ('_id', -1),
         ]
         self._col.ensure_index(active_fields,
-                               name="active",
+                               name='active',
                               background=True)
         # Index used for claims
         claimed_fields = [
-            ("q", 1),
-            ("c.id", 1),
-            ("c.e", 1),
-            ("_id", -1),
+            ('q', 1),
+            ('c.id', 1),
+            ('c.e', 1),
+            ('_id', -1),
         ]
         self._col.ensure_index(claimed_fields,
-                               name="claimed",
+                               name='claimed',
                               background=True)
         # Index used for _next_marker() and also to ensure
@@ -222,8 +222,8 @@ class MessageController(storage.MessageBase):
         # to miss a message when there is more than one
         # producer posting messages to the same queue, in
         # parallel.
-        self._col.ensure_index([("q", 1), ("k", -1)],
-                               name="queue_marker",
+        self._col.ensure_index([('q', 1), ('k', -1)],
+                               name='queue_marker',
                                unique=True,
                               background=True)
@@ -260,12 +260,12 @@ class MessageController(storage.MessageBase):
         :returns: next message marker as an integer
         """
-        document = self._col.find_one({"q": queue_id},
-                                      sort=[("k", -1)],
-                                      fields={"k": 1, "_id": 0})
-        # NOTE(kgriffs): this approach is faster than using "or"
-        return 1 if document is None else (document["k"] + 1)
+        document = self._col.find_one({'q': queue_id},
+                                      sort=[('k', -1)],
+                                      fields={'k': 1, '_id': 0})
+        # NOTE(kgriffs): this approach is faster than using 'or'
+        return 1 if document is None else (document['k'] + 1)
     def _backoff_sleep(self, attempt):
         """Sleep between retries using a jitter algorithm.
@@ -289,8 +289,8 @@ class MessageController(storage.MessageBase):
         """
         query = {
-            "q": queue_id,
-            "e": {"$lte": timeutils.utcnow()},
+            'q': queue_id,
+            'e': {'$lte': timeutils.utcnow()},
         }
         return self._col.find(query).count()
@@ -313,19 +313,19 @@ class MessageController(storage.MessageBase):
         if options.CFG.gc_threshold <= self._count_expired(queue_id):
             # Get the message with the highest marker, and leave
             # it in the queue
-            head = self._col.find_one({"q": queue_id},
-                                      sort=[("k", -1)],
-                                      fields={"_id": 1})
+            head = self._col.find_one({'q': queue_id},
+                                      sort=[('k', -1)],
+                                      fields={'_id': 1})
             if head is None:
                 # Assume queue was just deleted via a parallel request
-                LOG.warning(_("Queue %s is empty or missing.") % queue_id)
+                LOG.warning(_('Queue %s is empty or missing.') % queue_id)
                 return
             query = {
-                "q": queue_id,
-                "e": {"$lte": timeutils.utcnow()},
-                "_id": {"$ne": head["_id"]}
+                'q': queue_id,
+                'e': {'$lte': timeutils.utcnow()},
+                '_id': {'$ne': head['_id']}
             }
             self._col.remove(query)
@@ -344,7 +344,7 @@ class MessageController(storage.MessageBase):
         """
         try:
             qid = self._get_queue_id(queue, project)
-            self._col.remove({"q": qid}, w=0)
+            self._col.remove({'q': qid}, w=0)
         except exceptions.QueueDoesNotExist:
             pass
@@ -362,36 +362,36 @@ class MessageController(storage.MessageBase):
         query = {
             # Messages must belong to this queue
-            "q": utils.to_oid(queue_id),
+            'q': utils.to_oid(queue_id),
             # The messages can not be expired
-            "e": {"$gt": now},
+            'e': {'$gt': now},
             # Include messages that are part of expired claims
-            "c.e": {"$lte": now},
+            'c.e': {'$lte': now},
         }
         if fields and not isinstance(fields, (dict, list)):
-            raise TypeError(_("Fields must be an instance of list / dict"))
+            raise TypeError(_('Fields must be an instance of list / dict'))
         if not echo and client_uuid:
-            query["u"] = {"$ne": client_uuid}
+            query['u'] = {'$ne': client_uuid}
         if marker:
-            query["k"] = {"$gt": marker}
+            query['k'] = {'$gt': marker}
         return self._col.find(query, fields=fields)
     def claimed(self, queue_id, claim_id=None, expires=None, limit=None):
         query = {
-            "c.id": claim_id,
-            "c.e": {"$gt": expires or timeutils.utcnow()},
-            "q": utils.to_oid(queue_id),
+            'c.id': claim_id,
+            'c.e': {'$gt': expires or timeutils.utcnow()},
+            'q': utils.to_oid(queue_id),
         }
         if not claim_id:
             # lookup over c.id to use the index
-            query["c.id"] = {"$ne": None}
-        msgs = self._col.find(query, sort=[("_id", 1)])
+            query['c.id'] = {'$ne': None}
+        msgs = self._col.find(query, sort=[('_id', 1)])
         if limit:
             msgs = msgs.limit(limit)
@@ -399,15 +399,15 @@ class MessageController(storage.MessageBase):
         now = timeutils.utcnow()
         def denormalizer(msg):
-            oid = msg["_id"]
+            oid = msg['_id']
             age = now - utils.oid_utc(oid)
             return {
-                "id": str(oid),
-                "age": age.seconds,
-                "ttl": msg["t"],
-                "body": msg["b"],
-                "claim": msg["c"]
+                'id': str(oid),
+                'age': age.seconds,
+                'ttl': msg['t'],
+                'body': msg['b'],
+                'claim': msg['c']
             }
         return utils.HookedCursor(msgs, denormalizer)
@@ -418,8 +418,8 @@ class MessageController(storage.MessageBase):
         except ValueError:
             return
-        self._col.update({"c.id": cid},
-                         {"$set": {"c": {"id": None, "e": 0}}},
+        self._col.update({'c.id': cid},
+                         {'$set': {'c': {'id': None, 'e': 0}}},
                          upsert=False, multi=True)
     def remove_expired(self, project=None):
@@ -443,7 +443,7 @@ class MessageController(storage.MessageBase):
         """
         # TODO(kgriffs): Optimize first by batching the .removes, second
-        # by setting a "last inserted ID" in the queue collection for
+        # by setting a 'last inserted ID' in the queue collection for
         # each message inserted (TBD, may cause problematic side-effect),
         # and third, by changing the marker algorithm such that it no
         # longer depends on retaining the last message in the queue!
@@ -462,21 +462,21 @@ class MessageController(storage.MessageBase):
         qid = self._get_queue_id(queue, project)
         messages = self.active(qid, marker, echo, client_uuid)
-        messages = messages.limit(limit).sort("_id")
+        messages = messages.limit(limit).sort('_id')
         marker_id = {}
         now = timeutils.utcnow()
         def denormalizer(msg):
-            oid = msg["_id"]
+            oid = msg['_id']
             age = now - utils.oid_utc(oid)
-            marker_id['next'] = msg["k"]
+            marker_id['next'] = msg['k']
             return {
-                "id": str(oid),
-                "age": age.seconds,
-                "ttl": msg["t"],
-                "body": msg["b"],
+                'id': str(oid),
+                'age': age.seconds,
+                'ttl': msg['t'],
+                'body': msg['b'],
             }
         yield utils.HookedCursor(messages, denormalizer)
@@ -488,9 +488,9 @@ class MessageController(storage.MessageBase):
         # Base query, always check expire time
         query = {
-            "q": self._get_queue_id(queue, project),
-            "e": {"$gt": now},
-            "_id": mid
+            'q': self._get_queue_id(queue, project),
+            'e': {'$gt': now},
+            '_id': mid
         }
         message = self._col.find_one(query)
@@ -498,14 +498,14 @@ class MessageController(storage.MessageBase):
         if message is None:
             raise exceptions.MessageDoesNotExist(message_id, queue, project)
-        oid = message["_id"]
+        oid = message['_id']
         age = now - utils.oid_utc(oid)
         return {
-            "id": str(oid),
-            "age": age.seconds,
-            "ttl": message["t"],
-            "body": message["b"],
+            'id': str(oid),
+            'age': age.seconds,
+            'ttl': message['t'],
+            'body': message['b'],
         }
     def post(self, queue, messages, client_uuid, project=None):
@@ -527,13 +527,13 @@ class MessageController(storage.MessageBase):
         # cached in case we need to retry any of them.
         message_gen = (
             {
-                "t": message["ttl"],
-                "q": queue_id,
-                "e": now + datetime.timedelta(seconds=message["ttl"]),
-                "u": client_uuid,
-                "c": {"id": None, "e": now},
-                "b": message["body"] if "body" in message else {},
-                "k": next_marker + index,
+                't': message['ttl'],
+                'q': queue_id,
+                'e': now + datetime.timedelta(seconds=message['ttl']),
+                'u': client_uuid,
+                'c': {'id': None, 'e': now},
+                'b': message['body'] if 'body' in message else {},
+                'k': next_marker + index,
             }
             for index, message in enumerate(messages)
@@ -556,9 +556,9 @@ class MessageController(storage.MessageBase):
                 # Log a message if we retried, for debugging perf issues
                 if attempt != 0:
-                    message = _("%(attempts)d attempt(s) required to post "
-                                "%(num_messages)d messages to queue "
-                                "%(queue_id)s")
+                    message = _('%(attempts)d attempt(s) required to post '
+                                '%(num_messages)d messages to queue '
+                                '%(queue_id)s')
                     message %= dict(queue_id=queue_id, attempts=attempt + 1,
                                     num_messages=len(ids))
@@ -576,8 +576,8 @@ class MessageController(storage.MessageBase):
                 #
                 # TODO(kgriffs): Add transaction ID to help match up loglines
                 if attempt == 0:
-                    message = _("First attempt failed while adding messages "
-                                "to queue %s for current request") % queue_id
+                    message = _('First attempt failed while adding messages '
+                                'to queue %s for current request') % queue_id
                     LOG.debug(message)
@@ -600,7 +600,7 @@ class MessageController(storage.MessageBase):
                 # Put the successful one's IDs into aggregated_results.
                 succeeded_messages = cached_messages[:failed_index]
-                succeeded_ids = [msg["_id"] for msg in succeeded_messages]
+                succeeded_ids = [msg['_id'] for msg in succeeded_messages]
                 # Results are aggregated across all attempts
                 if aggregated_results is None:
@@ -613,7 +613,7 @@ class MessageController(storage.MessageBase):
                 prepared_messages = cached_messages[failed_index:]
                 next_marker = self._next_marker(queue_id)
                 for index, message in enumerate(prepared_messages):
-                    message["k"] = next_marker + index
+                    message['k'] = next_marker + index
                 # Chill out to avoid thrashing/thundering
                 self._backoff_sleep(attempt)
@@ -627,8 +627,8 @@ class MessageController(storage.MessageBase):
                 LOG.exception(ex)
                 raise
-        message = _("Hit maximum number of attempts (%(max)s) for queue "
-                    "%(id)s in project %(project)s")
+        message = _('Hit maximum number of attempts (%(max)s) for queue '
+                    '%(id)s in project %(project)s')
         message %= dict(max=options.CFG.max_attempts, id=queue_id,
                         project=project)
@@ -642,13 +642,13 @@ class MessageController(storage.MessageBase):
             mid = utils.to_oid(message_id)
             query = {
-                "q": self._get_queue_id(queue, project),
-                "_id": mid
+                'q': self._get_queue_id(queue, project),
+                '_id': mid
             }
             if claim:
                 now = timeutils.utcnow()
-                query["e"] = {"$gt": now}
+                query['e'] = {'$gt': now}
                 message = self._col.find_one(query)
                 if message is None:
@@ -656,12 +656,12 @@ class MessageController(storage.MessageBase):
                 cid = utils.to_oid(claim)
-                if not ("c" in message and
-                        message["c"]["id"] == cid and
-                        message["c"]["e"] > now):
+                if not ('c' in message and
+                        message['c']['id'] == cid and
+                        message['c']['e'] > now):
                     raise exceptions.ClaimNotPermitted(message_id, claim)
-                self._col.remove(query["_id"], w=0)
+                self._col.remove(query['_id'], w=0)
             else:
                 self._col.remove(query, w=0)
         except exceptions.QueueDoesNotExist:
@@ -711,12 +711,12 @@ class ClaimController(storage.ClaimBase):
         def messages(msg_iter):
             msg = msg_iter.next()
-            yield msg.pop("claim")
+            yield msg.pop('claim')
             yield msg
             # Smoke it!
             for msg in msg_iter:
-                del msg["claim"]
+                del msg['claim']
                 yield msg
         try:
@@ -726,9 +726,9 @@ class ClaimController(storage.ClaimBase):
            messages = messages(msg_ctrl.claimed(qid, cid, now))
            claim = messages.next()
            claim = {
-                "age": age.seconds,
-                "ttl": claim.pop("t"),
-                "id": str(claim["id"]),
+                'age': age.seconds,
+                'ttl': claim.pop('t'),
+                'id': str(claim['id']),
            }
        except StopIteration:
            raise exceptions.ClaimDoesNotExist(cid, queue, project)
@@ -759,7 +759,7 @@ class ClaimController(storage.ClaimBase):
        # we need to verify it exists.
        qid = self._get_queue_id(queue, project)
-        ttl = int(metadata.get("ttl", 60))
+        ttl = int(metadata.get('ttl', 60))
        oid = objectid.ObjectId()
        now = timeutils.utcnow()
@@ -767,15 +767,15 @@ class ClaimController(storage.ClaimBase):
        expires = now + ttl_delta
        meta = {
-            "id": oid,
-            "t": ttl,
-            "e": expires,
+            'id': oid,
+            't': ttl,
+            'e': expires,
        }
        # Get a list of active, not claimed nor expired
        # messages that could be claimed.
-        msgs = msg_ctrl.active(qid, fields={"_id": 1})
-        msgs = msgs.limit(limit).sort("_id")
+        msgs = msg_ctrl.active(qid, fields={'_id': 1})
+        msgs = msgs.limit(limit).sort('_id')
        messages = iter([])
@@ -784,29 +784,29 @@ class ClaimController(storage.ClaimBase):
        if msgs.count(True) == 0:
            return (str(oid), messages)
-        ids = [msg["_id"] for msg in msgs]
+        ids = [msg['_id'] for msg in msgs]
        now = timeutils.utcnow()
        # Set claim field for messages in ids
-        updated = msg_ctrl._col.update({"_id": {"$in": ids},
-                                        "$or": [
-                                            {"c.id": None},
+        updated = msg_ctrl._col.update({'_id': {'$in': ids},
+                                        '$or': [
+                                            {'c.id': None},
                                            {
-                                                "c.id": {"$ne": None},
-                                                "c.e": {"$lte": now}
+                                                'c.id': {'$ne': None},
+                                                'c.e': {'$lte': now}
                                            }
                                        ]},
-                                       {"$set": {"c": meta}}, upsert=False,
-                                       multi=True)["n"]
+                                       {'$set': {'c': meta}}, upsert=False,
+                                       multi=True)['n']
        # NOTE(flaper87): Dirty hack!
        # This sets the expiration time to
        # `expires` on messages that would
        # expire before claim.
-        msg_ctrl._col.update({"q": queue,
-                              "e": {"$lt": expires},
-                              "c.id": oid},
-                             {"$set": {"e": expires, "t": ttl}},
+        msg_ctrl._col.update({'q': queue,
+                              'e': {'$lt': expires},
+                              'c.id': oid},
+                             {'$set': {'e': expires, 't': ttl}},
                             upsert=False, multi=True)
        if updated != 0:
@@ -820,13 +820,13 @@ class ClaimController(storage.ClaimBase):
            raise exceptions.ClaimDoesNotExist(claim_id, queue, project)
        now = timeutils.utcnow()
-        ttl = int(metadata.get("ttl", 60))
+        ttl = int(metadata.get('ttl', 60))
        ttl_delta = datetime.timedelta(seconds=ttl)
        expires = now + ttl_delta
        if now > expires:
-            msg = _("New ttl will make the claim expires")
+            msg = _('New ttl will make the claim expires')
            raise ValueError(msg)
        qid = self._get_queue_id(queue, project)
@@ -839,23 +839,23 @@ class ClaimController(storage.ClaimBase):
            raise exceptions.ClaimDoesNotExist(claim_id, queue, project)
        meta = {
-            "id": cid,
-            "t": ttl,
-            "e": expires,
+            'id': cid,
+            't': ttl,
+            'e': expires,
        }
-        msg_ctrl._col.update({"q": qid, "c.id": cid},
-                             {"$set": {"c": meta}},
+        msg_ctrl._col.update({'q': qid, 'c.id': cid},
+                             {'$set': {'c': meta}},
                             upsert=False, multi=True)
        # NOTE(flaper87): Dirty hack!
        # This sets the expiration time to
        # `expires` on messages that would
        # expire before claim.
-        msg_ctrl._col.update({"q": qid,
-                              "e": {"$lt": expires},
-                              "c.id": cid},
-                             {"$set": {"e": expires, "t": ttl}},
+        msg_ctrl._col.update({'q': qid,
+                              'e': {'$lt': expires},
+                              'c.id': cid},
+                             {'$set': {'e': expires, 't': ttl}},
                             upsert=False, multi=True)
    def delete(self, queue, claim_id, project=None):

View File

@@ -46,7 +46,7 @@ class Driver(storage.DriverBase):
         return self._database
     def gc(self):
-        LOG.info("Performing garbage collection.")
+        LOG.info('Performing garbage collection.')
         try:
             self.message_controller.remove_expired()

View File

@@ -20,28 +20,28 @@ from marconi.common import config
 OPTIONS = {
     # Connection string
-    "uri": None,
+    'uri': None,
     # Database name
     # TODO(kgriffs): Consider local sharding across DBs to mitigate
     # per-DB locking latency.
-    "database": "marconi",
+    'database': 'marconi',
     # Maximum number of times to retry a failed operation. Currently
     # only used for retrying a message post.
-    "max_attempts": 1000,
+    'max_attempts': 1000,
     # Maximum sleep interval between retries (actual sleep time
     # increases linearly according to number of attempts performed).
-    "max_retry_sleep": 0.1,
+    'max_retry_sleep': 0.1,
     # Maximum jitter interval, to be added to the sleep interval, in
     # order to decrease probability that parallel requests will retry
     # at the same instant.
-    "max_retry_jitter": 0.005,
+    'max_retry_jitter': 0.005,
     # Frequency of message garbage collections, in seconds
-    "gc_interval": 5 * 60,
+    'gc_interval': 5 * 60,
     # Threshold of number of expired messages to reach in a given
     # queue, before performing the GC. Useful for reducing frequent
@@ -51,7 +51,7 @@ OPTIONS = {
     #
     # Note: The higher this number, the larger the memory-mapped DB
     # files will be.
-    "gc_threshold": 1000,
+    'gc_threshold': 1000,
 }
 CFG = config.namespace('drivers:storage:mongodb').from_options(**OPTIONS)
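Elsewhere in this patch (see the controller hunks above) these options are
read back as attributes of the CFG namespace. A brief sketch, assuming this
module is importable as marconi.storage.mongodb.options:

    from marconi.storage.mongodb import options

    # Each key in OPTIONS becomes an attribute on options.CFG.
    print('retry up to %d times; gc after %d expired messages'
          % (options.CFG.max_attempts, options.CFG.gc_threshold))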

View File

@@ -25,7 +25,7 @@ from marconi.openstack.common import timeutils
 from marconi.storage import exceptions as storage_exceptions
-DUP_MARKER_REGEX = re.compile(r"\$queue_marker\s+dup key: { : [^:]+: (\d)+")
+DUP_MARKER_REGEX = re.compile(r'\$queue_marker\s+dup key: { : [^:]+: (\d)+')
 def dup_marker_from_error(error_message):
@@ -39,7 +39,7 @@ def dup_marker_from_error(error_message):
     """
     match = DUP_MARKER_REGEX.search(error_message)
     if match is None:
-        description = (_("Error message could not be parsed: %s") %
+        description = (_('Error message could not be parsed: %s') %
                        error_message)
         raise exceptions.PatternNotFound(description)
@@ -91,16 +91,16 @@ def calculate_backoff(attempt, max_attempts, max_sleep, max_jitter=0):
     the ratio attempt / max_attempts, with optional jitter.
     """
     if max_attempts < 0:
-        raise ValueError("max_attempts must be >= 0")
+        raise ValueError('max_attempts must be >= 0')
     if max_sleep < 0:
-        raise ValueError("max_sleep must be >= 0")
+        raise ValueError('max_sleep must be >= 0')
     if max_jitter < 0:
-        raise ValueError("max_jitter must be >= 0")
+        raise ValueError('max_jitter must be >= 0')
     if not (0 <= attempt < max_attempts):
-        raise ValueError("attempt value is out of range")
+        raise ValueError('attempt value is out of range')
     ratio = float(attempt) / float(max_attempts)
     backoff_sec = ratio * max_sleep
@@ -123,7 +123,7 @@ def to_oid(obj):
     try:
         return objectid.ObjectId(obj)
     except (TypeError, berrors.InvalidId):
-        msg = _("Wrong id %s") % obj
+        msg = _('Wrong id %s') % obj
         raise storage_exceptions.MalformedID(msg)
@@ -132,7 +132,7 @@ def oid_utc(oid):
     try:
         return timeutils.normalize_time(oid.generation_time)
     except AttributeError:
-        raise TypeError(_("Expected ObjectId and got %s") % type(oid))
+        raise TypeError(_('Expected ObjectId and got %s') % type(oid))
 class HookedCursor(object):
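For reference, a simplified sketch of the backoff calculation described in
the calculate_backoff hunk above; the linear ratio comes straight from the
code shown, while the jitter term is an assumption about how max_jitter is
applied:

    import random

    def simple_backoff(attempt, max_attempts, max_sleep, max_jitter=0):
        # Sleep time grows linearly with the attempt number...
        ratio = float(attempt) / float(max_attempts)
        backoff_sec = ratio * max_sleep
        # ...plus a small random jitter to spread out parallel retries.
        return backoff_sec + random.uniform(0, max_jitter)

    # E.g., attempt 3 of 1000, with the driver defaults shown above:
    print(simple_backoff(3, 1000, 0.1, 0.005))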

View File

@@ -40,17 +40,17 @@ class TestLazyProperty(testing.TestBase):
     def test_write_delete(self):
         self.assertTrue(self.cls_instance.read_write_delete)
-        self.assertTrue(hasattr(self.cls_instance, "_lazy_read_write_delete"))
+        self.assertTrue(hasattr(self.cls_instance, '_lazy_read_write_delete'))
         self.cls_instance.read_write_delete = False
         self.assertFalse(self.cls_instance.read_write_delete)
         del self.cls_instance.read_write_delete
-        self.assertFalse(hasattr(self.cls_instance, "_lazy_read_write_delete"))
+        self.assertFalse(hasattr(self.cls_instance, '_lazy_read_write_delete'))
     def test_write(self):
         self.assertTrue(self.cls_instance.read_write)
-        self.assertTrue(hasattr(self.cls_instance, "_lazy_read_write"))
+        self.assertTrue(hasattr(self.cls_instance, '_lazy_read_write'))
         self.cls_instance.read_write = False
         self.assertFalse(self.cls_instance.read_write)
@@ -60,11 +60,11 @@ class TestLazyProperty(testing.TestBase):
             self.fail()
         except TypeError:
             # Bool object is not callable
-            self.assertTrue(hasattr(self.cls_instance, "_lazy_read_write"))
+            self.assertTrue(hasattr(self.cls_instance, '_lazy_read_write'))
     def test_delete(self):
         self.assertTrue(self.cls_instance.read_delete)
-        self.assertTrue(hasattr(self.cls_instance, "_lazy_read_delete"))
+        self.assertTrue(hasattr(self.cls_instance, '_lazy_read_delete'))
         try:
             self.cls_instance.read_delete = False
@@ -74,4 +74,4 @@ class TestLazyProperty(testing.TestBase):
             pass
         del self.cls_instance.read_delete
-        self.assertFalse(hasattr(self.cls_instance, "_lazy_read_delete"))
+        self.assertFalse(hasattr(self.cls_instance, '_lazy_read_delete'))

View File

@@ -10,6 +10,6 @@ storage = mongodb
 port = 8888
 [drivers:storage:mongodb]
-uri = "mongodb://127.0.0.1:27017"
-database = "marconi_test"
+uri = mongodb://127.0.0.1:27017
+database = marconi_test
 gc_threshold = 100

View File

@@ -20,7 +20,7 @@ from marconi.tests import util as testing
 class ControllerBaseTest(testing.TestBase):
-    project = "project"
+    project = 'project'
     driver_class = None
     controller_class = None
     controller_base_class = None
@@ -29,10 +29,10 @@ class ControllerBaseTest(testing.TestBase):
         super(ControllerBaseTest, self).setUp()
         if not self.driver_class:
-            self.skipTest("No driver class specified")
+            self.skipTest('No driver class specified')
         if not issubclass(self.controller_class, self.controller_base_class):
-            self.skipTest("%s is not an instance of %s. Tests not supported" %
+            self.skipTest('%s is not an instance of %s. Tests not supported' %
                           (self.controller_class, self.controller_base_class))
         self.driver = self.driver_class()
@@ -73,36 +73,36 @@ class QueueControllerTest(ControllerBaseTest):
     def test_queue_lifecycle(self):
         # Test Queue Creation
-        created = self.controller.upsert("test", project=self.project,
-                                         metadata=dict(topic="test_queue"))
+        created = self.controller.upsert('test', project=self.project,
+                                         metadata=dict(topic='test_queue'))
         self.assertTrue(created)
         # Test Queue retrieval
-        queue = self.controller.get("test", project=self.project)
+        queue = self.controller.get('test', project=self.project)
         self.assertIsNotNone(queue)
         # Test Queue Update
-        created = self.controller.upsert("test", project=self.project,
-                                         metadata=dict(meta="test_meta"))
+        created = self.controller.upsert('test', project=self.project,
+                                         metadata=dict(meta='test_meta'))
         self.assertFalse(created)
-        queue = self.controller.get("test", project=self.project)
-        self.assertEqual(queue["meta"], "test_meta")
+        queue = self.controller.get('test', project=self.project)
+        self.assertEqual(queue['meta'], 'test_meta')
         # Test Queue Statistic
-        _insert_fixtures(self.message_controller, "test",
-                         project=self.project, client_uuid="my_uuid", num=12)
-        countof = self.controller.stats("test", project=self.project)
+        _insert_fixtures(self.message_controller, 'test',
+                         project=self.project, client_uuid='my_uuid', num=12)
+        countof = self.controller.stats('test', project=self.project)
         self.assertEqual(countof['messages']['free'], 12)
         # Test Queue Deletion
-        self.controller.delete("test", project=self.project)
+        self.controller.delete('test', project=self.project)
         # Test DoesNotExist Exception
         self.assertRaises(storage.exceptions.DoesNotExist,
-                          self.controller.get, "test",
+                          self.controller.get, 'test',
                           project=self.project)
@@ -113,7 +113,7 @@ class MessageControllerTest(ControllerBaseTest):
     override the tearDown method in order
     to clean up storage's state.
     """
-    queue_name = "test_queue"
+    queue_name = 'test_queue'
     controller_base_class = storage.MessageBase
     def setUp(self):
@@ -134,10 +134,10 @@ class MessageControllerTest(ControllerBaseTest):
         messages = [
             {
-                "ttl": 60,
-                "body": {
-                    "event": "BackupStarted",
-                    "backupId": "c378813c-3f0b-11e2-ad92-7823d2b0f3ce"
+                'ttl': 60,
+                'body': {
+                    'event': 'BackupStarted',
+                    'backupId': 'c378813c-3f0b-11e2-ad92-7823d2b0f3ce'
                 }
             },
         ]
@@ -145,7 +145,7 @@ class MessageControllerTest(ControllerBaseTest):
         # Test Message Creation
         created = list(self.controller.post(queue_name, messages,
                                             project=self.project,
-                                            client_uuid="unused"))
+                                            client_uuid='unused'))
         self.assertEqual(len(created), 1)
         # Test Message Get
@@ -162,7 +162,7 @@ class MessageControllerTest(ControllerBaseTest):
     def test_get_multi(self):
         _insert_fixtures(self.controller, self.queue_name,
-                         project=self.project, client_uuid="my_uuid", num=15)
+                         project=self.project, client_uuid='my_uuid', num=15)
         def load_messages(expected, *args, **kwargs):
             interaction = self.controller.list(*args, **kwargs)
@@ -172,7 +172,7 @@ class MessageControllerTest(ControllerBaseTest):
         # Test all messages, echo False and uuid
         load_messages(0, self.queue_name, project=self.project,
-                      client_uuid="my_uuid")
+                      client_uuid='my_uuid')
         # Test all messages and limit
         load_messages(15, self.queue_name, project=self.project, limit=20,
@@ -181,17 +181,17 @@ class MessageControllerTest(ControllerBaseTest):
         # Test all messages, echo True, and uuid
         interaction = load_messages(10, self.queue_name, echo=True,
                                     project=self.project,
-                                    client_uuid="my_uuid")
+                                    client_uuid='my_uuid')
         # Test all messages, echo True, uuid and marker
         load_messages(5, self.queue_name, echo=True, project=self.project,
-                      marker=interaction.next(), client_uuid="my_uuid")
+                      marker=interaction.next(), client_uuid='my_uuid')
     def test_claim_effects(self):
         _insert_fixtures(self.controller, self.queue_name,
-                         project=self.project, client_uuid="my_uuid", num=12)
-        meta = {"ttl": 70}
+                         project=self.project, client_uuid='my_uuid', num=12)
+        meta = {'ttl': 70}
         another_cid, _ = self.claim_controller.create(self.queue_name, meta,
                                                       project=self.project)
@@ -201,21 +201,21 @@ class MessageControllerTest(ControllerBaseTest):
         # A wrong claim does not ensure the message deletion
         with testing.expect(storage.exceptions.NotPermitted):
-            self.controller.delete(self.queue_name, msg1["id"],
+            self.controller.delete(self.queue_name, msg1['id'],
                                    project=self.project,
                                    claim=another_cid)
         # Make sure a message can be deleted with a claim
-        self.controller.delete(self.queue_name, msg1["id"],
+        self.controller.delete(self.queue_name, msg1['id'],
                                project=self.project,
                                claim=cid)
         with testing.expect(storage.exceptions.DoesNotExist):
-            self.controller.get(self.queue_name, msg1["id"],
+            self.controller.get(self.queue_name, msg1['id'],
                                 project=self.project)
         # Make sure such a deletion is idempotent
-        self.controller.delete(self.queue_name, msg1["id"],
+        self.controller.delete(self.queue_name, msg1['id'],
                                project=self.project,
                                claim=cid)
@@ -224,7 +224,7 @@ class MessageControllerTest(ControllerBaseTest):
                                                       project=self.project)
         with testing.expect(storage.exceptions.NotPermitted):
-            self.controller.delete(self.queue_name, msg2["id"],
+            self.controller.delete(self.queue_name, msg2['id'],
                                    project=self.project,
                                    claim=cid)
@@ -294,7 +294,7 @@ class ClaimControllerTest(ControllerBaseTest):
     override the tearDown method in order
     to clean up storage's state.
     """
-    queue_name = "test_queue"
+    queue_name = 'test_queue'
     controller_base_class = storage.ClaimBase
     def setUp(self):
@@ -312,9 +312,9 @@ class ClaimControllerTest(ControllerBaseTest):
     def test_claim_lifecycle(self):
         _insert_fixtures(self.message_controller, self.queue_name,
-                         project=self.project, client_uuid="my_uuid", num=20)
-        meta = {"ttl": 70}
+                         project=self.project, client_uuid='my_uuid', num=20)
+        meta = {'ttl': 70}
         # Make sure create works
         claim_id, messages = self.controller.create(self.queue_name, meta,
@@ -337,10 +337,10 @@ class ClaimControllerTest(ControllerBaseTest):
         messages2 = list(messages2)
         self.assertEquals(len(messages2), 15)
         self.assertEquals(messages, messages2)
-        self.assertEquals(claim["ttl"], 70)
-        self.assertEquals(claim["id"], claim_id)
-        new_meta = {"ttl": 100}
+        self.assertEquals(claim['ttl'], 70)
+        self.assertEquals(claim['id'], claim_id)
+        new_meta = {'ttl': 100}
         self.controller.update(self.queue_name, claim_id,
                                new_meta, project=self.project)
@@ -350,11 +350,14 @@ class ClaimControllerTest(ControllerBaseTest):
         messages2 = list(messages2)
         self.assertEquals(len(messages2), 15)
-        #TODO(zyuan): Add some tests to ensure the ttl is extended/not-extended
+        # TODO(zyuan): Add some tests to ensure the ttl is
+        #              extended/not-extended.
         for msg1, msg2 in zip(messages, messages2):
             self.assertEquals(msg1['body'], msg2['body'])
-        self.assertEquals(claim["ttl"], 100)
-        self.assertEquals(claim["id"], claim_id)
+
+        self.assertEquals(claim['ttl'], 100)
+        self.assertEquals(claim['id'], claim_id)
         # Make sure delete works
         self.controller.delete(self.queue_name, claim_id,
@@ -365,7 +368,7 @@ class ClaimControllerTest(ControllerBaseTest):
                           claim_id, project=self.project)
     def test_expired_claim(self):
-        meta = {"ttl": 0}
+        meta = {'ttl': 0}
         claim_id, messages = self.controller.create(self.queue_name, meta,
                                                      project=self.project)
@@ -395,9 +398,9 @@ def _insert_fixtures(controller, queue_name, project=None,
     def messages():
         for n in xrange(num):
             yield {
-                "ttl": 120,
-                "body": {
-                    "event": "Event number %s" % n
+                'ttl': 120,
+                'body': {
+                    'event': 'Event number %s' % n
                 }}
     controller.post(queue_name, messages(),
                     project=project, client_uuid=client_uuid)

View File

@@ -65,6 +65,6 @@ class TestQueueBase(testing.TestBase):
     def test_upsert(self):
         self.assertRaises(AssertionError, self.controller.upsert,
-                          "test", metadata=[])
-        self.assertIsNone(self.controller.upsert("test", metadata={}))
+                          'test', metadata=[])
+        self.assertIsNone(self.controller.upsert('test', metadata={}))

View File

@@ -32,23 +32,23 @@ from marconi.tests import util as testing
 class MongodbUtilsTest(testing.TestBase):
     def test_dup_marker_from_error(self):
-        error_message = ("E11000 duplicate key error index: "
-                         "marconi.messages.$queue_marker dup key: "
-                         "{ : ObjectId('51adff46b100eb85d8a93a2d'), : 3 }")
+        error_message = ('E11000 duplicate key error index: '
+                         'marconi.messages.$queue_marker dup key: '
+                         '{ : ObjectId("51adff46b100eb85d8a93a2d"), : 3 }')
         marker = utils.dup_marker_from_error(error_message)
         self.assertEquals(marker, 3)
-        error_message = ("E11000 duplicate key error index: "
-                         "marconi.messages.$x_y dup key: "
-                         "{ : ObjectId('51adff46b100eb85d8a93a2d'), : 3 }")
+        error_message = ('E11000 duplicate key error index: '
+                         'marconi.messages.$x_y dup key: '
+                         '{ : ObjectId("51adff46b100eb85d8a93a2d"), : 3 }')
         self.assertRaises(exceptions.PatternNotFound,
                           utils.dup_marker_from_error, error_message)
-        error_message = ("E11000 duplicate key error index: "
-                         "marconi.messages.$queue_marker dup key: "
-                         "{ : ObjectId('51adff46b100eb85d8a93a2d') }")
+        error_message = ('E11000 duplicate key error index: '
+                         'marconi.messages.$queue_marker dup key: '
+                         '{ : ObjectId("51adff46b100eb85d8a93a2d") }')
         self.assertRaises(exceptions.PatternNotFound,
                           utils.dup_marker_from_error, error_message)
@@ -83,11 +83,11 @@ class MongodbUtilsTest(testing.TestBase):
 class MongodbDriverTest(testing.TestBase):
     def setUp(self):
-        if not os.environ.get("MONGODB_TEST_LIVE"):
-            self.skipTest("No MongoDB instance running")
+        if not os.environ.get('MONGODB_TEST_LIVE'):
+            self.skipTest('No MongoDB instance running')
         super(MongodbDriverTest, self).setUp()
-        self.load_conf("wsgi_mongodb.conf")
+        self.load_conf('wsgi_mongodb.conf')
     def test_db_instance(self):
         driver = mongodb.Driver()
@@ -101,11 +101,11 @@ class MongodbQueueTests(base.QueueControllerTest):
     controller_class = controllers.QueueController
     def setUp(self):
-        if not os.environ.get("MONGODB_TEST_LIVE"):
-            self.skipTest("No MongoDB instance running")
+        if not os.environ.get('MONGODB_TEST_LIVE'):
+            self.skipTest('No MongoDB instance running')
         super(MongodbQueueTests, self).setUp()
-        self.load_conf("wsgi_mongodb.conf")
+        self.load_conf('wsgi_mongodb.conf')
     def tearDown(self):
         self.controller._col.drop()
@@ -114,18 +114,18 @@ class MongodbQueueTests(base.QueueControllerTest):
     def test_indexes(self):
         col = self.controller._col
         indexes = col.index_information()
-        self.assertIn("p_1_n_1", indexes)
+        self.assertIn('p_1_n_1', indexes)
     def test_messages_purged(self):
-        queue_name = "test"
+        queue_name = 'test'
         self.controller.upsert(queue_name, {})
         qid = self.controller._get_id(queue_name)
         self.message_controller.post(queue_name,
-                                     [{"ttl": 60}],
+                                     [{'ttl': 60}],
                                      1234)
         self.controller.delete(queue_name)
         col = self.message_controller._col
-        self.assertEqual(col.find({"q": qid}).count(), 0)
+        self.assertEqual(col.find({'q': qid}).count(), 0)
 class MongodbMessageTests(base.MessageControllerTest):
@@ -134,11 +134,11 @@ class MongodbMessageTests(base.MessageControllerTest):
     controller_class = controllers.MessageController
     def setUp(self):
-        if not os.environ.get("MONGODB_TEST_LIVE"):
-            self.skipTest("No MongoDB instance running")
+        if not os.environ.get('MONGODB_TEST_LIVE'):
+            self.skipTest('No MongoDB instance running')
         super(MongodbMessageTests, self).setUp()
-        self.load_conf("wsgi_mongodb.conf")
+        self.load_conf('wsgi_mongodb.conf')
     def tearDown(self):
         self.controller._col.drop()
@@ -151,22 +151,22 @@ class MongodbMessageTests(base.MessageControllerTest):
     def test_indexes(self):
         col = self.controller._col
         indexes = col.index_information()
-        self.assertIn("active", indexes)
-        self.assertIn("claimed", indexes)
-        self.assertIn("queue_marker", indexes)
+        self.assertIn('active', indexes)
+        self.assertIn('claimed', indexes)
+        self.assertIn('queue_marker', indexes)
     def test_next_marker(self):
-        queue_name = "marker_test"
+        queue_name = 'marker_test'
         iterations = 10
         self.queue_controller.upsert(queue_name, {})
         queue_id = self.queue_controller._get_id(queue_name)
         seed_marker1 = self.controller._next_marker(queue_name)
-        self.assertEqual(seed_marker1, 1, "First marker is 1")
+        self.assertEqual(seed_marker1, 1, 'First marker is 1')
         for i in range(iterations):
-            self.controller.post(queue_name, [{"ttl": 60}], "uuid")
+            self.controller.post(queue_name, [{'ttl': 60}], 'uuid')
             marker1 = self.controller._next_marker(queue_id)
             marker2 = self.controller._next_marker(queue_id)
             marker3 = self.controller._next_marker(queue_id)
@@ -183,10 +183,10 @@ class MongodbMessageTests(base.MessageControllerTest):
         messages_per_queue = gc_threshold
         nogc_messages_per_queue = gc_threshold - 1
-        projects = ["gc-test-project-%s" % i for i in range(num_projects)]
-        queue_names = ["gc-test-%s" % i for i in range(num_queues)]
-        client_uuid = "b623c53c-cf75-11e2-84e1-a1187188419e"
-        messages = [{"ttl": 0, "body": str(i)}
+        projects = ['gc-test-project-%s' % i for i in range(num_projects)]
+        queue_names = ['gc-test-%s' % i for i in range(num_queues)]
+        client_uuid = 'b623c53c-cf75-11e2-84e1-a1187188419e'
+        messages = [{'ttl': 0, 'body': str(i)}
                     for i in range(messages_per_queue)]
         for project in projects:
@@ -195,11 +195,11 @@ class MongodbMessageTests(base.MessageControllerTest):
             self.controller.post(queue, messages, client_uuid, project)
         # Add one that should not be gc'd due to being under threshold
-        self.queue_controller.upsert("nogc-test", {}, "nogc-test-project")
-        nogc_messages = [{"ttl": 0, "body": str(i)}
+        self.queue_controller.upsert('nogc-test', {}, 'nogc-test-project')
+        nogc_messages = [{'ttl': 0, 'body': str(i)}
                          for i in range(nogc_messages_per_queue)]
-        self.controller.post("nogc-test", nogc_messages,
-                             client_uuid, "nogc-test-project")
+        self.controller.post('nogc-test', nogc_messages,
+                             client_uuid, 'nogc-test-project')
         total_expired = sum(
             self._count_expired(queue, project)
@@ -212,7 +212,7 @@ class MongodbMessageTests(base.MessageControllerTest):
         # Make sure the messages in this queue were not gc'd since
# the count was under the threshold. # the count was under the threshold.
self.assertEquals( self.assertEquals(
self._count_expired("nogc-test", "nogc-test-project"), self._count_expired('nogc-test', 'nogc-test-project'),
len(nogc_messages)) len(nogc_messages))
total_expired = sum( total_expired = sum(
@ -228,8 +228,8 @@ class MongodbMessageTests(base.MessageControllerTest):
# one remaining in the queue. # one remaining in the queue.
queue = random.choice(queue_names) queue = random.choice(queue_names)
queue_id = self.queue_controller._get_id(queue, project) queue_id = self.queue_controller._get_id(queue, project)
message = self.driver.db.messages.find_one({"q": queue_id}) message = self.driver.db.messages.find_one({'q': queue_id})
self.assertEquals(message["k"], messages_per_queue) self.assertEquals(message['k'], messages_per_queue)
class MongodbClaimTests(base.ClaimControllerTest): class MongodbClaimTests(base.ClaimControllerTest):
@ -237,11 +237,11 @@ class MongodbClaimTests(base.ClaimControllerTest):
controller_class = controllers.ClaimController controller_class = controllers.ClaimController
def setUp(self): def setUp(self):
if not os.environ.get("MONGODB_TEST_LIVE"): if not os.environ.get('MONGODB_TEST_LIVE'):
self.skipTest("No MongoDB instance running") self.skipTest('No MongoDB instance running')
super(MongodbClaimTests, self).setUp() super(MongodbClaimTests, self).setUp()
self.load_conf("wsgi_mongodb.conf") self.load_conf('wsgi_mongodb.conf')
def test_claim_doesnt_exist(self): def test_claim_doesnt_exist(self):
"""Verifies that operations fail on expired/missing claims. """Verifies that operations fail on expired/missing claims.
@ -255,7 +255,7 @@ class MongodbClaimTests(base.ClaimControllerTest):
epoch, project=self.project) epoch, project=self.project)
claim_id, messages = self.controller.create(self.queue_name, claim_id, messages = self.controller.create(self.queue_name,
{"ttl": 1}, {'ttl': 1},
project=self.project) project=self.project)
# Let's let it expire # Let's let it expire

View File

@ -37,7 +37,7 @@ def verify_claim_msg(count, *claim_response):
if msg_length_flag: if msg_length_flag:
query_claim(headers, body) query_claim(headers, body)
else: else:
assert msg_length_flag, "More msgs returned than specified in limit" assert msg_length_flag, 'More msgs returned than specified in limit'
def verify_claim_msglength(count, *body): def verify_claim_msglength(count, *body):
@ -65,30 +65,30 @@ def query_claim(headers, *body):
msg_list = body[0] msg_list = body[0]
msg_list = json.loads(msg_list) msg_list = json.loads(msg_list)
location = headers["Location"] location = headers['Location']
url = functionlib.create_url_from_appender(location) url = functionlib.create_url_from_appender(location)
header = functionlib.create_marconi_headers() header = functionlib.create_marconi_headers()
get_msg = http.get(url, header) get_msg = http.get(url, header)
if get_msg.status_code == 200: if get_msg.status_code == 200:
query_body = json.loads(get_msg.text) query_body = json.loads(get_msg.text)
query_msgs = query_body["messages"] query_msgs = query_body['messages']
test_result_flag = verify_query_msgs(query_msgs, msg_list) test_result_flag = verify_query_msgs(query_msgs, msg_list)
if test_result_flag: if test_result_flag:
return test_result_flag return test_result_flag
else: else:
print "URL" print 'URL'
print url print url
print "HEADER" print 'HEADER'
print header print header
print "Messages returned by Query Claim" print 'Messages returned by Query Claim'
print query_msgs print query_msgs
print "# of Messages returned by Query Claim", len(query_msgs) print '# of Messages returned by Query Claim', len(query_msgs)
print 'Messages returned by Claim Messages' print 'Messages returned by Claim Messages'
print msg_list print msg_list
print "# of Messages returned by Claim messages", len(msg_list) print '# of Messages returned by Claim messages', len(msg_list)
assert test_result_flag, "Query Claim Failed" assert test_result_flag, 'Query Claim Failed'
def verify_query_msgs(querymsgs, msg_list): def verify_query_msgs(querymsgs, msg_list):
@ -103,9 +103,9 @@ def verify_query_msgs(querymsgs, msg_list):
idx = 0 idx = 0
for msg in querymsgs: for msg in querymsgs:
if ((msg["body"] != msg_list[idx]["body"]) or if ((msg['body'] != msg_list[idx]['body']) or
(msg["href"] != msg_list[idx]["href"]) or (msg['href'] != msg_list[idx]['href']) or
(msg["ttl"] != msg_list[idx]["ttl"])): (msg['ttl'] != msg_list[idx]['ttl'])):
test_result_flag = False test_result_flag = False
idx = idx + 1 idx = idx + 1
@ -122,25 +122,25 @@ def patch_claim(*claim_response):
test_result_flag = False test_result_flag = False
headers = claim_response[0] headers = claim_response[0]
location = headers["Location"] location = headers['Location']
url = functionlib.create_url_from_appender(location) url = functionlib.create_url_from_appender(location)
header = functionlib.create_marconi_headers() header = functionlib.create_marconi_headers()
ttl_value = 300 ttl_value = 300
payload = '{"ttl": ttlvalue }' payload = '{"ttl": ttlvalue }'
payload = payload.replace("ttlvalue", str(ttl_value)) payload = payload.replace('ttlvalue', str(ttl_value))
patch_response = http.patch(url, header, body=payload) patch_response = http.patch(url, header, body=payload)
if patch_response.status_code == 204: if patch_response.status_code == 204:
test_result_flag = verify_patch_claim(url, header, ttl_value) test_result_flag = verify_patch_claim(url, header, ttl_value)
else: else:
print "Patch HTTP Response code: {}".format(patch_response.status_code) print 'Patch HTTP Response code: {}'.format(patch_response.status_code)
print patch_response.headers print patch_response.headers
print patch_response.text print patch_response.text
assert test_result_flag, "Patch Claim Failed" assert test_result_flag, 'Patch Claim Failed'
if not test_result_flag: if not test_result_flag:
assert test_result_flag, "Query claim after the patch failed" assert test_result_flag, 'Query claim after the patch failed'
def verify_patch_claim(url, header, ttl_extended): def verify_patch_claim(url, header, ttl_extended):
@ -157,7 +157,7 @@ def verify_patch_claim(url, header, ttl_extended):
get_claim = http.get(url, header) get_claim = http.get(url, header)
response_body = json.loads(get_claim.text) response_body = json.loads(get_claim.text)
ttl = response_body["ttl"] ttl = response_body['ttl']
if ttl < ttl_extended: if ttl < ttl_extended:
print get_claim.status_code print get_claim.status_code
print get_claim.headers print get_claim.headers
@ -173,7 +173,7 @@ def create_urllist_fromhref(*response):
:param *response: HTTP response containing the list of messages. :param *response: HTTP response containing the list of messages.
""" """
rspbody = json.loads(response[1]) rspbody = json.loads(response[1])
urllist = [functionlib.create_url_from_appender(item["href"]) urllist = [functionlib.create_url_from_appender(item['href'])
for item in rspbody] for item in rspbody]
return urllist return urllist
@ -194,14 +194,14 @@ def delete_claimed_msgs(*claim_response):
if delete_response.status_code == 204: if delete_response.status_code == 204:
test_result_flag = functionlib.verify_delete(url, header) test_result_flag = functionlib.verify_delete(url, header)
else: else:
print "DELETE message with claim ID: {}".format(url) print 'DELETE message with claim ID: {}'.format(url)
print delete_response.status_code print delete_response.status_code
print delete_response.headers print delete_response.headers
print delete_response.text print delete_response.text
assert test_result_flag, "Delete Claimed Message Failed" assert test_result_flag, 'Delete Claimed Message Failed'
if not test_result_flag: if not test_result_flag:
assert test_result_flag, "Get message after DELETE did not return 404" assert test_result_flag, 'Get message after DELETE did not return 404'
def get_claimed_msgs(*claim_response): def get_claimed_msgs(*claim_response):
@ -219,11 +219,11 @@ def get_claimed_msgs(*claim_response):
if get_response.status_code != 200: if get_response.status_code != 200:
print url print url
print header print header
print "Get Response Code: {}".format(get_response.status_code) print 'Get Response Code: {}'.format(get_response.status_code)
test_result_flag = False test_result_flag = False
if not test_result_flag: if not test_result_flag:
assert test_result_flag, "Get Claimed message Failed" assert test_result_flag, 'Get Claimed message Failed'
def release_claim(*claim_response): def release_claim(*claim_response):
@ -236,7 +236,7 @@ def release_claim(*claim_response):
test_result_flag = False test_result_flag = False
headers = claim_response[0] headers = claim_response[0]
location = headers["Location"] location = headers['Location']
url = functionlib.create_url_from_appender(location) url = functionlib.create_url_from_appender(location)
header = functionlib.create_marconi_headers() header = functionlib.create_marconi_headers()
@ -244,10 +244,10 @@ def release_claim(*claim_response):
if release_response.status_code == 204: if release_response.status_code == 204:
test_result_flag = functionlib.verify_delete(url, header) test_result_flag = functionlib.verify_delete(url, header)
else: else:
print "Release Claim HTTP code:{}".format(release_response.status_code) print 'Release Claim HTTP code:{}'.format(release_response.status_code)
print release_response.headers print release_response.headers
print release_response.text print release_response.text
assert test_result_flag, "Release Claim Failed" assert test_result_flag, 'Release Claim Failed'
if not test_result_flag: if not test_result_flag:
assert test_result_flag, "Get claim after the release failed" assert test_result_flag, 'Get claim after the release failed'

View File

@ -19,7 +19,7 @@ from marconi.tests.system.common import config
from marconi.tests.system.common import functionlib from marconi.tests.system.common import functionlib
cfg = config.Config() CFG = config.Config()
def get_data(): def get_data():
@ -32,7 +32,7 @@ def get_data():
for row in data: for row in data:
row['header'] = functionlib.get_headers(row['header']) row['header'] = functionlib.get_headers(row['header'])
row['url'] = row['url'].replace("<BASE_URL>", cfg.base_url) row['url'] = row['url'].replace('<BASE_URL>', CFG.base_url)
return data return data

View File

@ -21,38 +21,38 @@ from marconi.tests.system.common import config
from marconi.tests.system.common import http from marconi.tests.system.common import http
cfg = config.Config() CFG = config.Config()
def get_keystone_token(): def get_keystone_token():
"""Gets Keystone Auth token.""" """Gets Keystone Auth token."""
req_json = { req_json = {
"auth": { 'auth': {
"passwordCredentials": { 'passwordCredentials': {
"username": cfg.username, 'username': CFG.username,
"password": cfg.password 'password': CFG.password
} }
}, },
} }
header = '{"Host": "identity.api.rackspacecloud.com",' header = '{"Host": "identity.api.rackspacecloud.com",'
header += '"Content-Type": "application/json","Accept":"application/json"}' header += '"Content-Type": "application/json","Accept":"application/json"}'
url = cfg.auth_url url = CFG.auth_url
response = http.post(url=url, header=header, body=req_json) response = http.post(url=url, header=header, body=req_json)
response_body = json.loads(response.text) response_body = json.loads(response.text)
auth_token = response_body["access"]["token"]["id"] auth_token = response_body['access']['token']['id']
return auth_token return auth_token
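get_keystone_token above reads a Keystone v2.0 token response and only touches access.token.id. An illustrative body of the shape it parses (the token id below is a made-up placeholder, not a real credential):

    import json

    # Only the fields the helper actually reads are shown here.
    response_text = json.dumps({'access': {'token': {'id': 'example-token-id'}}})
    assert json.loads(response_text)['access']['token']['id'] == 'example-token-id'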
def get_auth_token(): def get_auth_token():
"""Returns a valid auth token if auth is turned on.""" """Returns a valid auth token if auth is turned on."""
if cfg.auth_enabled == "true": if CFG.auth_enabled == 'true':
auth_token = get_keystone_token() auth_token = get_keystone_token()
else: else:
auth_token = "notrealtoken" auth_token = 'notrealtoken'
return auth_token return auth_token
@ -64,10 +64,10 @@ def create_marconi_headers():
headers = '{"Host": "<HOST>","User-Agent": "<USER-AGENT>","Date":"<DATE>",' headers = '{"Host": "<HOST>","User-Agent": "<USER-AGENT>","Date":"<DATE>",'
headers += '"Accept": "application/json","Accept-Encoding": "gzip",' headers += '"Accept": "application/json","Accept-Encoding": "gzip",'
headers += '"X-Auth-Token": "<auth_token>","Client-ID": "<UUID>"}' headers += '"X-Auth-Token": "<auth_token>","Client-ID": "<UUID>"}'
headers = headers.replace("<auth_token>", auth_token) headers = headers.replace('<auth_token>', auth_token)
headers = headers.replace("<HOST>", cfg.host) headers = headers.replace('<HOST>', CFG.host)
headers = headers.replace("<USER-AGENT>", cfg.user_agent) headers = headers.replace('<USER-AGENT>', CFG.user_agent)
headers = headers.replace("<UUID>", cfg.uuid) headers = headers.replace('<UUID>', CFG.uuid)
return headers return headers
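The header helpers in this module build JSON by templating a string and substituting placeholders. As a design note, the same result can be sketched with a dict and json.dumps, which sidesteps hand-written quoting; this is purely illustrative and assumes the same CFG attributes the template above substitutes:

    import json

    def build_marconi_headers(auth_token, cfg):
        # Illustrative alternative: let json.dumps handle quoting and escaping.
        return json.dumps({
            'Host': cfg.host,
            'User-Agent': cfg.user_agent,
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip',
            'X-Auth-Token': auth_token,
            'Client-ID': cfg.uuid,
        })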
@ -78,10 +78,10 @@ def invalid_auth_token_header():
headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",' headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",'
headers += '"Accept": "application/json","Accept-Encoding": "gzip",' headers += '"Accept": "application/json","Accept-Encoding": "gzip",'
headers += '"X-Auth-Token": "<auth_token>"}' headers += 'X-Auth-Token: <auth_token>}'
headers = headers.replace("<auth_token>", auth_token) headers = headers.replace('<auth_token>', auth_token)
headers = headers.replace("<HOST>", cfg.host) headers = headers.replace('<HOST>', CFG.host)
headers = headers.replace("<USER-AGENT>", cfg.user_agent) headers = headers.replace('<USER-AGENT>', CFG.user_agent)
return headers return headers
@ -93,8 +93,8 @@ def missing_header_fields():
headers = '{"Host": "<HOST>","Date": "<DATE>",' headers = '{"Host": "<HOST>","Date": "<DATE>",'
headers += '"Accept": "application/json","Accept-Encoding": "gzip",' headers += '"Accept": "application/json","Accept-Encoding": "gzip",'
headers += '"X-Auth-Token": "<auth_token>"}' headers += '"X-Auth-Token": "<auth_token>"}'
headers = headers.replace("<auth_token>", auth_token) headers = headers.replace('<auth_token>', auth_token)
headers = headers.replace("<HOST>", cfg.host) headers = headers.replace('<HOST>', CFG.host)
return headers return headers
@ -106,9 +106,9 @@ def plain_text_in_header():
headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",' headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",'
headers += '"Accept": "text/plain","Accept-Encoding": "gzip",' headers += '"Accept": "text/plain","Accept-Encoding": "gzip",'
headers += '"X-Auth-Token": "<auth_token>"}' headers += '"X-Auth-Token": "<auth_token>"}'
headers = headers.replace("<auth_token>", auth_token) headers = headers.replace('<auth_token>', auth_token)
headers = headers.replace("<HOST>", cfg.host) headers = headers.replace('<HOST>', CFG.host)
headers = headers.replace("<USER-AGENT>", cfg.user_agent) headers = headers.replace('<USER-AGENT>', CFG.user_agent)
return headers return headers
@ -120,9 +120,9 @@ def asterisk_in_header():
headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",' headers = '{"Host":"<HOST>","User-Agent":"<USER-AGENT>","Date":"<DATE>",'
headers += '"Accept": "*/*","Accept-Encoding": "gzip",' headers += '"Accept": "*/*","Accept-Encoding": "gzip",'
headers += '"X-Auth-Token": "<auth_token>"}' headers += '"X-Auth-Token": "<auth_token>"}'
headers = headers.replace("<auth_token>", auth_token) headers = headers.replace('<auth_token>', auth_token)
headers = headers.replace("<HOST>", cfg.host) headers = headers.replace('<HOST>', CFG.host)
headers = headers.replace("<USER-AGENT>", cfg.user_agent) headers = headers.replace('<USER-AGENT>', CFG.user_agent)
return headers return headers
@ -146,23 +146,23 @@ def get_headers(input_header):
def get_custom_body(kwargs): def get_custom_body(kwargs):
"""Returns a custom request body.""" """Returns a custom request body."""
req_body = {"data": "<DATA>"} req_body = {'data': '<DATA>'}
if "metadatasize" in kwargs.keys(): if 'metadatasize' in kwargs.keys():
random_data = binascii.b2a_hex(os.urandom(kwargs["metadatasize"])) random_data = binascii.b2a_hex(os.urandom(kwargs['metadatasize']))
req_body["data"] = random_data req_body['data'] = random_data
return json.dumps(req_body) return json.dumps(req_body)
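One detail worth noting about get_custom_body: binascii.b2a_hex doubles the length, so metadatasize counts random bytes rather than characters in the resulting body. A quick self-contained illustration:

    import binascii
    import os

    # 16 random bytes hex-encode to a 32-character string.
    sample = binascii.b2a_hex(os.urandom(16))
    assert len(sample) == 32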
def create_url_from_appender(appender): def create_url_from_appender(appender):
"""Returns complete url using the appender (with a a preceding '/').""" """Returns complete url using the appender (with a a preceding '/')."""
next_url = str(cfg.base_server + appender) next_url = str(CFG.base_server + appender)
return(next_url) return(next_url)
def get_url_from_location(header): def get_url_from_location(header):
"""returns : the complete url referring to the location.""" """returns : the complete url referring to the location."""
location = header["location"] location = header['location']
url = create_url_from_appender(location) url = create_url_from_appender(location)
return url return url
@ -177,10 +177,10 @@ def verify_metadata(get_data, posted_body):
print(posted_body, type(posted_body)) print(posted_body, type(posted_body))
if get_data in posted_body: if get_data in posted_body:
print("AYYY") print('AYYY')
else: else:
test_result_flag = False test_result_flag = False
print("NAYYY") print('NAYYY')
return test_result_flag return test_result_flag
@ -193,13 +193,13 @@ def verify_delete(url, header):
if getmsg.status_code == 404: if getmsg.status_code == 404:
test_result_flag = True test_result_flag = True
else: else:
print("GET after DELETE failed") print('GET after DELETE failed')
print("URL") print('URL')
print url print url
print("headers") print('headers')
print header print header
print("Response Body") print('Response Body')
print getmsg.text print getmsg.text
assert test_result_flag, "GET Code {}".format(getmsg.status_code) assert test_result_flag, 'GET Code {}'.format(getmsg.status_code)
return test_result_flag return test_result_flag

View File

@ -24,13 +24,13 @@ def get(url, header='', param=''):
try: try:
response = requests.get(url, headers=header, params=param) response = requests.get(url, headers=header, params=param)
except requests.ConnectionError as detail: except requests.ConnectionError as detail:
print("ConnectionError: Exception in http.get {}".format(detail)) print('ConnectionError: Exception in http.get {}'.format(detail))
except requests.HTTPError as detail: except requests.HTTPError as detail:
print("HTTPError: Exception in http.get {}".format(detail)) print('HTTPError: Exception in http.get {}'.format(detail))
except requests.Timeout as detail: except requests.Timeout as detail:
print("Timeout: Exception in http.get {}".format(detail)) print('Timeout: Exception in http.get {}'.format(detail))
except requests.TooManyRedirects as detail: except requests.TooManyRedirects as detail:
print("TooManyRedirects: Exception in http.get {}".format(detail)) print('TooManyRedirects: Exception in http.get {}'.format(detail))
return response return response
@ -44,13 +44,13 @@ def post(url, header='', body='', param=''):
response = requests.post(url, headers=header, data=body, response = requests.post(url, headers=header, data=body,
params=param) params=param)
except requests.ConnectionError as detail: except requests.ConnectionError as detail:
print("ConnectionError: Exception in http.post {}".format(detail)) print('ConnectionError: Exception in http.post {}'.format(detail))
except requests.HTTPError as detail: except requests.HTTPError as detail:
print("HTTPError: Exception in http.post {}".format(detail)) print('HTTPError: Exception in http.post {}'.format(detail))
except requests.Timeout as detail: except requests.Timeout as detail:
print("Timeout: Exception in http.post {}".format(detail)) print('Timeout: Exception in http.post {}'.format(detail))
except requests.TooManyRedirects as detail: except requests.TooManyRedirects as detail:
print("TooManyRedirects: Exception in http.post {}".format(detail)) print('TooManyRedirects: Exception in http.post {}'.format(detail))
return response return response
@ -64,13 +64,13 @@ def put(url, header='', body='', param=''):
response = requests.put(url, headers=header, data=body, response = requests.put(url, headers=header, data=body,
params=param) params=param)
except requests.ConnectionError as detail: except requests.ConnectionError as detail:
print("ConnectionError: Exception in http.put {}".format(detail)) print('ConnectionError: Exception in http.put {}'.format(detail))
except requests.HTTPError as detail: except requests.HTTPError as detail:
print("HTTPError: Exception in http.put {}".format(detail)) print('HTTPError: Exception in http.put {}'.format(detail))
except requests.Timeout as detail: except requests.Timeout as detail:
print("Timeout: Exception in http.put {}".format(detail)) print('Timeout: Exception in http.put {}'.format(detail))
except requests.TooManyRedirects as detail: except requests.TooManyRedirects as detail:
print("TooManyRedirects: Exception in http.put {}".format(detail)) print('TooManyRedirects: Exception in http.put {}'.format(detail))
return response return response
@ -83,13 +83,13 @@ def delete(url, header='', param=''):
try: try:
response = requests.delete(url, headers=header, params=param) response = requests.delete(url, headers=header, params=param)
except requests.ConnectionError as detail: except requests.ConnectionError as detail:
print("ConnectionError: Exception in http.delete {}".format(detail)) print('ConnectionError: Exception in http.delete {}'.format(detail))
except requests.HTTPError as detail: except requests.HTTPError as detail:
print("HTTPError: Exception in http.delete {}".format(detail)) print('HTTPError: Exception in http.delete {}'.format(detail))
except requests.Timeout as detail: except requests.Timeout as detail:
print("Timeout: Exception in http.delete {}".format(detail)) print('Timeout: Exception in http.delete {}'.format(detail))
except requests.TooManyRedirects as detail: except requests.TooManyRedirects as detail:
print("TooManyRedirects: Exception in http.delete {}".format(detail)) print('TooManyRedirects: Exception in http.delete {}'.format(detail))
return response return response
@ -103,13 +103,13 @@ def patch(url, header='', body='', param=''):
response = requests.patch(url, headers=header, data=body, response = requests.patch(url, headers=header, data=body,
params=param) params=param)
except requests.ConnectionError as detail: except requests.ConnectionError as detail:
print("ConnectionError: Exception in http.patch {}".format(detail)) print('ConnectionError: Exception in http.patch {}'.format(detail))
except requests.HTTPError as detail: except requests.HTTPError as detail:
print("HTTPError: Exception in http.patch {}".format(detail)) print('HTTPError: Exception in http.patch {}'.format(detail))
except requests.Timeout as detail: except requests.Timeout as detail:
print("Timeout: Exception in http.patch {}".format(detail)) print('Timeout: Exception in http.patch {}'.format(detail))
except requests.TooManyRedirects as detail: except requests.TooManyRedirects as detail:
print("TooManyRedirects: Exception in http.patch {}".format(detail)) print('TooManyRedirects: Exception in http.patch {}'.format(detail))
return response return response
@ -153,14 +153,14 @@ def executetests(row):
print url print url
print header print header
print body print body
print "Actual Response: {}".format(response.status_code) print 'Actual Response: {}'.format(response.status_code)
print "Actual Response Headers" print 'Actual Response Headers'
print response.headers print response.headers
print"Actual Response Body" print'Actual Response Body'
print response.text print response.text
print"ExpectedRC: {}".format(expected_RC) print'ExpectedRC: {}'.format(expected_RC)
print"expectedresponsebody: {}".format(expected_response_body) print'expectedresponsebody: {}'.format(expected_response_body)
assert test_result_flag, "Actual Response does not match the Expected" assert test_result_flag, 'Actual Response does not match the Expected'
def verify_response(response, expected_RC): def verify_response(response, expected_RC):
@ -171,8 +171,8 @@ def verify_response(response, expected_RC):
if actual_RC != expected_RC: if actual_RC != expected_RC:
test_result_flag = False test_result_flag = False
print("Unexpected http Response code {}".format(actual_RC)) print('Unexpected http Response code {}'.format(actual_RC))
print "Response Body returned" print 'Response Body returned'
print actual_response_body print actual_response_body
return test_result_flag return test_result_flag

View File

@ -19,7 +19,7 @@ from marconi.tests.system.common import config
from marconi.tests.system.common import functionlib from marconi.tests.system.common import functionlib
cfg = config.Config() CFG = config.Config()
def get_data(): def get_data():
@ -32,7 +32,7 @@ def get_data():
for row in data: for row in data:
row['header'] = functionlib.get_headers(row['header']) row['header'] = functionlib.get_headers(row['header'])
row['url'] = row['url'].replace("<BASE_URL>", cfg.base_url) row['url'] = row['url'].replace('<BASE_URL>', CFG.base_url)
return data return data

View File

@ -22,7 +22,7 @@ from marconi.tests.system.common import functionlib
from marconi.tests.system.common import http from marconi.tests.system.common import http
cfg = config.Config() CFG = config.Config()
def generate_dict(dict_length): def generate_dict(dict_length):
@ -51,17 +51,17 @@ def single_message_body(**kwargs):
""" """
valid_ttl = random.randint(60, 1209600) valid_ttl = random.randint(60, 1209600)
if "messagesize" in kwargs.keys(): if 'messagesize' in kwargs.keys():
body = generate_dict(kwargs["messagesize"]) body = generate_dict(kwargs['messagesize'])
else: else:
body = generate_dict(2) body = generate_dict(2)
if "ttl" in kwargs.keys(): if 'ttl' in kwargs.keys():
ttl = kwargs["ttl"] ttl = kwargs['ttl']
else: else:
ttl = valid_ttl ttl = valid_ttl
message_body = {"ttl": ttl, "body": body} message_body = {'ttl': ttl, 'body': body}
return message_body return message_body
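single_message_body above accepts optional messagesize and ttl keyword arguments; a brief usage sketch within this module (the values are arbitrary examples):

    # Defaults: body comes from generate_dict(2), ttl is random in [60, 1209600].
    default_msg = single_message_body()

    # Explicit size (passed through to generate_dict) and ttl.
    sized_msg = single_message_body(messagesize=5, ttl=120)
    assert sized_msg['ttl'] == 120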
@ -70,7 +70,7 @@ def get_message_body(**kwargs):
:param **kwargs: can be {messagecount: x}, where x is the number of messages. :param **kwargs: can be {messagecount: x}, where x is the number of messages.
""" """
message_count = kwargs["messagecount"] message_count = kwargs['messagecount']
multiple_message_body = [] multiple_message_body = []
for i in range(message_count): for i in range(message_count):
message_body = single_message_body(**kwargs) message_body = single_message_body(**kwargs)
@ -84,7 +84,7 @@ def dummyget_message_body(dict):
return dict return dict
def create_url(base_url=cfg.base_url, *msg_id_list): def create_url(base_url=CFG.base_url, *msg_id_list):
"""Creates url list for retrieving messages with message id.""" """Creates url list for retrieving messages with message id."""
url = [(base_url + msg_id) for msg_id in msg_id_list] url = [(base_url + msg_id) for msg_id in msg_id_list]
return url return url
@ -98,7 +98,7 @@ def verify_msg_length(count=10, *msg_list):
""" """
test_result_flag = False test_result_flag = False
msg_body = json.loads(msg_list[0]) msg_body = json.loads(msg_list[0])
msg_list = msg_body["messages"] msg_list = msg_body['messages']
msg_count = len(msg_list) msg_count = len(msg_list)
if (msg_count <= count): if (msg_count <= count):
test_result_flag = True test_result_flag = True
@ -113,8 +113,8 @@ def get_href(*msg_list):
:param *msg_list: list of messages returned by the server. :param *msg_list: list of messages returned by the server.
""" """
msg_body = json.loads(msg_list[0]) msg_body = json.loads(msg_list[0])
link = msg_body["links"] link = msg_body['links']
href = link[0]["href"] href = link[0]['href']
return href return href
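get_href above assumes the listing body carries a links array whose first entry points at the next page of messages. An illustrative document of the shape it expects; the paths are made-up examples, not actual API output:

    import json

    listing = json.dumps({
        'links': [{'rel': 'next', 'href': '/v1/queues/fizbit/messages?marker=3'}],
        'messages': [{'href': '/v1/queues/fizbit/messages/1', 'ttl': 30, 'body': 239}],
    })
    assert json.loads(listing)['links'][0]['href'].endswith('marker=3')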
@ -136,14 +136,14 @@ def verify_post_msg(msg_headers, posted_body):
test_result_flag = functionlib.verify_metadata(getmsg.text, test_result_flag = functionlib.verify_metadata(getmsg.text,
posted_body) posted_body)
else: else:
print("Failed to GET {}".format(url)) print('Failed to GET {}'.format(url))
print("Request Header") print('Request Header')
print header print header
print("Response Headers") print('Response Headers')
print getmsg.headers print getmsg.headers
print("Response Body") print('Response Body')
print getmsg.text print getmsg.text
assert test_result_flag, "HTTP code {}".format(getmsg.status_code) assert test_result_flag, 'HTTP code {}'.format(getmsg.status_code)
def get_next_msgset(responsetext): def get_next_msgset(responsetext):
@ -162,9 +162,9 @@ def get_next_msgset(responsetext):
return test_result_flag return test_result_flag
else: else:
test_result_flag = False test_result_flag = False
print("Failed to GET {}".format(url)) print('Failed to GET {}'.format(url))
print(getmsg.text) print(getmsg.text)
assert test_result_flag, "HTTP code {}".format(getmsg.status_code) assert test_result_flag, 'HTTP code {}'.format(getmsg.status_code)
def verify_get_msgs(count, *getresponse): def verify_get_msgs(count, *getresponse):
@ -181,11 +181,11 @@ def verify_get_msgs(count, *getresponse):
if msglengthflag: if msglengthflag:
test_result_flag = get_next_msgset(body) test_result_flag = get_next_msgset(body)
else: else:
print("Messages returned exceed requested number of messages") print('Messages returned exceed requested number of messages')
test_result_flag = False test_result_flag = False
if not test_result_flag: if not test_result_flag:
assert test_result_flag, "Recursive Get Messages Failed" assert test_result_flag, 'Recursive Get Messages Failed'
def delete_msg(*postresponse): def delete_msg(*postresponse):
@ -204,11 +204,11 @@ def delete_msg(*postresponse):
if deletemsg.status_code == 204: if deletemsg.status_code == 204:
test_result_flag = functionlib.verify_delete(url, header) test_result_flag = functionlib.verify_delete(url, header)
else: else:
print("DELETE message failed") print('DELETE message failed')
print("URL") print('URL')
print url print url
print("headers") print('headers')
print header print header
print("Response Body") print('Response Body')
print deletemsg.text print deletemsg.text
assert test_result_flag, "DELETE Code {}".format(deletemsg.status_code) assert test_result_flag, 'DELETE Code {}'.format(deletemsg.status_code)

View File

@ -19,7 +19,7 @@ from marconi.tests.system.common import config
from marconi.tests.system.common import functionlib from marconi.tests.system.common import functionlib
cfg = config.Config() CFG = config.Config()
def get_data(): def get_data():
@ -32,7 +32,7 @@ def get_data():
for row in data: for row in data:
row['header'] = functionlib.get_headers(row['header']) row['header'] = functionlib.get_headers(row['header'])
row['url'] = row['url'].replace("<BASE_URL>", cfg.base_url) row['url'] = row['url'].replace('<BASE_URL>', CFG.base_url)
return data return data

View File

@ -38,14 +38,14 @@ def verify_queue_stats(*get_response):
keys_in_body = body.keys() keys_in_body = body.keys()
keys_in_body.sort() keys_in_body.sort()
if (keys_in_body == ["actions", "messages"]): if (keys_in_body == ['actions', 'messages']):
stats = body["messages"] stats = body['messages']
keys_in_stats = stats.keys() keys_in_stats = stats.keys()
keys_in_stats.sort() keys_in_stats.sort()
if (keys_in_stats == ["claimed", "free"]): if (keys_in_stats == ['claimed', 'free']):
try: try:
int(stats["claimed"]) int(stats['claimed'])
int(stats["free"]) int(stats['free'])
except Exception: except Exception:
test_result_flag = False test_result_flag = False
else: else:
@ -58,7 +58,7 @@ def verify_queue_stats(*get_response):
else: else:
print headers print headers
print body print body
assert test_result_flag, "Get Request stats failed" assert test_result_flag, 'Get Request stats failed'
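verify_queue_stats above only checks the key names and that the counters are integers. An illustrative stats document that would satisfy those checks (the numbers are arbitrary):

    import json

    stats_body = json.loads(json.dumps({
        'actions': 0,
        'messages': {'claimed': 2, 'free': 8},
    }))
    assert sorted(stats_body.keys()) == ['actions', 'messages']
    assert sorted(stats_body['messages'].keys()) == ['claimed', 'free']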
def get_queue_name(namelength=65): def get_queue_name(namelength=65):
@ -68,7 +68,7 @@ def get_queue_name(namelength=65):
:param namelength: length of the queue name. :param namelength: length of the queue name.
""" """
appender = "/queues/" + binascii.b2a_hex(os.urandom(namelength)) appender = '/queues/' + binascii.b2a_hex(os.urandom(namelength))
url = functionlib.create_url_from_appender(appender) url = functionlib.create_url_from_appender(appender)
return url return url

View File

@ -20,7 +20,7 @@ from marconi.tests import util as testing
PROJECT_CONFIG = config.project() PROJECT_CONFIG = config.project()
CFG = PROJECT_CONFIG.from_options( CFG = PROJECT_CONFIG.from_options(
without_help=3, without_help=3,
with_help=(None, "nonsense")) with_help=(None, 'nonsense'))
class TestConfig(testing.TestBase): class TestConfig(testing.TestBase):

View File

@ -32,6 +32,6 @@ class TestTransportAuth(util.TestBase):
self.cfg.conf = cfg.ConfigOpts() self.cfg.conf = cfg.ConfigOpts()
def test_configs(self): def test_configs(self):
auth.strategy("keystone")._register_opts(self.cfg.conf) auth.strategy('keystone')._register_opts(self.cfg.conf)
self.assertIn("keystone_authtoken", self.cfg.conf) self.assertIn('keystone_authtoken', self.cfg.conf)
self.assertIn("keystone_authtoken", dir(self.cfg.from_options())) self.assertIn('keystone_authtoken', dir(self.cfg.from_options()))

View File

@ -29,7 +29,7 @@ class TestBase(util.TestBase):
super(TestBase, self).setUp() super(TestBase, self).setUp()
if self.config_filename is None: if self.config_filename is None:
self.skipTest("No config specified") self.skipTest('No config specified')
conf_file = self.conf_path(self.config_filename) conf_file = self.conf_path(self.config_filename)
boot = marconi.Bootstrap(conf_file) boot = marconi.Bootstrap(conf_file)
@ -43,9 +43,9 @@ class TestBaseFaulty(TestBase):
def setUp(self): def setUp(self):
self._storage_backup = marconi.Bootstrap.storage self._storage_backup = marconi.Bootstrap.storage
faulty = faulty_storage.Driver() faulty = faulty_storage.Driver()
setattr(marconi.Bootstrap, "storage", faulty) setattr(marconi.Bootstrap, 'storage', faulty)
super(TestBaseFaulty, self).setUp() super(TestBaseFaulty, self).setUp()
def tearDown(self): def tearDown(self):
setattr(marconi.Bootstrap, "storage", self._storage_backup) setattr(marconi.Bootstrap, 'storage', self._storage_backup)
super(TestBaseFaulty, self).tearDown() super(TestBaseFaulty, self).tearDown()

View File

@ -35,7 +35,7 @@ class TestWSGIAuth(base.TestBase):
def test_non_authenticated(self): def test_non_authenticated(self):
env = testing.create_environ('/v1/480924/queues/', env = testing.create_environ('/v1/480924/queues/',
method="GET", method='GET',
headers=self.headers) headers=self.headers)
self.app(env, self.srmock) self.app(env, self.srmock)

View File

@ -32,49 +32,49 @@ class ClaimsBaseTest(base.TestBase):
doc = '{"_ttl": 60 }' doc = '{"_ttl": 60 }'
env = testing.create_environ('/v1/480924/queues/fizbit', env = testing.create_environ('/v1/480924/queues/fizbit',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
doc = json.dumps([{"body": 239, "ttl": 30}] * 10) doc = json.dumps([{'body': 239, 'ttl': 30}] * 10)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body=doc, body=doc,
headers={'Client-ID': '30387f00'}) headers={'Client-ID': '30387f00'})
self.app(env, self.srmock) self.app(env, self.srmock)
def test_bad_claim(self): def test_bad_claim(self):
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST") method='POST')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", body='[') method='POST', body='[')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", body='{}') method='POST', body='{}')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
def test_bad_patch(self): def test_bad_patch(self):
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", method='POST',
body='{"ttl": 10}') body='{"ttl": 10}')
self.app(env, self.srmock) self.app(env, self.srmock)
target = self.srmock.headers_dict['Location'] target = self.srmock.headers_dict['Location']
env = testing.create_environ(target, method="PATCH") env = testing.create_environ(target, method='PATCH')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ(target, method="PATCH", body='{') env = testing.create_environ(target, method='PATCH', body='{')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
@ -85,7 +85,7 @@ class ClaimsBaseTest(base.TestBase):
# claim some messages # claim some messages
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", method='POST',
body=doc) body=doc)
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
@ -98,7 +98,7 @@ class ClaimsBaseTest(base.TestBase):
# no more messages to claim # no more messages to claim
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", method='POST',
body=doc, body=doc,
query_string='limit=3') query_string='limit=3')
@ -107,7 +107,7 @@ class ClaimsBaseTest(base.TestBase):
# check its metadata # check its metadata
env = testing.create_environ(target, method="GET") env = testing.create_environ(target, method='GET')
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
st = json.loads(body[0]) st = json.loads(body[0])
@ -121,7 +121,7 @@ class ClaimsBaseTest(base.TestBase):
# delete a message with its associated claim # delete a message with its associated claim
env = testing.create_environ(msg_target, query_string=params, env = testing.create_environ(msg_target, query_string=params,
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
@ -135,14 +135,14 @@ class ClaimsBaseTest(base.TestBase):
env = testing.create_environ(target, env = testing.create_environ(target,
body='{"ttl": 60}', body='{"ttl": 60}',
method="PATCH") method='PATCH')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
# get the claimed messages again # get the claimed messages again
env = testing.create_environ(target, method="GET") env = testing.create_environ(target, method='GET')
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
st = json.loads(body[0]) st = json.loads(body[0])
@ -152,7 +152,7 @@ class ClaimsBaseTest(base.TestBase):
# delete the claim # delete the claim
env = testing.create_environ(st['href'], method="DELETE") env = testing.create_environ(st['href'], method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
@ -160,7 +160,7 @@ class ClaimsBaseTest(base.TestBase):
# can not delete a message with a non-existing claim # can not delete a message with a non-existing claim
env = testing.create_environ(msg_target, query_string=params, env = testing.create_environ(msg_target, query_string=params,
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_403) self.assertEquals(self.srmock.status, falcon.HTTP_403)
@ -172,12 +172,12 @@ class ClaimsBaseTest(base.TestBase):
# get & update a non existing claim # get & update a non existing claim
env = testing.create_environ(st['href'], method="GET") env = testing.create_environ(st['href'], method='GET')
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_404) self.assertEquals(self.srmock.status, falcon.HTTP_404)
env = testing.create_environ(st['href'], method="PATCH", body=doc) env = testing.create_environ(st['href'], method='PATCH', body=doc)
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_404) self.assertEquals(self.srmock.status, falcon.HTTP_404)
@ -185,14 +185,14 @@ class ClaimsBaseTest(base.TestBase):
def test_nonexistent(self): def test_nonexistent(self):
doc = '{"ttl": 10}' doc = '{"ttl": 10}'
env = testing.create_environ('/v1/480924/queues/nonexistent/claims', env = testing.create_environ('/v1/480924/queues/nonexistent/claims',
method="POST", body=doc) method='POST', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_404) self.assertEquals(self.srmock.status, falcon.HTTP_404)
def tearDown(self): def tearDown(self):
env = testing.create_environ('/v1/480924/queues/fizbit', env = testing.create_environ('/v1/480924/queues/fizbit',
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
super(ClaimsBaseTest, self).tearDown() super(ClaimsBaseTest, self).tearDown()
@ -203,11 +203,11 @@ class ClaimsMongoDBTests(ClaimsBaseTest):
config_filename = 'wsgi_mongodb.conf' config_filename = 'wsgi_mongodb.conf'
def setUp(self): def setUp(self):
if not os.environ.get("MONGODB_TEST_LIVE"): if not os.environ.get('MONGODB_TEST_LIVE'):
self.skipTest("No MongoDB instance running") self.skipTest('No MongoDB instance running')
super(ClaimsMongoDBTests, self).setUp() super(ClaimsMongoDBTests, self).setUp()
self.cfg = config.namespace("drivers:storage:mongodb").from_options() self.cfg = config.namespace('drivers:storage:mongodb').from_options()
def tearDown(self): def tearDown(self):
conn = pymongo.MongoClient(self.cfg.uri) conn = pymongo.MongoClient(self.cfg.uri)
@ -227,7 +227,7 @@ class ClaimsFaultyDriverTests(base.TestBaseFaulty):
def test_simple(self): def test_simple(self):
doc = '{"ttl": 100}' doc = '{"ttl": 100}'
env = testing.create_environ('/v1/480924/queues/fizbit/claims', env = testing.create_environ('/v1/480924/queues/fizbit/claims',
method="POST", method='POST',
body=doc) body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
@ -235,14 +235,14 @@ class ClaimsFaultyDriverTests(base.TestBaseFaulty):
env = testing.create_environ('/v1/480924/queues/fizbit/claims' env = testing.create_environ('/v1/480924/queues/fizbit/claims'
'/nonexistent', '/nonexistent',
method="GET") method='GET')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)
env = testing.create_environ('/v1/480924/queues/fizbit/claims' env = testing.create_environ('/v1/480924/queues/fizbit/claims'
'/nonexistent', '/nonexistent',
method="PATCH", method='PATCH',
body=doc) body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
@ -250,7 +250,7 @@ class ClaimsFaultyDriverTests(base.TestBaseFaulty):
env = testing.create_environ('/v1/480924/queues/fizbit/claims' env = testing.create_environ('/v1/480924/queues/fizbit/claims'
'/nonexistent', '/nonexistent',
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)

View File

@ -29,7 +29,7 @@ class MessagesBaseTest(base.TestBase):
doc = '{"_ttl": 60}' doc = '{"_ttl": 60}'
env = testing.create_environ('/v1/480924/queues/fizbit', env = testing.create_environ('/v1/480924/queues/fizbit',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.headers = { self.headers = {
@ -38,23 +38,23 @@ class MessagesBaseTest(base.TestBase):
def tearDown(self): def tearDown(self):
env = testing.create_environ('/v1/480924/queues/fizbit', env = testing.create_environ('/v1/480924/queues/fizbit',
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
super(MessagesBaseTest, self).tearDown() super(MessagesBaseTest, self).tearDown()
def test_post(self): def test_post(self):
doc = ''' doc = """
[ [
{"body": 239, "ttl": 10}, {"body": 239, "ttl": 10},
{"body": {"key": "value"}, "ttl": 20}, {"body": {"key": "value"}, "ttl": 20},
{"body": [1, 3], "ttl": 30} {"body": [1, 3], "ttl": 30}
] ]
''' """
path = '/v1/480924/queues/fizbit/messages' path = '/v1/480924/queues/fizbit/messages'
env = testing.create_environ(path, env = testing.create_environ(path,
method="POST", method='POST',
body=doc, body=doc,
headers=self.headers) headers=self.headers)
@ -76,7 +76,7 @@ class MessagesBaseTest(base.TestBase):
for msg_id in msg_ids: for msg_id in msg_ids:
message_uri = path + '/' + msg_id message_uri = path + '/' + msg_id
env = testing.create_environ(message_uri, method="GET") env = testing.create_environ(message_uri, method='GET')
body = self.app(env, self.srmock) body = self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_200) self.assertEquals(self.srmock.status, falcon.HTTP_200)
@ -92,14 +92,14 @@ class MessagesBaseTest(base.TestBase):
def test_post_bad_message(self): def test_post_bad_message(self):
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
headers=self.headers) headers=self.headers)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body='[', body='[',
headers=self.headers) headers=self.headers)
@ -107,7 +107,7 @@ class MessagesBaseTest(base.TestBase):
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body='[]', body='[]',
headers=self.headers) headers=self.headers)
@ -115,7 +115,7 @@ class MessagesBaseTest(base.TestBase):
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body='{}', body='{}',
headers=self.headers) headers=self.headers)
@ -127,19 +127,19 @@ class MessagesBaseTest(base.TestBase):
[msg_id] = self._get_msg_ids(self.srmock.headers_dict) [msg_id] = self._get_msg_ids(self.srmock.headers_dict)
env = testing.create_environ('/v1/480924/queues/fizbit/messages/' env = testing.create_environ('/v1/480924/queues/fizbit/messages/'
+ msg_id, method="GET") + msg_id, method='GET')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_200) self.assertEquals(self.srmock.status, falcon.HTTP_200)
env = testing.create_environ('/v1/480924/queues/fizbit/messages/' env = testing.create_environ('/v1/480924/queues/fizbit/messages/'
+ msg_id, method="DELETE") + msg_id, method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
env = testing.create_environ('/v1/480924/queues/fizbit/messages/' env = testing.create_environ('/v1/480924/queues/fizbit/messages/'
+ msg_id, method="GET") + msg_id, method='GET')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_404) self.assertEquals(self.srmock.status, falcon.HTTP_404)
@ -203,23 +203,23 @@ class MessagesBaseTest(base.TestBase):
def test_no_uuid(self): def test_no_uuid(self):
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body='[{"body": 0, "ttl": 0}]') body='[{"body": 0, "ttl": 0}]')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="GET") method='GET')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
def _post_messages(self, target, repeat=1): def _post_messages(self, target, repeat=1):
doc = json.dumps([{"body": 239, "ttl": 30}] * repeat) doc = json.dumps([{'body': 239, 'ttl': 30}] * repeat)
env = testing.create_environ(target, env = testing.create_environ(target,
method="POST", method='POST',
body=doc, body=doc,
headers=self.headers) headers=self.headers)
self.app(env, self.srmock) self.app(env, self.srmock)
@ -238,8 +238,8 @@ class MessagesMongoDBTests(MessagesBaseTest):
config_filename = 'wsgi_mongodb.conf' config_filename = 'wsgi_mongodb.conf'
def setUp(self): def setUp(self):
if not os.environ.get("MONGODB_TEST_LIVE"): if not os.environ.get('MONGODB_TEST_LIVE'):
self.skipTest("No MongoDB instance running") self.skipTest('No MongoDB instance running')
super(MessagesMongoDBTests, self).setUp() super(MessagesMongoDBTests, self).setUp()
@ -255,7 +255,7 @@ class MessagesFaultyDriverTests(base.TestBaseFaulty):
} }
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="POST", method='POST',
body=doc, body=doc,
headers=headers) headers=headers)
@ -263,7 +263,7 @@ class MessagesFaultyDriverTests(base.TestBaseFaulty):
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)
env = testing.create_environ('/v1/480924/queues/fizbit/messages', env = testing.create_environ('/v1/480924/queues/fizbit/messages',
method="GET", method='GET',
headers=headers) headers=headers)
self.app(env, self.srmock) self.app(env, self.srmock)
@ -271,14 +271,14 @@ class MessagesFaultyDriverTests(base.TestBaseFaulty):
env = testing.create_environ('/v1/480924/queues/fizbit/messages' env = testing.create_environ('/v1/480924/queues/fizbit/messages'
'/nonexistent', '/nonexistent',
method="GET") method='GET')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)
env = testing.create_environ('/v1/480924/queues/fizbit/messages' env = testing.create_environ('/v1/480924/queues/fizbit/messages'
'/nonexistent', '/nonexistent',
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)

View File

@ -35,7 +35,7 @@ class QueueLifecycleBaseTest(base.TestBase):
# Create # Create
env = testing.create_environ('/v1/480924/queues/gumshoe', env = testing.create_environ('/v1/480924/queues/gumshoe',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_201) self.assertEquals(self.srmock.status, falcon.HTTP_201)
@ -51,7 +51,7 @@ class QueueLifecycleBaseTest(base.TestBase):
# Delete # Delete
env = testing.create_environ('/v1/480924/queues/gumshoe', env = testing.create_environ('/v1/480924/queues/gumshoe',
method="DELETE") method='DELETE')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
@ -63,22 +63,22 @@ class QueueLifecycleBaseTest(base.TestBase):
self.assertEquals(self.srmock.status, falcon.HTTP_404) self.assertEquals(self.srmock.status, falcon.HTTP_404)
def test_no_metadata(self): def test_no_metadata(self):
env = testing.create_environ('/v1/480924/queues/fizbat', method="PUT") env = testing.create_environ('/v1/480924/queues/fizbat', method='PUT')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
def test_bad_metadata(self): def test_bad_metadata(self):
env = testing.create_environ('/v1/480924/queues/fizbat', env = testing.create_environ('/v1/480924/queues/fizbat',
body="{", body='{',
method="PUT") method='PUT')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
env = testing.create_environ('/v1/480924/queues/fizbat', env = testing.create_environ('/v1/480924/queues/fizbat',
body="[]", body='[]',
method="PUT") method='PUT')
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
@ -88,7 +88,7 @@ class QueueLifecycleBaseTest(base.TestBase):
padding_len = transport.MAX_QUEUE_METADATA_SIZE - (len(doc) - 2) + 1 padding_len = transport.MAX_QUEUE_METADATA_SIZE - (len(doc) - 2) + 1
doc = doc % ('x' * padding_len) doc = doc % ('x' * padding_len)
env = testing.create_environ('/v1/480924/queues/fizbat', env = testing.create_environ('/v1/480924/queues/fizbat',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
@ -98,7 +98,7 @@ class QueueLifecycleBaseTest(base.TestBase):
padding_len = transport.MAX_QUEUE_METADATA_SIZE * 100 padding_len = transport.MAX_QUEUE_METADATA_SIZE * 100
doc = doc % ('x' * padding_len) doc = doc % ('x' * padding_len)
env = testing.create_environ('/v1/480924/queues/gumshoe', env = testing.create_environ('/v1/480924/queues/gumshoe',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_400) self.assertEquals(self.srmock.status, falcon.HTTP_400)
@ -109,7 +109,7 @@ class QueueLifecycleBaseTest(base.TestBase):
padding_len = transport.MAX_QUEUE_METADATA_SIZE - (len(doc) - 2) padding_len = transport.MAX_QUEUE_METADATA_SIZE - (len(doc) - 2)
doc = doc % ('x' * padding_len) doc = doc % ('x' * padding_len)
env = testing.create_environ('/v1/480924/queues/gumshoe', env = testing.create_environ('/v1/480924/queues/gumshoe',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_201) self.assertEquals(self.srmock.status, falcon.HTTP_201)
@ -124,7 +124,7 @@ class QueueLifecycleBaseTest(base.TestBase):
# Create # Create
doc1 = '{"messages": {"ttl": 600}}' doc1 = '{"messages": {"ttl": 600}}'
env = testing.create_environ('/v1/480924/queues/xyz', env = testing.create_environ('/v1/480924/queues/xyz',
method="PUT", body=doc1) method='PUT', body=doc1)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_201) self.assertEquals(self.srmock.status, falcon.HTTP_201)
@ -132,7 +132,7 @@ class QueueLifecycleBaseTest(base.TestBase):
# Update # Update
doc2 = '{"messages": {"ttl": 100}}' doc2 = '{"messages": {"ttl": 100}}'
env = testing.create_environ('/v1/480924/queues/xyz', env = testing.create_environ('/v1/480924/queues/xyz',
method="PUT", body=doc2) method='PUT', body=doc2)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_204) self.assertEquals(self.srmock.status, falcon.HTTP_204)
@ -155,17 +155,17 @@ class QueueLifecycleBaseTest(base.TestBase):
# Create some # Create some
env = testing.create_environ('/v1/480924/queues/q1', env = testing.create_environ('/v1/480924/queues/q1',
method="PUT", method='PUT',
body='{"_ttl": 30 }') body='{"_ttl": 30 }')
self.app(env, self.srmock) self.app(env, self.srmock)
env = testing.create_environ('/v1/480924/queues/q2', env = testing.create_environ('/v1/480924/queues/q2',
method="PUT", method='PUT',
body='{}') body='{}')
self.app(env, self.srmock) self.app(env, self.srmock)
env = testing.create_environ('/v1/480924/queues/q3', env = testing.create_environ('/v1/480924/queues/q3',
method="PUT", method='PUT',
body='{"_ttl": 30 }') body='{"_ttl": 30 }')
self.app(env, self.srmock) self.app(env, self.srmock)
@ -213,11 +213,11 @@ class QueueLifecycleMongoDBTests(QueueLifecycleBaseTest):
config_filename = 'wsgi_mongodb.conf' config_filename = 'wsgi_mongodb.conf'
def setUp(self): def setUp(self):
if not os.environ.get("MONGODB_TEST_LIVE"): if not os.environ.get('MONGODB_TEST_LIVE'):
self.skipTest("No MongoDB instance running") self.skipTest('No MongoDB instance running')
super(QueueLifecycleMongoDBTests, self).setUp() super(QueueLifecycleMongoDBTests, self).setUp()
self.cfg = config.namespace("drivers:storage:mongodb").from_options() self.cfg = config.namespace('drivers:storage:mongodb').from_options()
def tearDown(self): def tearDown(self):
conn = pymongo.MongoClient(self.cfg.uri) conn = pymongo.MongoClient(self.cfg.uri)
@ -237,7 +237,7 @@ class QueueFaultyDriverTests(base.TestBaseFaulty):
def test_simple(self): def test_simple(self):
doc = '{"messages": {"ttl": 600}}' doc = '{"messages": {"ttl": 600}}'
env = testing.create_environ('/v1/480924/queues/gumshoe', env = testing.create_environ('/v1/480924/queues/gumshoe',
method="PUT", body=doc) method='PUT', body=doc)
self.app(env, self.srmock) self.app(env, self.srmock)
self.assertEquals(self.srmock.status, falcon.HTTP_503) self.assertEquals(self.srmock.status, falcon.HTTP_503)
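The oversized-metadata cases above lean on a small padding calculation. A worked sketch of that arithmetic (the 'big' key and document layout are illustrative, not the test's actual payload):

    MAX_QUEUE_METADATA_SIZE = 64 * 1024  # mirrors marconi.transport

    doc_template = '{"big": "%s"}'

    # Subtracting 2 discounts the '%s' placeholder the padding replaces, so
    # adding 1 leaves the serialized document exactly one byte over the limit.
    padding_len = MAX_QUEUE_METADATA_SIZE - (len(doc_template) - 2) + 1
    doc = doc_template % ('x' * padding_len)

    assert len(doc) == MAX_QUEUE_METADATA_SIZE + 1

Dropping the "+ 1" pads the document to exactly the limit, which is the accepted (HTTP_201) case above.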
View File
@ -44,7 +44,7 @@ class TestBase(testtools.TestCase):
"""Returns the full path to the specified Marconi conf file. """Returns the full path to the specified Marconi conf file.
:param filename: Name of the conf file to find (e.g., :param filename: Name of the conf file to find (e.g.,
"wsgi_memory.conf") 'wsgi_memory.conf')
""" """
parent = os.path.dirname(self._my_dir()) parent = os.path.dirname(self._my_dir())
@ -54,7 +54,7 @@ class TestBase(testtools.TestCase):
"""Loads `filename` configuration file. """Loads `filename` configuration file.
:param filename: Name of the conf file to find (e.g., :param filename: Name of the conf file to find (e.g.,
"wsgi_memory.conf") 'wsgi_memory.conf')
:returns: Project's config object. :returns: Project's config object.
""" """
View File
@ -4,10 +4,10 @@ from marconi.common import config
from marconi.transport import base from marconi.transport import base
OPTIONS = { OPTIONS = {
"auth_strategy": "" 'auth_strategy': ""
} }
cfg = config.project('marconi').from_options(**OPTIONS) CFG = config.project('marconi').from_options(**OPTIONS)
MAX_QUEUE_METADATA_SIZE = 64 * 1024 MAX_QUEUE_METADATA_SIZE = 64 * 1024
"""Maximum metadata size per queue when serialized as JSON""" """Maximum metadata size per queue when serialized as JSON"""
View File
@ -40,7 +40,7 @@ class KeystoneAuth(object):
return auth_token.AuthProtocol(app, conf=conf) return auth_token.AuthProtocol(app, conf=conf)
STRATEGIES["keystone"] = KeystoneAuth STRATEGIES['keystone'] = KeystoneAuth
def strategy(strategy): def strategy(strategy):
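The hunk above touches the auth strategy registry: classes register themselves in STRATEGIES under a name and are looked up via strategy(). A rough, self-contained sketch of that shape (the install() body and the RuntimeError are placeholders; the real module wraps the app in keystone's auth_token middleware and has its own error handling):

    STRATEGIES = {}


    class KeystoneAuth(object):
        @classmethod
        def install(cls, app, conf):
            # Placeholder: the real implementation returns
            # auth_token.AuthProtocol(app, conf=conf).
            return app


    STRATEGIES['keystone'] = KeystoneAuth


    def strategy(name):
        """Return the strategy registered under `name`, e.g. 'keystone'."""
        try:
            return STRATEGIES[name]
        except KeyError:
            raise RuntimeError('unknown auth strategy: %s' % name)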
View File
@ -30,9 +30,9 @@ OPTIONS = {
'port': 8888 'port': 8888
} }
pconfig = config.project('marconi') PROJECT_CFG = config.project('marconi')
gcfg = pconfig.from_options() GLOBAL_CFG = PROJECT_CFG.from_options()
lcfg = config.namespace('drivers:transport:wsgi').from_options(**OPTIONS) WSGI_CFG = config.namespace('drivers:transport:wsgi').from_options(**OPTIONS)
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@ -77,13 +77,14 @@ class Driver(transport.DriverBase):
'/claims/{claim_id}', claim_item) '/claims/{claim_id}', claim_item)
# NOTE(flaper87): Install Auth # NOTE(flaper87): Install Auth
if gcfg.auth_strategy: if GLOBAL_CFG.auth_strategy:
strategy = auth.strategy(gcfg.auth_strategy) strategy = auth.strategy(GLOBAL_CFG.auth_strategy)
self.app = strategy.install(self.app, pconfig.conf) self.app = strategy.install(self.app, PROJECT_CFG.conf)
def listen(self): def listen(self):
msg = _("Serving on host %(bind)s:%(port)s") % {"bind": lcfg.bind, msg = _('Serving on host %(bind)s:%(port)s')
"port": lcfg.port} msg %= {'bind': WSGI_CFG.bind, 'port': WSGI_CFG.port}
LOG.debug(msg) LOG.debug(msg)
httpd = simple_server.make_server(lcfg.bind, lcfg.port, self.app) httpd = simple_server.make_server(WSGI_CFG.bind, WSGI_CFG.port,
self.app)
httpd.serve_forever() httpd.serve_forever()
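Besides the ALL_CAPS renames, listen() now formats the log message in two steps, which keeps the translatable template on its own line within the length limit. A stand-alone sketch of that pattern (the bind/port constants and the no-op _() stub are stand-ins for WSGI_CFG and the gettext hook, and print replaces LOG.debug):

    from wsgiref import simple_server


    def _(s):
        return s  # stand-in for the installed gettext hook

    WSGI_BIND = '0.0.0.0'  # stands in for WSGI_CFG.bind
    WSGI_PORT = 8888       # stands in for WSGI_CFG.port


    def listen(app):
        msg = _('Serving on host %(bind)s:%(port)s')
        msg %= {'bind': WSGI_BIND, 'port': WSGI_PORT}
        print(msg)

        httpd = simple_server.make_server(WSGI_BIND, WSGI_PORT, app)
        httpd.serve_forever()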
View File
@ -82,7 +82,7 @@ def filter_stream(stream, len, spec, doctype=JSONObject):
# streaming JSON deserializer (see above) # streaming JSON deserializer (see above)
return (filter(obj, spec) for obj in document) return (filter(obj, spec) for obj in document)
raise ValueError("doctype not in (JSONObject, JSONArray)") raise ValueError('doctype not in (JSONObject, JSONArray)')
# TODO(kgriffs): Consider moving this to Falcon and/or Oslo # TODO(kgriffs): Consider moving this to Falcon and/or Oslo
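For context, filter_stream() dispatches on doctype and raises the ValueError above for anything else. A very loose sketch of that dispatch, with dict and list standing in for the helpers' real doctype markers and the per-object spec filtering omitted:

    import json

    JSONObject = dict   # stand-ins for the helpers' real doctype markers
    JSONArray = list


    def filter_stream(stream, length, spec, doctype=JSONObject):
        # The real helper validates each object against spec; this sketch
        # only shows the doctype dispatch and the error case.
        document = json.loads(stream.read(length))

        if doctype is JSONObject:
            return (document,)

        if doctype is JSONArray:
            return iter(document)

        raise ValueError('doctype not in (JSONObject, JSONArray)')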
View File
@ -90,7 +90,7 @@ class CollectionResource(object):
resp.location = req.path + '/' + resource resp.location = req.path + '/' + resource
hrefs = [req.path + '/' + id for id in message_ids] hrefs = [req.path + '/' + id for id in message_ids]
body = {"resources": hrefs, "partial": partial} body = {'resources': hrefs, 'partial': partial}
resp.body = helpers.to_json(body) resp.body = helpers.to_json(body)
def on_get(self, req, resp, project_id, queue_name): def on_get(self, req, resp, project_id, queue_name):
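The responder shown above collects the new message hrefs into a 'resources'/'partial' document. A small stand-alone version of that assembly (json.dumps stands in for marconi's helpers.to_json, and the message id is made up):

    import json


    def make_messages_body(req_path, message_ids, partial=False):
        # Mirrors the href/body assembly in CollectionResource above.
        hrefs = [req_path + '/' + msg_id for msg_id in message_ids]
        body = {'resources': hrefs, 'partial': partial}
        return json.dumps(body)


    print(make_messages_body('/v1/480924/queues/fizbit/messages',
                             ['50b68a50d6f5b8c8a7c62b01']))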