Use a real MongoDB instance to run unit tests

This will allow more real tests, and use of more functionality not
implemented in MIM such as aggregation.

Change-Id: Ie38deadf190db33863c99d4610157349484ac10f
This commit is contained in:
Julien Danjou 2013-06-17 17:32:13 +02:00
parent 65c4790303
commit 048c59c930
10 changed files with 73 additions and 149 deletions

View File

@ -21,7 +21,6 @@
""" """
import copy import copy
import datetime
import operator import operator
import os import os
import re import re
@ -155,11 +154,32 @@ def make_query_from_filter(sample_filter, require_meter=True):
return q return q
class ConnectionPool(object):
def __init__(self):
self._pool = {}
def connect(self, opts):
# opts is a dict, dicts are unhashable, convert to tuple
connection_pool_key = tuple(sorted(opts.items()))
if connection_pool_key not in self._pool:
LOG.info('connecting to MongoDB replicaset "%s" on %s',
opts['replica_set'],
opts['netloc'])
self._pool[connection_pool_key] = pymongo.Connection(
opts['netloc'],
replicaSet=opts['replica_set'],
safe=True)
return self._pool.get(connection_pool_key)
class Connection(base.Connection): class Connection(base.Connection):
"""MongoDB connection. """MongoDB connection.
""" """
_mim_instance = None CONNECTION_POOL = ConnectionPool()
MAP_STATS = bson.code.Code(""" MAP_STATS = bson.code.Code("""
function () { function () {
@ -196,13 +216,20 @@ class Connection(base.Connection):
REDUCE_STATS = bson.code.Code(""" REDUCE_STATS = bson.code.Code("""
function (key, values) { function (key, values) {
var res = values[0]; var res = { min: values[0].min,
max: values[0].max,
count: values[0].count,
sum: values[0].sum,
period_start: values[0].period_start,
period_end: values[0].period_end,
duration_start: values[0].duration_start,
duration_end: values[0].duration_end };
for ( var i=1; i<values.length; i++ ) { for ( var i=1; i<values.length; i++ ) {
if ( values[i].min < res.min ) if ( values[i].min < res.min )
res.min = values[i].min; res.min = values[i].min;
if ( values[i].max > res.max ) if ( values[i].max > res.max )
res.max = values[i].max; res.max = values[i].max;
res.count += values[i].count; res.count = NumberInt(res.count + values[i].count);
res.sum += values[i].sum; res.sum += values[i].sum;
if ( values[i].duration_start < res.duration_start ) if ( values[i].duration_start < res.duration_start )
res.duration_start = values[i].duration_start; res.duration_start = values[i].duration_start;
@ -224,33 +251,23 @@ class Connection(base.Connection):
def __init__(self, conf): def __init__(self, conf):
opts = self._parse_connection_url(conf.database.connection) opts = self._parse_connection_url(conf.database.connection)
LOG.info('connecting to MongoDB replicaset "%s" on %s',
conf.storage_mongodb.replica_set_name,
opts['netloc'])
if opts['netloc'] == '__test__': if opts['netloc'] == '__test__':
url = os.environ.get('CEILOMETER_TEST_MONGODB_URL') url = os.environ.get('CEILOMETER_TEST_MONGODB_URL')
if url: if not url:
raise RuntimeError(
"No MongoDB test URL set,"
"export CEILOMETER_TEST_MONGODB_URL environment variable")
opts = self._parse_connection_url(url) opts = self._parse_connection_url(url)
self.conn = pymongo.Connection(opts['netloc'], safe=True)
else: # FIXME(jd) This should be a parameter in the database URL, not global
# MIM will die if we have too many connections, so use a opts['replica_set'] = conf.storage_mongodb.replica_set_name
# Singleton
if Connection._mim_instance is None: # NOTE(jd) Use our own connection pooling on top of the Pymongo one.
try: # We need that otherwise we overflow the MongoDB instance with new
from ming import mim # connections since we instantiate a Pymongo client each time someone
except ImportError: # requires a new storage connection.
import testtools self.conn = self.CONNECTION_POOL.connect(opts)
raise testtools.testcase.TestSkipped('requires mim')
LOG.debug('Creating a new MIM Connection object')
Connection._mim_instance = mim.Connection()
self.conn = Connection._mim_instance
LOG.debug('Using MIM for test connection')
else:
self.conn = pymongo.Connection(
opts['netloc'],
replicaSet=conf.storage_mongodb.replica_set_name,
safe=True)
self.db = getattr(self.conn, opts['dbname']) self.db = getattr(self.conn, opts['dbname'])
if 'username' in opts: if 'username' in opts:
@ -281,12 +298,6 @@ class Connection(base.Connection):
pass pass
def clear(self): def clear(self):
if self._mim_instance is not None:
# Don't want to use drop_database() because
# may end up running out of spidermonkey instances.
# http://davisp.lighthouseapp.com/projects/26898/tickets/22
self.db.clear()
else:
self.conn.drop_database(self.db) self.conn.drop_database(self.db)
@staticmethod @staticmethod
@ -526,34 +537,6 @@ class Connection(base.Connection):
for r in results['results']), for r in results['results']),
key=operator.attrgetter('period_start')) key=operator.attrgetter('period_start'))
def _fix_interval_min_max(self, a_min, a_max):
if hasattr(a_min, 'valueOf') and a_min.valueOf is not None:
# NOTE (dhellmann): HACK ALERT
#
# The real MongoDB server can handle Date objects and
# the driver converts them to datetime instances
# correctly but the in-memory implementation in MIM
# (used by the tests) returns a spidermonkey.Object
# representing the "value" dictionary and there
# doesn't seem to be a way to recursively introspect
# that object safely to convert the min and max values
# back to datetime objects. In this method, we know
# what type the min and max values are expected to be,
# so it is safe to do the conversion
# here. JavaScript's time representation uses
# different units than Python's, so we divide to
# convert to the right units and then create the
# datetime instances to return.
#
# The issue with MIM is documented at
# https://sourceforge.net/p/merciless/bugs/3/
#
a_min = datetime.datetime.fromtimestamp(
a_min.valueOf() // 1000)
a_max = datetime.datetime.fromtimestamp(
a_max.valueOf() // 1000)
return (a_min, a_max)
def get_alarms(self, name=None, user=None, def get_alarms(self, name=None, user=None,
project=None, enabled=True, alarm_id=None): project=None, enabled=True, alarm_id=None):
"""Yields a lists of alarms that match filters """Yields a lists of alarms that match filters
@ -612,22 +595,3 @@ class Connection(base.Connection):
:param event_filter: EventFilter instance :param event_filter: EventFilter instance
""" """
raise NotImplementedError('Events not implemented.') raise NotImplementedError('Events not implemented.')
def require_map_reduce(conn):
"""Raises SkipTest if the connection is using mim.
"""
# NOTE(dhellmann): mim requires spidermonkey to implement the
# map-reduce functions, so if we can't import it then just
# skip these tests unless we aren't using mim.
try:
import spidermonkey # noqa
except BaseException:
try:
from ming import mim
if hasattr(conn, "conn") and isinstance(conn.conn, mim.Connection):
import testtools
raise testtools.testcase.TestSkipped('requires spidermonkey')
except ImportError:
import testtools
raise testtools.testcase.TestSkipped('requires mim')

15
run-tests.sh Executable file
View File

@ -0,0 +1,15 @@
#!/bin/bash
set -e
# Nova notifier tests
bash tools/init_testr_if_needed.sh
python setup.py testr --slowest --testr-args="--concurrency=1 --here=nova_tests $*"
# Main unit tests
MONGO_DATA=`mktemp -d`
trap "rm -rf ${MONGO_DATA}" EXIT
mongod --maxConns 32 --smallfiles --quiet --noauth --port 29000 --dbpath "${MONGO_DATA}" --bind_ip localhost &
MONGO_PID=$!
trap "kill -9 ${MONGO_PID} || true" EXIT
export CEILOMETER_TEST_MONGODB_URL="mongodb://localhost:29000/ceilometer"
python setup.py testr --slowest --testr-args="--concurrency=1 $*"

View File

@ -5,9 +5,6 @@ mock
mox mox
fixtures>=0.3.12 fixtures>=0.3.12
Babel>=0.9.6 Babel>=0.9.6
# NOTE(dhellmann): Ming is necessary to provide the Mongo-in-memory
# implementation of MongoDB.
Ming>=0.3.4
http://tarballs.openstack.org/nova/nova-master.tar.gz#egg=nova http://tarballs.openstack.org/nova/nova-master.tar.gz#egg=nova
# We should use swift>1.7.5, but it's not yet available # We should use swift>1.7.5, but it's not yet available
swift swift
@ -18,7 +15,6 @@ sphinx
sphinxcontrib-pecanwsme>=0.2 sphinxcontrib-pecanwsme>=0.2
docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released.
oslo.sphinx oslo.sphinx
python-spidermonkey
python-subunit python-subunit
testrepository>=0.0.13 testrepository>=0.0.13
testtools>=0.9.29 testtools>=0.9.29

View File

@ -27,14 +27,12 @@ from ceilometer.publisher import rpc
from ceilometer import counter from ceilometer import counter
from ceilometer.tests import api as tests_api from ceilometer.tests import api as tests_api
from ceilometer.storage.impl_mongodb import require_map_reduce
class TestMaxProjectVolume(tests_api.TestBase): class TestMaxProjectVolume(tests_api.TestBase):
def setUp(self): def setUp(self):
super(TestMaxProjectVolume, self).setUp() super(TestMaxProjectVolume, self).setUp()
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):

View File

@ -26,14 +26,12 @@ from ceilometer.publisher import rpc
from ceilometer import counter from ceilometer import counter
from ceilometer.tests import api as tests_api from ceilometer.tests import api as tests_api
from ceilometer.storage.impl_mongodb import require_map_reduce
class TestMaxResourceVolume(tests_api.TestBase): class TestMaxResourceVolume(tests_api.TestBase):
def setUp(self): def setUp(self):
super(TestMaxResourceVolume, self).setUp() super(TestMaxResourceVolume, self).setUp()
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):

View File

@ -27,14 +27,12 @@ from ceilometer.publisher import rpc
from ceilometer import counter from ceilometer import counter
from ceilometer.tests import api as tests_api from ceilometer.tests import api as tests_api
from ceilometer.storage.impl_mongodb import require_map_reduce
class TestSumProjectVolume(tests_api.TestBase): class TestSumProjectVolume(tests_api.TestBase):
def setUp(self): def setUp(self):
super(TestSumProjectVolume, self).setUp() super(TestSumProjectVolume, self).setUp()
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):

View File

@ -27,14 +27,12 @@ from ceilometer.publisher import rpc
from ceilometer import counter from ceilometer import counter
from ceilometer.tests import api as tests_api from ceilometer.tests import api as tests_api
from ceilometer.storage.impl_mongodb import require_map_reduce
class TestSumResourceVolume(tests_api.TestBase): class TestSumResourceVolume(tests_api.TestBase):
def setUp(self): def setUp(self):
super(TestSumResourceVolume, self).setUp() super(TestSumResourceVolume, self).setUp()
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):

View File

@ -23,8 +23,6 @@ from oslo.config import cfg
from . import base from . import base
from ceilometer import counter from ceilometer import counter
from ceilometer.storage.impl_mongodb import Connection as mongo_conn
from ceilometer.storage.impl_mongodb import require_map_reduce
from ceilometer.publisher import rpc from ceilometer.publisher import rpc
@ -34,9 +32,6 @@ class TestMaxProjectVolume(base.FunctionalTest):
def setUp(self): def setUp(self):
super(TestMaxProjectVolume, self).setUp() super(TestMaxProjectVolume, self).setUp()
# TODO(gordc): remove when we drop mim
if isinstance(self.conn, mongo_conn):
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):
@ -137,9 +132,6 @@ class TestMaxResourceVolume(base.FunctionalTest):
def setUp(self): def setUp(self):
super(TestMaxResourceVolume, self).setUp() super(TestMaxResourceVolume, self).setUp()
# TODO(gordc): remove when we drop mim
if isinstance(self.conn, mongo_conn):
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):
@ -256,9 +248,6 @@ class TestSumProjectVolume(base.FunctionalTest):
def setUp(self): def setUp(self):
super(TestSumProjectVolume, self).setUp() super(TestSumProjectVolume, self).setUp()
# TODO(gordc): remove when we drop mim
if isinstance(self.conn, mongo_conn):
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):
@ -361,9 +350,6 @@ class TestSumResourceVolume(base.FunctionalTest):
def setUp(self): def setUp(self):
super(TestSumResourceVolume, self).setUp() super(TestSumResourceVolume, self).setUp()
# TODO(gordc): remove when we drop mim
if isinstance(self.conn, mongo_conn):
require_map_reduce(self.conn)
self.counters = [] self.counters = []
for i in range(3): for i in range(3):

View File

@ -18,56 +18,31 @@
"""Tests for ceilometer/storage/impl_mongodb.py """Tests for ceilometer/storage/impl_mongodb.py
.. note:: .. note::
In order to run the tests against another MongoDB server set the
(dhellmann) These tests have some dependencies which cannot be environment variable CEILOMETER_TEST_MONGODB_URL to point to a MongoDB
installed in the CI environment right now. server before running the tests.
Ming is necessary to provide the Mongo-in-memory implementation for
of MongoDB. The original source for Ming is at
http://sourceforge.net/project/merciless but there does not seem to
be a way to point to a "zipball" of the latest HEAD there, and we
need features present only in that version. I forked the project to
github to make it easier to install, and put the URL into the
test-requires file. Then I ended up making some changes to it so it
would be compatible with PyMongo's API.
https://github.com/dreamhost/Ming/zipball/master#egg=Ming
In order to run the tests that use map-reduce with MIM, some
additional system-level packages are required::
apt-get install nspr-config
apt-get install libnspr4-dev
apt-get install pkg-config
pip install python-spidermonkey
To run the tests *without* mim, set the environment variable
CEILOMETER_TEST_MONGODB_URL to a MongoDB URL before running tox.
""" """
import copy import copy
import datetime import datetime
from oslo.config import cfg
from tests.storage import base from tests.storage import base
from ceilometer.publisher import rpc from ceilometer.publisher import rpc
from ceilometer import counter from ceilometer import counter
from ceilometer.storage.impl_mongodb import require_map_reduce from ceilometer.storage import impl_mongodb
class MongoDBEngineTestBase(base.DBTestBase): class MongoDBEngineTestBase(base.DBTestBase):
database_connection = 'mongodb://__test__' database_connection = 'mongodb://__test__'
class IndexTest(MongoDBEngineTestBase): class MongoDBConnection(MongoDBEngineTestBase):
def test_connection_pooling(self):
def test_indexes_exist(self): self.assertEqual(self.conn.conn,
# ensure_index returns none if index already exists impl_mongodb.Connection(cfg.CONF).conn)
assert not self.conn.db.resource.ensure_index('foo',
name='resource_idx')
assert not self.conn.db.meter.ensure_index('foo',
name='meter_idx')
class UserTest(base.UserTest, MongoDBEngineTestBase): class UserTest(base.UserTest, MongoDBEngineTestBase):
@ -91,10 +66,7 @@ class RawSampleTest(base.RawSampleTest, MongoDBEngineTestBase):
class StatisticsTest(base.StatisticsTest, MongoDBEngineTestBase): class StatisticsTest(base.StatisticsTest, MongoDBEngineTestBase):
pass
def setUp(self):
super(StatisticsTest, self).setUp()
require_map_reduce(self.conn)
class AlarmTest(base.AlarmTest, MongoDBEngineTestBase): class AlarmTest(base.AlarmTest, MongoDBEngineTestBase):

View File

@ -7,10 +7,9 @@ deps = -r{toxinidir}/requirements.txt
setenv = VIRTUAL_ENV={envdir} setenv = VIRTUAL_ENV={envdir}
EVENTLET_NO_GREENDNS=yes EVENTLET_NO_GREENDNS=yes
commands = commands =
python setup.py testr --slowest --testr-args='--concurrency=1 {posargs}' bash -x {toxinidir}/run-tests.sh {posargs}
bash tools/init_testr_if_needed.sh
python setup.py testr --slowest --testr-args='--concurrency=1 --here=nova_tests {posargs}'
{toxinidir}/tools/conf/check_uptodate.sh {toxinidir}/tools/conf/check_uptodate.sh
sitepackages = False sitepackages = False
downloadcache = {toxworkdir}/_download downloadcache = {toxworkdir}/_download