Merge "Add SQLAlchemy implementation of groupby"

Jenkins authored 2013-08-22 17:52:14 +00:00, committed by Gerrit Code Review
commit ed12ff5132
10 changed files with 632 additions and 29 deletions

View File

@@ -176,7 +176,7 @@ class Connection(object):
"""
@abc.abstractmethod
def get_meter_statistics(self, sample_filter, period=None):
def get_meter_statistics(self, sample_filter, period=None, groupby=None):
"""Return an iterable of model.Statistics instances.
The filter must have a meter value set.
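
For context, a hedged usage sketch of the extended interface (not part of the diff): it mirrors the calls made by the new tests further down and assumes a configured storage backend is reachable through get_connection() from the storage package of the time.

    from oslo.config import cfg

    from ceilometer import storage

    # Sketch only: assumes cfg.CONF has been set up and samples recorded.
    conn = storage.get_connection(cfg.CONF)
    sample_filter = storage.SampleFilter(meter='instance')

    # With groupby, one Statistics object is returned per distinct user
    # instead of a single aggregate over all matching samples.
    stats = list(conn.get_meter_statistics(sample_filter, groupby=['user_id']))

    # Each Statistics instance carries its grouping key(s), for example
    # stats[0].groupby == {'user_id': 'user-1'}; drivers without group-by
    # support raise NotImplementedError instead.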

View File

@@ -494,7 +494,7 @@ class Connection(base.Connection):
timeutils.delta_seconds(stat.duration_start,
stat.duration_end)
def get_meter_statistics(self, sample_filter, period=None):
def get_meter_statistics(self, sample_filter, period=None, groupby=None):
"""Return an iterable of models.Statistics instances containing meter
statistics described by the query parameters.
@@ -507,6 +507,9 @@ class Connection(base.Connection):
because of all the Thrift traffic it is going to create.
"""
if groupby:
raise NotImplementedError("Group by not implemented.")
meter_table = self.conn.table(self.METER_TABLE)
q, start, stop = make_query_from_filter(sample_filter)
@@ -563,7 +566,8 @@
period_end=period_end,
duration=None,
duration_start=None,
duration_end=None)
duration_end=None,
groupby=None)
)
self._update_meter_stats(results[-1], meter)
return results

View File

@@ -133,7 +133,7 @@ class Connection(base.Connection):
"""
return []
def get_meter_statistics(self, sample_filter, period=None):
def get_meter_statistics(self, sample_filter, period=None, groupby=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.

View File

@@ -683,13 +683,16 @@ class Connection(base.Connection):
s['counter_unit'] = s.get('counter_unit', '')
yield models.Sample(**s)
def get_meter_statistics(self, sample_filter, period=None):
def get_meter_statistics(self, sample_filter, period=None, groupby=None):
"""Return an iterable of models.Statistics instance containing meter
statistics described by the query parameters.
The filter must have a meter value set.
"""
if groupby:
raise NotImplementedError("Group by not implemented.")
q = make_query_from_filter(sample_filter)
if period:
@@ -712,6 +715,10 @@
query=q,
)
# TODO(jd) implement groupby and remove this code
for r in results['results']:
r['value']['groupby'] = None
return sorted((models.Statistics(**(r['value']))
for r in results['results']),
key=operator.attrgetter('period_start'))

View File

@@ -431,9 +431,8 @@ class Connection(base.Connection):
)
@staticmethod
def _make_stats_query(sample_filter):
session = sqlalchemy_session.get_session()
query = session.query(
def _make_stats_query(sample_filter, groupby):
select = [
Meter.counter_unit.label('unit'),
func.min(Meter.timestamp).label('tsmin'),
func.max(Meter.timestamp).label('tsmax'),
@@ -441,12 +440,25 @@ class Connection(base.Connection):
func.sum(Meter.counter_volume).label('sum'),
func.min(Meter.counter_volume).label('min'),
func.max(Meter.counter_volume).label('max'),
func.count(Meter.counter_volume).label('count'))
func.count(Meter.counter_volume).label('count'),
]
session = sqlalchemy_session.get_session()
if groupby:
group_attributes = [getattr(Meter, g) for g in groupby]
select.extend(group_attributes)
query = session.query(*select)
if groupby:
query = query.group_by(*group_attributes)
return make_query_from_filter(query, sample_filter)
@staticmethod
def _stats_result_to_model(result, period, period_start, period_end):
def _stats_result_to_model(result, period, period_start,
period_end, groupby):
duration = (timeutils.delta_seconds(result.tsmin, result.tsmax)
if result.tsmin is not None and result.tsmax is not None
else None)
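
For reference, the grouped-aggregate pattern _make_stats_query relies on, shown standalone against a toy SQLAlchemy model (the table below is an illustrative stand-in, not ceilometer's real Meter schema): the grouping columns are appended to the SELECT list and repeated in GROUP BY, so every result row is one aggregation bucket carrying both the aggregates and its group values.

    import sqlalchemy as sa
    from sqlalchemy import func
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()


    class Meter(Base):
        # Illustrative stand-in for the real Meter model.
        __tablename__ = 'meter'
        id = sa.Column(sa.Integer, primary_key=True)
        user_id = sa.Column(sa.String(255))
        counter_volume = sa.Column(sa.Float)


    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    groupby = ['user_id']
    select = [
        func.min(Meter.counter_volume).label('min'),
        func.max(Meter.counter_volume).label('max'),
        func.avg(Meter.counter_volume).label('avg'),
        func.count(Meter.counter_volume).label('count'),
    ]
    group_attributes = [getattr(Meter, g) for g in groupby]
    select.extend(group_attributes)

    query = session.query(*select).group_by(*group_attributes)
    # Each row exposes min/max/avg/count plus one attribute per grouping
    # column (row.user_id here), which _stats_result_to_model reads back
    # with getattr(result, g).
    rows = query.all()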
@@ -463,24 +475,35 @@ class Connection(base.Connection):
period=period,
period_start=period_start,
period_end=period_end,
groupby=(dict((g, getattr(result, g)) for g in groupby)
if groupby else None)
)
def get_meter_statistics(self, sample_filter, period=None):
def get_meter_statistics(self, sample_filter, period=None, groupby=None):
"""Return an iterable of api_models.Statistics instances containing
meter statistics described by the query parameters.
The filter must have a meter value set.
"""
if not period or not sample_filter.start or not sample_filter.end:
res = self._make_stats_query(sample_filter).all()[0]
if groupby:
for group in groupby:
if group not in ['user_id', 'project_id', 'resource_id']:
raise NotImplementedError(
"Unable to group by these fields")
if not period:
for res in self._make_stats_query(sample_filter, groupby):
if res.count:
yield self._stats_result_to_model(res, 0, res.tsmin, res.tsmax)
yield self._stats_result_to_model(res, 0,
res.tsmin, res.tsmax,
groupby)
return
query = self._make_stats_query(sample_filter)
if not sample_filter.start or not sample_filter.end:
res = self._make_stats_query(sample_filter, None).first()
query = self._make_stats_query(sample_filter, groupby)
# HACK(jd) This is an awful method to compute stats by period, but
# since we're trying to be SQL agnostic we have to write portable
# code, so here it is, admire! We're going to do one request to get
@@ -492,8 +515,7 @@ class Connection(base.Connection):
period):
q = query.filter(Meter.timestamp >= period_start)
q = q.filter(Meter.timestamp < period_end)
r = q.all()[0]
# Don't return results that didn't have any data.
for r in q.all():
if r.count:
yield self._stats_result_to_model(
result=r,
@@ -501,6 +523,7 @@ class Connection(base.Connection):
period_end)),
period_start=period_start,
period_end=period_end,
groupby=groupby
)
@staticmethod
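
For reference, the per-period strategy the HACK(jd) comment describes, as a standalone sketch (the window helper is illustrative, not the driver's actual code): the requested time range is cut into fixed-width windows and the grouped query is re-filtered on timestamp for each window, skipping buckets that matched no samples.

    import datetime


    def time_windows(start, end, period):
        # Yield (window_start, window_end) pairs of `period` seconds that
        # cover [start, end); illustrative equivalent of the driver's slicing.
        delta = datetime.timedelta(seconds=period)
        window_start = start
        while window_start < end:
            yield window_start, window_start + delta
            window_start += delta


    windows = list(time_windows(datetime.datetime(2013, 8, 1, 10, 11),
                                datetime.datetime(2013, 8, 1, 18, 11),
                                7200))
    # Four 2-hour windows starting at 10:11, 12:11, 14:11 and 16:11. In the
    # driver, each window re-runs `query` with Meter.timestamp bounds, and
    # the `if r.count` check drops (group, window) buckets with no samples,
    # which is why no 12:11 period_start appears in the tests below.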

View File

@@ -212,7 +212,8 @@ class Statistics(Model):
def __init__(self, unit,
min, max, avg, sum, count,
period, period_start, period_end,
duration, duration_start, duration_end):
duration, duration_start, duration_end,
groupby):
"""Create a new statistics object.
:param unit: The unit type of the data set
@@ -227,13 +228,15 @@ class Statistics(Model):
:param duration: The total time for the matching samples
:param duration_start: The earliest time for the matching samples
:param duration_end: The latest time for the matching samples
:param groupby: The fields used to group the samples.
"""
Model.__init__(self, unit=unit,
min=min, max=max, avg=avg, sum=sum, count=count,
period=period, period_start=period_start,
period_end=period_end, duration=duration,
duration_start=duration_start,
duration_end=duration_end)
duration_end=duration_end,
groupby=groupby)
class Alarm(Model):
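
A minimal construction of the extended model, for illustration only: the module path follows the diff's models.Statistics usage, and the concrete values mirror the 'user-1' bucket of the grouped test fixtures further down.

    import datetime

    from ceilometer.storage import models

    stat = models.Statistics(
        unit='s',
        min=2, max=2, avg=2, sum=4, count=2,
        period=0,
        period_start=datetime.datetime(2013, 8, 1, 15, 37),
        period_end=datetime.datetime(2013, 8, 1, 16, 10),
        duration=1980,
        duration_start=datetime.datetime(2013, 8, 1, 15, 37),
        duration_end=datetime.datetime(2013, 8, 1, 16, 10),
        # New field: None when no grouping was requested, otherwise a dict
        # mapping each grouping field to its value for this bucket.
        groupby={'user_id': 'user-1'},
    )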

View File

@@ -64,7 +64,8 @@ class TestComputeDurationByResource(tests_api.TestBase,
period_end=None,
duration=end - start,
duration_start=start,
duration_end=end)
duration_end=end,
groupby=None)
self.stubs.Set(self.conn, 'get_meter_statistics',
get_meter_statistics)

View File

@@ -80,6 +80,7 @@ class TestComputeDurationByResource(FunctionalTest,
duration=duration,
duration_start=duration_start,
duration_end=duration_end,
groupby=None,
)
]
self._stub_interval_func(get_interval)
@@ -155,6 +156,7 @@
period=None,
period_start=None,
period_end=None,
groupby=None,
)
]
self._stub_interval_func(get_interval)
@@ -185,6 +187,7 @@
period=None,
period_start=None,
period_end=None,
groupby=None,
)
]
return (self.early1, self.early2)

View File

@@ -862,6 +862,561 @@ class StatisticsTest(DBTestBase):
assert results.avg == 6
class StatisticsGroupByTest(DBTestBase):
def prepare_data(self):
test_sample_data = (
{'volume': 2, 'user': 'user-1', 'project': 'project-1',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 16, 10),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-1',
'source': 'source-2'},
{'volume': 2, 'user': 'user-1', 'project': 'project-2',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 15, 37),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-1',
'source': 'source-2'},
{'volume': 1, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 11),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 1, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 40),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 2, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 14, 59),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 4, 'user': 'user-2', 'project': 'project-2',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 17, 28),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 4, 'user': 'user-3', 'project': 'project-1',
'resource': 'resource-3', 'timestamp': (2013, 8, 1, 11, 22),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
'source': 'source-3'},
)
for test_sample in test_sample_data:
c = sample.Sample(
'instance',
sample.TYPE_CUMULATIVE,
unit='s',
volume=test_sample['volume'],
user_id=test_sample['user'],
project_id=test_sample['project'],
resource_id=test_sample['resource'],
timestamp=datetime.datetime(*test_sample['timestamp']),
resource_metadata={'flavor': test_sample['metadata_flavor'],
'event': test_sample['metadata_event'], },
source=test_sample['source'],
)
msg = rpc.meter_message_from_counter(
c,
cfg.CONF.publisher_rpc.metering_secret,
)
self.conn.record_metering_data(msg)
def test_group_by_user(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f, groupby=['user_id']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['user_id']))
self.assertEqual(groupby_vals_set, set(['user-1', 'user-2', 'user-3']))
for r in results:
if r.groupby == {'user_id': 'user-1'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 2)
elif r.groupby == {'user_id': 'user-2'}:
self.assertEqual(r.count, 4)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 8)
self.assertEqual(r.avg, 2)
elif r.groupby == {'user_id': 'user-3'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
def test_group_by_resource(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f,
groupby=['resource_id']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['resource_id']))
self.assertEqual(groupby_vals_set, set(['resource-1',
'resource-2',
'resource-3']))
for r in results:
if r.groupby == {'resource_id': 'resource-1'}:
self.assertEqual(r.count, 3)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 6)
self.assertEqual(r.avg, 2)
elif r.groupby == {'resource_id': 'resource-2'}:
self.assertEqual(r.count, 3)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 6)
self.assertEqual(r.avg, 2)
elif r.groupby == {'resource_id': 'resource-3'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
def test_group_by_project(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f,
groupby=['project_id']))
self.assertEqual(len(results), 2)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['project_id']))
self.assertEqual(groupby_vals_set, set(['project-1', 'project-2']))
for r in results:
if r.groupby == {'project_id': 'project-1'}:
self.assertEqual(r.count, 5)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 10)
self.assertEqual(r.avg, 2)
elif r.groupby == {'project_id': 'project-2'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 6)
self.assertEqual(r.avg, 3)
def test_group_by_source(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f, groupby=['source']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['source']))
self.assertEqual(groupby_vals_set, set(['source-1',
'source-2',
'source-3']))
for r in results:
if r.groupby == {'source': 'source-1'}:
self.assertEqual(r.count, 4)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 8)
self.assertEqual(r.avg, 2)
elif r.groupby == {'source': 'source-2'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 2)
elif r.groupby == {'source': 'source-3'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
def test_group_by_unknown_field(self):
f = storage.SampleFilter(
meter='instance',
)
result = self.conn.get_meter_statistics(
f, groupby=['wtf'])
self.assertRaises(
NotImplementedError,
list,
result)
def test_group_by_metadata(self):
pass
def test_group_by_multiple_regular(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f,
groupby=['user_id',
'resource_id']))
self.assertEqual(len(results), 4)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['user_id', 'resource_id']))
self.assertEqual(groupby_vals_set, set(['user-1', 'user-2',
'user-3', 'resource-1',
'resource-2', 'resource-3']))
for r in results:
if r.groupby == {'user_id': 'user-1', 'resource_id': 'resource-1'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 2)
elif r.groupby == {'user_id': 'user-2',
'resource_id': 'resource-1'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 2)
elif r.groupby == {'user_id': 'user-2',
'resource_id': 'resource-2'}:
self.assertEqual(r.count, 3)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 6)
self.assertEqual(r.avg, 2)
elif r.groupby == {'user_id': 'user-3',
'resource_id': 'resource-3'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
else:
self.assertNotEqual(r.groupby, {'user_id': 'user-1',
'resource_id': 'resource-2'})
self.assertNotEqual(r.groupby, {'user_id': 'user-1',
'resource_id': 'resource-3'})
self.assertNotEqual(r.groupby, {'user_id': 'user-2',
'resource_id': 'resource-3'})
self.assertNotEqual(r.groupby, {'user_id': 'user-3',
'resource_id': 'resource-1'})
self.assertNotEqual(r.groupby, {'user_id': 'user-3',
'resource_id': 'resource-2'})
def test_group_by_multiple_metadata(self):
pass
def test_group_by_multiple_regular_metadata(self):
pass
def test_group_by_with_query_filter(self):
f = storage.SampleFilter(
meter='instance',
project='project-1',
)
results = list(self.conn.get_meter_statistics(
f,
groupby=['resource_id']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['resource_id']))
self.assertEqual(groupby_vals_set, set(['resource-1',
'resource-2',
'resource-3']))
for r in results:
if r.groupby == {'resource_id': 'resource-1'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 2)
elif r.groupby == {'resource_id': 'resource-2'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 1)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 1)
elif r.groupby == {'resource_id': 'resource-3'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
def test_group_by_metadata_with_query_filter(self):
pass
def test_group_by_with_query_filter_multiple(self):
f = storage.SampleFilter(
meter='instance',
user='user-2',
source='source-1',
)
results = list(self.conn.get_meter_statistics(
f,
groupby=['project_id', 'resource_id']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['project_id', 'resource_id']))
self.assertEqual(groupby_vals_set, set(['project-1', 'project-2',
'resource-1', 'resource-2']))
for r in results:
if r.groupby == {'project_id': 'project-1',
'resource_id': 'resource-1'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 2)
elif r.groupby == {'project_id': 'project-1',
'resource_id': 'resource-2'}:
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 1)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 1)
elif r.groupby == {'project_id': 'project-2',
'resource_id': 'resource-2'}:
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
def test_group_by_metadata_with_query_filter_multiple(self):
pass
def test_group_by_with_period(self):
f = storage.SampleFilter(
meter='instance',
)
results = list(self.conn.get_meter_statistics(f,
period=7200,
groupby=['project_id']))
self.assertEqual(len(results), 4)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['project_id']))
self.assertEqual(groupby_vals_set, set(['project-1', 'project-2']))
period_start_set = set([r.period_start for r in results])
period_start_valid = set([datetime.datetime(2013, 8, 1, 10, 11),
datetime.datetime(2013, 8, 1, 14, 11),
datetime.datetime(2013, 8, 1, 16, 11)])
self.assertEqual(period_start_set, period_start_valid)
for r in results:
if (r.groupby == {'project_id': 'project-1'} and
r.period_start == datetime.datetime(2013, 8, 1, 10, 11)):
self.assertEqual(r.count, 3)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 6)
self.assertEqual(r.avg, 2)
self.assertEqual(r.duration, 4260)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 10, 11))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 11, 22))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 12, 11))
elif (r.groupby == {'project_id': 'project-1'} and
r.period_start == datetime.datetime(2013, 8, 1, 14, 11)):
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 2)
self.assertEqual(r.duration, 4260)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 14, 59))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 16, 10))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 16, 11))
elif (r.groupby == {'project_id': 'project-2'} and
r.period_start == datetime.datetime(2013, 8, 1, 14, 11)):
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 2)
self.assertEqual(r.duration, 0)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 15, 37))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 15, 37))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 16, 11))
elif (r.groupby == {'project_id': 'project-2'} and
r.period_start == datetime.datetime(2013, 8, 1, 16, 11)):
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
self.assertEqual(r.duration, 0)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 17, 28))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 17, 28))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 18, 11))
else:
self.assertNotEqual([r.groupby, r.period_start],
[{'project_id': 'project-1'},
datetime.datetime(2013, 8, 1, 16, 11)])
self.assertNotEqual([r.groupby, r.period_start],
[{'project_id': 'project-2'},
datetime.datetime(2013, 8, 1, 10, 11)])
def test_group_by_metadata_with_period(self):
pass
def test_group_by_with_query_filter_and_period(self):
f = storage.SampleFilter(
meter='instance',
source='source-1',
)
results = list(self.conn.get_meter_statistics(f,
period=7200,
groupby=['project_id']))
self.assertEqual(len(results), 3)
groupby_list = [r.groupby for r in results]
groupby_keys_set = set(x for sub_dict in groupby_list
for x in sub_dict.keys())
groupby_vals_set = set(x for sub_dict in groupby_list
for x in sub_dict.values())
self.assertEqual(groupby_keys_set, set(['project_id']))
self.assertEqual(groupby_vals_set, set(['project-1', 'project-2']))
period_start_set = set([r.period_start for r in results])
period_start_valid = set([datetime.datetime(2013, 8, 1, 10, 11),
datetime.datetime(2013, 8, 1, 14, 11),
datetime.datetime(2013, 8, 1, 16, 11)])
self.assertEqual(period_start_set, period_start_valid)
for r in results:
if (r.groupby == {'project_id': 'project-1'} and
r.period_start == datetime.datetime(2013, 8, 1, 10, 11)):
self.assertEqual(r.count, 2)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 1)
self.assertEqual(r.max, 1)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 1)
self.assertEqual(r.duration, 1740)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 10, 11))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 10, 40))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 12, 11))
elif (r.groupby == {'project_id': 'project-1'} and
r.period_start == datetime.datetime(2013, 8, 1, 14, 11)):
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 2)
self.assertEqual(r.max, 2)
self.assertEqual(r.sum, 2)
self.assertEqual(r.avg, 2)
self.assertEqual(r.duration, 0)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 14, 59))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 14, 59))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 16, 11))
elif (r.groupby == {'project_id': 'project-2'} and
r.period_start == datetime.datetime(2013, 8, 1, 16, 11)):
self.assertEqual(r.count, 1)
self.assertEqual(r.unit, 's')
self.assertEqual(r.min, 4)
self.assertEqual(r.max, 4)
self.assertEqual(r.sum, 4)
self.assertEqual(r.avg, 4)
self.assertEqual(r.duration, 0)
self.assertEqual(r.duration_start,
datetime.datetime(2013, 8, 1, 17, 28))
self.assertEqual(r.duration_end,
datetime.datetime(2013, 8, 1, 17, 28))
self.assertEqual(r.period, 7200)
self.assertEqual(r.period_end,
datetime.datetime(2013, 8, 1, 18, 11))
else:
self.assertNotEqual([r.groupby, r.period_start],
[{'project_id': 'project-1'},
datetime.datetime(2013, 8, 1, 16, 11)])
self.assertNotEqual([r.groupby, r.period_start],
[{'project_id': 'project-2'},
datetime.datetime(2013, 8, 1, 10, 11)])
def test_group_by_metadata_with_query_filter_and_period(self):
pass
class CounterDataTypeTest(DBTestBase):
def prepare_data(self):

View File

@@ -54,6 +54,13 @@ class StatisticsTest(base.StatisticsTest, SQLAlchemyEngineTestBase):
pass
class StatisticsGroupByTest(base.StatisticsGroupByTest,
SQLAlchemyEngineTestBase):
# Grouping by source is not implemented for the SQLAlchemy driver.
def test_group_by_source(self):
pass
class CounterDataTypeTest(base.CounterDataTypeTest, SQLAlchemyEngineTestBase):
pass