Standardize timestamp fields of ceilometer API
The timestamp query fields are currently not unified. This patch standardizes the usage of the timestamp query fields:

- unify the time query field in the API so that only *timestamp* is supported.
- unify the time-related fields of the filters to start_timestamp and end_timestamp to simplify the processing logic.
- improve the _query_to_kwargs method to report explicitly which query fields are valid when a user specifies an unsupported one.

Change-Id: Ic3ade312efa89edd01d2ee5ae31d341805b11f79
Closes-bug: #1295104
Closes-bug: #1291171
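To illustrate the intent (a minimal sketch, not code from this patch; the helper name translate_timestamp_query and the example values are hypothetical): after this change a unified *timestamp* query field plus search_offset is translated into start_timestamp/end_timestamp kwargs, roughly like this:

    import datetime

    def translate_timestamp_query(start=None, end=None, search_offset=0):
        # Widen the [start, end] window by search_offset minutes, mirroring
        # the dict shape _get_query_timestamps() returns after this patch.
        delta = datetime.timedelta(minutes=search_offset)
        return {
            'start_timestamp': (start - delta) if start else None,
            'end_timestamp': (end + delta) if end else None,
        }

    # A query "timestamp >= 2014-01-01T00:00:00" with search_offset=10
    # widens the lower bound by ten minutes:
    kwargs = translate_timestamp_query(
        start=datetime.datetime(2014, 1, 1), search_offset=10)
    assert kwargs['start_timestamp'] == datetime.datetime(2013, 12, 31, 23, 50)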
This commit is contained in:
parent 31e5435910
commit 33065c16f6
@@ -411,7 +411,7 @@ def _validate_query(query, db_func, internal_keys=None,
     :param allow_timestamps: defines whether the timestamp-based constraint is
                              applicable for this query or not

-    :returns: None, if the query is valid
+    :returns: valid query keys the db_func supported

     :raises InvalidInput: if an operator is not supported for a given field
     :raises InvalidInput: if timestamp constraints are allowed, but
@@ -428,6 +428,11 @@ def _validate_query(query, db_func, internal_keys=None,
         valid_keys.remove('alarm_type')
         valid_keys.append('type')

+    internal_timestamp_keys = ['end_timestamp', 'start_timestamp',
+                               'end_timestamp_op', 'start_timestamp_op']
+    if 'start_timestamp' in valid_keys:
+        internal_keys += internal_timestamp_keys
+        valid_keys += ['timestamp', 'search_offset']
     internal_keys.append('self')
     valid_keys = set(valid_keys) - set(internal_keys)
     translation = {'user_id': 'user',
@@ -470,6 +475,7 @@ def _validate_query(query, db_func, internal_keys=None,
             msg = ("unrecognized field in query: %s, "
                    "valid keys: %s") % (query, sorted(valid_keys))
             raise wsme.exc.UnknownArgument(key, msg)
+    return valid_keys


 def _validate_timestamp_fields(query, field_name, operator_list,
@@ -516,11 +522,9 @@ def _validate_timestamp_fields(query, field_name, operator_list,
 def _query_to_kwargs(query, db_func, internal_keys=None,
                      allow_timestamps=True):
     internal_keys = internal_keys or []
-    _validate_query(query, db_func, internal_keys=internal_keys,
+    valid_keys = _validate_query(query, db_func, internal_keys=internal_keys,
                     allow_timestamps=allow_timestamps)
     query = _sanitize_query(query, db_func)
-    internal_keys.append('self')
-    valid_keys = set(inspect.getargspec(db_func)[0]) - set(internal_keys)
     translation = {'user_id': 'user',
                    'project_id': 'project',
                    'resource_id': 'resource',
@@ -553,18 +557,7 @@ def _query_to_kwargs(query, db_func, internal_keys=None,
     if metaquery and 'metaquery' in valid_keys:
         kwargs['metaquery'] = metaquery
     if stamp:
-        q_ts = _get_query_timestamps(stamp)
-        if 'start' in valid_keys:
-            kwargs['start'] = q_ts['query_start']
-            kwargs['end'] = q_ts['query_end']
-        elif 'start_timestamp' in valid_keys:
-            kwargs['start_timestamp'] = q_ts['query_start']
-            kwargs['end_timestamp'] = q_ts['query_end']
-        if 'start_timestamp_op' in stamp:
-            kwargs['start_timestamp_op'] = stamp['start_timestamp_op']
-        if 'end_timestamp_op' in stamp:
-            kwargs['end_timestamp_op'] = stamp['end_timestamp_op']
+        kwargs.update(_get_query_timestamps(stamp))

     return kwargs


@@ -599,19 +592,14 @@ def _get_query_timestamps(args=None):

     Returns a dictionary containing:

-    query_start: First timestamp to use for query
-    start_timestamp: start_timestamp parameter from request
-    query_end: Final timestamp to use for query
-    end_timestamp: end_timestamp parameter from request
-    search_offset: search_offset parameter from request
+    start_timestamp: First timestamp to use for query
+    start_timestamp_op: First timestamp operator to use for query
+    end_timestamp: Final timestamp to use for query
+    end_timestamp_op: Final timestamp operator to use for query
     """

     if args is None:
-        return {'query_start': None,
-                'query_end': None,
-                'start_timestamp': None,
-                'end_timestamp': None,
-                'search_offset': 0}
+        return {}
     search_offset = int(args.get('search_offset', 0))

     def _parse_timestamp(timestamp):
@@ -625,20 +613,16 @@ def _get_query_timestamps(args=None):
                                         'invalid timestamp format')
         return iso_timestamp

-    start_timestamp = args.get('start_timestamp')
-    end_timestamp = args.get('end_timestamp')
-    start_timestamp = _parse_timestamp(start_timestamp)
-    end_timestamp = _parse_timestamp(end_timestamp)
-    query_start = start_timestamp - datetime.timedelta(
+    start_timestamp = _parse_timestamp(args.get('start_timestamp'))
+    end_timestamp = _parse_timestamp(args.get('end_timestamp'))
+    start_timestamp = start_timestamp - datetime.timedelta(
         minutes=search_offset) if start_timestamp else None
-    query_end = end_timestamp + datetime.timedelta(
+    end_timestamp = end_timestamp + datetime.timedelta(
         minutes=search_offset) if end_timestamp else None
-    return {'query_start': query_start,
-            'query_end': query_end,
-            'start_timestamp': start_timestamp,
+    return {'start_timestamp': start_timestamp,
             'end_timestamp': end_timestamp,
-            'search_offset': search_offset,
-            }
+            'start_timestamp_op': args.get('start_timestamp_op'),
+            'end_timestamp_op': args.get('end_timestamp_op')}


 def _flatten_metadata(metadata):
@@ -2369,8 +2353,8 @@ def _event_query_to_event_filter(q):
     evt_model_filter = {
         'event_type': None,
         'message_id': None,
-        'start_time': None,
-        'end_time': None
+        'start_timestamp': None,
+        'end_timestamp': None
     }
     traits_filter = []

@@ -209,8 +209,8 @@ class Connection(base.Connection):
         :param event_filter: EventFilter instance
         """

-        start = event_filter.start_time
-        end = event_filter.end_time
+        start = event_filter.start_timestamp
+        end = event_filter.end_timestamp
         session = self._engine_facade.get_session()
         LOG.debug(_("Getting events that match filter: %s") % event_filter)
         with session.begin():
@@ -119,9 +119,9 @@ class SampleFilter(object):

     :param user: The sample owner.
     :param project: The sample project.
-    :param start: Earliest time point in the request.
+    :param start_timestamp: Earliest time point in the request.
     :param start_timestamp_op: Earliest timestamp operation in the request.
-    :param end: Latest time point in the request.
+    :param end_timestamp: Latest time point in the request.
     :param end_timestamp_op: Latest timestamp operation in the request.
     :param resource: Optional filter for resource id.
     :param meter: Optional filter for meter type using the meter name.
@@ -130,16 +130,16 @@ class SampleFilter(object):
     :param metaquery: Optional filter on the metadata
     """
     def __init__(self, user=None, project=None,
-                 start=None, start_timestamp_op=None,
-                 end=None, end_timestamp_op=None,
+                 start_timestamp=None, start_timestamp_op=None,
+                 end_timestamp=None, end_timestamp_op=None,
                  resource=None, meter=None,
                  source=None, message_id=None,
                  metaquery=None):
         self.user = user
         self.project = project
-        self.start = utils.sanitize_timestamp(start)
+        self.start_timestamp = utils.sanitize_timestamp(start_timestamp)
         self.start_timestamp_op = start_timestamp_op
-        self.end = utils.sanitize_timestamp(end)
+        self.end_timestamp = utils.sanitize_timestamp(end_timestamp)
         self.end_timestamp_op = end_timestamp_op
         self.resource = resource
         self.meter = meter
@@ -150,9 +150,9 @@ class SampleFilter(object):
     def __repr__(self):
         return ("<SampleFilter(user: %s,"
                 " project: %s,"
-                " start: %s,"
+                " start_timestamp: %s,"
                 " start_timestamp_op: %s,"
-                " end: %s,"
+                " end_timestamp: %s,"
                 " end_timestamp_op: %s,"
                 " resource: %s,"
                 " meter: %s,"
@@ -161,9 +161,9 @@ class SampleFilter(object):
                 " message_id: %s)>" %
                 (self.user,
                  self.project,
-                 self.start,
+                 self.start_timestamp,
                  self.start_timestamp_op,
-                 self.end,
+                 self.end_timestamp,
                  self.end_timestamp_op,
                  self.resource,
                  self.meter,
@@ -175,8 +175,8 @@ class SampleFilter(object):
 class EventFilter(object):
     """Properties for building an Event query.

-    :param start_time: UTC start datetime (mandatory)
-    :param end_time: UTC end datetime (mandatory)
+    :param start_timestamp: UTC start datetime (mandatory)
+    :param end_timestamp: UTC end datetime (mandatory)
     :param event_type: the name of the event. None for all.
     :param message_id: the message_id of the event. None for all.
     :param traits_filter: the trait filter dicts, all of which are optional.
@@ -192,20 +192,20 @@ class EventFilter(object):
               'op': <eq, lt, le, ne, gt or ge> }
     """

-    def __init__(self, start_time=None, end_time=None, event_type=None,
-                 message_id=None, traits_filter=None):
-        self.start_time = utils.sanitize_timestamp(start_time)
-        self.end_time = utils.sanitize_timestamp(end_time)
+    def __init__(self, start_timestamp=None, end_timestamp=None,
+                 event_type=None, message_id=None, traits_filter=None):
+        self.start_timestamp = utils.sanitize_timestamp(start_timestamp)
+        self.end_timestamp = utils.sanitize_timestamp(end_timestamp)
         self.message_id = message_id
         self.event_type = event_type
         self.traits_filter = traits_filter or []

     def __repr__(self):
-        return ("<EventFilter(start_time: %s,"
-                " end_time: %s,"
+        return ("<EventFilter(start_timestamp: %s,"
+                " end_timestamp: %s,"
                 " event_type: %s,"
                 " traits: %s)>" %
-                (self.start_time,
-                 self.end_time,
+                (self.start_timestamp,
+                 self.end_timestamp,
                  self.event_type,
                  six.text_type(self.traits_filter)))
@@ -65,10 +65,10 @@ def make_events_query_from_filter(event_filter):
     Query is based on the selected parameter.
     :param event_filter: storage.EventFilter object.
     """
-    start = "%s" % (timestamp(event_filter.start_time, reverse=False)
-                    if event_filter.start_time else "")
-    stop = "%s" % (timestamp(event_filter.end_time, reverse=False)
-                   if event_filter.end_time else "")
+    start = "%s" % (timestamp(event_filter.start_timestamp, reverse=False)
+                    if event_filter.start_timestamp else "")
+    stop = "%s" % (timestamp(event_filter.end_timestamp, reverse=False)
+                   if event_filter.end_timestamp else "")
     kwargs = {'event_type': event_filter.event_type,
               'event_id': event_filter.message_id}
     res_q = make_query(**kwargs)
@@ -240,8 +240,10 @@ def make_sample_query_from_filter(sample_filter, require_meter=True):
         raise RuntimeError('Missing required meter specifier')
     start_row, end_row, ts_query = make_timestamp_query(
         make_general_rowkey_scan,
-        start=sample_filter.start, start_op=sample_filter.start_timestamp_op,
-        end=sample_filter.end, end_op=sample_filter.end_timestamp_op,
+        start=sample_filter.start_timestamp,
+        start_op=sample_filter.start_timestamp_op,
+        end=sample_filter.end_timestamp,
+        end_op=sample_filter.end_timestamp_op,
         some_id=meter)
     kwargs = dict(user_id=sample_filter.user,
                   project_id=sample_filter.project,
@@ -257,7 +259,8 @@ def make_sample_query_from_filter(sample_filter, require_meter=True):
     else:
         res_q = ts_query if ts_query else None

-    need_timestamp = (sample_filter.start or sample_filter.end) is not None
+    need_timestamp = (sample_filter.start_timestamp or
+                      sample_filter.end_timestamp) is not None
     columns = get_meter_columns(metaquery=sample_filter.metaquery,
                                 need_timestamp=need_timestamp, **kwargs)
     return res_q, start_row, end_row, columns
@@ -336,8 +336,8 @@ class Connection(pymongo_base.Connection):
         q = pymongo_utils.make_query_from_filter(sample_filter)

         if period:
-            if sample_filter.start:
-                period_start = sample_filter.start
+            if sample_filter.start_timestamp:
+                period_start = sample_filter.start_timestamp
             else:
                 period_start = self.db.meter.find(
                     limit=1, sort=[('timestamp',
@@ -369,15 +369,15 @@ class Connection(hbase_base.Connection, base.Connection):
                 filter=q, row_start=start,
                 row_stop=stop, columns=columns)))

-        if sample_filter.start:
-            start_time = sample_filter.start
+        if sample_filter.start_timestamp:
+            start_time = sample_filter.start_timestamp
         elif meters:
             start_time = meters[-1][0]['timestamp']
         else:
             start_time = None

-        if sample_filter.end:
-            end_time = sample_filter.end
+        if sample_filter.end_timestamp:
+            end_time = sample_filter.end_timestamp
         elif meters:
             end_time = meters[0][0]['timestamp']
         else:
@@ -860,8 +860,8 @@ class Connection(pymongo_base.Connection):
         q = pymongo_utils.make_query_from_filter(sample_filter)

         if period:
-            if sample_filter.start:
-                period_start = sample_filter.start
+            if sample_filter.start_timestamp:
+                period_start = sample_filter.start_timestamp
             else:
                 period_start = self.db.meter.find(
                     limit=1, sort=[('timestamp',
@@ -147,14 +147,14 @@ def make_query_from_filter(session, query, sample_filter, require_meter=True):
     if sample_filter.source:
         query = query.filter(
             models.Resource.source_id == sample_filter.source)
-    if sample_filter.start:
-        ts_start = sample_filter.start
+    if sample_filter.start_timestamp:
+        ts_start = sample_filter.start_timestamp
         if sample_filter.start_timestamp_op == 'gt':
             query = query.filter(models.Sample.timestamp > ts_start)
         else:
             query = query.filter(models.Sample.timestamp >= ts_start)
-    if sample_filter.end:
-        ts_end = sample_filter.end
+    if sample_filter.end_timestamp:
+        ts_end = sample_filter.end_timestamp
         if sample_filter.end_timestamp_op == 'le':
             query = query.filter(models.Sample.timestamp <= ts_end)
         else:
@@ -401,9 +401,9 @@ class Connection(base.Connection):
         s_filter = storage.SampleFilter(user=user,
                                         project=project,
                                         source=source,
-                                        start=start_timestamp,
+                                        start_timestamp=start_timestamp,
                                         start_timestamp_op=start_timestamp_op,
-                                        end=end_timestamp,
+                                        end_timestamp=end_timestamp,
                                         end_timestamp_op=end_timestamp_op,
                                         metaquery=metaquery,
                                         resource=resource)
@@ -687,7 +687,7 @@ class Connection(base.Connection):
                                          aggregate)
             return

-        if not sample_filter.start or not sample_filter.end:
+        if not (sample_filter.start_timestamp and sample_filter.end_timestamp):
            res = self._make_stats_query(sample_filter,
                                         None,
                                         aggregate).first()
@@ -703,8 +703,8 @@ class Connection(base.Connection):
         # stats by period. We would like to use GROUP BY, but there's no
         # portable way to manipulate timestamp in SQL, so we can't.
         for period_start, period_end in base.iter_period(
-                sample_filter.start or res.tsmin,
-                sample_filter.end or res.tsmax,
+                sample_filter.start_timestamp or res.tsmin,
+                sample_filter.end_timestamp or res.tsmax,
                 period):
             q = query.filter(models.Sample.timestamp >= period_start)
             q = q.filter(models.Sample.timestamp < period_end)
@@ -74,8 +74,8 @@ def make_events_query_from_filter(event_filter):
     :param event_filter: storage.EventFilter object.
     """
     q = {}
-    ts_range = make_timestamp_range(event_filter.start_time,
-                                    event_filter.end_time)
+    ts_range = make_timestamp_range(event_filter.start_timestamp,
+                                    event_filter.end_timestamp)
     if ts_range:
         q['timestamp'] = ts_range
     if event_filter.event_type:
@@ -130,8 +130,8 @@ def make_query_from_filter(sample_filter, require_meter=True):
     elif require_meter:
         raise RuntimeError('Missing required meter specifier')

-    ts_range = make_timestamp_range(sample_filter.start,
-                                    sample_filter.end,
+    ts_range = make_timestamp_range(sample_filter.start_timestamp,
+                                    sample_filter.end_timestamp,
                                     sample_filter.start_timestamp_op,
                                     sample_filter.end_timestamp_op)

@@ -2170,10 +2170,8 @@ class TestAlarms(v2.FunctionalTest,
                             expect_errors=True, status=400)
         self.assertEqual('Unknown argument: "alarm_id": unrecognized'
                          " field in query: [<Query u'alarm_id' eq"
-                         " u'b' Unset>], valid keys: ['end_timestamp',"
-                         " 'end_timestamp_op', 'project',"
-                         " 'start_timestamp', 'start_timestamp_op',"
-                         " 'type', 'user']",
+                         " u'b' Unset>], valid keys: ['project', "
+                         "'search_offset', 'timestamp', 'type', 'user']",
                          resp.json['error_message']['faultstring'])

     def test_get_alarm_history_constrained_by_not_supported_rule(self):
@@ -2183,10 +2181,8 @@ class TestAlarms(v2.FunctionalTest,
                             expect_errors=True, status=400)
         self.assertEqual('Unknown argument: "abcd": unrecognized'
                          " field in query: [<Query u'abcd' eq"
-                         " u'abcd' Unset>], valid keys: ['end_timestamp',"
-                         " 'end_timestamp_op', 'project',"
-                         " 'start_timestamp', 'start_timestamp_op',"
-                         " 'type', 'user']",
+                         " u'abcd' Unset>], valid keys: ['project', "
+                         "'search_offset', 'timestamp', 'type', 'user']",
                          resp.json['error_message']['faultstring'])

     def test_get_nonexistent_alarm_history(self):
@@ -49,13 +49,14 @@ class TestComputeDurationByResource(v2.FunctionalTest,
         self.late2 = datetime.datetime(2012, 8, 29, 19, 0)

     def _patch_get_interval(self, start, end):
-        def get_interval(event_filter, period, groupby, aggregate):
-            self.assertIsNotNone(event_filter.start)
-            self.assertIsNotNone(event_filter.end)
-            if event_filter.start > end or event_filter.end < start:
+        def get_interval(sample_filter, period, groupby, aggregate):
+            self.assertIsNotNone(sample_filter.start_timestamp)
+            self.assertIsNotNone(sample_filter.end_timestamp)
+            if (sample_filter.start_timestamp > end or
+                    sample_filter.end_timestamp < start):
                 return []
-            duration_start = max(event_filter.start, start)
-            duration_end = min(event_filter.end, end)
+            duration_start = max(sample_filter.start_timestamp, start)
+            duration_end = min(sample_filter.end_timestamp, end)
             duration = timeutils.delta_seconds(duration_start, duration_end)
             return [
                 models.Statistics(
@@ -23,8 +23,10 @@ from oslotest import base
 from oslotest import mockpatch
 import wsme

+from ceilometer.alarm.storage import base as alarm_storage_base
 from ceilometer.api.controllers import v2 as api
 from ceilometer import storage
+from ceilometer.storage import base as storage_base
 from ceilometer.tests import base as tests_base


@@ -224,8 +226,8 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
                            value=str(ts_start))]
         kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__)
         self.assertEqual(4, len(kwargs))
-        self.assertTimestampEqual(kwargs['start'], ts_start)
-        self.assertTimestampEqual(kwargs['end'], ts_end)
+        self.assertTimestampEqual(kwargs['start_timestamp'], ts_start)
+        self.assertTimestampEqual(kwargs['end_timestamp'], ts_end)
         self.assertEqual('gt', kwargs['start_timestamp_op'])
         self.assertEqual('lt', kwargs['end_timestamp_op'])

@@ -333,3 +335,75 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
         expected_exc = wsme.exc.InvalidInput('timestamp', '123',
                                              'invalid timestamp format')
         self.assertEqual(str(expected_exc), str(exc))
+
+    def test_get_alarm_changes_filter_valid_fields(self):
+        q = [api.Query(field='abc',
+                       op='eq',
+                       value='abc')]
+        exc = self.assertRaises(
+            wsme.exc.UnknownArgument,
+            api._query_to_kwargs, q,
+            alarm_storage_base.Connection.get_alarm_changes)
+        valid_keys = ['alarm_id', 'on_behalf_of', 'project', 'search_offset',
+                      'timestamp', 'type', 'user']
+        msg = ("unrecognized field in query: %s, "
+               "valid keys: %s") % (q, valid_keys)
+        expected_exc = wsme.exc.UnknownArgument('abc', msg)
+        self.assertEqual(str(expected_exc), str(exc))
+
+    def test_sample_filter_valid_fields(self):
+        q = [api.Query(field='abc',
+                       op='eq',
+                       value='abc')]
+        exc = self.assertRaises(
+            wsme.exc.UnknownArgument,
+            api._query_to_kwargs, q, storage.SampleFilter.__init__)
+        valid_keys = ['message_id', 'metaquery', 'meter', 'project',
+                      'resource', 'search_offset', 'source', 'timestamp',
+                      'user']
+        msg = ("unrecognized field in query: %s, "
+               "valid keys: %s") % (q, valid_keys)
+        expected_exc = wsme.exc.UnknownArgument('abc', msg)
+        self.assertEqual(str(expected_exc), str(exc))
+
+    def test_get_meters_filter_valid_fields(self):
+        q = [api.Query(field='abc',
+                       op='eq',
+                       value='abc')]
+        exc = self.assertRaises(
+            wsme.exc.UnknownArgument,
+            api._query_to_kwargs, q, storage_base.Connection.get_meters)
+        valid_keys = ['metaquery', 'pagination', 'project', 'resource',
+                      'source', 'user']
+        msg = ("unrecognized field in query: %s, "
+               "valid keys: %s") % (q, valid_keys)
+        expected_exc = wsme.exc.UnknownArgument('abc', msg)
+        self.assertEqual(str(expected_exc), str(exc))
+
+    def test_get_resources_filter_valid_fields(self):
+        q = [api.Query(field='abc',
+                       op='eq',
+                       value='abc')]
+        exc = self.assertRaises(
+            wsme.exc.UnknownArgument,
+            api._query_to_kwargs, q, storage_base.Connection.get_resources)
+        valid_keys = ['metaquery', 'pagination', 'project', 'resource',
+                      'search_offset', 'source', 'timestamp', 'user']
+        msg = ("unrecognized field in query: %s, "
+               "valid keys: %s") % (q, valid_keys)
+        expected_exc = wsme.exc.UnknownArgument('abc', msg)
+        self.assertEqual(str(expected_exc), str(exc))
+
+    def test_get_alarms_filter_valid_fields(self):
+        q = [api.Query(field='abc',
+                       op='eq',
+                       value='abc')]
+        exc = self.assertRaises(
+            wsme.exc.UnknownArgument,
+            api._query_to_kwargs, q, alarm_storage_base.Connection.get_alarms)
+        valid_keys = ['alarm_id', 'enabled', 'meter', 'name', 'pagination',
+                      'project', 'state', 'type', 'user']
+        msg = ("unrecognized field in query: %s, "
+               "valid keys: %s") % (q, valid_keys)
+        expected_exc = wsme.exc.UnknownArgument('abc', msg)
+        self.assertEqual(str(expected_exc), str(exc))
@@ -576,7 +576,7 @@ class RawSampleTest(DBTestBase,
         timestamp = datetime.datetime(2012, 7, 2, 10, 41)
         f = storage.SampleFilter(
             user='user-id',
-            start=timestamp,
+            start_timestamp=timestamp,
         )

         results = list(self.conn.get_samples(f))
@@ -596,7 +596,7 @@ class RawSampleTest(DBTestBase,
         timestamp = datetime.datetime(2012, 7, 2, 10, 40)
         f = storage.SampleFilter(
             user='user-id',
-            end=timestamp,
+            end_timestamp=timestamp,
         )

         results = list(self.conn.get_samples(f))
@@ -616,8 +616,8 @@ class RawSampleTest(DBTestBase,
         start_ts = datetime.datetime(2012, 7, 2, 10, 42)
         end_ts = datetime.datetime(2012, 7, 2, 10, 43)
         f = storage.SampleFilter(
-            start=start_ts,
-            end=end_ts,
+            start_timestamp=start_ts,
+            end_timestamp=end_ts,
         )

         results = list(self.conn.get_samples(f))
@@ -1219,7 +1219,7 @@ class StatisticsTest(DBTestBase,
         f = storage.SampleFilter(
             user='user-5',
             meter='volume.size',
-            start='2012-09-25T10:28:00',
+            start_timestamp='2012-09-25T10:28:00',
         )
         results = list(self.conn.get_meter_statistics(f, period=7200))
         self.assertEqual(2, len(results))
@@ -1278,7 +1278,7 @@ class StatisticsTest(DBTestBase,
         f = storage.SampleFilter(
             user='user-5',
             meter='volume.size',
-            start=date
+            start_timestamp=date
         )
         results = list(self.conn.get_meter_statistics(f, period=7200))
         self.assertEqual(2, len(results))
@@ -1293,8 +1293,8 @@ class StatisticsTest(DBTestBase,
         f = storage.SampleFilter(
             user='user-5',
             meter='volume.size',
-            start='2012-09-25T10:28:00',
-            end='2012-09-25T11:28:00',
+            start_timestamp='2012-09-25T10:28:00',
+            end_timestamp='2012-09-25T11:28:00',
         )
         results = list(self.conn.get_meter_statistics(f, period=1800))
         self.assertEqual(1, len(results))
@@ -1320,8 +1320,8 @@ class StatisticsTest(DBTestBase,
         f = storage.SampleFilter(
             meter='volume.size',
             resource='resource-id',
-            start='2012-09-25T11:30:00',
-            end='2012-09-25T11:32:00',
+            start_timestamp='2012-09-25T11:30:00',
+            end_timestamp='2012-09-25T11:32:00',
         )
         results = list(self.conn.get_meter_statistics(f))[0]
         self.assertEqual(0, results.duration)
@@ -2284,7 +2284,7 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_start_timestamp_after(self):
         f = storage.SampleFilter(
             meter='instance',
-            start=datetime.datetime(2013, 8, 1, 17, 28, 1),
+            start_timestamp=datetime.datetime(2013, 8, 1, 17, 28, 1),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['project_id']))
@@ -2294,7 +2294,7 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_end_timestamp_before(self):
         f = storage.SampleFilter(
             meter='instance',
-            end=datetime.datetime(2013, 8, 1, 10, 10, 59),
+            end_timestamp=datetime.datetime(2013, 8, 1, 10, 10, 59),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['project_id']))
@@ -2304,7 +2304,7 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_start_timestamp(self):
         f = storage.SampleFilter(
             meter='instance',
-            start=datetime.datetime(2013, 8, 1, 14, 58),
+            start_timestamp=datetime.datetime(2013, 8, 1, 14, 58),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['project_id']))
@@ -2336,7 +2336,7 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_end_timestamp(self):
         f = storage.SampleFilter(
             meter='instance',
-            end=datetime.datetime(2013, 8, 1, 11, 45),
+            end_timestamp=datetime.datetime(2013, 8, 1, 11, 45),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['project_id']))
@@ -2361,8 +2361,8 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_start_end_timestamp(self):
         f = storage.SampleFilter(
             meter='instance',
-            start=datetime.datetime(2013, 8, 1, 8, 17, 3),
-            end=datetime.datetime(2013, 8, 1, 23, 59, 59),
+            start_timestamp=datetime.datetime(2013, 8, 1, 8, 17, 3),
+            end_timestamp=datetime.datetime(2013, 8, 1, 23, 59, 59),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['project_id']))
@@ -2395,8 +2395,8 @@ class StatisticsGroupByTest(DBTestBase,
         f = storage.SampleFilter(
             meter='instance',
             project='project-1',
-            start=datetime.datetime(2013, 8, 1, 11, 1),
-            end=datetime.datetime(2013, 8, 1, 20, 0),
+            start_timestamp=datetime.datetime(2013, 8, 1, 11, 1),
+            end_timestamp=datetime.datetime(2013, 8, 1, 20, 0),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       groupby=['resource_id']))
@@ -2427,8 +2427,8 @@ class StatisticsGroupByTest(DBTestBase,
     def test_group_by_start_end_timestamp_with_period(self):
         f = storage.SampleFilter(
             meter='instance',
-            start=datetime.datetime(2013, 8, 1, 14, 0),
-            end=datetime.datetime(2013, 8, 1, 17, 0),
+            start_timestamp=datetime.datetime(2013, 8, 1, 14, 0),
+            end_timestamp=datetime.datetime(2013, 8, 1, 17, 0),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       period=3600,
@@ -2511,8 +2511,8 @@ class StatisticsGroupByTest(DBTestBase,
         f = storage.SampleFilter(
             meter='instance',
             source='source-1',
-            start=datetime.datetime(2013, 8, 1, 10, 0),
-            end=datetime.datetime(2013, 8, 1, 18, 0),
+            start_timestamp=datetime.datetime(2013, 8, 1, 10, 0),
+            end_timestamp=datetime.datetime(2013, 8, 1, 18, 0),
         )
         results = list(self.conn.get_meter_statistics(f,
                                                       period=7200,