Remove code related to metadata/metaquery

This is Ceilometer-specific code that's not used in Aodh.

Change-Id: Ida81435f4a0e1786e221e14ccd1aa79ea8017ce6

commit 56eed8c839
parent 34282df13f

@@ -217,7 +217,6 @@ def query_to_kwargs(query, db_func, internal_keys=None,
                    'resource_id': 'resource',
                    'type': 'alarm_type'}
     stamp = {}
-    metaquery = {}
     kwargs = {}
     for i in query:
         if i.field == 'timestamp':

@@ -233,16 +232,10 @@ def query_to_kwargs(query, db_func, internal_keys=None,
             stamp['search_offset'] = i.value
         elif i.field == 'enabled':
             kwargs[i.field] = i._get_value_as_type('boolean')
-        elif i.field.startswith('metadata.'):
-            metaquery[i.field] = i._get_value_as_type()
-        elif i.field.startswith('resource_metadata.'):
-            metaquery[i.field[9:]] = i._get_value_as_type()
         else:
             key = translation.get(i.field, i.field)
             kwargs[key] = i.value
 
-    if metaquery and 'metaquery' in inspect.getargspec(db_func)[0]:
-        kwargs['metaquery'] = metaquery
     if stamp:
         kwargs.update(_get_query_timestamps(stamp))
     return kwargs
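
The hunks above drop the metadata branches from the query translation loop. Purely as an illustrative, self-contained sketch (the Query namedtuple and the translation table below only mirror what is visible in the diff; they are stand-ins, not the real aodh API objects), the remaining behaviour looks roughly like this:

import collections

Query = collections.namedtuple('Query', ['field', 'op', 'value'])


def simplified_query_to_kwargs(query):
    # Stand-in for the simplified loop: every field is either translated
    # or passed through unchanged; there is no metaquery bucket any more.
    translation = {'user_id': 'user',
                   'project_id': 'project',
                   'resource_id': 'resource',
                   'type': 'alarm_type'}
    kwargs = {}
    for i in query:
        key = translation.get(i.field, i.field)
        kwargs[key] = i.value
    return kwargs


print(simplified_query_to_kwargs([Query('resource_id', 'eq', 'inst-1'),
                                  Query('state', 'eq', 'alarm')]))
# -> {'resource': 'inst-1', 'state': 'alarm'}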

@@ -22,7 +22,6 @@ from oslo_log import log
 import six
 
 from aodh.i18n import _
-from aodh import utils
 
 LOG = log.getLogger(__name__)
 

@@ -97,15 +96,13 @@ def get_start_end_rts(start, end):
     return rts_start, rts_end
 
 
-def make_query(metaquery=None, **kwargs):
+def make_query(**kwargs):
     """Return a filter query string based on the selected parameters.
 
-    :param metaquery: optional metaquery dict
     :param kwargs: key-value pairs to filter on. Key should be a real
       column name in db
     """
     q = []
-    res_q = None
 
     # Note: we use extended constructor for SingleColumnValueFilter here.
     # It is explicitly specified that entry should not be returned if CF is not

@@ -124,25 +121,8 @@ def make_query(metaquery=None, **kwargs):
             q.append("SingleColumnValueFilter "
                      "('f', '%s', =, 'binary:%s', true, true)" %
                      (quote(key), dump(value)))
-    res_q = None
     if len(q):
-        res_q = " AND ".join(q)
-
-    if metaquery:
-        meta_q = []
-        for k, v in metaquery.items():
-            meta_q.append(
-                "SingleColumnValueFilter ('f', '%s', =, 'binary:%s', "
-                "true, true)"
-                % ('r_' + k, dump(v)))
-        meta_q = " AND ".join(meta_q)
-        # join query and metaquery
-        if res_q is not None:
-            res_q += " AND " + meta_q
-        else:
-            res_q = meta_q  # metaquery only
-
-    return res_q
+        return " AND ".join(q)
 
 
 def make_general_rowkey_scan(rts_start=None, rts_end=None, some_id=None):
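
To see what the simplified make_query() now produces, here is a hedged, stand-alone sketch; quote() and dump() are replaced with urllib quoting and plain json.dumps(), so the exact escaping may differ from the real helpers in the HBase utils module:

import json
from urllib.parse import quote


def make_query(**kwargs):
    # One SingleColumnValueFilter per non-None kwarg; the old metaquery
    # branch that appended extra 'r_<key>' filters is gone.
    q = []
    for key, value in kwargs.items():
        if value is not None:
            q.append("SingleColumnValueFilter "
                     "('f', '%s', =, 'binary:%s', true, true)" %
                     (quote(key), json.dumps(value)))
    if len(q):
        return " AND ".join(q)


print(make_query(alarm_id='a-1', enabled=True))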

@@ -178,44 +158,22 @@ def prepare_key(*args):
     return ":".join(key_quote)
 
 
-def deserialize_entry(entry, get_raw_meta=True):
-    """Return a list of flatten_result, sources, meters and metadata.
+def deserialize_entry(entry):
+    """Return a list of flatten results.
 
-    Flatten_result contains a dict of simple structures such as 'resource_id':1
-    sources/meters are the lists of sources and meters correspondingly.
-    metadata is metadata dict. This dict may be returned as flattened if
-    get_raw_meta is False.
+    Result contains a dict of simple structures such as 'resource_id':1
 
     :param entry: entry from HBase, without row name and timestamp
-    :param get_raw_meta: If true then raw metadata will be returned,
-                         if False metadata will be constructed from
-                         'f:r_metadata.' fields
     """
     flatten_result = {}
-    sources = []
-    meters = []
-    metadata_flattened = {}
     for k, v in entry.items():
-        if k.startswith('f:s_'):
-            sources.append(decode_unicode(k[4:]))
-        elif k.startswith('f:r_metadata.'):
-            qualifier = decode_unicode(k[len('f:r_metadata.'):])
-            metadata_flattened[qualifier] = load(v)
-        elif k.startswith("f:m_"):
-            meter = ([unquote(i) for i in k[4:].split(':')], load(v))
-            meters.append(meter)
+        if ':' in k[2:]:
+            key = tuple([unquote(i) for i in k[2:].split(':')])
         else:
-            if ':' in k[2:]:
-                key = tuple([unquote(i) for i in k[2:].split(':')])
-            else:
-                key = unquote(k[2:])
-            flatten_result[key] = load(v)
-    if get_raw_meta:
-        metadata = flatten_result.get('resource_metadata', {})
-    else:
-        metadata = metadata_flattened
+            key = unquote(k[2:])
+        flatten_result[key] = load(v)
 
-    return flatten_result, sources, meters, metadata
+    return flatten_result
 
 
 def serialize_entry(data=None, **kwargs):
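
The slimmed-down deserialize_entry() now only flattens the 'f:' column family into a single dict. A minimal sketch, with unquote() and a plain JSON load standing in for the module's own helpers:

import json
from urllib.parse import unquote


def deserialize_entry(entry):
    # Flatten an HBase row: strip the 'f:' prefix, unquote the qualifier,
    # and JSON-decode the stored value. No sources/meters/metadata buckets.
    flatten_result = {}
    for k, v in entry.items():
        if ':' in k[2:]:
            key = tuple([unquote(i) for i in k[2:].split(':')])
        else:
            key = unquote(k[2:])
        flatten_result[key] = json.loads(v)
    return flatten_result


print(deserialize_entry({'f:alarm_id': '"a-1"', 'f:enabled': 'true'}))
# -> {'alarm_id': 'a-1', 'enabled': True}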

@@ -234,13 +192,6 @@ def serialize_entry(data=None, **kwargs):
     return result
 
 
-def dump_metadata(meta):
-    resource_metadata = {}
-    for key, v in utils.dict_to_keyval(meta):
-        resource_metadata[key] = v
-    return resource_metadata
-
-
 def dump(data):
     return json.dumps(data, default=bson.json_util.default)
 

@@ -249,10 +200,6 @@ def load(data):
     return json.loads(data, object_hook=object_hook)
 
 
-def decode_unicode(data):
-    return data.decode('utf-8') if isinstance(data, six.string_types) else data
-
-
 # We don't want to have tzinfo in decoded json.This object_hook is
 # overwritten json_util.object_hook for $date
 def object_hook(dct):

@@ -109,7 +109,7 @@ class Connection(hbase_base.Connection, base.Connection):
             alarm_table = conn.table(self.ALARM_TABLE)
             alarm_table.put(_id, alarm_to_store)
             stored_alarm = hbase_utils.deserialize_entry(
-                alarm_table.row(_id))[0]
+                alarm_table.row(_id))
             return models.Alarm(**stored_alarm)
 
     create_alarm = update_alarm

@@ -135,7 +135,7 @@ class Connection(hbase_base.Connection, base.Connection):
         with self.conn_pool.connection() as conn:
             alarm_table = conn.table(self.ALARM_TABLE)
             gen = alarm_table.scan(filter=q)
-            alarms = [hbase_utils.deserialize_entry(data)[0]
+            alarms = [hbase_utils.deserialize_entry(data)
                       for ignored, data in gen]
             for alarm in sorted(
                     alarms,

@@ -162,7 +162,7 @@ class Connection(hbase_base.Connection, base.Connection):
             gen = alarm_history_table.scan(filter=q, row_start=start_row,
                                            row_stop=end_row)
             for ignored, data in gen:
-                stored_entry = hbase_utils.deserialize_entry(data)[0]
+                stored_entry = hbase_utils.deserialize_entry(data)
                 yield models.AlarmChange(**stored_entry)
 
     def record_alarm_change(self, alarm_change):

@@ -210,19 +210,6 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
         self.assertEqual('gt', kwargs['start_timestamp_op'])
         self.assertEqual('lt', kwargs['end_timestamp_op'])
 
-    def test_sample_filter_meta(self):
-        q = [v2_base.Query(field='metadata.size',
-                           op='eq',
-                           value='20'),
-             v2_base.Query(field='resource_metadata.id',
-                           op='eq',
-                           value='meta_id')]
-        kwargs = utils.query_to_kwargs(q, storage.SampleFilter.__init__)
-        self.assertEqual(1, len(kwargs))
-        self.assertEqual(2, len(kwargs['metaquery']))
-        self.assertEqual(20, kwargs['metaquery']['metadata.size'])
-        self.assertEqual('meta_id', kwargs['metaquery']['metadata.id'])
-
     def test_sample_filter_non_equality_on_metadata(self):
         queries = [v2_base.Query(field='resource_metadata.image_id',
                                  op='gt',

@@ -58,44 +58,11 @@ Alarms
 Filtering Queries
 =================
 
-Aodh's REST API currently supports two types of queries. The Simple
-Query functionality provides simple filtering on several fields of the
-*Sample* type. Complex Query provides the possibility to specify queries
-with logical and comparison operators on the fields of *Sample*.
-
-You may also apply filters based on the values of one or more of the
-*resource_metadata* field, which you can identify by using *metadata.<field>*
-syntax in either type of query. Note, however, that given the free-form
-nature of *resource_metadata* field, there is no practical or consistent way
-to validate the query fields under *metadata* domain like it is done for
-all other fields.
-
-.. note:: The API call will return HTTP 200 OK status for both of the
-   following cases: when a query with *metadata.<field>* does not match its
-   value, and when *<field>* itself does not exist in any of the records being
-   queried.
-
-Simple Query
-++++++++++++
-
-Many of the endpoints above accept a query filter argument, which
-should be a list of Query data structures. Whatever the endpoint you
-want to apply a filter on, you always filter on the fields of the *Sample*
-type (for example, if you apply a filter on a query for statistics,
-you won't target *duration_start* field of *Statistics*, but *timestamp*
-field of *Sample*).
-
-.. autotype:: aodh.api.controllers.v2.base.Query
-   :members:
-
-Complex Query
-+++++++++++++
-
-The filter expressions of the Complex Query feature operate on the fields
-of *Sample*, *Alarm* and *AlarmChange*. The following comparison operators are
-supported: *=*, *!=*, *<*, *<=*, *>*, *>=* and *in*; and the following logical
-operators can be used: *and* *or* and *not*. The field names are validated
-against the database models.
+The filter expressions of the query feature operate on the fields of *Alarm*
+and *AlarmChange*. The following comparison operators are supported: *=*, *!=*,
+*<*, *<=*, *>*, *>=* and *in*; and the following logical operators can be used:
+*and* *or* and *not*. The field names are validated against the database
+models.
 
 .. note:: The *not* operator has different meaning in Mongo DB and in SQL DB engine.
    If the *not* operator is applied on a non existent metadata field then
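
The remaining documentation describes the query operators that still apply to *Alarm* and *AlarmChange*. Purely as an illustration (the payload shape of the complex-query endpoint is assumed here, not taken from this diff), a filter combining those operators could be built and serialized like this:

import json

# Hypothetical filter: threshold-type alarms that are either in 'alarm' state
# or newer than a given timestamp; mixes comparison and logical operators.
filter_expr = {"and": [{"=": {"type": "threshold"}},
                       {"or": [{"=": {"state": "alarm"}},
                               {">=": {"timestamp": "2015-06-01T00:00:00"}}]}]}
print(json.dumps(filter_expr))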