Fix list of modules not included in auto-gen docs
The variable for excluding modules should be a sequence of strings. Turn the
current value from a string into a tuple so that documentation for our source
is generated. Errors and warnings in the source-code docstrings were fixed as
well; otherwise the doc generation would fail.

The following modules were excluded from the doc build because of
non-existing imports:

* ceilometer.compute.nova_notifier
  http://bit.ly/remove-nova-notifier-bp
* ceilometer.openstack.common.db.sqlalchemy.session
  https://review.openstack.org/#/c/97850/
* ceilometer.openstack.common.middleware.audit
* ceilometer.openstack.common.middleware.notifier
  https://bugs.launchpad.net/ceilometer/+bug/1327084
* ceilometer.openstack.common.log_handler
  https://bugs.launchpad.net/ceilometer/+bug/1327076

These failing imports are tracked in one blueprint and two bug reports.

Change-Id: If0bc1c8fc96ba513bbeb90d5257e40b7621a8473
commit 702d99937e (parent 3efe03b60a)
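For context, the core of the fix is a Python subtlety: parentheses alone do not create a tuple, so the old exclusion value was a plain string, and string operations behave quite differently from tuple membership. A minimal sketch of the difference:

    EXCLUDED_MODULES = ('ceilometer.tests')     # just a parenthesized string
    print(type(EXCLUDED_MODULES))               # <class 'str'>
    print('compute' in EXCLUDED_MODULES)        # substring test, not list membership

    EXCLUDED_MODULES = ('ceilometer.tests',)    # trailing comma makes a one-element tuple
    print(type(EXCLUDED_MODULES))               # <class 'tuple'>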
@@ -240,17 +240,16 @@ class AlarmNotifierService(os_service.Service):
     def notify_alarm(self, context, data):
         """Notify that alarm has been triggered.
 
-        data should be a dict with the following keys:
-        - actions, the URL of the action to run;
-          this is a mapped to extensions automatically
+        :param context: Request context.
+        :param data: (dict):
+
+          - actions, the URL of the action to run; this is mapped to
+            extensions automatically
           - alarm_id, the ID of the alarm that has been triggered
           - previous, the previous state of the alarm
           - current, the new state the alarm has transitioned to
           - reason, the reason the alarm changed its state
           - reason_data, a dict representation of the reason
-
-        :param context: Request context.
-        :param data: A dict as described above.
         """
         actions = data.get('actions')
         if not actions:
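As a rough illustration of the payload described in that docstring (the field values below are invented for the example), the dict handed to notify_alarm could look like this:

    data = {
        'actions': ['log://'],              # URLs of the actions to run
        'alarm_id': 'b8b1e6af-7f47-4c67-bf1b-2f1b1a7d2b1f',
        'previous': 'ok',
        'current': 'alarm',
        'reason': 'Transition to alarm due to samples outside threshold',
        'reason_data': {'type': 'threshold', 'disposition': 'outside'},
    }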
@@ -251,24 +251,26 @@ class NotificationEventsConverter(object):
     notification will be processed according to the LAST definition that
     matches it's event_type. (We use the last matching definition because that
     allows you to use YAML merge syntax in the definitions file.)
-    Each definition is a dictionary with the following keys (all are required):
-        event_type: this is a list of notification event_types this definition
-                    will handle. These can be wildcarded with unix shell glob
-                    (not regex!) wildcards.
-                    An exclusion listing (starting with a '!') will exclude any
-                    types listed from matching. If ONLY exclusions are listed,
-                    the definition will match anything not matching the
-                    exclusions.
-                    This item can also be a string, which will be taken as
-                    equivalent to 1 item list.
+    Each definition is a dictionary with the following keys (all are
+    required):
+
+    - event_type: this is a list of notification event_types this definition
+      will handle. These can be wildcarded with unix shell glob (not regex!)
+      wildcards.
+      An exclusion listing (starting with a '!') will exclude any types listed
+      from matching. If ONLY exclusions are listed, the definition will match
+      anything not matching the exclusions.
+      This item can also be a string, which will be taken as equivalent to 1
+      item list.
+
     Examples:
 
     * ['compute.instance.exists'] will only match
       compute.intance.exists notifications
     * "compute.instance.exists" Same as above.
     * ["image.create", "image.delete"] will match
       image.create and image.delete, but not anything else.
-    * 'compute.instance.*" will match
+    * "compute.instance.*" will match
       compute.instance.create.start but not image.upload
     * ['*.start','*.end', '!scheduler.*'] will match
       compute.instance.create.start, and image.delete.end,
@@ -277,45 +279,46 @@ class NotificationEventsConverter(object):
     * '!image.*' matches any notification except image
       notifications.
     * ['*', '!image.*'] same as above.
-        traits: dictionary, The keys are trait names, the values are the trait
-                definitions
-                Each trait definition is a dictionary with the following keys:
-                    type (optional): The data type for this trait. (as a string)
-                        Valid options are: 'text', 'int', 'float' and 'datetime'
-                        defaults to 'text' if not specified.
-                    fields: a path specification for the field(s) in the
-                        notification you wish to extract. The paths can be
-                        specified with a dot syntax (e.g. 'payload.host').
-                        dictionary syntax (e.g. 'payload[host]') is also supported.
-                        in either case, if the key for the field you are looking
-                        for contains special charecters, like '.', it will need to
-                        be quoted (with double or single quotes) like so:
+
+    - traits: (dict) The keys are trait names, the values are the trait
+      definitions. Each trait definition is a dictionary with the following
+      keys:
+
+      - type (optional): The data type for this trait. (as a string)
+        Valid options are: 'text', 'int', 'float' and 'datetime', defaults to
+        'text' if not specified.
+      - fields: a path specification for the field(s) in the notification you
+        wish to extract. The paths can be specified with a dot syntax
+        (e.g. 'payload.host') or dictionary syntax (e.g. 'payload[host]') is
+        also supported.
+        In either case, if the key for the field you are looking for contains
+        special charecters, like '.', it will need to be quoted (with double
+        or single quotes) like so::
 
             "payload.image_meta.'org.openstack__1__architecture'"
 
-                        The syntax used for the field specification is a variant
-                        of JSONPath, and is fairly flexible.
-                        (see: https://github.com/kennknowles/python-jsonpath-rw
-                        for more info) Specifications can be written to match
-                        multiple possible fields, the value for the trait will
-                        be derived from the matching fields that exist and have
-                        a non-null (i.e. is not None) values in the notification.
-                        By default the value will be the first such field.
-                        (plugins can alter that, if they wish)
+        The syntax used for the field specification is a variant of JSONPath,
+        and is fairly flexible.
+        (see: https://github.com/kennknowles/python-jsonpath-rw for more info)
+        Specifications can be written to match multiple possible fields, the
+        value for the trait will be derived from the matching fields that
+        exist and have a non-null (i.e. is not None) values in the
+        notification.
+        By default the value will be the first such field. (plugins can alter
+        that, if they wish)
 
-                        This configuration value is normally a string, for
-                        convenience, it can be specified as a list of
-                        specifications, which will be OR'ed together (a union
-                        query in jsonpath terms)
-                    plugin (optional): (dictionary) with the following keys:
-                        name: (string) name of a plugin to load
-                        parameters: (optional) Dictionary of keyword args to pass
-                            to the plugin on initialization.
-                            See documentation on each plugin to see what
-                            arguments it accepts.
-                    For convenience, this value can also be specified as a
-                    string, which is interpreted as a plugin name, which will
-                    be loaded with no parameters.
+        This configuration value is normally a string, for convenience, it can
+        be specified as a list of specifications, which will be OR'ed together
+        (a union query in jsonpath terms)
+    - plugin (optional): (dictionary) with the following keys:
+
+      - name: (string) name of a plugin to load
+      - parameters: (optional) Dictionary of keyword args to pass
+        to the plugin on initialization. See documentation on each plugin to
+        see what arguments it accepts.
+
+    For convenience, this value can also be specified as a string, which is
+    interpreted as a plugin name, which will be loaded with no parameters.
 
     """
 
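A hedged illustration of one such definition, written as the Python dict it becomes once the YAML definitions file is loaded (the event type, trait names and the 'split' plugin name are invented for the example, not taken from the shipped definitions):

    definition = {
        'event_type': ['compute.instance.*', '!compute.instance.exists'],
        'traits': {
            'instance_id': {
                'type': 'text',
                'fields': 'payload.instance_id',
            },
            'host': {
                # two specifications OR'ed together (a union query)
                'fields': ['payload.host', 'payload.node'],
            },
            'launched_at': {
                'type': 'datetime',
                'fields': 'payload.launched_at',
                'plugin': 'split',   # shorthand for {'name': 'split'}
            },
        },
    }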
@@ -51,10 +51,11 @@ class TraitPluginBase(object):
         appropriate type for the trait.
 
         :param match_list: A list (may be empty if no matches) of *tuples*.
-                           Each tuple is (field_path, value) where field_path
-                           is the jsonpath for that specific field,
+            Each tuple is (field_path, value) where field_path is the jsonpath
+            for that specific field.
 
-        Example:
+        Example::
+
         trait's fields definition: ['payload.foobar',
                                     'payload.baz',
                                     'payload.thing.*']
@@ -76,8 +77,10 @@ class TraitPluginBase(object):
 
         Here is a plugin that emulates the default (no plugin) behavior:
 
+        .. code-block:: python
+
             class DefaultPlugin(TraitPluginBase):
-                "Plugin that returns the first field value"
+                "Plugin that returns the first field value."
 
                 def __init__(self, **kw):
                     super(DefaultPlugin, self).__init__()
@@ -86,6 +89,7 @@ class TraitPluginBase(object):
                 if not match_list:
                     return None
                 return match_list[0][1]
 
     """
 
+
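A small self-contained sketch of the match-list contract described above; the paths and values are invented, and the last line simply restates the default "first matched value wins" behaviour shown in the DefaultPlugin example:

    match_list = [('payload.foobar', 'baz'),
                  ('payload.baz', None),
                  ('payload.thing.spam', 42)]

    # default behaviour: the first matched value is used for the trait
    value = match_list[0][1] if match_list else None   # -> 'baz'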
@@ -27,11 +27,13 @@ class OpencontrailDriver(driver.Driver):
 
     This driver uses resources in "pipeline.yaml".
     Resource requires below conditions:
+
     * resource is url
     * scheme is "opencontrail"
 
     This driver can be configured via query parameters.
     Supported parameters:
+
     * scheme:
       The scheme of request url to Opencontrail Analytics endpoint.
       (default http)
@@ -39,13 +41,14 @@ class OpencontrailDriver(driver.Driver):
       This is username used by Opencontrail Analytics.(default None)
     * password:
       This is password used by Opencontrail Analytics.(default None)
-    * domain
+    * domain:
       This is domain used by Opencontrail Analytics.(default None)
-    * verify_ssl
+    * verify_ssl:
       Specify if the certificate will be checked for https request.
       (default false)
 
-    e.g.
+    e.g.::
+
       opencontrail://localhost:8143/?username=admin&password=admin&
       scheme=https&domain=&verify_ssl=true
     """
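A sketch of pulling those query parameters out of such a resource URL, assuming Python 3's urllib; the real driver may parse its resource URL differently:

    from urllib.parse import urlsplit, parse_qs

    resource = ('opencontrail://localhost:8143/?username=admin&password=admin&'
                'scheme=https&domain=&verify_ssl=true')
    parts = urlsplit(resource)
    params = parse_qs(parts.query, keep_blank_values=True)

    scheme = params.get('scheme', ['http'])[0]                  # default http
    verify_ssl = params.get('verify_ssl', ['false'])[0].lower() == 'true'
    endpoint = '%s://%s:%d' % (scheme, parts.hostname, parts.port)
    # -> 'https://localhost:8143'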
@@ -51,11 +51,13 @@ class OpenDayLightDriver(driver.Driver):
 
     This driver uses resources in "pipeline.yaml".
     Resource requires below conditions:
+
     * resource is url
     * scheme is "opendaylight"
 
     This driver can be configured via query parameters.
     Supported parameters:
+
     * scheme:
       The scheme of request url to OpenDaylight REST API endpoint.
       (default http)
@@ -70,13 +72,14 @@ class OpenDayLightDriver(driver.Driver):
       Name of container of OpenDaylight.(default "default")
       This parameter allows multi vaues.
 
-    e.g.
+    e.g.::
+
       opendaylight://127.0.0.1:8080/controller/nb/v2?container_name=default&
       container_name=egg&auth=basic&user=admin&password=admin&scheme=http
 
-    In this case, the driver send request to below URL:
+    In this case, the driver send request to below URLs:
+
       http://127.0.0.1:8080/controller/nb/v2/statistics/default/flow
-      and
       http://127.0.0.1:8080/controller/nb/v2/statistics/egg/flow
     """
     @staticmethod
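Likewise, a sketch of how the multi-valued container_name parameter expands into the two statistics URLs listed above (illustrative only; the real driver builds its requests elsewhere):

    from urllib.parse import urlsplit, parse_qs

    resource = ('opendaylight://127.0.0.1:8080/controller/nb/v2'
                '?container_name=default&container_name=egg'
                '&auth=basic&user=admin&password=admin&scheme=http')
    parts = urlsplit(resource)
    params = parse_qs(parts.query)

    base = '%s://%s%s' % (params['scheme'][0], parts.netloc, parts.path)
    urls = ['%s/statistics/%s/flow' % (base, c)
            for c in params['container_name']]
    # -> ['http://127.0.0.1:8080/controller/nb/v2/statistics/default/flow',
    #     'http://127.0.0.1:8080/controller/nb/v2/statistics/egg/flow']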
@@ -26,15 +26,17 @@ Configuration:
 In /etc/swift/proxy-server.conf on the main pipeline add "ceilometer" just
 before "proxy-server" and add the following filter in the file:
 
+.. code-block:: python
+
     [filter:ceilometer]
     use = egg:ceilometer#swift
 
     # Some optional configuration
     # this allow to publish additional metadata
     metadata_headers = X-TEST
 
     # Set reseller prefix (defaults to "AUTH_" if not set)
     reseller_prefix = AUTH_
 """
 
 from __future__ import absolute_import
@@ -34,8 +34,9 @@ class FilePublisher(publisher.PublisherBase):
     If a file name and location is not specified, this File Publisher will not
     log any meters other than log a warning in Ceilometer log file.
 
-    To enable this publisher, add the following section to file
-    /etc/ceilometer/publisher.yaml or simply add it to an existing pipeline.
+    To enable this publisher, add the following section to the
+    /etc/ceilometer/publisher.yaml file or simply add it to an existing
+    pipeline::
 
         -
             name: meter_file
@@ -124,8 +124,10 @@ class EventFilter(object):
     :param event_type: the name of the event. None for all.
     :param message_id: the message_id of the event. None for all.
     :param traits_filter: the trait filter dicts, all of which are optional.
-      This parameter is a list of dictionaries that specify
-      trait values:
+      This parameter is a list of dictionaries that specify trait values:
+
+    .. code-block:: python
+
         {'key': <key>,
         'string': <value>,
         'integer': <value>,
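An illustrative traits_filter value matching the structure above; the trait names and values are invented, and only the keys shown in the docstring excerpt ('key', 'string', 'integer') are used:

    traits_filter = [
        {'key': 'resource_id', 'string': 'my-instance-0001'},
        {'key': 'memory_mb', 'integer': 512},
    ]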
@@ -71,64 +71,87 @@ class Connection(base.Connection):
 
     Collections:
 
-    - meter (describes sample actually)
+    - meter (describes sample actually):
 
       - row-key: consists of reversed timestamp, meter and an md5 of
         user+resource+project for purposes of uniqueness
       - Column Families:
-          f: contains the following qualifiers:
-              -counter_name : <name of counter>
-              -counter_type : <type of counter>
-              -counter_unit : <unit of counter>
-              -counter_volume : <volume of counter>
-              -message: <raw incoming data>
-              -message_id: <id of message>
-              -message_signature: <signature of message>
-              -resource_metadata: raw metadata for corresponding resource
-                  of the meter
-              -project_id: <id of project>
-              -resource_id: <id of resource>
-              -user_id: <id of user>
-              -recorded_at: <datetime when sample has been recorded (utc.now)>
-              -flattened metadata with prefix r_metadata. e.g.
-                  f:r_metadata.display_name or f:r_metadata.tag
-              -rts: <reversed timestamp of entry>
-              -timestamp: <meter's timestamp (came from message)>
-              -source for meter with prefix 's'
+
+        f: contains the following qualifiers:
+
+        - counter_name: <name of counter>
+        - counter_type: <type of counter>
+        - counter_unit: <unit of counter>
+        - counter_volume: <volume of counter>
+        - message: <raw incoming data>
+        - message_id: <id of message>
+        - message_signature: <signature of message>
+        - resource_metadata: raw metadata for corresponding resource
+          of the meter
+        - project_id: <id of project>
+        - resource_id: <id of resource>
+        - user_id: <id of user>
+        - recorded_at: <datetime when sample has been recorded (utc.now)>
+        - flattened metadata with prefix r_metadata. e.g.::
+
+          f:r_metadata.display_name or f:r_metadata.tag
+
+        - rts: <reversed timestamp of entry>
+        - timestamp: <meter's timestamp (came from message)>
+        - source for meter with prefix 's'
 
-    - resource
+    - resource:
 
       - row_key: uuid of resource
       - Column Families:
+
         f: contains the following qualifiers:
-            -resource_metadata: raw metadata for corresponding resource
-            -project_id: <id of project>
-            -resource_id: <id of resource>
-            -user_id: <id of user>
-            -flattened metadata with prefix r_metadata. e.g.
+
+        - resource_metadata: raw metadata for corresponding resource
+        - project_id: <id of project>
+        - resource_id: <id of resource>
+        - user_id: <id of user>
+        - flattened metadata with prefix r_metadata. e.g.::
+
           f:r_metadata.display_name or f:r_metadata.tag
-            -sources for all corresponding meters with prefix 's'
-            -all meters for this resource in format
+
+        - sources for all corresponding meters with prefix 's'
+        - all meters for this resource in format:
+
+        .. code-block:: python
+
           "%s+%s+%s!%s!%s" % (rts, source, counter_name, counter_type,
                               counter_unit)
 
-    - alarm
+    - alarm:
 
       - row_key: uuid of alarm
       - Column Families:
+
         f: contains the raw incoming alarm data
 
-    - alarm_h
+    - alarm_h:
 
       - row_key: uuid of alarm + "_" + reversed timestamp
       - Column Families:
+
         f: raw incoming alarm_history data. Timestamp becomes now()
           if not determined
 
-    - events
+    - events:
 
       - row_key: timestamp of event's generation + uuid of event
         in format: "%s+%s" % (ts, Event.message_id)
-      -Column Families:
+      - Column Families:
+
         f: contains the following qualifiers:
-            -event_type: description of event's type
-            -timestamp: time stamp of event generation
-            -all traits for this event in format
+
+        - event_type: description of event's type
+        - timestamp: time stamp of event generation
+        - all traits for this event in format:
+
+        .. code-block:: python
+
           "%s+%s" % (trait_name, trait_type)
     """
 
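A sketch of how a meter row key of the shape described above could be assembled; the helper name, separators and exact layout are assumptions for illustration, not the HBase driver's actual code:

    import hashlib

    def meter_row_key(rts, counter_name, user_id, resource_id, project_id):
        # reversed timestamp first so newer samples sort ahead, then the meter
        # name, then an md5 of user+resource+project for uniqueness
        uniq = hashlib.md5(('%s%s%s' % (user_id, resource_id, project_id))
                           .encode('utf-8')).hexdigest()
        return '%s_%s_%s' % (rts, counter_name, uniq)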
@@ -230,6 +253,7 @@ class Connection(base.Connection):
 
     def update_alarm(self, alarm):
         """Create an alarm.
+
         :param alarm: The alarm to create. It is Alarm object, so we need to
             call as_dict()
         """
@@ -575,6 +599,9 @@ class Connection(base.Connection):
         """Write the events to Hbase.
 
         :param event_models: a list of models.Event objects.
+        :return problem_events: a list of events that could not be saved in a
+          (reason, event) tuple. From the reasons that are enumerated in
+          storage.models.Event only the UNKNOWN_PROBLEM is applicable here.
         """
         problem_events = []
 
@@ -656,6 +683,7 @@ class Connection(base.Connection):
         """Return a dictionary containing the name and data type of the trait.
 
         Only trait types for the provided event_type are returned.
+
         :param event_type: the type of the Event
         """
 
@@ -809,7 +837,7 @@ class MTable(object):
     @staticmethod
     def SingleColumnValueFilter(args, rows):
         """This method is called from scan() when 'SingleColumnValueFilter'
-        is found in the 'filter' argument
+        is found in the 'filter' argument.
         """
         op = args[2]
         column = "%s:%s" % (args[0], args[1])
@@ -841,9 +869,10 @@ class MTable(object):
         """This is filter for testing "in-memory HBase".
 
         This method is called from scan() when 'ColumnPrefixFilter' is found
-        in the 'filter' argument
-        :param args is list of filter arguments, contain prefix of column
-        :param rows is dict of row prefixes for filtering
+        in the 'filter' argument.
+
+        :param args: a list of filter arguments, contain prefix of column
+        :param rows: a dict of row prefixes for filtering
         """
         value = args[0]
         column = 'f:' + value
@@ -860,11 +889,12 @@ class MTable(object):
     def RowFilter(args, rows):
         """This is filter for testing "in-memory HBase".
 
-        This method is called from scan() when 'RowFilter'
-        is found in the 'filter' argument
-        :param args is list of filter arguments, it contains operator and
+        This method is called from scan() when 'RowFilter' is found in the
+        'filter' argument.
+
+        :param args: a list of filter arguments, it contains operator and
           sought string
-        :param rows is dict of rows which are filtered
+        :param rows: a dict of rows which are filtered
         """
         op = args[0]
         value = args[1]
@@ -962,10 +992,10 @@ def timestamp(dt, reverse=True):
     the 'oldest' entries will be on top of the table or it should be the newest
     ones (reversed timestamp case).
 
-    :param: dt: datetime which is translated to timestamp
-    :param: reverse: a boolean parameter for reverse or straight count of
+    :param dt: datetime which is translated to timestamp
+    :param reverse: a boolean parameter for reverse or straight count of
       timestamp in milliseconds
-    :return count or reversed count of milliseconds since start of epoch
+    :return: count or reversed count of milliseconds since start of epoch
     """
     epoch = datetime.datetime(1970, 1, 1)
     td = dt - epoch
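The reversed-timestamp idea described in that docstring can be sketched as follows; the epoch and subtraction mirror the lines shown above, but the ceiling constant is illustrative rather than the module's exact value:

    import datetime

    MAX_TS = 0x7fffffffffffffff   # illustrative ceiling for the reversed count

    def timestamp_ms(dt, reverse=True):
        epoch = datetime.datetime(1970, 1, 1)
        td = dt - epoch
        ms = (td.days * 86400 + td.seconds) * 1000 + td.microseconds // 1000
        # reversed count makes newer entries sort first in HBase
        return MAX_TS - ms if reverse else ms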
@@ -1008,7 +1038,8 @@ def make_events_query_from_filter(event_filter):
 def make_timestamp_query(func, start=None, start_op=None, end=None,
                          end_op=None, bounds_only=False, **kwargs):
     """Return a filter start and stop row for filtering and a query
-    which based on the fact that CF-name is 'rts'
+    which based on the fact that CF-name is 'rts'.
+
     :param start: Optional start timestamp
     :param start_op: Optional start timestamp operator, like gt, ge
     :param end: Optional end timestamp
@@ -1240,9 +1271,8 @@ def deserialize_entry(entry, get_raw_meta=True):
     get_raw_meta is False.
 
     :param entry: entry from HBase, without row name and timestamp
-    :param get_raw_meta: If true then raw metadata will be returned
-                         If False metadata will be constructed from
-                         'f:r_metadata.' fields
+    :param get_raw_meta: If true then raw metadata will be returned,
+        if False metadata will be constructed from 'f:r_metadata.' fields
     """
     flatten_result = {}
     sources = []
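A sketch of the reconstruction described for get_raw_meta=False, i.e. rebuilding a metadata dict from the flattened 'f:r_metadata.' cells; this is a hypothetical helper, not the module's actual code:

    def metadata_from_entry(entry):
        # keep only the flattened metadata cells and strip their prefix
        prefix = 'f:r_metadata.'
        return {cell[len(prefix):]: value
                for cell, value in entry.items()
                if cell.startswith(prefix)}

    entry = {'f:r_metadata.display_name': 'vm-1',
             'f:r_metadata.tag': 'prod',
             'f:counter_name': 'cpu_util'}
    metadata_from_entry(entry)
    # -> {'display_name': 'vm-1', 'tag': 'prod'}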
@@ -654,6 +654,7 @@ class Connection(pymongo_base.Connection):
         Pagination works by requiring sort_key and sort_dir.
         We use the last item in previous page as the 'marker' for pagination.
         So we return values that follow the passed marker in the order.
+
         :param q: the query dict passed in.
         :param db_collection: Database collection that be query.
         :param limit: maximum number of items to return.
@@ -661,7 +662,8 @@ class Connection(pymongo_base.Connection):
                        results after this item.
         :param sort_keys: array of attributes by which results be sorted.
         :param sort_dir: direction in which results be sorted (asc, desc).
-        return: The query with sorting/pagination added.
+
+        :return: The query with sorting/pagination added.
         """
 
         sort_keys = sort_keys or []
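A rough sketch of the marker-based paging described above, using pymongo-style sort specifiers; the query shape is simplified to a single sort key and is not the driver's actual implementation:

    import pymongo

    def paginate(collection, marker=None, sort_key='timestamp',
                 sort_dir='desc', limit=100):
        direction = (pymongo.DESCENDING if sort_dir == 'desc'
                     else pymongo.ASCENDING)
        query = {}
        if marker is not None:
            # return items that follow the marker in the sort order
            op = '$lt' if direction == pymongo.DESCENDING else '$gt'
            query[sort_key] = {op: marker[sort_key]}
        return collection.find(query).sort(sort_key, direction).limit(limit)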
@@ -51,7 +51,11 @@ def write_autodoc_index():
     RSTDIR = os.path.abspath(os.path.join(BASE_DIR, "sourcecode"))
     SRCS = {'ceilometer': ROOT}
 
-    EXCLUDED_MODULES = ('ceilometer.tests')
+    EXCLUDED_MODULES = ('ceilometer.tests','ceilometer.compute.nova_notifier',
+                        'ceilometer.openstack.common.db.sqlalchemy.session',
+                        'ceilometer.openstack.common.middleware.audit',
+                        'ceilometer.openstack.common.middleware.notifier',
+                        'ceilometer.openstack.common.log_handler')
     CURRENT_SOURCES = {}
 
     if not(os.path.exists(RSTDIR)):
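For illustration, exclusion with a tuple like this is typically just a prefix test while walking the package; the following is a hypothetical sketch, not the actual write_autodoc_index() logic:

    import pkgutil
    import ceilometer

    def modules_to_document(excluded=EXCLUDED_MODULES):
        for _, name, _ in pkgutil.walk_packages(ceilometer.__path__,
                                                prefix='ceilometer.'):
            if not name.startswith(excluded):   # str.startswith accepts a tuple
                yield name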