Add toolchain test to check Nova metrics

Test that Nova metrics change in Grafana on instance creation.
Minor fixes in the InfluxDB-Grafana functional tests.

Change-Id: I5474e20bc87223e7527dcd9f2e9a326ee1407182
Rodion Promyshlennikov 2016-06-03 21:20:29 +03:00
parent 3cc1528103
commit 187f874fac
7 changed files with 158 additions and 3 deletions

View File

@@ -19,6 +19,12 @@ Post-installation
.. automodule:: stacklight_tests.toolchain.test_post_install
   :members:

Functional
==========

.. automodule:: stacklight_tests.toolchain.test_functional
   :members:

Detached plugins
================

View File

@@ -12,6 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import json

from fuelweb_test import logger
from proboscis import asserts
@@ -114,3 +116,25 @@ class InfluxdbPluginApi(base_test.PluginApi):
    def check_grafana_dashboards(self):
        grafana_url = self.get_grafana_url()
        ui_api.check_grafana_dashboards(grafana_url)

    def get_nova_instance_creation_time_metrics(self, time_point=None):
        """Get the instance creation time metrics for the provided interval.

        :param time_point: time interval
        :type time_point: str
        :returns: list of metrics
        :rtype: list
        """
        logger.info("Getting Nova instance creation metrics")
        interval = "now() - 1h" if time_point is None else time_point
        query = (
            "select value "
            "from openstack_nova_instance_creation_time "
            "where time >= {interval}".format(interval=interval))
        result = self.do_influxdb_query(query=query)
        result = json.loads(
            result.content)["results"][0]
        if result:
            return result["series"][0]["values"]
        return []
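The parsing above relies on the JSON payload returned by InfluxDB's /query endpoint (do_influxdb_query is a helper defined elsewhere in this class). A minimal standalone sketch of the same query with requests, where the URL, credentials and database name are placeholders rather than values taken from this repository:

import json

import requests

# Placeholder connection details; the real test reads them from the plugin settings.
INFLUXDB_QUERY_URL = "http://influxdb.example.com:8086/query"
params = {
    "db": "lma",
    "u": "lma",
    "p": "secret",
    "q": ("select value from openstack_nova_instance_creation_time "
          "where time >= now() - 1h"),
}

payload = json.loads(requests.get(INFLUXDB_QUERY_URL, params=params).content)
# Expected shape, which the method above unpacks:
# {"results": [{"series": [{"name": "openstack_nova_instance_creation_time",
#                           "columns": ["time", "value"],
#                           "values": [["2016-06-03T18:20:29Z", 12.5], ...]}]}]}
result = payload["results"][0]
values = result["series"][0]["values"] if result else []
print(values)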

View File

@@ -30,7 +30,7 @@ class TestFunctionalInfluxdbPlugin(api.InfluxdbPluginApi):
        """Verify that the dashboards show up in the Grafana UI.

        Scenario:
-           1. Revert snapshot with 9 deployed nodes in HA configuration
+           1. Revert snapshot with 3 deployed nodes
            2. Open the Grafana URL (
               open the "Dashboard" tab and click the "Grafana" link)
            3. Sign-in using the credentials provided
@@ -54,10 +54,10 @@ class TestFunctionalInfluxdbPlugin(api.InfluxdbPluginApi):
            * RabbitMQ
            * System

-       Duration 40m
+       Duration 20m
        """
-       self.env.revert_snapshot("deploy_influxdb_grafana_plugin")
+       self.env.revert_snapshot("deploy_influxdb_grafana")
        self.check_plugin_online()

View File

@@ -53,6 +53,7 @@ def import_tests():
    from stacklight_tests.lma_infrastructure_alerting import (  # noqa
        test_system)
    from stacklight_tests.toolchain import test_detached_plugins  # noqa
    from stacklight_tests.toolchain import test_functional  # noqa
    from stacklight_tests.toolchain import test_network_templates  # noqa
    from stacklight_tests.toolchain import test_post_install  # noqa
    from stacklight_tests.toolchain import test_smoke_bvt  # noqa
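This one-line addition matters because proboscis discovers tests at import time: the @test decorators register their groups when a module is imported, so a module left out of import_tests() never appears in the test plan. A small illustrative sketch of that mechanism (the class and group names are made up, not taken from this repository):

from proboscis import test, TestProgram


@test(groups=["example_group"])
class ExampleTests(object):
    """Registered with proboscis as a side effect of importing this module."""

    @test(groups=["example_case"])
    def check_something(self):
        pass


if __name__ == "__main__":
    # Typically invoked as: python this_file.py --group=example_case
    TestProgram().run_and_exit()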

View File

@@ -11,8 +11,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import time

from fuelweb_test import logger
from fuelweb_test.tests import base_test_case
from proboscis import asserts

from stacklight_tests.elasticsearch_kibana import api as elasticsearch_api
from stacklight_tests.helpers import checkers
@@ -83,3 +87,35 @@ class ToolchainApi(object):
    def get_pids_of_services(self):
        return self.plugins_mapping["lma_collector"].verify_services()

    def check_nova_metrics(self):
        time_started = "{}s".format(int(time.time()))
        metrics = self.plugins_mapping[
            "influxdb_grafana"].get_nova_instance_creation_time_metrics(
            time_started)
        asserts.assert_equal(
            metrics, [],
            "Spawned instances were found in Nova metrics "
            "before instance creation")
        test_name_pref = (
            'fuel_health.tests.smoke.'
            'test_nova_create_instance_with_connectivity.TestNovaNetwork.')
        instance_tests = (
            '{}test_004_create_servers'.format(test_name_pref),
            '{}test_009_create_server_with_file'.format(test_name_pref))
        for test_name in instance_tests:
            self.helpers.run_single_ostf(test_sets=['smoke'],
                                         test_name=test_name)
        updated_metrics = self.plugins_mapping[
            "influxdb_grafana"].get_nova_instance_creation_time_metrics(
            time_started)
        asserts.assert_equal(
            len(updated_metrics), len(instance_tests),
            "There is a mismatch of created instances in Nova metrics: "
            "found {instances_found} instead of {tests_started}".format(
                instances_found=len(updated_metrics),
                tests_started=len(instance_tests))
        )
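The method above is a straightforward before/after check: take a start timestamp, assert that the measurement has no points from that moment on, create instances through the two OSTF smoke tests, then assert one new point per instance. The "s" suffix on int(time.time()) makes the value an absolute epoch-seconds literal in the InfluxQL where clause. A condensed sketch of the same pattern with hypothetical callables (get_metrics and spawn_instance are placeholders, not helpers from this repository):

import time


def check_metric_growth(get_metrics, spawn_instance, expected_instances=2):
    # Epoch seconds with the "s" precision suffix, usable as an InfluxQL time literal.
    time_started = "{}s".format(int(time.time()))

    assert get_metrics(time_started) == [], (
        "The measurement already has points for this interval")

    for _ in range(expected_instances):
        spawn_instance()  # in the real test this is an OSTF instance-creation test

    assert len(get_metrics(time_started)) == expected_instances, (
        "Expected one metric point per spawned instance")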

View File

@@ -0,0 +1,85 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from fuelweb_test.helpers.decorators import log_snapshot_after_test
from proboscis import test

from stacklight_tests.toolchain import api


@test(groups=["plugins"])
class TestFunctionalToolchain(api.ToolchainApi):
    """Class for functional testing of plugins toolchain."""

    @test(depends_on_groups=["deploy_toolchain"],
          groups=["check_display_grafana_dashboards_toolchain",
                  "toolchain", "functional"])
    @log_snapshot_after_test
    def check_display_grafana_dashboards_toolchain(self):
        """Verify that the dashboards show up in the Grafana UI.

        Scenario:
            1. Revert snapshot with 3 deployed nodes
            2. Open the Grafana URL (
               open the "Dashboard" tab and click the "Grafana" link)
            3. Sign-in using the credentials provided
               during the configuration of the environment
            4. Go to the Main dashboard and verify that everything is ok
            5. Repeat the previous step for the following dashboards:
                * Apache
                * Cinder
                * Elasticsearch
                * Glance
                * HAProxy
                * Heat
                * Hypervisor
                * InfluxDB
                * Keystone
                * LMA self-monitoring
                * Memcached
                * MySQL
                * Neutron
                * Nova
                * RabbitMQ
                * System

        Duration 20m
        """
        self.env.revert_snapshot("deploy_toolchain")
        self.check_plugins_online()
        self.plugins_mapping["influxdb_grafana"].check_grafana_dashboards()
    @test(depends_on_groups=["deploy_toolchain"],
          groups=["check_nova_metrics_toolchain",
                  "toolchain", "functional"])
    @log_snapshot_after_test
    def check_nova_metrics_toolchain(self):
        """Verify that the Nova metrics are being collected.

        Scenario:
            1. Revert snapshot with 3 deployed nodes
            2. Check that the plugins are online
            3. Check the Nova metrics in InfluxDB during the OSTF tests

        Duration 20m
        """
        self.env.revert_snapshot("deploy_toolchain")
        self.check_plugins_online()
        self.check_nova_metrics()
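Once the module is wired into import_tests() (see the run_tests change above), these scenarios are selected by group name. A hedged sketch of a direct proboscis invocation, assuming the import_tests() helper shown earlier lives in stacklight_tests.run_tests; in practice the groups are usually passed to the project's runner script via --group:

import sys

from proboscis import TestProgram

from stacklight_tests.run_tests import import_tests  # assumed module path


if __name__ == "__main__":
    import_tests()
    # Equivalent to passing --group on the runner's command line.
    sys.argv.append("--group=check_nova_metrics_toolchain")
    TestProgram().run_and_exit()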

View File

@@ -218,6 +218,9 @@ class TestNodesToolchain(api.ToolchainApi):
        self.helpers.replace_ubuntu_mirror_with_mos()
        self.helpers.fuel_create_repositories(ready_nodes_before)
        # NOTE(rpromyshlennikov): the next check will fail
        # until this bug is fixed:
        # https://bugs.launchpad.net/lma-toolchain/+bug/1570850
        ready_nodes_hostnames_after = {node["hostname"] for node
                                       in self.helpers.get_all_ready_nodes()}
        asserts.assert_equal(