Add test deploying a dedicated environment
Change-Id: I795ef004f5640b0a02ec8b2fd106a837b36ffdd9
parent 921adcb604
commit 4d806b3aee

fixtures/scripts/update_controller_role.sh (new file, 10 lines)
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -e
+
+# Hacky way to find out which release should be updated
+RELEASE_ID=$(fuel rel | grep Ubuntu | grep -v UCA | awk '{print $1}')
+TMP_FILE="$(mktemp).yaml"
+fuel role --rel "$RELEASE_ID" --role controller --file "$TMP_FILE"
+sed -i 's/ min: ./ min: 0/' "$TMP_FILE"
+fuel role --rel "$RELEASE_ID" --update --file "$TMP_FILE"
+rm -f "$TMP_FILE"
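
The script above relaxes the controller role definition so that an environment can be deployed with zero controller nodes: it downloads the role description for the Ubuntu release, rewrites the minimum node count to 0 and uploads the result back. For illustration only, a rough Python equivalent of the sed substitution, assuming nothing about the role YAML except that it contains numeric "min" limits somewhere in its tree:

    import yaml

    def relax_min_limits(path):
        """Set every numeric 'min' limit in a Fuel role YAML to 0 (sketch)."""
        with open(path) as f:
            role = yaml.safe_load(f)

        def patch(node):
            # Walk the parsed YAML and zero out any 'min' value, mirroring
            # the blanket "s/ min: ./ min: 0/" substitution done with sed.
            if isinstance(node, dict):
                for key, value in node.items():
                    if key == "min" and isinstance(value, int):
                        node[key] = 0
                    else:
                        patch(value)
            elif isinstance(node, list):
                for item in node:
                    patch(item)

        patch(role)
        with open(path, "w") as f:
            yaml.safe_dump(role, f)
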
@@ -25,6 +25,10 @@ from proboscis import asserts
 PLUGIN_PACKAGE_RE = re.compile(r'([^/]+)-(\d+\.\d+)-(\d+\.\d+\.\d+)')
 
 
+class NotFound(Exception):
+    pass
+
+
 def get_plugin_name(filename):
     """Extract the plugin name from the package filename.
 
@@ -55,8 +59,12 @@ def get_plugin_version(filename):
     return None
 
 
-class NotFound(Exception):
-    pass
+def get_fixture(name):
+    """Return the full path to a fixture."""
+    path = os.path.join(os.environ.get("WORKSPACE", "./"), "fixtures", name)
+    if not os.path.isfile(path):
+        raise NotFound("File {} not found".format(path))
+    return path
 
 
 class PluginHelper(object):
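
The new get_fixture() helper resolves files relative to $WORKSPACE/fixtures (falling back to the current directory) and raises the relocated NotFound exception when the file is missing. A minimal usage sketch, with a placeholder WORKSPACE path:

    import os

    from stacklight_tests.helpers import helpers

    os.environ.setdefault("WORKSPACE", "/path/to/stacklight-integration-tests")

    # Resolves to $WORKSPACE/fixtures/scripts/update_controller_role.sh and
    # raises helpers.NotFound if the file does not exist.
    script = helpers.get_fixture("scripts/update_controller_role.sh")
    print(script)
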
@@ -162,7 +170,7 @@ class PluginHelper(object):
             mode='ha_compact')
 
     def deploy_cluster(self, nodes_roles, verify_network=False,
-                       update_interfaces=True):
+                       update_interfaces=True, check_services=True):
         """Assign roles to nodes and deploy the cluster.
 
         :param nodes_roles: nodes to roles mapping.
@@ -173,13 +181,17 @@ class PluginHelper(object):
         :param update_interfaces: whether or not interfaces should be updated
             before the deployment (default: True).
         :type settings: boolean
+        :param check_services: whether or not OSTF tests should run after the
+            deployment (default: True).
+        :type check_services: boolean
         :returns: None
         """
         self.fuel_web.update_nodes(self.cluster_id, nodes_roles,
                                    update_interfaces=update_interfaces)
         if verify_network:
             self.fuel_web.verify_network(self.cluster_id)
-        self.fuel_web.deploy_cluster_wait(self.cluster_id)
+        self.fuel_web.deploy_cluster_wait(self.cluster_id,
+                                          check_services=check_services)
 
     def run_ostf(self, *args, **kwargs):
         """Run the OpenStack health checks."""
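
The new check_services flag is forwarded to deploy_cluster_wait() so callers can skip the post-deployment OSTF health checks. A minimal sketch of how a test opts out, assuming a PluginHelper instance named helpers (as in the dedicated-environment test further down; node names are illustrative):

    def deploy_backends_only(helpers):
        """Deploy three backend-only nodes and skip the OSTF run afterwards."""
        roles = ["elasticsearch_kibana", "influxdb_grafana", "standalone-database"]
        helpers.deploy_cluster(
            {"slave-03": roles, "slave-04": roles, "slave-05": roles},
            verify_network=False,   # default; True runs the network check first
            check_services=False,   # skip the OSTF health checks after deployment
        )
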
@@ -65,11 +65,15 @@ class LMACollectorPluginApi(base_test.PluginApi):
         return pids
 
     def check_plugin_online(self):
-        # Run OSTF test to check pacemaker status
-        self.helpers.run_single_ostf(
-            test_sets=['ha'],
-            test_name='fuel_health.tests.ha.test_pacemaker_status.'
-                      'TestPacemakerStatus.test_check_pacemaker_resources')
+        # Run the OSTF tests to check the Pacemaker status except when no
+        # controllers are being deployed (dedicated environment case)
+        controllers = self.fuel_web.get_nailgun_cluster_nodes_by_roles(
+            self.helpers.cluster_id, ["controller"])
+        if len(controllers) > 0:
+            self.helpers.run_single_ostf(
+                test_sets=['ha'],
+                test_name='fuel_health.tests.ha.test_pacemaker_status.'
+                          'TestPacemakerStatus.test_check_pacemaker_resources')
 
         self.verify_services()
 
@@ -52,6 +52,7 @@ def import_tests():
         test_smoke_bvt)
     from stacklight_tests.lma_infrastructure_alerting import (  # noqa
         test_system)
+    from stacklight_tests.toolchain import test_dedicated_environment  # noqa
     from stacklight_tests.toolchain import test_detached_plugins  # noqa
     from stacklight_tests.toolchain import test_functional  # noqa
     from stacklight_tests.toolchain import test_network_templates  # noqa
@@ -12,11 +12,13 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import os
 import time
 
 from fuelweb_test import logger
 from fuelweb_test.tests import base_test_case
 from proboscis import asserts
+import yaml
 
 from stacklight_tests.elasticsearch_kibana import api as elasticsearch_api
 from stacklight_tests.helpers import checkers
@@ -39,45 +41,73 @@ class ToolchainApi(object):
         self.checkers = checkers
         self.remote_ops = remote_ops
         self.ui_tester = ui_tester
-        self.plugins_mapping = {
-            "elasticsearch_kibana": elasticsearch_api.ElasticsearchPluginApi(),
-            "influxdb_grafana": influx_api.InfluxdbPluginApi(),
-            "lma_collector": collector_api.LMACollectorPluginApi(),
-            "lma_infrastructure_alerting":
-                infrastructure_alerting_api.InfraAlertingPluginApi()
+        self.ELASTICSEARCH_KIBANA = elasticsearch_api.ElasticsearchPluginApi()
+        self.INFLUXDB_GRAFANA = influx_api.InfluxdbPluginApi()
+        self.LMA_COLLECTOR = collector_api.LMACollectorPluginApi()
+        self.LMA_INFRASTRUCTURE_ALERTING = \
+            infrastructure_alerting_api.InfraAlertingPluginApi()
+        self._plugins = {
+            self.ELASTICSEARCH_KIBANA,
+            self.INFLUXDB_GRAFANA,
+            self.LMA_COLLECTOR,
+            self.LMA_INFRASTRUCTURE_ALERTING
         }
-        self.plugins = set(self.plugins_mapping.values())
+        self._disabled_plugins = set()
 
     def __getattr__(self, item):
         return getattr(self.test, item)
 
+    def disable_plugin(self, plugin):
+        """Disable a plugin."""
+        self._disabled_plugins.add(plugin)
+
+    def enable_plugin(self, plugin):
+        """Enable a plugin."""
+        self._disabled_plugins.remove(plugin)
+
+    def call_plugin_method(self, plugin, f):
+        """Call a method on a plugin but only if it's enabled."""
+        if plugin in self.plugins:
+            return f(plugin)
+
+    @property
+    def plugins(self):
+        """Return the list of plugins that are enabled."""
+        return list(self._plugins - self._disabled_plugins)
+
     def prepare_plugins(self):
         """Upload and install the plugins."""
         for plugin in self.plugins:
             plugin.prepare_plugin()
 
     def activate_plugins(self):
-        msg = "Activate {} plugin"
+        """Enable and configure the plugins for the environment."""
         for plugin in self.plugins:
-            logger.info(msg.format(plugin.get_plugin_settings().name))
+            logger.info("Activate plugin {}".format(
+                plugin.get_plugin_settings().name))
             plugin.activate_plugin(
                 options=plugin.get_plugin_settings().toolchain_options)
 
     def check_plugins_online(self):
-        msg = "Check {} plugin"
         for plugin in self.plugins:
-            logger.info(msg.format(plugin.get_plugin_settings().name))
+            logger.info("Checking plugin {}".format(
+                plugin.get_plugin_settings().name))
             plugin.check_plugin_online()
 
     def check_nodes_count(self, count, hostname, state):
-        self.plugins_mapping[
-            'elasticsearch_kibana'].check_elasticsearch_nodes_count(count)
-        self.plugins_mapping[
-            'influxdb_grafana'].check_influxdb_nodes_count(count)
-        self.plugins_mapping[
-            'lma_infrastructure_alerting'].check_node_in_nagios(
-                hostname, state)
+        """Check that all nodes are present in the different backends."""
+        self.call_plugin_method(
+            self.ELASTICSEARCH_KIBANA,
+            lambda x: x.check_elasticsearch_nodes_count(count))
+        self.call_plugin_method(
+            self.INFLUXDB_GRAFANA,
+            lambda x: x.check_influxdb_nodes_count(count))
+        self.call_plugin_method(
+            self.LMA_INFRASTRUCTURE_ALERTING,
+            lambda x: x.check_node_in_nagios(hostname, state))
 
     def uninstall_plugins(self):
         """Uninstall the plugins from the environment."""
         for plugin in self.plugins:
             plugin.uninstall_plugin()
 
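
The plugins_mapping dict is replaced by explicit plugin attributes plus an enable/disable mechanism: the plugins property hides anything recorded in _disabled_plugins, and call_plugin_method() silently skips disabled plugins. A behavioural sketch, assuming toolchain is an already initialised ToolchainApi instance:

    # All four plugins are enabled by default.
    assert len(toolchain.plugins) == 4

    # Disabling a plugin removes it from .plugins, so the prepare/activate/check
    # loops and call_plugin_method() leave it alone.
    toolchain.disable_plugin(toolchain.LMA_INFRASTRUCTURE_ALERTING)
    assert toolchain.LMA_INFRASTRUCTURE_ALERTING not in toolchain.plugins

    result = toolchain.call_plugin_method(
        toolchain.LMA_INFRASTRUCTURE_ALERTING,
        lambda plugin: plugin.check_plugin_online())
    assert result is None  # the lambda was never called

    # The plugin can be re-enabled later.
    toolchain.enable_plugin(toolchain.LMA_INFRASTRUCTURE_ALERTING)
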
@@ -86,13 +116,20 @@ class ToolchainApi(object):
             plugin.check_uninstall_failure()
 
     def get_pids_of_services(self):
-        return self.plugins_mapping["lma_collector"].verify_services()
+        """Check that all nodes run the required LMA collector services."""
+        return self.LMA_COLLECTOR.verify_services()
+
+    @staticmethod
+    def get_network_template(template_name):
+        template_path = os.path.join("network_templates",
+                                     "{}.yaml".format(template_name))
+        with open(helpers.get_fixture(template_path)) as f:
+            return yaml.load(f)
 
     def check_nova_metrics(self):
         time_started = "{}s".format(int(time.time()))
-        metrics = self.plugins_mapping[
-            "influxdb_grafana"].get_nova_instance_creation_time_metrics(
-                time_started)
+        plugin = self.INFLUXDB_GRAFANA
+        metrics = plugin.get_nova_instance_creation_time_metrics(time_started)
         asserts.assert_equal(
             metrics, [],
             "Spawned instances was found in Nova metrics "
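
The get_network_template() helper added in the hunk above moves here from TestToolchainNetworkTemplates (see the removal at the bottom of this change) and now resolves templates through helpers.get_fixture(), i.e. fixtures/network_templates/<name>.yaml under $WORKSPACE. A usage sketch with a hypothetical template name:

    from stacklight_tests.toolchain import api

    # Loads and parses $WORKSPACE/fixtures/network_templates/default.yaml;
    # "default" is only an illustrative name.
    template = api.ToolchainApi.get_network_template("default")
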
@@ -108,9 +145,8 @@ class ToolchainApi(object):
         self.helpers.run_single_ostf(test_sets=['smoke'],
                                      test_name=test_name)
 
-        updated_metrics = self.plugins_mapping[
-            "influxdb_grafana"].get_nova_instance_creation_time_metrics(
-                time_started)
+        updated_metrics = plugin.get_nova_instance_creation_time_metrics(
+            time_started)
 
         asserts.assert_equal(
             len(updated_metrics), len(instance_tests),
@@ -121,11 +157,9 @@ class ToolchainApi(object):
         )
 
     def check_nova_logs(self):
-        indices = self.plugins_mapping[
-            'elasticsearch_kibana'].get_current_indices('log')
+        indices = self.ELASTICSEARCH_KIBANA.get_current_indices('log')
         logger.info("Found indexes {}".format(indices))
-        output = self.plugins_mapping[
-            'elasticsearch_kibana'].query_nova_logs(indices)
+        output = self.ELASTICSEARCH_KIBANA.query_nova_logs(indices)
         msg = "Indexes {} don't contain Nova logs"
         asserts.assert_not_equal(output['hits']['total'], 0, msg.format(
             indices))
stacklight_tests/toolchain/test_dedicated_environment.py (new file, 95 lines)
@@ -0,0 +1,95 @@
+# Copyright 2016 Mirantis, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from fuelweb_test.helpers.decorators import log_snapshot_after_test
+from proboscis import asserts
+from proboscis import test
+
+from stacklight_tests.helpers import helpers
+from stacklight_tests import settings
+from stacklight_tests.toolchain import api
+
+
+@test(groups=["plugins"])
+class TestToolchainDedicatedEnvironment(api.ToolchainApi):
+    """Class for testing that the LMA Toolchain plugins can be installed in a
+    dedicated environment.
+    """
+
+    @test(depends_on_groups=['prepare_slaves_5'],
+          groups=["deploy_standalone_backends", "deploy",
+                  "toolchain", "dedicated_environment"])
+    @log_snapshot_after_test
+    def deploy_standalone_backends(self):
+        """Deploy a cluster with the Elasticsearch and InfluxDB backends.
+
+        Scenario:
+            1. Create the cluster
+            2. Add 3 nodes with the elasticsearch_kibana, influxdb_grafana and
+               database roles
+            3. Deploy the cluster
+            4. Check that backends are running
+
+        Duration 60m
+        Snapshot deploy_standalone_backends
+        """
+        self.check_run("deploy_standalone_backends")
+
+        self.env.revert_snapshot("ready_with_5_slaves")
+
+        # Grafana requires a MySQL server to run
+        asserts.assert_is_not_none(
+            settings.DETACH_DATABASE_PLUGIN_PATH,
+            "DETACH_DATABASE_PLUGIN_PATH variable should be set"
+        )
+
+        # Relax the restrictions on the controller role to deploy an
+        # environment without OpenStack nodes.
+        with self.helpers.env.d_env.get_admin_remote() as remote:
+            remote.upload(
+                helpers.get_fixture("scripts/update_controller_role.sh"),
+                "/tmp")
+            remote.check_call(
+                "bash -x /tmp/update_controller_role.sh",
+                verbose=True)
+
+        # NOTE: in this test case, we don't deploy the LMA Infrastructure
+        # Alerting plugin because there is no support for the remote mode on
+        # the collector side
+        roles = ["elasticsearch_kibana", "influxdb_grafana",
+                 "standalone-database"]
+        self.disable_plugin(self.LMA_INFRASTRUCTURE_ALERTING)
+
+        self.prepare_plugins()
+        self.helpers.prepare_plugin(settings.DETACH_DATABASE_PLUGIN_PATH)
+
+        self.helpers.create_cluster(name='deploy_standalone_backends')
+
+        self.activate_plugins()
+        self.helpers.activate_plugin(
+            helpers.get_plugin_name(settings.DETACH_DATABASE_PLUGIN_PATH),
+            helpers.get_plugin_version(settings.DETACH_DATABASE_PLUGIN_PATH))
+
+        # Don't run OSTF tests because they don't pass with the detach-database
+        # plugin
+        self.helpers.deploy_cluster({
+            'slave-03': roles,
+            'slave-04': roles,
+            'slave-05': roles
+        }, check_services=False)
+
+        self.check_plugins_online()
+
+        self.env.make_snapshot("deploy_standalone_backends",
+                               is_make=True)
@@ -61,7 +61,7 @@ class TestFunctionalToolchain(api.ToolchainApi):
 
         self.check_plugins_online()
 
-        self.plugins_mapping["influxdb_grafana"].check_grafana_dashboards()
+        self.INFLUXDB_GRAFANA.check_grafana_dashboards()
 
     @test(depends_on_groups=["deploy_toolchain"],
           groups=["check_nova_metrics_toolchain",
@@ -13,14 +13,12 @@
 # under the License.
 
 import copy
-import os
 
 from fuelweb_test.helpers.decorators import log_snapshot_after_test
 from fuelweb_test import logger
 from fuelweb_test.settings import NEUTRON_SEGMENT
 from proboscis import asserts
 from proboscis import test
-import yaml
 
 from stacklight_tests.toolchain import api
 
@@ -31,15 +29,6 @@ class TestToolchainNetworkTemplates(api.ToolchainApi):
     templates.
     """
 
-    @staticmethod
-    def get_network_template(template_name):
-        template = os.path.join(
-            os.environ.get("WORKSPACE", "./"),
-            "fixtures/network_templates/",
-            "{}.yaml".format(template_name))
-        with open(template) as f:
-            return yaml.load(f)
-
     @test(depends_on_groups=["prepare_slaves_3"],
           groups=["deploy_toolchain_with_network_template", "deploy",
                   "toolchain", "network_templates"])