Merge "Fix services check function"

Jenkins 2016-05-30 14:18:52 +00:00 committed by Gerrit Code Review
commit 59acad0c9f
6 changed files with 92 additions and 37 deletions


@ -15,6 +15,8 @@
from proboscis import asserts
import requests
from stacklight_tests.helpers import remote_ops


def check_http_get_response(url, expected_code=200, msg=None, **kwargs):
    """Perform an HTTP GET request and assert that the HTTP server replies with
@ -34,3 +36,22 @@ def check_http_get_response(url, expected_code=200, msg=None, **kwargs):
    asserts.assert_equal(
        r.status_code, expected_code, msg.format(r.status_code, expected_code))
    return r


def verify_services(remote, service_name, count):
    """Check that a process is running on a host.

    :param remote: SSH connection to the node.
    :type remote: SSHClient
    :param service_name: the process name to match.
    :type service_name: str
    :param count: the number of processes to match.
    :type count: int
    :returns: list of PIDs.
    :rtype: list
    """
    msg = "{0} count not equal to {1}, received instead {2}."
    pids = remote_ops.get_pids_of_process(remote, service_name)
    asserts.assert_equal(
        len(pids), count, msg.format(service_name, count, len(pids)))
    return pids
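A hedged usage sketch of the new helper: "remote" is assumed to be an already-established SSHClient to the target node (obtained elsewhere in this change via env.d_env.get_ssh_to_remote), and the service name, expected count, env and node_ip are illustrative assumptions only.

with env.d_env.get_ssh_to_remote(node_ip) as remote:
    # expect exactly two hekad processes on the node; the returned
    # PIDs can be reused for further checks
    pids = verify_services(remote, "hekad", count=2)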


@ -18,7 +18,6 @@ import time
import urllib2
from devops.helpers import helpers
from fuelweb_test.helpers import checkers
from fuelweb_test import logger
from proboscis import asserts
@ -151,19 +150,6 @@ class PluginHelper(object):
        self.fuel_web.run_single_ostf_test(self.cluster_id, test_sets,
                                           test_name, *args, **kwargs)

    def verify_service(self, ip, service_name, count):
        """Check that a process is running on a host.

        :param ip: IP address of the host.
        :type ip: str
        :param service_name: the process name to match.
        :type service_name: str
        :param count: the number of processes to match.
        :type count: int
        """
        with self.env.d_env.get_ssh_to_remote(ip) as remote:
            checkers.verify_service(remote, service_name, count)

    def add_node_to_cluster(self, node, redeploy=True, check_services=False):
        """Add nodes to the cluster.
@ -333,6 +319,20 @@ class PluginHelper(object):
        with self.env.d_env.get_admin_remote() as remote:
            exec_res = remote.execute(
                "fuel-createmirror {0}".format(option))
            asserts.assert_equal(exit_code, exec_res['exit_code'],
                                 'fuel-createmirror failed:'
                                 ' {0}'.format(exec_res['stderr']))
            asserts.assert_equal(
                exit_code, exec_res['exit_code'],
                'fuel-createmirror failed: {0}'.format(exec_res['stderr']))

    @staticmethod
    def get_services_for_version(services_mapping, version):
        """Return the processes mapping for the requested version only.

        :param services_mapping: full services mapping.
        :type services_mapping: dict
        :param version: plugin's version.
        :type version: str
        """
        def get_major_version():
            return ".".join(version.split(".")[:2])

        major_version = get_major_version()
        return services_mapping[major_version]
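A self-contained sketch of what this lookup does: only the major part of the plugin version (e.g. "0.9" from "0.9.1") selects an entry in the mapping. The mapping and version below are made-up examples, not values from this change.

services_mapping = {
    "0.9": {"hekad": 1, "collectd": 1},
    "1.0": {"hekad": 2, "collectd": 1},
}
version = "0.9.1"
major_version = ".".join(version.split(".")[:2])  # -> "0.9"
print(services_mapping[major_version])  # prints the "0.9" entry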


@ -63,3 +63,20 @@ def simulate_network_interrupt_on_node(remote, interval=30):
"/sbin/iptables -D INPUT -j DROP) 2>&1>/dev/null &".format(
interval=interval))
remote.execute(cmd)
def get_pids_of_process(remote, name):
"""Get PIDs of process by its name.
:param remote: SSH connection to the node.
:type remote: SSHClient
:param name: process name.
:type name: str
:returns: list of PIDs.
:rtype: list
"""
cmd = "pidof {}".format(name)
result = remote.execute(cmd)
if result['exit_code'] != 0:
return []
return result['stdout'][0].strip().split()
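The same "pidof" handling can be exercised without an SSH remote; a minimal local sketch, assuming pidof is available on the host and substituting subprocess for the SSHClient used above:

import subprocess


def get_pids_of_process_local(name):
    try:
        out = subprocess.check_output(["pidof", name])
    except subprocess.CalledProcessError:
        # pidof exits with a non-zero code when no process matches
        return []
    return out.decode().strip().split()


print(get_pids_of_process_local("sshd"))  # e.g. ['1234'] or []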


@ -11,6 +11,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from fuelweb_test import logger
from stacklight_tests import base_test
@ -33,6 +34,21 @@ class LMACollectorPluginApi(base_test.PluginApi):
    def get_plugin_vip(self):
        pass

    def verify_services(self):
        """Check that the LMA services are started in the right quantity."""
        nodes = self.helpers.get_all_ready_nodes()
        services_to_check = self.helpers.get_services_for_version(
            self.settings.services_to_check,
            self.settings.version)
        for node in nodes:
            logger.info("Check {services} services on the {name} node".format(
                name=node['name'],
                services=', '.join(services_to_check.keys()),))
            with self.env.d_env.get_ssh_to_remote(node['ip']) as remote:
                for service, count in services_to_check.items():
                    self.checkers.verify_services(remote, service, count)
    def check_plugin_online(self):
        # Run OSTF test to check pacemaker status
        self.helpers.run_single_ostf(
@ -40,14 +56,7 @@ class LMACollectorPluginApi(base_test.PluginApi):
            test_name='fuel_health.tests.ha.test_pacemaker_status.'
                      'TestPacemakerStatus.test_check_pacemaker_resources')

        # Check that heka and collectd processes are started on all nodes
        nodes = self.helpers.get_all_ready_nodes()
        msg = "Check services on the {} node"
        for node in nodes:
            logger.info(msg.format(node['name']))
            _ip = node['ip']
            self.helpers.verify_service(_ip, 'hekad', 2)
            self.helpers.verify_service(_ip, 'collectd -C', 1)
        self.verify_services()

    def uninstall_plugin(self):
        return self.helpers.uninstall_plugin(self.settings.name,


@ -37,3 +37,11 @@ toolchain_options = {
    'influxdb_mode/value': 'local',
    'alerting_mode/value': 'local'
}

services_to_check = {
    "0.9": {
        "hekad": 1,
        "collectd": 1,
        "collectdmon": 1
    },
}
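If a later plugin release changes the expected process counts, the mapping is extended with a sibling entry keyed by its major version; a hypothetical sketch (the "1.0" key and its counts are assumptions, not part of this change):

services_to_check = {
    "0.9": {
        "hekad": 1,
        "collectd": 1,
        "collectdmon": 1
    },
    # hypothetical future entry; get_services_for_version() would select it
    # for any "1.0.x" plugin version
    "1.0": {
        "hekad": 2,
        "collectd": 1,
        "collectdmon": 1
    },
}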


@ -42,12 +42,12 @@ class TestToolchainDetachPlugins(api.ToolchainApi):
            2. Install the plugins
            3. Create the cluster
            4. Add 1 node with the controller role
            4. Add 1 node with the rabbitmq role
            5. Add 1 node with the compute and cinder roles
            6. Add 1 node with the plugin roles
            7. Deploy the cluster
            8. Check that LMA Toolchain plugins are running
            9. Run OSTF
            5. Add 1 node with the rabbitmq role
            6. Add 1 node with the compute and cinder roles
            7. Add 1 node with the plugin roles
            8. Deploy the cluster
            9. Check that LMA Toolchain plugins are running
            10. Run OSTF

        Duration 60m
        Snapshot deploy_toolchain_with_detached_rabbitmq
@ -78,12 +78,12 @@ class TestToolchainDetachPlugins(api.ToolchainApi):
            2. Install the plugins
            3. Create the cluster
            4. Add 1 node with the controller role
            4. Add 1 node with the database role
            5. Add 1 node with the compute and cinder roles
            6. Add 1 node with the plugin roles
            7. Deploy the cluster
            8. Check that LMA Toolchain plugins are running
            9. Run OSTF
            5. Add 1 node with the database role
            6. Add 1 node with the compute and cinder roles
            7. Add 1 node with the plugin roles
            8. Deploy the cluster
            9. Check that LMA Toolchain plugins are running
            10. Run OSTF

        Duration 60m
        Snapshot deploy_toolchain_with_detached_database