Initial refactor

Refactor module names
Loggers match module names
Add abstract method run_workloads to WorkloadBase

Change-Id: I20b22787046bab98efb0a099bd32a08df8918628
commit 2f0096fde5
parent c111042326
@@ -3,4 +3,4 @@
   template: src=dump_facts.j2 dest={{ browbeat_path }}/metadata/machine_facts.json

 - name: Generate metadata jsons
-  shell : python {{ browbeat_path }}/lib/Metadata.py {{ browbeat_path }}/metadata
+  shell : python {{ browbeat_path }}/browbeat/metadata.py {{ browbeat_path }}/metadata
browbeat.py (34 changed lines)
@@ -13,13 +13,13 @@

 import argparse
 import datetime
-import lib.Elastic
-import lib.PerfKit
-import lib.Rally
-import lib.Shaker
-import lib.Yoda
-import lib.WorkloadBase
-import lib.Tools
+import browbeat.elastic
+import browbeat.perfkit
+import browbeat.rally
+import browbeat.shaker
+import browbeat.yoda
+import browbeat.workloadbase
+import browbeat.tools
 import logging
 import os
 import sys
@@ -31,7 +31,7 @@ debug_log_file = 'log/debug.log'


 def main():
-    tools = lib.Tools.Tools()
+    tools = browbeat.tools.Tools()
     parser = argparse.ArgumentParser(
         description="Browbeat Performance and Scale testing for Openstack")
     parser.add_argument(
@@ -76,7 +76,7 @@ def main():
     _config = tools._load_config(_cli_args.setup)

     if _cli_args.compare == "software-metadata":
-        es = lib.Elastic.Elastic(_config, "BrowbeatCLI")
+        es = browbeat.elastic.Elastic(_config, "BrowbeatCLI")
         es.compare_metadata("_all", 'controller', _cli_args.uuids)
         exit(0)

@@ -103,7 +103,7 @@ def main():
     else:
         time_stamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S")
     _logger.info("Browbeat test suite kicked off")
-    _logger.info("Browbeat UUID: {}".format(lib.Elastic.browbeat_uuid))
+    _logger.info("Browbeat UUID: {}".format(browbeat.elastic.browbeat_uuid))
     if _config['elasticsearch']['enabled']:
         _logger.info("Checking for Metadata")
         metadata_exists = tools.check_metadata()
@@ -129,28 +129,28 @@ def main():
             _logger.error("{} is missing in {}".format(
                 wkld_provider, _cli_args.setup))
     result_dir = _config['browbeat']['results']
-    lib.WorkloadBase.WorkloadBase.print_report(result_dir, time_stamp)
+    browbeat.workloadbase.WorkloadBase.print_report(result_dir, time_stamp)
     _logger.info("Saved browbeat result summary to {}".format(
         os.path.join(result_dir, time_stamp + '.' + 'report')))
-    lib.WorkloadBase.WorkloadBase.print_summary()
+    browbeat.workloadbase.WorkloadBase.print_summary()

     browbeat_rc = 0
-    if lib.WorkloadBase.WorkloadBase.failure > 0:
+    if browbeat.workloadbase.WorkloadBase.failure > 0:
         browbeat_rc = 1
-    if lib.WorkloadBase.WorkloadBase.index_failures > 0:
+    if browbeat.workloadbase.WorkloadBase.index_failures > 0:
         browbeat_rc = 2

     if browbeat_rc == 1:
         _logger.info("Browbeat finished with test failures, UUID: {}".format(
-            lib.Elastic.browbeat_uuid))
+            browbeat.elastic.browbeat_uuid))
         sys.exit(browbeat_rc)
     elif browbeat_rc == 2:
         _logger.info("Browbeat finished with Elasticsearch indexing failures, UUID: {}"
-                     .format(lib.Elastic.browbeat_uuid))
+                     .format(browbeat.elastic.browbeat_uuid))
         sys.exit(browbeat_rc)
     else:
         _logger.info("Browbeat finished successfully, UUID: {}".format(
-            lib.Elastic.browbeat_uuid))
+            browbeat.elastic.browbeat_uuid))
         sys.exit(0)

 if __name__ == '__main__':
@@ -10,7 +10,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import Tools
+import tools
 import os
 import logging
 import shutil
@@ -19,9 +19,9 @@ import shutil
 class Connmon(object):

     def __init__(self, config):
-        self.logger = logging.getLogger('browbeat.Connmon')
+        self.logger = logging.getLogger('browbeat.connmon')
         self.config = config
-        self.tools = Tools.Tools(self.config)
+        self.tools = tools.Tools(self.config)
         return None

     # Start connmond
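The renamed loggers keep the browbeat. prefix, so every module logger stays a child of the root 'browbeat' logger and inherits its handlers. A minimal sketch of that behaviour (the handler setup here is illustrative, not part of this change):

import logging

# Configure only the parent logger, as the browbeat.py entry point does.
parent = logging.getLogger('browbeat')
parent.setLevel(logging.DEBUG)
parent.addHandler(logging.StreamHandler())

# A logger named after its module propagates records up to 'browbeat',
# so per-module loggers such as browbeat.connmon need no handlers of their own.
child = logging.getLogger('browbeat.connmon')
child.info("connmon started")  # emitted through the parent's handler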
@@ -33,7 +33,7 @@ class Elastic(object):
         self.max_cache_size = cache_size
         self.last_upload = datetime.datetime.utcnow()
         self.max_cache_age = datetime.timedelta(minutes=max_cache_time)
-        self.logger = logging.getLogger('browbeat.Elastic')
+        self.logger = logging.getLogger('browbeat.elastic')
         self.es = elasticsearch.Elasticsearch([
             {'host': self.config['elasticsearch']['host'],
              'port': self.config['elasticsearch']['port']}],
@@ -304,8 +304,8 @@ class Elastic(object):
         data['_id'] = _id
         self.cache.append(data)
         now = datetime.datetime.utcnow()
-        if len(self.cache) <= self.max_cache_size \
-                and (now - self.last_upload) <= self.max_cache_age:
+        if (len(self.cache) <= self.max_cache_size and
+                (now - self.last_upload) <= self.max_cache_age):
             return True
         else:
             return self.flush_cache()
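The reparenthesised condition keeps the same batching rule: documents are buffered until either the cache grows past max_cache_size or max_cache_age has passed since the last upload. A standalone sketch of that rule (class name, thresholds, and the flush body are placeholders, not Browbeat code):

import datetime


class BatchingCache(object):
    def __init__(self, max_size=100, max_age_minutes=10):
        self.cache = []
        self.max_cache_size = max_size
        self.max_cache_age = datetime.timedelta(minutes=max_age_minutes)
        self.last_upload = datetime.datetime.utcnow()

    def flush_cache(self):
        # Stand-in for the bulk upload to Elasticsearch.
        print("flushing {} cached documents".format(len(self.cache)))
        self.cache = []
        self.last_upload = datetime.datetime.utcnow()
        return True

    def append(self, data):
        self.cache.append(data)
        now = datetime.datetime.utcnow()
        if (len(self.cache) <= self.max_cache_size and
                (now - self.last_upload) <= self.max_cache_age):
            return True  # keep buffering
        return self.flush_cache()  # size or age threshold exceeded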
@@ -16,7 +16,7 @@ import logging
 class Grafana(object):

     def __init__(self, config):
-        self.logger = logging.getLogger('browbeat.Grafana')
+        self.logger = logging.getLogger('browbeat.grafana')
         self.config = config
         self.cloud_name = self.config['browbeat']['cloud_name']
         self.hosts_file = self.config['ansible']['hosts']
@@ -10,10 +10,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import json
-import sys
-import os
 import argparse
+import json
+import os
+import sys


 class Metadata(object):
@@ -78,22 +78,22 @@ class Metadata(object):
         a seperator.
         """
         service = soft.split('_S_')
-        if len(service) < 2 :
+        if len(service) < 2:
             service = soft.split('_')
             key = service[2]
             section = "DEFAULT"
             service_name = service[1]
-        else :
+        else:
             key = service[3]
             section = service[2]
             service_name = service[1]

         node = item['inventory_hostname']

-        if service_name in software_dict :
-            if section in software_dict[service_name] :
+        if service_name in software_dict:
+            if section in software_dict[service_name]:
                 software_dict[service_name][section][key] = item[soft]
-            else :
+            else:
                 software_dict[service_name][section] = {}
                 software_dict[service_name][section][key] = item[soft]
         else:
@@ -11,30 +11,30 @@
 # limitations under the License.

 import ast
-import Connmon
+import connmon
 import datetime
-import Elastic
+import elastic
 import glob
-import Grafana
+import grafana
 import logging
 import os
 import shutil
 import subprocess
 import time
-import Tools
-import WorkloadBase
+import tools
+import workloadbase


-class PerfKit(WorkloadBase.WorkloadBase):
+class PerfKit(workloadbase.WorkloadBase):

     def __init__(self, config):
-        self.logger = logging.getLogger('browbeat.PerfKit')
+        self.logger = logging.getLogger('browbeat.perfkit')
         self.config = config
         self.error_count = 0
-        self.tools = Tools.Tools(self.config)
-        self.connmon = Connmon.Connmon(self.config)
-        self.grafana = Grafana.Grafana(self.config)
-        self.elastic = Elastic.Elastic(self.config, self.__class__.__name__.lower())
+        self.tools = tools.Tools(self.config)
+        self.connmon = connmon.Connmon(self.config)
+        self.grafana = grafana.Grafana(self.config)
+        self.elastic = elastic.Elastic(self.config, self.__class__.__name__.lower())
         self.test_count = 0
         self.scenario_count = 0
         self.pass_count = 0
@@ -193,7 +193,7 @@ class PerfKit(WorkloadBase.WorkloadBase):

         return success, to_ts, from_ts

-    def start_workloads(self):
+    def run_workloads(self):
         self.logger.info("Starting PerfKitBenchmarker Workloads.")
         time_stamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S")
         self.logger.debug("Time Stamp (Prefix): {}".format(time_stamp))
@@ -11,31 +11,30 @@
 # limitations under the License.

 import collections
-import Connmon
+import connmon
 import datetime
-import Elastic
+import elastic
 import glob
-import Grafana
+import grafana
 import logging
 import os
 import re
 import shutil
 import time
-import Tools
-import WorkloadBase
+import tools
+import workloadbase
 import json


-class Rally(WorkloadBase.WorkloadBase):
+class Rally(workloadbase.WorkloadBase):

     def __init__(self, config, hosts=None):
-        self.logger = logging.getLogger('browbeat.Rally')
+        self.logger = logging.getLogger('browbeat.rally')
         self.config = config
-        self.tools = Tools.Tools(self.config)
-        self.connmon = Connmon.Connmon(self.config)
-        self.grafana = Grafana.Grafana(self.config)
-        self.elastic = Elastic.Elastic(
-            self.config, self.__class__.__name__.lower())
+        self.tools = tools.Tools(self.config)
+        self.connmon = connmon.Connmon(self.config)
+        self.grafana = grafana.Grafana(self.config)
+        self.elastic = elastic.Elastic(self.config, self.__class__.__name__.lower())
         self.error_count = 0
         self.pass_count = 0
         self.test_count = 0
@@ -224,7 +223,7 @@ class Rally(WorkloadBase.WorkloadBase):
             success = False
         return success

-    def start_workloads(self):
+    def run_workloads(self):
         """Iterates through all rally scenarios in browbeat yaml config file"""
         results = collections.OrderedDict()
         self.logger.info("Starting Rally workloads")
@@ -12,26 +12,26 @@

 import collections
 import datetime
-import Elastic
-import Grafana
+import elastic
+import grafana
 import json
 import logging
 import os
 import time
-import Tools
+import tools
 import uuid
-import WorkloadBase
+import workloadbase
 import yaml


-class Shaker(WorkloadBase.WorkloadBase):
+class Shaker(workloadbase.WorkloadBase):

     def __init__(self, config):
-        self.logger = logging.getLogger('browbeat.Shaker')
+        self.logger = logging.getLogger('browbeat.shaker')
         self.config = config
-        self.tools = Tools.Tools(self.config)
-        self.grafana = Grafana.Grafana(self.config)
-        self.elastic = Elastic.Elastic(self.config, self.__class__.__name__.lower())
+        self.tools = tools.Tools(self.config)
+        self.grafana = grafana.Grafana(self.config)
+        self.elastic = elastic.Elastic(self.config, self.__class__.__name__.lower())
         self.error_count = 0
         self.pass_count = 0
         self.test_count = 0
@@ -403,7 +403,7 @@ class Shaker(WorkloadBase.WorkloadBase):
         else:
             self.result_check(result_dir, test_name, scenario, to_time, from_time)

-    def run_shaker(self):
+    def run_workloads(self):
         self.logger.info("Starting Shaker workloads")
         time_stamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S")
         self.logger.debug("Time Stamp (Prefix): {}".format(time_stamp))
@@ -10,10 +10,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import PerfKit
-import Rally
-import Shaker
-import Yoda
+import perfkit
+import rally
+import shaker
+import yoda
 import logging
 import os
 import subprocess
@@ -26,7 +26,7 @@ from pykwalify import errors as pykwalify_errors
 class Tools(object):

     def __init__(self, config=None):
-        self.logger = logging.getLogger('browbeat.Tools')
+        self.logger = logging.getLogger('browbeat.tools')
         self.config = config
         return None

@@ -103,7 +103,7 @@ class Tools(object):
     def validate_yaml(self):
         self.logger.info(
             "Validating the configuration file passed by the user")
-        stream = open("lib/validate.yaml", 'r')
+        stream = open("browbeat/validate.yaml", 'r')
         schema = yaml.safe_load(stream)
         check = pykwalify_core.Core(
             source_data=self.config, schema_data=schema)
@@ -117,19 +117,16 @@ class Tools(object):
     def _run_workload_provider(self, provider):
         self.logger = logging.getLogger('browbeat')
         if provider == "perfkit":
-            perfkit = PerfKit.PerfKit(self.config)
-            perfkit.start_workloads()
+            workloads = perfkit.PerfKit(self.config)
         elif provider == "rally":
-            rally = Rally.Rally(self.config)
-            rally.start_workloads()
+            workloads = rally.Rally(self.config)
         elif provider == "shaker":
-            shaker = Shaker.Shaker(self.config)
-            shaker.run_shaker()
+            workloads = shaker.Shaker(self.config)
         elif provider == "yoda":
-            yoda = Yoda.Yoda(self.config)
-            yoda.start_workloads()
+            workloads = yoda.Yoda(self.config)
         else:
             self.logger.error("Unknown workload provider: {}".format(provider))
+        workloads.run_workloads()

     def check_metadata(self):
         meta = self.config['elasticsearch']['metadata_files']
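With every provider class exposing run_workloads(), the dispatcher only has to pick a class and call the shared entry point. A rough standalone sketch of that shape (the Dummy classes and run_provider are placeholders, not Browbeat code):

class DummyRally(object):
    def run_workloads(self):
        print("running rally scenarios")


class DummyShaker(object):
    def run_workloads(self):
        print("running shaker scenarios")


def run_provider(provider):
    # Same shape as the refactored _run_workload_provider: choose the class,
    # then call the common run_workloads() instead of per-provider methods.
    if provider == "rally":
        workloads = DummyRally()
    elif provider == "shaker":
        workloads = DummyShaker()
    else:
        raise ValueError("unknown workload provider: {}".format(provider))
    workloads.run_workloads()


run_provider("shaker")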
@@ -144,8 +141,9 @@ class Tools(object):
             os.putenv("ANSIBLE_SSH_ARGS",
                       " -F {}".format(self.config['ansible']['ssh_config']))

-        ansible_cmd = 'ansible-playbook -i {} {}' .format(
-            self.config['ansible']['hosts'], self.config['ansible']['metadata'])
+        ansible_cmd = \
+            'ansible-playbook -i {} {}' \
+            .format(self.config['ansible']['hosts'], self.config['ansible']['metadata'])
         self.run_cmd(ansible_cmd)
         if not self.check_metadata():
             self.logger.warning("Metadata could not be gathered")
@@ -191,9 +189,9 @@ class Tools(object):
         if len(workload_results) > 0:
             for workload in workload_results:
                 if workload is "rally":
-                    rally = Rally.Rally(self.config)
+                    rally_workload = rally.Rally(self.config)
                     for file in workload_results[workload]:
-                        errors, results = rally.file_to_json(file)
+                        errors, results = rally_workload.file_to_json(file)
                 if workload is "shaker":
                     # Stub for Shaker.
                     continue
@@ -205,11 +203,11 @@ class Tools(object):
         values = {}
         with open(filepath) as stackrc:
             for line in stackrc:
-                line = line.replace('export', '')
                 pair = line.split('=')
-                if '#' not in line and len(pair) == 2 and '$(' not in line:
+                if 'export' not in line and '#' not in line and '$(' not in line:
                     values[pair[0].strip()] = pair[1].strip()
-                elif '#' not in line and '$(' in line and 'for key' not in line:
-                    values[pair[0].strip()] = self.run_cmd(
+                elif '$(' in line and 'for key' not in line:
+                    values[pair[0].strip()] = \
+                        self.run_cmd(
                         "echo " + pair[1].strip())['stdout'].strip()
         return values
@@ -18,7 +18,7 @@ import yaml

 class WorkloadBase(object):
     __metaclass__ = abc.ABCMeta
-    logger = logging.getLogger('browbeat.WorkloadBase')
+    logger = logging.getLogger('browbeat.workloadbase')
     success = 0
     failure = 0
     total_tests = 0
@@ -27,11 +27,11 @@ class WorkloadBase(object):
     browbeat = {}

     @abc.abstractmethod
-    def update_scenarios(self):
+    def run_workloads(self):
         pass

     @abc.abstractmethod
-    def update_tests(self):
+    def update_fail_tests(self):
         pass

     @abc.abstractmethod
@@ -39,7 +39,11 @@ class WorkloadBase(object):
         pass

     @abc.abstractmethod
-    def update_fail_tests(self):
+    def update_scenarios(self):
+        pass
+
+    @abc.abstractmethod
+    def update_tests(self):
         pass

     def update_total_scenarios(self):
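Because run_workloads is now declared with @abc.abstractmethod, a workload class that forgets to implement it cannot be instantiated. A minimal sketch (written with the Python 3 abc.ABC spelling; the diff itself uses the Python 2 __metaclass__ form):

import abc


class Base(abc.ABC):

    @abc.abstractmethod
    def run_workloads(self):
        pass


class GoodWorkload(Base):
    def run_workloads(self):
        print("running workloads")


class IncompleteWorkload(Base):
    # run_workloads() deliberately missing.
    pass


GoodWorkload().run_workloads()  # works
try:
    IncompleteWorkload()        # raises TypeError: abstract method not implemented
except TypeError as err:
    print(err)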
@@ -13,13 +13,13 @@

 # Yet another cloud deployment tool
 import datetime
-import Elastic
-import Grafana
+import elastic
+import grafana
 import json
 import logging
 import time
-import Tools
-import WorkloadBase
+import tools
+import workloadbase
 from openstack import connection
 from openstack import exceptions
 import os
@@ -31,15 +31,14 @@ except ImportError:
     from collections import deque


-class Yoda(WorkloadBase.WorkloadBase):
+class Yoda(workloadbase.WorkloadBase):

     def __init__(self, config):
         self.logger = logging.getLogger('browbeat.yoda')
         self.config = config
-        self.tools = Tools.Tools(self.config)
-        self.grafana = Grafana.Grafana(self.config)
-        self.elastic = Elastic.Elastic(
-            self.config, self.__class__.__name__.lower())
+        self.tools = tools.Tools(self.config)
+        self.grafana = grafana.Grafana(self.config)
+        self.elastic = elastic.Elastic(self.config, self.__class__.__name__.lower())
         self.error_count = 0
         self.pass_count = 0
         self.test_count = 0
@@ -638,7 +637,7 @@ class Yoda(WorkloadBase.WorkloadBase):
             results = out[0]
             changed = out[1]

-    def start_workloads(self):
+    def run_workloads(self):
         """Iterates through all yoda scenarios in browbeat yaml config file"""
         self.logger.info("Starting YODA workloads")
         es_ts = datetime.datetime.utcnow()
@@ -1,7 +1,6 @@
 ansible==2.2.0.0
-elasticsearch
-openstacksdk
 python-dateutil==2.4.2
 python-openstackclient==3.11.0
 pykwalify
+elasticsearch
+openstacksdk
tox.ini (6 changed lines)
@@ -17,10 +17,10 @@ commands =
   bash -c "cd ansible; find . -type f -regex '.*.y[a]?ml' -print0 | xargs -t -n1 -0 \
     ansible-lint \
     -x ANSIBLE0013,ANSIBLE0012,ANSIBLE0006,ANSIBLE0007,ANSIBLE0010,ANSIBLE0016"
-  python ci-scripts/linters/lint-browbeat-config.py lib/validate.yaml browbeat-config.yaml
-  python ci-scripts/linters/lint-browbeat-config.py lib/validate.yaml browbeat-complete.yaml
+  python ci-scripts/linters/lint-browbeat-config.py browbeat/validate.yaml browbeat-config.yaml
+  python ci-scripts/linters/lint-browbeat-config.py browbeat/validate.yaml browbeat-complete.yaml
   bash -c "for config in $(ls conf/); do \
-    python ci-scripts/linters/lint-browbeat-config.py lib/validate.yaml conf/$config; done"
+    python ci-scripts/linters/lint-browbeat-config.py browbeat/validate.yaml conf/$config; done"

 [testenv:pep8]
 commands = flake8 {posargs}