Adding Browbeat Infra files
Adding .gitreview file
Adding tox.ini
Adding setup.py
Adding test-requirments.txt
Fixing syntax

Change-Id: Id6d628708079440207e5f068f5f0827802f2aa14
This commit is contained in:
parent c838c61ba2
commit 06e3e3292b
.gitreview
@@ -1,5 +1,4 @@
 [gerrit]
-host=review.gerrithub.io
+host=review.openstack.org
 port=29418
-project=jtaleric/browbeat
-defaultbranch=master
+project=openstack/browbeat.git
@@ -1,3 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 def dict_remove(the_dict, item):
     """Remove an item from a dictionary."""
     del the_dict[item]
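For reference, a minimal sketch of how the dict_remove helper behaves. The call below is hypothetical usage, not part of the commit; in the repo the function most likely backs an Ansible/Jinja2 filter for the dashboard role:

# Hypothetical usage sketch; 'dashboard' is an illustrative dict.
def dict_remove(the_dict, item):
    """Remove an item from a dictionary."""
    del the_dict[item]

dashboard = {'id': 12, 'title': 'browbeat', 'version': 3}
dict_remove(dashboard, 'id')   # mutates in place; returns None
print(dashboard)               # {'title': 'browbeat', 'version': 3}
# Note: raises KeyError if the key is absent, matching plain `del`.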
12 ansible/install/roles/dashboard-openstack/fix-ids.py (Executable file → Normal file)
@@ -1,3 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 #!/usr/bin/env python
 import argparse
 import json
12 browbeat.py (Executable file → Normal file)
@@ -1,4 +1,16 @@
 #!/usr/bin/env python
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from lib.PerfKit import PerfKit
 from lib.Rally import Rally
 from lib.Shaker import Shaker
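Beyond the new header, this hunk shows browbeat.py's three workload imports. The dispatch sketch below is an assumption for illustration, not the file's actual body (which this diff does not include); it only shows why the classes are imported side by side, since each implements the WorkloadBase interface seen in the lib/ hunks further down, including start_workloads():

# Hypothetical dispatch sketch; '_workloads' and 'run_workload' are
# illustrative names that do not appear in this commit.
_workloads = {'perfkit': PerfKit, 'rally': Rally, 'shaker': Shaker}

def run_workload(name, config):
    tool = _workloads[name](config)  # each workload class is built from the parsed config
    tool.start_workloads()           # entry point visible in the PerfKit hunk below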
73 docs/source/conf.py (Normal file)
@@ -0,0 +1,73 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('../..'))
+# -- General configuration ----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    # 'sphinx.ext.intersphinx',
+    'oslosphinx'
+]
+
+# autodoc generation is a bit aggressive and a nuisance when doing heavy
+# text edit cycles.
+# execute "export SPHINX_DEBUG=1" in your terminal to disable
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'browbeat'
+copyright = u'2013, OpenStack Foundation'
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = True
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# -- Options for HTML output --------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+# html_theme_path = ["."]
+# html_theme = '_theme'
+# html_static_path = ['static']
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = '%sdoc' % project
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass
+# [howto/manual]).
+latex_documents = [
+    ('index',
+     '%s.tex' % project,
+     u'%s Documentation' % project,
+     u'OpenStack Foundation', 'manual'),
+]
+
+# Example configuration for intersphinx: refer to the Python standard library.
+# intersphinx_mapping = {'http://docs.python.org/': None}
4 docs/source/contributing.rst (Normal file)
@@ -0,0 +1,4 @@
+============
+Contributing
+============
+.. include:: ../../CONTRIBUTING.rst
25 docs/source/index.rst (Normal file)
@@ -0,0 +1,25 @@
+.. browbeat documentation master file, created by
+   sphinx-quickstart on Tue Jul 9 22:26:36 2013.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to browbeat's documentation!
+========================================================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   readme
+   installation
+   usage
+   contributing
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
3 docs/source/installation.rst (Normal file)
@@ -0,0 +1,3 @@
+============
+Installation
+============
1 docs/source/readme.rst (Normal file)
@@ -0,0 +1 @@
+.. include:: ../../README.rst
3 docs/source/usage.rst (Normal file)
@@ -0,0 +1,3 @@
+========
+Usage
+========
@@ -1,199 +0,0 @@
-import csv
-from collections import Counter
-import sys
-from datetime import datetime
-import matplotlib
-import numpy as np
-import ntpath
-matplotlib.use('Agg')
-import matplotlib.pyplot as plt
-import matplotlib.cbook as cbook
-from pylab import rcParams
-rcParams['figure.figsize'] = 18, 10
-
-services=['/usr/bin/nova-scheduler','/usr/bin/keystone-all','/usr/bin/nova-api',
-          '/usr/bin/nova-conductor','/usr/bin/neutron-server','/usr/bin/cinder-api',
-          '/usr/bin/cinder-volume','/usr/bin/cinder-scheduler']
-color_wheel=['r','g','b','y']
-
-data = {}
-average = {}
-for service in services :
-    data[service] = {}
-    average[service] = {}
-    average[service]['connection_count_avg'] = 0
-    average[service]['max_checkedout_avg'] = 0
-    average[service]['checkouts_per_second_avg'] = 0
-
-print "--------------------------------------------------------------------------------------"
-print "Reading File : %s" % sys.argv[1]
-print "--------------------------------------------------------------------------------------"
-reader = csv.DictReader(open(sys.argv[1]))
-for row in reader:
-    for service in services :
-        if not row['hostname'] in data[service].keys() :
-            data[service][row['hostname']] = {}
-            data[service][row['hostname']]['timestamp'] = []
-            data[service][row['hostname']]['hostname'] = []
-            data[service][row['hostname']]['max_connections'] = []
-            data[service][row['hostname']]['checkout_count'] = []
-            data[service][row['hostname']]['connection_count'] = []
-            data[service][row['hostname']]['max_checkedout'] = []
-            data[service][row['hostname']]['checkouts_per_second'] = []
-
-        if row['progname'] == service :
-            data[service][row['hostname']]['timestamp'].append(datetime.strptime(row['timestamp'],
-                                                               '%Y-%m-%d %H:%M:%S'))
-            data[service][row['hostname']]['connection_count'].append(row['connection_count'])
-            data[service][row['hostname']]['max_connections'].append(row['max_connections'])
-            data[service][row['hostname']]['checkout_count'].append(row['checkout_count'])
-            data[service][row['hostname']]['max_checkedout'].append(row['max_checkedout'])
-            data[service][row['hostname']]['checkouts_per_second'].append(row['checkouts_per_second'])
-
-#
-# Graph connections across each controller.
-#
-for service in data :
-
-    print "Building Graph of connections per host second for : %s" % service
-    plt.title("Database Connections : Service : %s" % service)
-    plt.xlabel("Time")
-    plt.ylabel("Connections")
-    pos=0
-    for host in data[service] :
-        controller,=plt.plot_date(data[service][host]['timestamp'],
-                                  data[service][host]['connection_count'],
-                                  'c',
-                                  linewidth=5,label="%s-controller0-conn"%service)
-
-        controller2,=plt.plot_date(data[service][host]['timestamp'],
-                                   data[service][host]['checkout_count'],
-                                   'c',
-                                   linewidth=3,
-                                   label="%s-controller0-ckout"%service)
-
-        controller1,=plt.plot_date(data[service][host]['timestamp'],
-                                   data[service][host]['max_checkedout'],
-                                   'c',
-                                   linewidth=1,
-                                   label="%s-controller0-max_checkedout"%service)
-
-        controller.set_color(color_wheel[pos])
-        controller1.set_color(color_wheel[pos])
-        controller2.set_color(color_wheel[pos])
-        pos=pos+1
-
-    plt.legend(["%s-controller0-conn"%service,
-                "%s-controller0-ckout"%service,
-                "%s-controller0-max-ckout"%service,
-                "%s-controller1-conn"%service,
-                "%s-controller1-ckout"%service,
-                "%s-controller1-max-ckout"%service,
-                "%s-controller2-conn"%service,
-                "%s-controller2-ckout"%service,
-                "%s-controller2-max-ckout"%service])
-
-    plt.savefig("%s_%s-connctions.png"%(sys.argv[1],ntpath.basename(service)), bbox_inches='tight')
-    plt.close()
-
-    #
-    # Graph checkouts per second across each controller.
-    #
-    print "Building Graph of checkouts per second for : %s" % service
-    pos=0
-    for host in data[service] :
-        plt.title("Database Checkouts Per-Second : Service : %s" % service)
-        plt.xlabel("Time")
-        plt.ylabel("Connections")
-
-        controller,=plt.plot_date(data[service][host]['timestamp'],
-                                  data[service][host]['checkouts_per_second'],
-                                  'c',
-                                  linewidth=1,
-                                  label="%s-controller0-ckout"%service)
-
-        controller.set_color(color_wheel[pos])
-        pos=pos+1
-
-    plt.legend(["%s-controller0-ckout-persec"%service,
-                "%s-controller1-ckout-persec"%service,
-                "%s-controller2-ckout-persec"%service])
-    plt.savefig("%s_%s-connctions-checkout-persec.png"%
-                (sys.argv[1],
-                 ntpath.basename(service)),
-                bbox_inches='tight')
-    plt.close()
-
-    #
-    # Sum connections across controllers
-    #
-    #
-    print "Building Graph of sum of connections for : %s" % service
-    num_controllers=len(data[service].keys())
-    pos=0
-    total_connections = np.array([])
-    total_checkouts = np.array([])
-    total_maxcheckouts = np.array([])
-    for host in data[service] :
-        plt.title("Database Connections : Service : %s" % service)
-        plt.xlabel("Time")
-        plt.ylabel("Connections")
-        if pos == 0 :
-            total_connections = np.array(data[service][host]['connection_count']).astype(np.float)
-            total_checkouts = np.array(data[service][host]['checkout_count']).astype(np.float)
-            total_maxcheckouts = np.array(data[service][host]['max_checkedout']).astype(np.float)
-
-        elif pos <= num_controllers :
-            if total_connections.size < len(data[service][host]['connection_count']):
-                data[service][host]['connection_count'] = np.resize(data[service][host]['connection_count'],total_connections.size)
-            else:
-                total_connections = np.resize(total_connections,len(data[service][host]['connection_count']))
-
-            if total_checkouts.size < len(data[service][host]['checkout_count']):
-                data[service][host]['checkout_count'] = np.resize(data[service][host]['checkout_count'],total_checkouts.size)
-            else:
-                total_checkouts = np.resize(total_checkouts,len(data[service][host]['checkout_count']))
-
-            if total_maxcheckouts.size < len(data[service][host]['max_checkedout']):
-                data[service][host]['max_checkedout'] = np.resize(data[service][host]['max_checkedout'],total_maxcheckouts.size)
-            else:
-                total_maxcheckouts = np.resize(total_maxcheckouts,len(data[service][host]['max_checkedout']))
-
-            total_connections = np.add(total_connections, np.array(data[service][host]['connection_count']).astype(np.float))
-            total_checkouts= np.add(total_checkouts, np.array(data[service][host]['checkout_count']).astype(np.float))
-            total_maxcheckouts= np.add(total_maxcheckouts, np.array(data[service][host]['max_checkedout']).astype(np.float))
-
-        pos=pos+1
-
-    plt.title("Database Connections : Service : %s" % service)
-    plt.xlabel("Time")
-    plt.ylabel("Connections")
-    pos=0
-    controller,=plt.plot_date(np.resize(data[service][host]['timestamp'],len(total_connections)),
-                              total_connections,
-                              'c',
-                              linewidth=5,label="%s-controllers-conn"%service)
-
-    controller2,=plt.plot_date(np.resize(data[service][host]['timestamp'],len(total_checkouts)),
-                               total_checkouts,
-                               'c',
-                               linewidth=3,
-                               label="%s-controllers-ckout"%service)
-
-    controller1,=plt.plot_date(np.resize(data[service][host]['timestamp'],len(total_maxcheckouts)),
-                               total_maxcheckouts,
-                               'c',
-                               linewidth=1,
-                               label="%s-controllers-max_checkedout"%service)
-
-    controller.set_color(color_wheel[pos])
-    controller1.set_color(color_wheel[pos+1])
-    controller2.set_color(color_wheel[pos+2])
-
-    plt.legend(["%s-controllers-sum-conn"%service,
-                "%s-controllers-sum-ckout"%service,
-                "%s-controllers-sum-maxckout"%service])
-
-    plt.savefig("%s_%s-connctions-all.png"%(sys.argv[1],ntpath.basename(service)), bbox_inches='tight')
-    plt.close()
@@ -1,175 +0,0 @@
-#!/usr/bin/env python
-from datetime import datetime
-from collections import OrderedDict
-import argparse
-import csv
-import os
-import re
-import subprocess
-import sys
-import matplotlib
-matplotlib.use('Agg')
-import matplotlib.pyplot as plt
-
-# Saved Measurements:
-measurements = ['Min', 'Median', '90%ile', '95%ile', 'Max', 'Avg', 'Success%', 'Count']
-
-"""
-Results directory structure:
-".../browbeat/results/full-apache-fernet-keystone-36/keystone/keystone-cc/run-1/
-full-apache-fernet-keystone-36-iteration_1-keystone-cc-0256.log"
-Structure of compiled results dictionary:
-results[service][test][iteration][#workers][concurrency][measurement] = value
-"""
-
-def list_only_directories(the_directory):
-    return [a_dir for a_dir in os.listdir(the_directory)
-            if os.path.isdir(os.path.join(the_directory, a_dir)) ]
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description='Processes multiple rally log files from brwowbeat into compiled graphs.')
-    parser.add_argument('test_prefix', help='Use the resulting prefixed directories/files in '
-                        'browbeat results directory.')
-    args = parser.parse_args()
-
-    compiled_results = OrderedDict()
-    compiled_issues = []
-    # Should be /home/<user>/browbeat/graphing:
-    rallyplot_path = os.path.dirname(os.path.realpath(__file__))
-    browbeat_path = rallyplot_path.replace('/graphing', '')
-
-    test_runs = [a_dir for a_dir in list_only_directories('{}/results/'.format(browbeat_path))
-                 if re.match('^{}-[A-Za-z]+-[0-9]+'.format(args.test_prefix), a_dir)]
-
-    for test_run in test_runs:
-        extract = re.search('{}-([a-zA-Z]*)-([0-9]*)'.format(args.test_prefix), test_run)
-        skip = True
-        if extract:
-            service = extract.group(1)
-            w_count = extract.group(2)
-            skip = False
-        else:
-            print 'Potentially incorrect directory: {}'.format(test_run)
-        if not skip:
-            for service in os.listdir('{}/results/{}/'.format(browbeat_path, test_run)):
-                if service not in compiled_results:
-                    compiled_results[service] = OrderedDict()
-                for test in os.listdir('{}/results/{}/{}/'.format(browbeat_path, test_run, service)):
-                    if test not in compiled_results[service]:
-                        compiled_results[service][test] = OrderedDict()
-                    for iteration in os.listdir('{}/results/{}/{}/{}/'.format(browbeat_path, test_run, service, test)):
-                        iter_num = int(iteration.replace('run-', ''))
-                        if iter_num not in compiled_results[service][test]:
-                            compiled_results[service][test][iter_num] = OrderedDict()
-                        if w_count not in compiled_results[service][test][iter_num]:
-                            compiled_results[service][test][iter_num][w_count] = OrderedDict()
-                        result_files = os.listdir('{}/results/{}/{}/{}/{}/'.format(browbeat_path, test_run, service, test, iteration))
-                        result_files = [a_file for a_file in result_files if re.match('.*log', a_file)]
-                        for r_file in result_files:
-                            # Extract concurrency of test
-                            extract = re.search('{}-{}-{}-iteration_{}-{}-([0-9]*)\.log'.format(args.test_prefix, service, w_count, iter_num, test), r_file)
-                            if extract:
-                                concurrency = extract.group(1)
-                                if concurrency not in compiled_results[service][test][iter_num][w_count]:
-                                    compiled_results[service][test][iter_num][w_count][concurrency] = OrderedDict()
-                                result_file_full_path = '{}/results/{}/{}/{}/{}/{}'.format(browbeat_path, test_run, service, test, iteration, r_file)
-                                # print 'Test_run: {}, Service: {}, Test: {}, iteration: {}, Concurrency: {}, Result_file: {}'.format(test_run, service, test, iteration, concurrency, r_file)
-                                # print 'Full Path: {}'.format(result_file_full_path)
-
-                                grep_cmd = subprocess.Popen(['grep', 'total', result_file_full_path],
-                                                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-                                out, err = grep_cmd.communicate()
-                                if len(out) == 0:
-                                    print 'Could not find results. Setting to -1'
-                                    compiled_issues.append(r_file)
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Min'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Median'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['90%ile'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['95%ile'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Max'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Avg'] = '-1'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Success%'] = '0'
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Count'] = '-1'
-                                else:
-                                    output = [s.strip() for s in out.strip().split('|') if s]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Min'] = output[1]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Median'] = output[2]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['90%ile'] = output[3]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['95%ile'] = output[4]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Max'] = output[5]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Avg'] = output[6]
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Success%'] = output[7].replace('%', '')
-                                    compiled_results[service][test][iter_num][w_count][concurrency]['Count'] = output[8]
-
-    rally_graph_dir = '{}/results/{}-rally-compiled-graphs/'.format(browbeat_path, args.test_prefix)
-    if not os.path.exists(rally_graph_dir):
-        os.mkdir(rally_graph_dir)
-
-    # Now graph results based on measurements list:
-    for service in compiled_results:
-        for test in compiled_results[service]:
-            # Assumption is all tests have same number of iterations!!!
-            for iteration in compiled_results[service][test]:
-                for measurement in measurements:
-                    concurrency_dict = {}
-                    for worker_count in sorted(compiled_results[service][test][iteration].keys()):
-                        for concurrency in compiled_results[service][test][iteration][worker_count]:
-                            if concurrency not in concurrency_dict:
-                                concurrency_dict[concurrency] = []
-                            if str(compiled_results[service][test][iteration][worker_count][concurrency][measurement]) == "n/a":
-                                # Rally will place n/a in place of an actual result when it fails
-                                # completely, we can't graph n/a, so replace with -1
-                                concurrency_dict[concurrency].append(-1)
-                            else:
-                                concurrency_dict[concurrency].append(float(compiled_results[service][test][iteration][worker_count][concurrency][measurement]))
-
-                    graph_file_name = '{}{}-{}-{}-{}.png'.format(rally_graph_dir, service, test, iteration, measurement)
-                    print '----------------------------------------------------------'
-                    print 'Test Prefix: {}'.format(args.test_prefix)
-                    print 'Service: {}'.format(service)
-                    print 'Test: {}'.format(test)
-                    print 'Iteration: {}'.format(iteration)
-                    print 'Measurement: {}'.format(measurement)
-                    print 'File Name: {}'.format(graph_file_name)
-                    print 'X-Axis (Worker Counts): {}'.format(sorted(compiled_results[service][test][iteration].keys()))
-                    print 'X-Axis (# of values per series): {}'.format(len(compiled_results[service][test][iteration].keys()))
-                    print '# of Series (# of Concurrencies tested): {}'.format(len(compiled_results[service][test][iteration][worker_count].keys()))
-                    for series in sorted(concurrency_dict):
-                        print 'Series: {}, Values: {}'.format(series, concurrency_dict[series])
-                    print 'Legend: {}'.format(sorted(concurrency_dict.keys()))
-                    print '----------------------------------------------------------'
-                    fig = plt.figure()
-                    plt.title(
-                        'Test Name: {}\n'
-                        'Service: {}, Test: {}, Iteration: {}, Measurement: {}\n'
-                        'Graphed from rally task log output'.format(args.test_prefix, service, test,
-                                                                    iteration, measurement))
-                    plt.xlabel('Workers')
-                    plt.ylabel('{} Time (s)'.format(measurement))
-                    ax = fig.add_subplot(111)
-                    for series in sorted(concurrency_dict.keys()):
-                        plt_linewidth = 1
-                        if '-1' in concurrency_dict[series]:
-                            plt_linewidth = 2
-                        plt.plot(sorted(compiled_results[service][test][iteration].keys()),
-                                 concurrency_dict[series], linewidth=plt_linewidth, label=series, marker='o')
-                        for x, y in zip(sorted(compiled_results[service][test][iteration].keys()),
-                                        concurrency_dict[series]):
-                            ax.annotate('%s' % y, xy=(x,y), xytext=(4,4), textcoords='offset points')
-                    plt.legend(loc='upper center', bbox_to_anchor=(1.12, 0.5), fancybox=True)
-                    ax.grid(True)
-                    plt.savefig(graph_file_name, bbox_inches='tight')
-                    plt.close()
-
-    # Print files that had an issue:
-    print '----------------------------------------------------------'
-    print 'Files missing results:'
-    print '----------------------------------------------------------'
-    for issue in compiled_issues:
-        print 'File: {}'.format(issue)
-
-
-if __name__ == "__main__":
-    sys.exit(main())
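The heart of the deleted script is how it splits the "total" row that rally prints in its results table. A worked example of that parsing, with a made-up log line in the table format the script assumes:

# Hypothetical 'total' row; the column order matches how the script
# stored Min..Count from output[1]..output[8].
out = '| total | 1.2 | 2.3 | 3.1 | 3.4 | 4.0 | 2.5 | 100.0% | 256 |'
output = [s.strip() for s in out.strip().split('|') if s]
# output == ['total', '1.2', '2.3', '3.1', '3.4', '4.0', '2.5', '100.0%', '256']
print(output[1], output[7].replace('%', ''))   # Min and Success% as stored: 1.2 100.0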
@@ -1,140 +0,0 @@
-#!/usr/bin/env python
-import json
-import math
-import matplotlib.pyplot as plt
-import argparse
-import os
-import sys
-
-def getiter(prog_dict):
-    density = prog_dict['deployment']['accommodation'][2]['density']
-    compute = prog_dict['deployment']['accommodation'][3]['compute_nodes']
-    iterval = density*compute
-    if(prog_dict['deployment']['accommodation'][0]=="pair" and
-       prog_dict['deployment']['accommodation'][1]=="single_room"):
-        iterval //=2
-    iterlist = []
-    if prog_dict['execution']['progression'] in ['arithmetic', 'linear',
-                                                 'linear_progression']:
-        iterlist = range(1,iterval+1)
-    elif prog_dict['execution']['progression'] in ['geometric', 'quadratic',
-                                                   'quadratic_progression']:
-        iterlist = [iterval]
-        while iterval > 1:
-            iterval //= 2
-            iterlist.append(iterval)
-        iterlist.reverse()
-    elif prog_dict['execution']['progression'] == None:
-        iterlist.append(iterval)
-    return iterlist
-
-
-def get_uuidlist(data):
-    uuidlist = []
-    for key in data['records'].iterkeys():
-        uuidlist.append(key)
-    return uuidlist
-
-def get_agentlist(uuidlist, data):
-    agentset=set()
-    for uuid in uuidlist:
-        agentname = data['records'][uuid]['agent']
-        agentset.add(agentname)
-    agentlist = list(agentset)
-    agentlist.sort()
-    return agentlist
-
-
-def generate_aggregated_graphs(data, fname):
-    for key in data['scenarios'].iterkeys():
-        time1 = data['scenarios'][key]['execution']['tests'][0]['time']
-        time = range(time1-1)
-        density = (data['scenarios'][key]['deployment']
-                   ['accommodation'][2]['density'])
-        concur_list=getiter(data['scenarios'][key])
-        uuidlist = get_uuidlist(data)
-        title_name = (data['scenarios'][key]['title']).split('/')
-        for concur in concur_list:
-            countlist=[0]*(time1-1)
-            for uuid in uuidlist:
-                if data['records'][uuid]['concurrency'] == concur:
-                    if data['records'][uuid]['status'] == "ok":
-                        for index in range(time1-1):
-                            countlist[index] += ((data['records'][uuid]
-                                                  ['samples'][index][1])/math.pow(10,6))
-            plt.xlabel('Time in seconds')
-            plt.ylabel('Throughput in Mbps')
-            plt.title('Aggregated Throuhput for concurrencies \non node\n{}'.format(
-                data['records'][uuid]['node']),loc='left')
-            plt.title(title_name[10],loc='right')
-            plt.plot(time,countlist, linewidth=1,marker='o',
-                     label="Concurrency:{}".format(concur))
-        plt.grid()
-        plt.legend(loc=9, prop={'size':8}, bbox_to_anchor=(0.5, -0.1),
-                   ncol=concur)
-        plt.savefig(os.path.splitext(fname)[0]+'.png', bbox_inches='tight')
-        print("Generated plot for aggregated throughput for scenario {}".
-              format(title_name[10]))
-        plt.close()
-
-
-def generate_perinstance_graphs(data, fname):
-    uuidlist = get_uuidlist(data)
-    agentlist = get_agentlist(uuidlist, data)
-    for key in data['scenarios'].iterkeys():
-        time1 = data['scenarios'][key]['execution']['tests'][0]['time']
-        time = range(time1-1)
-        density=(data['scenarios'][key]['deployment']
-                 ['accommodation'][2]['density'])
-        concur_list=getiter(data['scenarios'][key])
-        title_name = (data['scenarios'][key]['title']).split('/')
-        for agent in agentlist:
-            resultlist=[0]*(time1-1)
-            for concur in concur_list:
-                for uuid in uuidlist:
-                    if (data['records'][uuid]['concurrency'] == concur and
-                            data['records'][uuid]['agent'] == agent):
-                        for index in range(time1-1):
-                            if data['records'][uuid]['status'] == "ok":
-                                resultlist[index] = ((data['records'][uuid]
-                                                      ['samples'][index][1])/math.pow(10,6))
-                plt.xlabel('Time in seconds')
-                plt.ylabel('Throughput in Mbps')
-                plt.title('Throughput for {} \non node \n{}'.format(
-                    agent, data['records'][uuid]['node']), loc='left')
-                plt.title(title_name[10],loc='right')
-                plt.plot(time,resultlist, linewidth=1,marker='o',
-                         label="Concurrency:{}".format(concur))
-            plt.grid()
-            plt.legend(loc=9, prop={'size':8}, bbox_to_anchor=(0.5, -0.1),
-                       ncol=concur )
-            plt.savefig(os.path.splitext(fname)[0]+ '_' + agent + '.png', bbox_inches='tight')
-            print("Generated plot for agent {} in scenario {}".format(
-                agent, title_name[10]))
-            plt.close()
-
-def main():
-    filelist=[]
-    parser = argparse.ArgumentParser(
-        description='Processes shaker results into aggregated graphs')
-    parser.add_argument('result_dir',
-                        help='Name of the directory in which results are stored'
-                        ' Example: 20160226-101636')
-    args = parser.parse_args()
-    shakerplot_path = os.path.dirname(os.path.realpath(__file__))
-    results_path = os.path.join(shakerplot_path.replace('graphing',
-                                                        'results'), args.result_dir)
-    if not os.path.isdir(results_path):
-        print "ERROR Directory doesn't exist"
-        exit(1)
-    for root, dirs, files in os.walk(results_path, topdown=False):
-        for name in files:
-            if name.endswith('.json'):
-                filelist.append(os.path.join(root, name))
-    for fname in filelist:
-        with open(fname) as data_file:
-            data = json.load(data_file)
-        generate_aggregated_graphs(data, fname)
-        generate_perinstance_graphs(data, fname)

-if __name__ == "__main__":
-    sys.exit(main())
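Worth noting before it disappears: getiter() derives the concurrency steps from the shaker scenario. A worked example under an assumed deployment (density 2, 4 compute nodes, pair/single_room accommodation, quadratic progression):

# iterval = density * compute = 2 * 4 = 8, halved to 4 because the
# accommodation is pair + single_room; the quadratic branch then halves
# repeatedly and reverses the list:
iterval = (2 * 4) // 2          # 4
iterlist = [iterval]            # [4]
while iterval > 1:
    iterval //= 2
    iterlist.append(iterval)    # becomes [4, 2, 1]
iterlist.reverse()
print(iterlist)                 # [1, 2, 4] - the concurrency steps graphed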
@@ -1,5 +1,19 @@
-from Tools import *
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from Tools import Tools
+import os
+import logging
+import shutil
 
 class Connmon:
 
@@ -1,13 +1,26 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from elasticsearch import Elasticsearch
 import logging
 import json
-import pprint
-import numpy
 import datetime
 
 
 class Elastic:
 
     """
     """
 
     def __init__(self, config, tool="browbeat"):
         self.config = config
         self.logger = logging.getLogger('browbeat.Elastic')
@@ -21,6 +34,7 @@ class Elastic:
 
     """
     """
+
     def load_json(self, result):
         json_data = None
         self.logger.info("Loading JSON")
@@ -29,19 +43,21 @@ class Elastic:
 
     """
     """
+
     def load_json_file(self, result):
         json_data = None
         self.logger.info("Loading JSON file : {}".format(result))
         try:
             with open(result) as jdata:
                 json_data = json.load(jdata)
-        except (IOError, OSError) as e:
+        except (IOError, OSError):
             self.logger.error("Error loading JSON file : {}".format(result))
             return False
         return json_data
 
     """
     """
+
     def combine_metadata(self, result):
         if len(self.config['elasticsearch']['metadata_files']) > 0:
             meta = self.config['elasticsearch']['metadata_files']
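load_json_file() signals failure by returning False rather than raising, so callers must check the return value. A hypothetical caller sketch ('config' stands in for the parsed browbeat configuration, and the path is illustrative):

elastic = Elastic(config)
json_data = elastic.load_json_file('results/run-1/rally.json')
if json_data is False:
    pass  # the error was already logged inside Elastic; skip this result
else:
    elastic.index_result(json_data)   # index_result() appears later in this diff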
@@ -49,13 +65,15 @@ class Elastic:
                 try:
                     with open(_meta['file']) as jdata:
                         result[_meta['name']] = json.load(jdata)
-                except (IOError, OSError) as e:
-                    self.logger.error("Error loading Metadata file : {}".format(_meta['file']))
+                except (IOError, OSError):
+                    self.logger.error(
+                        "Error loading Metadata file : {}".format(_meta['file']))
                     return False
         return result
 
     """
     """
+
     def index_result(self, result, _type='result', _id=None):
         return self.es.index(index=self.index,
                              id=_id,
@@ -1,3 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import logging
 import subprocess
 
@@ -15,8 +27,10 @@ class Grafana:
         self.grafana_url = {}
 
     def extra_vars(self, from_ts, to_ts, result_dir, test_name):
-        extra_vars = 'grafana_ip={} '.format(self.config['grafana']['grafana_ip'])
-        extra_vars += 'grafana_port={} '.format(self.config['grafana']['grafana_port'])
+        extra_vars = 'grafana_ip={} '.format(
+            self.config['grafana']['grafana_ip'])
+        extra_vars += 'grafana_port={} '.format(
+            self.config['grafana']['grafana_port'])
         extra_vars += 'from={} '.format(from_ts)
         extra_vars += 'to={} '.format(to_ts)
         extra_vars += 'results_dir={}/{} '.format(result_dir, test_name)
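The string extra_vars() assembles is handed to ansible-playbook via -e as one space-separated key=value list. A sketch with made-up values, limited to the fields visible in this hunk:

# All values below are illustrative; epoch-millisecond timestamps assumed.
config = {'grafana': {'grafana_ip': '10.0.0.5', 'grafana_port': 3000}}
extra_vars = 'grafana_ip={} '.format(config['grafana']['grafana_ip'])
extra_vars += 'grafana_port={} '.format(config['grafana']['grafana_port'])
extra_vars += 'from={} '.format(1456500000000)
extra_vars += 'to={} '.format(1456503600000)
extra_vars += 'results_dir={}/{} '.format('results/run-1', 'test')
print(extra_vars)
# grafana_ip=10.0.0.5 grafana_port=3000 from=1456500000000 to=1456503600000 results_dir=results/run-1/test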
@@ -36,11 +50,18 @@ class Grafana:
             self.grafana_ip, self.grafana_port)
         for dashboard in self.config['grafana']['dashboards']:
             self.grafana_url[dashboard] = '{}{}?from={}&to={}&var-Cloud={}'.format(
-                url, dashboard, from_ts, to_ts, self.cloud_name)
+                url,
+                dashboard,
+                from_ts,
+                to_ts,
+                self.cloud_name)
 
     def print_dashboard_url(self, test_name):
         for dashboard in self.grafana_url:
-            self.logger.info('{} - Grafana Dashboard {} URL: {}'.format(test_name, dashboard,
+            self.logger.info(
+                '{} - Grafana Dashboard {} URL: {}'.format(
+                    test_name,
+                    dashboard,
                     self.grafana_url[dashboard]))
 
     def log_snapshot_playbook_cmd(self, from_ts, to_ts, result_dir, test_name):
@@ -56,8 +77,17 @@ class Grafana:
         if self.config['grafana']['snapshot']['enabled']:
             extra_vars = self.extra_vars(
                 from_ts, to_ts, result_dir, test_name)
-            subprocess_cmd = ['ansible-playbook', '-i', self.hosts_file, self.playbook, '-e',
+            subprocess_cmd = [
+                'ansible-playbook',
+                '-i',
+                self.hosts_file,
+                self.playbook,
+                '-e',
                 '{}'.format(extra_vars)]
             snapshot_log = open('{}/snapshot.log'.format(result_dir), 'a+')
-            self.logger.info('Running ansible to create snapshots for: {}'.format(test_name))
-            subprocess.Popen(subprocess_cmd, stdout=snapshot_log, stderr=subprocess.STDOUT)
+            self.logger.info(
+                'Running ansible to create snapshots for: {}'.format(test_name))
+            subprocess.Popen(
+                subprocess_cmd,
+                stdout=snapshot_log,
+                stderr=subprocess.STDOUT)
@@ -1,9 +1,21 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import json
 import re
-import logging
 import sys
 import os
 
 
 class Metadata:
 
     def __init__(self):
@@ -17,7 +29,6 @@ class Metadata:
         except IOError:
             print("Machine facts json is missing")
             exit(1)
-        regex = re.compile(r"}\n{")
         new_json = re.sub(r"}\n{", r"},\n{", json_str, re.M)
         convert = "{ \"machines\": [" + new_json + "] }"
         sys_data = {}
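The re.sub here is the interesting line: ansible fact gathering writes one JSON object per machine back to back, and the substitution stitches them into a single JSON document. A worked example with a made-up two-machine dump:

import json
import re

json_str = '{\n"host": "a"\n}\n{\n"host": "b"\n}'   # hypothetical fact dump
new_json = re.sub(r"}\n{", r"},\n{", json_str, re.M)
convert = "{ \"machines\": [" + new_json + "] }"
print(json.loads(convert))   # {'machines': [{'host': 'a'}, {'host': 'b'}]}
# Caution: re.M lands in re.sub's positional count argument here (count=8),
# mirroring the original call; it only substitutes the first eight object
# boundaries in a dump.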
@@ -32,8 +43,10 @@ class Metadata:
             hardware_dict = {}
             hardware_dict['label'] = item['inventory_hostname']
             hardware_dict['kernel'] = item['ansible_kernel']
-            hardware_dict['total_mem'] = item['ansible_memory_mb']['real']['total']
-            hardware_dict['total_logical_cores'] = item['facter_processorcount']
+            hardware_dict['total_mem'] = item[
+                'ansible_memory_mb']['real']['total']
+            hardware_dict['total_logical_cores'] = item[
+                'facter_processorcount']
             hardware_dict['os_name'] = item['ansible_distribution'] + \
                 item['ansible_distribution_version']
             hardware_dict['ip'] = item['ansible_default_ipv4']['address']
@@ -62,7 +75,8 @@ class Metadata:
             if 'openstack' not in soft_all_dict['software_details']:
                 soft_all_dict['software_details']['openstack'] = {}
             if 'config' not in soft_all_dict['software_details']['openstack']:
-                soft_all_dict['software_details']['openstack']['config'] = []
+                soft_all_dict['software_details'][
+                    'openstack']['config'] = []
             software_dict = {}
             software_dict['node_name'] = item['inventory_hostname']
             for soft in item:
@@ -73,7 +87,8 @@ class Metadata:
                     software_dict[service_name] = {}
                 if service_name in soft:
                     software_dict[service_name][soft] = item[soft]
-            soft_all_dict['software_details']['openstack']['config'].append(software_dict)
+            soft_all_dict['software_details']['openstack'][
+                'config'].append(software_dict)
         return soft_all_dict
 
     def write_metadata_file(self, data, filename):
@@ -86,11 +101,14 @@ def main():
     metadata = Metadata()
     sysdata = metadata.load_file(_filename)
     env_data = metadata.get_environment_metadata(sysdata)
-    metadata.write_metadata_file(env_data, os.path.join(sys.argv[1], 'environment-metadata.json'))
+    metadata.write_metadata_file(
+        env_data, os.path.join(sys.argv[1], 'environment-metadata.json'))
     hardware_data = metadata.get_hardware_metadata(sysdata)
-    metadata.write_metadata_file(hardware_data, os.path.join(sys.argv[1], 'hardware-metadata.json'))
+    metadata.write_metadata_file(
+        hardware_data, os.path.join(sys.argv[1], 'hardware-metadata.json'))
     software_data = metadata.get_software_metadata(sysdata)
-    metadata.write_metadata_file(software_data, os.path.join(sys.argv[1], 'software-metadata.json'))
+    metadata.write_metadata_file(
+        software_data, os.path.join(sys.argv[1], 'software-metadata.json'))
 
 if __name__ == '__main__':
     sys.exit(main())
@@ -1,3 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from Connmon import Connmon
 from Grafana import Grafana
 from Tools import Tools
@@ -27,9 +39,12 @@ class PerfKit(WorkloadBase):
     def _log_details(self):
         self.logger.info(
             "Current number of Perkit scenarios executed: {}".format(self.scenario_count))
-        self.logger.info("Current number of Perfkit test(s) executed: {}".format(self.test_count))
-        self.logger.info("Current number of Perfkit test(s) succeeded: {}".format(self.pass_count))
-        self.logger.info("Current number of Perfkit test failures: {}".format(self.error_count))
+        self.logger.info(
+            "Current number of Perfkit test(s) executed: {}".format(self.test_count))
+        self.logger.info(
+            "Current number of Perfkit test(s) succeeded: {}".format(self.pass_count))
+        self.logger.info(
+            "Current number of Perfkit test failures: {}".format(self.error_count))
 
     def update_tests(self):
         self.test_count += 1
@@ -79,7 +94,8 @@ class PerfKit(WorkloadBase):
             from_ts = time.time()
             if 'sleep_before' in self.config['perfkit']:
                 time.sleep(self.config['perfkit']['sleep_before'])
-            process = subprocess.Popen(cmd, shell=True, stdout=stdout_file, stderr=stderr_file)
+            process = subprocess.Popen(
+                cmd, shell=True, stdout=stdout_file, stderr=stderr_file)
             process.communicate()
             if 'sleep_after' in self.config['perfkit']:
                 time.sleep(self.config['perfkit']['sleep_after'])
@@ -91,8 +107,9 @@ class PerfKit(WorkloadBase):
         try:
             self.connmon.move_connmon_results(result_dir, test_name)
             self.connmon.connmon_graphs(result_dir, test_name)
-        except:
-            self.logger.error("Connmon Result data missing, Connmon never started")
+        except Exception:
+            self.logger.error(
+                "Connmon Result data missing, Connmon never started")
 
         workload = self.__class__.__name__
         new_test_name = test_name.split('-')
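The change from a bare except: to except Exception: is more than style. A bare except also traps SystemExit and KeyboardInterrupt, so an operator's Ctrl-C could vanish inside the connmon cleanup. A minimal illustration:

import sys

def cleanup(action):
    try:
        action()
    except:            # old form: catches *everything*, including SystemExit
        return 'swallowed'

print(cleanup(sys.exit))   # 'swallowed' - the interpreter never exits

# With `except Exception:` (the new form), sys.exit() raises SystemExit,
# which is not an Exception subclass, so it propagates as intended.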
@@ -106,17 +123,20 @@ class PerfKit(WorkloadBase):
                 self.update_pass_tests()
                 self.update_total_pass_tests()
                 self.get_time_dict(
-                    to_ts, from_ts, benchmark_config['benchmarks'], new_test_name,
+                    to_ts, from_ts, benchmark_config[
+                        'benchmarks'], new_test_name,
                     workload, "pass")
             else:
                 self.logger.error("Benchmark failed.")
                 self.update_fail_tests()
                 self.update_total_fail_tests()
                 self.get_time_dict(
-                    to_ts, from_ts, benchmark_config['benchmarks'], new_test_name,
+                    to_ts, from_ts, benchmark_config[
+                        'benchmarks'], new_test_name,
                     workload, "fail")
         except IOError:
-            self.logger.error("File missing: {}/pkb.stderr.log".format(result_dir))
+            self.logger.error(
+                "File missing: {}/pkb.stderr.log".format(result_dir))
 
         # Copy all results
         for perfkit_file in glob.glob("/tmp/perfkitbenchmarker/run_browbeat/*"):
@@ -129,7 +149,8 @@ class PerfKit(WorkloadBase):
                 {'from_ts': int(from_ts * 1000),
                  'to_ts': int(to_ts * 1000)})
             self.grafana.print_dashboard_url(test_name)
-            self.grafana.log_snapshot_playbook_cmd(from_ts, to_ts, result_dir, test_name)
+            self.grafana.log_snapshot_playbook_cmd(
+                from_ts, to_ts, result_dir, test_name)
             self.grafana.run_playbook(from_ts, to_ts, result_dir, test_name)
 
     def start_workloads(self):
@@ -148,7 +169,8 @@ class PerfKit(WorkloadBase):
             self.update_total_tests()
             result_dir = self.tools.create_results_dir(
                 self.config['browbeat']['results'], time_stamp, benchmark['name'], run)
-            test_name = "{}-{}-{}".format(time_stamp, benchmark['name'], run)
+            test_name = "{}-{}-{}".format(time_stamp,
+                                          benchmark['name'], run)
             workload = self.__class__.__name__
             self.workload_logger(result_dir, workload)
             self.run_benchmark(benchmark, result_dir, test_name)
63 lib/Rally.py
@@ -1,20 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from Connmon import Connmon
 from Tools import Tools
 from collections import OrderedDict
 from Grafana import Grafana
 from WorkloadBase import WorkloadBase
 from Elastic import Elastic
-import pprint
-import numpy
 import datetime
 import glob
 import logging
 import os
 import shutil
-import subprocess
 import time
 import re
 
 
 class Rally(WorkloadBase):
 
     def __init__(self, config, hosts=None):
@@ -61,7 +71,8 @@ class Rally(WorkloadBase):
         to_ts = int(time.time() * 1000)
         self.grafana.create_grafana_urls({'from_ts': from_ts, 'to_ts': to_ts})
         self.grafana.print_dashboard_url(test_name)
-        self.grafana.log_snapshot_playbook_cmd(from_ts, to_ts, result_dir, test_name)
+        self.grafana.log_snapshot_playbook_cmd(
+            from_ts, to_ts, result_dir, test_name)
         self.grafana.run_playbook(from_ts, to_ts, result_dir, test_name)
         return (from_time, to_time)
@@ -86,9 +97,12 @@ class Rally(WorkloadBase):
         self.logger.info(
             "Current number of Rally scenarios executed:{}".format(
                 self.scenario_count))
-        self.logger.info("Current number of Rally tests executed:{}".format(self.test_count))
-        self.logger.info("Current number of Rally tests passed:{}".format(self.pass_count))
-        self.logger.info("Current number of Rally test failures:{}".format(self.error_count))
+        self.logger.info(
+            "Current number of Rally tests executed:{}".format(self.test_count))
+        self.logger.info(
+            "Current number of Rally tests passed:{}".format(self.pass_count))
+        self.logger.info(
+            "Current number of Rally test failures:{}".format(self.error_count))
 
     def gen_scenario_html(self, task_ids, test_name):
         all_task_ids = ' '.join(task_ids)
@ -113,8 +127,6 @@ class Rally(WorkloadBase):
|
|||||||
|
|
||||||
def json_result(self, task_id, scenario_name):
|
def json_result(self, task_id, scenario_name):
|
||||||
rally_data = {}
|
rally_data = {}
|
||||||
rally_errors = []
|
|
||||||
rally_sla = []
|
|
||||||
self.logger.info("Loadding Task_ID {} JSON".format(task_id))
|
self.logger.info("Loadding Task_ID {} JSON".format(task_id))
|
||||||
rally_json = self.elastic.load_json(self.gen_scenario_json(task_id))
|
rally_json = self.elastic.load_json(self.gen_scenario_json(task_id))
|
||||||
es_ts = datetime.datetime.utcnow()
|
es_ts = datetime.datetime.utcnow()
|
||||||
@ -137,7 +149,6 @@ class Rally(WorkloadBase):
|
|||||||
'scenario': scenario_name,
|
'scenario': scenario_name,
|
||||||
}
|
}
|
||||||
self.elastic.index_result(error, 'config')
|
self.elastic.index_result(error, 'config')
|
||||||
rally_doc = []
|
|
||||||
for workload in rally_data:
|
for workload in rally_data:
|
||||||
if not type(rally_data[workload]) is dict:
|
if not type(rally_data[workload]) is dict:
|
||||||
iteration = 1
|
iteration = 1
|
||||||
@ -148,12 +159,6 @@ class Rally(WorkloadBase):
|
|||||||
rally_stats = {'result': task_id,
|
rally_stats = {'result': task_id,
|
||||||
'action': workload_name,
|
'action': workload_name,
|
||||||
'iteration': iteration,
|
'iteration': iteration,
|
||||||
#'90th':numpy.percentile(rally_data[workload], 90),
|
|
||||||
#'95th':numpy.percentile(rally_data[workload], 95),
|
|
||||||
#'max':numpy.max(rally_data[workload]),
|
|
||||||
#'min':numpy.min(rally_data[workload]),
|
|
||||||
#'average':numpy.average(rally_data[workload]),
|
|
||||||
#'median':numpy.median(rally_data[workload]),
|
|
||||||
'timestamp': es_ts,
|
'timestamp': es_ts,
|
||||||
'scenario': scenario_name,
|
'scenario': scenario_name,
|
||||||
'rally_setup': rally_json[0]['key'],
|
'rally_setup': rally_json[0]['key'],
|
||||||
@ -202,7 +207,8 @@ class Rally(WorkloadBase):
|
|||||||
self.config['browbeat'][
|
self.config['browbeat'][
|
||||||
'results'], dir_ts, benchmark['name'],
|
'results'], dir_ts, benchmark['name'],
|
||||||
scenario_name)
|
scenario_name)
|
||||||
self.logger.debug("Created result directory: {}".format(result_dir))
|
self.logger.debug(
|
||||||
|
"Created result directory: {}".format(result_dir))
|
||||||
workload = self.__class__.__name__
|
workload = self.__class__.__name__
|
||||||
self.workload_logger(result_dir, workload)
|
self.workload_logger(result_dir, workload)
|
||||||
|
|
||||||
@ -244,12 +250,13 @@ class Rally(WorkloadBase):
|
|||||||
try:
|
try:
|
||||||
self.connmon.move_connmon_results(
|
self.connmon.move_connmon_results(
|
||||||
result_dir, test_name)
|
result_dir, test_name)
|
||||||
except:
|
except Exception:
|
||||||
self.logger.error(
|
self.logger.error(
|
||||||
"Connmon Result data missing, \
|
"Connmon Result data missing, \
|
||||||
Connmon never started")
|
Connmon never started")
|
||||||
return False
|
return False
|
||||||
self.connmon.connmon_graphs(result_dir, test_name)
|
self.connmon.connmon_graphs(
|
||||||
|
result_dir, test_name)
|
||||||
new_test_name = test_name.split('-')
|
new_test_name = test_name.split('-')
|
||||||
new_test_name = new_test_name[3:]
|
new_test_name = new_test_name[3:]
|
||||||
new_test_name = "-".join(new_test_name)
|
new_test_name = "-".join(new_test_name)
|
||||||
@ -261,23 +268,29 @@ class Rally(WorkloadBase):
|
|||||||
self.logger.info(
|
self.logger.info(
|
||||||
"Generating Rally HTML for task_id : {}".
|
"Generating Rally HTML for task_id : {}".
|
||||||
format(task_id))
|
format(task_id))
|
||||||
self.gen_scenario_html([task_id], test_name)
|
self.gen_scenario_html(
|
||||||
self.gen_scenario_json_file(task_id, test_name)
|
[task_id], test_name)
|
||||||
|
self.gen_scenario_json_file(
|
||||||
|
task_id, test_name)
|
||||||
results[run].append(task_id)
|
results[run].append(task_id)
|
||||||
self.update_pass_tests()
|
self.update_pass_tests()
|
||||||
self.update_total_pass_tests()
|
self.update_total_pass_tests()
|
||||||
self.get_time_dict(
|
self.get_time_dict(
|
||||||
to_time, from_time, benchmark['name'], new_test_name,
|
to_time, from_time, benchmark[
|
||||||
|
'name'], new_test_name,
|
||||||
workload, "pass")
|
workload, "pass")
|
||||||
if self.config['elasticsearch']['enabled']:
|
if self.config['elasticsearch']['enabled']:
|
||||||
# Start indexing
|
# Start indexing
|
||||||
result_json = self.json_result(task_id,scenario_name)
|
self.json_result(
|
||||||
|
task_id, scenario_name)
|
||||||
else:
|
else:
|
||||||
self.logger.error("Cannot find task_id")
|
self.logger.error(
|
||||||
|
"Cannot find task_id")
|
||||||
self.update_fail_tests()
|
self.update_fail_tests()
|
||||||
self.update_total_fail_tests()
|
self.update_total_fail_tests()
|
||||||
self.get_time_dict(
|
self.get_time_dict(
|
||||||
to_time, from_time, benchmark['name'], new_test_name,
|
to_time, from_time, benchmark[
|
||||||
|
'name'], new_test_name,
|
||||||
workload, "fail")
|
workload, "fail")
|
||||||
|
|
||||||
for data in glob.glob("./{}*".format(test_name)):
|
for data in glob.glob("./{}*".format(test_name)):
|
||||||
|
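Why the except: to except Exception: change above matters: a bare except also traps KeyboardInterrupt and SystemExit, so interrupting a long Rally run would be misreported as a missing-Connmon error. A minimal sketch of the narrowed handler (function and argument names here are illustrative, not Browbeat's exact code):

import logging

logger = logging.getLogger("browbeat.example")

def move_results_safely(connmon, result_dir, test_name):
    try:
        connmon.move_connmon_results(result_dir, test_name)
    except Exception:  # real errors only; Ctrl-C still propagates
        logger.error("Connmon result data missing, Connmon never started")
        return False
    return True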
lib/Shaker.py
@@ -1,3 +1,15 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from Tools import Tools
 from Grafana import Grafana
 from WorkloadBase import WorkloadBase
@@ -8,6 +20,7 @@ import os
 import json
 import time

+
 class Shaker(WorkloadBase):

     def __init__(self, config):
@@ -29,15 +42,22 @@ class Shaker(WorkloadBase):
         self.logger.info("Shaker image is built, continuing")

     def get_stats(self):
-        self.logger.info("Current number of Shaker tests executed: {}".format(self.test_count))
-        self.logger.info("Current number of Shaker tests passed: {}".format(self.pass_count))
-        self.logger.info("Current number of Shaker tests failed: {}".format(self.error_count))
+        self.logger.info(
+            "Current number of Shaker tests executed: {}".format(self.test_count))
+        self.logger.info(
+            "Current number of Shaker tests passed: {}".format(self.pass_count))
+        self.logger.info(
+            "Current number of Shaker tests failed: {}".format(self.error_count))

     def final_stats(self, total):
-        self.logger.info("Total Shaker scenarios enabled by user: {}".format(total))
-        self.logger.info("Total number of Shaker tests executed: {}".format(self.test_count))
-        self.logger.info("Total number of Shaker tests passed: {}".format(self.pass_count))
-        self.logger.info("Total number of Shaker tests failed: {}".format(self.error_count))
+        self.logger.info(
+            "Total Shaker scenarios enabled by user: {}".format(total))
+        self.logger.info(
+            "Total number of Shaker tests executed: {}".format(self.test_count))
+        self.logger.info(
+            "Total number of Shaker tests passed: {}".format(self.pass_count))
+        self.logger.info(
+            "Total number of Shaker tests failed: {}".format(self.error_count))

     def update_tests(self):
         self.test_count += 1
@@ -110,7 +130,8 @@ class Shaker(WorkloadBase):
                     error = True
         if error:
             self.logger.error("Failed Test: {}".format(scenario['name']))
-            self.logger.error("saved log to: {}.log".format(os.path.join(result_dir, test_name)))
+            self.logger.error(
+                "saved log to: {}.log".format(os.path.join(result_dir, test_name)))
             self.update_fail_tests()
             self.update_total_fail_tests()
             self.get_time_dict(
@@ -129,7 +150,8 @@ class Shaker(WorkloadBase):
                         test_name +
                         "." +
                         "html")))
-            self.logger.info("saved log to: {}.log".format(os.path.join(result_dir, test_name)))
+            self.logger.info(
+                "saved log to: {}.log".format(os.path.join(result_dir, test_name)))
             self.update_pass_tests()
             self.update_total_pass_tests()
             self.get_time_dict(
@@ -199,11 +221,13 @@ class Shaker(WorkloadBase):
                 self.logger.debug("Set Scenario File: {}".format(
                     scenario['file']))
                 result_dir = self.tools.create_results_dir(
-                    self.config['browbeat']['results'], time_stamp, "shaker",
+                    self.config['browbeat'][
+                        'results'], time_stamp, "shaker",
                     scenario['name'])
                 workload = self.__class__.__name__
                 self.workload_logger(result_dir, workload)
-                time_stamp1 = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
+                time_stamp1 = datetime.datetime.now().strftime(
+                    "%Y%m%d-%H%M%S")
                 test_name = "{}-browbeat-{}-{}".format(time_stamp1,
                                                        "shaker", scenario['name'])
                 self.run_scenario(scenario, result_dir, test_name)
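The test_name convention visible in the Shaker hunk above tags every run with a second-resolution timestamp so repeated runs never collide on disk. A small sketch of the same pattern (the scenario name "l2-dense" is made up for illustration):

import datetime

time_stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
test_name = "{}-browbeat-{}-{}".format(time_stamp, "shaker", "l2-dense")
print(test_name)  # e.g. 20160314-154500-browbeat-shaker-l2-dense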
20 lib/Tools.py
@@ -1,7 +1,19 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import logging
 import os
-import shutil
-from subprocess import Popen, PIPE
+from subprocess import Popen
+from subprocess import PIPE


 class Tools:
@@ -37,7 +49,7 @@ class Tools:
         try:
             os.makedirs("%s/run-%s" % (results_dir, run))
             return "%s/run-%s" % (results_dir, run)
-        except OSError as e:
+        except OSError:
             return False

     # Create directory for results
@@ -48,5 +60,5 @@ class Tools:
             self.logger.debug("{}/{}/{}/{}".format(os.path.dirname(results_dir), timestamp, service,
                                                    scenario))
             return "{}/{}/{}/{}".format(os.path.dirname(results_dir), timestamp, service, scenario)
-        except OSError as e:
+        except OSError:
             return False
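The except OSError as e: to except OSError: change in Tools.py drops a binding that was never used. The surrounding pattern is the usual Python 2 idiom for "create if missing", since os.makedirs there has no exist_ok. A hedged sketch of the same pattern (function name is illustrative):

import os

def create_run_dir(results_dir, run):
    path = "%s/run-%s" % (results_dir, run)
    try:
        os.makedirs(path)  # raises OSError if path exists or is not creatable
        return path
    except OSError:
        return False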
lib/WorkloadBase.py
@@ -1,8 +1,22 @@
-from abc import ABCMeta, abstractmethod
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta
+from abc import abstractmethod
 import os
 import logging
 import yaml
-import collections


 class WorkloadBase:
     __metaclass__ = ABCMeta
     success = 0
@@ -45,7 +59,8 @@ class WorkloadBase:
         file = logging.FileHandler(
             "{}/{}/browbeat-{}-run.log".format(base[0], base[1], workload))
         file.setLevel(logging.DEBUG)
-        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)5s - %(message)s')
+        formatter = logging.Formatter(
+            '%(asctime)s - %(name)s - %(levelname)5s - %(message)s')
         file.setFormatter(formatter)
         self.logger.addHandler(file)
         return None
@@ -68,11 +83,13 @@ class WorkloadBase:
             if not WorkloadBase.browbeat:
                 yaml_file.write("No tests were enabled")
             else:
-                yaml_file.write(yaml.dump(WorkloadBase.browbeat, default_flow_style=False))
+                yaml_file.write(
+                    yaml.dump(WorkloadBase.browbeat, default_flow_style=False))

     @staticmethod
     def print_summary():
-        print("Total scenarios executed:{}".format(WorkloadBase.total_scenarios))
+        print("Total scenarios executed:{}".format(
+            WorkloadBase.total_scenarios))
         print("Total tests executed:{}".format(WorkloadBase.total_tests))
         print("Total tests passed:{}".format(WorkloadBase.success))
         print("Total tests failed:{}".format(WorkloadBase.failure))
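workload_logger above attaches a per-workload DEBUG file handler, so each result directory carries its own browbeat-<workload>-run.log alongside the console output. A minimal standalone sketch of that setup (function and path names are illustrative):

import logging

def attach_workload_logger(logger, base_dir, timestamp, workload):
    handler = logging.FileHandler(
        "{}/{}/browbeat-{}-run.log".format(base_dir, timestamp, workload))
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)5s - %(message)s'))
    logger.addHandler(handler)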
@@ -1,233 +1,372 @@
(This hunk reflows the Browbeat configuration schema so that each mapping key and its value land on separate lines; the keys and values themselves carry over unchanged. The schema, shown once in its original one-pair-per-line form:)

name: Browbeat configuration schema
type: map
allowempty: True
mapping:
  browbeat:
    required: True
    type: map
    mapping:
      results:
        type: str
        required: True
      rerun:
        type: int
        required: True

  ansible:
    required: True
    type: map
    allowempty: True
    mapping:
      hosts:
        type: str
      adjust:
        type: map
        mapping:
          keystone_token:
            type: str
          neutron_l3:
            type: str
          nova_db:
            type: str
          workers:
            type: str
      grafana_snapshot:
        type: str
        required: True
      shaker_build:
        type: str

  connmon:
    type: map
    allowempty: True
    mapping:
      enabled:
        type: bool
        required: True

  grafana:
    required: True
    type: map
    allowempty: True
    mapping:
      enabled:
        type: bool
        required: True
      cloud_name:
        type: str
      grafana_ip:
        type: str
        pattern: ^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$
      grafana_port:
        type: int
      dashboards:
        type: seq
        sequence:
          - type: str
      snapshot:
        type: map
        mapping:
          enabled:
            type: bool
            required: True
          snapshot_compute:
            type: bool
            required: True

  perfkit:
    required: False
    type: map
    allowempty: True
    mapping:
      enabled:
        type: bool
        required: True
      sleep_before:
        type: number
        required: True
      sleep_after:
        type: number
        required: True
      venv:
        type: str
        required: True
      default:
        type: map
        required: True
        mapping:
          image:
            type: str
            required: True
          machine_type:
            type: str
            required: True
          os_type:
            type: str
            required: True
            enum: ['rhel', 'debian', 'ubuntu_container', 'windows']
          openstack_image_username:
            type: str
            required: True
          openstack_floating_ip_pool:
            type: str
            required: True
          openstack_network:
            type: str
            required: True
      benchmarks:
        type: seq
        sequence:
          - type: map
            allowempty: True
            mapping:
              name:
                type: str
                required: True
              enabled:
                type: bool
                required: True
              benchmarks:
                type: str
                required: True

  shaker:
    required: False
    allowempty: True
    type: map
    mapping:
      enabled:
        type: bool
        required: True
      server:
        type: str
        required: True
        pattern: ^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$
      port:
        type: int
        required: True
      flavor:
        type: str
        required: True
      join_timeout:
        type: int
        required: True
      sleep_before:
        type: number
        required: True
      sleep_after:
        type: number
        required: True
      venv:
        type: str
        required: True
      shaker_region:
        type: str
        required: true
      scenarios:
        type: seq
        sequence:
          - type: map
            allowempty: True
            mapping:
              name:
                type: str
                required: True
              enabled:
                type: bool
                required: True
              file:
                type: str
                required: True

  rally:
    required: False
    type: map
    allowempty: True
    mapping:
      enabled:
        type: bool
        required: True
      sleep_before:
        type: number
        required: True
      sleep_after:
        type: number
        required: True
      venv:
        type: str
        required: True
      benchmarks:
        type: seq
        required: True
        sequence:
          - type: map
            mapping:
              name:
                type: str
                required: True
              enabled:
                required: True
                type: bool
              concurrency:
                type: seq
                required: True
                sequence:
                  - type: int
              times:
                type: int
                required: True
              scenarios:
                type: seq
                sequence:
                  - type: map
                    allowempty: True
                    mapping:
                      name:
                        type: str
                        required: True
                      enabled:
                        type: bool
                        required: True
                      file:
                        type: str
                        required: True
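This schema is consumed through pykwalify (listed in requirements.txt below). Validation is a few lines; the file names here are illustrative, and Browbeat's own entry point may wire it differently:

from pykwalify.core import Core

checker = Core(source_file="browbeat-config.yaml",
               schema_files=["browbeat-schema.yaml"])
checker.validate(raise_exception=True)  # raises SchemaError on a bad config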
(Rally plugin file defining NeutronBootFipPingPlugin)
@@ -1,12 +1,23 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from rally.task import atomic
 from rally.task import scenario
-from rally.plugins.openstack.scenarios.nova import utils as nova_utils
 from rally.plugins.openstack.scenarios.neutron import utils as neutron_utils
 from rally.plugins.openstack.scenarios.vm import utils as vm_utils
 from rally.task import types
-from rally.task import utils as task_utils
 from rally.task import validation


 class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,
                                vm_utils.VMScenario,
                                scenario.Scenario):
@@ -19,6 +30,7 @@ class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,
     # Ping
     # Cleanup
     #
+
     @types.set(image=types.ImageResourceType,
                flavor=types.FlavorResourceType)
     @validation.image_valid_on_flavor("flavor", "image")
@@ -28,7 +40,7 @@ class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,
     def create_network_nova_boot_ping(self, image, flavor, ext_net, floating=False, router=None,
                                       network_create_args=None, subnet_create_args=None,
                                       **kwargs):
-        if router == None:
+        if router is None:
             router = self._create_router({}, ext_net)

         network = self._create_network(network_create_args or {})
@@ -37,7 +49,8 @@ class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,
         kwargs["nics"] = [{'net-id': network['network']['id']}]
         _address = None
         if floating:
-            _guest = self._boot_server_with_fip(image, flavor,True,ext_net, **kwargs)
+            _guest = self._boot_server_with_fip(
+                image, flavor, True, ext_net, **kwargs)
             _address = _guest[1]['ip']
         else:
             self._boot_server(image, flavor, **kwargs)
@@ -46,9 +59,6 @@ class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,
         if _address:
             self._wait_for_ping(_address)
-
-
-

     @atomic.action_timer("neutronPlugin.create_router")
     def _create_router(self, router_create_args, external_gw=False):
         """Create neutron router.
@@ -80,4 +90,3 @@ class NeutronBootFipPingPlugin(neutron_utils.NeutronScenario,

         return self.clients("neutron").create_router(
             {"router": router_create_args})
-
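The == None to is None fix is more than style (flake8 E711): __eq__ can be overridden, so equality against None is not a reliable null check, while identity is. A contrived example:

class AlwaysEqual(object):
    def __eq__(self, other):
        return True

obj = AlwaysEqual()
print(obj == None)  # True  -- __eq__ lies
print(obj is None)  # False -- identity cannot be faked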
(Rally plugin file defining a network-create-and-boot NeutronPlugin scenario)
@@ -1,14 +1,26 @@
-from rally.task import atomic
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from rally.task import scenario
 from rally.plugins.openstack.scenarios.nova import utils as nova_utils
 from rally.plugins.openstack.scenarios.neutron import utils as neutron_utils
 from rally.task import types
-from rally.task import utils as task_utils
 from rally.task import validation


 class NeutronPlugin(neutron_utils.NeutronScenario,
                     nova_utils.NovaScenario,
                     scenario.Scenario):

     @types.set(image=types.ImageResourceType,
                flavor=types.FlavorResourceType)
     @validation.image_valid_on_flavor("flavor", "image")
@@ -19,7 +31,7 @@ class NeutronPlugin(neutron_utils.NeutronScenario,
         nets = []
         for net in range(1, num_networks):
             network = self._create_network(network_create_args or {})
-            subnet = self._create_subnet(network, subnet_create_args or {})
+            self._create_subnet(network, subnet_create_args or {})
             nets.append(network)

         kwargs["nics"] = []
|
|||||||
from rally.task import atomic
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
from rally.task import scenario
|
from rally.task import scenario
|
||||||
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
|
|
||||||
from rally.plugins.openstack.scenarios.neutron import utils as neutron_utils
|
from rally.plugins.openstack.scenarios.neutron import utils as neutron_utils
|
||||||
from rally.task import types
|
from rally.task import types
|
||||||
from rally.task import utils as task_utils
|
|
||||||
from rally.task import validation
|
from rally.task import validation
|
||||||
|
|
||||||
class NeutronPlugin(neutron_utils.NeutronScenario,
|
class NeutronPlugin(neutron_utils.NeutronScenario,
|
||||||
|
requirements.txt
@@ -1,5 +1,4 @@
 ansible
-matplotlib
 python-dateutil==2.4.2
 pykwalify
 elasticsearch
47 setup.cfg
@@ -1,3 +1,44 @@
-[pep8]
-ignore = E226,E302,E41,E111,E231,E203
-max-line-length = 100
+[metadata]
+name = browbeat
+summary = OpenStack Performance Tooling
+author = OpenStack
+author-email = openstack-dev@lists.openstack.org
+home-page = http://www.browbeatproject.org/
+classifier =
+    Environment :: OpenStack
+    Intended Audience :: Information Technology
+    Intended Audience :: System Administrators
+    License :: OSI Approved :: Apache Software License
+    Operating System :: POSIX :: Linux
+    Programming Language :: Python
+    Programming Language :: Python :: 2
+    Programming Language :: Python :: 2.7
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.3
+    Programming Language :: Python :: 3.4
+
+#[files]
+#packages =
+#    browbeat
+
+#[build_sphinx]
+#source-dir = doc/source
+#build-dir = doc/build
+#all_files = 1
+
+#[upload_sphinx]
+#upload-dir = doc/build/html
+
+#[compile_catalog]
+#directory = browbeat/locale
+#domain = browbeat
+
+#[update_catalog]
+#domain = browbeat
+#output_dir = browbeat/locale
+#input_file = browbeat/locale/browbeat.pot
+
+#[extract_messages]
+#keywords = _ gettext ngettext l_ lazy_gettext
+#mapping_file = babel.cfg
+#output_file = browbeat/locale/browbeat.pot
17 setup.py (new file)
@@ -0,0 +1,17 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import setuptools
+
+setuptools.setup(
+    setup_requires=['pbr'],
+    pbr=True)
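With pbr, setup.py stays a stub: the package name, summary, and classifiers come from setup.cfg above, and the version is derived from git tags at build time. If the package is installed, the version can be read back like this (assuming the dist name "browbeat"):

from pbr.version import VersionInfo

print(VersionInfo('browbeat').version_string())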
14 test-requirements.txt (new file)
@@ -0,0 +1,14 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+
+hacking<0.11,>=0.10.0
+
+coverage>=3.6
+python-subunit>=0.0.18
+sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
+oslosphinx>=2.5.0 # Apache-2.0
+oslotest>=1.10.0 # Apache-2.0
+testrepository>=0.0.18
+testscenarios>=0.4
+testtools>=1.4.0
62 tox.ini (new file)
@@ -0,0 +1,62 @@
+[tox]
+minversion = 2.0
+#envlist = py34-constraints,py27-constraints,pypy-constraints,pep8-constraints
+envlist = py27-constraints,pypy-constraints,pep8-constraints
+skipsdist = True
+
+[testenv]
+usedevelop = True
+install_command =
+    constraints: {[testenv:common-constraints]install_command}
+    pip install -U {opts} {packages}
+setenv =
+    VIRTUAL_ENV={envdir}
+deps = -r{toxinidir}/test-requirements.txt
+commands = python setup.py test
+#commands = python setup.py test --slowest --testr-args='{posargs}'
+
+[testenv:common-constraints]
+install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
+
+[testenv:pep8]
+commands = flake8 {posargs} --exclude=ansible
+
+[testenv:pep8-constraints]
+install_command = {[testenv:common-constraints]install_command}
+commands = flake8 {posargs}
+
+[testenv:venv]
+commands = {posargs}
+
+[testenv:venv-constraints]
+install_command = {[testenv:common-constraints]install_command}
+commands = {posargs}
+
+[testenv:cover]
+commands = python setup.py test --coverage --testr-args='{posargs}'
+
+[testenv:cover-constraints]
+install_command = {[testenv:common-constraints]install_command}
+commands = python setup.py test --coverage --testr-args='{posargs}'
+
+[testenv:docs]
+commands = python setup.py build_sphinx
+
+[testenv:docs-constraints]
+install_command = {[testenv:common-constraints]install_command}
+commands = python setup.py build_sphinx
+
+[testenv:debug]
+commands = oslo_debug_helper {posargs}
+
+[testenv:debug-constraints]
+install_command = {[testenv:common-constraints]install_command}
+commands = oslo_debug_helper {posargs}
+
+[flake8]
+# E123, E125 skipped as they are invalid PEP-8.
+show-source = True
+ignore = E123,E125,E226,E302,E41,E111,E231,E203,H233,H306,H238,H236,H404,H405
+max-line-length = 100
+builtins = _
+exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,ansible/*