Removed test tools from Refstack

Remove deprecated code (part 3)

This patch removes the code that runs Tempest tests and parses their
results. This functionality has moved to the new refstack-client, which
is hosted in a separate repository:
https://github.com/stackforge/refstack-client

https://storyboard.openstack.org/#!/story/110
Spec: specs/approved/identify-code-to-deprecate.rst

Change-Id: I93f625d6efdd4e763304b242e1e34e1b5187e744
sslypushenko 2014-08-21 22:52:48 +03:00
parent 8e0b50389a
commit 2742e8c861
13 changed files with 0 additions and 1282 deletions


@@ -1,27 +0,0 @@
Using Execute Test
======================================

Used to run Tempest from Refstack: `refstack/tools/execute_test.py`

Command Line
--------------------------------------

* tempest-home: path to Tempest, e.g. /tempest
* callback: URL to post results to, formatted as "${api_addr} ${test_id}"

Environment Variables
--------------------------------------

Instead of a configuration file, you can also start execute_test using environment variables.

Required:

* OS_AUTH_URL : Keystone URL
* OS_REGION_NAME : Region
* OS_USERNAME : Username
* OS_PASSWORD : Password (passed in the clear, do not save it to a file!)
* OS_TENANT_NAME : Tenant name or ID

Optional:

* test_id : system generated, may be overridden
* api_addr
* TEMPEST_IMAGE_REF : name of the image used for testing. Defaults to "cirros"
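For reference, a minimal sketch of how these environment variables end up in the generated Tempest identity configuration. It mirrors the script's import_config_from_env() method shown later in this commit; the helper name itself is hypothetical.

import os

# Hypothetical helper mirroring execute_test.py's import_config_from_env();
# the variable names match the list above.
def config_from_env():
    if not os.environ.get("OS_AUTH_URL"):
        raise SystemExit("Missing env variables; did you source your openrc?")
    return {
        "identity": {
            "uri": os.environ["OS_AUTH_URL"],
            "username": os.environ.get("OS_USERNAME"),
            "password": os.environ.get("OS_PASSWORD"),
            "tenant_name": os.environ.get("OS_TENANT_NAME"),
        },
        # TEMPEST_IMAGE_REF falls back to the default "cirros" image
        "compute": {"image_ref": os.environ.get("TEMPEST_IMAGE_REF",
                                                "cirros")},
    }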


@@ -1,120 +0,0 @@
# Copyright 2014 Piston Cloud Computing, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import gear
from gear import GearmanError, TimeoutError
import json
import logging
import time
from uuid import uuid4


class RefstackGearmanClient(gear.Client):

    def __init__(self, refstack_gearman):
        """Init wrapper; stores the passed-in refstack_gearman object."""
        super(RefstackGearmanClient, self).__init__()
        self.__refstack_gearman = refstack_gearman


class Gearman(object):

    log = logging.getLogger("refstack.Gearman")

    def __init__(self, config):
        """Set up the connection and client object."""
        self.config = config
        self.meta_jobs = {}  # A list of meta-jobs like stop or describe

        # Function-registration cache used by is_job_registered(); the
        # negative TTL (in seconds) is an assumed default, added here
        # because is_job_registered() relies on these attributes.
        self.function_cache = set()
        self.function_cache_time = 0
        self.negative_function_cache_ttl = 5

        server = config.get('gearman', 'server')
        if config.has_option('gearman', 'port'):
            port = config.get('gearman', 'port')
        else:
            port = 4730

        self.gearman = RefstackGearmanClient(self)
        self.gearman.addServer(server, port)

    def add_job(self, name, params):
        """Add a job to the Gearman queue."""
        self.log.info("starting test run")

        uuid = str(uuid4().hex)
        gearman_job = gear.Job(name, json.dumps(params),
                               unique=uuid)

        if not self.is_job_registered(gearman_job.name):
            self.log.error("Job %s is not registered with Gearman" %
                           gearman_job)
            self.on_job_completed(gearman_job, 'NOT_REGISTERED')
            #return build

        try:
            self.gearman.submitJob(gearman_job)
        except GearmanError:
            self.log.exception("Unable to submit job to Gearman")
            self.on_job_completed(gearman_job, 'EXCEPTION')
            #return build

        if not gearman_job.handle:
            self.log.error("No job handle was received for %s after 30 "
                           "seconds; marking as lost." %
                           gearman_job)
            self.on_job_completed(gearman_job, 'NO_HANDLE')

        self.log.debug("Received handle %s for job" % gearman_job.handle)

    def on_job_completed(self, job, result=None):
        """Called when a test is completed."""
        if job.unique in self.meta_jobs:
            del self.meta_jobs[job.unique]
        return result

    def is_job_registered(self, name=None):
        """Check whether the job is registered with Gearman."""
        if not name:
            return False

        if self.function_cache_time:
            for connection in self.gearman.active_connections:
                if connection.connect_time > self.function_cache_time:
                    self.function_cache = set()
                    self.function_cache_time = 0
                    break

        if name in self.function_cache:
            self.log.debug("Function %s is registered" % name)
            return True

        if ((time.time() - self.function_cache_time) <
                self.negative_function_cache_ttl):
            self.log.debug("Function %s is not registered "
                           "(negative ttl in effect)" % name)
            return False

        self.function_cache_time = time.time()
        for connection in self.gearman.active_connections:
            try:
                req = gear.StatusAdminRequest()
                connection.sendAdminRequest(req)
            except TimeoutError:
                self.log.exception("Exception while checking functions")
                continue
            for line in req.response.split('\n'):
                parts = [x.strip() for x in line.split()]
                if not parts or parts[0] == '.':
                    continue
                self.function_cache.add(parts[0])

        if name in self.function_cache:
            self.log.debug("Function %s is registered" % name)
            return True

        self.log.debug("Function %s is not registered" % name)
        return False
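For context, a minimal usage sketch of the class above. The import path, job name, and payload are illustrative assumptions; the config object is a ConfigParser with a [gearman] section, as the constructor expects.

import ConfigParser

from refstack.tools.gearman import Gearman  # assumed module path

config = ConfigParser.SafeConfigParser()
config.add_section('gearman')
config.set('gearman', 'server', '127.0.0.1')
config.set('gearman', 'port', '4730')

client = Gearman(config)
# The job name must match a function registered by a Gearman worker;
# 'refstack-run-tempest' and the payload are made-up examples.
client.add_job('refstack-run-tempest', {'test_id': 42})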


@@ -1,76 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from jinja2 import FileSystemLoader, Environment
import os

from refstack.utils import get_current_time


class DockerBuildFile(object):
    '''Build a docker build file with customized parameters from a
    pre-defined docker build file template.
    '''

    test_id = None
    api_server_address = None
    tempest_code_url = None
    confJSON = None

    def build_docker_buildfile(self, output_file_with_path):
        '''Build a docker build file based on a pre-defined template.

        This method assumes that the caller has already initialized all the
        needed parameters for customization.
        '''
        docker_template_dir = os.path.dirname(os.path.abspath(__file__))
        template_file_with_path = os.path.join(docker_template_dir,
                                               "docker_buildfile.template")
        values = {"THE_TIME_STAMP": get_current_time(),
                  "THE_TEST_ID": self.test_id,
                  "THE_API_SERVER_ADDRESS": self.api_server_address,
                  "THE_TEMPEST_CODE_URL": self.tempest_code_url,
                  "THE_CONF_JSON": self.confJSON
                  }
        template_filling(template_file_with_path, output_file_with_path,
                         values)


def template_filling(template_file_with_path,
                     output_file_with_path, value_dict):
    '''Fill values into a template file.'''
    outputText = ""
    if os.path.isfile(template_file_with_path):
        input_dir = os.path.dirname(os.path.abspath(template_file_with_path))
        file_name = os.path.basename(template_file_with_path)
        j2_env = Environment(loader=FileSystemLoader(input_dir),
                             trim_blocks=True)
        template = j2_env.get_template(file_name)
        outputText = template.render(value_dict)

    output_dir = os.path.dirname(os.path.abspath(output_file_with_path))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    with open(output_file_with_path, "wt") as fout:
        fout.write(outputText)
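A short usage sketch of DockerBuildFile. The import path and all values are illustrative assumptions; build_docker_buildfile() renders the docker_buildfile.template that sits next to this module.

from refstack.tools.docker_buildfile import DockerBuildFile  # assumed path

builder = DockerBuildFile()
builder.test_id = 42
builder.api_server_address = "127.0.0.1:8000"
builder.tempest_code_url = \
    "https://github.com/openstack/tempest/archive/stable/havana.zip"
builder.confJSON = '{"identity": {"region": "RegionOne"}}'
# Writes the rendered build file, creating the output directory if needed.
builder.build_docker_buildfile("/tmp/refstack/test_42.docker_file")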


@@ -1,66 +0,0 @@
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This template file is designed to be used with the RefStack app server.
# The RefStack app server will generate different docker build file by
# replacing THE_TEMPEST_CODE_URL with the actual URL corresponding to the
# Tempest version specified by the user for the target cloud.
FROM ubuntu
RUN apt-get update
# Download tools
RUN apt-get install -y git python-pip wget unzip
# Download dependencies
RUN apt-get install -y libxml2-dev libxslt-dev lib32z1-dev python2.7-dev libssl-dev
RUN apt-get install -y python-dev libxslt1-dev libsasl2-dev libsqlite3-dev libldap2-dev libffi-dev
RUN pip install ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.9.tar.gz
# Upgrade pip before installing Tempest
RUN pip install --upgrade pip>=1.4
# Get Tempest code
#
# THE_TEMPEST_CODE_URL should be replaced with an appropriate address.
# For example:
# https://github.com/openstack/tempest/archive/stable/havana.zip for Havana.
RUN wget {{ THE_TEMPEST_CODE_URL }} -O tempest.zip
RUN unzip -d /tmp/tempest tempest.zip
RUN mv /tmp/tempest/* /tempest
# Install Tempest requirements
RUN /usr/local/bin/pip install -r /tempest/requirements.txt
RUN /usr/local/bin/pip install -r /tempest/test-requirements.txt
# Download the test execution script from the RefStack server
# and execute the script
#
# The script will generate the Tempest config file, run Tempest tests
# and upload the test results back to the RefStack app server.
#
# This CMD command uses values from the following environment variables:
# APP_SERVER_ADDRESS - The IP address & port that the RefStack app server
# is listening at.
# TEST_ID - The unique ID for this test run assigned by the
# RefStack app server.
# CONF_JSON - JSON string that contains portion of the Tempest
# config. For example, the passwords of the users.
CMD wget http://${APP_SERVER_ADDRESS}/get-script -O execute_test.py && python execute_test.py --url ${APP_SERVER_ADDRESS} --test-id ${TEST_ID} --conf-json ${CONF_JSON} --tempest-dir /tempest -v


@@ -1,45 +0,0 @@
Execute Test
============

Execute test is a command line utility that allows you to execute Tempest runs with generated configs. When it has finished running Tempest, it sends the raw subunit data back to an API.

**Usage**

First make sure you have the prerequisites installed:

`apt-get update`

`apt-get install -y git python-pip`

`apt-get install -y libxml2-dev libxslt-dev lib32z1-dev python2.7-dev libssl-dev libxml2-python`

`apt-get install -y python-dev libxslt1-dev libsasl2-dev libsqlite3-dev libldap2-dev libffi-dev`

`pip install --upgrade pip>=1.4`

`pip install virtualenv`

Then you'll need to set up the Tempest environment. From the refstack dir:

`cd refstack/tools/execute_test/`

The following command installs stable/havana Tempest in a virtualenv named 'test_runner', putting Tempest in `./test_runner/src/tempest`:

`./setup_env`

From here you have two options:

a. If you are triggering this test from the web GUI, you can use the `/get-miniconf` method, i.e.:

`./execute_test --url refstack.org --test-id 235 --tempest-dir ./test_runner/src/tempest --conf-json {section:{option:value}}`

or

b. (recommended) Source an openstack rc file downloaded from the cloud you want to test, i.e.:

`source openstackrc.sh`

`./execute_test --env --url refstack.org --test-id 235 --tempest-dir ./test_runner/src/tempest`
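The --conf-json payload uses the same section/option layout as tempest.conf; any option that exists in tempest.conf.sample can be overridden. A small, made-up example of building that string:

import json

# Made-up overrides; pass the serialized string as --conf-json '<value>'
extra_conf = {"compute": {"image_ref": "cirros-0.3.1-x86_64"},
              "identity": {"region": "RegionOne"}}
print json.dumps(extra_conf)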


@@ -1,476 +0,0 @@
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import argparse
import ConfigParser
import fnmatch
import json
import logging
import os
import requests
import subprocess
import time
import urllib2


class Test:

    app_server_address = None
    test_id = None
    extra_conf_dict = dict()
    mini_conf_dict = dict()
    testcases = {"testcases": ["tempest"]}

    def __init__(self, args):
        '''Prepare a tempest test against a cloud.'''

        log_format = "%(asctime)s %(name)s %(levelname)s %(message)s"
        self.logger = logging.getLogger("execute_test")
        console_log_handle = logging.StreamHandler()
        console_log_handle.setFormatter(logging.Formatter(log_format))
        self.logger.addHandler(console_log_handle)

        if os.environ.get("DEBUG"):
            self.logger.setLevel(logging.DEBUG)
        elif args.verbose:
            self.logger.setLevel(logging.INFO)
        else:
            self.logger.setLevel(logging.CRITICAL)

        # assign local vars to match args
        # things not passed in right will be set as None
        self.app_server_address = args.app_server_address
        self.test_id = args.test_id
        self.tempest_dir = args.tempest_dir
        # Default to an empty dict when --conf-json is not supplied.
        self.extra_conf_dict = args.extra_conf_dict or dict()

        # if --env is used then import from env vars and don't call home
        if args.env:
            self.mini_conf_dict = self.import_config_from_env()
        else:
            self.mini_conf_dict = self.get_mini_config()

        if args.testcases:
            self.testcases = {"testcases": args.testcases}

        self.sample_conf_file = os.path.join(self.tempest_dir, 'etc',
                                             'tempest.conf.sample')
        self.tempest_conf_file = os.path.join(self.tempest_dir,
                                              'tempest.config')
        self.result_dir = os.path.join(self.tempest_dir, '.testrepository')
        self.result = os.path.join(self.result_dir, 'result')
        self.tempest_script = os.path.join(self.tempest_dir, 'run_tests.sh')

        self.sample_conf_parser = ConfigParser.SafeConfigParser()
        self.sample_conf_parser.read(self.sample_conf_file)

    def import_config_from_env(self):
        """Create config from environment variables if set."""
        if not os.environ.get("OS_AUTH_URL"):
            # TODO: add better argument parsing for this input..
            # failure needs to give a more detailed response.
            print """Missing env variables; did you source your localrc?"""
        else:
            self.logger.info('Using Config ENV variables for %s@%s'
                             % (os.environ.get("OS_USERNAME"),
                                os.environ.get("OS_AUTH_URL")))
            env_config = {"identity":
                          {"uri": os.environ.get("OS_AUTH_URL"),
                           "username": os.environ.get("OS_USERNAME"),
                           "password": os.environ.get("OS_PASSWORD"),
                           "tenant_name": os.environ.get("OS_TENANT_NAME")},
                          "compute": {"image_ref":
                                      os.environ.get("TEMPEST_IMAGE_REF",
                                                     "cirros")}}
            self.logger.debug("ENV config: %s" % (env_config))
            return env_config

    def generate_config(self):
        '''Merge the passed-in config with tempest.conf.sample
        and write it to $tempest/tempest.config.'''
        self.logger.info('Generating tempest.config')

        # merge in config from env or api
        self.merge_to_sample_conf(self.mini_conf_dict)
        # merge in extra config
        self.merge_to_sample_conf(self.extra_conf_dict)

        # discovered config will not overwrite the values in the
        # mini_conf_dict and extra_conf_dict
        discovered_conf_dict = self._build_discovered_dict_conf()
        self.merge_to_sample_conf(discovered_conf_dict)

        # write the config file
        self.sample_conf_parser.write(open(self.tempest_conf_file, 'w'))

    def merge_to_sample_conf(self, dic):
        '''Merge values in a dictionary into tempest.conf.sample.'''
        for section, data in dic.items():
            for key, value in data.items():
                if self.sample_conf_parser.has_option(section, key):
                    self.sample_conf_parser.set(section, key, value)

    def get_mini_config(self):
        '''Return a mini config retrieved from the remote server.'''
        if self.app_server_address and self.test_id:
            url = "http://%s/get-miniconf?test_id=%s" % \
                (self.app_server_address, self.test_id)
            try:
                req = urllib2.urlopen(url=url, timeout=10)
                self.logger.info('Using App Server Config from %s' % (url))
                return json.loads(req.readlines()[0])
            except:
                self.logger.critical('Failed to get mini config from %s'
                                     % url)
                raise
        else:
            return dict()

    def get_test_cases(self):
        '''Return the list of tempest testcases as a JSON string.

        For certification, the list will contain only one test case.
        For vendor testing, the list may contain any number of test cases.
        '''
        if self.app_server_address and self.test_id:
            self.logger.info("Get test cases")
            url = "http://%s/get-testcases?test_id=%s" % \
                (self.app_server_address, self.test_id)
            try:
                req = urllib2.urlopen(url=url, timeout=10)
                return req.readlines()[0]
            except:
                self.logger.critical('Failed to get test cases from %s' % url)
                raise
        else:
            return json.dumps(self.testcases)

    def run_test_cases(self):
        '''Execute each test case in the testcase list.'''
        # Make a backup in case previous data exists in the directory
        if os.path.exists(self.result_dir):
            date = time.strftime("%m%d%H%M%S")
            backup_path = os.path.join(os.path.dirname(self.result_dir),
                                       "%s_backup_%s" %
                                       (os.path.basename(self.result_dir),
                                        date))
            self.logger.info("Rename existing %s to %s" %
                             (self.result_dir, backup_path))
            os.rename(self.result_dir, backup_path)

        # Execute each testcase.
        testcases = json.loads(self.get_test_cases())['testcases']
        self.logger.info('Running test cases')
        for case in testcases:
            cmd = ('%s -C %s -N -- %s' %
                   (self.tempest_script, self.tempest_conf_file, case))
            # When a testcase fails, continue executing all remaining cases
            # so any partial result can be preserved and posted later.
            try:
                subprocess.check_output(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                self.logger.error('%s %s testcases failed to complete' %
                                  (e, case))

    def post_test_result(self):
        '''Post the combined results back to the server.'''
        if self.app_server_address and self.test_id:
            self.logger.info('Send back the result')
            url = "http://%s/post-result?test_id=%s" % \
                (self.app_server_address, self.test_id)
            files = {'file': open(self.result, 'rb')}
            try:
                requests.post(url, files=files)
                self.logger.info('Result posted successfully')
            except:
                self.logger.critical('failed to post result to %s' % url)
                raise
        else:
            self.logger.info('Testr result can be found at %s' % (self.result))

    def combine_test_result(self):
        '''Generate a combined testr result.'''
        testr_results = [item for item in os.listdir(self.result_dir)
                         if fnmatch.fnmatch(item, '[0-9]*')]
        testr_results.sort(key=int)
        with open(self.result, 'w') as outfile:
            for fp in testr_results:
                with open(os.path.join(self.result_dir, fp), 'r') as infile:
                    outfile.write(infile.read())
        self.logger.info('Combined testr result')

    def run(self):
        '''Execute tempest tests against the cloud.'''
        self.generate_config()
        self.run_test_cases()
        self.combine_test_result()
        self.post_test_result()

    # These methods are for identity discovery
    def _subtract_dictionaries(self, discovered_conf_dict, conf_dict):
        '''Remove the configs in conf_dict from discovered_conf_dict.'''
        for section, data in discovered_conf_dict.items():
            for key in data.keys():
                if section in conf_dict and key in conf_dict[section]:
                    self.logger.info("Will not discover [%s] %s because caller"
                                     " chose to overwrite" % (section, key))
                    del discovered_conf_dict[section][key]

    def _build_discovered_dict_conf(self):
        '''Return discovered tempest configs in a dictionary.'''
        self.logger.info("Starting tempest config discovery")
        # These are the default discovery items
        # with which the tempest.conf.sample values will be discovered.
        discovery_conf_dict =\
            {"identity": {"region": self.get_identity_region,
                          "admin_tenant_name": self.get_admin_tenant_name,
                          "tenant_name": self.get_tenant_name,
                          "alt_tenant_name": self.get_alt_tenant_name}
             }

        # Remove the configs from the default discovery
        # for those that the caller chose to overwrite.
        self._subtract_dictionaries(discovery_conf_dict, self.mini_conf_dict)
        self._subtract_dictionaries(discovery_conf_dict, self.extra_conf_dict)

        # populate configs
        for section, data in discovery_conf_dict.items():
            for key in data.keys():
                discovery_conf_dict[section][key] =\
                    discovery_conf_dict[section][key]()
        self.logger.info("Discovered configs: %s" % discovery_conf_dict)
        return discovery_conf_dict

    def get_keystone_token(self, url, user, password, tenant=""):
        '''Return the json response from the keystone tokens API call.'''
        parameter = {"auth": {"tenantName": tenant,
                              "passwordCredentials":
                              {"username": user,
                               "password": password}
                              }
                     }
        header = {"Content-type": "application/json"}
        try:
            req = requests.post(url, data=json.dumps(parameter),
                                headers=header)
            if req.status_code is not requests.codes.ok:
                req.raise_for_status()
        except:
            self.logger.critical("Failed to get a Keystone token. "
                                 "Please verify your keystone endpoint url, "
                                 "username or password.\n"
                                 "url: \"%s\"\nheader: %s\nparameter: %s\n"
                                 "response: %s" %
                                 (url, header, parameter, req.content))
            raise
        return req.content

    def get_tenants(self, token_id):
        '''Return the list of tenants for a token_id.'''
        keystone_url = self.sample_conf_parser.get("identity", "uri")
        headers = {"Content-type": "application/json",
                   "X-Auth-Token": token_id}
        try:
            req = requests.get(keystone_url + "/tenants", headers=headers)
        except:
            self.logger.critical("failed to get tenants for token id %s "
                                 "from %s" % (token_id, keystone_url))
            raise
        return json.loads(req.content)["tenants"]

    def get_alt_tenant_name(self):
        '''Return the alt_tenant_name.'''
        keystone_url = self.sample_conf_parser.get("identity", "uri")
        alt_user = self.sample_conf_parser.get("identity", "alt_username")
        alt_pw = self.sample_conf_parser.get("identity", "alt_password")
        token_id = json.loads(self.get_keystone_token(url=keystone_url +
                                                      "/tokens",
                                                      user=alt_user,
                                                      password=alt_pw)
                              )["access"]["token"]["id"]
        '''TODO: Assuming the user only belongs to one tenant'''
        try:
            alt_tenant = self.get_tenants(token_id)[0]["name"]
        except:
            self.logger.critical("failed to get the tenant for alt_username "
                                 "%s from %s" % (alt_user, keystone_url))
            raise
        return alt_tenant

    def get_tenant_name(self):
        '''Return the tenant_name.'''
        keystone_url = self.sample_conf_parser.get("identity", "uri")
        user = self.sample_conf_parser.get("identity", "username")
        pw = self.sample_conf_parser.get("identity", "password")
        token_id = json.loads(self.get_keystone_token(url=keystone_url +
                                                      "/tokens",
                                                      user=user,
                                                      password=pw)
                              )["access"]["token"]["id"]
        '''TODO: Assuming the user only belongs to one tenant'''
        try:
            tenant = self.get_tenants(token_id)[0]["name"]
        except:
            self.logger.critical("failed to get the tenant for username %s "
                                 "from %s" % (user, keystone_url))
            raise
        return tenant

    def get_admin_tenant_name(self):
        '''Return the admin_tenant_name.

        TODO: save the admin tenant as an attribute so the
        get_identity_region() method can use it directly.
        '''
        keystone_url = self.sample_conf_parser.get("identity", "uri")
        admin_user = self.sample_conf_parser.get("identity", "admin_username")
        admin_pw = self.sample_conf_parser.get("identity", "admin_password")
        token_id = json.loads(self.get_keystone_token(url=keystone_url +
                                                      "/tokens",
                                                      user=admin_user,
                                                      password=admin_pw)
                              )["access"]["token"]["id"]
        '''TODO: Authenticate as "admin" (public URL) against each tenant
        found in the tenant list until a tenant is found on which "admin"
        has the "admin" role. For now, assume the admin user ONLY belongs
        to the admin tenant and has the admin role as defined in
        tempest.conf.sample identity.admin_role
        '''
        try:
            tenant = self.get_tenants(token_id)[0]["name"]
        except:
            self.logger.critical("failed to get the tenant for "
                                 "admin_username %s from %s" %
                                 (admin_user, keystone_url))
            raise
        return tenant

    def get_identity_region(self):
        '''Return the identity region.'''
        keystone_url = self.sample_conf_parser.get("identity", "uri")
        admin_user = self.sample_conf_parser.get("identity", "admin_username")
        admin_pw = self.sample_conf_parser.get("identity", "admin_password")
        admin_tenant = self.get_admin_tenant_name()
        '''
        TODO: Preserve the admin token id as an attribute because
        the admin token will be used for image discovery
        '''
        admin_token = json.loads(self.get_keystone_token
                                 (url=keystone_url + "/tokens",
                                  user=admin_user,
                                  password=admin_pw,
                                  tenant=admin_tenant))
        '''TODO: assume there is only one identity endpoint'''
        identity_region =\
            [service["endpoints"][0]["region"]
             for service in admin_token["access"]["serviceCatalog"]
             if service["type"] == "identity"][0]
        return identity_region

    ''' TODO: The remaining methods are for image discovery. '''
    def create_image(self):
        '''Download and create a cirros image.

        Return the image reference id.
        '''
        pass

    def find_smallest_flavor(self):
        '''Find the smallest flavor by sorting by memory size.'''
        pass

    def delete_image(self):
        '''Delete an image.'''
        pass


if __name__ == '__main__':
    '''Generate tempest.conf from a tempest.conf.sample and then run tests.'''
    parser = argparse.ArgumentParser(description='Starts a tempest test',
                                     formatter_class=argparse.
                                     ArgumentDefaultsHelpFormatter)
    parser.add_argument('-s', '--silent',
                        action='store_true',
                        help='rigged for silent running')
    parser.add_argument("-v", "--verbose",
                        action="count",
                        help="show verbose output")
    parser.add_argument('-e', '--env',
                        action='store_true',
                        required=False,
                        dest='env',
                        help='uses env variables and does not '
                             'pull the mini config from the server')
    parser.add_argument("--url",
                        action='store',
                        required=True,
                        type=str,
                        dest='app_server_address',
                        help="refstack API url to retrieve configurations "
                             "from, e.g.: --url 127.0.0.1:8000")
    parser.add_argument("--test-id",
                        action='store',
                        required=False,
                        dest='test_id',
                        type=int,
                        help="refstack test ID, e.g.: --test-id 1234")
    parser.add_argument("--tempest-dir",
                        action='store',
                        required=True,
                        dest='tempest_dir',
                        help="tempest directory path")
    parser.add_argument("--testcases",
                        action='store',
                        required=False,
                        nargs='+',
                        help="tempest test cases. Use spaces to "
                             "separate testcases")
    parser.add_argument("--conf-json",
                        action='store',
                        required=False,
                        type=json.loads,
                        dest='extra_conf_dict',
                        help="tempest configuration in a JSON string")
    args = parser.parse_args()
    test = Test(args)
    test.run()


@@ -1 +0,0 @@
-e git+https://github.com/openstack/tempest.git@stable/havana#egg=tempest


@@ -1,27 +0,0 @@
[metadata]
name = execute_test
version = 0.1
summary = Tempest test wrapper for refstack
description-file =
    README.rst
author = OpenStack
author-email = fits@lists.openstack.org
home-page = http://www.openstack.org/
classifier =
    Environment :: OpenStack
    Intended Audience :: Developers
    Intended Audience :: Information Technology
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3.3

[files]
packages =
    refstack

[global]
setup-hooks =
    pbr.hooks.setup_hook


@@ -1,21 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2014 Piston Cloud Computing, inc. all rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import setuptools

setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)


@@ -1,12 +0,0 @@
#!/bin/bash

virtualenv test_runner
source test_runner/bin/activate
pip install -r havana_requirements.txt

#cd /test_runner/src/tempest


@@ -1,201 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import re
import subunit
import testtools
import unittest


class TempestSubunitTestResultBase(testtools.TestResult):
    """Class to process a subunit stream.

    This class is derived from testtools.TestResult.
    It overrides all the inherited addXXX methods
    to call the new _process_result() method to process the data.

    This class is designed to be a base class.
    The _process_result() method should be overridden by the
    derived class to customize the processing.
    """

    result_type = ["SUCCESS", "FAILURE", "ERROR", "SKIP"]

    def __init__(self, stream, descriptions, verbosity):
        """Initialize with the super class signature."""
        super(TempestSubunitTestResultBase, self).__init__()

    def _process_result(self, result_type, testcase, *arg):
        """Process the data.

        The value of the parameter "result_type" can be SUCCESS, FAILURE,
        ERROR, or SKIP.
        It can be used to determine from which add method this is called.
        """
        pass

    def addSuccess(self, testcase):
        """Overwrite super class method for additional data processing."""
        super(TempestSubunitTestResultBase, self).addSuccess(testcase)
        self._process_result(self.result_type[0], testcase)

    def addFailure(self, testcase, err):
        """Overwrite super class method for additional data processing."""
        if testcase.id() == 'process-returncode':
            return
        super(TempestSubunitTestResultBase, self).addFailure(testcase, err)
        self._process_result(self.result_type[1], testcase, err)

    def addError(self, testcase, err):
        """Overwrite super class method for additional data processing."""
        super(TempestSubunitTestResultBase, self).addError(testcase, err)
        self._process_result(self.result_type[2], testcase, err)

    def addSkip(self, testcase, reason=None, details=None):
        """Overwrite super class method for additional data processing."""
        super(TempestSubunitTestResultBase,
              self).addSkip(testcase, reason, details)
        self._process_result(self.result_type[3], testcase, reason, details)

    def startTest(self, testcase):
        """Overwrite super class method for additional data processing."""
        self.start_time = self._now()
        super(TempestSubunitTestResultBase, self).startTest(testcase)


class TempestSubunitTestResult(TempestSubunitTestResultBase):
    """Process a subunit stream and save data into two dictionary objects.

    1) The result dictionary object:
       results={testcase_id: [status, elapsed],
                testcase_id: [status, elapsed],
                ...}

       testcase_id: the id fetched from subunit data.
                    For a Tempest test: testcase_id = test_class_name + test_name
       status: status of the testcase (PASS, FAIL, FAIL_SETUP, ERROR, SKIP)
       elapsed: testcase elapsed time

    2) The summary dictionary object:
       summary={"PASS": count, "FAIL": count, "FAIL_SETUP": count,
                "ERROR": count, "SKIP": count, "Total": count}

       count: the number of occurrences
    """

    def __init__(self, stream, descriptions, verbosity):
        """Initialize with the super class signature."""
        super(TempestSubunitTestResult, self).__init__(stream, descriptions,
                                                       verbosity)
        self.start_time = None
        self.status = ["PASS", "FAIL", "FAIL_SETUP", "ERROR", "SKIP"]
        self.results = {}
        self.summary = {self.status[0]: 0, self.status[1]: 0,
                        self.status[2]: 0, self.status[3]: 0,
                        self.status[4]: 0, "Total": 0}

    def _process_result(self, result_type, testcase, *arg):
        """Process and append data to dictionary objects."""
        testcase_id = testcase.id()
        elapsed = (self._now() - self.start_time).total_seconds()
        status = result_type

        # Convert "SUCCESS" to "PASS"
        # Separate "FAILURE" into "FAIL" and "FAIL_SETUP"
        if status == self.result_type[0]:
            status = self.status[0]
        elif status == self.result_type[1]:
            if "setUpClass" in testcase_id:
                status = self.status[2]
                testcase_id = '%s.setUpClass' % \
                    (re.search('\((.*)\)', testcase_id).group(1))
            else:
                status = self.status[1]

        self.results.setdefault(testcase_id, [])
        self.results[testcase_id] = [status, elapsed]
        self.summary[status] += 1
        self.summary["Total"] += 1


class TempestSubunitTestResultTuples(TempestSubunitTestResult):
    """Process a subunit stream and save data into two dictionary objects.

    1) The result dictionary object:
       results={test_classname: [(test_name, status, elapsed),
                                 (test_name, status, elapsed), ...],
                test_classname: [(test_name, status, elapsed),
                                 (test_name, status, elapsed), ...],
                ...}

       status: status of the testcase (PASS, FAIL, FAIL_SETUP, ERROR, SKIP)
       elapsed: testcase elapsed time

    2) The summary dictionary object:
       summary={"PASS": count, "FAIL": count, "FAIL_SETUP": count,
                "ERROR": count, "SKIP": count, "Total": count}

       count: the number of occurrences
    """

    def _process_result(self, result_type, testcase, *arg):
        """Process and append data to dictionary objects."""
        testcase_id = testcase.id()
        elapsed = round((self._now() - self.start_time).total_seconds(), 2)
        status = result_type

        # Convert "SUCCESS" to "PASS"
        # Separate "FAILURE" into "FAIL" and "FAIL_SETUP"
        if status == self.result_type[0]:
            status = self.status[0]
        elif status == self.result_type[1]:
            if "setUpClass" in testcase_id:
                status = self.status[2]
                testcase_id = '%s.setUpClass' % \
                    (re.search('\((.*)\)', testcase_id).group(1))
            else:
                status = self.status[1]

        classname, testname = testcase_id.rsplit('.', 1)
        self.results.setdefault(classname, [])
        self.results[classname].append((testname, status, elapsed))
        self.summary[status] += 1
        self.summary["Total"] += 1


class ProcessSubunitData():
    """A class to replay subunit data from a stream."""

    result = None

    def __init__(self, in_stream, test_result_class_name=
                 TempestSubunitTestResult):
        """Read and process subunit data from a stream.

        Save the processed data into an instance of
        test_result_class_name, a class derived from unittest.TestResult.
        """
        test = subunit.ProtocolTestCase(in_stream, passthrough=None)
        runner = unittest.TextTestRunner(verbosity=2, resultclass=
                                         test_result_class_name)
        # Run (replay) the test from the subunit stream.
        # runner.run will return an object of type "test_result_class_name"
        self.result = runner.run(test)

    def get_result(self):
        """Return an object of type test_result_class_name."""
        return self.result
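A brief usage sketch of the classes above. The result file path is hypothetical; the RefStack server feeds stored subunit data through the same classes in tempest_tester.py below.

from tempest_subunit_test_result import (ProcessSubunitData,
                                         TempestSubunitTestResultTuples)

# Hypothetical path to a subunit result file produced by execute_test.py.
with open('/tempest/.testrepository/result', 'rb') as stream:
    result = ProcessSubunitData(stream,
                                TempestSubunitTestResultTuples).get_result()

print result.summary   # e.g. {"PASS": 10, "FAIL": 1, ..., "Total": 11}
print result.results   # test class name -> list of (test, status, elapsed)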


@@ -1,210 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import cStringIO
from docker_buildfile import DockerBuildFile
import json
import os

from refstack.models import Test
from refstack.refstack_config import RefStackConfig
from tempest_subunit_test_result import ProcessSubunitData
from tempest_subunit_test_result import TempestSubunitTestResultTuples

config_data = RefStackConfig()

# Common tempest conf values to be used for all tests
# - Image user and password can be set to any value for testing purpose
# - We do not test OpenStack CLI
# Vendors running their own Refstack can override these using config file.
common_tempest_conf = {
    "compute":
    {
        "image_ssh_user": "root",
        "image_ssh_password": "password",
        "image_alt_ssh_user": "root",
        "image_alt_ssh_password": "password"
    },
    "cli":
    {
        "enabled": "False"
    }
}


class TempestTester(object):
    '''Utility class to handle tempest test.'''

    test_id = None
    test_obj = None

    def __init__(self, test_id):
        '''Extract the corresponding test object from the db.'''
        if test_id:
            self.test_obj = Test.query.filter_by(id=test_id).first()
            if self.test_obj:
                self.test_id = test_id
                return
        raise ValueError('Invalid test id %s' % (test_id))

    def generate_miniconf(self):
        '''Return a JSON object representing the mini tempest conf.'''
        # Get custom tempest config from config file
        custom_tempest_conf = config_data.get_tempest_config()

        # Construct cloud specific tempest config from db
        if self.test_obj.cloud:
            cloud_tempest_conf = {
                "identity":
                {
                    "uri": self.test_obj.cloud.endpoint,
                    "uri_v3": self.test_obj.cloud.endpoint_v3,
                    "username": self.test_obj.cloud.test_user,
                    "alt_username": self.test_obj.cloud.test_user,
                    "admin_username": self.test_obj.cloud.admin_user
                }
            }
        else:
            cloud_tempest_conf = dict()

        # Merge all the config data together
        # - Start with common config
        # - Insert/Overwrite with values from custom config
        # - Insert/Overwrite with values from cloud DB
        tempest_conf = common_tempest_conf
        self._merge_config(tempest_conf, custom_tempest_conf)
        self._merge_config(tempest_conf, cloud_tempest_conf)

        return json.dumps(tempest_conf)

    def _merge_config(self, dic1, dic2):
        '''Insert data from dictionary dic2 into dictionary dic1.

        dic1 and dic2 are in the format of section, key, value.
        '''
        if not all([dic1, dic2]):
            return
        for section, data in dic2.items():
            if section in dic1:
                dic1[section].update(data)
            else:
                dic1.update({section: data})

    def generate_testcases(self):
        '''Return a JSON array of the tempest testcases to be executed.'''
        # Set to full tempest test unless it is specified in the config file
        testcases = config_data.get_tempest_testcases()
        if not testcases:
            testcases = {"testcases": ["tempest"]}
        return json.dumps(testcases)

    def get_result(self):
        '''Return the test result objects.'''
        if not self.test_obj.finished:
            return None
        try:
            test_result = ProcessSubunitData(cStringIO.StringIO(
                self.test_obj.subunit),
                TempestSubunitTestResultTuples).get_result()
            return {"summary": test_result.summary,
                    "data": test_result.results}
        except Exception:
            return None

    def process_resultfile(self, filename):
        '''Process the tempest result file.'''
        ''' TODO: store the file in test db obj '''
        ''' ForNow: write the file to console output '''
        with open(filename, 'r') as f:
            print f.read()
            f.close()

    def execute_test(self, extra_conf_json=None):
        '''Execute the tempest test with the provided extra_conf_json.'''
        if not extra_conf_json:
            extra_conf_json = '{}'

        options = {'DOCKER': self._execute_test_docker,
                   'LOCAL': self._execute_test_local,
                   'GEARMAN': self._execute_test_gearman}

        ''' TODO: Initial test status in DB '''

        if config_data.get_test_mode():
            test_mode = config_data.get_test_mode().upper()
        else:
            # Default to use docker if not specified in the config file
            test_mode = 'DOCKER'

        try:
            options[test_mode](extra_conf_json)
        except KeyError:
            print 'Error: Invalid test mode in config file'

        ''' TODO: Update test status in DB '''

    def _execute_test_docker(self, extra_conf_json):
        '''Execute the tempest test in a docker container.'''
        # Generate the docker file
        docker_file = os.path.join(config_data.get_working_dir(),
                                   'test_%s.docker_file' % self.test_id)
        docker_builder = DockerBuildFile()
        ''' TODO: Determine tempest URL based on the cloud version '''
        ''' ForNow: Use the Tempest URL in the config file '''
        docker_builder.tempest_code_url = config_data.get_tempest_url()
        docker_builder.build_docker_buildfile(docker_file)

        # Generate the docker commands
        docker_tag = 'refstack_%s' % (self.test_id)
        docker_build_command = 'docker build -t %s - < %s' % \
            (docker_tag, docker_file)
        docker_run_env = '-e APP_SERVER_ADDRESS=%s ' \
                         '-e TEST_ID=%s ' \
                         '-e CONF_JSON=\'%s\'' % \
                         (config_data.get_app_address(),
                          self.test_id,
                          extra_conf_json.replace('"', '\\"'))
        docker_run_command = 'docker run %s -t %s' % \
            (docker_run_env, docker_tag)
        out_file = os.path.join(config_data.get_working_dir(),
                                'test_%s.dockerOutput' % self.test_id)
        cmd = 'nohup sh -c "%s && %s" > %s &' % (docker_build_command,
                                                 docker_run_command,
                                                 out_file)

        # Build and execute the docker file
        os.system(cmd)

        ''' TODO: Clean up the temporary docker build and output file '''

    def _execute_test_local(self, extra_conf_json):
        '''Execute the tempest test locally.'''
        pass

    def _execute_test_gearman(self, extra_conf_json):
        '''Execute the tempest test with gearman.'''
        pass
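To illustrate the merge order used by generate_miniconf() above, a small standalone example of _merge_config() with made-up values (TempestTester.__new__ is used only to skip the database lookup in __init__):

base = {"compute": {"image_ssh_user": "root"},
        "cli": {"enabled": "False"}}
override = {"compute": {"image_ssh_user": "cirros"},
            "identity": {"uri": "http://example.com:5000/v2.0"}}

tester = TempestTester.__new__(TempestTester)
tester._merge_config(base, override)
# base is now:
# {"compute": {"image_ssh_user": "cirros"},
#  "cli": {"enabled": "False"},
#  "identity": {"uri": "http://example.com:5000/v2.0"}}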