Fixes PEP8 issues as reported by tox and unittests

* Changes docstrings from single to double quotes.
* Reformats multiline declarations with non-visual indentation.
* Turns the rsyslog resource into a plugin.
* Moves plugin-specific unittests into their respective plugins.
* Adds tox support to plugins with tests (see the sketch below).

Change-Id: Ife172b165004c366ad64faa7c00afceb8e99c76d
This commit is contained in: parent f6eef4188d, commit f7e4f0084a
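For orientation, the tox support added here uses the standard setuptools test-command hook: `python setup.py test` is made to delegate to tox. Below is a minimal, self-contained sketch of that pattern; the plugin name is a placeholder, while the `Tox` class mirrors the one added in the setup.py hunks later in this diff.

```python
import sys

from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand


class Tox(TestCommand):
    """Make `python setup.py test` delegate to tox."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here because the package egg isn't importable at setup time.
        import tox
        errno = tox.cmdline(self.test_args)
        sys.exit(errno)


setup(
    name='example_plugin',  # placeholder; each plugin uses its own name
    packages=find_packages(),
    tests_require=['tox'],
    cmdclass={'test': Tox},
)
```

With this in place, running the test command invokes tox, which reads the adjacent tox.ini (a pep8/flake8 env plus a py27 nose run).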
@@ -1 +1 @@
-include README.md LICENSE NOTICE HISTORY.md pip-requires
+include README.md LICENSE pip-requires plugins
@ -27,7 +27,7 @@ def logsafe_str(data):
|
||||
|
||||
|
||||
def get_object_namespace(obj):
|
||||
'''Attempts to return a dotted string name representation of the general
|
||||
"""Attempts to return a dotted string name representation of the general
|
||||
form 'package.module.class.obj' for an object that has an __mro__ attribute
|
||||
|
||||
Designed to let you to name loggers inside objects in such a way
|
||||
@ -40,7 +40,7 @@ def get_object_namespace(obj):
|
||||
and is then further improved by a series of functions until
|
||||
one of them fails.
|
||||
The value of the last successful name-setting method is returned.
|
||||
'''
|
||||
"""
|
||||
|
||||
try:
|
||||
return parse_class_namespace_string(str(obj.__mro__[0]))
|
||||
@ -61,9 +61,10 @@ def get_object_namespace(obj):
|
||||
|
||||
|
||||
def parse_class_namespace_string(class_string):
|
||||
'''Parses the dotted namespace out of an object's __mro__.
|
||||
Returns a string
|
||||
'''
|
||||
"""
|
||||
Parses the dotted namespace out of an object's __mro__. Returns a string
|
||||
"""
|
||||
|
||||
class_string = str(class_string)
|
||||
class_string = class_string.replace("'>", "")
|
||||
class_string = class_string.replace("<class '", "")
|
||||
@ -71,10 +72,9 @@ def parse_class_namespace_string(class_string):
|
||||
|
||||
|
||||
def getLogger(log_name, log_level=None):
|
||||
'''Convenience function to create a logger and set it's log level at the
|
||||
same time.
|
||||
Log level defaults to logging.DEBUG
|
||||
'''
|
||||
"""Convenience function to create a logger and set it's log level at the
|
||||
same time. Log level defaults to logging.DEBUG
|
||||
"""
|
||||
|
||||
# Create new log
|
||||
new_log = logging.getLogger(name=log_name)
|
||||
@ -97,11 +97,11 @@ def getLogger(log_name, log_level=None):
|
||||
|
||||
def setup_new_cchandler(
|
||||
log_file_name, log_dir=None, encoding=None, msg_format=None):
|
||||
'''Creates a log handler named <log_file_name> configured to save the log
|
||||
"""Creates a log handler named <log_file_name> configured to save the log
|
||||
in <log_dir> or <os environment variable 'CAFE_TEST_LOG_PATH'>,
|
||||
in that order or precedent.
|
||||
File handler defaults: 'a+', encoding=encoding or "UTF-8", delay=True
|
||||
'''
|
||||
"""
|
||||
|
||||
log_dir = log_dir or os.getenv('CAFE_TEST_LOG_PATH')
|
||||
|
||||
@ -131,10 +131,10 @@ def setup_new_cchandler(
|
||||
|
||||
|
||||
def log_results(result, test_id=None, verbosity=0):
|
||||
"""Replicates the printing functionality of unittest's runner.run() but
|
||||
log's instead of prints
|
||||
"""
|
||||
@summary: Replicates the printing functionality of unittest's
|
||||
runner.run() but log's instead of prints
|
||||
"""
|
||||
|
||||
infos = []
|
||||
expected_fails = unexpected_successes = skipped = 0
|
||||
|
||||
@ -198,7 +198,8 @@ def log_errors(label, result, errors):
|
||||
|
||||
|
||||
def init_root_log_handler():
|
||||
# Setup root log handler if the root logger doesn't already have one
|
||||
"""Setup root log handler if the root logger doesn't already have one"""
|
||||
|
||||
if not getLogger('').handlers:
|
||||
master_log_file_name = os.getenv('CAFE_MASTER_LOG_FILE_NAME')
|
||||
getLogger('').addHandler(
|
||||
@ -224,6 +225,7 @@ def log_info_block(
|
||||
using newlines. Otherwise, each line of the info block will be logged
|
||||
as seperate log lines (with seperate timestamps, etc.)
|
||||
"""
|
||||
|
||||
output = []
|
||||
try:
|
||||
info = info if isinstance(info, OrderedDict) else OrderedDict(info)
|
||||
@ -233,7 +235,8 @@ def log_info_block(
|
||||
return
|
||||
|
||||
separator = str(separator or "{0}".format('=' * 56))
|
||||
max_length = len(max([k for k in info.keys() if info.get(k)], key=len)) + 3
|
||||
max_length = \
|
||||
len(max([k for k in info.keys() if info.get(k)], key=len)) + 3
|
||||
|
||||
output.append(separator)
|
||||
if heading:
|
||||
|
@ -14,9 +14,9 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Classes for test statistics
|
||||
'''
|
||||
"""
|
||||
from datetime import datetime
|
||||
import os
|
||||
import csv
|
||||
@ -24,7 +24,7 @@ import sys
|
||||
|
||||
|
||||
class TestRunMetrics(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Timer used to track any time span
|
||||
@ivar start_time: Timestamp from the start of the timer
|
||||
@type start_time: C{datetime}
|
||||
@ -34,7 +34,7 @@ class TestRunMetrics(object):
|
||||
of the runner and the default unittest.TestCase architecture to make
|
||||
this auto-magically work with unittest properly.
|
||||
This should be a child of unittest.TestResult
|
||||
'''
|
||||
"""
|
||||
def __init__(self):
|
||||
self.total_tests = 0
|
||||
self.total_passed = 0
|
||||
@ -45,7 +45,7 @@ class TestRunMetrics(object):
|
||||
|
||||
|
||||
class TestResultTypes(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Types dictating an individual Test Case result
|
||||
@cvar PASSED: Test has passed
|
||||
@type PASSED: C{str}
|
||||
@ -58,7 +58,8 @@ class TestResultTypes(object):
|
||||
@cvar ERRORED: Test has errored
|
||||
@type ERRORED: C{str}
|
||||
@note: This is essentially an Enumerated Type
|
||||
'''
|
||||
"""
|
||||
|
||||
PASSED = "Passed"
|
||||
FAILED = "Failed"
|
||||
SKIPPED = "Skipped" # Not Supported Yet
|
||||
@ -68,39 +69,43 @@ class TestResultTypes(object):
|
||||
|
||||
|
||||
class TestTimer(object):
|
||||
'''
|
||||
"""
|
||||
@summary: Generic Timer used to track any time span
|
||||
@ivar start_time: Timestamp from the start of the timer
|
||||
@type start_time: C{datetime}
|
||||
@ivar stop_time: Timestamp of the end of the timer
|
||||
@type stop_time: C{datetime}
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.start_time = None
|
||||
self.stop_time = None
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Starts this timer
|
||||
@return: None
|
||||
@rtype: None
|
||||
'''
|
||||
"""
|
||||
|
||||
self.start_time = datetime.now()
|
||||
|
||||
def stop(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Stops this timer
|
||||
@return: None
|
||||
@rtype: None
|
||||
'''
|
||||
"""
|
||||
|
||||
self.stop_time = datetime.now()
|
||||
|
||||
def get_elapsed_time(self):
|
||||
'''
|
||||
"""
|
||||
@summary: Convenience method for total elapsed time
|
||||
@rtype: C{datetime}
|
||||
@return: Elapsed time for this timer. C{None} if timer has not started
|
||||
'''
|
||||
"""
|
||||
|
||||
elapsedTime = None
|
||||
if self.start_time is not None:
|
||||
if self.stop_time is not None:
|
||||
@ -108,36 +113,36 @@ class TestTimer(object):
|
||||
else:
|
||||
elapsedTime = (datetime.now() - self.start_time)
|
||||
else:
|
||||
''' Timer hasn't started, error on the side of caution '''
|
||||
# Timer hasn't started, error on the side of caution
|
||||
rightNow = datetime.now()
|
||||
elapsedTime = (rightNow - rightNow)
|
||||
return(elapsedTime)
|
||||
|
||||
|
||||
class CSVWriter(object):
|
||||
'''
|
||||
'''
|
||||
"""CSVWriter"""
|
||||
|
||||
def __init__(self, headers, file_name, log_dir='.', start_clean=False):
|
||||
self.file_mode = 'a'
|
||||
self.headers = headers
|
||||
|
||||
#create the dir if it does not exist
|
||||
# create the dir if it does not exist
|
||||
if not os.path.exists(log_dir):
|
||||
os.makedirs(log_dir)
|
||||
|
||||
#get full path
|
||||
# get full path
|
||||
self.full_path = os.path.normpath(os.path.join(log_dir, file_name))
|
||||
|
||||
#remove file if you want a clean log file
|
||||
# remove file if you want a clean log file
|
||||
if start_clean:
|
||||
''' Force the file to be overwritten before any writing '''
|
||||
# Force the file to be overwritten before any writing
|
||||
try:
|
||||
os.remove(self.full_path)
|
||||
except OSError:
|
||||
sys.stderr.write('File not writable\n')
|
||||
|
||||
if os.path.exists(self.full_path) is False:
|
||||
''' Write out the header to the stats log '''
|
||||
# Write out the header to the stats log
|
||||
self.writerow(self.headers)
|
||||
|
||||
def writerow(self, row_list):
|
||||
|
@ -61,7 +61,7 @@ class ConfiguratorCLI(object):
|
||||
parser = argparse.ArgumentParser()
|
||||
subparsers = parser.add_subparsers(dest="subcommand")
|
||||
|
||||
#Engine configuration subparser
|
||||
# Engine configuration subparser
|
||||
subparser_engine_config = subparsers.add_parser('engine')
|
||||
subparser_engine_config.add_argument(
|
||||
'--init-install', action=EngineActions.InitInstall, nargs=0)
|
||||
@ -72,8 +72,7 @@ class ConfiguratorCLI(object):
|
||||
|
||||
plugins_add_parser = plugin_args.add_parser('add')
|
||||
plugins_add_parser.add_argument(
|
||||
'plugin_dir', action=PluginActions.AddPluginCache,
|
||||
type=str)
|
||||
'plugin_dir', action=PluginActions.AddPluginCache, type=str)
|
||||
|
||||
plugins_add_parser = plugin_args.add_parser('list')
|
||||
plugins_add_parser.add_argument(
|
||||
@ -81,8 +80,8 @@ class ConfiguratorCLI(object):
|
||||
|
||||
plugins_install_parser = plugin_args.add_parser('install')
|
||||
plugins_install_parser.add_argument(
|
||||
'plugin-name', action=PluginActions.InstallPlugin,
|
||||
type=str, nargs='*')
|
||||
'plugin-name', action=PluginActions.InstallPlugin, type=str,
|
||||
nargs='*')
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
@ -186,6 +186,7 @@ class TestEnvManager(object):
|
||||
reason, it sets the CAFE_TEST_REPO_PATH directly as well as
|
||||
CAFE_TEST_REPO_PACKAGE
|
||||
"""
|
||||
|
||||
return os.path.expanduser(
|
||||
self.engine_config_interface.default_test_repo)
|
||||
|
||||
@ -218,8 +219,8 @@ class TestEnvManager(object):
|
||||
def test_logging_verbosity(self):
|
||||
"""Currently supports STANDARD and VERBOSE.
|
||||
TODO: Implement 'OFF' option that adds null handlers to all loggers
|
||||
|
||||
"""
|
||||
|
||||
return self.engine_config_interface.logging_verbosity
|
||||
|
||||
@_lazy_property
|
||||
@ -234,6 +235,7 @@ class EngineDirectoryManager(object):
|
||||
Converts the top-level keys of this dictionary into a namespace.
|
||||
Raises exception if any self.keys() collide with internal attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
dict.__init__(self, **kwargs)
|
||||
collisions = set(kwargs) & set(dir(self))
|
||||
@ -279,7 +281,8 @@ class EngineDirectoryManager(object):
|
||||
all changes made to the default .opencafe directory structure since
|
||||
opencafe's release.
|
||||
"""
|
||||
#Rename .cloudcafe to .opencafe
|
||||
|
||||
# Rename .cloudcafe to .opencafe
|
||||
if os.path.exists(cls._OLD_ROOT_DIR):
|
||||
if os.path.exists(cls.OPENCAFE_ROOT_DIR):
|
||||
print cls.wrapper.fill("* * ERROR * * *")
|
||||
@ -312,7 +315,8 @@ class EngineDirectoryManager(object):
|
||||
@classmethod
|
||||
def set_engine_directory_permissions(cls):
|
||||
"""Recursively changes permissions default engine directory so that
|
||||
everything is user-owned"""
|
||||
everything is user-owned
|
||||
"""
|
||||
|
||||
PlatformManager.safe_chown(cls.OPENCAFE_ROOT_DIR)
|
||||
for root, dirs, files in os.walk(cls.OPENCAFE_ROOT_DIR):
|
||||
@ -336,11 +340,11 @@ class EngineConfigManager(object):
|
||||
wrapper = textwrap.TextWrapper(
|
||||
initial_indent="* ", subsequent_indent=" ", break_long_words=False)
|
||||
|
||||
#Old Config Stuff for backwards compatability testing only
|
||||
# Old Config Stuff for backwards compatability testing only
|
||||
_OLD_ENGINE_CONFIG_PATH = os.path.join(
|
||||
EngineDirectoryManager.OPENCAFE_ROOT_DIR, 'configs', 'engine.config')
|
||||
|
||||
#Openafe config defaults
|
||||
# Openafe config defaults
|
||||
ENGINE_CONFIG_PATH = os.path.join(
|
||||
EngineDirectoryManager.OPENCAFE_ROOT_DIR, 'engine.config')
|
||||
|
||||
@ -419,7 +423,7 @@ class EngineConfigManager(object):
|
||||
"Moving engine.config file from {0} to {1}".format(
|
||||
cls._OLD_ENGINE_CONFIG_PATH, cls.ENGINE_CONFIG_PATH))
|
||||
config = cls.read_config_file(cls._OLD_ENGINE_CONFIG_PATH)
|
||||
#Move to new location
|
||||
# Move to new location
|
||||
os.rename(cls._OLD_ENGINE_CONFIG_PATH, cls.ENGINE_CONFIG_PATH)
|
||||
|
||||
# Read config from current default location ('.opencafe/engine.config)
|
||||
@ -597,6 +601,7 @@ class EnginePluginManager(object):
|
||||
""" Handles moving all plugin src data from package into the user's
|
||||
.opencafe folder for installation by the cafe-config tool.
|
||||
"""
|
||||
|
||||
default_dest = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugins = os.walk(plugins_src_dir).next()[1]
|
||||
|
||||
@ -607,6 +612,7 @@ class EnginePluginManager(object):
|
||||
@classmethod
|
||||
def list_plugins(cls):
|
||||
""" Lists all plugins currently available in user's .opencafe cache"""
|
||||
|
||||
plugin_cache = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugin_folders = os.walk(plugin_cache).next()[1]
|
||||
wrap = textwrap.TextWrapper(initial_indent=" ",
|
||||
@ -619,12 +625,14 @@ class EnginePluginManager(object):
|
||||
@classmethod
|
||||
def install_plugins(cls, plugin_names):
|
||||
""" Installs a list of plugins into the current environment"""
|
||||
|
||||
for plugin_name in plugin_names:
|
||||
cls.install_plugin(plugin_name)
|
||||
|
||||
@classmethod
|
||||
def install_plugin(cls, plugin_name):
|
||||
""" Install a single plugin by name into the current environment"""
|
||||
|
||||
plugin_cache = EngineDirectoryManager.OPENCAFE_SUB_DIRS.PLUGIN_CACHE
|
||||
plugin_dir = os.path.join(plugin_cache, plugin_name)
|
||||
wrap = textwrap.TextWrapper(initial_indent=" ",
|
||||
|
@ -43,7 +43,7 @@ class DatasetList(list):
|
||||
|
||||
super(DatasetList, self).append(dataset)
|
||||
|
||||
def append_new_dataset(self, name, data_dict):
|
||||
def append_new_dataset(self, name, data_dict):
|
||||
"""Creates and appends a new Dataset"""
|
||||
self.append(_Dataset(name, data_dict))
|
||||
|
||||
|
@ -19,7 +19,7 @@ import inspect
|
||||
import itertools
|
||||
|
||||
from types import FunctionType
|
||||
from unittest2 import TestCase
|
||||
from unittest import TestCase
|
||||
from warnings import warn, simplefilter
|
||||
|
||||
from cafe.common.reporting import cclogging
|
||||
|
@ -21,7 +21,7 @@ limitations under the License.
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import unittest2 as unittest
|
||||
import unittest
|
||||
|
||||
from cafe.drivers.base import FixtureReporter
|
||||
from cafe.common.reporting.cclogging import init_root_log_handler
|
||||
@ -75,7 +75,7 @@ class BaseTestFixture(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(BaseTestFixture, cls).setUpClass()
|
||||
#Move root log handler initialization to the runner!
|
||||
# TODO: Move root log handler initialization to the runner!
|
||||
init_root_log_handler()
|
||||
cls._reporter = FixtureReporter(cls)
|
||||
cls.fixture_log = cls._reporter.logger.log
|
||||
@ -140,8 +140,8 @@ class BaseTestFixture(unittest.TestCase):
|
||||
try:
|
||||
func(*args, **kwargs)
|
||||
except Exception as exception:
|
||||
#Pretty prints method signature in the following format:
|
||||
#"classTearDown failure: Unable to execute FnName(a, b, c=42)"
|
||||
# Pretty prints method signature in the following format:
|
||||
# "classTearDown failure: Unable to execute FnName(a, b, c=42)"
|
||||
cls.fixture_log.exception(exception)
|
||||
cls.fixture_log.error(
|
||||
"classTearDown failure: Exception occured while trying to"
|
||||
|
@ -26,7 +26,6 @@ class SummarizeResults(object):
|
||||
|
||||
def get_passed_tests(self):
|
||||
all_tests = []
|
||||
actual_number_of_tests_run = []
|
||||
failed_tests = []
|
||||
skipped_tests = []
|
||||
errored_tests = []
|
||||
@ -50,8 +49,6 @@ class SummarizeResults(object):
|
||||
for item_2 in setup_errored_classes:
|
||||
if item_2 == item_1.__class__.__name__:
|
||||
setup_errored_tests.append(item_1)
|
||||
else:
|
||||
actual_number_of_tests_run = all_tests
|
||||
|
||||
passed_tests = list(set(all_tests) - set(failed_tests) -
|
||||
set(skipped_tests) - set(errored_tests) -
|
||||
|
@ -25,7 +25,7 @@ from inspect import getmembers, isclass
|
||||
from multiprocessing import Process, Manager
|
||||
from re import search
|
||||
from traceback import extract_tb
|
||||
import unittest2 as unittest
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
from result import TaggedTextTestResult
|
||||
@ -88,8 +88,7 @@ def print_traceback():
|
||||
"""
|
||||
info = sys.exc_info()
|
||||
excp_type, excp_value = info[:2]
|
||||
err_msg = error_msg(excp_type.__name__,
|
||||
excp_value)
|
||||
err_msg = error_msg(excp_type.__name__, excp_value)
|
||||
print err_msg
|
||||
for file_name, lineno, function, text in extract_tb(info[2]):
|
||||
print ">>>", file_name
|
||||
@ -129,12 +128,8 @@ class OpenCafeParallelTextTestRunner(unittest.TextTestRunner):
|
||||
def run(self, test):
|
||||
"""Run the given test case or test suite."""
|
||||
result = self._makeResult()
|
||||
startTime = time.time()
|
||||
test(result)
|
||||
stopTime = time.time()
|
||||
timeTaken = stopTime - startTime
|
||||
result.printErrors()
|
||||
run = result.testsRun
|
||||
return result
|
||||
|
||||
|
||||
@ -905,18 +900,10 @@ class UnittestRunner(object):
|
||||
"""Inject tag mapping into the result __dict__ object if available"""
|
||||
if hasattr(result, 'mapping'):
|
||||
mapping = result.mapping.test_to_tag_mapping
|
||||
|
||||
if not mapping is None and len(mapping) > 0:
|
||||
setattr(result, 'tags', mapping)
|
||||
else:
|
||||
setattr(result, 'tags', [])
|
||||
setattr(result, 'tags', mapping or [])
|
||||
|
||||
attributes = result.mapping.test_to_attribute_mapping
|
||||
|
||||
if not attributes is None and len(attributes) > 0:
|
||||
setattr(result, 'attributes', attributes)
|
||||
else:
|
||||
setattr(result, 'attributes', [])
|
||||
setattr(result, 'attributes', attributes or [])
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
@ -927,14 +914,11 @@ class UnittestRunner(object):
|
||||
parallel_test_list = []
|
||||
|
||||
builder = SuiteBuilder(
|
||||
self.cl_args.module_regex,
|
||||
self.cl_args.method_regex,
|
||||
self.cl_args.tags,
|
||||
self.cl_args.supress_flag)
|
||||
self.cl_args.module_regex, self.cl_args.method_regex,
|
||||
self.cl_args.tags, self.cl_args.supress_flag)
|
||||
|
||||
test_runner = self.get_runner(
|
||||
self.cl_args.parallel,
|
||||
self.cl_args.fail_fast,
|
||||
self.cl_args.parallel, self.cl_args.fail_fast,
|
||||
self.cl_args.verbose)
|
||||
|
||||
# Build master test suite
|
||||
@ -950,27 +934,24 @@ class UnittestRunner(object):
|
||||
parallel_test_list = builder.generate_suite_list(
|
||||
path, parallel_test_list)
|
||||
else:
|
||||
master_suite = builder.generate_suite(
|
||||
self.product_repo_path)
|
||||
master_suite = builder.generate_suite(self.product_repo_path)
|
||||
if self.cl_args.parallel:
|
||||
parallel_test_list = builder.generate_suite_list(
|
||||
self.product_repo_path)
|
||||
|
||||
if self.cl_args.parallel:
|
||||
exit_code = self.run_parallel(
|
||||
parallel_test_list,
|
||||
test_runner,
|
||||
parallel_test_list, test_runner,
|
||||
result_type=self.cl_args.result,
|
||||
results_path=self.cl_args.result_directory,
|
||||
verbosity=self.cl_args.verbose)
|
||||
exit(exit_code)
|
||||
else:
|
||||
exit_code = self.run_serialized(
|
||||
master_suite,
|
||||
test_runner,
|
||||
result_type=self.cl_args.result,
|
||||
master_suite, test_runner, result_type=self.cl_args.result,
|
||||
results_path=self.cl_args.result_directory,
|
||||
verbosity=self.cl_args.verbose)
|
||||
|
||||
exit(exit_code)
|
||||
|
||||
@staticmethod
|
||||
@ -1040,13 +1021,13 @@ class UnittestRunner(object):
|
||||
UnittestRunner._inject_tag_mapping(result)
|
||||
|
||||
if result_type is not None:
|
||||
result_parser = SummarizeResults(vars(result), master_suite,
|
||||
total_execution_time)
|
||||
result_parser = SummarizeResults(
|
||||
vars(result), master_suite, total_execution_time)
|
||||
all_results = result_parser.gather_results()
|
||||
reporter = Reporter(result_parser=result_parser,
|
||||
all_results=all_results)
|
||||
reporter.generate_report(result_type=result_type,
|
||||
path=results_path)
|
||||
reporter = Reporter(
|
||||
result_parser=result_parser, all_results=all_results)
|
||||
reporter.generate_report(
|
||||
result_type=result_type, path=results_path)
|
||||
|
||||
log_results(result, verbosity=verbosity)
|
||||
if not result.wasSuccessful():
|
||||
|
@ -72,5 +72,5 @@ class OpenCafeUnittestTestSuite(TestSuite):
|
||||
className = util.strclass(currentClass)
|
||||
errorName = 'setUpClass (%s)' % className
|
||||
self._addClassOrModuleLevelException(result, e, errorName)
|
||||
#Monkeypatch: Run class cleanup if setUpClass fails
|
||||
# Monkeypatch: Run class cleanup if setUpClass fails
|
||||
currentClass._do_class_cleanup_tasks()
|
||||
|
@ -23,14 +23,14 @@ class RequiredClientNotDefinedError(Exception):
|
||||
|
||||
|
||||
def behavior(*required_clients):
|
||||
'''Decorator that tags method as a behavior, and optionally adds
|
||||
"""Decorator that tags method as a behavior, and optionally adds
|
||||
required client objects to an internal attribute. Causes calls to this
|
||||
method to throw RequiredClientNotDefinedError exception if the containing
|
||||
class does not have the proper client instances defined.
|
||||
'''
|
||||
#@decorator.decorator
|
||||
"""
|
||||
|
||||
def _decorator(func):
|
||||
#Unused for now
|
||||
# Unused for now
|
||||
setattr(func, '__is_behavior__', True)
|
||||
setattr(func, '__required_clients__', [])
|
||||
for client in required_clients:
|
||||
|
@ -14,10 +14,10 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
'''Provides low level connectivity to the commandline via popen()
|
||||
"""Provides low level connectivity to the commandline via popen()
|
||||
@note: Primarily intended to serve as base classes for a specific
|
||||
command line client Class
|
||||
'''
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
from subprocess import Popen, PIPE, CalledProcessError
|
||||
@ -29,49 +29,54 @@ from cafe.engine.models.commandline_response import CommandLineResponse
|
||||
|
||||
|
||||
class BaseCommandLineClient(BaseClient):
|
||||
'''Wrapper for driving/parsing a command line program
|
||||
"""Wrapper for driving/parsing a command line program
|
||||
@ivar base_command: This processes base command string. (I.E. 'ls', 'pwd')
|
||||
@type base_command: C{str}
|
||||
@note: This class is dependent on a local installation of the wrapped
|
||||
client process. The thing you run has to be there!
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, base_command=None, env_var_dict=None):
|
||||
'''
|
||||
"""
|
||||
@param base_command: This processes base command string.
|
||||
(I.E. 'ls', 'pwd')
|
||||
@type base_command: C{str}
|
||||
'''
|
||||
"""
|
||||
|
||||
super(BaseCommandLineClient, self).__init__()
|
||||
self.base_command = base_command
|
||||
self.env_var_dict = env_var_dict or {}
|
||||
self.set_environment_variables(self.env_var_dict)
|
||||
|
||||
def set_environment_variables(self, env_var_dict=None):
|
||||
'''Sets all os environment variables provided in env_var_dict'''
|
||||
"""Sets all os environment variables provided in env_var_dict"""
|
||||
|
||||
self.env_var_dict = env_var_dict
|
||||
for key, value in self.env_var_dict.items():
|
||||
self._log.debug('setting {0}={1}'.format(key, value))
|
||||
os.environ[str(key)] = str(value)
|
||||
|
||||
def update_environment_variables(self, env_var_dict=None):
|
||||
'''Sets all os environment variables provided in env_var_dict'''
|
||||
"""Sets all os environment variables provided in env_var_dict"""
|
||||
|
||||
self.env_var_dict = self.env_var_dict.update(env_var_dict or {})
|
||||
for key, value in self.env_var_dict.items():
|
||||
self._log.debug('setting {0}={1}'.format(key, value))
|
||||
os.environ[str(key)] = str(value)
|
||||
|
||||
def unset_environment_variables(self, env_var_list=None):
|
||||
'''Unsets all os environment variables provided in env_var_dict
|
||||
"""Unsets all os environment variables provided in env_var_dict
|
||||
by default.
|
||||
If env_var_list is passed, attempts to unset all environment vars in
|
||||
list'''
|
||||
list"""
|
||||
|
||||
env_var_list = env_var_list or self.env_var_dict.keys() or []
|
||||
for key, _ in env_var_list:
|
||||
self._log.debug('unsetting {0}'.format(key))
|
||||
os.unsetenv(str(key))
|
||||
|
||||
def _build_command(self, cmd, *args):
|
||||
#Process command we received
|
||||
# Process command we received
|
||||
command = "{0} {1}".format(
|
||||
self.base_command, cmd) if self.base_command else cmd
|
||||
if args and args[0]:
|
||||
@ -93,7 +98,7 @@ class BaseCommandLineClient(BaseClient):
|
||||
return command
|
||||
|
||||
def _execute_command(self, command):
|
||||
#Run the command
|
||||
# Run the command
|
||||
process = None
|
||||
try:
|
||||
process = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
|
||||
@ -107,13 +112,14 @@ class BaseCommandLineClient(BaseClient):
|
||||
"""Running a command asynchronously returns a CommandLineResponse
|
||||
objecct with a running subprocess.Process object in it. This process
|
||||
needs to be closed or killed manually after execution."""
|
||||
|
||||
os_response = CommandLineResponse()
|
||||
os_response.command = self._build_command(cmd, *args)
|
||||
os_response.process = self._execute_command(os_response.command)
|
||||
return os_response
|
||||
|
||||
def run_command(self, cmd, *args):
|
||||
'''Sends a command directly to this instance's command line
|
||||
"""Sends a command directly to this instance's command line
|
||||
@param cmd: Command to sent to command line
|
||||
@type cmd: C{str}
|
||||
@param args: Optional list of args to be passed with the command
|
||||
@ -122,8 +128,9 @@ class BaseCommandLineClient(BaseClient):
|
||||
@return: The full response details from the command line
|
||||
@rtype: L{CommandLineResponse}
|
||||
@note: PRIVATE. Can be over-ridden in a child class
|
||||
'''
|
||||
#Wait for the process to complete and then read the output
|
||||
"""
|
||||
|
||||
# Wait for the process to complete and then read the output
|
||||
os_response = self.run_command_async(cmd, *args)
|
||||
std_out, std_err = os_response.process.communicate()
|
||||
os_response.standard_out = str(std_out).splitlines()
|
||||
@ -140,12 +147,13 @@ class BaseCommandLineClient(BaseClient):
|
||||
self._log, info, heading='COMMAND LINE RESPONSE',
|
||||
log_level=DEBUG, one_line=True)
|
||||
|
||||
#Clean up the process to avoid any leakage/wonkiness with stdout/stderr
|
||||
# Clean up the process to avoid any leakage/wonkiness with
|
||||
# stdout/stderr
|
||||
try:
|
||||
os_response.process.kill()
|
||||
except OSError:
|
||||
#An OS Error is valid if the process has exited. We only
|
||||
#need to be concerned about other exceptions
|
||||
# An OS Error is valid if the process has exited. We only
|
||||
# need to be concerned about other exceptions
|
||||
sys.exc_clear()
|
||||
|
||||
os_response.process = None
|
||||
|
@ -19,30 +19,35 @@ from cafe.common.reporting import cclogging
|
||||
|
||||
class CommonToolsMixin(object):
|
||||
"""Methods used to make building data models easier, common to all types"""
|
||||
|
||||
@staticmethod
|
||||
def _bool_to_string(value, true_string='true', false_string='false'):
|
||||
"""Returns a string representation of a boolean value, or the value
|
||||
provided if the value is not an instance of bool
|
||||
"""
|
||||
|
||||
if isinstance(value, bool):
|
||||
return true_string if value is True else false_string
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def _remove_empty_values(dictionary):
|
||||
'''Returns a new dictionary based on 'dictionary', minus any keys with
|
||||
"""Returns a new dictionary based on 'dictionary', minus any keys with
|
||||
values that are None
|
||||
'''
|
||||
"""
|
||||
|
||||
return dict((k, v) for k, v in dictionary.iteritems() if v is not None)
|
||||
|
||||
|
||||
class JSON_ToolsMixin(object):
|
||||
"""Methods used to make building json data models easier"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class XML_ToolsMixin(object):
|
||||
"""Methods used to make building xml data models easier"""
|
||||
|
||||
_XML_VERSION = '1.0'
|
||||
_ENCODING = 'UTF-8'
|
||||
|
||||
@ -69,6 +74,7 @@ class XML_ToolsMixin(object):
|
||||
@staticmethod
|
||||
def _remove_xml_etree_namespace(doc, namespace):
|
||||
"""Remove namespace in the passed document in place."""
|
||||
|
||||
ns = u'{%s}' % namespace
|
||||
nsl = len(ns)
|
||||
for elem in doc.getiterator():
|
||||
@ -126,10 +132,10 @@ class BaseModel(object):
|
||||
return self.__str__()
|
||||
|
||||
|
||||
#Splitting the xml and json stuff into mixins cleans up the code but still
|
||||
#muddies the AutoMarshallingModel namespace. We could create
|
||||
#tool objects in the AutoMarshallingModel, which would just act as
|
||||
#sub-namespaces, to keep it clean. --Jose
|
||||
# Splitting the xml and json stuff into mixins cleans up the code but still
|
||||
# muddies the AutoMarshallingModel namespace. We could create
|
||||
# tool objects in the AutoMarshallingModel, which would just act as
|
||||
# sub-namespaces, to keep it clean. --Jose
|
||||
class AutoMarshallingModel(
|
||||
BaseModel, CommonToolsMixin, JSON_ToolsMixin, XML_ToolsMixin):
|
||||
"""
|
||||
@ -137,6 +143,7 @@ class AutoMarshallingModel(
|
||||
to automatically create serialized requests and automatically
|
||||
deserialize responses in a format-agnostic way.
|
||||
"""
|
||||
|
||||
_log = cclogging.getLogger(__name__)
|
||||
|
||||
def __init__(self):
|
||||
@ -181,7 +188,7 @@ class AutoMarshallingModel(
|
||||
except Exception as deserialization_exception:
|
||||
cls._log.exception(deserialization_exception)
|
||||
|
||||
#Try to log string and format_type if deserialization broke
|
||||
# Try to log string and format_type if deserialization broke
|
||||
if deserialization_exception is not None:
|
||||
try:
|
||||
cls._log.debug(
|
||||
@ -200,14 +207,14 @@ class AutoMarshallingModel(
|
||||
|
||||
return model_object
|
||||
|
||||
#Serialization Functions
|
||||
# Serialization Functions
|
||||
def _obj_to_json(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def _obj_to_xml(self):
|
||||
raise NotImplementedError
|
||||
|
||||
#Deserialization Functions
|
||||
# Deserialization Functions
|
||||
@classmethod
|
||||
def _xml_to_obj(cls, serialized_str):
|
||||
raise NotImplementedError
|
||||
@ -219,11 +226,13 @@ class AutoMarshallingModel(
|
||||
|
||||
class AutoMarshallingListModel(list, AutoMarshallingModel):
|
||||
"""List-like AutoMarshallingModel used for some special cases"""
|
||||
|
||||
def __str__(self):
|
||||
return list.__str__(self)
|
||||
|
||||
|
||||
class AutoMarshallingDictModel(dict, AutoMarshallingModel):
|
||||
"""Dict-like AutoMarshallingModel used for some special cases"""
|
||||
|
||||
def __str__(self):
|
||||
return dict.__str__(self)
|
||||
|
@ -27,6 +27,7 @@ except:
|
||||
The mongo data-source is currently not being used. and needs to be
|
||||
abstracted out into a data-source plugin.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@ -42,7 +43,7 @@ class ConfigEnvironmentVariableError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
#Decorator
|
||||
# This is a decorator
|
||||
def expected_values(*values):
|
||||
def decorator(fn):
|
||||
def wrapped():
|
||||
@ -242,11 +243,9 @@ class MongoDataSource(DictionaryDataSource):
|
||||
|
||||
|
||||
class BaseConfigSectionInterface(object):
|
||||
"""
|
||||
Base class for building an interface for the data contained in a
|
||||
"""Base class for building an interface for the data contained in a
|
||||
SafeConfigParser object, as loaded from the config file as defined
|
||||
by the engine's config file.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, config_file_path, section_name):
|
||||
|
@ -40,4 +40,4 @@ def get_range_data(data, bytes_):
|
||||
start = int(bytes_.strip('-'))
|
||||
else:
|
||||
(start, end) = [int(x) for x in bytes_.split('-')]
|
||||
return data[start:end+1]
|
||||
return data[start:end + 1]
|
||||
|
@ -38,13 +38,13 @@ def _log_transaction(log, level=cclogging.logging.DEBUG):
|
||||
try:
|
||||
log.debug(logline.decode('utf-8', 'replace'))
|
||||
except Exception as exception:
|
||||
#Ignore all exceptions that happen in logging, then log them
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.info(
|
||||
'Exception occured while logging signature of calling'
|
||||
'method in http client')
|
||||
log.exception(exception)
|
||||
|
||||
#Make the request and time it's execution
|
||||
# Make the request and time it's execution
|
||||
response = None
|
||||
elapsed = None
|
||||
try:
|
||||
@ -56,7 +56,7 @@ def _log_transaction(log, level=cclogging.logging.DEBUG):
|
||||
log.exception(exception)
|
||||
raise exception
|
||||
|
||||
#requests lib 1.0.0 renamed body to data in the request object
|
||||
# requests lib 1.0.0 renamed body to data in the request object
|
||||
request_body = ''
|
||||
if 'body' in dir(response.request):
|
||||
request_body = response.request.body
|
||||
@ -67,7 +67,7 @@ def _log_transaction(log, level=cclogging.logging.DEBUG):
|
||||
"Unable to log request body, neither a 'data' nor a "
|
||||
"'body' object could be found")
|
||||
|
||||
#requests lib 1.0.4 removed params from response.request
|
||||
# requests lib 1.0.4 removed params from response.request
|
||||
request_params = ''
|
||||
request_url = response.request.url
|
||||
if 'params' in dir(response.request):
|
||||
@ -85,7 +85,7 @@ def _log_transaction(log, level=cclogging.logging.DEBUG):
|
||||
try:
|
||||
log.log(level, logline.decode('utf-8', 'replace'))
|
||||
except Exception as exception:
|
||||
#Ignore all exceptions that happen in logging, then log them
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.log(level, '\n{0}\nREQUEST INFO\n{0}\n'.format('-' * 12))
|
||||
log.exception(exception)
|
||||
|
||||
@ -99,7 +99,7 @@ def _log_transaction(log, level=cclogging.logging.DEBUG):
|
||||
try:
|
||||
log.log(level, logline.decode('utf-8', 'replace'))
|
||||
except Exception as exception:
|
||||
#Ignore all exceptions that happen in logging, then log them
|
||||
# Ignore all exceptions that happen in logging, then log them
|
||||
log.log(level, '\n{0}\nRESPONSE INFO\n{0}\n'.format('-' * 13))
|
||||
log.exception(exception)
|
||||
return response
|
||||
@ -217,45 +217,43 @@ class HTTPClient(BaseHTTPClient):
|
||||
self, method, url, headers=None, params=None, data=None,
|
||||
requestslib_kwargs=None):
|
||||
|
||||
#set requestslib_kwargs to an empty dict if None
|
||||
# set requestslib_kwargs to an empty dict if None
|
||||
requestslib_kwargs = requestslib_kwargs if (
|
||||
requestslib_kwargs is not None) else {}
|
||||
|
||||
#Set defaults
|
||||
# Set defaults
|
||||
params = params if params is not None else {}
|
||||
verify = False
|
||||
|
||||
#If headers are provided by both, headers "wins" over default_headers
|
||||
# If headers are provided by both, headers "wins" over default_headers
|
||||
headers = dict(self.default_headers, **(headers or {}))
|
||||
|
||||
#Override url if present in requestslib_kwargs
|
||||
# Override url if present in requestslib_kwargs
|
||||
if 'url' in requestslib_kwargs.keys():
|
||||
url = requestslib_kwargs.get('url', None) or url
|
||||
del requestslib_kwargs['url']
|
||||
|
||||
#Override method if present in requestslib_kwargs
|
||||
# Override method if present in requestslib_kwargs
|
||||
if 'method' in requestslib_kwargs.keys():
|
||||
method = requestslib_kwargs.get('method', None) or method
|
||||
del requestslib_kwargs['method']
|
||||
|
||||
#The requests lib already removes None key/value pairs, but we force it
|
||||
#here in case that behavior ever changes
|
||||
# The requests lib already removes None key/value pairs, but we force
|
||||
# it here in case that behavior ever changes
|
||||
for key in requestslib_kwargs.keys():
|
||||
if requestslib_kwargs[key] is None:
|
||||
del requestslib_kwargs[key]
|
||||
|
||||
#Create the final parameters for the call to the base request()
|
||||
#Wherever a parameter is provided both by the calling method AND
|
||||
#the requests_lib kwargs dictionary, requestslib_kwargs "wins"
|
||||
requestslib_kwargs = dict({'headers': headers,
|
||||
'params': params,
|
||||
'verify': verify,
|
||||
'data': data},
|
||||
**requestslib_kwargs)
|
||||
# Create the final parameters for the call to the base request()
|
||||
# Wherever a parameter is provided both by the calling method AND
|
||||
# the requests_lib kwargs dictionary, requestslib_kwargs "wins"
|
||||
requestslib_kwargs = dict(
|
||||
{'headers': headers, 'params': params, 'verify': verify,
|
||||
'data': data}, **requestslib_kwargs)
|
||||
|
||||
#Make the request
|
||||
return super(HTTPClient, self).request(method, url,
|
||||
**requestslib_kwargs)
|
||||
# Make the request
|
||||
return super(HTTPClient, self).request(
|
||||
method, url, **requestslib_kwargs)
|
||||
|
||||
|
||||
class AutoMarshallingHTTPClient(HTTPClient):
|
||||
@ -269,32 +267,33 @@ class AutoMarshallingHTTPClient(HTTPClient):
|
||||
self.default_headers = {'Content-Type': 'application/{format}'.format(
|
||||
format=serialize_format)}
|
||||
|
||||
def request(self, method, url, headers=None, params=None, data=None,
|
||||
response_entity_type=None, request_entity=None,
|
||||
requestslib_kwargs=None):
|
||||
def request(
|
||||
self, method, url, headers=None, params=None, data=None,
|
||||
response_entity_type=None, request_entity=None,
|
||||
requestslib_kwargs=None):
|
||||
|
||||
#defaults requestslib_kwargs to a dictionary if it is None
|
||||
# defaults requestslib_kwargs to a dictionary if it is None
|
||||
requestslib_kwargs = requestslib_kwargs if (requestslib_kwargs is not
|
||||
None) else {}
|
||||
|
||||
#set the 'data' parameter of the request to either what's already in
|
||||
#requestslib_kwargs, or the deserialized output of the request_entity
|
||||
# set the 'data' parameter of the request to either what's already in
|
||||
# requestslib_kwargs, or the deserialized output of the request_entity
|
||||
if request_entity is not None:
|
||||
requestslib_kwargs = dict(
|
||||
{'data': request_entity.serialize(self.serialize_format)},
|
||||
**requestslib_kwargs)
|
||||
|
||||
#Make the request
|
||||
# Make the request
|
||||
response = super(AutoMarshallingHTTPClient, self).request(
|
||||
method, url, headers=headers, params=params, data=data,
|
||||
requestslib_kwargs=requestslib_kwargs)
|
||||
|
||||
#Append the deserialized data object to the response
|
||||
# Append the deserialized data object to the response
|
||||
response.request.__dict__['entity'] = None
|
||||
response.__dict__['entity'] = None
|
||||
|
||||
#If present, append the serialized request data object to
|
||||
#response.request
|
||||
# If present, append the serialized request data object to
|
||||
# response.request
|
||||
if response.request is not None:
|
||||
response.request.__dict__['entity'] = request_entity
|
||||
|
||||
|
@ -14,6 +14,22 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
from setuptools import setup, find_packages
|
||||
import sys
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
|
||||
# tox integration
|
||||
class Tox(TestCommand):
|
||||
def finalize_options(self):
|
||||
TestCommand.finalize_options(self)
|
||||
self.test_args = []
|
||||
self.test_suite = True
|
||||
|
||||
def run_tests(self):
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import tox
|
||||
errno = tox.cmdline(self.test_args)
|
||||
sys.exit(errno)
|
||||
|
||||
setup(
|
||||
name='cafe_http_plugin',
|
||||
@ -25,4 +41,6 @@ setup(
|
||||
packages=find_packages(),
|
||||
namespace_packages=['cafe'],
|
||||
install_requires=['requests'],
|
||||
tests_require=['tox'],
|
||||
cmdclass={'test': Tox},
|
||||
zip_safe=False)
|
||||
|
plugins/http/test-requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
+tox
+mock
+flake8
+nose
plugins/http/tox.ini (new file, 17 lines)
@@ -0,0 +1,17 @@
+[tox]
+envlist=pep8,py27
+
+[testenv]
+setenv=VIRTUAL_ENV={envdir}
+
+deps=-r{toxinidir}/test-requirements.txt
+
+[testenv:py27]
+commands=nosetests {toxinidir}
+
+[testenv:pep8]
+commands=flake8
+
+[flake8]
+ignore=F401
+exclude=.git,.idea,docs,.tox,bin,dist,tools,*.egg-info
plugins/rsyslog/cafe/__init__.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+"""
+Copyright 2013 Rackspace
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+__import__('pkg_resources').declare_namespace(__name__)
plugins/rsyslog/setup.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+"""
+Copyright 2013 Rackspace
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from setuptools import setup, find_packages
+import sys
+from setuptools.command.test import test as TestCommand
+
+
+# tox integration
+class Tox(TestCommand):
+    def finalize_options(self):
+        TestCommand.finalize_options(self)
+        self.test_args = []
+        self.test_suite = True
+
+    def run_tests(self):
+        # import here, cause outside the eggs aren't loaded
+        import tox
+        errno = tox.cmdline(self.test_args)
+        sys.exit(errno)
+
+setup(
+    name='cafe_rsyslog_plugin',
+    version='0.0.1',
+    description='The Common Automation Framework Engine',
+    author='Rackspace Cloud QE',
+    author_email='cloud-cafe@lists.rackspace.com',
+    url='http://rackspace.com',
+    packages=find_packages(),
+    namespace_packages=['cafe'],
+    install_requires=['portal'],
+    tests_require=['tox'],
+    cmdclass={'test': Tox},
+    zip_safe=False)
plugins/rsyslog/test-requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
+tox
+mock
+flake8
+nose
plugins/rsyslog/tests/resources/rsyslog/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+"""
+Copyright 2013 Rackspace
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
@@ -20,7 +20,7 @@ from cafe.resources.rsyslog.client import RSyslogClient, MessageHandler

 class TestSyslogClient(TestCase):
     DEFAULT_SD_DICT = {
         'meniscus': {
             'test_project': {
                 'token': 'test-token',
                 'tenant': 'test-tenant'
             }
@@ -28,7 +28,7 @@ class TestSyslogClient(TestCase):

     SAMPLE_SD_DICT = {
         'origin': {
-            'software': 'cloudcafe-rsyslog'
+            'software': 'opencafe-rsyslog'
         }
     }

@@ -41,7 +41,7 @@ class TestSyslogClient(TestCase):

     def test_conversion_between_sd_dict_to_syslog_str(self):
         result = MessageHandler.sd_dict_to_syslog_str(self.SAMPLE_SD_DICT)
-        self.assertEqual(result, '[origin software="cloudcafe-rsyslog"]')
+        self.assertEqual(result, '[origin software="opencafe-rsyslog"]')

     def test_send_basic_message(self):
         result = self.client.send(priority=1, msg='bam',
plugins/rsyslog/tox.ini (new file, 17 lines)
@@ -0,0 +1,17 @@
+[tox]
+envlist=pep8,py27
+
+[testenv]
+setenv=VIRTUAL_ENV={envdir}
+
+deps=-r{toxinidir}/test-requirements.txt
+
+[testenv:py27]
+commands=nosetests {toxinidir}
+
+[testenv:pep8]
+commands=flake8
+
+[flake8]
+ignore=F401
+exclude=.git,.idea,docs,.tox,bin,dist,tools,*.egg-info
@@ -20,12 +20,12 @@ class GitHubConfig(ConfigSectionInterface):

     SECTION_NAME = 'GITHUB'

-    #Access or authorization token
+    # Access or authorization token
     @property
     def token(self):
         return self.get('token')

-    #Full repository name (e.g., 'organization/repo')
+    # Full repository name (e.g., 'organization/repo')
     @property
     def repo(self):
         return self.get('repo')
@@ -20,7 +20,7 @@ class LaunchpadTrackerConfig(ConfigSectionInterface):

     SECTION_NAME = 'LAUNCHPAD'

-    #Project name in Launchpad (name in URL of project, not display name)
+    # Project name in Launchpad (name in URL of project, not display name)
     @property
     def project(self):
         return self.get('project')
@@ -13,4 +13,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 """
-__import__('pkg_resources').declare_namespace(__name__)
+__import__('pkg_resources').declare_namespace(__name__)
@@ -16,8 +16,9 @@ limitations under the License.

 from warnings import warn, simplefilter
 simplefilter("default", DeprecationWarning)
-warn("cafe.engine.clients.ssh has been moved to "
-     "cafe.engine.ssh.client", DeprecationWarning)
-from cafe.engine.ssh.client import SSHAuthStrategy, ExtendedParamikoSSHClient,\
-    BaseSSHClient,SSHClient
+warn(
+    "cafe.engine.clients.ssh has been moved to cafe.engine.ssh.client",
+    DeprecationWarning)
+
+from cafe.engine.ssh.client import \
+    SSHAuthStrategy, ExtendedParamikoSSHClient, BaseSSHClient, SSHClient
@@ -12,4 +12,4 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-"""
+"""
@@ -12,4 +12,4 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-"""
+"""
run_tests.sh (deleted, 17 lines)
@@ -1,17 +0,0 @@
-#!/bin/bash
-# Copyright 2013 Rackspace
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-flake8 cafe
-python -m unittest discover -t cafe -s cafe/tests
setup.py (47 lines changed)
@ -14,24 +14,14 @@ See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from subprocess import call
|
||||
|
||||
try:
|
||||
from setuptools import setup, find_packages
|
||||
from setuptools.command.install import install as _install
|
||||
except ImportError:
|
||||
#currently broken, this really only works with setuptools
|
||||
from distutils.core import setup, find_packages
|
||||
from distutils.command.install import install as _install
|
||||
|
||||
if sys.argv[-1] == 'publish':
|
||||
os.system('python setup.py sdist upload')
|
||||
sys.exit()
|
||||
from setuptools import setup, find_packages
|
||||
from setuptools.command.install import install as _install
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
|
||||
#Post-install engine configuration
|
||||
# Post-install engine configuration
|
||||
def _post_install(dir):
|
||||
call(['cafe-config', 'engine', '--init-install'])
|
||||
call(['cafe-config', 'plugins', 'add', 'plugins'])
|
||||
@ -66,7 +56,7 @@ if sys.version_info < (2, 7):
|
||||
requires.extend(oldpy_requires)
|
||||
|
||||
|
||||
#cmdclass hook allows setup to make post install call
|
||||
# cmdclass hook allows setup to make post install call
|
||||
class install(_install):
|
||||
def run(self):
|
||||
_install.run(self)
|
||||
@ -74,21 +64,31 @@ class install(_install):
|
||||
_post_install, (self.install_lib,),
|
||||
msg="\nRunning post install tasks...")
|
||||
|
||||
#Normal setup stuff
|
||||
|
||||
# tox integration
|
||||
class Tox(TestCommand):
|
||||
def finalize_options(self):
|
||||
TestCommand.finalize_options(self)
|
||||
self.test_args = []
|
||||
self.test_suite = True
|
||||
|
||||
def run_tests(self):
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import tox
|
||||
errno = tox.cmdline(self.test_args)
|
||||
sys.exit(errno)
|
||||
|
||||
# Normal setup stuff
|
||||
setup(
|
||||
name='cafe',
|
||||
version='0.1.0',
|
||||
description='The Common Automation Framework Engine',
|
||||
long_description='{0}\n\n{1}'.format(
|
||||
open('README.md').read(),
|
||||
open('HISTORY.md').read()),
|
||||
long_description='{0}'.format(open('README.md').read()),
|
||||
author='Rackspace Cloud QE',
|
||||
author_email='cloud-cafe@lists.rackspace.com',
|
||||
url='http://rackspace.com',
|
||||
packages=find_packages(),
|
||||
package_data={'': ['LICENSE', 'NOTICE']},
|
||||
namespace_packages=['cafe'],
|
||||
include_package_data=True,
|
||||
install_requires=requires,
|
||||
license=open('LICENSE').read(),
|
||||
zip_safe=False,
|
||||
@ -109,4 +109,7 @@ setup(
|
||||
'vows-runner = cafe.drivers.pyvows.runner:entry_point',
|
||||
'specter-runner = cafe.drivers.specter.runner:entry_point',
|
||||
'cafe-config = cafe.configurator.cli:entry_point']},
|
||||
cmdclass={'install': install})
|
||||
tests_require=['tox'],
|
||||
cmdclass={
|
||||
'install': install,
|
||||
'test': Tox})
|
||||
|
@ -15,7 +15,7 @@ limitations under the License.
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import unittest2 as unittest
|
||||
import unittest
|
||||
from uuid import uuid4
|
||||
|
||||
from cafe.common.reporting.reporter import Reporter
|
||||
@ -25,6 +25,15 @@ from cafe.drivers.unittest.runner import UnittestRunner
|
||||
from cafe.drivers.unittest.suite import OpenCafeUnittestTestSuite
|
||||
|
||||
|
||||
def load_tests(*args, **kwargs):
|
||||
suite = unittest.suite.TestSuite()
|
||||
suite.addTest(ReportingTests('test_create_json_report'))
|
||||
suite.addTest(ReportingTests('test_create_xml_report'))
|
||||
suite.addTest(ReportingTests('test_create_json_report_w_file_name'))
|
||||
suite.addTest(ReportingTests('test_create_xml_report_w_file_name'))
|
||||
return suite
|
||||
|
||||
|
||||
class FakeTests(unittest.TestCase):
|
||||
|
||||
""" These tests are only used only to create a SummarizeResults object
|
||||
@ -59,22 +68,20 @@ class ReportingTests(unittest.TestCase):
|
||||
self.failure_trace = 'Traceback: ' + str(uuid4())
|
||||
self.skip_msg = str(uuid4())
|
||||
self.error_trace = 'Traceback: ' + str(uuid4())
|
||||
result = {'testsRun': 4,
|
||||
'errors': [(FakeTests('test_report_error'),
|
||||
self.error_trace)],
|
||||
'skipped': [(FakeTests('test_report_skip'),
|
||||
self.skip_msg)],
|
||||
'failures': [(FakeTests('test_report_fail'),
|
||||
self.failure_trace)]}
|
||||
result = {
|
||||
'testsRun': 4,
|
||||
'errors': [(FakeTests('test_report_error'), self.error_trace)],
|
||||
'skipped': [(FakeTests('test_report_skip'), self.skip_msg)],
|
||||
'failures': [(FakeTests('test_report_fail'), self.failure_trace)]}
|
||||
|
||||
self.result_parser = SummarizeResults(master_testsuite=test_suite,
|
||||
result_dict=result,
|
||||
execution_time=1.23)
|
||||
self.result_parser = SummarizeResults(
|
||||
master_testsuite=test_suite, result_dict=result,
|
||||
execution_time=1.23)
|
||||
self.all_results = self.result_parser.gather_results()
|
||||
self.reporter = Reporter(result_parser=self.result_parser,
|
||||
all_results=self.all_results,)
|
||||
self.reporter = Reporter(
|
||||
result_parser=self.result_parser, all_results=self.all_results,)
|
||||
|
||||
self.results_dir = os.getcwd() + '/test-reporting-results'
|
||||
self.results_dir = os.getcwd() + os.path.sep + 'test-reporting-results'
|
||||
if not os.path.exists(self.results_dir):
|
||||
os.makedirs(self.results_dir)
|
||||
|
||||
@ -82,12 +89,11 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Checks for generic test information (names and messages)
|
||||
in the specified report file.
|
||||
"""
|
||||
return self._file_contains(file_path=file_path,
|
||||
target_strings=
|
||||
['test_report_pass', 'test_report_fail',
|
||||
'test_report_skip', 'test_report_error',
|
||||
self.failure_trace, self.skip_msg,
|
||||
self.error_trace])
|
||||
return self._file_contains(
|
||||
file_path=file_path, target_strings=[
|
||||
'test_report_pass', 'test_report_fail', 'test_report_skip',
|
||||
'test_report_error', self.failure_trace, self.skip_msg,
|
||||
self.error_trace])
|
||||
|
||||
def _file_contains(self, file_path, target_strings):
|
||||
""" Checks that the specified file contains all strings in the
|
||||
@ -103,9 +109,9 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Creates a json report and checks that the created report contains
|
||||
the proper test information.
|
||||
"""
|
||||
self.reporter.generate_report(result_type='json',
|
||||
path=self.results_dir)
|
||||
results_file = self.results_dir + '/results.json'
|
||||
self.reporter.generate_report(
|
||||
result_type='json', path=self.results_dir)
|
||||
results_file = self.results_dir + os.path.sep + 'results.json'
|
||||
self.assertTrue(os.path.exists(results_file))
|
||||
self.assertTrue(self._file_contains_test_info(file_path=results_file))
|
||||
|
||||
@ -114,9 +120,8 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Creates an xml report and checks that the created report contains
|
||||
the proper test information.
|
||||
"""
|
||||
self.reporter.generate_report(result_type='xml',
|
||||
path=self.results_dir)
|
||||
results_file = self.results_dir + '/results.xml'
|
||||
self.reporter.generate_report(result_type='xml', path=self.results_dir)
|
||||
results_file = self.results_dir + os.path.sep + 'results.xml'
|
||||
self.assertTrue(os.path.exists(results_file))
|
||||
self.assertTrue(self._file_contains_test_info(file_path=results_file))
|
||||
|
||||
@ -125,9 +130,8 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Creates a json report with a specified file name and checks that
|
||||
the created report contains the proper test information.
|
||||
"""
|
||||
results_file = self.results_dir + str(uuid4()) + '.json'
|
||||
self.reporter.generate_report(result_type='json',
|
||||
path=results_file)
|
||||
results_file = self.results_dir + os.path.sep + str(uuid4()) + '.json'
|
||||
self.reporter.generate_report(result_type='json', path=results_file)
|
||||
self.assertTrue(os.path.exists(results_file))
|
||||
self.assertTrue(self._file_contains_test_info(file_path=results_file))
|
||||
|
||||
@ -136,9 +140,8 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Creates an xml report with a specified file name and checks that
|
||||
the created report contains the proper test information.
|
||||
"""
|
||||
results_file = self.results_dir + str(uuid4()) + '.xml'
|
||||
self.reporter.generate_report(result_type='xml',
|
||||
path=results_file)
|
||||
results_file = self.results_dir + os.path.sep + str(uuid4()) + '.xml'
|
||||
self.reporter.generate_report(result_type='xml', path=results_file)
|
||||
self.assertTrue(os.path.exists(results_file))
|
||||
self.assertTrue(self._file_contains_test_info(file_path=results_file))
|
||||
|
||||
@ -146,20 +149,3 @@ class ReportingTests(unittest.TestCase):
|
||||
""" Deletes created reports and directories. """
|
||||
if os.path.exists(self.results_dir):
|
||||
self.results_dir = shutil.rmtree(self.results_dir)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Creates a suite of only the actual unit tests so that
|
||||
# fake tests are not a part of the unit test results.
|
||||
test_runner_serial = UnittestRunner.get_runner(False, True, 3)
|
||||
test_runner_parallel = UnittestRunner.get_runner(True, True, 3)
|
||||
master_suite = OpenCafeUnittestTestSuite()
|
||||
master_suite.addTest(ReportingTests('test_create_json_report'))
|
||||
master_suite.addTest(ReportingTests('test_create_xml_report'))
|
||||
master_suite.addTest(ReportingTests('test_create_json_report_w_file_name'))
|
||||
master_suite.addTest(ReportingTests('test_create_xml_report_w_file_name'))
|
||||
UnittestRunner.run_serialized(
|
||||
master_suite, test_runner_serial, Reporter.JSON_REPORT, verbosity=3)
|
||||
UnittestRunner.run_parallel(
|
||||
[master_suite], test_runner_parallel, Reporter.XML_REPORT, verbosity=3)
|
||||
exit(0)
|
||||
|