Merge "Port all Sahara scenarios to new style"

Jenkins, 2016-10-06 12:55:10 +00:00 (committed by Gerrit Code Review)
commit a7b0739b4e
6 changed files with 266 additions and 259 deletions
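
Every hunk below applies the same mechanical pattern: a benchmark method on a catch-all scenario class becomes its own scenario class with a run() method, and the historical "Class.method" identifier is pinned with an explicit name= argument, keeping the public scenario name unchanged. A condensed sketch of the pattern (signatures trimmed for illustration; the real code is in the diffs below):

    from rally.plugins.openstack.scenarios.sahara import utils
    from rally.task import scenario

    # Old style: one class holds many scenarios; the public name is
    # derived implicitly from the class and method names.
    class SaharaClusters(utils.SaharaScenario):

        @scenario.configure(context={"cleanup": ["sahara"]})
        def create_and_delete_cluster(self, workers_count, plugin_name,
                                      hadoop_version):
            ...

    # New style: one class per scenario, the body moves into run(), and
    # the old dotted name is kept explicitly for backward compatibility.
    @scenario.configure(context={"cleanup": ["sahara"]},
                        name="SaharaClusters.create_and_delete_cluster")
    class CreateAndDeleteCluster(utils.SaharaScenario):

        def run(self, workers_count, plugin_name, hadoop_version):
            ...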

File: rally/plugins/openstack/scenarios/sahara/clusters.py

@@ -22,34 +22,31 @@ from rally.task import validation
 LOG = logging.getLogger(__name__)

+"""Scenarios for Sahara clusters."""
+
-class SaharaClusters(utils.SaharaScenario):
-    """Benchmark scenarios for Sahara clusters."""
-
-    @types.convert(flavor={"type": "nova_flavor"},
-                   master_flavor={"type": "nova_flavor"},
-                   worker_flavor={"type": "nova_flavor"},
-                   neutron_net={"type": "neutron_network"},
-                   floating_ip_pool={"type": "neutron_network"})
-    @validation.flavor_exists("master_flavor")
-    @validation.flavor_exists("worker_flavor")
-    @validation.required_contexts("users", "sahara_image")
-    @validation.number("workers_count", minval=1, integer_only=True)
-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_openstack(users=True)
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_and_delete_cluster(self, workers_count, plugin_name,
-                                  hadoop_version,
-                                  master_flavor=None, worker_flavor=None,
-                                  flavor=None,
-                                  floating_ip_pool=None,
-                                  volumes_per_node=None,
-                                  volumes_size=None, auto_security_group=None,
-                                  security_groups=None, node_configs=None,
-                                  cluster_configs=None,
-                                  enable_anti_affinity=False,
-                                  enable_proxy=False,
-                                  use_autoconfig=True):
+@types.convert(flavor={"type": "nova_flavor"},
+               master_flavor={"type": "nova_flavor"},
+               worker_flavor={"type": "nova_flavor"},
+               neutron_net={"type": "neutron_network"},
+               floating_ip_pool={"type": "neutron_network"})
+@validation.flavor_exists("master_flavor")
+@validation.flavor_exists("worker_flavor")
+@validation.required_contexts("users", "sahara_image")
+@validation.number("workers_count", minval=1, integer_only=True)
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_openstack(users=True)
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaClusters.create_and_delete_cluster")
+class CreateAndDeleteCluster(utils.SaharaScenario):
+
+    def run(self, workers_count, plugin_name, hadoop_version,
+            master_flavor=None, worker_flavor=None, flavor=None,
+            floating_ip_pool=None, volumes_per_node=None,
+            volumes_size=None, auto_security_group=None,
+            security_groups=None, node_configs=None,
+            cluster_configs=None, enable_anti_affinity=False,
+            enable_proxy=False, use_autoconfig=True):
         """Launch and delete a Sahara Cluster.

         This scenario launches a Hadoop cluster, waits until it becomes
@@ -119,27 +116,26 @@ class SaharaClusters(utils.SaharaScenario):
         self._delete_cluster(cluster)

-    @types.convert(flavor={"type": "nova_flavor"},
-                   master_flavor={"type": "nova_flavor"},
-                   worker_flavor={"type": "nova_flavor"})
-    @validation.flavor_exists("master_flavor")
-    @validation.flavor_exists("worker_flavor")
-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_contexts("users", "sahara_image")
-    @validation.number("workers_count", minval=1, integer_only=True)
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_scale_delete_cluster(self, master_flavor, worker_flavor,
-                                    workers_count, plugin_name,
-                                    hadoop_version, deltas,
-                                    flavor=None,
-                                    floating_ip_pool=None,
-                                    volumes_per_node=None, volumes_size=None,
-                                    auto_security_group=None,
-                                    security_groups=None, node_configs=None,
-                                    cluster_configs=None,
-                                    enable_anti_affinity=False,
-                                    enable_proxy=False,
-                                    use_autoconfig=True):
+@types.convert(flavor={"type": "nova_flavor"},
+               master_flavor={"type": "nova_flavor"},
+               worker_flavor={"type": "nova_flavor"})
+@validation.flavor_exists("master_flavor")
+@validation.flavor_exists("worker_flavor")
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_contexts("users", "sahara_image")
+@validation.number("workers_count", minval=1, integer_only=True)
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaClusters.create_scale_delete_cluster")
+class CreateScaleDeleteCluster(utils.SaharaScenario):
+
+    def run(self, master_flavor, worker_flavor, workers_count,
+            plugin_name, hadoop_version, deltas, flavor=None,
+            floating_ip_pool=None, volumes_per_node=None,
+            volumes_size=None, auto_security_group=None,
+            security_groups=None, node_configs=None,
+            cluster_configs=None, enable_anti_affinity=False,
+            enable_proxy=False, use_autoconfig=True):
         """Launch, scale and delete a Sahara Cluster.

         This scenario launches a Hadoop cluster, waits until it becomes
@@ -228,4 +224,4 @@ class SaharaClusters(utils.SaharaScenario):
             elif delta < 0:
                 self._scale_cluster_down(cluster, delta)

-        self._delete_cluster(cluster)
\ No newline at end of file
+        self._delete_cluster(cluster)

File: rally/plugins/openstack/scenarios/sahara/jobs.py

@@ -22,14 +22,17 @@ from rally.task import validation
 LOG = logging.getLogger(__name__)

-class SaharaJob(utils.SaharaScenario):
-    """Benchmark scenarios for Sahara jobs."""
+"""Benchmark scenarios for Sahara jobs."""

-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_contexts("users", "sahara_image",
-                                  "sahara_job_binaries", "sahara_cluster")
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_launch_job(self, job_type, configs, job_idx=0):
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_contexts("users", "sahara_image",
+                              "sahara_job_binaries", "sahara_cluster")
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaJob.create_launch_job")
+class CreateLaunchJob(utils.SaharaScenario):
+
+    def run(self, job_type, configs, job_idx=0):
         """Create and execute a Sahara EDP Job.

         This scenario Creates a Job entity and launches an execution on a
@@ -68,11 +71,15 @@ class SaharaJob(utils.SaharaScenario):
             configs=configs,
             job_idx=job_idx)

-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_contexts("users", "sahara_image",
-                                  "sahara_job_binaries", "sahara_cluster")
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_launch_job_sequence(self, jobs):
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_contexts("users", "sahara_image",
+                              "sahara_job_binaries", "sahara_cluster")
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaJob.create_launch_job_sequence")
+class CreateLaunchJobSequence(utils.SaharaScenario):
+
+    def run(self, jobs):
         """Create and execute a sequence of the Sahara EDP Jobs.

         This scenario Creates a Job entity and launches an execution on a
@@ -81,15 +88,21 @@ class SaharaJob(utils.SaharaScenario):
         :param jobs: list of jobs that should be executed in one context
         """
+        launch_job = CreateLaunchJob(self.context)
+
         for idx, job in enumerate(jobs):
             LOG.debug("Launching Job. Sequence #%d" % idx)
-            self.create_launch_job(job["job_type"], job["configs"], idx)
+            launch_job.run(job["job_type"], job["configs"], idx)

-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_contexts("users", "sahara_image",
-                                  "sahara_job_binaries", "sahara_cluster")
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_launch_job_sequence_with_scaling(self, jobs, deltas):
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_contexts("users", "sahara_image",
+                              "sahara_job_binaries", "sahara_cluster")
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaJob.create_launch_job_sequence_with_scaling")
+class CreateLaunchJobSequenceWithScaling(utils.SaharaScenario):
+
+    def run(self, jobs, deltas):
         """Create and execute Sahara EDP Jobs on a scaling Cluster.

         This scenario Creates a Job entity and launches an execution on a
@@ -103,8 +116,8 @@ class SaharaJob(utils.SaharaScenario):
         cluster_id = self.context["tenant"]["sahara"]["cluster"]

         # Executing the sequence before the first scaling
-        self.create_launch_job_sequence(jobs)
+        launch_job_sequence = CreateLaunchJobSequence(self.context)
+        launch_job_sequence.run(jobs)

         for delta in deltas:
             # The Cluster is fetched every time so that its node groups have
@@ -122,4 +135,4 @@ class SaharaJob(utils.SaharaScenario):
                 self._scale_cluster_down(cluster, delta)

             LOG.debug("Starting Job sequence")
-            self.create_launch_job_sequence(jobs)
+            launch_job_sequence.run(jobs)
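
Note how composition works after the split: a scenario that used to call a sibling method on self now instantiates the scenario class it reuses, passing along the same context, and invokes its run(). Condensed from the jobs.py hunks above:

    # Inside CreateLaunchJobSequence.run(): reuse CreateLaunchJob by
    # constructing it with the shared benchmark context.
    launch_job = CreateLaunchJob(self.context)
    for idx, job in enumerate(jobs):
        LOG.debug("Launching Job. Sequence #%d" % idx)
        launch_job.run(job["job_type"], job["configs"], idx)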

File: rally/plugins/openstack/scenarios/sahara/node_group_templates.py

@@ -19,19 +19,20 @@ from rally.plugins.openstack.scenarios.sahara import utils
 from rally.task import types
 from rally.task import validation

+"""Scenarios for Sahara node group templates."""
+
-class SaharaNodeGroupTemplates(utils.SaharaScenario):
-    """Benchmark scenarios for Sahara node group templates."""
-
-    @types.convert(flavor={"type": "nova_flavor"})
-    @validation.flavor_exists("flavor")
-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_openstack(users=True)
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_and_list_node_group_templates(self, flavor,
-                                             plugin_name="vanilla",
-                                             hadoop_version="1.2.1",
-                                             use_autoconfig=True):
+@types.convert(flavor={"type": "nova_flavor"})
+@validation.flavor_exists("flavor")
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_openstack(users=True)
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaNodeGroupTemplates"
+                         ".create_and_list_node_group_templates")
+class CreateAndListNodeGroupTemplates(utils.SaharaScenario):
+
+    def run(self, flavor, plugin_name="vanilla",
+            hadoop_version="1.2.1", use_autoconfig=True):
         """Create and list Sahara Node Group Templates.

         This scenario creates two Node Group Templates with different set of
@@ -65,15 +66,18 @@ class SaharaNodeGroupTemplates(utils.SaharaScenario):
             use_autoconfig=use_autoconfig)

         self._list_node_group_templates()

-    @types.convert(flavor={"type": "nova_flavor"})
-    @validation.flavor_exists("flavor")
-    @validation.required_services(consts.Service.SAHARA)
-    @validation.required_openstack(users=True)
-    @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_delete_node_group_templates(self, flavor,
-                                           plugin_name="vanilla",
-                                           hadoop_version="1.2.1",
-                                           use_autoconfig=True):
+@types.convert(flavor={"type": "nova_flavor"})
+@validation.flavor_exists("flavor")
+@validation.required_services(consts.Service.SAHARA)
+@validation.required_openstack(users=True)
+@scenario.configure(context={"cleanup": ["sahara"]},
+                    name="SaharaNodeGroupTemplates"
+                         ".create_delete_node_group_templates")
+class CreateDeleteNodeGroupTemplates(utils.SaharaScenario):
+
+    def run(self, flavor, plugin_name="vanilla",
+            hadoop_version="1.2.1", use_autoconfig=True):
         """Create and delete Sahara Node Group Templates.

         This scenario creates and deletes two most common types of
@@ -106,4 +110,4 @@ class SaharaNodeGroupTemplates(utils.SaharaScenario):
             use_autoconfig=use_autoconfig)

         self._delete_node_group_template(master_ngt)
-        self._delete_node_group_template(worker_ngt)
\ No newline at end of file
+        self._delete_node_group_template(worker_ngt)
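
Because each @scenario.configure above pins name= to the historical identifier, task files need no changes. A hypothetical task entry (the flavor and runner values here are invented for illustration) still references the old-style name:

    {
        "SaharaNodeGroupTemplates.create_and_list_node_group_templates": [
            {
                "args": {"flavor": {"name": "m1.small"}},
                "runner": {"type": "constant", "times": 1, "concurrency": 1}
            }
        ]
    }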

File: tests/unit/plugins/openstack/scenarios/sahara/test_clusters.py

@@ -18,35 +18,34 @@ import mock
 from rally.plugins.openstack.scenarios.sahara import clusters
 from tests.unit import test

-SAHARA_CLUSTERS = ("rally.plugins.openstack.scenarios.sahara.clusters"
-                   ".SaharaClusters")
 SAHARA_UTILS = "rally.plugins.openstack.scenarios.sahara.utils"
+BASE = "rally.plugins.openstack.scenarios.sahara.clusters"

 class SaharaClustersTestCase(test.ScenarioTestCase):

-    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
-    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
+    @mock.patch("%s.CreateAndDeleteCluster._delete_cluster" % BASE)
+    @mock.patch("%s.CreateAndDeleteCluster._launch_cluster" % BASE,
                 return_value=mock.MagicMock(id=42))
-    def test_create_and_delete_cluster(
-            self, mock__launch_cluster, mock__delete_cluster):
-        clusters_scenario = clusters.SaharaClusters(self.context)
+    def test_create_and_delete_cluster(self,
+                                       mock_launch_cluster,
+                                       mock_delete_cluster):
+        scenario = clusters.CreateAndDeleteCluster(self.context)

-        clusters_scenario.context = {
+        scenario.context = {
             "tenant": {
                 "sahara": {
                     "image": "test_image",
                 }
             }
         }
-        clusters_scenario.create_and_delete_cluster(
-            master_flavor="test_flavor_m",
-            worker_flavor="test_flavor_w",
-            workers_count=5,
-            plugin_name="test_plugin",
-            hadoop_version="test_version")
-        mock__launch_cluster.assert_called_once_with(
+        scenario.run(master_flavor="test_flavor_m",
+                     worker_flavor="test_flavor_w",
+                     workers_count=5,
+                     plugin_name="test_plugin",
+                     hadoop_version="test_version")
+        mock_launch_cluster.assert_called_once_with(
             flavor_id=None,
             master_flavor_id="test_flavor_m",
             worker_flavor_id="test_flavor_w",
@@ -65,32 +64,32 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
             enable_proxy=False,
             use_autoconfig=True)

-        mock__delete_cluster.assert_called_once_with(
-            mock__launch_cluster.return_value)
+        mock_delete_cluster.assert_called_once_with(
+            mock_launch_cluster.return_value)

-    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
-    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
+    @mock.patch("%s.CreateAndDeleteCluster._delete_cluster" % BASE)
+    @mock.patch("%s.CreateAndDeleteCluster._launch_cluster" % BASE,
                 return_value=mock.MagicMock(id=42))
-    def test_create_and_delete_cluster_deprecated_flavor(
-            self, mock__launch_cluster, mock__delete_cluster):
-        clusters_scenario = clusters.SaharaClusters(self.context)
+    def test_create_and_delete_cluster_deprecated_flavor(self,
+                                                         mock_launch_cluster,
+                                                         mock_delete_cluster):
+        scenario = clusters.CreateAndDeleteCluster(self.context)

-        clusters_scenario.context = {
+        scenario.context = {
             "tenant": {
                 "sahara": {
                     "image": "test_image",
                 }
             }
         }
-        clusters_scenario.create_and_delete_cluster(
-            flavor="test_deprecated_arg",
-            master_flavor=None,
-            worker_flavor=None,
-            workers_count=5,
-            plugin_name="test_plugin",
-            hadoop_version="test_version")
+        scenario.run(flavor="test_deprecated_arg",
+                     master_flavor=None,
+                     worker_flavor=None,
+                     workers_count=5,
+                     plugin_name="test_plugin",
+                     hadoop_version="test_version")

-        mock__launch_cluster.assert_called_once_with(
+        mock_launch_cluster.assert_called_once_with(
             flavor_id="test_deprecated_arg",
             master_flavor_id=None,
             worker_flavor_id=None,
@@ -109,38 +108,37 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
             enable_proxy=False,
             use_autoconfig=True)

-        mock__delete_cluster.assert_called_once_with(
-            mock__launch_cluster.return_value)
+        mock_delete_cluster.assert_called_once_with(
+            mock_launch_cluster.return_value)

-    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
-    @mock.patch(SAHARA_CLUSTERS + "._scale_cluster")
-    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
+    @mock.patch("%s.CreateScaleDeleteCluster._delete_cluster" % BASE)
+    @mock.patch("%s.CreateScaleDeleteCluster._scale_cluster" % BASE)
+    @mock.patch("%s.CreateScaleDeleteCluster._launch_cluster" % BASE,
                 return_value=mock.MagicMock(id=42))
-    def test_create_scale_delete_cluster(
-            self, mock__launch_cluster, mock__scale_cluster,
-            mock__delete_cluster):
+    def test_create_scale_delete_cluster(self,
+                                         mock_launch_cluster,
+                                         mock_scale_cluster,
+                                         mock_delete_cluster):
         self.clients("sahara").clusters.get.return_value = mock.MagicMock(
             id=42, status="active"
         )
-        clusters_scenario = clusters.SaharaClusters(self.context)
+        scenario = clusters.CreateScaleDeleteCluster(self.context)

-        clusters_scenario.context = {
+        scenario.context = {
             "tenant": {
                 "sahara": {
                     "image": "test_image",
                 }
             }
         }
-        clusters_scenario.create_scale_delete_cluster(
-            master_flavor="test_flavor_m",
-            worker_flavor="test_flavor_w",
-            workers_count=5,
-            deltas=[1, -1],
-            plugin_name="test_plugin",
-            hadoop_version="test_version")
+        scenario.run(master_flavor="test_flavor_m",
+                     worker_flavor="test_flavor_w",
+                     workers_count=5,
+                     deltas=[1, -1],
+                     plugin_name="test_plugin",
+                     hadoop_version="test_version")

-        mock__launch_cluster.assert_called_once_with(
+        mock_launch_cluster.assert_called_once_with(
             flavor_id=None,
             master_flavor_id="test_flavor_m",
             worker_flavor_id="test_flavor_w",
@@ -159,10 +157,14 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
             enable_proxy=False,
             use_autoconfig=True)

-        mock__scale_cluster.assert_has_calls([
-            mock.call(self.clients("sahara").clusters.get.return_value, 1),
-            mock.call(self.clients("sahara").clusters.get.return_value, -1),
+        mock_scale_cluster.assert_has_calls([
+            mock.call(
+                self.clients("sahara").clusters.get.return_value,
+                1),
+            mock.call(
+                self.clients("sahara").clusters.get.return_value,
+                -1),
         ])

-        mock__delete_cluster.assert_called_once_with(
+        mock_delete_cluster.assert_called_once_with(
             self.clients("sahara").clusters.get.return_value)
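
The test changes mirror the plugin split: patch targets are rebuilt from a module-level BASE path plus the new class name, and the corresponding mock parameter names are shortened to match. The shape of the new targets, as a sketch with a hypothetical test body:

    import mock

    BASE = "rally.plugins.openstack.scenarios.sahara.clusters"

    # Helpers are patched on the scenario class that owns them. mock
    # decorators apply bottom-up, so the lowest patch binds the first
    # mock argument.
    @mock.patch("%s.CreateAndDeleteCluster._delete_cluster" % BASE)
    @mock.patch("%s.CreateAndDeleteCluster._launch_cluster" % BASE,
                return_value=mock.MagicMock(id=42))
    def test_example(self, mock_launch_cluster, mock_delete_cluster):
        ...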

File: tests/unit/plugins/openstack/scenarios/sahara/test_jobs.py

@@ -21,8 +21,7 @@ from tests.unit import test
 CONF = cfg.CONF

-SAHARA_JOB = "rally.plugins.openstack.scenarios.sahara.jobs.SaharaJob"
-SAHARA_UTILS = "rally.plugins.openstack.scenarios.sahara.utils"
+BASE = "rally.plugins.openstack.scenarios.sahara.jobs"

 class SaharaJobTestCase(test.ScenarioTestCase):
@@ -36,8 +35,8 @@ class SaharaJobTestCase(test.ScenarioTestCase):
         CONF.set_override("sahara_job_check_interval", 0, "benchmark",
                           enforce_type=True)

-    @mock.patch(SAHARA_JOB + "._run_job_execution")
-    def test_create_launch_job_java(self, mock__run_job_execution):
+    @mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE)
+    def test_create_launch_job_java(self, mock_run_job):
         self.clients("sahara").jobs.create.return_value = mock.MagicMock(
             id="42")
@@ -52,23 +51,22 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                 }
             }
         })
-        jobs_scenario = jobs.SaharaJob(self.context)
-        jobs_scenario.generate_random_name = mock.Mock(return_value="job_42")
+        scenario = jobs.CreateLaunchJob(self.context)
+        scenario.generate_random_name = mock.Mock(
+            return_value="job_42")

-        jobs_scenario.create_launch_job(
-            job_type="java",
-            configs={"conf_key": "conf_val"},
-            job_idx=0
-        )
+        scenario.run(job_type="java",
+                     configs={"conf_key": "conf_val"},
+                     job_idx=0)

         self.clients("sahara").jobs.create.assert_called_once_with(
-            name=jobs_scenario.generate_random_name.return_value,
+            name="job_42",
             type="java",
             description="",
             mains=["main_42"],
             libs=["lib_42"]
         )

-        mock__run_job_execution.assert_called_once_with(
+        mock_run_job.assert_called_once_with(
             job_id="42",
             cluster_id="cl_42",
             input_id=None,
@mock.patch(SAHARA_JOB + "._run_job_execution")
@mock.patch(SAHARA_JOB + "._create_output_ds",
@mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE)
@mock.patch("%s.CreateLaunchJob._create_output_ds" % BASE,
return_value=mock.MagicMock(id="out_42"))
def test_create_launch_job_pig(self, mock__create_output_ds,
mock__run_job_execution):
def test_create_launch_job_pig(self,
mock_create_output,
mock_run_job):
self.clients("sahara").jobs.create.return_value = mock.MagicMock(
id="42")
@@ -96,23 +95,21 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                 }
             }
         })
-        jobs_scenario = jobs.SaharaJob(self.context)
-        jobs_scenario.generate_random_name = mock.Mock(return_value="job_42")
+        scenario = jobs.CreateLaunchJob(self.context)
+        scenario.generate_random_name = mock.Mock(return_value="job_42")

-        jobs_scenario.create_launch_job(
-            job_type="pig",
-            configs={"conf_key": "conf_val"},
-            job_idx=0
-        )
+        scenario.run(job_type="pig",
+                     configs={"conf_key": "conf_val"},
+                     job_idx=0)

         self.clients("sahara").jobs.create.assert_called_once_with(
-            name=jobs_scenario.generate_random_name.return_value,
+            name="job_42",
             type="pig",
             description="",
             mains=["main_42"],
             libs=["lib_42"]
         )

-        mock__run_job_execution.assert_called_once_with(
+        mock_run_job.assert_called_once_with(
             job_id="42",
             cluster_id="cl_42",
             input_id="in_42",
@mock.patch(SAHARA_JOB + "._run_job_execution")
def test_create_launch_job_sequence(self, mock__run_job_execution):
@mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE)
@mock.patch("%s.CreateLaunchJob.generate_random_name" % BASE,
return_value="job_42")
def test_create_launch_job_sequence(self,
mock__random_name,
mock_run_job):
self.clients("sahara").jobs.create.return_value = mock.MagicMock(
id="42")
@@ -137,10 +138,9 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                 }
             }
         })
-        jobs_scenario = jobs.SaharaJob(self.context)
-        jobs_scenario.generate_random_name = mock.Mock(return_value="job_42")
+        scenario = jobs.CreateLaunchJobSequence(self.context)

-        jobs_scenario.create_launch_job_sequence(
+        scenario.run(
             jobs=[
                 {
                     "job_type": "java",
@@ -150,38 +150,41 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                     "configs": {"conf_key2": "conf_val2"}
                 }])
-        jobs_create_call = mock.call(
-            name=jobs_scenario.generate_random_name.return_value,
-            type="java",
-            description="",
-            mains=["main_42"],
-            libs=["lib_42"])
+        jobs_create_call = mock.call(name="job_42",
+                                     type="java",
+                                     description="",
+                                     mains=["main_42"],
+                                     libs=["lib_42"])

-        self.clients("sahara").jobs.create.assert_has_calls([jobs_create_call,
-                                                             jobs_create_call])
+        self.clients("sahara").jobs.create.assert_has_calls(
+            [jobs_create_call, jobs_create_call])

-        mock__run_job_execution.assert_has_calls([
-            mock.call(
-                job_id="42",
-                cluster_id="cl_42",
-                input_id=None,
-                output_id=None,
-                configs={"conf_key": "conf_val"},
-                job_idx=0),
-            mock.call(
-                job_id="42",
-                cluster_id="cl_42",
-                input_id=None,
-                output_id=None,
-                configs={"conf_key2": "conf_val2"},
-                job_idx=1)]
-        )
+        mock_run_job.assert_has_calls([
+            mock.call(job_id="42",
+                      cluster_id="cl_42",
+                      input_id=None,
+                      output_id=None,
+                      configs={"conf_key": "conf_val"},
+                      job_idx=0),
+            mock.call(job_id="42",
+                      cluster_id="cl_42",
+                      input_id=None,
+                      output_id=None,
+                      configs={"conf_key2": "conf_val2"},
+                      job_idx=1)
+        ])
-    @mock.patch(SAHARA_JOB + "._run_job_execution")
-    @mock.patch(SAHARA_JOB + "._scale_cluster")
-    def test_create_launch_job_sequence_with_scaling(self,
-                                                     mock__scale_cluster,
-                                                     mock__run_job_execution):
+    @mock.patch("%s.CreateLaunchJob.generate_random_name" % BASE,
+                return_value="job_42")
+    @mock.patch("%s.CreateLaunchJobSequenceWithScaling"
+                "._scale_cluster" % BASE)
+    @mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE)
+    def test_create_launch_job_sequence_with_scaling(
+            self,
+            mock_run_job,
+            mock_create_launch_job_sequence_with_scaling__scale_cluster,
+            mock_create_launch_job_generate_random_name):
         self.clients("sahara").jobs.create.return_value = mock.MagicMock(
             id="42")
         self.clients("sahara").clusters.get.return_value = mock.MagicMock(
@@ -198,10 +201,9 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                 }
             }
         })
-        jobs_scenario = jobs.SaharaJob(self.context)
-        jobs_scenario.generate_random_name = mock.Mock(return_value="job_42")
+        scenario = jobs.CreateLaunchJobSequenceWithScaling(self.context)

-        jobs_scenario.create_launch_job_sequence_with_scaling(
+        scenario.run(
             jobs=[
                 {
                     "job_type": "java",
@@ -212,15 +214,14 @@ class SaharaJobTestCase(test.ScenarioTestCase):
                 }],
             deltas=[1, -1])
-        jobs_create_call = mock.call(
-            name=jobs_scenario.generate_random_name.return_value,
-            type="java",
-            description="",
-            mains=["main_42"],
-            libs=["lib_42"])
+        jobs_create_call = mock.call(name="job_42",
+                                     type="java",
+                                     description="",
+                                     mains=["main_42"],
+                                     libs=["lib_42"])

-        self.clients("sahara").jobs.create.assert_has_calls([jobs_create_call,
-                                                             jobs_create_call])
+        self.clients("sahara").jobs.create.assert_has_calls(
+            [jobs_create_call, jobs_create_call])

         je_0 = mock.call(job_id="42", cluster_id="cl_42", input_id=None,
                          output_id=None, configs={"conf_key": "conf_val"},
                         job_idx=0)
@@ -228,5 +229,4 @@ class SaharaJobTestCase(test.ScenarioTestCase):
         je_1 = mock.call(job_id="42", cluster_id="cl_42", input_id=None,
                          output_id=None,
                          configs={"conf_key2": "conf_val2"}, job_idx=1)

-        mock__run_job_execution.assert_has_calls(
-            [je_0, je_1, je_0, je_1, je_0, je_1])
+        mock_run_job.assert_has_calls([je_0, je_1, je_0, je_1, je_0, je_1])

File: tests/unit/plugins/openstack/scenarios/sahara/test_node_group_templates.py

@@ -19,8 +19,7 @@ from rally.plugins.openstack.scenarios.sahara import (node_group_templates
                                                       as ngts)
 from tests.unit import test

-SAHARA_NGTS = ("rally.plugins.openstack.scenarios.sahara.node_group_templates"
-               ".SaharaNodeGroupTemplates")
+BASE = "rally.plugins.openstack.scenarios.sahara.node_group_templates"

 class SaharaNodeGroupTemplatesTestCase(test.TestCase):
@@ -29,62 +28,55 @@ class SaharaNodeGroupTemplatesTestCase(test.TestCase):
         super(SaharaNodeGroupTemplatesTestCase, self).setUp()
         self.context = test.get_test_context()

-    @mock.patch(SAHARA_NGTS + "._list_node_group_templates")
-    @mock.patch(SAHARA_NGTS + "._create_master_node_group_template",
-                return_value=object())
-    @mock.patch(SAHARA_NGTS + "._create_worker_node_group_template",
-                return_value=object)
-    def test_create_and_list_node_group_templates(
-            self,
-            mock__create_worker_node_group_template,
-            mock__create_master_node_group_template,
-            mock__list_node_group_templates):
-        ngts_scenario = ngts.SaharaNodeGroupTemplates(self.context)
-        ngts_scenario.create_and_list_node_group_templates("test_flavor",
-                                                           "test_plugin",
-                                                           "test_version")
+    @mock.patch("%s.CreateAndListNodeGroupTemplates"
+                "._list_node_group_templates" % BASE)
+    @mock.patch("%s.CreateAndListNodeGroupTemplates"
+                "._create_master_node_group_template" % BASE)
+    @mock.patch("%s.CreateAndListNodeGroupTemplates"
+                "._create_worker_node_group_template" % BASE)
+    def test_create_and_list_node_group_templates(self,
+                                                  mock_create_worker,
+                                                  mock_create_master,
+                                                  mock_list_group):
+        ngts.CreateAndListNodeGroupTemplates(self.context).run(
+            "test_flavor", "test_plugin", "test_version")

-        mock__create_master_node_group_template.assert_called_once_with(
+        mock_create_master.assert_called_once_with(
             flavor_id="test_flavor",
             plugin_name="test_plugin",
             hadoop_version="test_version",
             use_autoconfig=True)
-        mock__create_worker_node_group_template.assert_called_once_with(
+        mock_create_worker.assert_called_once_with(
             flavor_id="test_flavor",
             plugin_name="test_plugin",
             hadoop_version="test_version",
             use_autoconfig=True)
-        mock__list_node_group_templates.assert_called_once_with()
+        mock_list_group.assert_called_once_with()
-    @mock.patch(SAHARA_NGTS + "._delete_node_group_template")
-    @mock.patch(SAHARA_NGTS + "._create_master_node_group_template",
-                return_value=mock.MagicMock(id=1))
-    @mock.patch(SAHARA_NGTS + "._create_worker_node_group_template",
-                return_value=mock.MagicMock(id=2))
-    def test_create_delete_node_group_templates(
-            self,
-            mock__create_worker_node_group_template,
-            mock__create_master_node_group_template,
-            mock__delete_node_group_template):
-        ngts_scenario = ngts.SaharaNodeGroupTemplates(self.context)
-        ngts_scenario.create_delete_node_group_templates(
-            "test_flavor",
-            "test_plugin",
-            "test_version")
+    @mock.patch("%s.CreateDeleteNodeGroupTemplates"
+                "._delete_node_group_template" % BASE)
+    @mock.patch("%s.CreateDeleteNodeGroupTemplates"
+                "._create_master_node_group_template" % BASE)
+    @mock.patch("%s.CreateDeleteNodeGroupTemplates"
+                "._create_worker_node_group_template" % BASE)
+    def test_create_delete_node_group_templates(self,
+                                                mock_create_worker,
+                                                mock_create_master,
+                                                mock_delete_group):
+        ngts.CreateDeleteNodeGroupTemplates(self.context).run(
+            "test_flavor", "test_plugin", "test_version")

-        mock__create_master_node_group_template.assert_called_once_with(
+        mock_create_master.assert_called_once_with(
             flavor_id="test_flavor",
             plugin_name="test_plugin",
             hadoop_version="test_version",
             use_autoconfig=True)
-        mock__create_worker_node_group_template.assert_called_once_with(
+        mock_create_worker.assert_called_once_with(
             flavor_id="test_flavor",
             plugin_name="test_plugin",
             hadoop_version="test_version",
             use_autoconfig=True)
-        mock__delete_node_group_template.assert_has_calls(calls=[
-            mock.call(mock__create_master_node_group_template.return_value),
-            mock.call(mock__create_worker_node_group_template.return_value)])
+        mock_delete_group.assert_has_calls(calls=[
+            mock.call(mock_create_master.return_value),
+            mock.call(mock_create_worker.return_value)])