Merge "[Sahara] Improved flavor paramter for clusters"
commit f17f664a85
@@ -51,6 +51,12 @@ class SaharaCluster(context.Context):
             "flavor_id": {
                 "type": "string",
             },
+            "master_flavor_id": {
+                "type": "string",
+            },
+            "worker_flavor_id": {
+                "type": "string",
+            },
             "floating_ip_pool": {
                 "type": "string",
             },
@@ -86,7 +92,7 @@ class SaharaCluster(context.Context):
         },
         "additionalProperties": False,
         "required": ["plugin_name", "hadoop_version", "workers_count",
-                     "flavor_id"]
+                     "master_flavor_id", "worker_flavor_id"]
     }
 
     @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Cluster`"))
@@ -113,7 +119,9 @@ class SaharaCluster(context.Context):
             cluster = scenario._launch_cluster(
                 plugin_name=self.config["plugin_name"],
                 hadoop_version=self.config["hadoop_version"],
-                flavor_id=self.config["flavor_id"],
+                flavor_id=self.config.get("flavor_id", None),
+                master_flavor_id=self.config["master_flavor_id"],
+                worker_flavor_id=self.config["worker_flavor_id"],
                 workers_count=self.config["workers_count"],
                 image_id=image_id,
                 floating_ip_pool=floating_ip_pool,
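With the schema change above, a `sahara_cluster` context section can size the master and worker nodes independently instead of using one `flavor_id` for everything. A minimal sketch of such a section, written here as a Python dict purely for illustration; the flavor names, plugin name and Hadoop version are placeholders, not values taken from this change:

# Illustrative only: placeholder values, shown as a Python dict.
sahara_cluster_context = {
    "sahara_cluster": {
        "master_flavor_id": "m1.large",   # flavor for the single master node
        "worker_flavor_id": "m1.medium",  # flavor shared by the worker nodes
        "workers_count": 3,
        "plugin_name": "vanilla",
        "hadoop_version": "2.6.0",
    }
}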
@@ -27,16 +27,22 @@ class SaharaClusters(utils.SaharaScenario):
     """Benchmark scenarios for Sahara clusters."""
 
     @types.set(flavor=types.FlavorResourceType,
+               master_flavor=types.FlavorResourceType,
+               worker_flavor=types.FlavorResourceType,
                neutron_net=types.NeutronNetworkResourceType,
                floating_ip_pool=types.NeutronNetworkResourceType)
-    @validation.flavor_exists("flavor")
+    @validation.flavor_exists("master_flavor")
+    @validation.flavor_exists("worker_flavor")
     @validation.required_contexts("users", "sahara_image")
     @validation.number("workers_count", minval=1, integer_only=True)
     @validation.required_services(consts.Service.SAHARA)
     @validation.required_openstack(users=True)
     @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_and_delete_cluster(self, flavor, workers_count, plugin_name,
-                                  hadoop_version, floating_ip_pool=None,
+    def create_and_delete_cluster(self, workers_count, plugin_name,
+                                  hadoop_version,
+                                  master_flavor=None, worker_flavor=None,
+                                  flavor=None,
+                                  floating_ip_pool=None,
                                   volumes_per_node=None,
                                   volumes_size=None, auto_security_group=None,
                                   security_groups=None, node_configs=None,
@@ -49,7 +55,11 @@ class SaharaClusters(utils.SaharaScenario):
         'Active' and deletes it.
 
         :param flavor: Nova flavor that will be for nodes in the
-                       created node groups
+                       created node groups. Deprecated.
+        :param master_flavor: Nova flavor that will be used for the master
+                              instance of the cluster
+        :param worker_flavor: Nova flavor that will be used for the workers of
+                              the cluster
         :param workers_count: number of worker instances in a cluster
         :param plugin_name: name of a provisioning plugin
         :param hadoop_version: version of Hadoop distribution supported by
@@ -85,6 +95,8 @@ class SaharaClusters(utils.SaharaScenario):
 
         cluster = self._launch_cluster(
             flavor_id=flavor,
+            master_flavor_id=master_flavor,
+            worker_flavor_id=worker_flavor,
             image_id=image_id,
             workers_count=workers_count,
             plugin_name=plugin_name,
@@ -101,14 +113,19 @@ class SaharaClusters(utils.SaharaScenario):
 
         self._delete_cluster(cluster)
 
-    @types.set(flavor=types.FlavorResourceType)
-    @validation.flavor_exists("flavor")
+    @types.set(flavor=types.FlavorResourceType,
+               master_flavor=types.FlavorResourceType,
+               worker_flavor=types.FlavorResourceType)
+    @validation.flavor_exists("master_flavor")
+    @validation.flavor_exists("worker_flavor")
     @validation.required_services(consts.Service.SAHARA)
     @validation.required_contexts("users", "sahara_image")
     @validation.number("workers_count", minval=1, integer_only=True)
     @scenario.configure(context={"cleanup": ["sahara"]})
-    def create_scale_delete_cluster(self, flavor, workers_count, plugin_name,
+    def create_scale_delete_cluster(self, master_flavor, worker_flavor,
+                                    workers_count, plugin_name,
                                     hadoop_version, deltas,
+                                    flavor=None,
                                     floating_ip_pool=None,
                                     volumes_per_node=None, volumes_size=None,
                                     auto_security_group=None,
@@ -125,7 +142,11 @@ class SaharaClusters(utils.SaharaScenario):
         add 2 worker nodes to the cluster and the second will remove two.
 
         :param flavor: Nova flavor that will be for nodes in the
-                       created node groups
+                       created node groups. Deprecated.
+        :param master_flavor: Nova flavor that will be used for the master
+                              instance of the cluster
+        :param worker_flavor: Nova flavor that will be used for the workers of
+                              the cluster
         :param workers_count: number of worker instances in a cluster
         :param plugin_name: name of a provisioning plugin
         :param hadoop_version: version of Hadoop distribution supported by
@@ -166,6 +187,8 @@ class SaharaClusters(utils.SaharaScenario):
 
         cluster = self._launch_cluster(
             flavor_id=flavor,
+            master_flavor_id=master_flavor,
+            worker_flavor_id=worker_flavor,
             image_id=image_id,
             workers_count=workers_count,
             plugin_name=plugin_name,
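After these hunks the scenarios accept `master_flavor` and `worker_flavor` while still taking the old `flavor` argument for backward compatibility. A minimal sketch of scenario arguments for `SaharaClusters.create_and_delete_cluster`, written as a Python dict; the values are placeholders and the surrounding task-file layout is not part of this change:

# Illustrative only: placeholder argument values.
scenario_args = {
    "master_flavor": "m1.large",
    "worker_flavor": "m1.medium",
    "workers_count": 5,
    "plugin_name": "vanilla",
    "hadoop_version": "2.6.0",
    # "flavor": "m1.medium",  # deprecated single-flavor argument, still accepted
}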
@@ -244,10 +244,15 @@ class SaharaScenario(scenario.OpenStackScenario):
         }
         return replication_config
 
+    @logging.log_deprecated_args("`flavor_id` argument is deprecated. Use "
+                                 "`master_flavor_id` and `worker_flavor_id` "
+                                 "parameters.", rally_version="2.0",
+                                 deprecated_args=["flavor_id"])
     @atomic.action_timer("sahara.launch_cluster")
-    def _launch_cluster(self, plugin_name, hadoop_version, flavor_id,
-                        image_id, workers_count, floating_ip_pool=None,
-                        volumes_per_node=None,
+    def _launch_cluster(self, plugin_name, hadoop_version, master_flavor_id,
+                        worker_flavor_id, image_id, workers_count,
+                        flavor_id=None,
+                        floating_ip_pool=None, volumes_per_node=None,
                         volumes_size=None, auto_security_group=None,
                         security_groups=None, node_configs=None,
                         cluster_configs=None, enable_anti_affinity=False,
@@ -261,7 +266,9 @@ class SaharaScenario(scenario.OpenStackScenario):
 
         :param plugin_name: provisioning plugin name
         :param hadoop_version: Hadoop version supported by the plugin
-        :param flavor_id: flavor which will be used to create instances
+        :param master_flavor_id: flavor which will be used to create master
+                                 instance
+        :param worker_flavor_id: flavor which will be used to create workers
         :param image_id: image id that will be used to boot instances
         :param workers_count: number of worker instances. All plugins will
                               also add one Master instance and some plugins
@@ -295,16 +302,22 @@ class SaharaScenario(scenario.OpenStackScenario):
         else:
             proxies_count = 0
 
+        if flavor_id:
+            # Note: the deprecated argument is used. Falling back to single
+            # flavor behavior.
+            master_flavor_id = flavor_id
+            worker_flavor_id = flavor_id
+
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": master_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["master"],
                 "count": 1
             }, {
                 "name": "worker-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["worker"],
                 "count": workers_count - proxies_count
@@ -314,7 +327,7 @@ class SaharaScenario(scenario.OpenStackScenario):
         if proxies_count:
             node_groups.append({
                 "name": "proxy-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["worker"],
                 "count": proxies_count
@@ -327,7 +340,7 @@ class SaharaScenario(scenario.OpenStackScenario):
 
             node_groups.append({
                 "name": "manager-ng",
-                "flavor_id": flavor_id,
+                "flavor_id": worker_flavor_id,
                 "node_processes": sahara_consts.NODE_PROCESSES[plugin_name]
                 [hadoop_version]["manager"],
                 "count": 1
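The `if flavor_id:` block above is the compatibility path: when a caller still supplies the deprecated `flavor_id`, every node group falls back to that single flavor. A standalone sketch of that fallback (not Rally code; the helper name is invented for illustration):

def resolve_flavors(master_flavor_id, worker_flavor_id, flavor_id=None):
    # Mirrors the fallback in _launch_cluster: the deprecated single flavor,
    # if given, overrides both the master and worker flavors.
    if flavor_id:
        return flavor_id, flavor_id
    return master_flavor_id, worker_flavor_id

# Both node groups collapse to the deprecated flavor when it is passed.
assert resolve_flavors("m1.large", "m1.medium") == ("m1.large", "m1.medium")
assert resolve_flavors("m1.large", "m1.medium", "m1.small") == ("m1.small", "m1.small")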
@@ -55,7 +55,8 @@ class SaharaClusterTestCase(test.ScenarioTestCase):
                     "users_per_tenant": self.users_per_tenant
                 },
                 "sahara_cluster": {
-                    "flavor_id": "test_flavor",
+                    "master_flavor_id": "test_flavor_m",
+                    "worker_flavor_id": "test_flavor_w",
                     "workers_count": 2,
                     "plugin_name": "test_plugin",
                     "hadoop_version": "test_version"
@@ -77,9 +78,11 @@ class SaharaClusterTestCase(test.ScenarioTestCase):
 
         for i in self.tenants:
             launch_cluster_calls.append(mock.call(
+                flavor_id=None,
                 plugin_name="test_plugin",
                 hadoop_version="test_version",
-                flavor_id="test_flavor",
+                master_flavor_id="test_flavor_m",
+                worker_flavor_id="test_flavor_w",
                 workers_count=2,
                 image_id=self.context["tenants"][i]["sahara"]["image"],
                 floating_ip_pool=None,
@@ -115,9 +118,11 @@ class SaharaClusterTestCase(test.ScenarioTestCase):
 
         for i in self.tenants:
             launch_cluster_calls.append(mock.call(
+                flavor_id=None,
                 plugin_name="test_plugin",
                 hadoop_version="test_version",
-                flavor_id="test_flavor",
+                master_flavor_id="test_flavor_m",
+                worker_flavor_id="test_flavor_w",
                 workers_count=2,
                 image_id=self.context["tenants"][i]["sahara"]["image"],
                 floating_ip_pool=None,
@@ -40,13 +40,59 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
             }
         }
         clusters_scenario.create_and_delete_cluster(
-            flavor="test_flavor",
+            master_flavor="test_flavor_m",
+            worker_flavor="test_flavor_w",
             workers_count=5,
             plugin_name="test_plugin",
             hadoop_version="test_version")
 
         mock__launch_cluster.assert_called_once_with(
-            flavor_id="test_flavor",
+            flavor_id=None,
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
+            image_id="test_image",
+            workers_count=5,
+            plugin_name="test_plugin",
+            hadoop_version="test_version",
+            floating_ip_pool=None,
+            volumes_per_node=None,
+            volumes_size=None,
+            auto_security_group=None,
+            security_groups=None,
+            node_configs=None,
+            cluster_configs=None,
+            enable_anti_affinity=False,
+            enable_proxy=False)
+
+        mock__delete_cluster.assert_called_once_with(
+            mock__launch_cluster.return_value)
+
+    @mock.patch(SAHARA_CLUSTERS + "._delete_cluster")
+    @mock.patch(SAHARA_CLUSTERS + "._launch_cluster",
+                return_value=mock.MagicMock(id=42))
+    def test_create_and_delete_cluster_deprecated_flavor(
+            self, mock__launch_cluster, mock__delete_cluster):
+        clusters_scenario = clusters.SaharaClusters(self.context)
+
+        clusters_scenario.context = {
+            "tenant": {
+                "sahara": {
+                    "image": "test_image",
+                }
+            }
+        }
+        clusters_scenario.create_and_delete_cluster(
+            flavor="test_deprecated_arg",
+            master_flavor=None,
+            worker_flavor=None,
+            workers_count=5,
+            plugin_name="test_plugin",
+            hadoop_version="test_version")
+
+        mock__launch_cluster.assert_called_once_with(
+            flavor_id="test_deprecated_arg",
+            master_flavor_id=None,
+            worker_flavor_id=None,
             image_id="test_image",
             workers_count=5,
             plugin_name="test_plugin",
@@ -85,14 +131,17 @@ class SaharaClustersTestCase(test.ScenarioTestCase):
         }
 
         clusters_scenario.create_scale_delete_cluster(
-            flavor="test_flavor",
+            master_flavor="test_flavor_m",
+            worker_flavor="test_flavor_w",
             workers_count=5,
             deltas=[1, -1],
            plugin_name="test_plugin",
             hadoop_version="test_version")
 
         mock__launch_cluster.assert_called_once_with(
-            flavor_id="test_flavor",
+            flavor_id=None,
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             workers_count=5,
             plugin_name="test_plugin",
@@ -168,7 +168,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_m",
                 "node_processes": ["p1"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "count": 1,
@@ -177,7 +177,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "node_configs": {"HDFS": {"local_config": "local_value"}},
             }, {
                 "name": "worker-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "volumes_per_node": 5,
@@ -201,7 +201,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         scenario._launch_cluster(
             plugin_name="test_plugin",
             hadoop_version="test_version",
-            flavor_id="test_flavor",
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             floating_ip_pool=floating_ip_pool_uuid,
             volumes_per_node=5,
@@ -271,7 +272,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         node_groups = [
             {
                 "name": "master-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_m",
                 "node_processes": ["p1"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "count": 1,
@@ -281,7 +282,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "is_proxy_gateway": True
             }, {
                 "name": "worker-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "volumes_per_node": 5,
                 "volumes_size": 10,
@@ -291,7 +292,7 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                 "node_configs": {"HDFS": {"local_config": "local_value"}},
             }, {
                 "name": "proxy-ng",
-                "flavor_id": "test_flavor",
+                "flavor_id": "test_flavor_w",
                 "node_processes": ["p2"],
                 "floating_ip_pool": floating_ip_pool_uuid,
                 "volumes_per_node": 5,
@@ -316,7 +317,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
         scenario._launch_cluster(
             plugin_name="test_plugin",
             hadoop_version="test_version",
-            flavor_id="test_flavor",
+            master_flavor_id="test_flavor_m",
+            worker_flavor_id="test_flavor_w",
             image_id="test_image",
             floating_ip_pool=floating_ip_pool_uuid,
             volumes_per_node=5,
@@ -380,7 +382,8 @@ class SaharaScenarioTestCase(test.ScenarioTestCase):
                          scenario._launch_cluster,
                          plugin_name="test_plugin",
                          hadoop_version="test_version",
-                         flavor_id="test_flavor",
+                         master_flavor_id="test_flavor_m",
+                         worker_flavor_id="test_flavor_w",
                          image_id="test_image",
                          floating_ip_pool="test_pool",
                          volumes_per_node=5,