Extract merge process of the component options:
* Move merging of component's opts with distro, persona, etc. to cfg.py * Implement YamlMergeLoader * Add unit tests for correctly updating cached and processed opts data * Add unit tests for YamlMergeLoader * Fix redefinition process in personas * Add documentation to YamlMergeLoader docstring Change-Id: Ie491defca3df09d763081799d551cef8c2128701
This commit is contained in:
parent
2051e05d05
commit
53d8e30bbd
@ -25,7 +25,6 @@ from anvil import importer
|
||||
from anvil import log as logging
|
||||
from anvil import passwords as pw
|
||||
from anvil import phase
|
||||
from anvil import settings
|
||||
from anvil import shell as sh
|
||||
from anvil import utils
|
||||
|
||||
@ -33,10 +32,6 @@ from anvil.utils import OrderedDict
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Include the general yaml during all interpolation
|
||||
# actions since it typically contains useful shared settings...
|
||||
BASE_YAML_INTERP = ('general', )
|
||||
|
||||
|
||||
class PhaseFunctors(object):
|
||||
def __init__(self, start, run, end):
|
||||
@ -56,9 +51,11 @@ class Action(object):
|
||||
self.root_dir = root_dir
|
||||
# Action phases are tracked in this directory
|
||||
self.phase_dir = sh.joinpths(root_dir, 'phases')
|
||||
|
||||
# Yamls are loaded (with its reference links) using this instance at the
|
||||
# given component directory where component configuration will be found.
|
||||
self.config_loader = cfg.YamlRefLoader(settings.COMPONENT_CONF_DIR)
|
||||
self.config_loader = cfg.YamlMergeLoader(root_dir)
|
||||
|
||||
# Keyring/pw settings + cache
|
||||
self.passwords = {}
|
||||
self.keyring_path = cli_opts.pop('keyring_path')
|
||||
@ -118,21 +115,6 @@ class Action(object):
|
||||
# Duplicate the list to avoid problems if it is updated later.
|
||||
return copy.copy(components)
|
||||
|
||||
def _get_component_dirs(self, component):
|
||||
component_dir = sh.joinpths(self.root_dir, component)
|
||||
trace_dir = sh.joinpths(component_dir, 'traces')
|
||||
app_dir = sh.joinpths(component_dir, 'app')
|
||||
return {
|
||||
'app_dir': app_dir,
|
||||
'component_dir': component_dir,
|
||||
'root_dir': self.root_dir,
|
||||
'trace_dir': trace_dir,
|
||||
}
|
||||
|
||||
def _merge_options(self, name, distro_opts, component_opts, persona_opts):
|
||||
return utils.merge_dicts(self._get_component_dirs(name),
|
||||
distro_opts, component_opts, persona_opts)
|
||||
|
||||
def _merge_subsystems(self, distro_subsystems, desired_subsystems):
|
||||
subsystems = {}
|
||||
for subsystem_name in desired_subsystems:
|
||||
@ -163,23 +145,9 @@ class Action(object):
|
||||
sibling_instances[action][name] = a_sibling
|
||||
return there_siblings
|
||||
|
||||
def _get_interpolated_names(self, name):
|
||||
# Return which sources that will be interpolated from
|
||||
# Note(harlowja): if one of the bases here pulls in
|
||||
# another yaml, it will be done automatically so this
|
||||
# list is more of the starting list and not the end list...
|
||||
return list(BASE_YAML_INTERP) + [name]
|
||||
|
||||
def _get_interpolated_options(self, name):
|
||||
opts = {}
|
||||
for c in self._get_interpolated_names(name):
|
||||
opts.update(self.config_loader.load(c))
|
||||
return opts
|
||||
|
||||
def _construct_instances(self, persona):
|
||||
"""Create component objects for each component in the persona."""
|
||||
persona_subsystems = persona.wanted_subsystems or {}
|
||||
persona_opts = persona.component_options or {}
|
||||
wanted_components = persona.wanted_components or []
|
||||
# All siblings for the current persona
|
||||
instances = {}
|
||||
@ -203,10 +171,8 @@ class Action(object):
|
||||
sibling_params['siblings'] = {} # This gets adjusted during construction
|
||||
sibling_params['passwords'] = self.passwords
|
||||
sibling_params['distro'] = self.distro
|
||||
sibling_params['options'] = self._merge_options(c,
|
||||
component_opts=self._get_interpolated_options(c),
|
||||
distro_opts=d_component.options,
|
||||
persona_opts={})
|
||||
sibling_params['options'] = self.config_loader.load(d_component, c)
|
||||
|
||||
LOG.debug("Constructing %r %s siblings...", c, len(d_component.siblings))
|
||||
my_siblings = self._construct_siblings(c, d_component.siblings, sibling_params, sibling_instances)
|
||||
# Now inject the full options and create the target instance
|
||||
@ -214,10 +180,8 @@ class Action(object):
|
||||
# siblings get...
|
||||
instance_params = dict(sibling_params)
|
||||
instance_params['instances'] = instances
|
||||
instance_params['options'] = self._merge_options(c,
|
||||
component_opts=self._get_interpolated_options(c),
|
||||
distro_opts=d_component.options,
|
||||
persona_opts=persona_opts.get(c, {}))
|
||||
instance_params['options'] = self.config_loader.load(d_component, c,
|
||||
persona)
|
||||
instance_params['siblings'] = my_siblings
|
||||
instance_params = utils.merge_dicts(instance_params, self.cli_opts, preserve=True)
|
||||
instances[c] = importer.construct_entry_point(d_component.entry_point, **instance_params)
|
||||
|
117
anvil/cfg.py
117
anvil/cfg.py
@ -27,6 +27,7 @@ import iniparse
|
||||
|
||||
from anvil import exceptions
|
||||
from anvil import log as logging
|
||||
from anvil import settings
|
||||
from anvil import shell as sh
|
||||
from anvil import utils
|
||||
|
||||
@ -163,32 +164,66 @@ class DefaultConf(object):
|
||||
self.backing.remove_option(section, key)
|
||||
|
||||
|
||||
# TODO(vnovikov): inject all config merges into class below
|
||||
#class YamlMergeLoader(object):
|
||||
#
|
||||
# def __init__(self, path):
|
||||
# self._merge_order = ('general',)
|
||||
# self._base_loader = YamlRefLoader(path)
|
||||
#
|
||||
# def load(self, distro, component, persona, cli):
|
||||
#
|
||||
# distro_opts = distro.options
|
||||
# general_component_opts = self._base_loader.load('general')
|
||||
# component_specific_opts = self._base_loader.load(component)
|
||||
# persona_component_opts = persona.component_options.get(component, {})
|
||||
# persona_global_opts = persona.component_options.get('global', {})
|
||||
# cli_opts = cli
|
||||
#
|
||||
# merged_opts = utils.merge_dicts(
|
||||
# distro_opts,
|
||||
# general_component_opts,
|
||||
# component_specific_opts,
|
||||
# persona_component_opts,
|
||||
# persona_global_opts,
|
||||
# cli_opts,
|
||||
# )
|
||||
#
|
||||
# return merged_opts
|
||||
class YamlMergeLoader(object):
    """Holds merging process component options (based on Yaml reference loader).

    Merge order is (later sources overwrite earlier ones):

    * Directory options (app_dir, component_dir...).
    * Distro matched options (from `distros` directory).
    * General component options (from `general.yaml`).
    * Persona general options (from personas/basic*.yaml with `general:` key).
    * Specific component options (from `component_name.yaml`).
    * Persona specific options (from personas/basic*.yaml
      with `component_name:` key).

    All merging is done to right with overwriting existing options (keys).
    """

    def __init__(self, root_dir):
        self._root_dir = root_dir
        self._base_loader = YamlRefLoader(settings.COMPONENT_CONF_DIR)

    def _get_dir_opts(self, component):
        """Return the derived per-component directory layout options."""
        component_dir = sh.joinpths(self._root_dir, component)
        trace_dir = sh.joinpths(component_dir, 'traces')
        app_dir = sh.joinpths(component_dir, 'app')
        return utils.OrderedDict([
            ('app_dir', app_dir),
            ('component_dir', component_dir),
            ('root_dir', self._root_dir),
            ('trace_dir', trace_dir)
        ])

    def _apply_persona(self, component, persona):
        """Apply persona specific options according to component.

        Include the general.yaml in each applying since it typically contains
        useful shared settings.
        """
        if persona is None:
            return
        # The persona overrides for this component are pushed into both the
        # 'general' cache and the component's own cache, so that references
        # resolved through either file see the persona redefinitions.
        # Note: any additional redefines could be added here.
        persona_specific = persona.component_options.get(component, {})
        for conf in ('general', component):
            self._base_loader.update_cache(conf, persona_specific)

    def load(self, distro, component, persona=None):
        """Load and merge all option sources for `component`.

        :param distro: distro object providing its matched `options` dict
        :param component: component name (also the component's yaml file name)
        :param persona: optional persona whose `component_options` override
                        the cached yaml values before reference resolution
        :returns: merged options dict (later sources win on key conflicts)
        """
        # NOTE (vnovikov): applying takes place before loading reference links
        self._apply_persona(component, persona)

        dir_opts = self._get_dir_opts(component)
        distro_opts = distro.options
        general_component_opts = self._base_loader.load('general')
        component_specific_opts = self._base_loader.load(component)

        # NOTE (vnovikov): merge order is the same as arguments order below.
        merged_opts = utils.merge_dicts(
            dir_opts,
            distro_opts,
            general_component_opts,
            component_specific_opts,
        )

        return merged_opts
|
||||
|
||||
|
||||
class YamlRefLoader(object):
|
||||
@ -296,18 +331,6 @@ class YamlRefLoader(object):
|
||||
|
||||
return processed
|
||||
|
||||
def _cache(self, conf):
|
||||
"""Cache config file into memory to avoid re-reading it from disk."""
|
||||
if conf not in self._cached:
|
||||
path = sh.joinpths(self._path, conf + self._conf_ext)
|
||||
if not sh.isfile(path):
|
||||
raise exceptions.YamlConfigNotFoundException(path)
|
||||
|
||||
# TODO(vnovikov): may be it makes sense to reintroduce load_yaml
|
||||
# for returning OrderedDict with the same order as options placement
|
||||
# in source yaml file...
|
||||
self._cached[conf] = utils.load_yaml(path) or {}
|
||||
|
||||
def _precache(self):
|
||||
"""Cache and process predefined auto-references"""
|
||||
for conf, options in self._predefined_refs.items():
|
||||
@ -342,6 +365,24 @@ class YamlRefLoader(object):
|
||||
self._ref_stack.pop()
|
||||
return result
|
||||
|
||||
def _cache(self, conf):
    """Read config `conf` into the in-memory cache (once) to avoid
    re-reading it from disk on subsequent loads.
    """
    if conf in self._cached:
        return
    path = sh.joinpths(self._path, conf + self._conf_ext)
    if not sh.isfile(path):
        raise exceptions.YamlConfigNotFoundException(path)
    self._cached[conf] = utils.load_yaml(path) or {}
|
||||
|
||||
def update_cache(self, conf, dict2update):
    """Merge `dict2update` into the cached raw options of config `conf`.

    The config file is cached first (if it was not already), then the raw
    cached mapping is updated in place.  Previously processed
    (reference-resolved) data for `conf` is discarded so the next load()
    re-resolves references against the freshly updated cache.
    """
    self._cache(conf)
    self._cached[conf].update(dict2update)
    # NOTE (vnovikov): should remove obsolete processed data
    self._processed[conf] = {}
|
||||
|
||||
def load(self, conf):
|
||||
"""Load config `conf` from same yaml file with and resolve all
|
||||
references.
|
||||
|
@ -1,3 +1,4 @@
|
||||
import mock
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
@ -506,3 +507,130 @@ class TestYamlRefLoader(unittest.TestCase):
|
||||
|
||||
self.assertRaises(exceptions.YamlLoopException,
|
||||
self.loader.load, 'sample')
|
||||
|
||||
def test_update_cache(self):
    """Cached raw values can be overridden and references re-resolve
    against the updated caches.
    """
    # 'reference'/'reference2' point at sample2:stable, 'reference3' at an
    # untouched key; update_cache then overrides 'reference' directly in
    # sample and 'stable' inside sample2.
    self.sample = """
stable: 9

reference: "$(sample2:stable)"
reference2: "$(sample2:stable)"
reference3: "$(sample2:stable2)"
"""

    self.sample2 = """
stable: 10
stable2: 11
"""

    self._write_samples()

    self.loader.update_cache('sample', dict(reference=20))
    self.loader.update_cache('sample2', dict(stable=21))

    processed = self.loader.load('sample')
    # Untouched plain value survives.
    self.assertEqual(processed['stable'], 9)
    # Directly overridden value wins over the reference it replaced.
    self.assertEqual(processed['reference'], 20)
    # Reference resolves through sample2's overridden cache.
    self.assertEqual(processed['reference2'], 21)
    # Reference to a key that was never overridden keeps its file value.
    self.assertEqual(processed['reference3'], 11)
|
||||
|
||||
def test_update_cache__few_times(self):
    """Repeated update_cache calls each invalidate the processed data,
    so every subsequent load() reflects the latest override.
    """
    self.sample = "stable: '$(sample2:stable)'"
    self.sample2 = "stable: 10"

    self._write_samples()

    processed = self.loader.load('sample')
    self.assertEqual(processed['stable'], 10)

    self.loader.update_cache('sample', dict(stable=11))
    processed = self.loader.load('sample')
    self.assertEqual(processed['stable'], 11)

    self.loader.update_cache('sample', dict(stable=12))
    processed = self.loader.load('sample')
    self.assertEqual(processed['stable'], 12)
|
||||
|
||||
|
||||
class TestYamlMergeLoader(unittest.TestCase):
    """Tests for cfg.YamlMergeLoader's full option-merging pipeline."""

    def setUp(self):
        super(TestYamlMergeLoader, self).setUp()

        # Minimal stand-in for a distro object: only `options` is read
        # by YamlMergeLoader.load().
        class Distro(object):

            def __init__(self):
                self.options = {
                    'unique-distro': True,
                    'redefined-in-general': 0,
                    'redefined-in-component': 0
                }

        # Minimal stand-in for a persona object: only `component_options`
        # is read by YamlMergeLoader._apply_persona().
        class Persona(object):

            def __init__(self):
                self.component_options = {
                    'component': {
                        'unique-specific': True,
                        'redefined-in-specific': 1
                    }
                }

        # Yaml file contents; individual tests fill these before calling
        # _write_samples().
        self.general = ""
        self.component = ""
        self.distro = Distro()
        self.persona = Persona()

        self.temp_dir = tempfile.mkdtemp()

        # Point the component config dir at the temp dir so the loader
        # reads the samples written by _write_samples().
        with mock.patch('anvil.settings.COMPONENT_CONF_DIR', self.temp_dir):
            self.loader = cfg.YamlMergeLoader(self.temp_dir)

    def tearDown(self):
        super(TestYamlMergeLoader, self).tearDown()

        shutil.rmtree(self.temp_dir, ignore_errors=True)

    def _write_samples(self):
        """Write the current sample contents as the loader's yaml files."""
        with open(os.path.join(self.temp_dir, 'general.yaml'), 'w') as f:
            f.write(self.general)

        with open(os.path.join(self.temp_dir, 'component.yaml'), 'w') as f:
            f.write(self.component)

    def test_load(self):
        """Merged result honors the documented precedence order."""
        self.general = """
unique-general: True
redefined-in-general: 1
redefined-in-component: 1
"""

        self.component = """
unique-component: True
redefined-in-component: 2
redefined-in-specific: 0
"""

        self._write_samples()

        merged = self.loader.load(self.distro, 'component', self.persona)
        should_be = utils.OrderedDict([
            # Directory layout options come first.
            ('app_dir', os.path.join(self.temp_dir, 'component', 'app')),
            ('component_dir', os.path.join(self.temp_dir, 'component')),
            ('root_dir', os.path.join(self.temp_dir)),
            ('trace_dir', os.path.join(self.temp_dir, 'component', 'traces')),

            # Redefined keys: general beats distro, component beats general,
            # persona-specific beats component.
            ('unique-distro', True),
            ('redefined-in-general', 1),
            ('redefined-in-component', 2),
            ('redefined-in-specific', 1),

            ('unique-general', True),
            ('unique-specific', True),
            ('unique-component', True),
        ])
        self.assertEqual(merged, should_be)

        # yet once loading with changed values.
        self.persona.component_options['component']['redefined-in-specific'] = 2
        merged = self.loader.load(self.distro, 'component', self.persona)
        self.assertEqual(merged['redefined-in-specific'], 2)
|
||||
|
@ -36,11 +36,11 @@ fixed_range: "10.0.0.0/24"
|
||||
# Used however you want - ensure you know nova's conf file format if you use this!
|
||||
extra_flags: ""
|
||||
|
||||
# DHCP Warning: If your flat interface device uses DHCP, there will be a hiccup while the network
|
||||
# is moved from the flat interface to the flat network bridge. This will happen when you launch
|
||||
# DHCP Warning: If your flat interface device uses DHCP, there will be a hiccup while the network
|
||||
# is moved from the flat interface to the flat network bridge. This will happen when you launch
|
||||
# your first instance. Upon launch you will lose all connectivity to the node, and the vm launch will probably fail.
|
||||
#
|
||||
# If you are running on a single node and don't need to access the VMs from devices other than
|
||||
# If you are running on a single node and don't need to access the VMs from devices other than
|
||||
# that node, you can set the flat interface to the same value as FLAT_NETWORK_BRIDGE.
|
||||
# This will stop the network hiccup from occurring.
|
||||
|
||||
@ -83,8 +83,8 @@ libvirt_type: "qemu"
|
||||
# This is just a firewall based on iptables, for non-libvirt usage
|
||||
basic_firewall_driver: nova.virt.firewall.IptablesFirewallDriver
|
||||
|
||||
# Multi-host is a mode where each compute node runs its own network node.
|
||||
# This allows network operations and routing for a VM to occur on the server
|
||||
# Multi-host is a mode where each compute node runs its own network node.
|
||||
# This allows network operations and routing for a VM to occur on the server
|
||||
# that is running the VM - removing a SPOF and bandwidth bottleneck.
|
||||
multi_host: False
|
||||
|
||||
|
@ -4,3 +4,4 @@ pyflakes==0.7.2
|
||||
flake8==2.0
|
||||
pylint==0.25.2
|
||||
hacking>=0.5.3,<0.6
|
||||
mock
|
||||
|
Loading…
x
Reference in New Issue
Block a user