PEP-8 code styling and linting
As a newer project, Spyglass is still a work in progress in terms of its styling. This change is meant to improve the readability and PEP-8 compliance of the Spyglass source code. To match other Airship projects, this change wires YAPF into tox so that linting is automatically run; this should keep formatting grey areas styled consistently.

PEP-8: https://www.python.org/dev/peps/pep-0008/
OpenStack Style Guidelines: https://docs.openstack.org/hacking/latest/user/hacking.html
PEP Index: https://www.python.org/dev/peps/

Depends on https://review.openstack.org/#/c/648764.

Change-Id: I45b19cc8a7932fd7823dcb69f64a0a1bf19fc434
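For readers unfamiliar with YAPF, here is a minimal sketch of what the formatter does when invoked programmatically. The `pep8` style name is an assumption for illustration; the actual wiring for this change lives in tox.ini, which is not shown on this page.

    from yapf.yapflib.yapf_api import FormatCode

    # Deliberately mis-formatted input
    messy = "def f( a,b ):\n  return {'x':a , 'y' : b}\n"

    # FormatCode returns the reformatted source plus a flag saying
    # whether anything actually changed
    formatted, changed = FormatCode(messy, style_config="pep8")
    print(changed)    # True: the source needed reformatting
    print(formatted)  # def f(a, b):\n    return {'x': a, 'y': b}\n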
This commit is contained in:
parent f24e7cfba7
commit 3bf68e464a

doc/source/conf.py (16 lines changed; Normal file → Executable file)
@@ -18,9 +18,10 @@
 #
 import os
 import sys
-sys.path.insert(0, os.path.abspath('../../'))
 import sphinx_rtd_theme

+sys.path.insert(0, os.path.abspath('../../'))
+

 # -- General configuration ------------------------------------------------

@@ -50,18 +51,18 @@ source_suffix = '.rst'
 master_doc = 'index'

 # General information about the project.
-project = u'tugboat'
+project = 'tugboat'
-copyright = u'2018 AT&T Intellectual Property.'
+copyright = '2018 AT&T Intellectual Property.'
-author = u'Tugboat Authors'
+author = 'Tugboat Authors'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u'0.1.0'
+version = '0.1.0'
 # The full version, including alpha/beta/rc tags.
-release = u'0.1.0'
+release = '0.1.0'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -81,7 +82,6 @@ pygments_style = 'sphinx'
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False

-
 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages.  See the documentation for
@@ -101,13 +101,11 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 # so a file named "default.css" will overwrite the builtin "default.css".
 html_static_path = []

-
 # -- Options for HTMLHelp output ------------------------------------------

 # Output file base name for HTML help builder.
 htmlhelp_basename = 'ucpintdoc'

-
 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
doc/source/developer_quickstart.rst (new file, 67 lines)

@@ -0,0 +1,67 @@
+..
+      Copyright 2018 AT&T Intellectual Property.
+      All Rights Reserved.
+
+      Licensed under the Apache License, Version 2.0 (the "License"); you may
+      not use this file except in compliance with the License. You may obtain
+      a copy of the License at
+
+           http://www.apache.org/licenses/LICENSE-2.0
+
+      Unless required by applicable law or agreed to in writing, software
+      distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+      WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+      License for the specific language governing permissions and limitations
+      under the License.
+
+==========================
+Developer Quickstart Guide
+==========================
+
+To run your first spyglass job, follow these steps from inside the
+airship-spyglass directory.
+
+1. Install external dependencies if not already installed.
+
+   .. code-block:: console
+
+      sudo apt install -y python3-pip
+      sudo apt install -y tox
+
+2. Set up an environment with tox.
+
+   .. code-block:: console
+
+      tox -e py36 --notest
+
+3. Enter the tox environment.
+
+   .. code-block:: console
+
+      source .tox/py36/bin/activate
+
+4. Install spyglass in the tox environment.
+
+   .. code-block:: console
+
+      pip install -e .
+
+5. Run spyglass on the example files to generate an intermediate document.
+
+   .. code-block:: console
+
+      mkdir intermediate
+      spyglass -g -s airship-seaworthy -t tugboat \
+        -idir intermediate \
+        --excel_spec spyglass/examples/excel_spec.yaml \
+        --excel spyglass/examples/SiteDesignSpec_v0.1.xlsx \
+        --additional_config spyglass/examples/site_config.yaml \
+        --template_dir spyglass/examples/templates/
+
+6. Run spyglass on the intermediate document to generate manifests.
+
+   .. code-block:: console
+
+      mkdir manifest_dir
+      spyglass -m -i intermediate/airship-seaworthy_intermediary.yaml \
+        -mdir manifest_dir/ -tdir spyglass/examples/templates/
@@ -32,4 +32,5 @@ fed to Shipyard for site deployment / updates.
    :maxdepth: 2

    getting_started
+   developer_quickstart
    tugboat
@@ -4,7 +4,7 @@ FROM ${FROM}
 VOLUME /var/spyglass
 WORKDIR /var/spyglass

-ARG ctx_base=.
+ARG ctx_base=./

 COPY ${ctx_base}/requirements.txt /opt/spyglass/requirements.txt
 RUN pip3 install --no-cache-dir -r /opt/spyglass/requirements.txt
@@ -4,4 +4,3 @@ netaddr
 openpyxl==2.5.4
 pyyaml==3.12
 requests
-six
setup.py (10 lines changed; Normal file → Executable file)

@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from setuptools import setup
 from setuptools import find_packages
+from setuptools import setup

 setup(
     name='spyglass',
@@ -35,9 +35,11 @@ setup(
         'console_scripts': [
             'spyglass=spyglass.spyglass:main',
         ],
-        'data_extractor_plugins':
-        ['formation=spyglass.data_extractor.plugins.formation:FormationPlugin',
-         'tugboat=spyglass.data_extractor.plugins.tugboat.tugboat:TugboatPlugin',
+        'data_extractor_plugins': [
+            'formation='
+            'spyglass.data_extractor.plugins.formation:FormationPlugin',
+            'tugboat='
+            'spyglass.data_extractor.plugins.tugboat.tugboat:TugboatPlugin',
         ]
     },
     include_package_data=True,
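A note on the hunk above: the adjacent string literals are concatenated by Python, so each entry still reads as a single `name=module:Class` string; `data_extractor_plugins` is a setuptools entry-point group. As a sketch (not taken from the Spyglass source shown on this page), plugins registered this way are typically discovered like so:

    import pkg_resources

    # Enumerate everything registered under the group declared in setup.py
    for entry_point in pkg_resources.iter_entry_points("data_extractor_plugins"):
        plugin_cls = entry_point.load()  # imports e.g. FormationPlugin
        print(entry_point.name, plugin_cls)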
spyglass/data_extractor/base.py (67 lines changed; Normal file → Executable file)

@@ -15,15 +15,13 @@
 import abc
 import logging
 import pprint
-import six

 from spyglass.utils import utils

 LOG = logging.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class BaseDataSourcePlugin(object):
+class BaseDataSourcePlugin(metaclass=abc.ABCMeta):
     """Provide basic hooks for data source plugins"""

     def __init__(self, region):
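The hunk above drops the six compatibility shim: on Python 3 the `metaclass` keyword does natively what `@six.add_metaclass` did for Python 2/3 code. A small self-contained illustration of the equivalent behavior:

    import abc

    class Base(metaclass=abc.ABCMeta):
        @abc.abstractmethod
        def get_hosts(self, region, rack=None):
            """Subclasses must override this."""

    class Incomplete(Base):
        pass

    # Both of these raise TypeError because the abstract method
    # get_hosts has not been implemented:
    # Base()
    # Incomplete()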
@@ -52,10 +50,10 @@ class BaseDataSourcePlugin(object):

         If validation fails, Spyglass exits.

-        :param char pointer: Spyglass CLI parameters.
+        :param char kwargs: Spyglass CLI parameters.
         :returns plugin conf if successfully validated.

-        Each plugin implements their own validaton mechanism.
+        Each plugin implements their own validation mechanism.
         """

         return {}
@@ -74,7 +72,7 @@ class BaseDataSourcePlugin(object):
         return []

     @abc.abstractmethod
-    def get_hosts(self, region, rack):
+    def get_hosts(self, region, rack=None):
         """Return list of hosts in the region

         :param string region: Region name
@@ -291,29 +289,23 @@ class BaseDataSourcePlugin(object):

             # Fill network IP for this host
             temp_host["ip"] = {}
-            temp_host["ip"]["oob"] = temp_host_ips[host_name].get(
-                "oob", "#CHANGE_ME"
-            )
-            temp_host["ip"]["calico"] = temp_host_ips[host_name].get(
-                "calico", "#CHANGE_ME"
-            )
-            temp_host["ip"]["oam"] = temp_host_ips[host_name].get(
-                "oam", "#CHANGE_ME"
-            )
-            temp_host["ip"]["storage"] = temp_host_ips[host_name].get(
-                "storage", "#CHANGE_ME"
-            )
-            temp_host["ip"]["overlay"] = temp_host_ips[host_name].get(
-                "overlay", "#CHANGE_ME"
-            )
-            temp_host["ip"]["pxe"] = temp_host_ips[host_name].get(
-                "pxe", "#CHANGE_ME"
-            )
+            temp_host["ip"]["oob"] = \
+                temp_host_ips[host_name].get("oob", "#CHANGE_ME")
+            temp_host["ip"]["calico"] = \
+                temp_host_ips[host_name].get("calico", "#CHANGE_ME")
+            temp_host["ip"]["oam"] = \
+                temp_host_ips[host_name].get("oam", "#CHANGE_ME")
+            temp_host["ip"]["storage"] = \
+                temp_host_ips[host_name].get("storage", "#CHANGE_ME")
+            temp_host["ip"]["overlay"] = \
+                temp_host_ips[host_name].get("overlay", "#CHANGE_ME")
+            temp_host["ip"]["pxe"] = \
+                temp_host_ips[host_name].get("pxe", "#CHANGE_ME")

             baremetal[rack_name][host_name] = temp_host
-        LOG.debug(
-            "Baremetal information:\n{}".format(pprint.pformat(baremetal))
-        )
+        LOG.debug("Baremetal information:\n{}".format(
+            pprint.pformat(baremetal)))

         return baremetal

@@ -357,9 +349,8 @@ class BaseDataSourcePlugin(object):
         domain_data = self.get_domain_name(self.region)
         site_info["domain"] = domain_data

-        LOG.debug(
-            "Extracted site information:\n{}".format(pprint.pformat(site_info))
-        )
+        LOG.debug("Extracted site information:\n{}".format(
+            pprint.pformat(site_info)))

         return site_info

@@ -405,14 +396,13 @@ class BaseDataSourcePlugin(object):
             tmp_net = {}
             if net["name"] in networks_to_scan:
                 tmp_net["subnet"] = net.get("subnet", "#CHANGE_ME")
-                if (net["name"] != "ingress") and (net["name"] != "oob"):
+                if net["name"] != "ingress" and net["name"] != "oob":
                     tmp_net["vlan"] = net.get("vlan", "#CHANGE_ME")

             network_data["vlan_network_data"][net["name"]] = tmp_net

-        LOG.debug(
-            "Extracted network data:\n{}".format(pprint.pformat(network_data))
-        )
+        LOG.debug("Extracted network data:\n{}".format(
+            pprint.pformat(network_data)))
         return network_data

     def extract_data(self):
@@ -423,10 +413,11 @@ class BaseDataSourcePlugin(object):
         """

         LOG.info("Extract data from plugin")
-        site_data = {}
-        site_data["baremetal"] = self.extract_baremetal_information()
-        site_data["site_info"] = self.extract_site_information()
-        site_data["network"] = self.extract_network_information()
+        site_data = {
+            "baremetal": self.extract_baremetal_information(),
+            "site_info": self.extract_site_information(),
+            "network": self.extract_network_information()
+        }
         self.site_data = site_data
         return site_data
@@ -32,11 +32,9 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs

     def display_error(self):
-        print(
-            "No spec matched. Following are the available specs:\n".format(
-                self.specs
-            )
-        )
+        # FIXME (Ian Pittwood): use log instead of print
+        print("No spec matched. Following are the available specs:\n".format(
+            self.specs))
         sys.exit(1)


@@ -56,5 +54,5 @@ class TokenGenerationError(BaseError):
     pass


-class ConnectionError(BaseError):
+class FormationConnectionError(BaseError):
     pass
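One reason the rename above matters (an inference; the commit message does not spell it out): Python 3 has a builtin `ConnectionError`, so a local exception class of the same name shadows it wherever it is imported. A hypothetical illustration:

    class ConnectionError(Exception):  # shadows the builtin of the same name
        pass

    try:
        raise ConnectionError("raised by the local class")
    except ConnectionError as err:
        # This catches the local class. Code elsewhere that expects the
        # builtin (an OSError subclass) would no longer match it.
        print(isinstance(err, OSError))  # False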
spyglass/data_extractor/plugins/formation.py (83 lines changed; Normal file → Executable file)

@@ -12,15 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import formation_client
 import logging
 import pprint
 import re

+import formation_client
 import requests
 import urllib3

 from spyglass.data_extractor.base import BaseDataSourcePlugin

 import spyglass.data_extractor.custom_exceptions as exceptions

 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -136,17 +136,16 @@ class FormationPlugin(BaseDataSourcePlugin):
                 auth=(self.user, self.password),
                 verify=self.client_config.verify_ssl,
             )
-        except requests.exceptions.exceptions.ConnectionError:
-            raise exceptions.ConnectionError("Incorrect URL: {}".format(url))
+        except requests.exceptions.ConnectionError:
+            raise exceptions.FormationConnectionError(
+                "Incorrect URL: {}".format(url))

         if token_response.status_code == 200:
             self.token = token_response.json().get("X-Subject-Token", None)
         else:
             raise exceptions.TokenGenerationError(
                 "Unable to generate token because {}".format(
-                    token_response.reason
-                )
-            )
+                    token_response.reason))

         return self.token

@@ -160,9 +159,8 @@ class FormationPlugin(BaseDataSourcePlugin):

         token = self._generate_token()
         self.client_config.api_key = {"X-Auth-Token": self.user + "|" + token}
-        self.formation_api_client = formation_client.ApiClient(
-            self.client_config
-        )
+        self.formation_api_client = \
+            formation_client.ApiClient(self.client_config)

     def _update_site_and_zone(self, region):
         """Get Zone name and Site name from region"""
@@ -309,32 +307,27 @@ class FormationPlugin(BaseDataSourcePlugin):
         zone_id = self._get_zone_id_by_name(zone)
         device_api = formation_client.DevicesApi(self.formation_api_client)
         control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
-        compute_hosts = device_api.zones_zone_id_devices_get(
-            zone_id, type="KVM"
-        )
+        compute_hosts = device_api.zones_zone_id_devices_get(zone_id,
+                                                             type="KVM")

         hosts_list = []
         for host in control_hosts:
             self.device_name_id_mapping[host.aic_standard_name] = host.id
-            hosts_list.append(
-                {
-                    "name": host.aic_standard_name,
-                    "type": "controller",
-                    "rack_name": host.rack_name,
-                    "host_profile": host.host_profile_name,
-                }
-            )
+            hosts_list.append({
+                "name": host.aic_standard_name,
+                "type": "controller",
+                "rack_name": host.rack_name,
+                "host_profile": host.host_profile_name,
+            })

         for host in compute_hosts:
             self.device_name_id_mapping[host.aic_standard_name] = host.id
-            hosts_list.append(
-                {
-                    "name": host.aic_standard_name,
-                    "type": "compute",
-                    "rack_name": host.rack_name,
-                    "host_profile": host.host_profile_name,
-                }
-            )
+            hosts_list.append({
+                "name": host.aic_standard_name,
+                "type": "compute",
+                "rack_name": host.rack_name,
+                "host_profile": host.host_profile_name,
+            })

         """
         for host in itertools.chain(control_hosts, compute_hosts):
             self.device_name_id_mapping[host.aic_standard_name] = host.id
@@ -354,8 +347,7 @@ class FormationPlugin(BaseDataSourcePlugin):
         region_id = self._get_region_id_by_name(region)
         vlan_api = formation_client.VlansApi(self.formation_api_client)
         vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
-            zone_id, region_id
-        )
+            zone_id, region_id)

         # Case when vlans list is empty from
         # zones_zone_id_regions_region_id_vlans_get
@@ -364,22 +356,22 @@ class FormationPlugin(BaseDataSourcePlugin):
             hosts = self.get_hosts(self.region)
             host = hosts[0]["name"]
             device_id = self._get_device_id_by_name(host)
-            vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
-                zone_id, device_id
-            )
+            vlans = \
+                vlan_api.zones_zone_id_devices_device_id_vlans_get(zone_id,
+                                                                   device_id)

         LOG.debug("Extracted region network information\n{}".format(vlans))
         vlans_list = []
         for vlan_ in vlans:
             if len(vlan_.vlan.ipv4) != 0:
-                tmp_vlan = {}
-                tmp_vlan["name"] = self._get_network_name_from_vlan_name(
-                    vlan_.vlan.name
-                )
-                tmp_vlan["vlan"] = vlan_.vlan.vlan_id
-                tmp_vlan["subnet"] = vlan_.vlan.subnet_range
-                tmp_vlan["gateway"] = vlan_.ipv4_gateway
-                tmp_vlan["subnet_level"] = vlan_.vlan.subnet_level
+                tmp_vlan = {
+                    "name":
+                    self._get_network_name_from_vlan_name(vlan_.vlan.name),
+                    "vlan": vlan_.vlan.vlan_id,
+                    "subnet": vlan_.vlan.subnet_range,
+                    "gateway": vlan_.ipv4_gateway,
+                    "subnet_level": vlan_.vlan.subnet_level
+                }
                 vlans_list.append(tmp_vlan)

         return vlans_list
@@ -401,9 +393,9 @@ class FormationPlugin(BaseDataSourcePlugin):

         for host in hosts:
             device_id = self._get_device_id_by_name(host)
-            vlans = vlan_api.zones_zone_id_devices_device_id_vlans_get(
-                zone_id, device_id
-            )
+            vlans = \
+                vlan_api.zones_zone_id_devices_device_id_vlans_get(zone_id,
+                                                                   device_id)
             LOG.debug("Received VLAN Network Information\n{}".format(vlans))
             ip_[host] = {}
             for vlan_ in vlans:
@@ -411,14 +403,10 @@ class FormationPlugin(BaseDataSourcePlugin):
                 # list is empty
                 if len(vlan_.vlan.ipv4) != 0:
                     name = self._get_network_name_from_vlan_name(
-                        vlan_.vlan.name
-                    )
+                        vlan_.vlan.name)
                     ipv4 = vlan_.vlan.ipv4[0].ip
-                    LOG.debug(
-                        "vlan:{},name:{},ip:{},vlan_name:{}".format(
-                            vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name
-                        )
-                    )
+                    LOG.debug("vlan:{},name:{},ip:{},vlan_name:{}".format(
+                        vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name))
                     # TODD(pg710r) This code needs to extended to support ipv4
                     # and ipv6
                     # ip_[host][name] = {'ipv4': ipv4}
@@ -465,7 +453,7 @@ class FormationPlugin(BaseDataSourcePlugin):
             raise exceptions.ApiClientError(e.msg)

         if not zone_.ipv4_dns:
-            LOG.warn("No dns server")
+            LOG.warning("No dns server")
             return []

         dns_list = []
@@ -498,8 +486,7 @@ class FormationPlugin(BaseDataSourcePlugin):
             }
         except AttributeError as e:
             raise exceptions.MissingAttributeError(
-                "Missing {} information in {}".format(e, site_info.city)
-            )
+                "Missing {} information in {}".format(e, site_info.city))

     def get_domain_name(self, region):
         try:
@@ -511,7 +498,7 @@ class FormationPlugin(BaseDataSourcePlugin):
             raise exceptions.ApiClientError(e.msg)

         if not zone_.dns:
-            LOG.warn("Got None while running get domain name")
+            LOG.warning("Got None while running get domain name")
             return None

         return zone_.dns
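The `LOG.warn` to `LOG.warning` changes above track the standard library: `Logger.warn` is a deprecated alias for `Logger.warning`. A minimal demonstration:

    import logging

    LOG = logging.getLogger(__name__)
    LOG.warning("No dns server")  # supported spelling
    # LOG.warn("No dns server")   # still works, but emits a DeprecationWarning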
@@ -31,8 +31,5 @@ class NoSpecMatched(BaseError):
         self.specs = excel_specs

     def display_error(self):
-        print(
-            "No spec matched. Following are the available specs:\n".format(
-                self.specs
-            )
-        )
+        print("No spec matched. Following are the available specs:\n".format(
+            self.specs))
spyglass/data_extractor/plugins/tugboat/excel_parser.py (240 lines changed; Normal file → Executable file)

@@ -13,17 +13,16 @@
 # limitations under the License.

 import logging
-from openpyxl import load_workbook
-from openpyxl import Workbook
 import pprint
 import re
 import sys

+from openpyxl import load_workbook
+from openpyxl import Workbook
 import yaml

 from spyglass.data_extractor.custom_exceptions import NoSpecMatched
-# from spyglass.data_extractor.custom_exceptions

 LOG = logging.getLogger(__name__)

@@ -35,7 +34,7 @@ class ExcelParser(object):
         with open(excel_specs, "r") as f:
             spec_raw_data = f.read()
         self.excel_specs = yaml.safe_load(spec_raw_data)
-        # A combined design spec, returns a workbok object after combining
+        # A combined design spec, returns a workbook object after combining
         # all the inputs excel specs
         combined_design_spec = self.combine_excel_design_specs(file_name)
         self.wb_combined = combined_design_spec
@@ -80,33 +79,24 @@ class ExcelParser(object):

         ipmi_data = {}
         hosts = []
-        provided_sheetname = self.excel_specs["specs"][self.spec][
-            "ipmi_sheet_name"
-        ]
-        workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname
-        )
+        spec_ = self.excel_specs["specs"][self.spec]
+        provided_sheetname = spec_["ipmi_sheet_name"]
+        workbook_object, extracted_sheetname = \
+            self.get_xl_obj_and_sheetname(provided_sheetname)
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        row = self.excel_specs["specs"][self.spec]["start_row"]
-        end_row = self.excel_specs["specs"][self.spec]["end_row"]
-        hostname_col = self.excel_specs["specs"][self.spec]["hostname_col"]
-        ipmi_address_col = self.excel_specs["specs"][self.spec][
-            "ipmi_address_col"
-        ]
-        host_profile_col = self.excel_specs["specs"][self.spec][
-            "host_profile_col"
-        ]
-        ipmi_gateway_col = self.excel_specs["specs"][self.spec][
-            "ipmi_gateway_col"
-        ]
+        row = spec_["start_row"]
+        end_row = spec_["end_row"]
+        hostname_col = spec_["hostname_col"]
+        ipmi_address_col = spec_["ipmi_address_col"]
+        host_profile_col = spec_["host_profile_col"]
+        ipmi_gateway_col = spec_["ipmi_gateway_col"]
         previous_server_gateway = None
         while row <= end_row:
-            hostname = self.sanitize(
-                ws.cell(row=row, column=hostname_col).value
-            )
+            hostname = \
+                self.sanitize(ws.cell(row=row, column=hostname_col).value)
             hosts.append(hostname)
             ipmi_address = ws.cell(row=row, column=ipmi_address_col).value
             if "/" in ipmi_address:
@@ -119,12 +109,10 @@ class ExcelParser(object):
             host_profile = ws.cell(row=row, column=host_profile_col).value
             try:
                 if host_profile is None:
-                    raise RuntimeError(
-                        "No value read from {} ".format(self.file_name)
-                        + "sheet:{} row:{}, col:{}".format(
-                            self.spec, row, host_profile_col
-                        )
-                    )
+                    raise RuntimeError("No value read from "
+                                       "{} sheet:{} row:{}, col:{}".format(
+                                           self.file_name, self.spec, row,
                                           host_profile_col))
             except RuntimeError as rerror:
                 LOG.critical(rerror)
                 sys.exit("Tugboat exited!!")
@@ -132,17 +120,13 @@ class ExcelParser(object):
                 "ipmi_address": ipmi_address,
                 "ipmi_gateway": ipmi_gateway,
                 "host_profile": host_profile,
-                "type": type,
+                "type": type,  # FIXME (Ian Pittwood): shadows type built-in
             }
             row += 1
-        LOG.debug(
-            "ipmi data extracted from excel:\n{}".format(
-                pprint.pformat(ipmi_data)
-            )
-        )
-        LOG.debug(
-            "host data extracted from excel:\n{}".format(pprint.pformat(hosts))
-        )
+        LOG.debug("ipmi data extracted from excel:\n{}".format(
+            pprint.pformat(ipmi_data)))
+        LOG.debug("host data extracted from excel:\n{}".format(
+            pprint.pformat(hosts)))
         return [ipmi_data, hosts]

     def get_private_vlan_data(self, ws):
@@ -161,30 +145,27 @@ class ExcelParser(object):
             vlan = vlan.lower()
             vlan_data[vlan] = cell_value
             row += 1
-        LOG.debug(
-            "vlan data extracted from excel:\n%s", pprint.pformat(vlan_data)
-        )
+        LOG.debug("vlan data extracted from excel:\n%s" %
+                  pprint.pformat(vlan_data))
         return vlan_data

     def get_private_network_data(self):
         """Read network data from the private ip sheet"""

-        provided_sheetname = self.excel_specs["specs"][self.spec][
-            "private_ip_sheet"
-        ]
-        workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname
-        )
+        spec_ = self.excel_specs["specs"][self.spec]
+        provided_sheetname = spec_["private_ip_sheet"]
+        workbook_object, extracted_sheetname = \
+            self.get_xl_obj_and_sheetname(provided_sheetname)
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
         vlan_data = self.get_private_vlan_data(ws)
         network_data = {}
-        row = self.excel_specs["specs"][self.spec]["net_start_row"]
-        end_row = self.excel_specs["specs"][self.spec]["net_end_row"]
-        col = self.excel_specs["specs"][self.spec]["net_col"]
-        vlan_col = self.excel_specs["specs"][self.spec]["net_vlan_col"]
+        row = spec_["net_start_row"]
+        end_row = spec_["net_end_row"]
+        col = spec_["net_col"]
+        vlan_col = spec_["net_vlan_col"]
         old_vlan = ""
         while row <= end_row:
             vlan = ws.cell(row=row, column=vlan_col).value
@@ -212,93 +193,82 @@ class ExcelParser(object):
                 network_data[network]['is_common'] = False
             else:
                 network_data[network]['is_common'] = True
-        LOG.debug(
-            "private network data extracted from\
-                  excel:\n%s", pprint.pformat(network_data))
+        LOG.debug("private network data extracted from excel:\n%s"
+                  % pprint.pformat(network_data))
         """
         return network_data

     def get_public_network_data(self):
         """Read public network data from public ip data"""

-        network_data = {}
-        provided_sheetname = self.excel_specs["specs"][self.spec][
-            "public_ip_sheet"
-        ]
+        spec_ = self.excel_specs["specs"][self.spec]
+        provided_sheetname = spec_["public_ip_sheet"]
         workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname
-        )
+            provided_sheetname)
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        oam_row = self.excel_specs["specs"][self.spec]["oam_ip_row"]
-        oam_col = self.excel_specs["specs"][self.spec]["oam_ip_col"]
-        oam_vlan_col = self.excel_specs["specs"][self.spec]["oam_vlan_col"]
-        ingress_row = self.excel_specs["specs"][self.spec]["ingress_ip_row"]
-        oob_row = self.excel_specs["specs"][self.spec]["oob_net_row"]
-        col = self.excel_specs["specs"][self.spec]["oob_net_start_col"]
-        end_col = self.excel_specs["specs"][self.spec]["oob_net_end_col"]
+        oam_row = spec_["oam_ip_row"]
+        oam_col = spec_["oam_ip_col"]
+        oam_vlan_col = spec_["oam_vlan_col"]
+        ingress_row = spec_["ingress_ip_row"]
+        oob_row = spec_["oob_net_row"]
+        col = spec_["oob_net_start_col"]
+        end_col = spec_["oob_net_end_col"]
         network_data = {
             "oam": {
                 "subnet": [ws.cell(row=oam_row, column=oam_col).value],
                 "vlan": ws.cell(row=oam_row, column=oam_vlan_col).value,
             },
             "ingress": ws.cell(row=ingress_row, column=oam_col).value,
+            "oob": {
+                "subnet": [],
+            }
         }
-        network_data["oob"] = {"subnet": []}
         while col <= end_col:
             cell_value = ws.cell(row=oob_row, column=col).value
             if cell_value:
                 network_data["oob"]["subnet"].append(self.sanitize(cell_value))
             col += 1
-        LOG.debug(
-            "public network data extracted from\
-                  excel:\n%s",
-            pprint.pformat(network_data),
-        )
+        LOG.debug("public network data extracted from excel:\n%s" %
+                  pprint.pformat(network_data))
         return network_data

     def get_site_info(self):
         """Read location, dns, ntp and ldap data"""

-        site_info = {}
-        provided_sheetname = self.excel_specs["specs"][self.spec][
-            "dns_ntp_ldap_sheet"
-        ]
-        workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname
-        )
+        spec_ = self.excel_specs["specs"][self.spec]
+        provided_sheetname = spec_["dns_ntp_ldap_sheet"]
+        workbook_object, extracted_sheetname = \
+            self.get_xl_obj_and_sheetname(provided_sheetname)
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        dns_row = self.excel_specs["specs"][self.spec]["dns_row"]
-        dns_col = self.excel_specs["specs"][self.spec]["dns_col"]
-        ntp_row = self.excel_specs["specs"][self.spec]["ntp_row"]
-        ntp_col = self.excel_specs["specs"][self.spec]["ntp_col"]
-        domain_row = self.excel_specs["specs"][self.spec]["domain_row"]
-        domain_col = self.excel_specs["specs"][self.spec]["domain_col"]
-        login_domain_row = self.excel_specs["specs"][self.spec][
-            "login_domain_row"
-        ]
-        ldap_col = self.excel_specs["specs"][self.spec]["ldap_col"]
-        global_group = self.excel_specs["specs"][self.spec]["global_group"]
-        ldap_search_url_row = self.excel_specs["specs"][self.spec][
-            "ldap_search_url_row"
-        ]
+        dns_row = spec_["dns_row"]
+        dns_col = spec_["dns_col"]
+        ntp_row = spec_["ntp_row"]
+        ntp_col = spec_["ntp_col"]
+        domain_row = spec_["domain_row"]
+        domain_col = spec_["domain_col"]
+        login_domain_row = spec_["login_domain_row"]
+        ldap_col = spec_["ldap_col"]
+        global_group = spec_["global_group"]
+        ldap_search_url_row = spec_["ldap_search_url_row"]
         dns_servers = ws.cell(row=dns_row, column=dns_col).value
         ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value
         try:
             if dns_servers is None:
-                raise RuntimeError(
-                    (
-                        "No value for dns_server from:{} Sheet:'{}' ",
-                        "Row:{} Col:{}",
-                    ).format(
-                        self.file_name, provided_sheetname, dns_row, dns_col
-                    )
-                )
+                raise RuntimeError("No value for dns_server from: "
+                                   "{} Sheet:'{}' Row:{} Col:{}".format(
+                                       self.file_name, provided_sheetname,
                                       dns_row, dns_col))
+            if ntp_servers is None:
+                raise RuntimeError("No value for ntp_server from: "
+                                   "{} Sheet:'{}' Row:{} Col:{}".format(
+                                       self.file_name, provided_sheetname,
                                       ntp_row, ntp_col))
         except RuntimeError as rerror:
             LOG.critical(rerror)
             sys.exit("Tugboat exited!!")
@@ -319,12 +289,10 @@ class ExcelParser(object):
             "ntp": ntp_servers,
             "domain": ws.cell(row=domain_row, column=domain_col).value,
             "ldap": {
-                "subdomain": ws.cell(
-                    row=login_domain_row, column=ldap_col
-                ).value,
-                "common_name": ws.cell(
-                    row=global_group, column=ldap_col
-                ).value,
+                "subdomain": ws.cell(row=login_domain_row,
+                                     column=ldap_col).value,
+                "common_name": ws.cell(row=global_group,
+                                       column=ldap_col).value,
                 "url": ws.cell(row=ldap_search_url_row, column=ldap_col).value,
             },
         }
@@ -338,32 +306,27 @@ class ExcelParser(object):
     def get_location_data(self):
         """Read location data from the site and zone sheet"""

-        provided_sheetname = self.excel_specs["specs"][self.spec][
-            "location_sheet"
-        ]
-        workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname(
-            provided_sheetname
-        )
+        spec_ = self.excel_specs["specs"][self.spec]
+        provided_sheetname = spec_["location_sheet"]
+        workbook_object, extracted_sheetname = \
+            self.get_xl_obj_and_sheetname(provided_sheetname)
         if workbook_object is not None:
             ws = workbook_object[extracted_sheetname]
         else:
             ws = self.wb_combined[provided_sheetname]
-        corridor_row = self.excel_specs["specs"][self.spec]["corridor_row"]
-        column = self.excel_specs["specs"][self.spec]["column"]
-        site_name_row = self.excel_specs["specs"][self.spec]["site_name_row"]
-        state_name_row = self.excel_specs["specs"][self.spec]["state_name_row"]
-        country_name_row = self.excel_specs["specs"][self.spec][
-            "country_name_row"
-        ]
-        clli_name_row = self.excel_specs["specs"][self.spec]["clli_name_row"]
+        corridor_row = spec_["corridor_row"]
+        column = spec_["column"]
+        site_name_row = spec_["site_name_row"]
+        state_name_row = spec_["state_name_row"]
+        country_name_row = spec_["country_name_row"]
+        clli_name_row = spec_["clli_name_row"]
         return {
             "corridor": ws.cell(row=corridor_row, column=column).value,
             "name": ws.cell(row=site_name_row, column=column).value,
             "state": ws.cell(row=state_name_row, column=column).value,
             "country": ws.cell(row=country_name_row, column=column).value,
-            "physical_location": ws.cell(
-                row=clli_name_row, column=column
-            ).value,
+            "physical_location": ws.cell(row=clli_name_row,
                                         column=column).value,
         }

     def validate_sheet_names_with_spec(self):
@@ -384,8 +347,8 @@ class ExcelParser(object):
         sheet_name_list.append(location_sheet_name)
         try:
             for sheetname in sheet_name_list:
-                workbook_object, extracted_sheetname = (
-                    self.get_xl_obj_and_sheetname(sheetname))
+                workbook_object, extracted_sheetname = \
+                    self.get_xl_obj_and_sheetname(sheetname)
                 if workbook_object is not None:
                     wb = workbook_object
                     sheetname = extracted_sheetname
@@ -394,8 +357,7 @@ class ExcelParser(object):

             if sheetname not in wb.sheetnames:
                 raise RuntimeError(
-                    "SheetName '{}' not found ".format(sheetname)
-                )
+                    "SheetName '{}' not found ".format(sheetname))
         except RuntimeError as rerror:
             LOG.critical(rerror)
             sys.exit("Tugboat exited!!")
@@ -418,11 +380,8 @@ class ExcelParser(object):
             },
             "site_info": site_info_data,
         }
-        LOG.debug(
-            "Location data extracted from\
-                  excel:\n%s",
-            pprint.pformat(data),
-        )
+        LOG.debug("Location data extracted from excel:\n%s" %
+                  pprint.pformat(data))
         return data

     def combine_excel_design_specs(self, filenames):
@@ -436,9 +395,8 @@ class ExcelParser(object):
             loaded_workbook_ws = loaded_workbook[names]
             for row in loaded_workbook_ws:
                 for cell in row:
-                    design_spec_worksheet[
-                        cell.coordinate
-                    ].value = cell.value
+                    design_spec_worksheet[cell.coordinate].value = \
+                        cell.value
         return design_spec

     def get_xl_obj_and_sheetname(self, sheetname):
@@ -448,7 +406,7 @@ class ExcelParser(object):
         """

         if re.search(".xlsx", sheetname) or re.search(".xls", sheetname):
-            """ Extract file name """
+            # Extract file name
             source_xl_file = sheetname.split(":")[0]
             wb = load_workbook(source_xl_file, data_only=True)
             return [wb, sheetname.split(":")[1]]
spyglass/data_extractor/plugins/tugboat/tugboat.py (66 lines changed; Normal file → Executable file)

@@ -16,6 +16,7 @@ import itertools
 import logging
 import pprint
 import re
+
 from spyglass.data_extractor.base import BaseDataSourcePlugin
 from spyglass.data_extractor.plugins.tugboat.excel_parser import ExcelParser

@@ -106,13 +107,14 @@ class TugboatPlugin(BaseDataSourcePlugin):
         host_list = []
         for rack in rackwise_hosts.keys():
             for host in rackwise_hosts[rack]:
-                host_list.append(
-                    {
-                        "rack_name": rack,
-                        "name": host,
-                        "host_profile": ipmi_data[host]["host_profile"],
-                    }
-                )
+                host_list.append({
+                    "rack_name":
+                    rack,
+                    "name":
+                    host,
+                    "host_profile":
+                    ipmi_data[host]["host_profile"],
+                })
         return host_list

     def get_networks(self, region):
@@ -125,20 +127,18 @@ class TugboatPlugin(BaseDataSourcePlugin):
         private_net = self.parsed_xl_data["network_data"]["private"]
         public_net = self.parsed_xl_data["network_data"]["public"]
         # Extract network information from private and public network data
-        for net_type, net_val in itertools.chain(
-            private_net.items(), public_net.items()
-        ):
+        for net_type, net_val in itertools.chain(private_net.items(),
                                                 public_net.items()):
             tmp_vlan = {}
             # Ingress is special network that has no vlan, only a subnet string
             # So treatment for ingress is different
             if net_type != "ingress":
                 # standardize the network name as net_type may ne different.
-                # For e.g insteas of pxe it may be PXE or instead of calico
+                # For e.g instead of pxe it may be PXE or instead of calico
                 # it may be ksn. Valid network names are pxe, calico, oob, oam,
                 # overlay, storage, ingress
-                tmp_vlan["name"] = self._get_network_name_from_vlan_name(
-                    net_type
-                )
+                tmp_vlan["name"] = \
+                    self._get_network_name_from_vlan_name(net_type)

                 # extract vlan tag. It was extracted from xl file as 'VlAN 45'
                 # The code below extracts the numeric data fron net_val['vlan']
@@ -154,11 +154,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
                 tmp_vlan["name"] = "ingress"
                 tmp_vlan["subnet"] = net_val
             vlan_list.append(tmp_vlan)
-        LOG.debug(
-            "vlan list extracted from tugboat:\n{}".format(
-                pprint.pformat(vlan_list)
-            )
-        )
+        LOG.debug("vlan list extracted from tugboat:\n{}".format(
+            pprint.pformat(vlan_list)))
         return vlan_list

     def get_ips(self, region, host=None):
@@ -200,9 +197,8 @@ class TugboatPlugin(BaseDataSourcePlugin):
             ldap_info["domain"] = url.split(".")[1]
         except IndexError as e:
             LOG.error("url.split:{}".format(e))
-        ldap_info["common_name"] = ldap_raw_data.get(
-            "common_name", "#CHANGE_ME"
-        )
+        ldap_info["common_name"] = \
+            ldap_raw_data.get("common_name", "#CHANGE_ME")
         ldap_info["subdomain"] = ldap_raw_data.get("subdomain", "#CHANGE_ME")

         return ldap_info
@@ -210,16 +206,16 @@ class TugboatPlugin(BaseDataSourcePlugin):
     def get_ntp_servers(self, region):
         """Returns a comma separated list of ntp ip addresses"""

-        ntp_server_list = self._get_formatted_server_list(
-            self.parsed_xl_data["site_info"]["ntp"]
-        )
+        ntp_server_list = \
+            self._get_formatted_server_list(self.parsed_xl_data["site_info"]
                                            ["ntp"])
         return ntp_server_list

     def get_dns_servers(self, region):
         """Returns a comma separated list of dns ip addresses"""
-        dns_server_list = self._get_formatted_server_list(
-            self.parsed_xl_data["site_info"]["dns"]
-        )
+        dns_server_list = \
+            self._get_formatted_server_list(self.parsed_xl_data["site_info"]
                                            ["dns"])
         return dns_server_list

     def get_domain_name(self, region):
@@ -228,17 +224,13 @@ class TugboatPlugin(BaseDataSourcePlugin):
         return self.parsed_xl_data["site_info"]["domain"]

     def get_location_information(self, region):
-        """Prepare location data from information extracted
-
-        by ExcelParser(i.e raw data)
-        """
+        """Prepare location data from information extracted by ExcelParser"""

         location_data = self.parsed_xl_data["site_info"]["location"]

         corridor_pattern = r"\d+"
-        corridor_number = re.findall(
-            corridor_pattern, location_data["corridor"]
-        )[0]
+        corridor_number = \
+            re.findall(corridor_pattern, location_data["corridor"])[0]
         name = location_data.get("name", "#CHANGE_ME")
         state = location_data.get("state", "#CHANGE_ME")
         country = location_data.get("country", "#CHANGE_ME")
@@ -274,7 +266,6 @@ class TugboatPlugin(BaseDataSourcePlugin):
     def _get_network_name_from_vlan_name(self, vlan_name):
         """Network names are ksn, oam, oob, overlay, storage, pxe

-
         This is a utility function to determine the vlan acceptable
         vlan from the name extracted from excel file

@@ -314,8 +305,7 @@ class TugboatPlugin(BaseDataSourcePlugin):
             return "pxe"
         # if nothing matches
         LOG.error(
-            "Unable to recognize VLAN name extracted from Plugin data source"
-        )
+            "Unable to recognize VLAN name extracted from Plugin data source")
         return ""

     def _get_formatted_server_list(self, server_list):
140
spyglass/parser/engine.py
Normal file → Executable file
140
spyglass/parser/engine.py
Normal file → Executable file
@ -15,22 +15,22 @@
|
|||||||
import copy
|
import copy
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import pkg_resources
|
|
||||||
import pprint
|
import pprint
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
import netaddr
|
from netaddr import IPNetwork
|
||||||
|
from pkg_resources import resource_filename
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ProcessDataSource(object):
|
class ProcessDataSource(object):
|
||||||
def __init__(self, sitetype):
|
def __init__(self, site_type):
|
||||||
# Initialize intermediary and save site type
|
# Initialize intermediary and save site type
|
||||||
self._initialize_intermediary()
|
self._initialize_intermediary()
|
||||||
self.region_name = sitetype
|
self.region_name = site_type
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _read_file(file_name):
|
def _read_file(file_name):
|
||||||
@@ -64,15 +64,15 @@ class ProcessDataSource(object):
         for net_type in self.data["network"]["vlan_network_data"]:
             # One of the type is ingress and we don't want that here
             if net_type != "ingress":
-                network_subnets[net_type] = netaddr.IPNetwork(
-                    self.data["network"]["vlan_network_data"][net_type][
-                        "subnet"
-                    ][0]
-                )
-
-        LOG.debug(
-            "Network subnets:\n{}".format(pprint.pformat(network_subnets))
-        )
+                network_subnets[net_type] = \
+                    IPNetwork(self.data["network"]
+                              ["vlan_network_data"]
+                              [net_type]
+                              ["subnet"]
+                              [0])
+
+        LOG.debug("Network subnets:\n{}".format(
+            pprint.pformat(network_subnets)))
         return network_subnets

     def _get_genesis_node_details(self):
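The switch from netaddr.IPNetwork(...) to the directly imported IPNetwork is behavior-neutral; a quick sketch of what the call produces (the subnet value is invented):

    from netaddr import IPNetwork

    subnet = IPNetwork("10.0.0.0/24")  # stand-in for a vlan_network_data subnet
    ips = list(subnet)                 # expands to all 256 addresses, in order
    print(ips[1], ips[-1])             # 10.0.0.1 10.0.0.255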
@@ -83,11 +83,8 @@ class ProcessDataSource(object):
                 if rack_hosts[host]["type"] == "genesis":
                     self.genesis_node = rack_hosts[host]
                     self.genesis_node["name"] = host
-        LOG.debug(
-            "Genesis Node Details:\n{}".format(
-                pprint.pformat(self.genesis_node)
-            )
-        )
+        LOG.debug("Genesis Node Details:\n{}".format(
+            pprint.pformat(self.genesis_node)))

     def _get_genesis_node_ip(self):
         """Returns the genesis node ip"""
@@ -104,14 +101,13 @@ class ProcessDataSource(object):
     def _validate_intermediary_data(self, data):
         """Validates the intermediary data before generating manifests.

-        It checks wether the data types and data format are as expected.
+        It checks whether the data types and data format are as expected.
         The method validates this with regex pattern defined for each
         data type.
         """

         LOG.info("Validating Intermediary data")
-        temp_data = {}
-        # Peforming a deep copy
+        # Performing a deep copy
         temp_data = copy.deepcopy(data)
         # Converting baremetal dict to list.
         baremetal_list = []
@@ -120,7 +116,7 @@ class ProcessDataSource(object):
             baremetal_list = baremetal_list + temp

         temp_data["baremetal"] = baremetal_list
-        schema_dir = pkg_resources.resource_filename("spyglass", "schemas/")
+        schema_dir = resource_filename("spyglass", "schemas/")
         schema_file = schema_dir + "data_schema.json"
         json_data = json.loads(json.dumps(temp_data))
         with open(schema_file, "r") as f:
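For reference, the validation step above follows the standard jsonschema pattern; a toy schema standing in for the real data_schema.json:

    import json

    import jsonschema

    # Toy schema; the real one lives in spyglass/schemas/data_schema.json
    schema = {
        "type": "object",
        "properties": {"region_name": {"type": "string"}},
        "required": ["region_name"],
    }
    json_data = json.loads('{"region_name": "site1"}')
    jsonschema.validate(json_data, schema)  # raises ValidationError on mismatch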
@@ -153,12 +149,12 @@ class ProcessDataSource(object):

         These rules are used to determine ip address allocation ranges,
         host profile interfaces and also to create hardware profile
-        information. The method calls corresponding rule hander function
+        information. The method calls corresponding rule handler function
         based on rule name and applies them to appropriate data objects.
         """

         LOG.info("Apply design rules")
-        rules_dir = pkg_resources.resource_filename("spyglass", "config/")
+        rules_dir = resource_filename("spyglass", "config/")
         rules_file = rules_dir + "rules.yaml"
         rules_data_raw = self._read_file(rules_file)
         rules_yaml = yaml.safe_load(rules_data_raw)
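resource_filename resolves a path inside the installed package, so the rules file is found regardless of the working directory; a sketch (assumes spyglass is installed):

    from pkg_resources import resource_filename

    rules_dir = resource_filename("spyglass", "config/")
    rules_file = rules_dir + "rules.yaml"
    print(rules_file)  # absolute path into the installed spyglass package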
@@ -197,10 +193,8 @@ class ProcessDataSource(object):
             # done to determine the genesis node.
             for host in sorted(self.data["baremetal"][rack].keys()):
                 host_info = self.data["baremetal"][rack][host]
-                if (
-                    host_info["host_profile"]
-                    == hardware_profile["profile_name"]["ctrl"]
-                ):
+                if host_info["host_profile"] \
+                        == hardware_profile["profile_name"]["ctrl"]:
                     if not is_genesis:
                         host_info["type"] = "genesis"
                         is_genesis = True
@@ -229,7 +223,7 @@ class ProcessDataSource(object):
         If a particular ip exists it is overridden.
         """

-        # Ger defult ip offset
+        # Ger default ip offset
         default_ip_offset = rule_data["default"]

         host_idx = 0
@@ -243,11 +237,8 @@ class ProcessDataSource(object):
                     host_networks[net] = str(ips[host_idx + default_ip_offset])
                 host_idx = host_idx + 1

-        LOG.debug(
-            "Updated baremetal host:\n{}".format(
-                pprint.pformat(self.data["baremetal"])
-            )
-        )
+        LOG.debug("Updated baremetal host:\n{}".format(
+            pprint.pformat(self.data["baremetal"])))

     def _update_vlan_net_data(self, rule_data):
         """Offset allocation rules to determine ip address range(s)
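A minimal sketch of the offset rule being logged above: each host in a rack gets the subnet address at [base offset + host index] (offset value and host names invented):

    from netaddr import IPNetwork

    default_ip_offset = 10  # hypothetical value of rule_data["default"]
    ips = list(IPNetwork("10.0.0.0/24"))
    for host_idx, host in enumerate(("node1", "node2", "node3")):
        print(host, str(ips[host_idx + default_ip_offset]))
    # node1 10.0.0.10, node2 10.0.0.11, node3 10.0.0.12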
@@ -270,21 +261,17 @@ class ProcessDataSource(object):

         # Set ingress vip and CIDR for bgp
         LOG.info("Apply network design rules:bgp")
-        subnet = netaddr.IPNetwork(
-            self.data["network"]["vlan_network_data"]["ingress"]["subnet"][0]
-        )
+        vlan_network_data_ = self.data["network"]["vlan_network_data"]
+        subnet = IPNetwork(vlan_network_data_["ingress"]["subnet"][0])
         ips = list(subnet)
-        self.data["network"]["bgp"]["ingress_vip"] = str(
-            ips[ingress_vip_offset]
-        )
-        self.data["network"]["bgp"]["public_service_cidr"] = self.data[
-            "network"
-        ]["vlan_network_data"]["ingress"]["subnet"][0]
-        LOG.debug(
-            "Updated network bgp data:\n{}".format(
-                pprint.pformat(self.data["network"]["bgp"])
-            )
-        )
+        self.data["network"]["bgp"]["ingress_vip"] = \
+            str(ips[ingress_vip_offset])
+        self.data["network"]["bgp"]["public_service_cidr"] = \
+            (vlan_network_data_["ingress"]
+             ["subnet"]
+             [0])
+        LOG.debug("Updated network bgp data:\n{}".format(
+            pprint.pformat(self.data["network"]["bgp"])))

         LOG.info("Apply network design rules:vlan")
         # Apply rules to vlan networks
@@ -297,16 +284,11 @@ class ProcessDataSource(object):
             subnet = self.network_subnets[net_type]
             ips = list(subnet)

-            self.data["network"]["vlan_network_data"][net_type][
-                "gateway"
-            ] = str(ips[gateway_ip_offset])
-
-            self.data["network"]["vlan_network_data"][net_type][
-                "reserved_start"
-            ] = str(ips[1])
-            self.data["network"]["vlan_network_data"][net_type][
-                "reserved_end"
-            ] = str(ips[ip_offset])
+            vlan_network_data_[net_type]["gateway"] = \
+                str(ips[gateway_ip_offset])
+
+            vlan_network_data_[net_type]["reserved_start"] = str(ips[1])
+            vlan_network_data_[net_type]["reserved_end"] = str(ips[ip_offset])

             static_start = str(ips[ip_offset + 1])
             static_end = str(ips[static_ip_end_offset])
@@ -317,40 +299,26 @@ class ProcessDataSource(object):
             dhcp_start = str(ips[mid])
             dhcp_end = str(ips[dhcp_ip_end_offset])

-            self.data["network"]["vlan_network_data"][net_type][
-                "dhcp_start"
-            ] = dhcp_start
-            self.data["network"]["vlan_network_data"][net_type][
-                "dhcp_end"
-            ] = dhcp_end
-
-            self.data["network"]["vlan_network_data"][net_type][
-                "static_start"
-            ] = static_start
-            self.data["network"]["vlan_network_data"][net_type][
-                "static_end"
-            ] = static_end
+            vlan_network_data_[net_type]["dhcp_start"] = dhcp_start
+            vlan_network_data_[net_type]["dhcp_end"] = dhcp_end
+
+            vlan_network_data_[net_type]["static_start"] = static_start
+            vlan_network_data_[net_type]["static_end"] = static_end

             # There is no vlan for oob network
             if net_type != "oob":
-                self.data["network"]["vlan_network_data"][net_type][
-                    "vlan"
-                ] = self.data["network"]["vlan_network_data"][net_type]["vlan"]
+                vlan_network_data_[net_type]["vlan"] = \
+                    vlan_network_data_[net_type]["vlan"]

             # OAM have default routes. Only for cruiser. TBD
             if net_type == "oam":
                 routes = ["0.0.0.0/0"]
             else:
                 routes = []
-            self.data["network"]["vlan_network_data"][net_type][
-                "routes"
-            ] = routes
+            vlan_network_data_[net_type]["routes"] = routes

-        LOG.debug(
-            "Updated vlan network data:\n{}".format(
-                pprint.pformat(self.data["network"]["vlan_network_data"])
-            )
-        )
+        LOG.debug("Updated vlan network data:\n{}".format(
+            pprint.pformat(vlan_network_data_)))

     def load_extracted_data_from_data_source(self, extracted_data):
         """Function called from spyglass.py to pass extracted data
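Taken together, the hunk above carves each subnet into gateway, reserved, static and dhcp ranges from fixed offsets; a sketch with invented offsets (mid is assumed here to be the subnet midpoint, which is not shown in this diff):

    from netaddr import IPNetwork

    ip_offset, gateway_ip_offset = 10, 1          # hypothetical rule values
    static_ip_end_offset, dhcp_ip_end_offset = -20, -2

    ips = list(IPNetwork("10.1.0.0/24"))
    mid = len(ips) // 2
    gateway = str(ips[gateway_ip_offset])
    reserved = (str(ips[1]), str(ips[ip_offset]))
    static = (str(ips[ip_offset + 1]), str(ips[static_ip_end_offset]))
    dhcp = (str(ips[mid]), str(ips[dhcp_ip_end_offset]))
    print(gateway, reserved, static, dhcp)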
@@ -366,12 +334,9 @@ class ProcessDataSource(object):

         LOG.info("Loading plugin data source")
         self.data = extracted_data
-        LOG.debug(
-            "Extracted data from plugin:\n{}".format(
-                pprint.pformat(extracted_data)
-            )
-        )
-        # Uncommeent following segment for debugging purpose.
+        LOG.debug("Extracted data from plugin:\n{}".format(
+            pprint.pformat(extracted_data)))
+        # Uncomment following segment for debugging purpose.
         # extracted_file = "extracted_file.yaml"
         # yaml_file = yaml.dump(extracted_data, default_flow_style=False)
         # with open(extracted_file, 'w') as f:
@@ -385,9 +350,8 @@ class ProcessDataSource(object):
         """Writing intermediary yaml"""

         LOG.info("Writing intermediary yaml")
-        intermediary_file = "{}_intermediary.yaml".format(
-            self.data["region_name"]
-        )
+        intermediary_file = "{}_intermediary.yaml" \
+            .format(self.data["region_name"])
         # Check of if output dir = intermediary_dir exists
         if intermediary_dir is not None:
             outfile = "{}/{}".format(intermediary_dir, intermediary_file)
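A runnable sketch of the write-out this method performs (the data is invented):

    import yaml

    data = {"region_name": "site1", "baremetal": {}}
    intermediary_file = "{}_intermediary.yaml".format(data["region_name"])
    with open(intermediary_file, "w") as f:
        yaml.dump(data, f, default_flow_style=False)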
spyglass/site_processors/base.py
@@ -14,7 +14,7 @@


 class BaseProcessor(object):
-    def __init__(self, file_name):
+    def __init__(self):
         pass

     def render_template(self, template):
@@ -32,9 +32,8 @@ class BaseProcessor(object):
                         "pxe": yaml_data["baremetal"][rack][host]["ip"]["pxe"],
                         "oam": yaml_data["baremetal"][rack][host]["ip"]["oam"],
                     }
-                elif (
-                    yaml_data["baremetal"][rack][host]["type"] == "controller"
-                ):
+                elif yaml_data["baremetal"][rack][host]["type"] \
+                        == "controller":
                     hosts["masters"].append(host)
                 else:
                     hosts["workers"].append(host)
spyglass/site_processors/site_processor.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from jinja2 import Environment
-from jinja2 import FileSystemLoader
 import logging
 import os

+from jinja2 import Environment
+from jinja2 import FileSystemLoader
+
 from spyglass.site_processors.base import BaseProcessor

 LOG = logging.getLogger(__name__)
@@ -23,6 +25,7 @@ LOG = logging.getLogger(__name__)

 class SiteProcessor(BaseProcessor):
     def __init__(self, intermediary_yaml, manifest_dir):
+        super().__init__()
         self.yaml_data = intermediary_yaml
         self.manifest_dir = manifest_dir

@@ -47,20 +50,17 @@ class SiteProcessor(BaseProcessor):

         for dirpath, dirs, files in os.walk(template_dir_abspath):
             for filename in files:
-                j2_env = Environment(
-                    autoescape=True,
-                    loader=FileSystemLoader(dirpath),
-                    trim_blocks=True,
-                )
-                j2_env.filters[
-                    "get_role_wise_nodes"
-                ] = self.get_role_wise_nodes
+                j2_env = Environment(autoescape=True,
+                                     loader=FileSystemLoader(dirpath),
+                                     trim_blocks=True)
+                j2_env.filters["get_role_wise_nodes"] = \
+                    self.get_role_wise_nodes
                 templatefile = os.path.join(dirpath, filename)
                 outdirs = dirpath.split("templates")[1]

-                outfile_path = "{}{}{}".format(
-                    site_manifest_dir, self.yaml_data["region_name"], outdirs
-                )
+                outfile_path = "{}{}{}".format(site_manifest_dir,
+                                               self.yaml_data["region_name"],
+                                               outdirs)
                 outfile_yaml = templatefile.split(".j2")[0].split("/")[-1]
                 outfile = outfile_path + "/" + outfile_yaml
                 outfile_dir = os.path.dirname(outfile)
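For context, the Environment and filter wiring above in a self-contained form; the template name and render call are assumptions, since only the setup appears in this hunk:

    from jinja2 import Environment, FileSystemLoader

    j2_env = Environment(autoescape=True,
                         loader=FileSystemLoader("templates"),
                         trim_blocks=True)
    # Stand-in filter; the real one is BaseProcessor.get_role_wise_nodes
    j2_env.filters["get_role_wise_nodes"] = lambda yaml_data: yaml_data

    template = j2_env.get_template("site.yaml.j2")  # assumes ./templates exists
    print(template.render(data={"region_name": "site1"}))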
@@ -74,10 +74,7 @@ class SiteProcessor(BaseProcessor):
                     out.close()
                 except IOError as ioe:
                     LOG.error(
-                        "IOError during rendering:{}".format(outfile_yaml)
-                    )
+                        "IOError during rendering:{}".format(outfile_yaml))
                     raise SystemExit(
                         "Error when generating {:s}:\n{:s}".format(
-                            outfile, ioe.strerror
-                        )
-                    )
+                            outfile, ioe.strerror))
spyglass/spyglass.py
@@ -13,10 +13,10 @@
 # limitations under the License.

 import logging
-import pkg_resources
 import pprint

 import click
+from pkg_resources import iter_entry_points
 import yaml

 from spyglass.parser.engine import ProcessDataSource
@@ -26,25 +26,23 @@ LOG = logging.getLogger("spyglass")


 @click.command()
-@click.option(
-    "--site", "-s", help="Specify the site for which manifests to be generated"
-)
-@click.option(
-    "--type", "-t", help="Specify the plugin type formation or tugboat"
-)
+@click.option("--site",
+              "-s",
+              help="Specify the site for which manifests to be generated")
+@click.option("--type",
+              "-t",
+              help="Specify the plugin type formation or tugboat")
 @click.option("--formation_url", "-f", help="Specify the formation url")
 @click.option("--formation_user", "-u", help="Specify the formation user id")
-@click.option(
-    "--formation_password", "-p", help="Specify the formation user password"
-)
+@click.option("--formation_password",
+              "-p",
+              help="Specify the formation user password")
 @click.option(
     "--intermediary",
     "-i",
     type=click.Path(exists=True),
-    help=(
-        "Intermediary file path generate manifests, "
-        "use -m also with this option"
-    ),
+    help="Intermediary file path generate manifests, "
+    "use -m also with this option",
 )
 @click.option(
     "--additional_config",
@@ -87,10 +85,8 @@ LOG = logging.getLogger("spyglass")
     "-x",
     multiple=True,
     type=click.Path(exists=True),
-    help=(
-        "Path to engineering excel file, to be passed with "
-        "generate_intermediary"
-    ),
+    help="Path to engineering excel file, to be passed with "
+    "generate_intermediary",
 )
 @click.option(
     "--excel_spec",
@@ -104,8 +100,8 @@ LOG = logging.getLogger("spyglass")
     default=20,
     multiple=False,
     show_default=True,
-    help="Loglevel NOTSET:0 ,DEBUG:10, \
-    INFO:20, WARNING:30, ERROR:40, CRITICAL:50",
+    help="Loglevel NOTSET:0 ,DEBUG:10, INFO:20, WARNING:30, ERROR:40, "
+    "CRITICAL:50",
 )
 def main(*args, **kwargs):
     # Extract user provided inputs
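A stand-alone sketch of the option style used above (hypothetical command, reduced to two options for brevity):

    import click

    @click.command()
    @click.option("--site",
                  "-s",
                  help="Specify the site for which manifests to be generated")
    @click.option("--loglevel",
                  "-l",
                  default=20,
                  show_default=True,
                  help="Loglevel NOTSET:0 ,DEBUG:10, INFO:20, WARNING:30, "
                  "ERROR:40, CRITICAL:50")
    def main(site, loglevel):
        click.echo("site={} loglevel={}".format(site, loglevel))

    if __name__ == "__main__":
        main()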
@@ -121,9 +117,8 @@ def main(*args, **kwargs):
     # Set Logging format
     LOG.setLevel(loglevel)
     stream_handle = logging.StreamHandler()
-    formatter = logging.Formatter(
-        "(%(name)s): %(asctime)s %(levelname)s %(message)s"
-    )
+    formatter = \
+        logging.Formatter("(%(name)s): %(asctime)s %(levelname)s %(message)s")
     stream_handle.setFormatter(formatter)
     LOG.addHandler(stream_handle)

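The handler wiring above, reproduced as a runnable snippet:

    import logging

    LOG = logging.getLogger("spyglass")
    LOG.setLevel(logging.INFO)
    stream_handle = logging.StreamHandler()
    formatter = \
        logging.Formatter("(%(name)s): %(asctime)s %(levelname)s %(message)s")
    stream_handle.setFormatter(formatter)
    LOG.addHandler(stream_handle)
    LOG.info("logger configured")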
@@ -139,14 +134,12 @@ def main(*args, **kwargs):
     if generate_manifests:
         if template_dir is None:
             LOG.error("Template directory not specified!! Spyglass exited")
-            LOG.error(
-                "It is mandatory to provide it when generate_manifests is true"
-            )
+            LOG.error("It is mandatory to provide it when "
+                      "generate_manifests is true")
             exit()

     # Generate Intermediary yaml and manifests extracting data
     # from data source specified by plugin type
-    intermediary_yaml = {}
     if intermediary is None:
         LOG.info("Generating Intermediary yaml")
         plugin_type = kwargs.get("type", None)
@@ -154,16 +147,13 @@ def main(*args, **kwargs):

     # Discover the plugin and load the plugin class
     LOG.info("Load the plugin class")
-    for entry_point in pkg_resources.iter_entry_points(
-        "data_extractor_plugins"
-    ):
+    for entry_point in iter_entry_points("data_extractor_plugins"):
         if entry_point.name == plugin_type:
             plugin_class = entry_point.load()

     if plugin_class is None:
         LOG.error(
-            "Unsupported Plugin type. Plugin type:{}".format(plugin_type)
-        )
+            "Unsupported Plugin type. Plugin type:{}".format(plugin_type))
         exit()

     # Extract data from plugin data source
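Plugin discovery relies on setuptools entry points; a sketch of the loop (it prints None unless a plugin registered under data_extractor_plugins is installed):

    from pkg_resources import iter_entry_points

    plugin_type = "tugboat"  # as passed via --type
    plugin_class = None
    for entry_point in iter_entry_points("data_extractor_plugins"):
        if entry_point.name == plugin_type:
            plugin_class = entry_point.load()
    print(plugin_class)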
@@ -179,17 +169,11 @@ def main(*args, **kwargs):
         with open(additional_config, "r") as config:
             raw_data = config.read()
             additional_config_data = yaml.safe_load(raw_data)
-        LOG.debug(
-            "Additional config data:\n{}".format(
-                pprint.pformat(additional_config_data)
-            )
-        )
+        LOG.debug("Additional config data:\n{}".format(
+            pprint.pformat(additional_config_data)))

-        LOG.info(
-            "Apply additional configuration from:{}".format(
-                additional_config
-            )
-        )
+        LOG.info("Apply additional configuration from:{}".format(
+            additional_config))
         data_extractor.apply_additional_data(additional_config_data)
         LOG.debug(pprint.pformat(data_extractor.site_data))

@@ -197,8 +181,7 @@ def main(*args, **kwargs):
     LOG.info("Apply design rules to the extracted data")
     process_input_ob = ProcessDataSource(site)
     process_input_ob.load_extracted_data_from_data_source(
-        data_extractor.site_data
-    )
+        data_extractor.site_data)

     LOG.info("Generate intermediary yaml")
     intermediary_yaml = process_input_ob.generate_intermediary_yaml()
19 spyglass/utils/utils.py Normal file → Executable file
@@ -14,10 +14,9 @@


 # Merge two dictionaries
-def dict_merge(dictA, dictB, path=None):
+def dict_merge(dict_a, dict_b, path=None):
     """Recursively Merge dictionary dictB into dictA
-

     DictA represents the data extracted by a plugin and DictB
     represents the additional site config dictionary that is passed
     to CLI. The merge process compares the dictionary keys and if they
@@ -28,14 +27,14 @@ def dict_merge(dictA, dictB, path=None):
     if path is None:
         path = []

-    for key in dictB:
-        if key in dictA:
-            if isinstance(dictA[key], dict) and isinstance(dictB[key], dict):
-                dict_merge(dictA[key], dictB[key], path + [str(key)])
-            elif dictA[key] == dictB[key]:
+    for key in dict_b:
+        if key in dict_a:
+            if isinstance(dict_a[key], dict) and isinstance(dict_b[key], dict):
+                dict_merge(dict_a[key], dict_b[key], path + [str(key)])
+            elif dict_a[key] == dict_b[key]:
                 pass  # values are same, so no processing here
             else:
-                dictA[key] = dictB[key]
+                dict_a[key] = dict_b[key]
         else:
-            dictA[key] = dictB[key]
-    return dictA
+            dict_a[key] = dict_b[key]
+    return dict_a
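A usage sketch for dict_merge (condensed re-implementation so the snippet runs on its own; the equality check and path bookkeeping are omitted):

    def dict_merge(dict_a, dict_b):
        # Condensed equivalent of spyglass.utils.utils.dict_merge
        for key in dict_b:
            if (key in dict_a and isinstance(dict_a[key], dict)
                    and isinstance(dict_b[key], dict)):
                dict_merge(dict_a[key], dict_b[key])
            else:
                dict_a[key] = dict_b[key]
        return dict_a

    plugin_data = {"site_info": {"name": "site1", "state": "texas"}}
    site_config = {"site_info": {"state": "newyork"}}
    print(dict_merge(plugin_data, site_config))
    # {'site_info': {'name': 'site1', 'state': 'newyork'}}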
test-requirements.txt
@@ -1,5 +1,5 @@
 # Formatting
-yapf==0.20.0
+yapf==0.27.0

 # Linting
 hacking>=1.1.0,<1.2.0  # Apache-2.0
3 tox.ini
@@ -31,7 +31,8 @@ basepython = python3
 deps =
     -r{toxinidir}/test-requirements.txt
 commands =
-    bash -c "{toxinidir}/tools/gate/whitespace-linter.sh"
+    bash -c {toxinidir}/tools/gate/whitespace-linter.sh
+    yapf -dr {toxinidir}/spyglass {toxinidir}/setup.py
 flake8 {toxinidir}/spyglass
 whitelist_externals =
     bash