Refactor inventory generation load/save filesystem

Segment the storage-related operations from the rest of the inventory
generation and management tooling, in support of better maintainability,
testability, and future support for alternate storage options.

Operations in manage.py are slightly updated to make use of common
code.

Change-Id: If032bcc9e4727d3895e4ef6cecb6aff511d4b0af
Steve Lewis 2016-10-31 12:46:52 -07:00
parent 3070530e88
commit b7354adacb
4 changed files with 95 additions and 72 deletions
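For orientation before the diff: persistence now lives in a standalone filesystem module, and the generation code only manipulates the inventory dictionary. A minimal sketch of the resulting call pattern, using the names introduced below (the config directory and empty default skeleton are assumptions for illustration, not part of this change):

    import json

    from filesystem import load_inventory, save_inventory

    config_path = '/etc/openstack_deploy'  # assumed deployment config directory

    # Load an existing openstack_inventory.json (taking a tar backup as a side
    # effect) or fall back to a copy of the given default skeleton.
    inventory = load_inventory(config_path, default_inv={})

    # ... mutate the inventory dict here ...

    # The caller serializes; save_inventory() takes the JSON string plus a
    # directory and writes <directory>/openstack_inventory.json.
    save_inventory(json.dumps(inventory, indent=2), config_path)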

View File

@@ -16,8 +16,16 @@
# (c) 2015, Major Hayden <major@mhtx.net>
#
import copy
import datetime
import json
import logging
import os
import tarfile
logger = logging.getLogger('osa-inventory')
INVENTORY_FILENAME = 'openstack_inventory.json'
def _get_search_paths(preferred_path=None, suffix=None):
@@ -49,6 +57,7 @@ def file_find(filename, preferred_path=None, pass_exception=False):
:param pass_exception: ``bool`` Should a SystemExit be raised if the file
is not found
"""
search_paths = _get_search_paths(preferred_path, suffix=filename)
for file_candidate in search_paths:
@@ -61,25 +70,79 @@ def file_find(filename, preferred_path=None, pass_exception=False):
return False
def save_to_json(filename, dictionary):
"""Write out the given dictionary
:param filename: ``str`` Name of the file to write to
:param dictionary: ``dict`` A dictionary to write
"""
target_file = file_find(filename)
with open(target_file, 'wb') as f_handle:
inventory_json = json.dumps(dictionary, indent=2)
f_handle.write(inventory_json)
def make_backup(config_path, inventory_file_path):
# Create a backup of all previous inventory files as a tar archive
inventory_backup_file = os.path.join(
config_path,
'backup_openstack_inventory.tar'
)
with tarfile.open(inventory_backup_file, 'a') as tar:
basename = os.path.basename(inventory_file_path)
backup_name = get_backup_name(basename)
tar.add(inventory_file_path, arcname=backup_name)
logger.debug("Backup written to {}".format(inventory_backup_file))
def load_from_json(filename):
def get_backup_name(basename):
utctime = datetime.datetime.utcnow()
utctime = utctime.strftime("%Y%m%d_%H%M%S")
return '{}-{}.json'.format(basename, utctime)
def load_from_json(filename, preferred_path=None, pass_exception=False):
"""Return a dictionary found in a given file
:param filename: ``str`` Name of the file to read from
:param preferred_path: ``str`` Path to the json file to try FIRST
:param pass_exception: ``bool`` Should a SystemExit be raised if the file
is not found
:return ``(dict, str)`` Dictionary describing the JSON file contents or
False, and the fully resolved file name loaded or None
"""
target_file = file_find(filename)
with open(target_file, 'rb') as f_handle:
dictionary = json.loads(f_handle.read())
return dictionary
target_file = file_find(filename, preferred_path, pass_exception)
dictionary = False
if target_file is not False:
with open(target_file, 'rb') as f_handle:
dictionary = json.loads(f_handle.read())
return dictionary, target_file
def load_inventory(preferred_path=None, default_inv=None):
"""Create an inventory dictionary from the given source file or a default
inventory. If an inventory is found then a backup tarball is created
as well.
:param preferred_path: ``str`` Path to the inventory directory to try FIRST
:param default_inv: ``dict`` Default inventory skeleton
:return: ``dict`` A dictionary found or ``default_inv``
"""
inventory, file_loaded = load_from_json(INVENTORY_FILENAME, preferred_path,
pass_exception=True)
if inventory is not False:
logger.debug("Loaded existing inventory from {}".format(file_loaded))
make_backup(preferred_path, file_loaded)
else:
logger.debug("No existing inventory, created fresh skeleton.")
inventory = copy.deepcopy(default_inv)
return inventory
def save_inventory(inventory_json, save_path):
"""Save an inventory dictionary
:param inventory_json: ``str`` String of JSON formatted inventory to store
:param save_path: ``str`` Path of the directory to save to
"""
if INVENTORY_FILENAME == save_path:
inventory_file = file_find(save_path)
else:
inventory_file = os.path.join(save_path, INVENTORY_FILENAME)
with open(inventory_file, 'wb') as f:
f.write(inventory_json)
logger.info("Inventory written")

View File

@@ -15,21 +15,20 @@
#
# (c) 2014, Kevin Carter <kevin.carter@rackspace.com>
import copy
import datetime
import json
import logging
import netaddr
import os
import Queue
import random
import tarfile
import uuid
import warnings
import yaml
from dictutils import append_if
from dictutils import merge_dict
from filesystem import load_inventory
from filesystem import save_inventory
logger = logging.getLogger('osa-inventory')
@@ -1102,40 +1101,6 @@ def load_user_configuration(config_path):
return user_defined_config
def make_backup(config_path, inventory_file_path):
# Create a backup of all previous inventory files as a tar archive
inventory_backup_file = os.path.join(
config_path,
'backup_openstack_inventory.tar'
)
with tarfile.open(inventory_backup_file, 'a') as tar:
basename = os.path.basename(inventory_file_path)
backup_name = get_backup_name(basename)
tar.add(inventory_file_path, arcname=backup_name)
logger.debug("Backup written to %s", inventory_backup_file)
def get_backup_name(basename):
utctime = datetime.datetime.utcnow()
utctime = utctime.strftime("%Y%m%d_%H%M%S")
return '{}-{}.json'.format(basename, utctime)
def get_inventory(config_path, inventory_file_path):
if os.path.isfile(inventory_file_path):
with open(inventory_file_path, 'rb') as f:
dynamic_inventory = json.loads(f.read())
logger.debug("Loaded existing inventory from %s",
inventory_file_path)
make_backup(config_path, inventory_file_path)
else:
dynamic_inventory = copy.deepcopy(INVENTORY_SKEL)
logger.debug("No existing inventory, created fresh skeleton.")
return dynamic_inventory
def main(config=None, check=False, debug=False, environment=None, **kwargs):
"""Run the main application.
@@ -1164,11 +1129,7 @@ def main(config=None, check=False, debug=False, environment=None, **kwargs):
environment = load_environment(config_path, base_env)
# Load existing inventory file if found
dynamic_inventory_file = os.path.join(
config_path, 'openstack_inventory.json'
)
dynamic_inventory = get_inventory(config_path, dynamic_inventory_file)
dynamic_inventory = load_inventory(config_path, INVENTORY_SKEL)
# Save the users container cidr as a group variable
cidr_networks = user_defined_config.get('cidr_networks')
@@ -1255,8 +1216,6 @@ def main(config=None, check=False, debug=False, environment=None, **kwargs):
logger.debug("%d hosts found." % num_hosts)
# Save new dynamic inventory
with open(dynamic_inventory_file, 'wb') as f:
f.write(dynamic_inventory_json)
logger.info("Inventory written")
save_inventory(dynamic_inventory_json, config_path)
return dynamic_inventory_json

View File

@@ -24,7 +24,7 @@ import prettytable
from dictutils import recursive_dict_removal
from filesystem import load_from_json
from filesystem import save_to_json
from filesystem import save_inventory
def args():
@@ -307,7 +307,7 @@ def main():
user_args = args()
# Get the contents of the system inventory
inventory = load_from_json(user_args['file'])
inventory, filename = load_from_json(user_args['file'])
# Make a table with hosts in the left column and details about each in the
# columns to the right
@@ -325,11 +325,13 @@
print(json.dumps(export_host_info(inventory), indent=2))
elif user_args['clear_ips'] is True:
remove_ip_addresses(inventory)
save_to_json(user_args['file'], inventory)
inventory_json = json.dumps(inventory, indent=2)
save_inventory(inventory_json, filename)
print('Success. . .')
else:
recursive_dict_removal(inventory, user_args['remove_item'])
save_to_json(user_args['file'], inventory)
inventory_json = json.dumps(inventory, indent=2)
save_inventory(inventory_json, filename)
print('Success. . .')
if __name__ == "__main__":
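
Net effect of the hunks above: save_to_json serialized and wrote in one step, while save_inventory takes an already-serialized JSON string plus the path returned by load_from_json, so this tool now serializes explicitly. A rough, self-contained sketch mirroring that round trip (the removal target is illustrative; the file name is resolved via file_find rather than an assumed directory):

    import json

    from dictutils import recursive_dict_removal
    from filesystem import load_from_json, save_inventory

    inventory, filename = load_from_json('openstack_inventory.json')
    recursive_dict_removal(inventory, 'unwanted_item')  # illustrative removal target
    save_inventory(json.dumps(inventory, indent=2), filename)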

View File

@@ -20,6 +20,7 @@ sys.path.append(path.join(os.getcwd(), LIB_DIR))
sys.path.append(path.join(os.getcwd(), INV_DIR))
import dynamic_inventory
import filesystem as fs
import generate as di
TARGET_DIR = path.join(os.getcwd(), 'tests', 'inventory')
@@ -843,18 +844,18 @@ class TestMultipleRuns(unittest.TestCase):
def test_creating_backup_file(self):
inventory_file_path = os.path.join(TARGET_DIR,
'openstack_inventory.json')
get_backup_name_path = 'generate.get_backup_name'
get_backup_name_path = 'filesystem.get_backup_name'
backup_name = 'openstack_inventory.json-20160531_171804.json'
tar_file = mock.MagicMock()
tar_file.__enter__.return_value = tar_file
# run make backup with faked tarfiles and date
with mock.patch('generate.tarfile.open') as tar_open:
with mock.patch('filesystem.tarfile.open') as tar_open:
tar_open.return_value = tar_file
with mock.patch(get_backup_name_path) as backup_mock:
backup_mock.return_value = backup_name
di.make_backup(TARGET_DIR, inventory_file_path)
fs.make_backup(TARGET_DIR, inventory_file_path)
backup_path = path.join(TARGET_DIR, 'backup_openstack_inventory.tar')
@@ -881,9 +882,7 @@ class TestMultipleRuns(unittest.TestCase):
# Generate the initial inventory files
get_inventory(clean=False)
inventory_file_path = os.path.join(TARGET_DIR,
'openstack_inventory.json')
inv = di.get_inventory(TARGET_DIR, inventory_file_path)
inv = fs.load_inventory(TARGET_DIR)
self.assertIsInstance(inv, dict)
self.assertIn('_meta', inv)
# This test is basically just making sure we get more than
@@ -1046,7 +1045,7 @@ class TestOverridingEnvIntegration(OverridingEnvBase):
self.user_defined_config = get_config()
# Inventory is necessary since keys are assumed present
self.inv = di.get_inventory(TARGET_DIR, '')
self.inv = fs.load_inventory(TARGET_DIR, di.INVENTORY_SKEL)
def skel_setup(self):
self.environment = di.load_environment(TARGET_DIR, self.base_env)
@@ -1198,7 +1197,7 @@ class TestDebugLogging(unittest.TestCase):
self.assertFalse(mock_logging.basicConfig.called)
# Even though logging is disabled, we still call these
# all over the place; they just choose not to do anything.
self.assertTrue(mock_logger.info.called)
# NOTE: No info messages are published when debug is False
self.assertTrue(mock_logger.debug.called)
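
A condensed sketch of the patching pattern in test_creating_backup_file above, with hypothetical paths standing in for TARGET_DIR: because mock.patch targets the module where a name is looked up rather than where it is defined, relocating make_backup means the patch strings move from 'generate.*' to 'filesystem.*'.

    import mock

    import filesystem as fs

    with mock.patch('filesystem.tarfile.open') as tar_open, \
            mock.patch('filesystem.get_backup_name') as backup_mock:
        backup_mock.return_value = 'openstack_inventory.json-20160531_171804.json'
        fs.make_backup('/path/to/tests/inventory',  # TARGET_DIR in the real test
                       '/path/to/tests/inventory/openstack_inventory.json')
        tar_open.assert_called_once_with(
            '/path/to/tests/inventory/backup_openstack_inventory.tar', 'a')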