Add unit test coverage for validation, group and logs classes

Increase the unit test coverage for recent code: the validation, group and logs classes (54 new unit tests). Merge the per-action test files into a single test file: test_validation_actions.

Change-Id: I7a1a8b0681c13ebd758a5686b2398a0a9e87a4eb
This commit is contained in:
parent eb0158c35a
commit c940a0c30d
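The new test modules below all share one pattern: file access and YAML/JSON parsing are patched out with mock so the classes can be exercised without touching the filesystem. A condensed, self-contained sketch of that pattern follows (names like TestGroupSketch are illustrative only; six, mock and validations_libs are assumed importable, as they are in this tree, and the GROUP dict stands in for the fakes.GROUP fixture added further down in fakes.py):

# Sketch of the mocking pattern shared by the new tests: patch
# six.moves.builtins.open and yaml.safe_load so Group() never reads a real file.
try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.group import Group

# Stand-in for the fakes.GROUP fixture introduced in validations_libs/tests/fakes.py.
GROUP = {'no-op': [{'description': 'noop-foo'}],
         'pre': [{'description': 'pre-foo'}],
         'post': [{'description': 'post-foo'}]}


class TestGroupSketch(TestCase):

    @mock.patch('yaml.safe_load', return_value=GROUP)
    @mock.patch('six.moves.builtins.open')
    def test_get_data(self, mock_open, mock_yaml):
        # Group.__init__ calls open() and yaml.safe_load(); both are mocked above,
        # so get_data simply returns the fake GROUP dict.
        grp = Group('/tmp/foo')
        self.assertEqual(grp.get_data, GROUP)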
validations_libs/group.py
@@ -25,8 +25,11 @@ class Group(object):
         self.data = self._get_content(groups)
 
     def _get_content(self, groups):
-        with open(groups, 'r') as gp:
-            return yaml.safe_load(gp)
+        try:
+            with open(groups, 'r') as gp:
+                return yaml.safe_load(gp)
+        except IOError:
+            raise IOError("Group file not found")
 
     @property
     def get_data(self):
validations_libs/tests/fakes.py
@@ -146,12 +146,27 @@ VALIDATIONS_DATA = {'Description': 'My Validation One Description',
 VALIDATIONS_STATS = {'Last execution date': '2019-11-25 13:40:14',
                      'Number of execution': 'Total: 1, Passed: 1, Failed: 0'}
 
-FAKE_PLAYBOOK = {'hosts': 'undercloud',
-                 'roles': ['advanced_format_512e_support'],
-                 'vars': {'metadata': {'description': 'foo',
-                                       'groups': ['prep', 'pre-deployment'],
-                                       'name':
-                                       'Advanced Format 512e Support'}}}
+FAKE_PLAYBOOK = [{'hosts': 'undercloud',
+                  'roles': ['advanced_format_512e_support'],
+                  'vars': {'metadata': {'description': 'foo',
+                                        'groups': ['prep', 'pre-deployment'],
+                                        'name':
+                                        'Advanced Format 512e Support'}}}]
+
+FAKE_METADATA = {'id': 'foo',
+                 'description': 'foo',
+                 'groups': ['prep', 'pre-deployment'],
+                 'name':
+                 'Advanced Format 512e Support'}
+
+FORMATED_DATA = {'Description': 'foo',
+                 'Groups': ['prep', 'pre-deployment'],
+                 'ID': 'foo',
+                 'Name': 'Advanced Format 512e Support'}
+
+GROUP = {'no-op': [{'description': 'noop-foo'}],
+         'pre': [{'description': 'pre-foo'}],
+         'post': [{'description': 'post-foo'}]}
 
 
 def fake_ansible_runner_run_return(status='successful', rc=0):
validations_libs/tests/test_group.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.group import Group
from validations_libs.tests import fakes


class TestGroup(TestCase):

    def setUp(self):
        super(TestGroup, self).setUp()

    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
    @mock.patch('six.moves.builtins.open')
    def test_get_data(self, mock_open, mock_yaml):
        grp = Group('/tmp/foo')
        data = grp.get_data
        self.assertEquals(data, fakes.GROUP)

    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
    @mock.patch('six.moves.builtins.open')
    def test_get_formated_group(self, mock_open, mock_yaml):
        grp = Group('/tmp/foo')
        ret = [('no-op', 'noop-foo'), ('post', 'post-foo'), ('pre', 'pre-foo')]
        data = grp.get_formated_group
        self.assertEquals(data, ret)

    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
    @mock.patch('six.moves.builtins.open')
    def test_get_groups_keys_list(self, mock_open, mock_yaml):
        grp = Group('/tmp/foo')
        ret = ['no-op', 'pre', 'post']
        data = grp.get_groups_keys_list
        self.assertEquals(data, ret)

    @mock.patch('six.moves.builtins.open')
    def test_group_file_not_found(self, mock_open):
        mock_open.side_effect = IOError()
        self.assertRaises(
            IOError,
            Group,
            'non-existing.yaml'
        )
validations_libs/tests/test_utils.py
@@ -21,17 +21,15 @@ from unittest import TestCase
 
 from validations_libs import utils
 from validations_libs.tests import fakes
-from validations_libs.validation_logs import ValidationLogs
 
 
 class TestUtils(TestCase):
 
     def setUp(self):
         super(TestUtils, self).setUp()
-        self.vlog = ValidationLogs()
 
     @mock.patch('validations_libs.validation.Validation._get_content',
-                return_value=fakes.FAKE_PLAYBOOK)
+                return_value=fakes.FAKE_PLAYBOOK[0])
     @mock.patch('six.moves.builtins.open')
     @mock.patch('os.path.exists', return_value=True)
     def test_get_validations_data(self, mock_exists, mock_open, mock_data):
@@ -41,7 +39,116 @@ class TestUtils(TestCase):
         res = utils.get_validations_data('512e')
         self.assertEqual(res, output)
 
-    def test_get_validations_stats(self):
-        res = self.vlog.get_validations_stats(
-            fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
-        self.assertEqual(res, fakes.VALIDATIONS_STATS)
+    @mock.patch('validations_libs.utils.current_time',
+                return_value='2020-04-02T06:58:20.352272Z')
+    @mock.patch('os.makedirs')
+    @mock.patch('uuid.uuid4', return_value='1234')
+    def test_create_artifacts_dir(self, mock_uuid, mock_makedirs,
+                                  mock_datetime):
+        uuid, dir_path = utils.create_artifacts_dir(dir_path='/tmp/foo',
+                                                    prefix='ntp')
+        self.assertEqual(uuid, '1234')
+        self.assertEqual(dir_path,
+                         '/tmp/foo/1234_ntp_2020-04-02T06:58:20.352272Z')
+
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    @mock.patch('glob.glob')
+    def test_parse_all_validations_on_disk(self, mock_glob, mock_open,
+                                           mock_load):
+        mock_glob.return_value = \
+            ['/foo/playbook/foo.yaml']
+        result = utils.parse_all_validations_on_disk('/foo/playbook')
+        self.assertEqual(result, [fakes.FAKE_METADATA])
+
+    @mock.patch('os.path.isfile')
+    @mock.patch('os.listdir')
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_playbook_by_id(self, mock_open, mock_load,
+                                            mock_listdir, mock_isfile):
+        mock_listdir.return_value = ['foo.yaml']
+        mock_isfile.return_value = True
+        result = utils.get_validations_playbook('/foo/playbook', 'foo')
+        self.assertEqual(result, ['/foo/playbook/foo.yaml'])
+
+    @mock.patch('os.path.isfile')
+    @mock.patch('os.listdir')
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_playbook_by_id_group(self, mock_open, mock_load,
+                                                  mock_listdir, mock_isfile):
+        mock_listdir.return_value = ['foo.yaml']
+        mock_isfile.return_value = True
+        result = utils.get_validations_playbook('/foo/playbook', 'foo', 'prep')
+        self.assertEqual(result, ['/foo/playbook/foo.yaml'])
+
+    @mock.patch('os.path.isfile')
+    @mock.patch('os.listdir')
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_playbook_group_not_exist(self, mock_open,
+                                                      mock_load,
+                                                      mock_listdir,
+                                                      mock_isfile):
+        mock_listdir.return_value = ['foo.yaml']
+        mock_isfile.return_value = True
+        result = utils.get_validations_playbook('/foo/playbook', 'foo',
+                                                'no_group')
+        self.assertEqual(result, [])
+
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validation_parameters(self, mock_open, mock_load):
+
+        result = utils.get_validation_parameters('/foo/playbook/foo.yaml')
+        self.assertEqual(result, {})
+
+    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
+    @mock.patch('six.moves.builtins.open')
+    def test_read_validation_groups_file(self, mock_open, mock_load):
+
+        result = utils.read_validation_groups_file('/foo/groups.yaml')
+        self.assertEqual(result, {'no-op': [{'description': 'noop-foo'}],
+                                  'post': [{'description': 'post-foo'}],
+                                  'pre': [{'description': 'pre-foo'}]})
+
+    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validation_group_name_list(self, mock_open, mock_load):
+
+        result = utils.get_validation_group_name_list('/foo/groups.yaml')
+        self.assertEqual(result, ['no-op', 'pre', 'post'])
+
+    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
+                return_value=[fakes.FAKE_METADATA])
+    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_details(self, mock_open, mock_load, mock_parse):
+
+        result = utils.get_validations_details('foo')
+        self.assertEqual(result, fakes.FAKE_METADATA)
+
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_parameters_no_group(self, mock_open, mock_load):
+
+        result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
+                                                  'foo')
+        self.assertEqual(result, {'foo': {'parameters': fakes.FAKE_METADATA}})
+
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_parameters_no_val(self, mock_open, mock_load):
+
+        result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
+                                                  [], ['prep'])
+        self.assertEqual(result, {'foo': {'parameters': fakes.FAKE_METADATA}})
+
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_get_validations_parameters_nothing(self, mock_open, mock_load):
+
+        result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
+                                                  [], [])
+        self.assertEqual(result, {})
validations_libs/tests/test_validation.py (new file, 82 lines)
@@ -0,0 +1,82 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.validation import Validation
from validations_libs.tests import fakes


class TestValidation(TestCase):

    def setUp(self):
        super(TestValidation, self).setUp()

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_data(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        data = val.get_data
        self.assertEquals(data, fakes.FAKE_PLAYBOOK[0])

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_metadata(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        data = val.get_metadata
        self.assertEquals(data, fakes.FAKE_METADATA)

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_id(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        id = val.id
        get_id = val.get_id
        self.assertEquals(id, 'foo')
        self.assertEquals(get_id, 'foo')

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_groups(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        groups = val.groups
        self.assertEquals(groups, ['prep', 'pre-deployment'])

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_ordered_dict(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        data = val.get_ordered_dict
        self.assertEquals(data, fakes.FAKE_PLAYBOOK[0])

    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_formated_data(self, mock_open, mock_yaml):
        val = Validation('/tmp/foo')
        data = val.get_formated_data
        self.assertEquals(data, fakes.FORMATED_DATA)

    @mock.patch('six.moves.builtins.open')
    def test_validation_not_found(self, mock_open):
        mock_open.side_effect = IOError()
        self.assertRaises(
            IOError,
            Validation,
            'non-existing.yaml'
        )
validations_libs/tests/test_validation_actions.py
@@ -23,10 +23,24 @@ from validations_libs.tests import fakes
 from validations_libs.validation_actions import ValidationActions
 
 
-class TestValidatorRun(TestCase):
+class TestValidationActions(TestCase):
 
     def setUp(self):
-        super(TestValidatorRun, self).setUp()
+        super(TestValidationActions, self).setUp()
+        self.column_name = ('ID', 'Name', 'Groups')
 
+    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
+                return_value=fakes.VALIDATIONS_LIST)
+    def test_validation_list(self, mock_validation_dir):
+        validations_list = ValidationActions(fakes.GROUPS_LIST, '/tmp/foo')
+
+        self.assertEqual(validations_list.list_validations(),
+                         (self.column_name, [('my_val1',
+                                              'My Validation One Name',
+                                              ['prep', 'pre-deployment']),
+                                             ('my_val2',
+                                              'My Validation Two Name',
+                                              ['prep', 'pre-introspection'])]))
+
     @mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
     @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@@ -112,3 +126,81 @@ class TestValidatorRun(TestCase):
         run = ValidationActions()
         self.assertRaises(RuntimeError, run.run_validations, playbook,
                           inventory)
+
+    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
+                return_value=fakes.VALIDATIONS_LIST)
+    @mock.patch('validations_libs.validation.Validation._get_content',
+                return_value=fakes.FAKE_PLAYBOOK[0])
+    @mock.patch('validations_libs.validation_logs.ValidationLogs.'
+                'get_all_logfiles_content',
+                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
+    @mock.patch('six.moves.builtins.open')
+    @mock.patch('os.path.exists', return_value=True)
+    def test_validation_show(self, mock_exists, mock_open,
+                             mock_parse_validation, mock_data, mock_log):
+        data = {'Name': 'Advanced Format 512e Support',
+                'Description': 'foo', 'Groups': ['prep', 'pre-deployment'],
+                'ID': '512e'}
+        data.update({'Last execution date': '2019-11-25 13:40:14',
+                     'Number of execution': 'Total: 1, Passed: 1, Failed: 0'})
+        validations_show = ValidationActions()
+        out = validations_show.show_validations('512e')
+        self.assertEqual(out, data)
+
+    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
+                return_value=fakes.VALIDATIONS_LIST)
+    @mock.patch('yaml.safe_load', return_value=fakes.GROUP)
+    @mock.patch('six.moves.builtins.open')
+    def test_group_information(self, mock_open, mock_yaml, mock_data):
+        v_actions = ValidationActions()
+        col, values = v_actions.group_information('512e')
+        self.assertEqual(col, ('Groups', 'Description',
+                               'Number of Validations'))
+        self.assertEqual(values, [('no-op', 'noop-foo', 2),
+                                  ('post', 'post-foo', 2),
+                                  ('pre', 'pre-foo', 2)])
+
+    @mock.patch('validations_libs.utils.get_validations_playbook',
+                return_value=['/foo/playbook/foo.yaml'])
+    @mock.patch('validations_libs.utils.get_validations_parameters')
+    @mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
+    @mock.patch('six.moves.builtins.open')
+    def test_show_validations_parameters(self, mock_open, mock_load,
+                                         mock_get_param, mock_get_play):
+        mock_get_param.return_value = {'foo':
+                                       {'parameters': fakes.FAKE_METADATA}}
+        v_actions = ValidationActions()
+        result = v_actions.show_validations_parameters('foo')
+        self.assertEqual(result, {'foo': {'parameters': fakes.FAKE_METADATA}})
+
+    @mock.patch('validations_libs.validation_logs.ValidationLogs.'
+                'get_logfile_by_validation',
+                return_value=['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
+    @mock.patch('json.load',
+                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
+    @mock.patch('six.moves.builtins.open')
+    def test_show_history(self, mock_open, mock_load, mock_get_log):
+        v_actions = ValidationActions()
+        col, values = v_actions.show_history('foo')
+        self.assertEqual(col, ('UUID', 'Validations',
+                               'Status', 'Execution at',
+                               'Duration'))
+        self.assertEqual(values, [('123', 'foo', 'PASSED',
+                                   '2019-11-25T13:40:14.404623Z',
+                                   '0:00:03.753')])
+
+    @mock.patch('validations_libs.validation_logs.ValidationLogs.'
+                'get_all_logfiles',
+                return_value=['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
+    @mock.patch('json.load',
+                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
+    @mock.patch('six.moves.builtins.open')
+    def test_show_history_all(self, mock_open, mock_load, mock_get_log):
+        v_actions = ValidationActions()
+        col, values = v_actions.show_history()
+        self.assertEqual(col, ('UUID', 'Validations',
+                               'Status', 'Execution at',
+                               'Duration'))
+        self.assertEqual(values, [('123', 'foo', 'PASSED',
+                                   '2019-11-25T13:40:14.404623Z',
+                                   '0:00:03.753')])
validations_libs/tests/test_validation_log.py (new file, 162 lines)
@@ -0,0 +1,162 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.validation_logs import ValidationLog
from validations_libs.tests import fakes


class TestValidationLog(TestCase):

    def setUp(self):
        super(TestValidationLog, self).setUp()

    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_validation_log_file(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        self.assertEquals(val.uuid, '123')
        self.assertEquals(val.validation_id, 'foo')
        self.assertEquals(val.datetime, '2020-03-30T13:17:22.447857Z')

    @mock.patch('glob.glob')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_log_path(self, mock_open, mock_yaml, mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        val = ValidationLog(uuid='123', validation_id='foo', log_path='/tmp')
        path = val.get_log_path()
        self.assertEquals(path,
                          '/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')

    @mock.patch('glob.glob')
    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_infos(self, mock_open, mock_json, mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        val = ValidationLog(uuid='123', validation_id='foo', log_path='/tmp')
        log_info = val.get_logfile_infos
        self.assertEquals(log_info,
                          ['123', 'foo', '2020-03-30T13:17:22.447857Z'])

    @mock.patch('glob.glob')
    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_datetime(self, mock_open, mock_json, mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        val = ValidationLog(uuid='123', validation_id='foo', log_path='/tmp')
        datetime = val.get_logfile_datetime
        self.assertEquals(datetime, '2020-03-30T13:17:22.447857Z')

    @mock.patch('json.load', return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_content(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        content = val.get_logfile_content
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_uuid(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        uuid = val.get_uuid
        self.assertEquals(uuid, '123')
        self.assertEquals(val.uuid, '123')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_validation_id(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        validation_id = val.get_validation_id
        self.assertEquals(validation_id, 'foo')
        self.assertEquals(val.validation_id, 'foo')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_status(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        status = val.get_status
        self.assertEquals(status, 'PASSED')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_host_group(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        host_group = val.get_host_group
        self.assertEquals(host_group, 'undercloud')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_hosts_status(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        host_group = val.get_hosts_status
        self.assertEquals(host_group, 'undercloud,PASSED')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_unreachable_hosts(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        unreachable = val.get_unreachable_hosts
        self.assertEquals(unreachable, '')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_duration(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        duration = val.get_duration
        self.assertEquals(duration, '0:00:03.753')

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_start_time(self, mock_open, mock_json):
        val = ValidationLog(
            logfile='/tmp/123_foo_2020-03-30T13:17:22.447857Z.json')
        start_time = val.get_start_time
        self.assertEquals(start_time, '2019-11-25T13:40:14.404623Z')

    @mock.patch('six.moves.builtins.open')
    def test_log_not_found(self, mock_open):
        mock_open.side_effect = IOError()
        self.assertRaises(
            IOError,
            ValidationLog,
            logfile='non-existing.yaml'
        )
validations_libs/tests/test_validation_logs.py (new file, 174 lines)
@@ -0,0 +1,174 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.validation_logs import ValidationLogs
from validations_libs.tests import fakes


class TestValidationLogs(TestCase):

    def setUp(self):
        super(TestValidationLogs, self).setUp()

    @mock.patch('json.load', return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
    @mock.patch('six.moves.builtins.open')
    def test_validation_log_file(self, mock_open, mock_json):
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs._get_content('/tmp/foo/bar.json')
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('six.moves.builtins.open')
    def test_log_not_found(self, mock_open):
        mock_open.side_effect = IOError()
        vlogs = ValidationLogs()
        self.assertRaises(
            IOError,
            vlogs._get_content,
            '/var/log/non-existing.json'
        )

    @mock.patch('glob.glob')
    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_by_validation(self, mock_open, mock_json, mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        log = vlogs.get_logfile_by_validation('foo')
        self.assertEquals(log,
                          ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])

    @mock.patch('glob.glob')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_content_by_validation(self, mock_open, mock_json,
                                               mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_logfile_content_by_validation('foo')
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('glob.glob')
    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_by_uuid(self, mock_open, mock_json, mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        log = vlogs.get_logfile_by_uuid('123')
        self.assertEquals(log,
                          ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])

    @mock.patch('glob.glob')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_content_by_uuid(self, mock_open, mock_json,
                                         mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_logfile_content_by_uuid('123')
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('glob.glob')
    @mock.patch('json.load')
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_by_uuid_validation_id(self, mock_open, mock_json,
                                               mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        log = vlogs.get_logfile_by_uuid_validation_id('123', 'foo')
        self.assertEquals(log,
                          ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])

    @mock.patch('glob.glob')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_logfile_content_by_uuid_validation_id(self, mock_open,
                                                       mock_json,
                                                       mock_glob):
        mock_glob.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_logfile_content_by_uuid_validation_id('123', 'foo')
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('os.path.isfile')
    @mock.patch('os.listdir')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_all_logfiles(self, mock_open, mock_json,
                              mock_listdir, mock_isfile):
        mock_listdir.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        mock_isfile.return_value = True
        vlogs = ValidationLogs('/tmp/foo')
        log = vlogs.get_all_logfiles()
        self.assertEquals(log,
                          ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])

    @mock.patch('os.path.isfile')
    @mock.patch('os.listdir')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_all_logfiles_content(self, mock_open, mock_json,
                                      mock_listdir, mock_isfile):
        mock_listdir.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        mock_isfile.return_value = True
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_all_logfiles_content()
        self.assertEquals(content, fakes.VALIDATIONS_LOGS_CONTENTS_LIST)

    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_validations_stats(self, mock_open, mock_json):
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_validations_stats(
            fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
        self.assertEquals(content, fakes.VALIDATIONS_STATS)

    @mock.patch('validations_libs.validation_logs.ValidationLogs.'
                'get_logfile_by_uuid_validation_id')
    @mock.patch('json.load',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
    @mock.patch('six.moves.builtins.open')
    def test_get_results(self, mock_open, mock_json, mock_get_validation):
        mock_get_validation.return_value = \
            ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']
        vlogs = ValidationLogs('/tmp/foo')
        content = vlogs.get_results(uuid='123', validation_id='foo')
        self.assertEquals(content, [{'UUID': '123',
                                     'Validations': 'foo',
                                     'Status': 'PASSED',
                                     'Status_by_Host': 'undercloud,PASSED',
                                     'Host_Group': 'undercloud',
                                     'Unreachable_Hosts': '',
                                     'Duration': '0:00:03.753',
                                     'Validations': 'foo'}])
Deleted file (43 lines):
@@ -1,43 +0,0 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.tests import fakes
from validations_libs.validation_actions import ValidationActions


class TestValidatorList(TestCase):

    def setUp(self):
        super(TestValidatorList, self).setUp()
        self.column_name = ('ID', 'Name', 'Groups')

    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
                return_value=fakes.VALIDATIONS_LIST)
    def test_validation_list(self, mock_validation_dir):
        validations_list = ValidationActions(fakes.GROUPS_LIST, '/tmp/foo')

        self.assertEqual(validations_list.list_validations(),
                         (self.column_name, [('my_val1',
                                              'My Validation One Name',
                                              ['prep', 'pre-deployment']),
                                             ('my_val2',
                                              'My Validation Two Name',
                                              ['prep', 'pre-introspection'])]))
Deleted file (49 lines):
@@ -1,49 +0,0 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

try:
    from unittest import mock
except ImportError:
    import mock
from unittest import TestCase

from validations_libs.tests import fakes
from validations_libs.validation_actions import ValidationActions


class TestValidatorShow(TestCase):

    def setUp(self):
        super(TestValidatorShow, self).setUp()

    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
                return_value=fakes.VALIDATIONS_LIST)
    @mock.patch('validations_libs.validation.Validation._get_content',
                return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('validations_libs.validation_logs.ValidationLogs.'
                'get_all_logfiles_content',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
    @mock.patch('six.moves.builtins.open')
    @mock.patch('os.path.exists', return_value=True)
    def test_validation_show(self, mock_exists, mock_open,
                             mock_parse_validation, mock_data, mock_log):
        data = {'Name': 'Advanced Format 512e Support',
                'Description': 'foo', 'Groups': ['prep', 'pre-deployment'],
                'ID': '512e'}
        data.update({'Last execution date': '2019-11-25 13:40:14',
                     'Number of execution': 'Total: 1, Passed: 1, Failed: 0'})
        validations_show = ValidationActions()
        out = validations_show.show_validations('512e')
        self.assertEqual(out, data)
validations_libs/utils.py
@@ -17,13 +17,12 @@ import glob
 import logging
 import os
 import six
+import uuid
 
-from os import listdir
-from os.path import isfile, join
+from os.path import join
 from validations_libs import constants
 from validations_libs.group import Group
 from validations_libs.validation import Validation
-from uuid import uuid4
 
 LOG = logging.getLogger(__name__ + ".utils")
 
@@ -37,7 +36,7 @@ def create_artifacts_dir(dir_path=None, prefix=None):
     """Create Ansible artifacts directory"""
     dir_path = (dir_path if dir_path else
                 constants.VALIDATION_ANSIBLE_ARTIFACT_PATH)
-    validation_uuid = str(uuid4())
+    validation_uuid = str(uuid.uuid4())
     log_dir = "{}/{}_{}_{}".format(dir_path, validation_uuid,
                                    (prefix if prefix else ''), current_time())
     try:
@@ -73,9 +72,9 @@ def get_validations_playbook(path, validation_id, groups=None):
     if isinstance(groups, six.string_types):
         groups = [groups]
     pl = []
-    for f in listdir(path):
+    for f in os.listdir(path):
         pl_path = join(path, f)
-        if isfile(pl_path):
+        if os.path.isfile(pl_path):
             if os.path.splitext(f)[0] in validation_id:
                 val = Validation(pl_path)
                 if not groups or set(groups).intersection(val.groups):
@@ -124,11 +123,11 @@ def get_validations_data(validation, path=constants.ANSIBLE_VALIDATION_DIR):
 def get_validations_parameters(validations_data, validation_name=[],
                                groups=[]):
     params = {}
-    for val in validations_data['validations']:
+    for val in validations_data:
         v = Validation(val)
         if v.id in validation_name or set(groups).intersection(v.groups):
             params[v.id] = {
-                'parameters': (val.get('metadata') if val.get('metadata') else
-                               val.get('parameters'))
+                'parameters': (v.get_metadata if v.get_metadata else
+                               v.get_vars)
             }
     return params
validations_libs/validation.py
@@ -30,8 +30,11 @@ class Validation(object):
         self.id = os.path.splitext(os.path.basename(validation_path))[0]
 
     def _get_content(self, val_path):
-        with open(val_path, 'r') as val_playbook:
-            return yaml.safe_load(val_playbook)[0]
+        try:
+            with open(val_path, 'r') as val_playbook:
+                return yaml.safe_load(val_playbook)[0]
+        except IOError:
+            raise IOError("Validation playbook not found")
 
     @property
     def get_metadata(self):
validations_libs/validation_actions.py
@@ -138,14 +138,13 @@ class ValidationActions(object):
 
     def show_validations_parameters(self, validation, group=None):
         """Return Validations Parameters"""
-        validations = v_utils.parse_all_validations_on_disk(
-            constants.ANSIBLE_VALIDATION_DIR)
+        validations = v_utils.get_validations_playbook(
+            constants.ANSIBLE_VALIDATION_DIR, group)
 
-        return v_utils.get_validations_parameters({'validations': validations},
-                                                  validation,
+        return v_utils.get_validations_parameters(validations, validation,
                                                   group)
 
-    def show_history(self, validation_id):
+    def show_history(self, validation_id=None):
         """Return validations history"""
         vlogs = ValidationLogs()
         logs = (vlogs.get_logfile_by_validation(validation_id)
validations_libs/validation_logs.py
@@ -17,8 +17,7 @@ import glob
 import logging
 import os
 import time
-from os import listdir
-from os.path import isfile, join
+from os.path import join
 
 from validations_libs import constants
 
@@ -47,8 +46,12 @@ class ValidationLog(object):
             self.name.replace('.{}'.format(self.extension), '').split('_')
 
     def _get_content(self, file):
-        with open(file, 'r') as log_file:
-            return json.load(log_file)
+        try:
+            with open(file, 'r') as log_file:
+                return json.load(log_file)
+        except IOError:
+            msg = "log file: {} not found".format(file)
+            raise IOError(msg)
 
     def get_log_path(self):
         """Return full path of a validation log"""
@@ -63,7 +66,7 @@ class ValidationLog(object):
         Return log file information:
         uuid,
         validation_id,
-        datatime
+        datetime
         """
         return self.name.replace('.{}'.format(self.extension), '').split('_')
 
@@ -139,8 +142,12 @@ class ValidationLogs(object):
         self.logs_path = logs_path
 
     def _get_content(self, file):
-        with open(file, 'r') as log_file:
-            return json.load(log_file)
+        try:
+            with open(file, 'r') as log_file:
+                return json.load(log_file)
+        except IOError:
+            msg = "log file: {} not found".format(file)
+            raise IOError(msg)
 
     def get_logfile_by_validation(self, validation_id):
         """Return logfiles by validation_id"""
@@ -174,14 +181,14 @@ class ValidationLogs(object):
 
     def get_all_logfiles(self):
         """Return logfiles from logs_path"""
-        return [join(self.logs_path, f) for f in listdir(self.logs_path) if
-                isfile(join(self.logs_path, f))]
+        return [join(self.logs_path, f) for f in os.listdir(self.logs_path) if
+                os.path.isfile(join(self.logs_path, f))]
 
     def get_all_logfiles_content(self):
         """Return logfiles content filter by uuid and content"""
         return [self._get_content(join(self.logs_path, f))
-                for f in listdir(self.logs_path)
-                if isfile(join(self.logs_path, f))]
+                for f in os.listdir(self.logs_path)
+                if os.path.isfile(join(self.logs_path, f))]
 
     def get_validations_stats(self, logs):
         """
|
Loading…
x
Reference in New Issue
Block a user