Fix bug where patching configs did not work and add default noproxy
Change-Id: I52b7bc06ccd61d28189821c1d1d301aed58648b8
commit 92cda5aa90
parent ed9082418a
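The core of the "patch not work" fix is in the model setters below: they previously copied the stored config with dict(...), which copies only the top level, so util.merge_dict mutated nested dicts that were still shared with the stored value; the setters now deep-copy the stored config, merge the patch into the copy, and assign the merged result back. A minimal sketch of the shallow-copy pitfall (the merge_nested helper and the config keys here are invented for illustration, not compass code):

import copy


def merge_nested(base, patch):
    # Recursively merge patch into base, mutating base in place.
    for key, value in patch.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            merge_nested(base[key], value)
        else:
            base[key] = value
    return base


stored = {'security': {'service_credentials': {'username': 'admin'}}}

# Old behavior: dict() copies only the top level, so merging mutates the
# nested dicts still shared with the stored object.
shallow = dict(stored)
merge_nested(shallow, {'security': {'service_credentials': {'password': 'x'}}})
print(stored['security']['service_credentials'])  # already mutated in place

# New behavior: deepcopy detaches the nested dicts, so the stored object
# changes only when the merged copy is assigned back to it.
stored = {'security': {'service_credentials': {'username': 'admin'}}}
patched = copy.deepcopy(stored)
merge_nested(patched, {'security': {'service_credentials': {'password': 'x'}}})
print(stored['security']['service_credentials'])  # still only the username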
@@ -45,10 +45,11 @@ def update_progress():
     After the progress got updated, these information will be stored back
     to the log_progressing_history for next time run.
     """
-    with util.lock('log_progressing', blocking=False) as lock:
+    with util.lock('log_progressing', timeout=60, blocking=False) as lock:
         if not lock:
             logging.error(
-                'failed to acquire lock to calculate installation progress')
+                'failed to acquire lock to calculate installation progress'
+            )
             return

         logging.info('update installing progress')
@@ -347,7 +347,7 @@ def list_user_permissions(user_id):
 @log_user_action
 @login_required
 def take_user_action(user_id):
-    """Update user permissions."""
+    """Take user action."""
     data = _get_request_data()
     update_permissions_func = _wrap_response(
         functools.partial(
@@ -219,6 +219,45 @@ def add_package_metadata_internal(session, exception_when_existing=True):
     return package_metadatas


+def _filter_metadata(metadata, **kwargs):
+    if not isinstance(metadata, dict):
+        return metadata
+    filtered_metadata = {}
+    for key, value in metadata.items():
+        if key == '_self':
+            default_value = value.get('default_value', None)
+            if default_value is None:
+                default_callback_params = value.get(
+                    'default_callback_params', {}
+                )
+                callback_params = dict(kwargs)
+                if default_callback_params:
+                    callback_params.update(default_callback_params)
+                default_callback = value.get('default_callback', None)
+                if default_callback:
+                    default_value = default_callback(key, **callback_params)
+            options = value.get('options', None)
+            if options is None:
+                options_callback_params = value.get(
+                    'options_callback_params', {}
+                )
+                callback_params = dict(kwargs)
+                if options_callback_params:
+                    callback_params.update(options_callback_params)
+
+                options_callback = value.get('options_callback', None)
+                if options_callback:
+                    options = options_callback(key, **callback_params)
+            filtered_metadata[key] = value
+            if default_value is not None:
+                filtered_metadata[key]['default_value'] = default_value
+            if options is not None:
+                filtered_metadata[key]['options'] = options
+        else:
+            filtered_metadata[key] = _filter_metadata(value, **kwargs)
+    return filtered_metadata
+
+
 def get_package_metadatas_internal(session):
     metadata_mapping = {}
     adapters = utils.list_db_objects(
@@ -227,7 +266,9 @@ def get_package_metadatas_internal(session):
     for adapter in adapters:
         if adapter.deployable:
             metadata_dict = adapter.metadata_dict()
-            metadata_mapping[adapter.id] = metadata_dict
+            metadata_mapping[adapter.id] = _filter_metadata(
+                metadata_dict, session=session
+            )
         else:
             logging.info(
                 'ignore metadata since its adapter %s is not deployable',
@@ -244,7 +285,9 @@ def get_os_metadatas_internal(session):
     for os in oses:
         if os.deployable:
             metadata_dict = os.metadata_dict()
-            metadata_mapping[os.id] = metadata_dict
+            metadata_mapping[os.id] = _filter_metadata(
+                metadata_dict, session=session
+            )
         else:
             logging.info(
                 'ignore metadata since its os %s is not deployable',
@@ -324,7 +367,7 @@ def _validate_config(
                 continue
             if specified[key]['_self'].get('is_required', False):
                 raise exception.InvalidParameter(
-                    '%s/%s does not find is_required' % (
+                    '%s/%s does not find but it is required' % (
                         config_path, key
                     )
                 )
@@ -335,7 +378,7 @@ def _validate_config(
                 )
             ):
                 raise exception.InvalidParameter(
-                    '%s/%s does not find required_in_whole_config' % (
+                    '%s/%s does not find but it is required in whole config' % (
                         config_path, key
                     )
                 )
@@ -371,6 +414,9 @@ def _autofill_self_config(
             config_path, config, metadata, **kwargs
         )
         return config
+    logging.debug(
+        'autofill %s by metadata %s', config_path, metadata['_self']
+    )
     autofill_callback = metadata['_self'].get(
         'autofill_callback', None
     )
@@ -93,40 +93,15 @@ def _filter_metadata(metadata, **kwargs):
     filtered_metadata = {}
     for key, value in metadata.items():
         if key == '_self':
-            filtered_metadata['_self'] = {}
-            default_value = value.get('default_value', None)
-            if default_value is None:
-                default_callback_params = value.get(
-                    'default_callback_params', {}
-                )
-                callback_params = dict(kwargs)
-                if default_callback_params:
-                    callback_params.update(default_callback_params)
-                default_callback = value.get('default_callback', None)
-                if default_callback:
-                    default_value = default_callback(key, **callback_params)
-            options = value.get('options', None)
-            if options is None:
-                options_callback_params = value.get(
-                    'options_callback_params', {}
-                )
-                callback_params = dict(kwargs)
-                if options_callback_params:
-                    callback_params.update(options_callback_params)
-
-                options_callback = value.get('options_callback', None)
-                if options_callback:
-                    options = options_callback(key, **callback_params)
             filtered_metadata[key] = {
                 'name': value['name'],
                 'description': value.get('description', None),
-                'default_value': default_value,
-                'is_required': value.get(
-                    'is_required', False),
+                'default_value': value.get('default_value', None),
+                'is_required': value.get('is_required', False),
                 'required_in_whole_config': value.get(
                     'required_in_whole_config', False),
                 'js_validator': value.get('js_validator', None),
-                'options': options,
+                'options': value.get('options', None),
                 'required_in_options': value.get(
                     'required_in_options', False),
                 'field_type': value.get(
@@ -135,7 +110,7 @@ def _filter_metadata(metadata, **kwargs):
                 'mapping_to': value.get('mapping_to', None)
             }
         else:
-            filtered_metadata[key] = _filter_metadata(value)
+            filtered_metadata[key] = _filter_metadata(value, **kwargs)
     return filtered_metadata


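The default_value and options computation removed from this _filter_metadata is not gone; the new _filter_metadata added in the first hunk now performs it when metadata is read from the database, calling each field's default_callback and options_callback with the caller's kwargs (for example session=session). A condensed sketch of that resolution step follows; the gateway field and its callback are invented for illustration, and the real callbacks look data up through the database session:

def default_gateway_callback(name, session=None, **kwargs):
    # Stand-in for a real default_callback; the real ones query the
    # database through the session instead of returning a constant.
    return '10.145.88.1'


metadata = {
    'gateway': {
        '_self': {
            'name': 'gateway',
            'default_value': None,
            'default_callback': default_gateway_callback,
            'default_callback_params': {},
        }
    }
}

self_meta = metadata['gateway']['_self']
default_value = self_meta.get('default_value', None)
if default_value is None:
    callback_params = {'session': None}  # _filter_metadata passes its **kwargs
    callback_params.update(self_meta.get('default_callback_params', {}))
    default_callback = self_meta.get('default_callback', None)
    if default_callback:
        default_value = default_callback('gateway', **callback_params)
if default_value is not None:
    self_meta['default_value'] = default_value
print(self_meta['default_value'])  # '10.145.88.1'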
@@ -223,8 +198,8 @@ def _autofill_config(
     )
     metadatas = metadata_mapping[id]
     logging.debug(
-        'auto fill %s config %s by metadata %s',
-        id_name, config, metadatas
+        'auto fill %s config %s by params %s',
+        id_name, config, kwargs
     )
     return metadata_api.autofill_config_internal(
         config, metadatas, **kwargs
@@ -13,6 +13,7 @@
 # limitations under the License.

 """Metadata Callback methods."""
+import logging
 import netaddr
 import random
 import re
@@ -128,7 +129,39 @@ def autofill_callback_random_option(name, config, **kwargs):
     return config


+def autofill_no_proxy(name, config, **kwargs):
+    logging.debug(
+        'autofill %s config %s by params %s',
+        name, config, kwargs
+    )
+    if 'cluster' in kwargs:
+        if config is None:
+            config = []
+        if 'default_value' in kwargs:
+            for default_no_proxy in kwargs['default_value']:
+                if default_no_proxy and default_no_proxy not in config:
+                    config.append(default_no_proxy)
+        cluster = kwargs['cluster']
+        for clusterhost in cluster.clusterhosts:
+            host = clusterhost.host
+            hostname = host.name
+            if hostname not in config:
+                config.append(hostname)
+            for host_network in host.host_networks:
+                if host_network.is_mgmt:
+                    ip = host_network.ip
+                    if ip not in config:
+                        config.append(ip)
+    if not config:
+        return config
+    return [no_proxy for no_proxy in config if no_proxy]
+
+
 def autofill_network_mapping(name, config, **kwargs):
     logging.debug(
         'autofill %s config %s by params %s',
         name, config, kwargs
     )
     if not config:
         return config
     if isinstance(config, basestring):
@@ -13,6 +13,7 @@
 # limitations under the License.

 """Database model"""
+import copy
 import datetime
 import logging
 import netaddr
@@ -673,8 +674,12 @@ class ClusterHost(BASE, TimestampMixin, HelperMixin):

     @patched_package_config.setter
     def patched_package_config(self, value):
-        package_config = util.merge_dict(dict(self.package_config), value)
-        self.package_config = package_config
+        package_config = copy.deepcopy(self.package_config)
+        self.package_config = util.merge_dict(package_config, value)
+        logging.debug(
+            'patch clusterhost %s package_config: %s',
+            self.clusterhost_id, value
+        )
         self.config_validated = False

     @property
@@ -683,9 +688,13 @@ class ClusterHost(BASE, TimestampMixin, HelperMixin):

     @put_package_config.setter
     def put_package_config(self, value):
-        package_config = dict(self.package_config)
+        package_config = copy.deepcopy(self.package_config)
         package_config.update(value)
         self.package_config = package_config
+        logging.debug(
+            'put clusterhost %s package_config: %s',
+            self.clusterhost_id, value
+        )
         self.config_validated = False

     @property
@@ -949,8 +958,9 @@ class Host(BASE, TimestampMixin, HelperMixin):

     @patched_os_config.setter
     def patched_os_config(self, value):
-        self.os_config = util.merge_dict(dict(self.os_config), value)
-        logging.info('patch host os config in %s: %s', self.id, value)
+        os_config = copy.deepcopy(self.os_config)
+        self.os_config = util.merge_dict(os_config, value)
+        logging.debug('patch host os config in %s: %s', self.id, value)
         self.config_validated = False

     @property
@@ -959,10 +969,10 @@ class Host(BASE, TimestampMixin, HelperMixin):

     @put_os_config.setter
     def put_os_config(self, value):
-        os_config = dict(self.os_config)
+        os_config = copy.deepcopy(self.os_config)
         os_config.update(value)
         self.os_config = os_config
-        logging.info('put host os config in %s: %s', self.id, value)
+        logging.debug('put host os config in %s: %s', self.id, value)
         self.config_validated = False

     def __init__(self, id, **kwargs):
@@ -1289,8 +1299,9 @@ class Cluster(BASE, TimestampMixin, HelperMixin):

     @patched_os_config.setter
     def patched_os_config(self, value):
-        self.os_config = util.merge_dict(dict(self.os_config), value)
-        logging.info('patch cluster %s os config: %s', self.id, value)
+        os_config = copy.deepcopy(self.os_config)
+        self.os_config = util.merge_dict(os_config, value)
+        logging.debug('patch cluster %s os config: %s', self.id, value)
         self.config_validated = False

     @property
@@ -1299,10 +1310,10 @@ class Cluster(BASE, TimestampMixin, HelperMixin):

     @put_os_config.setter
     def put_os_config(self, value):
-        os_config = dict(self.os_config)
+        os_config = copy.deepcopy(self.os_config)
         os_config.update(value)
         self.os_config = os_config
-        logging.info('put cluster %s os config: %s', self.id, value)
+        logging.debug('put cluster %s os config: %s', self.id, value)
         self.config_validated = False

     @property
@@ -1311,9 +1322,9 @@ class Cluster(BASE, TimestampMixin, HelperMixin):

     @patched_package_config.setter
     def patched_package_config(self, value):
-        package_config = dict(self.package_config)
+        package_config = copy.deepcopy(self.package_config)
         self.package_config = util.merge_dict(package_config, value)
-        logging.info('patch cluster %s package config: %s', self.id, value)
+        logging.debug('patch cluster %s package config: %s', self.id, value)
         self.config_validated = False

     @property
@@ -1325,7 +1336,7 @@ class Cluster(BASE, TimestampMixin, HelperMixin):
         package_config = dict(self.package_config)
         package_config.update(value)
         self.package_config = package_config
-        logging.info('put cluster %s package config: %s', self.id, value)
+        logging.debug('put cluster %s package config: %s', self.id, value)
         self.config_validated = False

     @property
@@ -1715,9 +1726,10 @@ class Machine(BASE, HelperMixin, TimestampMixin):

     @patched_ipmi_credentials.setter
     def patched_ipmi_credentials(self, value):
-        self.ipmi_credentials = (
-            util.merge_dict(dict(self.ipmi_credentials), value)
-        )
+        if not value:
+            return
+        ipmi_credentials = copy.deepcopy(self.ipmi_credentials)
+        self.ipmi_credentials = util.merge_dict(ipmi_credentials, value)

     @property
     def patched_tag(self):
@@ -1725,7 +1737,9 @@ class Machine(BASE, HelperMixin, TimestampMixin):

     @patched_tag.setter
     def patched_tag(self, value):
-        tag = dict(self.tag)
+        if not value:
+            return
+        tag = copy.deepcopy(self.tag)
         tag.update(value)
         self.tag = value

@@ -1735,7 +1749,9 @@ class Machine(BASE, HelperMixin, TimestampMixin):

     @patched_location.setter
     def patched_location(self, value):
-        location = dict(self.location)
+        if not value:
+            return
+        location = copy.deepcopy(self.location)
         location.update(value)
         self.location = location

@@ -1904,7 +1920,10 @@ class Switch(BASE, HelperMixin, TimestampMixin):

     @patched_credentials.setter
     def patched_credentials(self, value):
-        self.credentials = util.merge_dict(dict(self.credentials), value)
+        if not value:
+            return
+        credentials = copy.deepcopy(self.credentials)
+        self.credentials = util.merge_dict(credentials, value)

     @property
     def filters(self):
@@ -62,7 +62,7 @@ class FilterFileExist(FileFilter):
         """filter log file."""
         file_exist = os.path.isfile(pathname)
         if not file_exist:
-            logging.error("%s is not exist", pathname)
+            logging.debug("%s is not exist", pathname)

         return file_exist

@@ -150,7 +150,7 @@ class FileReaderFactory(object):
         pathname = os.path.join(self.logdir_, hostname, filename)
         logging.debug('get FileReader from %s', pathname)
         if not self.filefilter_.filter(pathname):
-            logging.error('%s is filtered', pathname)
+            logging.debug('%s is filtered', pathname)
             return None

         return FileReader(pathname, log_history)
@@ -55,8 +55,8 @@ class ProgressCalculator(object):
             log_history['severity'] = severity
             logging.debug('update progress to %s', log_history)
         else:
-            logging.info('ignore update progress %s to %s',
-                         progress_data, log_history)
+            logging.debug('ignore update progress %s to %s',
+                          progress_data, log_history)

     def update(self, message, severity, log_history):
         """vritual method to update progress by message and severity.
@@ -48,6 +48,7 @@ METADATA = {
             'field': 'general_list',
             'default_callback': default_noproxy,
             'options_callback': noproxy_options,
+            'autofill_callback': autofill_no_proxy,
             'mapping_to': 'no_proxy'
         }
     },
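With this metadata entry, default_noproxy supplies the baseline no_proxy list and the new autofill_no_proxy hook extends it per cluster when configs are autofilled. To show how that hook behaves, here is a hedged usage sketch that calls autofill_no_proxy with lightweight stand-ins for the ORM objects; the hostnames, IPs, and default values are invented, and the sketch assumes autofill_no_proxy from the callback hunk above is in scope:

import collections

# Lightweight stand-ins for the Cluster/ClusterHost/Host/HostNetwork models.
HostNetwork = collections.namedtuple('HostNetwork', ['ip', 'is_mgmt'])
Host = collections.namedtuple('Host', ['name', 'host_networks'])
ClusterHost = collections.namedtuple('ClusterHost', ['host'])
Cluster = collections.namedtuple('Cluster', ['clusterhosts'])

cluster = Cluster(clusterhosts=[
    ClusterHost(host=Host(
        name='host1',
        host_networks=[HostNetwork(ip='10.145.88.10', is_mgmt=True)]
    )),
    ClusterHost(host=Host(
        name='host2',
        host_networks=[HostNetwork(ip='10.145.88.11', is_mgmt=True)]
    )),
])

no_proxy = autofill_no_proxy(
    'no_proxy', None,
    cluster=cluster,
    default_value=['127.0.0.1', 'compass.example.com']
)
# Expected: the defaults first, then each host's name and mgmt IP:
# ['127.0.0.1', 'compass.example.com',
#  'host1', '10.145.88.10', 'host2', '10.145.88.11']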