refactor: Use yaml.add_representer to reduce complexity
This patchset registers a yaml.add_representer for DocumentDict, which enables yaml.safe_dump/safe_dump_all to correctly serialize DocumentDict objects without a recursive conversion routine.

It also removes all usage of jsonpath_parse from DocumentDict, as jsonpath-ng is an expensive library to call continuously; even though Deckhand does some caching to alleviate this, it is better to avoid it altogether in a wrapper that is used everywhere across the engine module, which does all the heavy processing.

Finally, it reduces the amount of wrapping with DocumentDict: the better approach in the DB module is a helper function that retrieves the data from the DB and immediately wraps it in a DocumentDict where applicable; that is left as a follow-up.

Change-Id: I715ff7e314cf0ec0d34c17f3378514d235dfb377
Parent: 109b78df59
Commit: cd2d3020ec
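The core of this change is the PyYAML representer registration. Below is a minimal, self-contained sketch of the technique for reference (it mirrors the new module-level code this patch adds to the DocumentDict module; the sample document at the end is illustrative only):

    import yaml


    class DocumentDict(dict):
        """Stand-in for the Deckhand wrapper; any dict subclass behaves alike."""


    def document_dict_representer(dumper, data):
        # Emit the subclass as a plain YAML mapping, so no recursive
        # conversion pass is needed before dumping.
        return dumper.represent_mapping('tag:yaml.org,2002:map', dict(data))


    # Register for yaml.dump/dump_all ...
    yaml.add_representer(DocumentDict, document_dict_representer)
    # ... and separately for yaml.safe_dump/safe_dump_all, which use
    # SafeRepresenter and would otherwise raise RepresenterError.
    yaml.representer.SafeRepresenter.add_representer(
        DocumentDict, document_dict_representer)

    doc = DocumentDict({'schema': 'deckhand/Certificate/v1',
                        'metadata': {'name': 'example-cert'}})
    print(yaml.safe_dump(doc))  # dumps as an ordinary mapping

Without the SafeRepresenter registration, yaml.safe_dump raises a RepresenterError for dict subclasses; with it, DocumentDict instances serialize like plain dicts.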
@@ -13,16 +13,12 @@
 # limitations under the License.
 
 import collections
-import functools
-import inspect
 import re
 
 from oslo_serialization import jsonutils as json
 from oslo_utils import uuidutils
 import six
-
-from deckhand.common import utils
+import yaml
 
 
 _URL_RE = re.compile(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|'
                      '(?:%[0-9a-fA-F][0-9a-fA-F]))+')
@@ -44,49 +40,17 @@ class DocumentDict(dict):
 
     """
 
-    def __init__(self, *args, **kwargs):
-        super(DocumentDict, self).__init__(*args, **kwargs)
-        self._replaced_by = None
-
-    @classmethod
-    def from_dict(self, documents):
-        """Convert a list of documents or single document into an instance of
-        this class.
-
-        :param documents: Documents to wrap in this class.
-        :type documents: list or dict
-        """
-        if isinstance(documents, collections.Iterable):
-            return [DocumentDict(d) for d in documents]
-        return DocumentDict(documents)
-
     @property
     def meta(self):
         return (self.schema, self.layer, self.name)
 
-    @property
-    def is_abstract(self):
-        return utils.jsonpath_parse(
-            self, 'metadata.layeringDefinition.abstract') is True
-
-    @property
-    def is_control(self):
-        return self.metadata.get('schema', '').startswith('metadata/Control')
-
-    @property
-    def schema(self):
-        schema = self.get('schema')
-        return schema if schema is not None else ''
-
     @property
     def metadata(self):
-        metadata = self.get('metadata')
-        return metadata if metadata is not None else {}
+        return self.get('metadata') or DocumentDict({})
 
     @property
     def data(self):
-        data = self.get('data')
-        return data if data is not None else {}
+        return self.get('data')
 
     @data.setter
     def data(self, value):
@@ -94,50 +58,69 @@ class DocumentDict(dict):
 
     @property
     def name(self):
-        return utils.jsonpath_parse(self, 'metadata.name')
+        return self.metadata.get('name') or ''
 
     @property
-    def layering_definition(self):
-        return utils.jsonpath_parse(self, 'metadata.layeringDefinition')
+    def schema(self):
+        return self.get('schema') or ''
 
     @property
     def layer(self):
-        return utils.jsonpath_parse(
-            self, 'metadata.layeringDefinition.layer')
+        return self.layering_definition.get('layer') or ''
+
+    @property
+    def is_abstract(self):
+        return self.layering_definition.get('abstract') is True
+
+    @property
+    def is_control(self):
+        return self.schema.startswith('deckhand/Control')
+
+    @property
+    def layering_definition(self):
+        metadata = self.metadata or {}
+        return metadata.get('layeringDefinition') or DocumentDict({})
+
+    @property
+    def layeringDefinition(self):
+        metadata = self.metadata or {}
+        return metadata.get('layeringDefinition') or DocumentDict({})
 
     @property
     def layer_order(self):
-        return utils.jsonpath_parse(self, 'data.layerOrder')
+        if not self.schema.startswith('deckhand/LayeringPolicy'):
+            raise TypeError(
+                'layer_order only exists for LayeringPolicy documents')
+        return self.data.get('layerOrder', [])
 
     @property
     def parent_selector(self):
-        return utils.jsonpath_parse(
-            self, 'metadata.layeringDefinition.parentSelector') or {}
+        return self.layering_definition.get(
+            'parentSelector') or DocumentDict({})
 
     @property
     def labels(self):
-        return utils.jsonpath_parse(self, 'metadata.labels') or {}
+        return self.metadata.get('labels') or DocumentDict({})
 
     @property
     def substitutions(self):
-        return utils.jsonpath_parse(self, 'metadata.substitutions') or []
+        return self.metadata.get('substitutions', [])
 
     @substitutions.setter
     def substitutions(self, value):
-        return utils.jsonpath_replace(self, value, '.metadata.substitutions')
+        self.metadata.substitutions = value
 
     @property
     def actions(self):
-        return utils.jsonpath_parse(
-            self, 'metadata.layeringDefinition.actions') or []
+        return self.layering_definition.get('actions', [])
 
     @property
     def storage_policy(self):
-        return utils.jsonpath_parse(self, 'metadata.storagePolicy') or ''
+        return self.metadata.get('storagePolicy') or ''
 
     @storage_policy.setter
     def storage_policy(self, value):
-        return utils.jsonpath_replace(self, value, '.metadata.storagePolicy')
+        self.metadata['storagePolicy'] = value
 
     @property
     def is_encrypted(self):
@@ -159,36 +142,42 @@ class DocumentDict(dict):
 
     @property
     def is_replacement(self):
-        return utils.jsonpath_parse(self, 'metadata.replacement') is True
+        return self.metadata.get('replacement') is True
 
     @property
     def has_replacement(self):
-        return isinstance(self._replaced_by, DocumentDict)
+        return isinstance(self.replaced_by, DocumentDict)
 
     @property
     def replaced_by(self):
-        return self._replaced_by
+        return getattr(self, '_replaced_by', None)
 
     @replaced_by.setter
     def replaced_by(self, other):
-        self._replaced_by = other
+        setattr(self, '_replaced_by', other)
 
     def __hash__(self):
         return hash(json.dumps(self, sort_keys=True))
 
+    @classmethod
+    def from_list(cls, documents):
+        """Convert an iterable of documents into instances of this class.
+
+        :param documents: Documents to wrap in this class.
+        :type documents: iterable
+        """
+        if not isinstance(documents, collections.Iterable):
+            documents = [documents]
+
+        return [DocumentDict(d) for d in documents]
 
-def wrap_documents(f):
-    """Decorator to wrap dictionary-formatted documents in instances of
-    ``DocumentDict``.
-    """
-    @functools.wraps(f)
-    def wrapper(*args, **kwargs):
-        fargs = inspect.getargspec(f)
-        if 'documents' in fargs[0]:
-            pos = fargs[0].index('documents')
-            new_args = list(args)
-            if new_args[pos] and not isinstance(
-                    new_args[pos][0], DocumentDict):
-                new_args[pos] = DocumentDict.from_dict(args[pos])
-        return f(*tuple(new_args), **kwargs)
-    return wrapper
+
+def document_dict_representer(dumper, data):
+    return dumper.represent_mapping('tag:yaml.org,2002:map', dict(data))
+
+
+yaml.add_representer(DocumentDict, document_dict_representer)
+# Required for py27 compatibility: yaml.safe_dump/safe_dump_all doesn't
+# work unless SafeRepresenter add_representer method is called.
+safe_representer = yaml.representer.SafeRepresenter
+safe_representer.add_representer(DocumentDict, document_dict_representer)
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import ast
-import copy
 import re
 import string
 
@@ -171,8 +170,6 @@ def jsonpath_replace(data, value, jsonpath, pattern=None):
        # http://admin:super-duper-secret@svc-name:8080/v1
        doc['data'].update(replaced_data)
    """
-    data = copy.copy(data)
-    value = copy.copy(value)
-
    jsonpath = _normalize_jsonpath(jsonpath)
 
@@ -12,10 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import yaml
-
 import falcon
 from oslo_log import log as logging
+import yaml
 
 from deckhand import context
 
@@ -16,6 +16,7 @@ import falcon
 from oslo_log import log as logging
 from oslo_utils import excutils
 
+from deckhand.common import document as document_wrapper
 from deckhand.control import base as api_base
 from deckhand.control.views import document as document_view
 from deckhand.db.sqlalchemy import api as db_api
@@ -35,7 +36,8 @@ class BucketsResource(api_base.BaseResource):
 
     @policy.authorize('deckhand:create_cleartext_documents')
     def on_put(self, req, resp, bucket_name=None):
-        documents = self.from_yaml(req, expect_list=True, allow_empty=True)
+        data = self.from_yaml(req, expect_list=True, allow_empty=True)
+        documents = document_wrapper.DocumentDict.from_list(data)
 
         # NOTE: Must validate documents before doing policy enforcement,
         # because we expect certain formatting of the documents while doing
@@ -17,6 +17,7 @@ from oslo_log import log as logging
 from oslo_utils import excutils
 import six
 
+from deckhand.common import document as document_wrapper
 from deckhand.common import utils
 from deckhand.common import validation_message as vm
 from deckhand.control import base as api_base
@@ -110,10 +111,9 @@ class RenderedDocumentsResource(api_base.BaseResource):
         if include_encrypted:
             filters['metadata.storagePolicy'].append('encrypted')
 
-        documents = self._retrieve_documents_for_rendering(revision_id,
-                                                           **filters)
+        data = self._retrieve_documents_for_rendering(revision_id, **filters)
+        documents = document_wrapper.DocumentDict.from_list(data)
         encryption_sources = self._retrieve_encrypted_documents(documents)
 
         try:
             # NOTE(fmontei): `validate` is False because documents have already
             # been pre-validated during ingestion. Documents are post-validated
@@ -28,7 +28,6 @@ from oslo_serialization import jsonutils as json
 import sqlalchemy.orm as sa_orm
 from sqlalchemy import text
 
-from deckhand.common import document as document_wrapper
 from deckhand.common import utils
 from deckhand.db.sqlalchemy import models
 from deckhand import errors
@@ -92,6 +91,14 @@ def raw_query(query, **kwargs):
     return get_engine().execute(stmt)
 
 
+def _meta(document):
+    return (
+        document['schema'],
+        document['metadata'].get('layeringDefinition', {}).get('layer'),
+        document['metadata'].get('name')
+    )
+
+
 def require_unique_document_schema(schema=None):
     """Decorator to enforce only one singleton document exists in the system.
 
@@ -122,12 +129,12 @@ def require_unique_document_schema(schema=None):
         existing_documents = revision_documents_get(
             schema=schema, deleted=False, include_history=False)
         existing_document_names = [
-            x.meta for x in existing_documents
+            _meta(x) for x in existing_documents
         ]
         conflicting_names = [
-            x.meta for x in documents
-            if x.meta not in existing_document_names and
-            x.schema.startswith(schema)
+            _meta(x) for x in documents
+            if _meta(x) not in existing_document_names and
+            x['schema'].startswith(schema)
         ]
         if existing_document_names and conflicting_names:
             raise errors.SingletonDocumentConflict(
@@ -141,7 +148,6 @@ def require_unique_document_schema(schema=None):
     return decorator
 
 
-@document_wrapper.wrap_documents
 @require_unique_document_schema(types.LAYERING_POLICY_SCHEMA)
 def documents_create(bucket_name, documents, validations=None,
                      session=None):
|
|||||||
d for d in revision_documents_get(bucket_name=bucket_name)
|
d for d in revision_documents_get(bucket_name=bucket_name)
|
||||||
]
|
]
|
||||||
documents_to_delete = [
|
documents_to_delete = [
|
||||||
h for h in document_history if h.meta not in [
|
h for h in document_history if _meta(h) not in [
|
||||||
d.meta for d in documents]
|
_meta(d) for d in documents]
|
||||||
]
|
]
|
||||||
|
|
||||||
# Only create a revision if any docs have been created, changed or deleted.
|
# Only create a revision if any docs have been created, changed or deleted.
|
||||||
@ -187,18 +193,18 @@ def documents_create(bucket_name, documents, validations=None,
|
|||||||
|
|
||||||
if documents_to_delete:
|
if documents_to_delete:
|
||||||
LOG.debug('Deleting documents: %s.',
|
LOG.debug('Deleting documents: %s.',
|
||||||
[d.meta for d in documents_to_delete])
|
[_meta(d) for d in documents_to_delete])
|
||||||
deleted_documents = []
|
deleted_documents = []
|
||||||
|
|
||||||
for d in documents_to_delete:
|
for d in documents_to_delete:
|
||||||
doc = models.Document()
|
doc = models.Document()
|
||||||
with session.begin():
|
with session.begin():
|
||||||
# Store bare minimum information about the document.
|
# Store bare minimum information about the document.
|
||||||
doc['schema'] = d.schema
|
doc['schema'] = d['schema']
|
||||||
doc['name'] = d.name
|
doc['name'] = d['name']
|
||||||
doc['layer'] = d.layer
|
doc['layer'] = d['layer']
|
||||||
doc['data'] = {}
|
doc['data'] = {}
|
||||||
doc['meta'] = d.metadata
|
doc['meta'] = d['metadata']
|
||||||
doc['data_hash'] = _make_hash({})
|
doc['data_hash'] = _make_hash({})
|
||||||
doc['metadata_hash'] = _make_hash({})
|
doc['metadata_hash'] = _make_hash({})
|
||||||
doc['bucket_id'] = bucket['id']
|
doc['bucket_id'] = bucket['id']
|
||||||
@ -374,7 +380,7 @@ def document_get(session=None, raw_dict=False, revision_id=None, **filters):
|
|||||||
for doc in documents:
|
for doc in documents:
|
||||||
d = doc.to_dict(raw_dict=raw_dict)
|
d = doc.to_dict(raw_dict=raw_dict)
|
||||||
if utils.deepfilter(d, **nested_filters):
|
if utils.deepfilter(d, **nested_filters):
|
||||||
return document_wrapper.DocumentDict(d)
|
return d
|
||||||
|
|
||||||
filters.update(nested_filters)
|
filters.update(nested_filters)
|
||||||
raise errors.DocumentNotFound(filters=filters)
|
raise errors.DocumentNotFound(filters=filters)
|
||||||
@ -426,7 +432,7 @@ def document_get_all(session=None, raw_dict=False, revision_id=None,
|
|||||||
if utils.deepfilter(d, **nested_filters):
|
if utils.deepfilter(d, **nested_filters):
|
||||||
final_documents.append(d)
|
final_documents.append(d)
|
||||||
|
|
||||||
return document_wrapper.DocumentDict.from_dict(final_documents)
|
return final_documents
|
||||||
|
|
||||||
|
|
||||||
####################
|
####################
|
||||||
@ -592,7 +598,6 @@ def revision_delete_all():
|
|||||||
raw_query("DELETE FROM revisions;")
|
raw_query("DELETE FROM revisions;")
|
||||||
|
|
||||||
|
|
||||||
@document_wrapper.wrap_documents
|
|
||||||
def _exclude_deleted_documents(documents):
|
def _exclude_deleted_documents(documents):
|
||||||
"""Excludes all documents that have been deleted including all documents
|
"""Excludes all documents that have been deleted including all documents
|
||||||
earlier in the revision history with the same ``metadata.name`` and
|
earlier in the revision history with the same ``metadata.name`` and
|
||||||
@ -602,12 +607,12 @@ def _exclude_deleted_documents(documents):
|
|||||||
|
|
||||||
for doc in sorted(documents, key=lambda x: x['created_at']):
|
for doc in sorted(documents, key=lambda x: x['created_at']):
|
||||||
if doc['deleted'] is True:
|
if doc['deleted'] is True:
|
||||||
previous_doc = documents_map.get(doc.meta)
|
previous_doc = documents_map.get(_meta(doc))
|
||||||
if previous_doc:
|
if previous_doc:
|
||||||
if doc['deleted_at'] >= previous_doc['created_at']:
|
if doc['deleted_at'] >= previous_doc['created_at']:
|
||||||
documents_map[doc.meta] = None
|
documents_map[_meta(doc)] = None
|
||||||
else:
|
else:
|
||||||
documents_map[doc.meta] = doc
|
documents_map[_meta(doc)] = doc
|
||||||
|
|
||||||
return [d for d in documents_map.values() if d is not None]
|
return [d for d in documents_map.values() if d is not None]
|
||||||
|
|
||||||
@ -694,7 +699,7 @@ def revision_documents_get(revision_id=None, include_history=True,
|
|||||||
filtered_documents = _filter_revision_documents(
|
filtered_documents = _filter_revision_documents(
|
||||||
revision_documents, unique_only, **filters)
|
revision_documents, unique_only, **filters)
|
||||||
|
|
||||||
return document_wrapper.DocumentDict.from_dict(filtered_documents)
|
return filtered_documents
|
||||||
|
|
||||||
|
|
||||||
# NOTE(fmontei): No need to include `@require_revision_exists` decorator as
|
# NOTE(fmontei): No need to include `@require_revision_exists` decorator as
|
||||||
@ -1099,7 +1104,7 @@ def validation_get_all(revision_id, session=None):
|
|||||||
expected_validations = set()
|
expected_validations = set()
|
||||||
for vp in validation_policies:
|
for vp in validation_policies:
|
||||||
expected_validations = expected_validations.union(
|
expected_validations = expected_validations.union(
|
||||||
list(v['name'] for v in vp.data.get('validations', [])))
|
list(v['name'] for v in vp['data'].get('validations', [])))
|
||||||
|
|
||||||
missing_validations = expected_validations - actual_validations
|
missing_validations = expected_validations - actual_validations
|
||||||
extra_validations = actual_validations - expected_validations
|
extra_validations = actual_validations - expected_validations
|
||||||
@ -1138,7 +1143,7 @@ def _check_validation_entries_against_validation_policies(
|
|||||||
expected_validations = set()
|
expected_validations = set()
|
||||||
for vp in validation_policies:
|
for vp in validation_policies:
|
||||||
expected_validations |= set(
|
expected_validations |= set(
|
||||||
v['name'] for v in vp.data.get('validations', []))
|
v['name'] for v in vp['data'].get('validations', []))
|
||||||
|
|
||||||
missing_validations = expected_validations - actual_validations
|
missing_validations = expected_validations - actual_validations
|
||||||
extra_validations = actual_validations - expected_validations
|
extra_validations = actual_validations - expected_validations
|
||||||
@ -1165,19 +1170,19 @@ def _check_validation_entries_against_validation_policies(
|
|||||||
for entry in result_map[extra_name]:
|
for entry in result_map[extra_name]:
|
||||||
original_status = entry['status']
|
original_status = entry['status']
|
||||||
entry['status'] = 'ignored [%s]' % original_status
|
entry['status'] = 'ignored [%s]' % original_status
|
||||||
|
|
||||||
entry.setdefault('errors', [])
|
entry.setdefault('errors', [])
|
||||||
|
|
||||||
|
msg_args = _meta(vp) + (
|
||||||
|
', '.join(v['name'] for v in vp['data'].get(
|
||||||
|
'validations', [])),
|
||||||
|
)
|
||||||
for vp in validation_policies:
|
for vp in validation_policies:
|
||||||
entry['errors'].append({
|
entry['errors'].append({
|
||||||
'message': (
|
'message': (
|
||||||
'The result for this validation was externally '
|
'The result for this validation was externally '
|
||||||
'registered but has been ignored because it is not '
|
'registered but has been ignored because it is not '
|
||||||
'found in the validations for ValidationPolicy '
|
'found in the validations for ValidationPolicy '
|
||||||
'[%s, %s] %s: %s.' % (
|
'[%s, %s] %s: %s.' % msg_args
|
||||||
vp.schema, vp.layer, vp.name,
|
|
||||||
', '.join(v['name'] for v in vp['data'].get(
|
|
||||||
'validations', []))
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -199,11 +199,11 @@ class DataSchemaValidator(GenericValidator):
|
|||||||
for data_schema in data_schemas:
|
for data_schema in data_schemas:
|
||||||
# Ensure that each `DataSchema` document has required properties
|
# Ensure that each `DataSchema` document has required properties
|
||||||
# before they themselves can be used to validate other documents.
|
# before they themselves can be used to validate other documents.
|
||||||
if 'name' not in data_schema.metadata:
|
if not data_schema.name:
|
||||||
continue
|
continue
|
||||||
if self._schema_re.match(data_schema.name) is None:
|
if self._schema_re.match(data_schema.name) is None:
|
||||||
continue
|
continue
|
||||||
if 'data' not in data_schema:
|
if not data_schema.data:
|
||||||
continue
|
continue
|
||||||
schema_prefix, schema_version = _get_schema_parts(
|
schema_prefix, schema_version = _get_schema_parts(
|
||||||
data_schema, 'metadata.name')
|
data_schema, 'metadata.name')
|
||||||
|
@@ -22,7 +22,7 @@ from oslo_log import log as logging
 from oslo_log import versionutils
 from oslo_utils import excutils
 
-from deckhand.common import document as document_wrapper
+from deckhand.common.document import DocumentDict as dd
 from deckhand.common import utils
 from deckhand.common.validation_message import ValidationMessage
 from deckhand.engine import document_validation
@@ -127,7 +127,7 @@ class DocumentLayering(object):
         substitution_source_map = {}
 
         for src in substitution_sources:
-            src_ref = document_wrapper.DocumentDict(src)
+            src_ref = dd(src)
             if src_ref.meta in self._documents_by_index:
                 src_ref = self._documents_by_index[src_ref.meta]
             if src_ref.has_replacement:
@@ -432,8 +432,7 @@ class DocumentLayering(object):
             filter(lambda x: x.get('schema').startswith(
                 types.LAYERING_POLICY_SCHEMA), documents))
         if layering_policies:
-            self._layering_policy = document_wrapper.DocumentDict(
-                layering_policies[0])
+            self._layering_policy = dd(layering_policies[0])
             if len(layering_policies) > 1:
                 LOG.warning('More than one layering policy document was '
                             'passed in. Using the first one found: [%s] %s.',
@@ -448,7 +447,7 @@ class DocumentLayering(object):
             raise errors.LayeringPolicyNotFound()
 
         for document in documents:
-            document = document_wrapper.DocumentDict(document)
+            document = dd(document)
 
             self._documents_by_index.setdefault(document.meta, document)
 
@@ -542,6 +541,7 @@ class DocumentLayering(object):
         :raises MissingDocumentKey: If a layering action path isn't found
             in the child document.
         """
+
         method = action['method']
         if method not in self._SUPPORTED_METHODS:
             raise errors.UnsupportedActionMethod(
@@ -23,7 +23,6 @@ from deckhand.barbican.driver import BarbicanDriver
 from deckhand.common import document as document_wrapper
 from deckhand.common import utils
 from deckhand import errors
-from deckhand import types
 
 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -68,11 +67,10 @@ class SecretsManager(object):
         if not isinstance(secret_doc, document_wrapper.DocumentDict):
             secret_doc = document_wrapper.DocumentDict(secret_doc)
 
-        if secret_doc.storage_policy == types.ENCRYPTED:
+        if secret_doc.is_encrypted:
             payload = cls.barbican_driver.create_secret(secret_doc)
         else:
             payload = secret_doc.data
 
         return payload
 
     @classmethod
@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import collections
-
 import falcon
 from oslo_log import log as logging
 import six
@@ -30,34 +28,6 @@ def get_version_from_request(req):
     return 'N/A'
 
 
-def _safe_yaml_dump(error_response):
-    """Cast every instance of ``DocumentDict`` into a dictionary for
-    compatibility with ``yaml.safe_dump``.
-
-    This should only be called for error formatting.
-    """
-    is_dict_sublcass = (
-        lambda v: type(v) is not dict and issubclass(v.__class__, dict)
-    )
-
-    def _to_dict(value, parent):
-        if isinstance(value, (list, tuple, set)):
-            for v in value:
-                _to_dict(v, value)
-        elif isinstance(value, collections.Mapping):
-            for v in value.values():
-                _to_dict(v, value)
-        else:
-            if isinstance(parent, (list, tuple, set)):
-                parent[parent.index(value)] = (
-                    dict(value) if is_dict_sublcass(value) else value)
-            elif isinstance(parent, dict):
-                for k, v in parent.items():
-                    parent[k] = dict(v) if is_dict_sublcass(v) else v
-
-    _to_dict(error_response, None)
-    return yaml.safe_dump(error_response)
-
-
 def format_error_resp(req,
                       resp,
                       status_code=falcon.HTTP_500,
@@ -131,8 +101,8 @@ def format_error_resp(req,
         'retry': True if status_code is falcon.HTTP_500 else False
     }
 
-    resp.body = _safe_yaml_dump(error_response)
     resp.status = status_code
+    resp.body = yaml.safe_dump(error_response)
 
 
 def default_exception_handler(ex, req, resp, params):
@@ -116,7 +116,7 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
         resp = self.app.simulate_put(
             '/api/v1.0/buckets/test/documents',
             headers={'Content-Type': 'application/x-yaml'},
-            body='name: test')
+            body='foo: bar')
 
         expected = {
             'status': 'Failure',
@@ -132,8 +132,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
                 {
                     'diagnostic': mock.ANY,
                     'documents': [{
-                        'layer': None,
-                        'name': None,
+                        'layer': '',
+                        'name': '',
                         'schema': ''
                     }],
                     'error': True,
@@ -146,8 +146,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
                 {
                     'diagnostic': mock.ANY,
                     'documents': [{
-                        'layer': None,
-                        'name': None,
+                        'layer': '',
+                        'name': '',
                         'schema': ''
                     }],
                     'error': True,
@@ -211,8 +211,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
                 {
                     'diagnostic': mock.ANY,
                     'documents': [{
-                        'layer': None,
-                        'name': None,
+                        'layer': '',
+                        'name': '',
                         'schema': invalid_document['schema']
                     }],
                     'error': True,
@@ -221,6 +221,32 @@ class TestDocumentLayeringNegative(
         self.assertRaises(
             errors.InvalidDocumentParent, self._test_layering, documents)
 
+    def test_layering_invalid_layer_order_raises_exc(self):
+        """Validate that an invalid layerOrder (which means that the document
+        layer won't be found in the layerOrder) raises an exception.
+        """
+        doc_factory = factories.DocumentFactory(1, [1])
+        lp_template, document = doc_factory.gen_test({
+            "_GLOBAL_SUBSTITUTIONS_1_": [{
+                "dest": {
+                    "path": ".c"
+                },
+                "src": {
+                    "schema": "deckhand/Certificate/v1",
+                    "name": "global-cert",
+                    "path": "."
+                }
+
+            }],
+        }, global_abstract=False)
+
+        layering_policy = copy.deepcopy(lp_template)
+        del layering_policy['data']['layerOrder']
+        error_re = "layer \'global\' .* was not found in layerOrder.*"
+        self.assertRaisesRegexp(
+            errors.InvalidDocumentLayer, error_re, self._test_layering,
+            [layering_policy, document], validate=True)
+
+
 class TestDocumentLayeringValidationNegative(
         test_document_layering.TestDocumentLayering):
@@ -261,34 +287,3 @@ class TestDocumentLayeringValidationNegative(
         self.assertRaises(errors.InvalidDocumentFormat,
                           self._test_layering, [layering_policy, document],
                           validate=True)
-
-    def test_layering_invalid_document_format_generates_error_messages(self):
-        doc_factory = factories.DocumentFactory(1, [1])
-        lp_template, document = doc_factory.gen_test({
-            "_GLOBAL_SUBSTITUTIONS_1_": [{
-                "dest": {
-                    "path": ".c"
-                },
-                "src": {
-                    "schema": "deckhand/Certificate/v1",
-                    "name": "global-cert",
-                    "path": "."
-                }
-
-            }],
-        }, global_abstract=False)
-
-        layering_policy = copy.deepcopy(lp_template)
-        del layering_policy['data']['layerOrder']
-        error_re = r"^'layerOrder' is a required property$"
-        e = self.assertRaises(
-            errors.InvalidDocumentFormat, self._test_layering,
-            [layering_policy, document], validate=True)
-        self.assertRegex(e.error_list[0]['message'], error_re)
-        self.assertEqual(layering_policy['schema'],
-                         e.error_list[0]['documents'][0]['schema'])
-        self.assertEqual(layering_policy['metadata']['name'],
-                         e.error_list[0]['documents'][0]['name'])
-        self.assertEqual(layering_policy['metadata']['layeringDefinition'][
-                             'layer'],
-                         e.error_list[0]['documents'][0]['layer'])