Fix uniqueness not being enforced at DB level for documents
UniqueConstraint is currently implemented with incorrect syntax in Deckhand's Document DB model. This patch set fixes that. UniqueConstraint now enforces document uniqueness at the DB level, so an error is raised for duplicate documents (those with the same metadata.name and schema). Closes #17 Change-Id: I7d66457f471ec48b5766733046977117b509d592
This commit is contained in:
parent
d20f4741c5
commit
cce6ddaf6e
@ -94,7 +94,7 @@ class BucketsResource(api_base.BaseResource):
|
||||
try:
|
||||
created_documents = db_api.documents_create(
|
||||
bucket_name, documents, validations=validations)
|
||||
except (deckhand_errors.DocumentExists,
|
||||
except (deckhand_errors.DuplicateDocumentExists,
|
||||
deckhand_errors.SingletonDocumentConflict) as e:
|
||||
raise falcon.HTTPConflict(description=e.format_message())
|
||||
except Exception as e:
|
||||
|
@ -207,7 +207,12 @@ def documents_create(bucket_name, documents, validations=None,
|
||||
doc['revision_id'] = revision['id']
|
||||
|
||||
# Save and mark the document as `deleted` in the database.
|
||||
doc.save(session=session)
|
||||
try:
|
||||
doc.save(session=session)
|
||||
except db_exception.DBDuplicateEntry:
|
||||
raise errors.DuplicateDocumentExists(
|
||||
schema=doc['schema'], name=doc['name'],
|
||||
bucket=bucket['name'])
|
||||
doc.safe_delete(session=session)
|
||||
deleted_documents.append(doc)
|
||||
resp.append(doc.to_dict())
|
||||
@ -219,7 +224,14 @@ def documents_create(bucket_name, documents, validations=None,
|
||||
with session.begin():
|
||||
doc['bucket_id'] = bucket['id']
|
||||
doc['revision_id'] = revision['id']
|
||||
doc.save(session=session)
|
||||
|
||||
try:
|
||||
doc.save(session=session)
|
||||
except db_exception.DBDuplicateEntry:
|
||||
raise errors.DuplicateDocumentExists(
|
||||
schema=doc['schema'], name=doc['name'],
|
||||
bucket=bucket['name'])
|
||||
|
||||
resp.append(doc.to_dict())
|
||||
# NOTE(fmontei): The orig_revision_id is not copied into the
|
||||
# revision_id for each created document, because the revision_id here
|
||||
@ -266,7 +278,7 @@ def _documents_create(bucket_name, values_list, session=None):
|
||||
if (existing_document['bucket_name'] != bucket_name and
|
||||
not existing_document['schema'].startswith(
|
||||
types.VALIDATION_POLICY_SCHEMA)):
|
||||
raise errors.DocumentExists(
|
||||
raise errors.DuplicateDocumentExists(
|
||||
schema=existing_document['schema'],
|
||||
name=existing_document['name'],
|
||||
bucket=existing_document['bucket_name'])
|
||||
|
@ -133,8 +133,14 @@ def __build_tables(blob_type_obj, blob_type_list):
|
||||
|
||||
class Document(BASE, DeckhandBase):
|
||||
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
|
||||
|
||||
__tablename__ = 'documents'
|
||||
|
||||
__table_args__ = (
|
||||
UniqueConstraint(*UNIQUE_CONSTRAINTS,
|
||||
name='duplicate_document_constraint'),
|
||||
)
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String(64), nullable=False)
|
||||
schema = Column(String(64), nullable=False)
|
||||
@ -163,8 +169,6 @@ def __build_tables(blob_type_obj, blob_type_list):
|
||||
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
||||
nullable=True)
|
||||
|
||||
UniqueConstraint(*UNIQUE_CONSTRAINTS)
|
||||
|
||||
@hybrid_property
|
||||
def bucket_name(self):
|
||||
if hasattr(self, 'bucket') and self.bucket:
|
||||
|
@ -317,14 +317,14 @@ class ValidationNotFound(DeckhandException):
|
||||
code = 404
|
||||
|
||||
|
||||
class DocumentExists(DeckhandException):
|
||||
class DuplicateDocumentExists(DeckhandException):
|
||||
"""A document attempted to be put into a bucket where another document with
|
||||
the same schema and metadata.name already exists.
|
||||
|
||||
**Troubleshoot:**
|
||||
"""
|
||||
msg_fmt = ("Document with schema %(schema)s and metadata.name "
|
||||
"%(name)s already exists in bucket %(bucket)s.")
|
||||
"%(name)s already exists in bucket: %(bucket)s.")
|
||||
code = 409
|
||||
|
||||
|
||||
|
@ -662,9 +662,10 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
|
||||
{'_GLOBAL_DATA_1_': {'data': {'a': 'fail'}}},
|
||||
global_abstract=False)[-1]
|
||||
fail_doc['schema'] = 'example/foo/v1'
|
||||
fail_doc['metadata']['name'] = 'test_doc'
|
||||
fail_doc['metadata']['name'] = 'fail_doc'
|
||||
|
||||
pass_doc = copy.deepcopy(fail_doc)
|
||||
pass_doc['metadata']['name'] = 'pass_doc'
|
||||
pass_doc['data']['a'] = 5
|
||||
|
||||
revision_id = self._create_revision(
|
||||
@ -708,7 +709,7 @@ class TestValidationsControllerPostValidate(ValidationsControllerBaseTest):
|
||||
body = yaml.safe_load(resp.text)
|
||||
expected_errors = [{
|
||||
'error_section': {'a': 'fail'},
|
||||
'name': 'test_doc',
|
||||
'name': 'fail_doc',
|
||||
'path': '.data.a',
|
||||
'schema': 'example/foo/v1',
|
||||
'message': "'fail' is not of type 'integer'",
|
||||
|
@ -46,7 +46,14 @@ class TestDocumentsNegative(base.TestDbBase):
|
||||
self.show_document,
|
||||
id=-1)
|
||||
|
||||
def test_create_bucket_conflict(self):
|
||||
def test_create_duplicate_document_same_bucket_raises_exc(self):
|
||||
bucket_name = test_utils.rand_name('bucket')
|
||||
document = base.DocumentFixture.get_minimal_fixture()
|
||||
payload = [document, document.copy()]
|
||||
self.assertRaises(errors.DuplicateDocumentExists,
|
||||
self.create_documents, bucket_name, payload)
|
||||
|
||||
def test_create_duplicate_document_in_another_bucket_raises_exc(self):
|
||||
# Create the document in one bucket.
|
||||
payload = base.DocumentFixture.get_minimal_fixture()
|
||||
bucket_name = test_utils.rand_name('bucket')
|
||||
@ -55,9 +62,9 @@ class TestDocumentsNegative(base.TestDbBase):
|
||||
# Verify that the document cannot be created in another bucket.
|
||||
alt_bucket_name = test_utils.rand_name('bucket')
|
||||
error_re = ("Document with schema %s and metadata.name "
|
||||
"%s already exists in bucket %s." % (
|
||||
"%s already exists in bucket: %s." % (
|
||||
payload['schema'], payload['metadata']['name'],
|
||||
bucket_name))
|
||||
self.assertRaisesRegex(
|
||||
errors.DocumentExists, error_re, self.create_documents,
|
||||
errors.DuplicateDocumentExists, error_re, self.create_documents,
|
||||
alt_bucket_name, payload)
|
||||
|
@ -29,13 +29,13 @@ Deckhand Exceptions
|
||||
:members:
|
||||
:show-inheritance:
|
||||
:undoc-members:
|
||||
* - DocumentExists
|
||||
- .. autoexception:: deckhand.errors.DocumentExists
|
||||
* - DocumentNotFound
|
||||
- .. autoexception:: deckhand.errors.DocumentNotFound
|
||||
:members:
|
||||
:show-inheritance:
|
||||
:undoc-members:
|
||||
* - DocumentNotFound
|
||||
- .. autoexception:: deckhand.errors.DocumentNotFound
|
||||
* - DuplicateDocumentExists
|
||||
- .. autoexception:: deckhand.errors.DuplicateDocumentExists
|
||||
:members:
|
||||
:show-inheritance:
|
||||
:undoc-members:
|
||||
|
Loading…
x
Reference in New Issue
Block a user