Merge pull request #176 from rackerlabs/master_no_glance_verify
Reverting Glance Verifier
commit ea0b42df48
@@ -7,4 +7,3 @@ prettytable>=0.7.2
 argparse
 Pympler
 requests
-south
@@ -11,9 +11,7 @@
         "userid": "rabbit",
         "password": "rabbit",
         "virtual_host": "/",
-        "topics": {
-            "nova": ["notifications.info"],
-            "glance": ["notifications.info"]
-        }
+        "exchange_name": "stacktach",
+        "routing_keys": ["notifications.info"]
     }
 }
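A hedged sketch of how a worker might read the restored layout (the file name and the surrounding "deployments" list are assumptions; only the exchange_name/routing_keys keys come from the hunk above):

import json

# Assumed filename and outer structure; only the keys shown in the hunk are certain.
with open('stacktach_worker_config.json') as handle:
    config = json.load(handle)

for deployment in config.get('deployments', []):
    exchange_name = deployment['exchange_name']        # e.g. "stacktach"
    routing_keys = deployment.get('routing_keys', ['notifications.info'])
    print('bind %s to %s' % (exchange_name, routing_keys))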
@@ -1,31 +0,0 @@
-import kombu
-import kombu.entity
-import kombu.pools
-import kombu.connection
-import kombu.common
-
-def send_notification(message, routing_key, connection, exchange):
-    with kombu.pools.producers[connection].acquire(block=True) as producer:
-        kombu.common.maybe_declare(exchange, producer.channel)
-        producer.publish(message, routing_key)
-
-
-def create_exchange(name, exchange_type, exclusive=False, auto_delete=False,
-                    durable=True):
-    return kombu.entity.Exchange(name, type=exchange_type, exclusive=exclusive,
-                                 auto_delete=auto_delete, durable=durable)
-
-
-def create_connection(hostname, port, userid, password, transport,
-                      virtual_host):
-    return kombu.connection.BrokerConnection(
-        hostname=hostname, port=port, user_id=userid, password=password,
-        transport=transport, virtual_host=virtual_host)
-
-
-def create_queue(name, exchange, routing_key, exclusive=False,
-                 auto_delete=False, queue_arguments=None, durable=True):
-    return kombu.Queue(name, exchange, durable=durable,
-                       auto_delete=auto_delete, exclusive=exclusive,
-                       queue_arguments=queue_arguments,
-                       routing_key=routing_key)
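The deleted stacktach/message_service.py above is a thin wrapper around kombu. As a hedged sketch (the broker host, credentials, transport, and payload below are illustrative; the function signatures and the `from stacktach import message_service` import path are taken from this PR's code and tests), publishing a notification with it looked roughly like:

from stacktach import message_service

# Illustrative connection details -- not part of the diff.
exchange = message_service.create_exchange('stacktach', 'topic', durable=True)
connection = message_service.create_connection('localhost', 5672, 'rabbit',
                                               'rabbit', 'librabbitmq', '/')
with connection:
    message_service.send_notification(
        {'event_type': 'image.exists.verified.old'},
        'notifications.info', connection, exchange)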
@@ -12,20 +12,16 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-import datetime
 import copy
 
 from django.db import models
 
-from stacktach import datetime_to_decimal as dt
-
-
 def routing_key_type(key):
     if key.endswith('error'):
         return 'E'
     return ' '
 
 
 class Deployment(models.Model):
     name = models.CharField(max_length=50)
 
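routing_key_type() itself is untouched by the revert; for reference, a quick illustrative check of its behaviour (interpreter session added here, not part of the diff):

>>> routing_key_type('monitor.error')
'E'
>>> routing_key_type('monitor.info')
' '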
@@ -177,16 +173,6 @@ class InstanceUsage(models.Model):
         raw = raws[0]
         return raw.deployment
 
-    @staticmethod
-    def find(instance, launched_at):
-        start = launched_at - datetime.timedelta(
-            microseconds=launched_at.microsecond)
-        end = start + datetime.timedelta(microseconds=999999)
-        params = {'instance': instance,
-                  'launched_at__gte': dt.dt_to_decimal(start),
-                  'launched_at__lte': dt.dt_to_decimal(end)}
-        return InstanceUsage.objects.filter(**params)
-
 
 class InstanceDeletes(models.Model):
     instance = models.CharField(max_length=50, null=True,
@@ -200,17 +186,6 @@ class InstanceDeletes(models.Model):
     def deployment(self):
         return self.raw.deployment
 
-    @staticmethod
-    def find(instance, launched, deleted_max=None):
-        start = launched - datetime.timedelta(microseconds=launched.microsecond)
-        end = start + datetime.timedelta(microseconds=999999)
-        params = {'instance': instance,
-                  'launched_at__gte': dt.dt_to_decimal(start),
-                  'launched_at__lte': dt.dt_to_decimal(end)}
-        if deleted_max:
-            params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
-        return InstanceDeletes.objects.filter(**params)
-
 
 class InstanceReconcile(models.Model):
     row_created = models.DateTimeField(auto_now_add=True)
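The find() helpers removed in the two hunks above (and InstanceReconcile.find in the next one) all build the same one-second window: launched_at is truncated to the whole second, and rows are matched between that instant and 999999 microseconds later, with both bounds converted by dt_to_decimal. A standalone sketch of that window logic (plain Python; the dt_to_decimal below is my approximation of stacktach.datetime_to_decimal, not the project's exact implementation):

import calendar
import datetime
import decimal


def dt_to_decimal(when):
    # Approximation: seconds since the epoch as a Decimal, keeping microseconds.
    seconds = calendar.timegm(when.utctimetuple())
    return decimal.Decimal(seconds) + \
        decimal.Decimal(when.microsecond) / decimal.Decimal(1000000)


def launched_at_window(launched_at):
    start = launched_at - datetime.timedelta(
        microseconds=launched_at.microsecond)
    end = start + datetime.timedelta(microseconds=999999)
    return dt_to_decimal(start), dt_to_decimal(end)


# 12:00:00.123456 collapses to the window 12:00:00.000000 .. 12:00:00.999999.
print(launched_at_window(datetime.datetime(2013, 6, 1, 12, 0, 0, 123456)))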
@@ -234,15 +209,6 @@ class InstanceReconcile(models.Model):
     source = models.CharField(max_length=150, null=True,
                               blank=True, db_index=True)
 
-    @staticmethod
-    def find(instance, launched):
-        start = launched - datetime.timedelta(microseconds=launched.microsecond)
-        end = start + datetime.timedelta(microseconds=999999)
-        params = {'instance': instance,
-                  'launched_at__gte': dt.dt_to_decimal(start),
-                  'launched_at__lte': dt.dt_to_decimal(end)}
-        return InstanceReconcile.objects.filter(**params)
-
 
 class InstanceExists(models.Model):
     PENDING = 'pending'
@@ -294,32 +260,6 @@ class InstanceExists(models.Model):
     def deployment(self):
         return self.raw.deployment
 
-    @staticmethod
-    def find(ending_max, status):
-        params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
-                  'status': status}
-        return InstanceExists.objects.select_related()\
-                             .filter(**params).order_by('id')
-
-    def mark_verified(self, reconciled=False, reason=None):
-        if not reconciled:
-            self.status = InstanceExists.VERIFIED
-        else:
-            self.status = InstanceExists.RECONCILED
-        if reason is not None:
-            self.fail_reason = reason
-
-        self.save()
-
-    def mark_failed(self, reason=None):
-        self.status = InstanceExists.FAILED
-        if reason:
-            self.fail_reason = reason
-        self.save()
-
-    def update_status(self, new_status):
-        self.status = new_status
-
 
 class Timing(models.Model):
     """Each Timing record corresponds to a .start/.end event pair
@@ -436,13 +376,6 @@ class ImageDeletes(models.Model):
                              null=True)
     raw = models.ForeignKey(GlanceRawData, null=True)
 
-    @staticmethod
-    def find(uuid, deleted_max=None):
-        params = {'uuid': uuid}
-        if deleted_max:
-            params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
-        return ImageDeletes.objects.filter(**params)
-
 
 class ImageExists(models.Model):
     PENDING = 'pending'
@@ -479,25 +412,6 @@
     owner = models.CharField(max_length=255, db_index=True)
     size = models.BigIntegerField(max_length=20)
 
-    def update_status(self, new_status):
-        self.status = new_status
-
-    @staticmethod
-    def find(ending_max, status):
-        params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
-                  'status': status}
-        return ImageExists.objects.select_related().filter(**params).order_by('id')
-
-    def mark_verified(self):
-        self.status = InstanceExists.VERIFIED
-        self.save()
-
-    def mark_failed(self, reason=None):
-        self.status = InstanceExists.FAILED
-        if reason:
-            self.fail_reason = reason
-        self.save()
-
 
 def get_model_fields(model):
     return model._meta.fields
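Taken together, the removed methods above gave ImageExists (and, earlier, InstanceExists) a small verification lifecycle: find() pulls pending rows up to an audit-period cutoff, and mark_verified()/mark_failed() record the outcome. A hedged sketch of how the Glance verifier would have driven it (the loop and check_image are invented for illustration; the method names, statuses, and signatures are the ones shown in the removed code):

import datetime

from stacktach import models  # assumes a configured Django environment


def verify_pending_images(ending_max):
    pending = models.ImageExists.find(ending_max=ending_max,
                                      status=models.ImageExists.PENDING)
    for exist in pending:
        try:
            check_image(exist)              # hypothetical per-row comparison
        except Exception as error:
            exist.mark_failed(reason=str(error))
        else:
            exist.mark_verified()


def check_image(exist):
    # Placeholder for the usage/delete field comparisons the verifier performed.
    pass


verify_pending_images(datetime.datetime.utcnow())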
@@ -166,11 +166,14 @@ class GlanceNotification(Notification):
             'size': self.size,
             'raw': raw
         }
-        usage = db.get_image_usage(uuid=self.uuid)
+        created_at_range = (self.created_at, self.created_at+1)
+        usage = db.get_image_usage(
+            uuid=self.uuid, created_at__range=created_at_range)
         values['usage'] = usage
         values['created_at'] = self.created_at
         if self.deleted_at:
-            delete = db.get_image_delete(uuid=self.uuid)
+            delete = db.get_image_delete(
+                uuid=self.uuid, created_at__range=created_at_range)
             values['delete'] = delete
             values['deleted_at'] = self.deleted_at
 
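A hedged note on the created_at__range filter introduced above (Django ORM semantics, not something the diff itself spells out): __range maps to an inclusive BETWEEN, so pairing created_at with created_at + 1 restricts the lookup to usages recorded within one unit of the exists row's decimal timestamp. Standing in for db.get_image_usage, the underlying query is roughly:

import decimal

from stacktach import models  # assumes a configured Django environment

# Hypothetical values for illustration only.
image_uuid = 'some-image-uuid'
created_at = decimal.Decimal('1.1')
created_at_range = (created_at, created_at + 1)

# Equivalent SQL: ... WHERE uuid = %s AND created_at BETWEEN 1.1 AND 2.1
usage = models.ImageUsage.objects.filter(
    uuid=image_uuid, created_at__range=created_at_range)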
@@ -288,7 +288,6 @@ def _process_exists(raw, notification):
 def _process_glance_usage(raw, notification):
     notification.save_usage(raw)
 
-
 def _process_glance_delete(raw, notification):
     notification.save_delete(raw)
 
@@ -1,250 +0,0 @@
|
|||||||
import datetime
|
|
||||||
import time
|
|
||||||
from django.db import transaction
|
|
||||||
import mox
|
|
||||||
from stacktach import message_service
|
|
||||||
from tests.unit import StacktachBaseTestCase
|
|
||||||
from tests.unit.utils import HOST, PORT, VIRTUAL_HOST, USERID, PASSWORD, TICK_TIME, SETTLE_TIME, SETTLE_UNITS
|
|
||||||
from tests.unit.utils import make_verifier_config
|
|
||||||
from verifier import base_verifier
|
|
||||||
|
|
||||||
|
|
||||||
class BaseVerifierTestCase(StacktachBaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.mox = mox.Mox()
|
|
||||||
config = make_verifier_config(False)
|
|
||||||
self.pool = self.mox.CreateMockAnything()
|
|
||||||
self.reconciler = self.mox.CreateMockAnything()
|
|
||||||
self.verifier_with_reconciler = base_verifier.Verifier(config,
|
|
||||||
pool=self.pool, reconciler=self.reconciler)
|
|
||||||
self.verifier_without_notifications = self\
|
|
||||||
._verifier_with_notifications_disabled()
|
|
||||||
self.verifier_with_notifications = self\
|
|
||||||
._verifier_with_notifications_enabled()
|
|
||||||
|
|
||||||
def _verifier_with_notifications_disabled(self):
|
|
||||||
config = make_verifier_config(False)
|
|
||||||
reconciler = self.mox.CreateMockAnything()
|
|
||||||
return base_verifier.Verifier(config,
|
|
||||||
pool=self.pool,
|
|
||||||
reconciler=reconciler)
|
|
||||||
|
|
||||||
def _verifier_with_notifications_enabled(self):
|
|
||||||
config = make_verifier_config(True)
|
|
||||||
reconciler = self.mox.CreateMockAnything()
|
|
||||||
return base_verifier.Verifier(config,
|
|
||||||
pool=self.pool,
|
|
||||||
reconciler=reconciler)
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
self.mox.UnsetStubs()
|
|
||||||
|
|
||||||
def test_should_create_verifier_with_reconciler(self):
|
|
||||||
config = make_verifier_config(False)
|
|
||||||
rec = self.mox.CreateMockAnything()
|
|
||||||
verifier = base_verifier.Verifier(config, pool=None, reconciler=rec)
|
|
||||||
self.assertEqual(verifier.reconciler, rec)
|
|
||||||
|
|
||||||
def test_clean_results_full(self):
|
|
||||||
result_not_ready = self.mox.CreateMockAnything()
|
|
||||||
result_not_ready.ready().AndReturn(False)
|
|
||||||
result_unsuccessful = self.mox.CreateMockAnything()
|
|
||||||
result_unsuccessful.ready().AndReturn(True)
|
|
||||||
result_unsuccessful.successful().AndReturn(False)
|
|
||||||
result_successful = self.mox.CreateMockAnything()
|
|
||||||
result_successful.ready().AndReturn(True)
|
|
||||||
result_successful.successful().AndReturn(True)
|
|
||||||
result_successful.get().AndReturn((True, None))
|
|
||||||
result_failed_verification = self.mox.CreateMockAnything()
|
|
||||||
result_failed_verification.ready().AndReturn(True)
|
|
||||||
result_failed_verification.successful().AndReturn(True)
|
|
||||||
failed_exists = self.mox.CreateMockAnything()
|
|
||||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
|
||||||
self.verifier_with_reconciler.results = [result_not_ready,
|
|
||||||
result_unsuccessful,
|
|
||||||
result_successful,
|
|
||||||
result_failed_verification]
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
|
||||||
self.assertEqual(result_count, 1)
|
|
||||||
self.assertEqual(success_count, 2)
|
|
||||||
self.assertEqual(errored, 1)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.results), 1)
|
|
||||||
self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
|
|
||||||
self.assertEqual(self.verifier_with_reconciler.failed[0], result_failed_verification)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_clean_results_pending(self):
|
|
||||||
result_not_ready = self.mox.CreateMockAnything()
|
|
||||||
result_not_ready.ready().AndReturn(False)
|
|
||||||
self.verifier_with_reconciler.results = [result_not_ready]
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
|
||||||
self.assertEqual(result_count, 1)
|
|
||||||
self.assertEqual(success_count, 0)
|
|
||||||
self.assertEqual(errored, 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.results), 1)
|
|
||||||
self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_clean_results_successful(self):
|
|
||||||
self.verifier_with_reconciler.reconcile = True
|
|
||||||
result_successful = self.mox.CreateMockAnything()
|
|
||||||
result_successful.ready().AndReturn(True)
|
|
||||||
result_successful.successful().AndReturn(True)
|
|
||||||
result_successful.get().AndReturn((True, None))
|
|
||||||
self.verifier_with_reconciler.results = [result_successful]
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
|
||||||
self.assertEqual(result_count, 0)
|
|
||||||
self.assertEqual(success_count, 1)
|
|
||||||
self.assertEqual(errored, 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_clean_results_unsuccessful(self):
|
|
||||||
result_unsuccessful = self.mox.CreateMockAnything()
|
|
||||||
result_unsuccessful.ready().AndReturn(True)
|
|
||||||
result_unsuccessful.successful().AndReturn(False)
|
|
||||||
self.verifier_with_reconciler.results = [result_unsuccessful]
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
(result_count, success_count, errored) = \
|
|
||||||
self.verifier_with_reconciler.clean_results()
|
|
||||||
self.assertEqual(result_count, 0)
|
|
||||||
self.assertEqual(success_count, 0)
|
|
||||||
self.assertEqual(errored, 1)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_clean_results_fail_verification(self):
|
|
||||||
result_failed_verification = self.mox.CreateMockAnything()
|
|
||||||
result_failed_verification.ready().AndReturn(True)
|
|
||||||
result_failed_verification.successful().AndReturn(True)
|
|
||||||
failed_exists = self.mox.CreateMockAnything()
|
|
||||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
|
||||||
self.verifier_with_reconciler.results = [result_failed_verification]
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
(result_count, success_count, errored) = \
|
|
||||||
self.verifier_with_reconciler.clean_results()
|
|
||||||
self.assertEqual(result_count, 0)
|
|
||||||
self.assertEqual(success_count, 1)
|
|
||||||
self.assertEqual(errored, 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
|
||||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
|
|
||||||
self.assertEqual(self.verifier_with_reconciler.failed[0], failed_exists)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_run_notifications(self):
|
|
||||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
|
|
||||||
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
self.verifier_with_notifications.run()
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_run_notifications_with_routing_keys(self):
|
|
||||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
|
|
||||||
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
self.verifier_with_notifications.run()
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_run_no_notifications(self):
|
|
||||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_run')
|
|
||||||
self.verifier_without_notifications._run()
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
self.verifier_without_notifications.run()
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_run_full_no_notifications(self):
|
|
||||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
|
||||||
tran = self.mox.CreateMockAnything()
|
|
||||||
tran.__enter__().AndReturn(tran)
|
|
||||||
tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
|
|
||||||
transaction.commit_on_success().AndReturn(tran)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_keep_running')
|
|
||||||
self.verifier_without_notifications._keep_running().AndReturn(True)
|
|
||||||
start = datetime.datetime.utcnow()
|
|
||||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_utcnow')
|
|
||||||
self.verifier_without_notifications._utcnow().AndReturn(start)
|
|
||||||
settle_offset = {SETTLE_UNITS: SETTLE_TIME}
|
|
||||||
ending_max = start - datetime.timedelta(**settle_offset)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_without_notifications, 'verify_for_range')
|
|
||||||
self.verifier_without_notifications.verify_for_range(ending_max, callback=None)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_without_notifications, 'reconcile_failed')
|
|
||||||
result1 = self.mox.CreateMockAnything()
|
|
||||||
result2 = self.mox.CreateMockAnything()
|
|
||||||
self.verifier_without_notifications.results = [result1, result2]
|
|
||||||
result1.ready().AndReturn(True)
|
|
||||||
result1.successful().AndReturn(True)
|
|
||||||
result1.get().AndReturn((True, None))
|
|
||||||
result2.ready().AndReturn(True)
|
|
||||||
result2.successful().AndReturn(True)
|
|
||||||
result2.get().AndReturn((True, None))
|
|
||||||
self.verifier_without_notifications.reconcile_failed()
|
|
||||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
|
||||||
time.sleep(TICK_TIME)
|
|
||||||
self.verifier_without_notifications._keep_running().AndReturn(False)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
self.verifier_without_notifications.run()
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_run_full(self):
|
|
||||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
|
||||||
tran = self.mox.CreateMockAnything()
|
|
||||||
tran.__enter__().AndReturn(tran)
|
|
||||||
tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
|
|
||||||
transaction.commit_on_success().AndReturn(tran)
|
|
||||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
|
||||||
self.verifier_with_notifications.exchange().AndReturn('exchange')
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_keep_running')
|
|
||||||
self.verifier_with_notifications._keep_running().AndReturn(True)
|
|
||||||
start = datetime.datetime.utcnow()
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_utcnow')
|
|
||||||
self.verifier_with_notifications._utcnow().AndReturn(start)
|
|
||||||
settle_offset = {SETTLE_UNITS: SETTLE_TIME}
|
|
||||||
ending_max = start - datetime.timedelta(**settle_offset)
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, 'verify_for_range')
|
|
||||||
self.verifier_with_notifications.verify_for_range(ending_max,
|
|
||||||
callback=mox.Not(mox.Is(None)))
|
|
||||||
self.mox.StubOutWithMock(self.verifier_with_notifications, 'reconcile_failed')
|
|
||||||
result1 = self.mox.CreateMockAnything()
|
|
||||||
result2 = self.mox.CreateMockAnything()
|
|
||||||
self.verifier_with_notifications.results = [result1, result2]
|
|
||||||
result1.ready().AndReturn(True)
|
|
||||||
result1.successful().AndReturn(True)
|
|
||||||
result1.get().AndReturn((True, None))
|
|
||||||
result2.ready().AndReturn(True)
|
|
||||||
result2.successful().AndReturn(True)
|
|
||||||
result2.get().AndReturn((True, None))
|
|
||||||
self.verifier_with_notifications.reconcile_failed()
|
|
||||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
|
||||||
time.sleep(TICK_TIME)
|
|
||||||
self.verifier_with_notifications._keep_running().AndReturn(False)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
self.verifier_with_notifications.run()
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def _mock_exchange_create_and_connect(self, verifier):
|
|
||||||
self.mox.StubOutWithMock(verifier, 'exchange')
|
|
||||||
self.verifier_with_notifications.exchange().AndReturn('exchange')
|
|
||||||
self.mox.StubOutWithMock(message_service, 'create_exchange')
|
|
||||||
exchange = self.mox.CreateMockAnything()
|
|
||||||
message_service.create_exchange('exchange', 'topic', durable=True) \
|
|
||||||
.AndReturn(exchange)
|
|
||||||
self.mox.StubOutWithMock(message_service, 'create_connection')
|
|
||||||
conn = self.mox.CreateMockAnything()
|
|
||||||
conn.__enter__().AndReturn(conn)
|
|
||||||
conn.__exit__(None, None, None)
|
|
||||||
message_service.create_connection(HOST, PORT, USERID,
|
|
||||||
PASSWORD, "librabbitmq",
|
|
||||||
VIRTUAL_HOST).AndReturn(conn)
|
|
@@ -1,416 +0,0 @@
|
|||||||
# Copyright (c) 2013 - Rackspace Inc.
|
|
||||||
#
|
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
# of this software and associated documentation files (the "Software"), to
|
|
||||||
# deal in the Software without restriction, including without limitation the
|
|
||||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
|
||||||
# sell copies of the Software, and to permit persons to whom the Software is
|
|
||||||
# furnished to do so, subject to the following conditions:
|
|
||||||
#
|
|
||||||
# The above copyright notice and this permission notice shall be included in
|
|
||||||
# all copies or substantial portions of the Software.
|
|
||||||
#
|
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
|
||||||
# IN THE SOFTWARE.
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
import decimal
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
import kombu
|
|
||||||
|
|
||||||
import mox
|
|
||||||
|
|
||||||
from stacktach import datetime_to_decimal as dt
|
|
||||||
from stacktach import models
|
|
||||||
from tests.unit import StacktachBaseTestCase
|
|
||||||
from utils import IMAGE_UUID_1
|
|
||||||
from utils import make_verifier_config
|
|
||||||
from verifier import glance_verifier
|
|
||||||
from verifier import FieldMismatch
|
|
||||||
from verifier import NotFound
|
|
||||||
from verifier import VerificationException
|
|
||||||
|
|
||||||
|
|
||||||
class GlanceVerifierTestCase(StacktachBaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.mox = mox.Mox()
|
|
||||||
self.mox.StubOutWithMock(models, 'ImageUsage', use_mock_anything=True)
|
|
||||||
models.ImageUsage.objects = self.mox.CreateMockAnything()
|
|
||||||
self.pool = self.mox.CreateMockAnything()
|
|
||||||
config = make_verifier_config(False)
|
|
||||||
self.glance_verifier = glance_verifier.GlanceVerifier(config,
|
|
||||||
pool=self.pool)
|
|
||||||
self.mox.StubOutWithMock(models, 'ImageDeletes',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.ImageDeletes.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'ImageExists',
|
|
||||||
use_mock_anything=True)
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
self.mox.UnsetStubs()
|
|
||||||
self.verifier = None
|
|
||||||
|
|
||||||
def test_verify_usage_should_not_raise_exception_on_success(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.created_at = decimal.Decimal('1.1')
|
|
||||||
exist.owner = 'owner'
|
|
||||||
exist.size = 1234
|
|
||||||
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.usage.created_at = decimal.Decimal('1.1')
|
|
||||||
exist.usage.size = 1234
|
|
||||||
exist.usage.owner = 'owner'
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_usage_created_at_mismatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.created_at = decimal.Decimal('1.1')
|
|
||||||
exist.usage.created_at = decimal.Decimal('2.1')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(FieldMismatch) as cm:
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
|
|
||||||
exception = cm.exception
|
|
||||||
self.assertEqual(exception.field_name, 'created_at')
|
|
||||||
self.assertEqual(exception.expected, decimal.Decimal('1.1'))
|
|
||||||
self.assertEqual(exception.actual, decimal.Decimal('2.1'))
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_usage_owner_mismatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.owner = 'owner'
|
|
||||||
exist.usage.owner = 'not_owner'
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(FieldMismatch) as cm:
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
|
|
||||||
exception = cm.exception
|
|
||||||
self.assertEqual(exception.field_name, 'owner')
|
|
||||||
self.assertEqual(exception.expected, 'owner')
|
|
||||||
self.assertEqual(exception.actual, 'not_owner')
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_usage_size_mismatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.size = 1234
|
|
||||||
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.usage.size = 5678
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(FieldMismatch) as cm:
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
exception = cm.exception
|
|
||||||
|
|
||||||
self.assertEqual(exception.field_name, 'size')
|
|
||||||
self.assertEqual(exception.expected, 1234)
|
|
||||||
self.assertEqual(exception.actual, 5678)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_usage_for_late_usage(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = None
|
|
||||||
exist.uuid = IMAGE_UUID_1
|
|
||||||
exist.created_at = decimal.Decimal('1.1')
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1)\
|
|
||||||
.AndReturn(results)
|
|
||||||
results.count().AndReturn(1)
|
|
||||||
usage = self.mox.CreateMockAnything()
|
|
||||||
results.__getitem__(0).AndReturn(usage)
|
|
||||||
usage.created_at = decimal.Decimal('1.1')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_usage_raises_not_found_for_no_usage(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = None
|
|
||||||
exist.uuid = IMAGE_UUID_1
|
|
||||||
exist.created_at = decimal.Decimal('1.1')
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1) \
|
|
||||||
.AndReturn(results)
|
|
||||||
results.count().AndReturn(0)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(NotFound) as cm:
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
exception = cm.exception
|
|
||||||
self.assertEqual(exception.object_type, 'ImageUsage')
|
|
||||||
self.assertEqual(exception.search_params, {'uuid': IMAGE_UUID_1})
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_delete(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.delete = self.mox.CreateMockAnything()
|
|
||||||
exist.deleted_at = decimal.Decimal('5.1')
|
|
||||||
exist.delete.deleted_at = decimal.Decimal('5.1')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_delete_when_late_delete(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.uuid = IMAGE_UUID_1
|
|
||||||
exist.delete = None
|
|
||||||
exist.deleted_at = decimal.Decimal('5.1')
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageDeletes.find(uuid=IMAGE_UUID_1).AndReturn(results)
|
|
||||||
results.count().AndReturn(1)
|
|
||||||
delete = self.mox.CreateMockAnything()
|
|
||||||
delete.deleted_at = decimal.Decimal('5.1')
|
|
||||||
results.__getitem__(0).AndReturn(delete)
|
|
||||||
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_delete_when_no_delete(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.delete = None
|
|
||||||
exist.uuid = IMAGE_UUID_1
|
|
||||||
exist.deleted_at = None
|
|
||||||
audit_period_ending = decimal.Decimal('1.2')
|
|
||||||
exist.audit_period_ending = audit_period_ending
|
|
||||||
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageDeletes.find(
|
|
||||||
IMAGE_UUID_1, dt.dt_from_decimal(audit_period_ending)).AndReturn(
|
|
||||||
results)
|
|
||||||
results.count().AndReturn(0)
|
|
||||||
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_delete_found_delete_when_exist_deleted_at_is_none(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.delete = None
|
|
||||||
exist.uuid = IMAGE_UUID_1
|
|
||||||
audit_period_ending = decimal.Decimal('1.3')
|
|
||||||
exist.deleted_at = None
|
|
||||||
exist.audit_period_ending = audit_period_ending
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageDeletes.find(
|
|
||||||
IMAGE_UUID_1, dt.dt_from_decimal(audit_period_ending)).AndReturn(
|
|
||||||
results)
|
|
||||||
results.count().AndReturn(1)
|
|
||||||
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(VerificationException) as ve:
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
exception = ve.exception
|
|
||||||
self.assertEqual(exception.reason,
|
|
||||||
'Found ImageDeletes for non-delete exist')
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_delete_deleted_at_mismatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.delete = self.mox.CreateMockAnything()
|
|
||||||
exist.deleted_at = decimal.Decimal('5.1')
|
|
||||||
exist.delete.deleted_at = decimal.Decimal('4.1')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
with self.assertRaises(FieldMismatch) as fm:
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
exception = fm.exception
|
|
||||||
self.assertEqual(exception.field_name, 'deleted_at')
|
|
||||||
self.assertEqual(exception.expected, decimal.Decimal('5.1'))
|
|
||||||
self.assertEqual(exception.actual, decimal.Decimal('4.1'))
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_for_delete_size_mismatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.delete = self.mox.CreateMockAnything()
|
|
||||||
exist.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.deleted_at = decimal.Decimal('5.1')
|
|
||||||
exist.delete.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.delete.deleted_at = decimal.Decimal('6.1')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
try:
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
self.fail()
|
|
||||||
except FieldMismatch, fm:
|
|
||||||
self.assertEqual(fm.field_name, 'deleted_at')
|
|
||||||
self.assertEqual(fm.expected, decimal.Decimal('5.1'))
|
|
||||||
self.assertEqual(fm.actual, decimal.Decimal('6.1'))
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_should_verify_exists_for_usage_and_delete(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
|
|
||||||
glance_verifier._verify_for_usage(exist)
|
|
||||||
self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete')
|
|
||||||
glance_verifier._verify_for_delete(exist)
|
|
||||||
exist.mark_verified()
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
verified, exist = glance_verifier._verify(exist)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
self.assertTrue(verified)
|
|
||||||
|
|
||||||
|
|
||||||
def test_verify_exist_marks_exist_as_failed_if_field_mismatch_exception_is_raised(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
|
|
||||||
field_mismatch_exc = FieldMismatch('field', 'expected', 'actual')
|
|
||||||
glance_verifier._verify_for_usage(exist).AndRaise(exception=field_mismatch_exc)
|
|
||||||
exist.mark_failed(reason='FieldMismatch')
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
verified, exist = glance_verifier._verify(exist)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
self.assertFalse(verified)
|
|
||||||
|
|
||||||
def test_verify_for_range_without_callback(self):
|
|
||||||
when_max = datetime.utcnow()
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageExists.PENDING = 'pending'
|
|
||||||
models.ImageExists.VERIFYING = 'verifying'
|
|
||||||
self.mox.StubOutWithMock(models.ImageExists, 'find')
|
|
||||||
models.ImageExists.find(
|
|
||||||
ending_max=when_max,
|
|
||||||
status=models.ImageExists.PENDING).AndReturn(results)
|
|
||||||
results.count().AndReturn(2)
|
|
||||||
exist1 = self.mox.CreateMockAnything()
|
|
||||||
exist2 = self.mox.CreateMockAnything()
|
|
||||||
results.__getslice__(0, 1000).AndReturn(results)
|
|
||||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
|
||||||
exist1.save()
|
|
||||||
exist2.save()
|
|
||||||
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
|
|
||||||
callback=None)
|
|
||||||
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
|
|
||||||
callback=None)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
self.glance_verifier.verify_for_range(when_max)
|
|
||||||
self.assertEqual(exist1.status, 'verifying')
|
|
||||||
self.assertEqual(exist2.status, 'verifying')
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_for_range_with_callback(self):
|
|
||||||
callback = self.mox.CreateMockAnything()
|
|
||||||
when_max = datetime.utcnow()
|
|
||||||
results = self.mox.CreateMockAnything()
|
|
||||||
models.ImageExists.PENDING = 'pending'
|
|
||||||
models.ImageExists.VERIFYING = 'verifying'
|
|
||||||
models.ImageExists.find(
|
|
||||||
ending_max=when_max,
|
|
||||||
status=models.ImageExists.PENDING).AndReturn(results)
|
|
||||||
results.count().AndReturn(2)
|
|
||||||
exist1 = self.mox.CreateMockAnything()
|
|
||||||
exist2 = self.mox.CreateMockAnything()
|
|
||||||
results.__getslice__(0, 1000).AndReturn(results)
|
|
||||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
|
||||||
exist1.save()
|
|
||||||
exist2.save()
|
|
||||||
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
|
|
||||||
callback=callback)
|
|
||||||
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
|
|
||||||
callback=callback)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
self.glance_verifier.verify_for_range(
|
|
||||||
when_max, callback=callback)
|
|
||||||
self.assertEqual(exist1.status, 'verifying')
|
|
||||||
self.assertEqual(exist2.status, 'verifying')
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_send_verified_notification_routing_keys(self):
|
|
||||||
connection = self.mox.CreateMockAnything()
|
|
||||||
exchange = self.mox.CreateMockAnything()
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.raw = self.mox.CreateMockAnything()
|
|
||||||
exist_dict = [
|
|
||||||
'monitor.info',
|
|
||||||
{
|
|
||||||
'event_type': 'test',
|
|
||||||
'message_id': 'some_uuid'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
exist_str = json.dumps(exist_dict)
|
|
||||||
exist.raw.json = exist_str
|
|
||||||
self.mox.StubOutWithMock(uuid, 'uuid4')
|
|
||||||
uuid.uuid4().AndReturn('some_other_uuid')
|
|
||||||
self.mox.StubOutWithMock(kombu.pools, 'producers')
|
|
||||||
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
|
|
||||||
routing_keys = ['notifications.info', 'monitor.info']
|
|
||||||
for key in routing_keys:
|
|
||||||
producer = self.mox.CreateMockAnything()
|
|
||||||
producer.channel = self.mox.CreateMockAnything()
|
|
||||||
kombu.pools.producers[connection].AndReturn(producer)
|
|
||||||
producer.acquire(block=True).AndReturn(producer)
|
|
||||||
producer.__enter__().AndReturn(producer)
|
|
||||||
kombu.common.maybe_declare(exchange, producer.channel)
|
|
||||||
message = {'event_type': 'image.exists.verified.old',
|
|
||||||
'message_id': 'some_other_uuid',
|
|
||||||
'original_message_id': 'some_uuid'}
|
|
||||||
producer.publish(message, key)
|
|
||||||
producer.__exit__(None, None, None)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
self.glance_verifier.send_verified_notification(
|
|
||||||
exist, exchange, connection, routing_keys=routing_keys)
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_send_verified_notification_default_routing_key(self):
|
|
||||||
connection = self.mox.CreateMockAnything()
|
|
||||||
exchange = self.mox.CreateMockAnything()
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.raw = self.mox.CreateMockAnything()
|
|
||||||
exist_dict = [
|
|
||||||
'monitor.info',
|
|
||||||
{
|
|
||||||
'event_type': 'test',
|
|
||||||
'message_id': 'some_uuid'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
exist_str = json.dumps(exist_dict)
|
|
||||||
exist.raw.json = exist_str
|
|
||||||
self.mox.StubOutWithMock(kombu.pools, 'producers')
|
|
||||||
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
|
|
||||||
producer = self.mox.CreateMockAnything()
|
|
||||||
producer.channel = self.mox.CreateMockAnything()
|
|
||||||
kombu.pools.producers[connection].AndReturn(producer)
|
|
||||||
producer.acquire(block=True).AndReturn(producer)
|
|
||||||
producer.__enter__().AndReturn(producer)
|
|
||||||
kombu.common.maybe_declare(exchange, producer.channel)
|
|
||||||
self.mox.StubOutWithMock(uuid, 'uuid4')
|
|
||||||
uuid.uuid4().AndReturn('some_other_uuid')
|
|
||||||
message = {'event_type': 'image.exists.verified.old',
|
|
||||||
'message_id': 'some_other_uuid',
|
|
||||||
'original_message_id': 'some_uuid'}
|
|
||||||
producer.publish(message, exist_dict[0])
|
|
||||||
producer.__exit__(None, None, None)
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
self.glance_verifier.send_verified_notification(exist, exchange,
|
|
||||||
connection)
|
|
||||||
self.mox.VerifyAll()
|
|
@@ -17,13 +17,7 @@
 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 # IN THE SOFTWARE.
-from datetime import datetime
 
-import unittest
-import mox
-from stacktach.models import RawData, GlanceRawData, GenericRawData, ImageDeletes, InstanceExists, ImageExists
-from tests.unit.utils import IMAGE_UUID_1
-from stacktach import datetime_to_decimal as dt, models
 from stacktach.models import RawData, GlanceRawData, GenericRawData
 from tests.unit import StacktachBaseTestCase
 
@@ -37,85 +31,3 @@ class ModelsTestCase(StacktachBaseTestCase):
 
     def test_get_name_for_genericrawdata(self):
         self.assertEquals(GenericRawData.get_name(), 'GenericRawData')
-
-
-class ImageDeletesTestCase(unittest.TestCase):
-    def setUp(self):
-        self.mox = mox.Mox()
-
-    def tearDown(self):
-        self.mox.UnsetStubs()
-
-    def test_find_delete_should_return_delete_issued_before_given_time(self):
-        delete = self.mox.CreateMockAnything()
-        deleted_max = datetime.utcnow()
-        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
-        ImageDeletes.objects.filter(
-            uuid=IMAGE_UUID_1,
-            deleted_at__lte=dt.dt_to_decimal(deleted_max)).AndReturn(delete)
-        self.mox.ReplayAll()
-
-        self.assertEquals(ImageDeletes.find(
-            IMAGE_UUID_1, deleted_max), delete)
-        self.mox.VerifyAll()
-
-    def test_find_delete_should_return_delete_with_the_given_uuid(self):
-        delete = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
-        ImageDeletes.objects.filter(uuid=IMAGE_UUID_1).AndReturn(delete)
-        self.mox.ReplayAll()
-
-        self.assertEquals(ImageDeletes.find(IMAGE_UUID_1, None), delete)
-        self.mox.VerifyAll()
-
-
-class ImageExistsTestCase(unittest.TestCase):
-    def setUp(self):
-        self.mox = mox.Mox()
-
-    def tearDown(self):
-        self.mox.UnsetStubs()
-
-    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
-        end_max = datetime.utcnow()
-        status = 'pending'
-        unordered_results = self.mox.CreateMockAnything()
-        expected_results = [1, 2]
-        related_results = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(ImageExists.objects, 'select_related')
-        ImageExists.objects.select_related().AndReturn(related_results)
-        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
-            end_max), status=status).AndReturn(unordered_results)
-        unordered_results.order_by('id').AndReturn(expected_results)
-        self.mox.ReplayAll()
-
-        results = ImageExists.find(end_max, status)
-
-        self.mox.VerifyAll()
-        self.assertEqual(results, [1, 2])
-
-
-class InstanceExistsTestCase(unittest.TestCase):
-    def setUp(self):
-        self.mox = mox.Mox()
-
-    def tearDown(self):
-        self.mox.UnsetStubs()
-
-    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
-        end_max = datetime.utcnow()
-        status = 'pending'
-        unordered_results = self.mox.CreateMockAnything()
-        expected_results = [1, 2]
-        related_results = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(InstanceExists.objects, 'select_related')
-        InstanceExists.objects.select_related().AndReturn(related_results)
-        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
-            end_max), status=status).AndReturn(unordered_results)
-        unordered_results.order_by('id').AndReturn(expected_results)
-        self.mox.ReplayAll()
-
-        results = InstanceExists.find(end_max, status)
-
-        self.mox.VerifyAll()
-        self.assertEqual(results, [1, 2])
@@ -267,7 +267,9 @@ class GlanceNotificationTestCase(StacktachBaseTestCase):
         self.mox.StubOutWithMock(db, 'create_image_exists')
         self.mox.StubOutWithMock(db, 'get_image_usage')
 
-        db.get_image_usage(uuid=uuid).AndReturn(None)
+        created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
+        db.get_image_usage(created_at__range=created_at_range,
+                           uuid=uuid).AndReturn(None)
         db.create_image_exists(
             created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
             owner=TENANT_ID_1,
@@ -320,8 +322,11 @@ class GlanceNotificationTestCase(StacktachBaseTestCase):
         self.mox.StubOutWithMock(db, 'get_image_usage')
         self.mox.StubOutWithMock(db, 'get_image_delete')
 
-        db.get_image_usage(uuid=uuid).AndReturn(None)
-        db.get_image_delete(uuid=uuid).AndReturn(delete)
+        created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
+        db.get_image_usage(created_at__range=created_at_range,
+                           uuid=uuid).AndReturn(None)
+        db.get_image_delete(created_at__range=created_at_range,
+                            uuid=uuid).AndReturn(delete)
         db.create_image_exists(
             created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
             owner=TENANT_ID_1,
@@ -374,7 +379,9 @@ class GlanceNotificationTestCase(StacktachBaseTestCase):
         self.mox.StubOutWithMock(db, 'get_image_usage')
         self.mox.StubOutWithMock(db, 'get_image_delete')
 
-        db.get_image_usage(uuid=uuid).AndReturn(usage)
+        created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
+        db.get_image_usage(created_at__range=created_at_range,
+                           uuid=uuid).AndReturn(usage)
         db.create_image_exists(
             created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
             owner=TENANT_ID_1,
@@ -1,839 +0,0 @@
|
|||||||
# Copyright (c) 2013 - Rackspace Inc.
|
|
||||||
#
|
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
# of this software and associated documentation files (the "Software"), to
|
|
||||||
# deal in the Software without restriction, including without limitation the
|
|
||||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
|
||||||
# sell copies of the Software, and to permit persons to whom the Software is
|
|
||||||
# furnished to do so, subject to the following conditions:
|
|
||||||
#
|
|
||||||
# The above copyright notice and this permission notice shall be included in
|
|
||||||
# all copies or substantial portions of the Software.
|
|
||||||
#
|
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
|
||||||
# IN THE SOFTWARE.
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import decimal
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import kombu.common
|
|
||||||
import kombu.entity
|
|
||||||
import kombu.pools
|
|
||||||
import mox
|
|
||||||
|
|
||||||
from stacktach import datetime_to_decimal as dt
|
|
||||||
from stacktach import models
|
|
||||||
from tests.unit import StacktachBaseTestCase
|
|
||||||
from utils import make_verifier_config
|
|
||||||
from utils import INSTANCE_ID_1
|
|
||||||
from utils import RAX_OPTIONS_1
|
|
||||||
from utils import RAX_OPTIONS_2
|
|
||||||
from utils import OS_DISTRO_1
|
|
||||||
from utils import OS_DISTRO_2
|
|
||||||
from utils import OS_ARCH_1
|
|
||||||
from utils import OS_ARCH_2
|
|
||||||
from utils import OS_VERSION_1
|
|
||||||
from utils import OS_VERSION_2
|
|
||||||
from utils import TENANT_ID_1
|
|
||||||
from utils import TENANT_ID_2
|
|
||||||
from utils import INSTANCE_TYPE_ID_1
|
|
||||||
from verifier import nova_verifier
|
|
||||||
from verifier import AmbiguousResults
|
|
||||||
from verifier import FieldMismatch
|
|
||||||
from verifier import NotFound
|
|
||||||
from verifier import VerificationException
|
|
||||||
|
|
||||||
|
|
||||||
class NovaVerifierTestCase(StacktachBaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.mox = mox.Mox()
|
|
||||||
self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
|
|
||||||
models.RawData.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'Deployment', use_mock_anything=True)
|
|
||||||
models.Deployment.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'Lifecycle', use_mock_anything=True)
|
|
||||||
models.Lifecycle.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'Timing', use_mock_anything=True)
|
|
||||||
models.Timing.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'RequestTracker',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.RequestTracker.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'InstanceUsage',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.InstanceUsage.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'InstanceDeletes',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.InstanceDeletes.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'InstanceReconcile',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.InstanceReconcile.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'InstanceExists',
|
|
||||||
use_mock_anything=True)
|
|
||||||
models.InstanceExists.objects = self.mox.CreateMockAnything()
|
|
||||||
self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
|
|
||||||
models.JsonReport.objects = self.mox.CreateMockAnything()
|
|
||||||
|
|
||||||
self._setup_verifier()
|
|
||||||
|
|
||||||
def _setup_verifier(self):
|
|
||||||
self.pool = self.mox.CreateMockAnything()
|
|
||||||
self.reconciler = self.mox.CreateMockAnything()
|
|
||||||
config = make_verifier_config(False)
|
|
||||||
self.verifier = nova_verifier.NovaVerifier(config,
|
|
||||||
pool=self.pool, reconciler=self.reconciler)
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
self.mox.UnsetStubs()
|
|
||||||
self.verifier = None
|
|
||||||
self.pool = None
|
|
||||||
self.verifier_notif = None
|
|
||||||
|
|
||||||
def test_verify_for_launch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.instance_type_id = INSTANCE_TYPE_ID_1
|
|
||||||
exist.tenant = TENANT_ID_1
|
|
||||||
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.usage.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.usage.instance_type_id = INSTANCE_TYPE_ID_1
|
|
||||||
exist.usage.tenant = TENANT_ID_1
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
nova_verifier._verify_for_launch(exist)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_for_launch_launched_at_in_range(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.launched_at = decimal.Decimal('1.0')
|
|
||||||
exist.instance_type_id = 2
|
|
||||||
exist.usage.launched_at = decimal.Decimal('1.4')
|
|
||||||
exist.usage.instance_type_id = 2
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
result = nova_verifier._verify_for_launch(exist)
|
|
||||||
self.assertIsNone(result)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_for_launch_launched_at_missmatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.instance_type_id = 2
|
|
||||||
exist.usage.launched_at = decimal.Decimal('2.1')
|
|
||||||
exist.usage.instance_type_id = 2
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
try:
|
|
||||||
nova_verifier._verify_for_launch(exist)
|
|
||||||
self.fail()
|
|
||||||
except FieldMismatch, fm:
|
|
||||||
self.assertEqual(fm.field_name, 'launched_at')
|
|
||||||
self.assertEqual(fm.expected, decimal.Decimal('1.1'))
|
|
||||||
self.assertEqual(fm.actual, decimal.Decimal('2.1'))
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
def test_verify_for_launch_instance_type_id_missmatch(self):
|
|
||||||
exist = self.mox.CreateMockAnything()
|
|
||||||
exist.usage = self.mox.CreateMockAnything()
|
|
||||||
exist.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.instance_type_id = 2
|
|
||||||
exist.usage.launched_at = decimal.Decimal('1.1')
|
|
||||||
exist.usage.instance_type_id = 3
|
|
||||||
self.mox.ReplayAll()
|
|
||||||
|
|
||||||
try:
|
|
||||||
nova_verifier._verify_for_launch(exist)
|
|
||||||
self.fail()
|
|
||||||
except FieldMismatch, fm:
|
|
||||||
self.assertEqual(fm.field_name, 'instance_type_id')
|
|
||||||
self.assertEqual(fm.expected, 2)
|
|
||||||
self.assertEqual(fm.actual, 3)
|
|
||||||
|
|
||||||
self.mox.VerifyAll()
|
|
||||||
|
|
||||||
    def test_verify_for_launch_tenant_id_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.tenant = TENANT_ID_1

        exist.usage = self.mox.CreateMockAnything()
        exist.usage.tenant = TENANT_ID_2
        self.mox.ReplayAll()

        with self.assertRaises(FieldMismatch) as cm:
            nova_verifier._verify_for_launch(exist)
        exception = cm.exception

        self.assertEqual(exception.field_name, 'tenant')
        self.assertEqual(exception.expected, TENANT_ID_1)
        self.assertEqual(exception.actual, TENANT_ID_2)

        self.mox.VerifyAll()

    def test_verify_for_launch_rax_options_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.rax_options = RAX_OPTIONS_1

        exist.usage = self.mox.CreateMockAnything()
        exist.usage.rax_options = RAX_OPTIONS_2
        self.mox.ReplayAll()

        with self.assertRaises(FieldMismatch) as cm:
            nova_verifier._verify_for_launch(exist)
        exception = cm.exception

        self.assertEqual(exception.field_name, 'rax_options')
        self.assertEqual(exception.expected, RAX_OPTIONS_1)
        self.assertEqual(exception.actual, RAX_OPTIONS_2)

        self.mox.VerifyAll()

    def test_verify_for_launch_os_distro_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.os_distro = OS_DISTRO_1

        exist.usage = self.mox.CreateMockAnything()
        exist.usage.os_distro = OS_DISTRO_2
        self.mox.ReplayAll()

        with self.assertRaises(FieldMismatch) as cm:
            nova_verifier._verify_for_launch(exist)
        exception = cm.exception

        self.assertEqual(exception.field_name, 'os_distro')
        self.assertEqual(exception.expected, OS_DISTRO_1)
        self.assertEqual(exception.actual, OS_DISTRO_2)

        self.mox.VerifyAll()

    def test_verify_for_launch_os_architecture_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.os_architecture = OS_ARCH_1

        exist.usage = self.mox.CreateMockAnything()
        exist.usage.os_architecture = OS_ARCH_2
        self.mox.ReplayAll()

        with self.assertRaises(FieldMismatch) as cm:
            nova_verifier._verify_for_launch(exist)
        exception = cm.exception

        self.assertEqual(exception.field_name, 'os_architecture')
        self.assertEqual(exception.expected, OS_ARCH_1)
        self.assertEqual(exception.actual, OS_ARCH_2)

        self.mox.VerifyAll()

    def test_verify_for_launch_os_version_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.os_version = OS_VERSION_1

        exist.usage = self.mox.CreateMockAnything()
        exist.usage.os_version = OS_VERSION_2
        self.mox.ReplayAll()

        with self.assertRaises(FieldMismatch) as cm:
            nova_verifier._verify_for_launch(exist)
        exception = cm.exception

        self.assertEqual(exception.field_name, 'os_version')
        self.assertEqual(exception.expected, OS_VERSION_1)
        self.assertEqual(exception.actual, OS_VERSION_2)

        self.mox.VerifyAll()
    def test_verify_for_launch_late_usage(self):
        exist = self.mox.CreateMockAnything()
        exist.usage = None
        exist.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        exist.launched_at = launched_at
        exist.instance_type_id = 2
        results = self.mox.CreateMockAnything()
        models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(2)
        models.InstanceUsage.find(INSTANCE_ID_1, dt.dt_from_decimal(
            launched_at)).AndReturn(results)
        results.count().AndReturn(1)
        usage = self.mox.CreateMockAnything()
        results.__getitem__(0).AndReturn(usage)
        usage.launched_at = decimal.Decimal('1.1')
        usage.instance_type_id = 2
        self.mox.ReplayAll()

        nova_verifier._verify_for_launch(exist)
        self.mox.VerifyAll()

    def test_verify_for_launch_no_usage(self):
        exist = self.mox.CreateMockAnything()
        exist.usage = None
        exist.instance = INSTANCE_ID_1
        exist.launched_at = decimal.Decimal('1.1')
        exist.instance_type_id = 2
        results = self.mox.CreateMockAnything()
        models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1) \
            .AndReturn(results)
        results.count().AndReturn(0)
        self.mox.ReplayAll()

        try:
            nova_verifier._verify_for_launch(exist)
            self.fail()
        except NotFound, nf:
            self.assertEqual(nf.object_type, 'InstanceUsage')
            self.assertEqual(nf.search_params, {'instance': INSTANCE_ID_1})

        self.mox.VerifyAll()

    def test_verify_for_launch_late_ambiguous_usage(self):
        exist = self.mox.CreateMockAnything()
        exist.usage = None
        exist.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        exist.launched_at = launched_at
        exist.instance_type_id = 2
        results = self.mox.CreateMockAnything()
        models.InstanceUsage.objects.filter(
            instance=INSTANCE_ID_1).AndReturn(results)
        results.count().AndReturn(1)
        models.InstanceUsage.find(
            INSTANCE_ID_1, dt.dt_from_decimal(launched_at)).AndReturn(results)
        results.count().AndReturn(2)
        self.mox.ReplayAll()

        try:
            nova_verifier._verify_for_launch(exist)
            self.fail()
        except AmbiguousResults, nf:
            self.assertEqual(nf.object_type, 'InstanceUsage')
            search_params = {'instance': INSTANCE_ID_1,
                             'launched_at': decimal.Decimal('1.1')}
            self.assertEqual(nf.search_params, search_params)

        self.mox.VerifyAll()
    def test_verify_for_delete(self):
        exist = self.mox.CreateMockAnything()
        exist.delete = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        exist.deleted_at = decimal.Decimal('5.1')
        exist.delete.launched_at = decimal.Decimal('1.1')
        exist.delete.deleted_at = decimal.Decimal('5.1')
        self.mox.ReplayAll()

        nova_verifier._verify_for_delete(exist)
        self.mox.VerifyAll()

    def test_verify_for_delete_found_delete(self):
        exist = self.mox.CreateMockAnything()
        exist.delete = None
        exist.instance = INSTANCE_ID_1
        exist.launched_at = decimal.Decimal('1.1')
        exist.deleted_at = decimal.Decimal('5.1')
        launched_at = decimal.Decimal('1.1')
        results = self.mox.CreateMockAnything()
        models.InstanceDeletes.find(INSTANCE_ID_1, dt.dt_from_decimal(
            launched_at)).AndReturn(results)
        results.count().AndReturn(1)
        delete = self.mox.CreateMockAnything()
        delete.launched_at = decimal.Decimal('1.1')
        delete.deleted_at = decimal.Decimal('5.1')
        results.__getitem__(0).AndReturn(delete)

        self.mox.ReplayAll()

        nova_verifier._verify_for_delete(exist)
        self.mox.VerifyAll()

    def test_verify_for_delete_non_delete(self):
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('1.1')
        exist = self.mox.CreateMockAnything()
        exist.delete = None
        exist.instance = INSTANCE_ID_1
        exist.launched_at = launched_at
        exist.deleted_at = None
        exist.audit_period_ending = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceDeletes.find(
            INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
            dt.dt_from_decimal(deleted_at)).AndReturn(results)
        results.count().AndReturn(0)

        self.mox.ReplayAll()

        nova_verifier._verify_for_delete(exist)
        self.mox.VerifyAll()

    def test_verify_for_delete_non_delete_found_deletes(self):
        exist = self.mox.CreateMockAnything()
        exist.delete = None
        exist.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('1.3')
        exist.launched_at = launched_at
        exist.deleted_at = None
        exist.audit_period_ending = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceDeletes.find(
            INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
            dt.dt_from_decimal(deleted_at)).AndReturn(results)
        results.count().AndReturn(1)

        self.mox.ReplayAll()

        try:
            nova_verifier._verify_for_delete(exist)
            self.fail()
        except VerificationException, ve:
            msg = 'Found InstanceDeletes for non-delete exist'
            self.assertEqual(ve.reason, msg)

        self.mox.VerifyAll()

    def test_verify_for_delete_launched_at_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.delete = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        exist.deleted_at = decimal.Decimal('5.1')
        exist.delete.launched_at = decimal.Decimal('2.1')
        exist.delete.deleted_at = decimal.Decimal('5.1')
        self.mox.ReplayAll()

        try:
            nova_verifier._verify_for_delete(exist)
            self.fail()
        except FieldMismatch, fm:
            self.assertEqual(fm.field_name, 'launched_at')
            self.assertEqual(fm.expected, decimal.Decimal('1.1'))
            self.assertEqual(fm.actual, decimal.Decimal('2.1'))
        self.mox.VerifyAll()

    def test_verify_for_delete_deleted_at_mismatch(self):
        exist = self.mox.CreateMockAnything()
        exist.delete = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        exist.deleted_at = decimal.Decimal('5.1')
        exist.delete.launched_at = decimal.Decimal('1.1')
        exist.delete.deleted_at = decimal.Decimal('6.1')
        self.mox.ReplayAll()

        try:
            nova_verifier._verify_for_delete(exist)
            self.fail()
        except FieldMismatch, fm:
            self.assertEqual(fm.field_name, 'deleted_at')
            self.assertEqual(fm.expected, decimal.Decimal('5.1'))
            self.assertEqual(fm.actual, decimal.Decimal('6.1'))
        self.mox.VerifyAll()
    def test_verify_with_reconciled_data(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        exists.launched_at = launched_at
        results = self.mox.CreateMockAnything()
        models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(1)
        launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
        recs = self.mox.CreateMockAnything()
        models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
        recs.count().AndReturn(1)
        reconcile = self.mox.CreateMockAnything()
        reconcile.deleted_at = None
        recs[0].AndReturn(reconcile)
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        nova_verifier._verify_for_launch(exists, launch=reconcile,
                                         launch_type='InstanceReconcile')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        nova_verifier._verify_for_delete(exists, delete=None,
                                         delete_type='InstanceReconcile')
        self.mox.ReplayAll()
        nova_verifier._verify_with_reconciled_data(exists)
        self.mox.VerifyAll()

    def test_verify_with_reconciled_data_deleted(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('2.1')
        exists.launched_at = launched_at
        exists.deleted_at = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(1)
        launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
        recs = self.mox.CreateMockAnything()
        models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
        recs.count().AndReturn(1)
        reconcile = self.mox.CreateMockAnything()
        reconcile.deleted_at = deleted_at
        recs[0].AndReturn(reconcile)
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        nova_verifier._verify_for_launch(exists, launch=reconcile,
                                         launch_type='InstanceReconcile')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        nova_verifier._verify_for_delete(exists, delete=reconcile,
                                         delete_type='InstanceReconcile')
        self.mox.ReplayAll()
        nova_verifier._verify_with_reconciled_data(exists)
        self.mox.VerifyAll()

    def test_verify_with_reconciled_data_not_launched(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        exists.launched_at = None
        self.mox.ReplayAll()
        with self.assertRaises(VerificationException) as cm:
            nova_verifier._verify_with_reconciled_data(exists)
        exception = cm.exception
        self.assertEquals(exception.reason, 'Exists without a launched_at')
        self.mox.VerifyAll()

    def test_verify_with_reconciled_data_ambiguous_results(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('2.1')
        exists.launched_at = launched_at
        exists.deleted_at = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(1)
        launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
        recs = self.mox.CreateMockAnything()
        models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
        recs.count().AndReturn(2)
        self.mox.ReplayAll()
        with self.assertRaises(AmbiguousResults) as cm:
            nova_verifier._verify_with_reconciled_data(exists)
        exception = cm.exception
        self.assertEquals(exception.object_type, 'InstanceReconcile')
        self.mox.VerifyAll()

    def test_verify_with_reconciled_data_instance_not_found(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('2.1')
        exists.launched_at = launched_at
        exists.deleted_at = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(0)
        self.mox.ReplayAll()
        with self.assertRaises(NotFound) as cm:
            nova_verifier._verify_with_reconciled_data(exists)
        exception = cm.exception
        self.assertEquals(exception.object_type, 'InstanceReconcile')
        self.mox.VerifyAll()

    def test_verify_with_reconciled_data_reconcile_not_found(self):
        exists = self.mox.CreateMockAnything()
        exists.instance = INSTANCE_ID_1
        launched_at = decimal.Decimal('1.1')
        deleted_at = decimal.Decimal('2.1')
        exists.launched_at = launched_at
        exists.deleted_at = deleted_at
        results = self.mox.CreateMockAnything()
        models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
            .AndReturn(results)
        results.count().AndReturn(1)
        launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
        recs = self.mox.CreateMockAnything()
        models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
        recs.count().AndReturn(0)
        self.mox.ReplayAll()
        with self.assertRaises(NotFound) as cm:
            nova_verifier._verify_with_reconciled_data(exists)
        exception = cm.exception
        self.assertEquals(exception.object_type, 'InstanceReconcile')
        self.mox.VerifyAll()
    def test_verify_pass(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_verified')
        nova_verifier._verify_for_launch(exist)
        nova_verifier._verify_for_delete(exist)
        exist.mark_verified()
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertTrue(result)
        self.mox.VerifyAll()

    def test_verify_no_launched_at(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = None
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        exist.mark_failed(reason="Exists without a launched_at")
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)\
            .AndRaise(NotFound('InstanceReconcile', {}))
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_fails_reconciled_verify_uses_second_exception(self):
        exist = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        ex1 = VerificationException('test1')
        nova_verifier._verify_for_launch(exist).AndRaise(ex1)
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)\
            .AndRaise(VerificationException('test2'))
        exist.mark_failed(reason='test2')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_launch_fail(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        verify_exception = VerificationException('test')
        nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)\
            .AndRaise(NotFound('InstanceReconcile', {}))
        exist.mark_failed(reason='test')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_fail_reconcile_success(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_verified')
        verify_exception = VerificationException('test')
        nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)
        exist.mark_verified(reconciled=True)
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertTrue(result)
        self.mox.VerifyAll()

    def test_verify_fail_with_reconciled_data_exception(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        verify_exception = VerificationException('test')
        nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)\
            .AndRaise(Exception())
        exist.mark_failed(reason='Exception')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_delete_fail(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        verify_exception = VerificationException('test')
        nova_verifier._verify_for_launch(exist)
        nova_verifier._verify_for_delete(exist).AndRaise(verify_exception)
        self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
        nova_verifier._verify_with_reconciled_data(exist)\
            .AndRaise(NotFound('InstanceReconcile', {}))
        exist.mark_failed(reason='test')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_exception_during_launch(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        nova_verifier._verify_for_launch(exist).AndRaise(Exception())
        exist.mark_failed(reason='Exception')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()

    def test_verify_exception_during_delete(self):
        exist = self.mox.CreateMockAnything()
        exist.launched_at = decimal.Decimal('1.1')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
        self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
        self.mox.StubOutWithMock(exist, 'mark_failed')
        nova_verifier._verify_for_launch(exist)
        nova_verifier._verify_for_delete(exist).AndRaise(Exception())
        exist.mark_failed(reason='Exception')
        self.mox.ReplayAll()
        result, exists = nova_verifier._verify(exist)
        self.assertFalse(result)
        self.mox.VerifyAll()
    def test_verify_for_range_without_callback(self):
        when_max = datetime.datetime.utcnow()
        results = self.mox.CreateMockAnything()
        models.InstanceExists.PENDING = 'pending'
        models.InstanceExists.VERIFYING = 'verifying'
        models.InstanceExists.find(
            ending_max=when_max, status='pending').AndReturn(results)
        results.count().AndReturn(2)
        exist1 = self.mox.CreateMockAnything()
        exist2 = self.mox.CreateMockAnything()
        results.__getslice__(0, 1000).AndReturn(results)
        results.__iter__().AndReturn([exist1, exist2].__iter__())
        exist1.update_status('verifying')
        exist2.update_status('verifying')
        exist1.save()
        exist2.save()
        self.pool.apply_async(nova_verifier._verify, args=(exist1,),
                              callback=None)
        self.pool.apply_async(nova_verifier._verify, args=(exist2,),
                              callback=None)
        self.mox.ReplayAll()
        self.verifier.verify_for_range(when_max)
        self.mox.VerifyAll()

    def test_verify_for_range_with_callback(self):
        callback = self.mox.CreateMockAnything()
        when_max = datetime.datetime.utcnow()
        results = self.mox.CreateMockAnything()
        models.InstanceExists.PENDING = 'pending'
        models.InstanceExists.VERIFYING = 'verifying'
        models.InstanceExists.find(
            ending_max=when_max, status='pending').AndReturn(results)
        results.count().AndReturn(2)
        exist1 = self.mox.CreateMockAnything()
        exist2 = self.mox.CreateMockAnything()
        results.__getslice__(0, 1000).AndReturn(results)
        results.__iter__().AndReturn([exist1, exist2].__iter__())
        exist1.update_status('verifying')
        exist2.update_status('verifying')
        exist1.save()
        exist2.save()
        self.pool.apply_async(nova_verifier._verify, args=(exist1,),
                              callback=callback)
        self.pool.apply_async(nova_verifier._verify, args=(exist2,),
                              callback=callback)
        self.mox.ReplayAll()
        self.verifier.verify_for_range(when_max, callback=callback)
        self.mox.VerifyAll()

    def test_reconcile_failed(self):
        self.verifier.reconcile = True
        exists1 = self.mox.CreateMockAnything()
        exists2 = self.mox.CreateMockAnything()
        self.verifier.failed = [exists1, exists2]
        self.reconciler.failed_validation(exists1)
        self.reconciler.failed_validation(exists2)
        self.mox.ReplayAll()
        self.verifier.reconcile_failed()
        self.assertEqual(len(self.verifier.failed), 0)
        self.mox.VerifyAll()
    def test_send_verified_notification_routing_keys(self):
        connection = self.mox.CreateMockAnything()
        exchange = self.mox.CreateMockAnything()
        exist = self.mox.CreateMockAnything()
        exist.raw = self.mox.CreateMockAnything()
        exist_dict = [
            'monitor.info',
            {
                'event_type': 'test',
                'message_id': 'some_uuid'
            }
        ]
        exist_str = json.dumps(exist_dict)
        exist.raw.json = exist_str
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn('some_other_uuid')
        self.mox.StubOutWithMock(kombu.pools, 'producers')
        self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
        routing_keys = ['notifications.info', 'monitor.info']
        for key in routing_keys:
            producer = self.mox.CreateMockAnything()
            producer.channel = self.mox.CreateMockAnything()
            kombu.pools.producers[connection].AndReturn(producer)
            producer.acquire(block=True).AndReturn(producer)
            producer.__enter__().AndReturn(producer)
            kombu.common.maybe_declare(exchange, producer.channel)
            message = {'event_type': 'compute.instance.exists.verified.old',
                       'message_id': 'some_other_uuid',
                       'original_message_id': 'some_uuid'}
            producer.publish(message, key)
            producer.__exit__(None, None, None)
        self.mox.ReplayAll()

        self.verifier.send_verified_notification(exist, exchange, connection,
                                                 routing_keys=routing_keys)
        self.mox.VerifyAll()

    def test_send_verified_notification_default_routing_key(self):
        connection = self.mox.CreateMockAnything()
        exchange = self.mox.CreateMockAnything()
        exist = self.mox.CreateMockAnything()
        exist.raw = self.mox.CreateMockAnything()
        exist_dict = [
            'monitor.info',
            {
                'event_type': 'test',
                'message_id': 'some_uuid'
            }
        ]
        exist_str = json.dumps(exist_dict)
        exist.raw.json = exist_str
        self.mox.StubOutWithMock(kombu.pools, 'producers')
        self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
        producer = self.mox.CreateMockAnything()
        producer.channel = self.mox.CreateMockAnything()
        kombu.pools.producers[connection].AndReturn(producer)
        producer.acquire(block=True).AndReturn(producer)
        producer.__enter__().AndReturn(producer)
        kombu.common.maybe_declare(exchange, producer.channel)
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn('some_other_uuid')
        message = {'event_type': 'compute.instance.exists.verified.old',
                   'message_id': 'some_other_uuid',
                   'original_message_id': 'some_uuid'}
        producer.publish(message, exist_dict[0])
        producer.__exit__(None, None, None)
        self.mox.ReplayAll()

        self.verifier.send_verified_notification(exist, exchange, connection)
        self.mox.VerifyAll()
@ -3,6 +3,7 @@ import logging
import os
import mox
from stacktach import stacklog
import __builtin__
from stacktach.stacklog import ExchangeLogger
from tests.unit import StacktachBaseTestCase

1216  tests/unit/test_verifier_db.py  Normal file
File diff suppressed because it is too large
@ -25,7 +25,7 @@ TENANT_ID_2 = 'testtenantid2'
from stacktach import datetime_to_decimal as dt

IMAGE_UUID_1 = "12345678-6352-4dbc-8271-96cc54bf14cd"
IMAGE_UUID_1 = "1"

INSTANCE_ID_1 = "08f685d9-6352-4dbc-8271-96cc54bf14cd"
INSTANCE_ID_2 = "515adf96-41d3-b86d-5467-e584edc61dab"
@ -56,14 +56,6 @@ OS_VERSION_1 = "1"
OS_VERSION_2 = "2"

TIMESTAMP_1 = "2013-06-20 17:31:57.939614"
SETTLE_TIME = 5
SETTLE_UNITS = "minutes"
TICK_TIME = 10
HOST = '10.0.0.1'
PORT = '5672'
VIRTUAL_HOST = '/'
USERID = 'rabbit'
PASSWORD = 'password'

def decimal_utc(t = datetime.datetime.utcnow()):
    return dt.dt_to_decimal(t)
@ -146,28 +138,3 @@ def create_tracker(mox, request_id, lifecycle, start, last_timing=None,
    tracker.last_timing=last_timing
    tracker.duration=duration
    return tracker


class FakeVerifierConfig(object):
    def __init__(self, host, port, virtual_host, userid, password, tick_time,
                 settle_time, settle_units, durable_queue, topics, notifs):
        self.host = lambda: host
        self.port = lambda: port
        self.virtual_host = lambda: virtual_host
        self.userid = lambda: userid
        self.password = lambda: password
        self.pool_size = lambda: 5
        self.tick_time = lambda: tick_time
        self.settle_time = lambda: settle_time
        self.settle_units = lambda: settle_units
        self.durable_queue = lambda: durable_queue
        self.topics = lambda: topics
        self.enable_notifications = lambda: notifs


def make_verifier_config(notifs):
    topics = {'exchange': ['notifications.info']}
    config = FakeVerifierConfig(HOST, PORT, VIRTUAL_HOST, USERID,
                                PASSWORD, TICK_TIME, SETTLE_TIME,
                                SETTLE_UNITS, True, topics, notifs)
    return config
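Note: with FakeVerifierConfig and make_verifier_config removed above, the restored dbverifier.Verifier added later in this commit reads its settings straight from a plain dict loaded from the verifier JSON config. A minimal sketch of the shape that code appears to expect (keys taken from the dbverifier code below; the values here are illustrative placeholders, not from any real deployment):

    verifier_config = {
        'tick_time': 10,
        'settle_time': 5,
        'settle_units': 'minutes',
        'pool_size': 2,
        'enable_notifications': True,
        'reconcile': False,
        'rabbit': {
            'durable_queue': False,
            'host': '10.0.0.1',
            'port': 5672,
            'userid': 'rabbit',
            'password': 'password',
            'virtual_host': '/',
            'exchange_name': 'nova',
            'routing_keys': ['notifications.info'],
        },
    }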
@ -1,153 +0,0 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import datetime
import os
import sys
import time
import multiprocessing

from django.db import transaction


POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')


def _has_field(d1, d2, field1, field2=None):
    if not field2:
        field2 = field1

    return d1.get(field1) is not None and d2.get(field2) is not None


def _verify_simple_field(d1, d2, field1, field2=None):
    if not field2:
        field2 = field1

    if not _has_field(d1, d2, field1, field2):
        return False
    else:
        if d1[field1] != d2[field2]:
            return False

    return True


def _verify_date_field(d1, d2, same_second=False):
    if d1 and d2:
        if d1 == d2:
            return True
        elif same_second and int(d1) == int(d2):
            return True
    return False


class Verifier(object):
    def __init__(self, config, pool=None, reconciler=None):
        self.config = config
        self.pool = pool or multiprocessing.Pool(config.pool_size())
        self.enable_notifications = config.enable_notifications()
        self.reconciler = reconciler
        self.results = []
        self.failed = []

    def clean_results(self):
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    if self.reconciler and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        errored = finished - successful
        return len(self.results), successful, errored

    def _keep_running(self):
        return True

    def _utcnow(self):
        return datetime.datetime.utcnow()

    def _run(self, callback=None):
        tick_time = self.config.tick_time()
        settle_units = self.config.settle_units()
        settle_time = self.config.settle_time()
        while self._keep_running():
            with transaction.commit_on_success():
                now = self._utcnow()
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = self.verify_for_range(ending_max, callback=callback)
                values = ((self.exchange(), new,) + self.clean_results())
                if self.reconciler:
                    self.reconcile_failed()
                msg = "%s: N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
            time.sleep(tick_time)

    def run(self):
        if self.enable_notifications:
            exchange_name = self.exchange()
            exchange = message_service.create_exchange(
                exchange_name, 'topic',
                durable=self.config.durable_queue())
            routing_keys = self.config.topics()[exchange_name]

            with message_service.create_connection(
                    self.config.host(), self.config.port(),
                    self.config.userid(), self.config.password(),
                    "librabbitmq", self.config.virtual_host()) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        self.send_verified_notification(
                            exist, conn, exchange, routing_keys=routing_keys)

                try:
                    self._run(callback=callback)
                except Exception, e:
                    print e
                    raise e
        else:
            self._run()

    def verify_for_range(self, ending_max, callback=None):
        pass

    def reconcile_failed(self):
        pass

    def exchange(self):
        pass
@ -1,89 +0,0 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import json
import os

config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
                                 'stacktach_verifier_config.json')
try:
    from local_settings import *
    config_filename = STACKTACH_VERIFIER_CONFIG
except ImportError:
    pass

config = None
with open(config_filename, "r") as f:
    config = json.load(f)


def enable_notifications():
    return config['enable_notifications']


def topics():
    return config['rabbit']['topics']


def tick_time():
    return config['tick_time']


def settle_units():
    return config['settle_units']


def settle_time():
    return config['settle_time']


def reconcile():
    return config.get('reconcile', False)


def reconciler_config():
    return config.get(
        'reconciler_config', '/etc/stacktach/reconciler_config.json')


def pool_size():
    return config['pool_size']


def durable_queue():
    return config['rabbit']['durable_queue']


def host():
    return config['rabbit']['host']


def port():
    return config['rabbit']['port']


def userid():
    return config['rabbit']['userid']


def password():
    return config['rabbit']['password']


def virtual_host():
    return config['rabbit']['virtual_host']

529  verifier/dbverifier.py  Normal file
@ -0,0 +1,529 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import argparse
import datetime
import json
import os
import sys
import time
import uuid

from django.db import transaction
import kombu.common
import kombu.entity
import kombu.pools
import multiprocessing

POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import stacklog

stacklog.set_default_logger_name('verifier')
LOG = stacklog.get_logger()

from stacktach import models
from stacktach import datetime_to_decimal as dt
from stacktach import reconciler
from verifier import AmbiguousResults
from verifier import FieldMismatch
from verifier import NotFound
from verifier import VerificationException


def _list_exists(ending_max=None, status=None):
    params = {}
    if ending_max:
        params['audit_period_ending__lte'] = dt.dt_to_decimal(ending_max)
    if status:
        params['status'] = status
    return models.InstanceExists.objects.select_related()\
        .filter(**params).order_by('id')


def _find_launch(instance, launched):
    start = launched - datetime.timedelta(microseconds=launched.microsecond)
    end = start + datetime.timedelta(microseconds=999999)
    params = {'instance': instance,
              'launched_at__gte': dt.dt_to_decimal(start),
              'launched_at__lte': dt.dt_to_decimal(end)}
    return models.InstanceUsage.objects.filter(**params)


def _find_reconcile(instance, launched):
    start = launched - datetime.timedelta(microseconds=launched.microsecond)
    end = start + datetime.timedelta(microseconds=999999)
    params = {'instance': instance,
              'launched_at__gte': dt.dt_to_decimal(start),
              'launched_at__lte': dt.dt_to_decimal(end)}
    return models.InstanceReconcile.objects.filter(**params)


def _find_delete(instance, launched, deleted_max=None):
    start = launched - datetime.timedelta(microseconds=launched.microsecond)
    end = start + datetime.timedelta(microseconds=999999)
    params = {'instance': instance,
              'launched_at__gte': dt.dt_to_decimal(start),
              'launched_at__lte': dt.dt_to_decimal(end)}
    if deleted_max:
        params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
    return models.InstanceDeletes.objects.filter(**params)


def _mark_exist_verified(exist,
                         reconciled=False,
                         reason=None):
    if not reconciled:
        exist.status = models.InstanceExists.VERIFIED
    else:
        exist.status = models.InstanceExists.RECONCILED
    if reason is not None:
        exist.fail_reason = reason

    exist.save()


def _mark_exist_failed(exist, reason=None):
    exist.status = models.InstanceExists.FAILED
    if reason:
        exist.fail_reason = reason
    exist.save()


def _has_field(d1, d2, field1, field2=None):
    if not field2:
        field2 = field1

    return d1.get(field1) is not None and d2.get(field2) is not None


def _verify_simple_field(d1, d2, field1, field2=None):
    if not field2:
        field2 = field1

    if not _has_field(d1, d2, field1, field2):
        return False
    else:
        if d1[field1] != d2[field2]:
            return False

    return True


def _verify_date_field(d1, d2, same_second=False):
    if d1 and d2:
        if d1 == d2:
            return True
        elif same_second and int(d1) == int(d2):
            return True
    return False
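# Illustrative note (not part of the committed file): with same_second=True,
# _verify_date_field treats two decimal timestamps as equal whenever they fall
# within the same whole second, which is why launch times such as 1.0 and 1.4
# verify cleanly in the tests above while 1.1 and 2.1 do not. For example,
# using hypothetical values:
#
#     _verify_date_field(decimal.Decimal('1.0'), decimal.Decimal('1.4'),
#                        same_second=True)   # True: int(1.0) == int(1.4)
#     _verify_date_field(decimal.Decimal('1.1'), decimal.Decimal('2.1'),
#                        same_second=True)   # False: 1 != 2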
def _verify_field_mismatch(exists, launch):
    if not _verify_date_field(launch.launched_at, exists.launched_at,
                              same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)

    if launch.instance_type_id != exists.instance_type_id:
        raise FieldMismatch('instance_type_id', exists.instance_type_id,
                            launch.instance_type_id)

    if launch.tenant != exists.tenant:
        raise FieldMismatch('tenant', exists.tenant,
                            launch.tenant)

    if launch.rax_options != exists.rax_options:
        raise FieldMismatch('rax_options', exists.rax_options,
                            launch.rax_options)

    if launch.os_architecture != exists.os_architecture:
        raise FieldMismatch('os_architecture', exists.os_architecture,
                            launch.os_architecture)

    if launch.os_version != exists.os_version:
        raise FieldMismatch('os_version', exists.os_version,
                            launch.os_version)

    if launch.os_distro != exists.os_distro:
        raise FieldMismatch('os_distro', exists.os_distro,
                            launch.os_distro)


def _verify_for_launch(exist, launch=None, launch_type="InstanceUsage"):

    if not launch and exist.usage:
        launch = exist.usage
    elif not launch:
        if models.InstanceUsage.objects\
                .filter(instance=exist.instance).count() > 0:
            launches = _find_launch(exist.instance,
                                    dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            elif count == 0:
                raise NotFound(launch_type, query)
            launch = launches[0]
        else:
            raise NotFound(launch_type, {'instance': exist.instance})

    _verify_field_mismatch(exist, launch)


def _verify_for_delete(exist, delete=None, delete_type="InstanceDelete"):

    if not delete and exist.delete:
        # We know we have a delete and we have it's id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = _find_delete(exist.instance,
                                   dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = _find_delete(exist.instance, launched_at, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        if not _verify_date_field(delete.launched_at, exist.launched_at,
                                  same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)

        if not _verify_date_field(delete.deleted_at, exist.deleted_at,
                                  same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)
def _verify_with_reconciled_data(exist):
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        recs = _find_reconcile(exist.instance,
                               dt.dt_from_decimal(exist.launched_at))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete,
                       delete_type="InstanceReconcile")


def _attempt_reconciled_verify(exist, orig_e):
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        _mark_exist_verified(exist)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        _mark_exist_failed(exist, reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        _mark_exist_failed(exist, reason=str(rec_e))
    except Exception, rec_e:
        _mark_exist_failed(exist, reason=rec_e.__class__.__name__)
        LOG.exception(rec_e)
    return verified


def _verify(exist):
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        verified = True
        _mark_exist_verified(exist)
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        _mark_exist_failed(exist, reason=e.__class__.__name__)
        LOG.exception(e)

    return verified, exist


def _send_notification(message, routing_key, connection, exchange):
    with kombu.pools.producers[connection].acquire(block=True) as producer:
        kombu.common.maybe_declare(exchange, producer.channel)
        producer.publish(message, routing_key)


def send_verified_notification(exist, connection, exchange, routing_keys=None):
    body = exist.raw.json
    json_body = json.loads(body)
    json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
    json_body[1]['original_message_id'] = json_body[1]['message_id']
    json_body[1]['message_id'] = str(uuid.uuid4())
    if routing_keys is None:
        _send_notification(json_body[1], json_body[0], connection, exchange)
    else:
        for key in routing_keys:
            _send_notification(json_body[1], key, connection, exchange)
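# Illustrative sketch (not part of the committed file): for a raw exists
# notification shaped like ['monitor.info', {'event_type': 'test',
# 'message_id': 'some_uuid'}], send_verified_notification above republishes
# the body with event_type 'compute.instance.exists.verified.old', a freshly
# generated message_id, and the incoming message_id preserved as
# 'original_message_id'. It routes to json_body[0] by default, or to each
# entry in routing_keys when that list is supplied, e.g.:
#
#     send_verified_notification(exist, connection, exchange,
#                                routing_keys=['notifications.info'])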
def _create_exchange(name, type, exclusive=False, auto_delete=False,
|
||||||
|
durable=True):
|
||||||
|
return kombu.entity.Exchange(name, type=type, exclusive=auto_delete,
|
||||||
|
auto_delete=exclusive, durable=durable)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_connection(config):
|
||||||
|
rabbit = config['rabbit']
|
||||||
|
conn_params = dict(hostname=rabbit['host'],
|
||||||
|
port=rabbit['port'],
|
||||||
|
userid=rabbit['userid'],
|
||||||
|
password=rabbit['password'],
|
||||||
|
transport="librabbitmq",
|
||||||
|
virtual_host=rabbit['virtual_host'])
|
||||||
|
return kombu.connection.BrokerConnection(**conn_params)
|
||||||
|
|
||||||
|
|
||||||
|
class Verifier(object):

    def __init__(self, config, pool=None, rec=None):
        self.config = config
        self.pool = pool or multiprocessing.Pool(self.config['pool_size'])
        self.reconcile = self.config.get('reconcile', False)
        self.reconciler = self._load_reconciler(config, rec=rec)
        self.results = []
        self.failed = []

    def _load_reconciler(self, config, rec=None):
        if rec:
            return rec

        if self.reconcile:
            config_loc = config.get('reconciler_config',
                                    '/etc/stacktach/reconciler_config.json')
            with open(config_loc, 'r') as rec_config_file:
                rec_config = json.load(rec_config_file)
                return reconciler.Reconciler(rec_config)

    def clean_results(self):
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    if self.reconcile and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        errored = finished - successful
        return len(self.results), successful, errored

    def verify_for_range(self, ending_max, callback=None):
        exists = _list_exists(ending_max=ending_max,
                              status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("Adding %s exists to queue." % count)
        while added < count:
            for exist in exists[0:1000]:
                exist.status = models.InstanceExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        for failed_exist in self.failed:
            if self.reconciler.failed_validation(failed_exist):
                _mark_exist_verified(failed_exist, reconciled=True)
        self.failed = []

    def _keep_running(self):
        return True

    def _utcnow(self):
        return datetime.datetime.utcnow()

    def _run(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        while self._keep_running():
            with transaction.commit_on_success():
                now = self._utcnow()
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = self.verify_for_range(ending_max,
                                            callback=callback)
                values = ((new,) + self.clean_results())
                if self.reconcile:
                    self.reconcile_failed()
                msg = "N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
            time.sleep(tick_time)

    def run(self):
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run(callback=callback)
        else:
            self._run()

    def _run_once(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        now = self._utcnow()
        kwargs = {settle_units: settle_time}
        ending_max = now - datetime.timedelta(**kwargs)
        new = self.verify_for_range(ending_max, callback=callback)

        LOG.info("Verifying %s exist events" % new)
        while len(self.results) > 0:
            LOG.info("P: %s, F: %s, E: %s" % self.clean_results())
            if self.reconcile:
                self.reconcile_failed()
            time.sleep(tick_time)

    def run_once(self):
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run_once(callback=callback)
        else:
            self._run_once()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Stacktach Instance Exists Verifier")
    parser.add_argument('--tick-time',
                        help='Time in seconds the verifier will sleep before '
                             'it will check for new exists records.',
                        default=30)
    parser.add_argument('--run-once',
                        help='Check database once and verify all returned '
                             'exists records, then stop',
                        type=bool,
                        default=False)
    parser.add_argument('--settle-time',
                        help='Time the verifier will wait for records to '
                             'settle before it will verify them.',
                        default=10)
    parser.add_argument('--settle-units',
                        help='Units for settle time',
                        default='minutes')
    parser.add_argument('--pool-size',
                        help='Number of processes created to verify records',
                        type=int,
                        default=10)
    args = parser.parse_args()
    config = {'tick_time': args.tick_time, 'settle_time': args.settle_time,
              'settle_units': args.settle_units, 'pool_size': args.pool_size}

    verifier = Verifier(config)
    if args.run_once:
        verifier.run_once()
    else:
        verifier.run()
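Two small notes on the block above. First, the settle window in _run/_run_once is built by keyword-expanding the unit name, so the defaults above (--settle-units 'minutes', --settle-time 10) produce a ten-minute cutoff, as sketched below. Second, --run-once is declared with type=bool, and bool() of any non-empty string is True in Python, so passing '--run-once False' on the command line still enables run-once mode.

    import datetime

    settle_units = 'minutes'              # default from --settle-units
    settle_time = 10                      # default from --settle-time
    kwargs = {settle_units: settle_time}  # {'minutes': 10}
    ending_max = datetime.datetime.utcnow() - datetime.timedelta(**kwargs)
    # verify_for_range(ending_max) then only picks up exists rows older
    # than this cutoff.
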
@ -1,172 +0,0 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json

import os
import sys
import uuid
from verifier.base_verifier import Verifier


POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import models
from verifier import FieldMismatch, VerificationException, base_verifier
from verifier import NotFound
from stacktach import datetime_to_decimal as dt
import datetime
from stacktach import stacklog, message_service

LOG = stacklog.get_logger('verifier')


def _verify_field_mismatch(exists, usage):
    if not base_verifier._verify_date_field(
            usage.created_at, exists.created_at, same_second=True):
        raise FieldMismatch('created_at', exists.created_at,
                            usage.created_at)

    if usage.owner != exists.owner:
        raise FieldMismatch('owner', exists.owner,
                            usage.owner)

    if usage.size != exists.size:
        raise FieldMismatch('size', exists.size,
                            usage.size)


def _verify_for_usage(exist, usage=None):
    usage_type = "ImageUsage"
    if not usage and exist.usage:
        usage = exist.usage
    elif not usage:
        usages = models.ImageUsage.objects.filter(uuid=exist.uuid)
        usage_count = usages.count()
        if usage_count == 0:
            query = {'uuid': exist.uuid}
            raise NotFound(usage_type, query)
        usage = usages[0]
    _verify_field_mismatch(exist, usage)


def _verify_for_delete(exist, delete=None):
    delete_type = "ImageDelete"
    if not delete and exist.delete:
        # We know we have a delete and we have its id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.ImageDeletes.find(uuid=exist.uuid)
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.ImageDeletes.find(
                exist.uuid, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        if not base_verifier._verify_date_field(
                delete.created_at, exist.created_at, same_second=True):
            raise FieldMismatch('created_at', exist.created_at,
                                delete.created_at)

        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)


def _verify(exist):
    verified = False
    try:
        _verify_for_usage(exist)
        _verify_for_delete(exist)

        verified = True
        exist.mark_verified()
    except Exception, e:
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("glance: %s" % e)

    return verified, exist


class GlanceVerifier(Verifier):
    def __init__(self, config, pool=None):
        super(GlanceVerifier, self).__init__(config, pool=pool)

    def verify_for_range(self, ending_max, callback=None):
        exists = models.ImageExists.find(
            ending_max=ending_max, status=models.ImageExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("glance: Adding %s exists to queue." % count)
        while added < count:
            for exist in exists[0:1000]:
                exist.status = models.ImageExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "glance: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'image.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            message_service.send_notification(json_body[1], json_body[0],
                                              connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(json_body[1], key,
                                                  connection, exchange)

    def exchange(self):
        return 'glance'
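Both verifiers hand records to a multiprocessing pool under the same contract: the worker function returns (verified, exist) and the callback receives that tuple back in the parent process. A self-contained sketch of that pattern with toy data, no Django models involved:

    import multiprocessing

    def _toy_verify(record):
        # stand-in for _verify(exist): returns (verified, record)
        return record % 2 == 0, record

    def _on_result(result):
        verified, record = result
        if verified:
            print "record %s verified" % record

    if __name__ == '__main__':
        pool = multiprocessing.Pool(2)
        results = [pool.apply_async(_toy_verify, args=(r,), callback=_on_result)
                   for r in range(4)]
        pool.close()
        pool.join()
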
@ -1,268 +0,0 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import argparse
import datetime
import json
import os
import sys
import uuid


POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)

from verifier import base_verifier
from stacktach import models
from stacktach import datetime_to_decimal as dt
from verifier import FieldMismatch
from verifier import AmbiguousResults
from verifier import NotFound
from verifier import VerificationException
from stacktach import stacklog, message_service

LOG = stacklog.get_logger('verifier')


def _verify_field_mismatch(exists, launch):
    if not base_verifier._verify_date_field(
            launch.launched_at, exists.launched_at, same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)

    if launch.instance_type_id != exists.instance_type_id:
        raise FieldMismatch('instance_type_id', exists.instance_type_id,
                            launch.instance_type_id)

    if launch.tenant != exists.tenant:
        raise FieldMismatch('tenant', exists.tenant,
                            launch.tenant)

    if launch.rax_options != exists.rax_options:
        raise FieldMismatch('rax_options', exists.rax_options,
                            launch.rax_options)

    if launch.os_architecture != exists.os_architecture:
        raise FieldMismatch('os_architecture', exists.os_architecture,
                            launch.os_architecture)

    if launch.os_version != exists.os_version:
        raise FieldMismatch('os_version', exists.os_version,
                            launch.os_version)

    if launch.os_distro != exists.os_distro:
        raise FieldMismatch('os_distro', exists.os_distro,
                            launch.os_distro)


def _verify_for_launch(exist, launch=None,
                       launch_type="InstanceUsage"):

    if not launch and exist.usage:
        launch = exist.usage
    elif not launch:
        if models.InstanceUsage.objects\
                .filter(instance=exist.instance).count() > 0:
            launches = models.InstanceUsage.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            elif count == 0:
                raise NotFound(launch_type, query)
            launch = launches[0]
        else:
            raise NotFound(launch_type, {'instance': exist.instance})

    _verify_field_mismatch(exist, launch)


def _verify_for_delete(exist, delete=None,
                       delete_type="InstanceDeletes"):

    if not delete and exist.delete:
        # We know we have a delete and we have its id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.InstanceDeletes.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.InstanceDeletes.find(exist.instance, launched_at,
                                                  deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %s for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        if not base_verifier._verify_date_field(
                delete.launched_at, exist.launched_at, same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)

        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch(
                'deleted_at', exist.deleted_at, delete.deleted_at)


def _verify_with_reconciled_data(exist):
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        recs = models.InstanceReconcile.find(
            exist.instance, dt.dt_from_decimal(exist.launched_at))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete, delete_type="InstanceReconcile")


def _attempt_reconciled_verify(exist, orig_e):
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        exist.mark_verified(reconciled=True)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        exist.mark_failed(reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        exist.mark_failed(reason=str(rec_e))
    except Exception, rec_e:
        exist.mark_failed(reason=rec_e.__class__.__name__)
        LOG.exception("nova: %s" % rec_e)
    return verified


def _verify(exist):
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        verified = True
        exist.mark_verified()
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("nova: %s" % e)

    return verified, exist


class NovaVerifier(base_verifier.Verifier):
    def __init__(self, config, pool=None, reconciler=None):
        super(NovaVerifier, self).__init__(config,
                                           pool=pool,
                                           reconciler=reconciler)

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            message_service.send_notification(
                json_body[1], json_body[0], connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(
                    json_body[1], key, connection, exchange)

    def verify_for_range(self, ending_max, callback=None):
        exists = models.InstanceExists.find(
            ending_max=ending_max, status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("nova: Adding %s exists to queue." % count)
        while added < count:
            for exist in exists[0:1000]:
                exist.update_status(models.InstanceExists.VERIFYING)
                exist.save()
                result = self.pool.apply_async(
                    _verify, args=(exist,),
                    callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "nova: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        for failed_exist in self.failed:
            self.reconciler.failed_validation(failed_exist)
        self.failed = []

    def exchange(self):
        return 'nova'
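The nova _verify above falls back in a fixed order: the primary launch/delete checks run first, the InstanceReconcile data is consulted only when those raise VerificationException, and any other exception fails the record outright. A stripped-down sketch of that control flow, with the database-touching checks replaced by caller-supplied stubs and using the module's NotFound/VerificationException types:

    def verify_with_fallback(record, primary_check, reconciled_check):
        # primary_check/reconciled_check stand in for the launch/delete
        # checks and _verify_with_reconciled_data; both raise on mismatch.
        try:
            primary_check(record)
            return True
        except VerificationException:
            try:
                reconciled_check(record)
                return True
            except (NotFound, VerificationException):
                return False
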
@ -17,8 +17,8 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# IN THE SOFTWARE.
import json

import json
import os
import os
import signal
import signal
import sys
import sys
@ -30,11 +30,10 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
    sys.path.insert(0, POSSIBLE_TOPDIR)
    sys.path.insert(0, POSSIBLE_TOPDIR)

from stacktach import reconciler
from verifier import dbverifier
from verifier import nova_verifier
from verifier import glance_verifier
import verifier.config as verifier_config

config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
                                 'stacktach_verifier_config.json')
try:
try:
    from local_settings import *
    from local_settings import *
    config_filename = STACKTACH_VERIFIER_CONFIG
    config_filename = STACKTACH_VERIFIER_CONFIG
@ -43,45 +42,30 @@ except ImportError:

process = None
process = None

processes = []


def kill_time(signal, frame):
def kill_time(signal, frame):
    print "dying ..."
    print "dying ..."
    for process in processes:
    if process:
        process.terminate()
        process.terminate()
    print "rose"
    print "rose"
    for process in processes:
    if process:
        process.join()
        process.join()
    print "bud"
    print "bud"
    sys.exit(0)
    sys.exit(0)


def _load_nova_reconciler():
    config_loc = verifier_config.reconciler_config()
    with open(config_loc, 'r') as rec_config_file:
        rec_config = json.load(rec_config_file)
        return reconciler.Reconciler(rec_config)


if __name__ == '__main__':
if __name__ == '__main__':
    def make_and_start_verifier(exchange):
    config = None
    with open(config_filename, "r") as f:
        config = json.load(f)

    def make_and_start_verifier(config):
        # Gotta create it and run it this way so things don't get
        # Gotta create it and run it this way so things don't get
        # lost when the process is forked.
        # lost when the process is forked.
        verifier = None
        verifier = dbverifier.Verifier(config)
        if exchange == "nova":
            reconcile = verifier_config.reconcile()
            reconciler = None
            if reconcile:
                reconciler = _load_nova_reconciler()
            verifier = nova_verifier.NovaVerifier(verifier_config,
                                                  reconciler=reconciler)
        elif exchange == "glance":
            verifier = glance_verifier.GlanceVerifier(verifier_config)

        verifier.run()
        verifier.run()

    for exchange in verifier_config.topics().keys():
    process = Process(target=make_and_start_verifier, args=(config,))
        process = Process(target=make_and_start_verifier, args=(exchange,))

    process.start()
    process.start()
    signal.signal(signal.SIGINT, kill_time)
    signal.signal(signal.SIGINT, kill_time)
    signal.signal(signal.SIGTERM, kill_time)
    signal.signal(signal.SIGTERM, kill_time)
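The hunk above swaps a process-per-exchange start-up loop for a single dbverifier process. For reference, a minimal standalone version of the per-exchange pattern, with a hypothetical worker body and exchange list and no StackTach imports:

    import signal
    import sys
    from multiprocessing import Process

    def run_verifier(exchange):
        print "starting verifier for %s" % exchange  # real worker goes here

    if __name__ == '__main__':
        processes = [Process(target=run_verifier, args=(name,))
                     for name in ("nova", "glance")]
        for p in processes:
            p.start()

        def kill_time(sig, frame):
            for p in processes:
                p.terminate()
            for p in processes:
                p.join()
            sys.exit(0)

        signal.signal(signal.SIGINT, kill_time)
        signal.signal(signal.SIGTERM, kill_time)

        for p in processes:
            p.join()
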
@ -39,3 +39,5 @@ def deployments():

def topics():
def topics():
    return config['topics']
    return config['topics']

@ -34,7 +34,7 @@ except ImportError:
from pympler.process import ProcessMemoryInfo
from pympler.process import ProcessMemoryInfo

from stacktach import db, message_service
from stacktach import db
from stacktach import stacklog
from stacktach import stacklog
from stacktach import views
from stacktach import views
@ -59,15 +59,15 @@ class Consumer(kombu.mixins.ConsumerMixin):
        self.queue_name_prefix = queue_name_prefix
        self.queue_name_prefix = queue_name_prefix

    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
        return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive,
        return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
                                     durable=self.durable,
                                     durable=self.durable,
                                     auto_delete=auto_delete)
                                     auto_delete=auto_delete)

    def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
    def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
                      auto_delete=False):
                      auto_delete=False):
        return message_service.create_queue(
        return kombu.Queue(name, nova_exchange, durable=self.durable,
            name, nova_exchange, durable=self.durable, auto_delete=exclusive,
            auto_delete=exclusive, exclusive=auto_delete,
            exclusive=auto_delete, queue_arguments=self.queue_arguments,
            queue_arguments=self.queue_arguments,
            routing_key=routing_key)
            routing_key=routing_key)

    def get_consumers(self, Consumer, channel):
    def get_consumers(self, Consumer, channel):