Merge branch 'master' of git://github.com/rackerlabs/stacktach into stacky_refactor2
Conflicts: stacktach/stacky_server.py
This commit is contained in:
commit
f39b20c218
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,4 +1,5 @@
|
||||
.idea/
|
||||
.venv/
|
||||
|
||||
*.pyc
|
||||
local_settings.py
|
||||
|
@ -6,3 +6,5 @@ librabbitmq>=1.0.0
|
||||
prettytable>=0.7.2
|
||||
argparse
|
||||
Pympler
|
||||
requests
|
||||
south
|
@ -11,7 +11,9 @@
|
||||
"userid": "rabbit",
|
||||
"password": "rabbit",
|
||||
"virtual_host": "/",
|
||||
"exchange_name": "stacktach",
|
||||
"routing_keys": ["notifications.info"]
|
||||
"topics": {
|
||||
"nova": ["notifications.info"],
|
||||
"glance": ["notifications.info"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -10,8 +10,26 @@
|
||||
"exit_on_exception": true,
|
||||
"queue_name": "stacktach",
|
||||
"topics": {
|
||||
"nova": ["monitor.info", "monitor.error"],
|
||||
"glance": ["monitor_glance.info", "monitor_glance.error"]
|
||||
"nova": [
|
||||
{
|
||||
"queue": "monitor.info",
|
||||
"routing_key": "monitor.info"
|
||||
},
|
||||
{
|
||||
"queue": "monitor.error",
|
||||
"routing_key": "monitor.error"
|
||||
}
|
||||
],
|
||||
"glance": [
|
||||
{
|
||||
"queue": "stacktach_monitor_glance.info",
|
||||
"routing_key": "monitor_glance.info"
|
||||
},
|
||||
{
|
||||
"queue": "stacktach_monitor_glance.error",
|
||||
"routing_key": "monitor_glance.error"
|
||||
},
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
@ -25,8 +43,16 @@
|
||||
"exit_on_exception": false,
|
||||
"queue_name": "stacktach",
|
||||
"topics": {
|
||||
"nova": ["monitor.info", "monitor.error"],
|
||||
"glance": ["monitor_glance.info", "monitor_glance.error"]
|
||||
"nova": [
|
||||
{
|
||||
"queue": "monitor.info",
|
||||
"routing_key": "monitor.info"
|
||||
},
|
||||
{
|
||||
"queue": "monitor.error",
|
||||
"routing_key": "monitor.error"
|
||||
}
|
||||
]
|
||||
}
|
||||
}]
|
||||
}
|
||||
|
4
etc/test-requires.txt
Normal file
4
etc/test-requires.txt
Normal file
@ -0,0 +1,4 @@
|
||||
nose
|
||||
coverage
|
||||
mox
|
||||
nose-exclude
|
@ -33,16 +33,21 @@ from stacktach import models
|
||||
from stacktach.reconciler import Reconciler
|
||||
|
||||
OLD_LAUNCHES_QUERY = """
|
||||
select * from stacktach_instanceusage where
|
||||
launched_at is not null and
|
||||
launched_at < %s and
|
||||
instance not in
|
||||
(select distinct(instance)
|
||||
from stacktach_instancedeletes where
|
||||
deleted_at < %s union
|
||||
select distinct(instance)
|
||||
from stacktach_instancereconcile where
|
||||
deleted_at < %s);"""
|
||||
select stacktach_instanceusage.id,
|
||||
stacktach_instanceusage.instance,
|
||||
stacktach_instanceusage.launched_at from stacktach_instanceusage
|
||||
left outer join stacktach_instancedeletes on
|
||||
stacktach_instanceusage.instance = stacktach_instancedeletes.instance
|
||||
left outer join stacktach_instancereconcile on
|
||||
stacktach_instanceusage.instance = stacktach_instancereconcile.instance
|
||||
where (
|
||||
stacktach_instancereconcile.deleted_at is null and (
|
||||
stacktach_instancedeletes.deleted_at is null or
|
||||
stacktach_instancedeletes.deleted_at > %s
|
||||
)
|
||||
or (stacktach_instancereconcile.deleted_at is not null and
|
||||
stacktach_instancereconcile.deleted_at > %s)
|
||||
) and stacktach_instanceusage.launched_at < %s;"""
|
||||
|
||||
reconciler = None
|
||||
|
||||
|
7
run_tests_venv.sh
Executable file
7
run_tests_venv.sh
Executable file
@ -0,0 +1,7 @@
|
||||
#!/bin/sh
|
||||
virtualenv .venv
|
||||
. .venv/bin/activate
|
||||
pip install -r etc/pip-requires.txt
|
||||
pip install -r etc/test-requires.txt
|
||||
nosetests tests --exclude-dir=stacktach --with-coverage --cover-package=stacktach,worker,verifier --cover-erase
|
||||
|
31
stacktach/message_service.py
Normal file
31
stacktach/message_service.py
Normal file
@ -0,0 +1,31 @@
|
||||
import kombu
|
||||
import kombu.entity
|
||||
import kombu.pools
|
||||
import kombu.connection
|
||||
import kombu.common
|
||||
|
||||
def send_notification(message, routing_key, connection, exchange):
|
||||
with kombu.pools.producers[connection].acquire(block=True) as producer:
|
||||
kombu.common.maybe_declare(exchange, producer.channel)
|
||||
producer.publish(message, routing_key)
|
||||
|
||||
|
||||
def create_exchange(name, exchange_type, exclusive=False, auto_delete=False,
|
||||
durable=True):
|
||||
return kombu.entity.Exchange(name, type=exchange_type, exclusive=exclusive,
|
||||
auto_delete=auto_delete, durable=durable)
|
||||
|
||||
|
||||
def create_connection(hostname, port, userid, password, transport,
|
||||
virtual_host):
|
||||
return kombu.connection.BrokerConnection(
|
||||
hostname=hostname, port=port, userid=userid, password=password,
|
||||
transport=transport, virtual_host=virtual_host)
|
||||
|
||||
|
||||
def create_queue(name, exchange, routing_key, exclusive=False,
|
||||
auto_delete=False, queue_arguments=None, durable=True):
|
||||
return kombu.Queue(name, exchange, durable=durable,
|
||||
auto_delete=auto_delete, exclusive=exclusive,
|
||||
queue_arguments=queue_arguments,
|
||||
routing_key=routing_key)
|
@ -0,0 +1,228 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
from south.db import db
|
||||
from south.v2 import SchemaMigration
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(SchemaMigration):
|
||||
|
||||
def forwards(self, orm):
|
||||
|
||||
# Changing field 'ImageUsage.owner'
|
||||
db.alter_column(u'stacktach_imageusage', 'owner', self.gf('django.db.models.fields.CharField')(max_length=50, null=True))
|
||||
|
||||
# Changing field 'ImageExists.owner'
|
||||
db.alter_column(u'stacktach_imageexists', 'owner', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
|
||||
|
||||
def backwards(self, orm):
|
||||
|
||||
# User chose to not deal with backwards NULL issues for 'ImageUsage.owner'
|
||||
raise RuntimeError("Cannot reverse this migration. 'ImageUsage.owner' and its values cannot be restored.")
|
||||
|
||||
# User chose to not deal with backwards NULL issues for 'ImageExists.owner'
|
||||
raise RuntimeError("Cannot reverse this migration. 'ImageExists.owner' and its values cannot be restored.")
|
||||
|
||||
models = {
|
||||
u'stacktach.deployment': {
|
||||
'Meta': {'object_name': 'Deployment'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
|
||||
},
|
||||
u'stacktach.genericrawdata': {
|
||||
'Meta': {'object_name': 'GenericRawData'},
|
||||
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
|
||||
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.glancerawdata': {
|
||||
'Meta': {'object_name': 'GlanceRawData'},
|
||||
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
|
||||
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'owner': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
|
||||
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'db_index': 'True'}),
|
||||
'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'null': 'True', 'blank': 'True'}),
|
||||
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.imagedeletes': {
|
||||
'Meta': {'object_name': 'ImageDeletes'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
|
||||
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.imageexists': {
|
||||
'Meta': {'object_name': 'ImageExists'},
|
||||
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'created_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageDeletes']"}),
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'fail_reason': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'owner': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['stacktach.GlanceRawData']"}),
|
||||
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
|
||||
'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
|
||||
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
|
||||
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageUsage']"}),
|
||||
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.imageusage': {
|
||||
'Meta': {'object_name': 'ImageUsage'},
|
||||
'created_at': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
|
||||
'owner': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
|
||||
'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
|
||||
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.instancedeletes': {
|
||||
'Meta': {'object_name': 'InstanceDeletes'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
|
||||
},
|
||||
u'stacktach.instanceexists': {
|
||||
'Meta': {'object_name': 'InstanceExists'},
|
||||
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
|
||||
},
|
||||
u'stacktach.instancereconcile': {
|
||||
'Meta': {'object_name': 'InstanceReconcile'},
|
||||
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
|
||||
'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
|
||||
'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.instanceusage': {
|
||||
'Meta': {'object_name': 'InstanceUsage'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.jsonreport': {
|
||||
'Meta': {'object_name': 'JsonReport'},
|
||||
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
|
||||
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
|
||||
},
|
||||
u'stacktach.lifecycle': {
|
||||
'Meta': {'object_name': 'Lifecycle'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
|
||||
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.rawdata': {
|
||||
'Meta': {'object_name': 'RawData'},
|
||||
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
|
||||
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
|
||||
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'json': ('django.db.models.fields.TextField', [], {}),
|
||||
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
|
||||
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
|
||||
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
|
||||
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.rawdataimagemeta': {
|
||||
'Meta': {'object_name': 'RawDataImageMeta'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
|
||||
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
|
||||
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
|
||||
},
|
||||
u'stacktach.requesttracker': {
|
||||
'Meta': {'object_name': 'RequestTracker'},
|
||||
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
|
||||
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
|
||||
},
|
||||
u'stacktach.timing': {
|
||||
'Meta': {'object_name': 'Timing'},
|
||||
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
|
||||
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
|
||||
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
|
||||
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
|
||||
}
|
||||
}
|
||||
|
||||
complete_apps = ['stacktach']
|
@ -12,16 +12,20 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
import copy
|
||||
|
||||
from django.db import models
|
||||
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
|
||||
|
||||
def routing_key_type(key):
|
||||
if key.endswith('error'):
|
||||
return 'E'
|
||||
return ' '
|
||||
|
||||
|
||||
class Deployment(models.Model):
|
||||
name = models.CharField(max_length=50)
|
||||
|
||||
@ -31,7 +35,7 @@ class Deployment(models.Model):
|
||||
|
||||
class GenericRawData(models.Model):
|
||||
result_titles = [["#", "?", "When", "Deployment", "Event", "Host",
|
||||
"Instance", "Request id"]]
|
||||
"Instance", "Request id"]]
|
||||
deployment = models.ForeignKey(Deployment)
|
||||
tenant = models.CharField(max_length=50, null=True, blank=True,
|
||||
db_index=True)
|
||||
@ -71,6 +75,7 @@ class GenericRawData(models.Model):
|
||||
self.instance, self.request_id])
|
||||
return results
|
||||
|
||||
|
||||
class RawData(models.Model):
|
||||
result_titles = [["#", "?", "When", "Deployment", "Event", "Host",
|
||||
"State", "State'", "Task'"]]
|
||||
@ -173,6 +178,16 @@ class InstanceUsage(models.Model):
|
||||
raw = raws[0]
|
||||
return raw.deployment
|
||||
|
||||
@staticmethod
|
||||
def find(instance, launched_at):
|
||||
start = launched_at - datetime.timedelta(
|
||||
microseconds=launched_at.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
return InstanceUsage.objects.filter(**params)
|
||||
|
||||
|
||||
class InstanceDeletes(models.Model):
|
||||
instance = models.CharField(max_length=50, null=True,
|
||||
@ -186,6 +201,17 @@ class InstanceDeletes(models.Model):
|
||||
def deployment(self):
|
||||
return self.raw.deployment
|
||||
|
||||
@staticmethod
|
||||
def find(instance, launched, deleted_max=None):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
if deleted_max:
|
||||
params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
|
||||
return InstanceDeletes.objects.filter(**params)
|
||||
|
||||
|
||||
class InstanceReconcile(models.Model):
|
||||
row_created = models.DateTimeField(auto_now_add=True)
|
||||
@ -209,6 +235,15 @@ class InstanceReconcile(models.Model):
|
||||
source = models.CharField(max_length=150, null=True,
|
||||
blank=True, db_index=True)
|
||||
|
||||
@staticmethod
|
||||
def find(instance, launched):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
return InstanceReconcile.objects.filter(**params)
|
||||
|
||||
|
||||
class InstanceExists(models.Model):
|
||||
PENDING = 'pending'
|
||||
@ -260,6 +295,32 @@ class InstanceExists(models.Model):
|
||||
def deployment(self):
|
||||
return self.raw.deployment
|
||||
|
||||
@staticmethod
|
||||
def find(ending_max, status):
|
||||
params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
|
||||
'status': status}
|
||||
return InstanceExists.objects.select_related()\
|
||||
.filter(**params).order_by('id')
|
||||
|
||||
def mark_verified(self, reconciled=False, reason=None):
|
||||
if not reconciled:
|
||||
self.status = InstanceExists.VERIFIED
|
||||
else:
|
||||
self.status = InstanceExists.RECONCILED
|
||||
if reason is not None:
|
||||
self.fail_reason = reason
|
||||
|
||||
self.save()
|
||||
|
||||
def mark_failed(self, reason=None):
|
||||
self.status = InstanceExists.FAILED
|
||||
if reason:
|
||||
self.fail_reason = reason
|
||||
self.save()
|
||||
|
||||
def update_status(self, new_status):
|
||||
self.status = new_status
|
||||
|
||||
|
||||
class Timing(models.Model):
|
||||
"""Each Timing record corresponds to a .start/.end event pair
|
||||
@ -364,7 +425,7 @@ class ImageUsage(models.Model):
|
||||
uuid = models.CharField(max_length=50, db_index=True)
|
||||
created_at = models.DecimalField(max_digits=20,
|
||||
decimal_places=6, db_index=True)
|
||||
owner = models.CharField(max_length=50, db_index=True)
|
||||
owner = models.CharField(max_length=50, db_index=True, null=True)
|
||||
size = models.BigIntegerField(max_length=20)
|
||||
last_raw = models.ForeignKey(GlanceRawData, null=True)
|
||||
|
||||
@ -376,6 +437,13 @@ class ImageDeletes(models.Model):
|
||||
null=True)
|
||||
raw = models.ForeignKey(GlanceRawData, null=True)
|
||||
|
||||
@staticmethod
|
||||
def find(uuid, deleted_max=None):
|
||||
params = {'uuid': uuid}
|
||||
if deleted_max:
|
||||
params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
|
||||
return ImageDeletes.objects.filter(**params)
|
||||
|
||||
|
||||
class ImageExists(models.Model):
|
||||
PENDING = 'pending'
|
||||
@ -409,9 +477,28 @@ class ImageExists(models.Model):
|
||||
usage = models.ForeignKey(ImageUsage, related_name='+', null=True)
|
||||
delete = models.ForeignKey(ImageDeletes, related_name='+', null=True)
|
||||
send_status = models.IntegerField(default=0, db_index=True)
|
||||
owner = models.CharField(max_length=255, db_index=True)
|
||||
owner = models.CharField(max_length=255, db_index=True, null=True)
|
||||
size = models.BigIntegerField(max_length=20)
|
||||
|
||||
def update_status(self, new_status):
|
||||
self.status = new_status
|
||||
|
||||
@staticmethod
|
||||
def find(ending_max, status):
|
||||
params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
|
||||
'status': status}
|
||||
return ImageExists.objects.select_related().filter(**params).order_by('id')
|
||||
|
||||
def mark_verified(self):
|
||||
self.status = InstanceExists.VERIFIED
|
||||
self.save()
|
||||
|
||||
def mark_failed(self, reason=None):
|
||||
self.status = InstanceExists.FAILED
|
||||
if reason:
|
||||
self.fail_reason = reason
|
||||
self.save()
|
||||
|
||||
|
||||
def get_model_fields(model):
|
||||
return model._meta.fields
|
||||
|
@ -95,25 +95,38 @@ class GlanceNotification(Notification):
|
||||
def __init__(self, body, deployment, routing_key, json):
|
||||
super(GlanceNotification, self).__init__(body, deployment,
|
||||
routing_key, json)
|
||||
self.properties = self.payload.get('properties', {})
|
||||
self.image_type = image_type.get_numeric_code(self.payload)
|
||||
self.status = self.payload.get('status', None)
|
||||
self.uuid = self.payload.get('id', None)
|
||||
self.size = self.payload.get('size', None)
|
||||
created_at = self.payload.get('created_at', None)
|
||||
self.created_at = created_at and utils.str_time_to_unix(created_at)
|
||||
audit_period_beginning = self.payload.get(
|
||||
'audit_period_beginning', None)
|
||||
self.audit_period_beginning = audit_period_beginning and\
|
||||
utils.str_time_to_unix(audit_period_beginning)
|
||||
audit_period_ending = self.payload.get(
|
||||
'audit_period_ending', None)
|
||||
self.audit_period_ending = audit_period_ending and \
|
||||
utils.str_time_to_unix(audit_period_ending)
|
||||
if isinstance(self.payload, dict):
|
||||
self.properties = self.payload.get('properties', {})
|
||||
self.image_type = image_type.get_numeric_code(self.payload)
|
||||
self.status = self.payload.get('status', None)
|
||||
self.uuid = self.payload.get('id', None)
|
||||
self.size = self.payload.get('size', None)
|
||||
created_at = self.payload.get('created_at', None)
|
||||
self.created_at = created_at and utils.str_time_to_unix(created_at)
|
||||
audit_period_beginning = self.payload.get(
|
||||
'audit_period_beginning', None)
|
||||
self.audit_period_beginning = audit_period_beginning and\
|
||||
utils.str_time_to_unix(audit_period_beginning)
|
||||
audit_period_ending = self.payload.get(
|
||||
'audit_period_ending', None)
|
||||
self.audit_period_ending = audit_period_ending and \
|
||||
utils.str_time_to_unix(audit_period_ending)
|
||||
else:
|
||||
self.properties = {}
|
||||
self.image_type = None
|
||||
self.status = None
|
||||
self.uuid = None
|
||||
self.size = None
|
||||
self.created_at = None
|
||||
self.audit_period_beginning = None
|
||||
self.audit_period_ending = None
|
||||
|
||||
@property
|
||||
def owner(self):
|
||||
return self.payload.get('owner', None)
|
||||
if isinstance(self.payload, dict):
|
||||
return self.payload.get('owner', None)
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def instance(self):
|
||||
@ -121,7 +134,10 @@ class GlanceNotification(Notification):
|
||||
@property
|
||||
def deleted_at(self):
|
||||
deleted_at = self.body.get('deleted_at', None)
|
||||
deleted_at = deleted_at or self.payload.get('deleted_at', None)
|
||||
|
||||
if isinstance(self.payload, dict):
|
||||
deleted_at = deleted_at or self.payload.get('deleted_at', None)
|
||||
|
||||
return deleted_at and utils.str_time_to_unix(deleted_at)
|
||||
|
||||
def save(self):
|
||||
@ -150,14 +166,11 @@ class GlanceNotification(Notification):
|
||||
'size': self.size,
|
||||
'raw': raw
|
||||
}
|
||||
created_at_range = (self.created_at, self.created_at+1)
|
||||
usage = db.get_image_usage(
|
||||
uuid=self.uuid, created_at__range=created_at_range)
|
||||
usage = db.get_image_usage(uuid=self.uuid)
|
||||
values['usage'] = usage
|
||||
values['created_at'] = self.created_at
|
||||
if self.deleted_at:
|
||||
delete = db.get_image_delete(
|
||||
uuid=self.uuid, created_at__range=created_at_range)
|
||||
delete = db.get_image_delete(uuid=self.uuid)
|
||||
values['delete'] = delete
|
||||
values['deleted_at'] = self.deleted_at
|
||||
|
||||
@ -236,6 +249,7 @@ class NovaNotification(Notification):
|
||||
host=self.host,
|
||||
instance=self.instance,
|
||||
request_id=self.request_id,
|
||||
image_type=self.image_type,
|
||||
state=self.state,
|
||||
old_state=self.old_state,
|
||||
task=self.task,
|
||||
|
@ -120,6 +120,10 @@ class Reconciler(object):
|
||||
reconciled = False
|
||||
launch = models.InstanceUsage.objects.get(id=launched_id)
|
||||
region = self._region_for_usage(launch)
|
||||
|
||||
if not region:
|
||||
return False
|
||||
|
||||
try:
|
||||
instance = self.client.get_instance(region, launch.instance)
|
||||
if instance['deleted'] and instance['deleted_at'] is not None:
|
||||
@ -140,6 +144,10 @@ class Reconciler(object):
|
||||
def failed_validation(self, exists):
|
||||
reconciled = False
|
||||
region = self._region_for_usage(exists)
|
||||
|
||||
if not region:
|
||||
return False
|
||||
|
||||
try:
|
||||
instance = self.client.get_instance(region, exists.instance,
|
||||
get_metadata=True)
|
||||
|
@ -171,8 +171,9 @@ def do_hosts(request):
|
||||
return rsp(json.dumps(results))
|
||||
|
||||
|
||||
def do_uuid(request, service='nova'):
|
||||
def do_uuid(request):
|
||||
uuid = str(request.GET['uuid'])
|
||||
service = str(request.GET.get('service', 'nova'))
|
||||
if not utils.is_uuid_like(uuid):
|
||||
msg = "%s is not uuid-like" % uuid
|
||||
return error_response(400, 'Bad Request', msg)
|
||||
@ -341,6 +342,7 @@ def append_generic_raw_attributes(event, results):
|
||||
results.append(["Req ID", event.request_id])
|
||||
return results
|
||||
|
||||
|
||||
def _append_raw_attributes(event, results, service):
|
||||
if service == 'nova':
|
||||
return append_nova_raw_attributes(event, results)
|
||||
@ -349,8 +351,11 @@ def _append_raw_attributes(event, results, service):
|
||||
if service == 'generic':
|
||||
return append_generic_raw_attributes(event, results)
|
||||
|
||||
def do_show(request, event_id, service='nova'):
|
||||
|
||||
def do_show(request, event_id):
|
||||
service = str(request.GET.get('service', 'nova'))
|
||||
event_id = int(event_id)
|
||||
|
||||
results = []
|
||||
model = _model_factory(service)
|
||||
try:
|
||||
@ -374,7 +379,9 @@ def _model_factory(service):
|
||||
return models.GenericRawData.objects
|
||||
|
||||
|
||||
def do_watch(request, deployment_id, service='nova'):
|
||||
def do_watch(request, deployment_id):
|
||||
service = str(request.GET.get('service', 'nova'))
|
||||
|
||||
model = _model_factory(service)
|
||||
deployment_id = int(deployment_id)
|
||||
since = request.GET.get('since')
|
||||
@ -577,7 +584,8 @@ def do_jsonreport(request, report_id):
|
||||
return rsp(report.json)
|
||||
|
||||
|
||||
def search(request, service):
|
||||
def search(request):
|
||||
service = str(request.GET.get('service', 'nova'))
|
||||
field = request.GET.get('field')
|
||||
value = request.GET.get('value')
|
||||
model = _model_factory(service)
|
||||
|
@ -3,11 +3,10 @@ from django.conf.urls import patterns, url
|
||||
|
||||
urlpatterns = patterns('',
|
||||
url(r'^$', 'stacktach.views.welcome', name='welcome'),
|
||||
|
||||
url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
|
||||
url(r'stacky/events/$', 'stacktach.stacky_server.do_events'),
|
||||
url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'),
|
||||
url(r'stacky/uuid/(?P<service>\w+)/$', 'stacktach.stacky_server.do_uuid'),
|
||||
url(r'stacky/uuid/$', 'stacktach.stacky_server.do_uuid'),
|
||||
url(r'stacky/timings/$', 'stacktach.stacky_server.do_timings'),
|
||||
url(r'stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'),
|
||||
url(r'stacky/summary/$', 'stacktach.stacky_server.do_summary'),
|
||||
@ -17,11 +16,9 @@ urlpatterns = patterns('',
|
||||
'stacktach.stacky_server.do_jsonreport'),
|
||||
url(r'stacky/show/(?P<event_id>\d+)/$',
|
||||
'stacktach.stacky_server.do_show'),
|
||||
url(r'stacky/show/(?P<service>\w+)/(?P<event_id>\d+)/$',
|
||||
'stacktach.stacky_server.do_show'),
|
||||
url(r'stacky/watch/(?P<deployment_id>\d+)/(?P<service>\w+)/$',
|
||||
url(r'stacky/watch/(?P<deployment_id>\d+)/$',
|
||||
'stacktach.stacky_server.do_watch'),
|
||||
url(r'stacky/search/(?P<service>\w+)/$', 'stacktach.stacky_server.search'),
|
||||
url(r'stacky/search/$', 'stacktach.stacky_server.search'),
|
||||
url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
|
||||
url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
|
||||
url(r'stacky/usage/launches/$',
|
||||
|
@ -288,6 +288,7 @@ def _process_exists(raw, notification):
|
||||
def _process_glance_usage(raw, notification):
|
||||
notification.save_usage(raw)
|
||||
|
||||
|
||||
def _process_glance_delete(raw, notification):
|
||||
notification.save_delete(raw)
|
||||
|
||||
|
@ -19,7 +19,9 @@
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
|
||||
def setup_sys_path():
|
||||
@ -44,4 +46,58 @@ setup_environment()
|
||||
|
||||
from stacktach import stacklog
|
||||
|
||||
stacklog.set_default_logger_location("%s.log")
|
||||
stacklog.set_default_logger_location("/tmp/%s.log")
|
||||
|
||||
|
||||
class _AssertRaisesContext(object):
|
||||
"""A context manager used to implement TestCase.assertRaises* methods."""
|
||||
|
||||
def __init__(self, expected, test_case, expected_regexp=None):
|
||||
self.expected = expected
|
||||
self.failureException = test_case.failureException
|
||||
self.expected_regexp = expected_regexp
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_type is None:
|
||||
try:
|
||||
exc_name = self.expected.__name__
|
||||
except AttributeError:
|
||||
exc_name = str(self.expected)
|
||||
raise self.failureException(
|
||||
"{0} not raised".format(exc_name))
|
||||
if not issubclass(exc_type, self.expected):
|
||||
# let unexpected exceptions pass through
|
||||
return False
|
||||
self.exception = exc_value # store for later retrieval
|
||||
if self.expected_regexp is None:
|
||||
return True
|
||||
|
||||
expected_regexp = self.expected_regexp
|
||||
if isinstance(expected_regexp, basestring):
|
||||
expected_regexp = re.compile(expected_regexp)
|
||||
if not expected_regexp.search(str(exc_value)):
|
||||
raise self.failureException('"%s" does not match "%s"' %
|
||||
(expected_regexp.pattern, str(exc_value)))
|
||||
return True
|
||||
|
||||
|
||||
class StacktachBaseTestCase(unittest.TestCase):
|
||||
|
||||
def assertIsNotNone(self, obj, msg=None):
|
||||
self.assertTrue(obj is not None, msg)
|
||||
|
||||
def assertIsNone(self, obj, msg=None):
|
||||
self.assertTrue(obj is None, msg)
|
||||
|
||||
def assertIsInstance(self, obj, cls, msg=None):
|
||||
self.assertTrue(isinstance(obj, cls), msg)
|
||||
|
||||
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
|
||||
context = _AssertRaisesContext(excClass, self)
|
||||
if callableObj is None:
|
||||
return context
|
||||
with context:
|
||||
callableObj(*args, **kwargs)
|
250
tests/unit/test_base_verifier.py
Normal file
250
tests/unit/test_base_verifier.py
Normal file
@ -0,0 +1,250 @@
|
||||
import datetime
|
||||
import time
|
||||
from django.db import transaction
|
||||
import mox
|
||||
from stacktach import message_service
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
from tests.unit.utils import HOST, PORT, VIRTUAL_HOST, USERID, PASSWORD, TICK_TIME, SETTLE_TIME, SETTLE_UNITS
|
||||
from tests.unit.utils import make_verifier_config
|
||||
from verifier import base_verifier
|
||||
|
||||
|
||||
class BaseVerifierTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
config = make_verifier_config(False)
|
||||
self.pool = self.mox.CreateMockAnything()
|
||||
self.reconciler = self.mox.CreateMockAnything()
|
||||
self.verifier_with_reconciler = base_verifier.Verifier(config,
|
||||
pool=self.pool, reconciler=self.reconciler)
|
||||
self.verifier_without_notifications = self\
|
||||
._verifier_with_notifications_disabled()
|
||||
self.verifier_with_notifications = self\
|
||||
._verifier_with_notifications_enabled()
|
||||
|
||||
def _verifier_with_notifications_disabled(self):
|
||||
config = make_verifier_config(False)
|
||||
reconciler = self.mox.CreateMockAnything()
|
||||
return base_verifier.Verifier(config,
|
||||
pool=self.pool,
|
||||
reconciler=reconciler)
|
||||
|
||||
def _verifier_with_notifications_enabled(self):
|
||||
config = make_verifier_config(True)
|
||||
reconciler = self.mox.CreateMockAnything()
|
||||
return base_verifier.Verifier(config,
|
||||
pool=self.pool,
|
||||
reconciler=reconciler)
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
|
||||
def test_should_create_verifier_with_reconciler(self):
|
||||
config = make_verifier_config(False)
|
||||
rec = self.mox.CreateMockAnything()
|
||||
verifier = base_verifier.Verifier(config, pool=None, reconciler=rec)
|
||||
self.assertEqual(verifier.reconciler, rec)
|
||||
|
||||
def test_clean_results_full(self):
|
||||
result_not_ready = self.mox.CreateMockAnything()
|
||||
result_not_ready.ready().AndReturn(False)
|
||||
result_unsuccessful = self.mox.CreateMockAnything()
|
||||
result_unsuccessful.ready().AndReturn(True)
|
||||
result_unsuccessful.successful().AndReturn(False)
|
||||
result_successful = self.mox.CreateMockAnything()
|
||||
result_successful.ready().AndReturn(True)
|
||||
result_successful.successful().AndReturn(True)
|
||||
result_successful.get().AndReturn((True, None))
|
||||
result_failed_verification = self.mox.CreateMockAnything()
|
||||
result_failed_verification.ready().AndReturn(True)
|
||||
result_failed_verification.successful().AndReturn(True)
|
||||
failed_exists = self.mox.CreateMockAnything()
|
||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
||||
self.verifier_with_reconciler.results = [result_not_ready,
|
||||
result_unsuccessful,
|
||||
result_successful,
|
||||
result_failed_verification]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
||||
self.assertEqual(result_count, 1)
|
||||
self.assertEqual(success_count, 2)
|
||||
self.assertEqual(errored, 1)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.results), 1)
|
||||
self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
|
||||
self.assertEqual(self.verifier_with_reconciler.failed[0], result_failed_verification)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_pending(self):
|
||||
result_not_ready = self.mox.CreateMockAnything()
|
||||
result_not_ready.ready().AndReturn(False)
|
||||
self.verifier_with_reconciler.results = [result_not_ready]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
||||
self.assertEqual(result_count, 1)
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.results), 1)
|
||||
self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_successful(self):
|
||||
self.verifier_with_reconciler.reconcile = True
|
||||
result_successful = self.mox.CreateMockAnything()
|
||||
result_successful.ready().AndReturn(True)
|
||||
result_successful.successful().AndReturn(True)
|
||||
result_successful.get().AndReturn((True, None))
|
||||
self.verifier_with_reconciler.results = [result_successful]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_unsuccessful(self):
|
||||
result_unsuccessful = self.mox.CreateMockAnything()
|
||||
result_unsuccessful.ready().AndReturn(True)
|
||||
result_unsuccessful.successful().AndReturn(False)
|
||||
self.verifier_with_reconciler.results = [result_unsuccessful]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = \
|
||||
self.verifier_with_reconciler.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 0)
|
||||
self.assertEqual(errored, 1)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_clean_results_fail_verification(self):
|
||||
result_failed_verification = self.mox.CreateMockAnything()
|
||||
result_failed_verification.ready().AndReturn(True)
|
||||
result_failed_verification.successful().AndReturn(True)
|
||||
failed_exists = self.mox.CreateMockAnything()
|
||||
result_failed_verification.get().AndReturn((False, failed_exists))
|
||||
self.verifier_with_reconciler.results = [result_failed_verification]
|
||||
self.mox.ReplayAll()
|
||||
(result_count, success_count, errored) = \
|
||||
self.verifier_with_reconciler.clean_results()
|
||||
self.assertEqual(result_count, 0)
|
||||
self.assertEqual(success_count, 1)
|
||||
self.assertEqual(errored, 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.results), 0)
|
||||
self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
|
||||
self.assertEqual(self.verifier_with_reconciler.failed[0], failed_exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_notifications(self):
|
||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
|
||||
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
|
||||
self.mox.ReplayAll()
|
||||
self.verifier_with_notifications.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_notifications_with_routing_keys(self):
|
||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
|
||||
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
|
||||
self.mox.ReplayAll()
|
||||
self.verifier_with_notifications.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_no_notifications(self):
|
||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_run')
|
||||
self.verifier_without_notifications._run()
|
||||
self.mox.ReplayAll()
|
||||
self.verifier_without_notifications.run()
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_full_no_notifications(self):
|
||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
||||
tran = self.mox.CreateMockAnything()
|
||||
tran.__enter__().AndReturn(tran)
|
||||
tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
|
||||
transaction.commit_on_success().AndReturn(tran)
|
||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_keep_running')
|
||||
self.verifier_without_notifications._keep_running().AndReturn(True)
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier_without_notifications, '_utcnow')
|
||||
self.verifier_without_notifications._utcnow().AndReturn(start)
|
||||
settle_offset = {SETTLE_UNITS: SETTLE_TIME}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier_without_notifications, 'verify_for_range')
|
||||
self.verifier_without_notifications.verify_for_range(ending_max, callback=None)
|
||||
self.mox.StubOutWithMock(self.verifier_without_notifications, 'reconcile_failed')
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier_without_notifications.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.verifier_without_notifications.reconcile_failed()
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(TICK_TIME)
|
||||
self.verifier_without_notifications._keep_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
self.verifier_without_notifications.run()
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_run_full(self):
|
||||
self.mox.StubOutWithMock(transaction, 'commit_on_success')
|
||||
tran = self.mox.CreateMockAnything()
|
||||
tran.__enter__().AndReturn(tran)
|
||||
tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
|
||||
transaction.commit_on_success().AndReturn(tran)
|
||||
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
|
||||
self.verifier_with_notifications.exchange().AndReturn('exchange')
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_keep_running')
|
||||
self.verifier_with_notifications._keep_running().AndReturn(True)
|
||||
start = datetime.datetime.utcnow()
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, '_utcnow')
|
||||
self.verifier_with_notifications._utcnow().AndReturn(start)
|
||||
settle_offset = {SETTLE_UNITS: SETTLE_TIME}
|
||||
ending_max = start - datetime.timedelta(**settle_offset)
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, 'verify_for_range')
|
||||
self.verifier_with_notifications.verify_for_range(ending_max,
|
||||
callback=mox.Not(mox.Is(None)))
|
||||
self.mox.StubOutWithMock(self.verifier_with_notifications, 'reconcile_failed')
|
||||
result1 = self.mox.CreateMockAnything()
|
||||
result2 = self.mox.CreateMockAnything()
|
||||
self.verifier_with_notifications.results = [result1, result2]
|
||||
result1.ready().AndReturn(True)
|
||||
result1.successful().AndReturn(True)
|
||||
result1.get().AndReturn((True, None))
|
||||
result2.ready().AndReturn(True)
|
||||
result2.successful().AndReturn(True)
|
||||
result2.get().AndReturn((True, None))
|
||||
self.verifier_with_notifications.reconcile_failed()
|
||||
self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
|
||||
time.sleep(TICK_TIME)
|
||||
self.verifier_with_notifications._keep_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
self.verifier_with_notifications.run()
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def _mock_exchange_create_and_connect(self, verifier):
|
||||
self.mox.StubOutWithMock(verifier, 'exchange')
|
||||
self.verifier_with_notifications.exchange().AndReturn('exchange')
|
||||
self.mox.StubOutWithMock(message_service, 'create_exchange')
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
message_service.create_exchange('exchange', 'topic', durable=True) \
|
||||
.AndReturn(exchange)
|
||||
self.mox.StubOutWithMock(message_service, 'create_connection')
|
||||
conn = self.mox.CreateMockAnything()
|
||||
conn.__enter__().AndReturn(conn)
|
||||
conn.__exit__(None, None, None)
|
||||
message_service.create_connection(HOST, PORT, USERID,
|
||||
PASSWORD, "librabbitmq",
|
||||
VIRTUAL_HOST).AndReturn(conn)
|
@ -20,11 +20,11 @@
|
||||
|
||||
import datetime
|
||||
import decimal
|
||||
import unittest
|
||||
|
||||
from stacktach import datetime_to_decimal
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
class DatetimeToDecimalTestCase(unittest.TestCase):
|
||||
class DatetimeToDecimalTestCase(StacktachBaseTestCase):
|
||||
|
||||
def test_datetime_to_decimal(self):
|
||||
expected_decimal = decimal.Decimal('1356093296.123')
|
||||
|
@ -20,7 +20,6 @@
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import unittest
|
||||
|
||||
from django.db.models import FieldDoesNotExist
|
||||
from django.db import transaction
|
||||
@ -29,13 +28,14 @@ import mox
|
||||
from stacktach import dbapi
|
||||
from stacktach import models
|
||||
from stacktach import utils as stacktach_utils
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
import utils
|
||||
from utils import INSTANCE_ID_1
|
||||
from utils import MESSAGE_ID_1
|
||||
from utils import MESSAGE_ID_2
|
||||
|
||||
|
||||
class DBAPITestCase(unittest.TestCase):
|
||||
class DBAPITestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
dne_exception = models.InstanceExists.DoesNotExist
|
||||
|
416
tests/unit/test_glance_verifier.py
Normal file
416
tests/unit/test_glance_verifier.py
Normal file
@ -0,0 +1,416 @@
|
||||
# Copyright (c) 2013 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
from datetime import datetime
|
||||
|
||||
import decimal
|
||||
import json
|
||||
import uuid
|
||||
import kombu
|
||||
|
||||
import mox
|
||||
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
from stacktach import models
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
from utils import IMAGE_UUID_1
|
||||
from utils import make_verifier_config
|
||||
from verifier import glance_verifier
|
||||
from verifier import FieldMismatch
|
||||
from verifier import NotFound
|
||||
from verifier import VerificationException
|
||||
|
||||
|
||||
class GlanceVerifierTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.mox.StubOutWithMock(models, 'ImageUsage', use_mock_anything=True)
|
||||
models.ImageUsage.objects = self.mox.CreateMockAnything()
|
||||
self.pool = self.mox.CreateMockAnything()
|
||||
config = make_verifier_config(False)
|
||||
self.glance_verifier = glance_verifier.GlanceVerifier(config,
|
||||
pool=self.pool)
|
||||
self.mox.StubOutWithMock(models, 'ImageDeletes',
|
||||
use_mock_anything=True)
|
||||
models.ImageDeletes.objects = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(models, 'ImageExists',
|
||||
use_mock_anything=True)
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
self.verifier = None
|
||||
|
||||
def test_verify_usage_should_not_raise_exception_on_success(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.created_at = decimal.Decimal('1.1')
|
||||
exist.owner = 'owner'
|
||||
exist.size = 1234
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.created_at = decimal.Decimal('1.1')
|
||||
exist.usage.size = 1234
|
||||
exist.usage.owner = 'owner'
|
||||
self.mox.ReplayAll()
|
||||
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_usage_created_at_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.created_at = decimal.Decimal('1.1')
|
||||
exist.usage.created_at = decimal.Decimal('2.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
|
||||
exception = cm.exception
|
||||
self.assertEqual(exception.field_name, 'created_at')
|
||||
self.assertEqual(exception.expected, decimal.Decimal('1.1'))
|
||||
self.assertEqual(exception.actual, decimal.Decimal('2.1'))
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_usage_owner_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.owner = 'owner'
|
||||
exist.usage.owner = 'not_owner'
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
|
||||
exception = cm.exception
|
||||
self.assertEqual(exception.field_name, 'owner')
|
||||
self.assertEqual(exception.expected, 'owner')
|
||||
self.assertEqual(exception.actual, 'not_owner')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_usage_size_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.size = 1234
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.size = 5678
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
exception = cm.exception
|
||||
|
||||
self.assertEqual(exception.field_name, 'size')
|
||||
self.assertEqual(exception.expected, 1234)
|
||||
self.assertEqual(exception.actual, 5678)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_usage_for_late_usage(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = None
|
||||
exist.uuid = IMAGE_UUID_1
|
||||
exist.created_at = decimal.Decimal('1.1')
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
usage = self.mox.CreateMockAnything()
|
||||
results.__getitem__(0).AndReturn(usage)
|
||||
usage.created_at = decimal.Decimal('1.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_usage_raises_not_found_for_no_usage(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = None
|
||||
exist.uuid = IMAGE_UUID_1
|
||||
exist.created_at = decimal.Decimal('1.1')
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1) \
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(NotFound) as cm:
|
||||
glance_verifier._verify_for_usage(exist)
|
||||
exception = cm.exception
|
||||
self.assertEqual(exception.object_type, 'ImageUsage')
|
||||
self.assertEqual(exception.search_params, {'uuid': IMAGE_UUID_1})
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_delete(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = self.mox.CreateMockAnything()
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
exist.delete.deleted_at = decimal.Decimal('5.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
glance_verifier._verify_for_delete(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_delete_when_late_delete(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.uuid = IMAGE_UUID_1
|
||||
exist.delete = None
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.ImageDeletes.find(uuid=IMAGE_UUID_1).AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
delete = self.mox.CreateMockAnything()
|
||||
delete.deleted_at = decimal.Decimal('5.1')
|
||||
results.__getitem__(0).AndReturn(delete)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
glance_verifier._verify_for_delete(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_delete_when_no_delete(self):
    """An exist with no deleted_at and no delete row should verify."""
    audit_period_ending = decimal.Decimal('1.2')
    exist = self.mox.CreateMockAnything()
    exist.delete = None
    exist.uuid = IMAGE_UUID_1
    exist.deleted_at = None
    exist.audit_period_ending = audit_period_ending

    delete_rows = self.mox.CreateMockAnything()
    # The delete lookup is bounded by the end of the audit period.
    models.ImageDeletes.find(
        IMAGE_UUID_1,
        dt.dt_from_decimal(audit_period_ending)).AndReturn(delete_rows)
    delete_rows.count().AndReturn(0)

    self.mox.ReplayAll()

    glance_verifier._verify_for_delete(exist)
    self.mox.VerifyAll()
|
||||
def test_verify_delete_found_delete_when_exist_deleted_at_is_none(self):
    """Finding a delete for an exist that was never deleted is an error."""
    audit_period_ending = decimal.Decimal('1.3')
    exist = self.mox.CreateMockAnything()
    exist.delete = None
    exist.uuid = IMAGE_UUID_1
    exist.deleted_at = None
    exist.audit_period_ending = audit_period_ending
    delete_rows = self.mox.CreateMockAnything()
    models.ImageDeletes.find(
        IMAGE_UUID_1,
        dt.dt_from_decimal(audit_period_ending)).AndReturn(delete_rows)
    # A delete row exists even though the exist says nothing was deleted.
    delete_rows.count().AndReturn(1)

    self.mox.ReplayAll()

    with self.assertRaises(VerificationException) as caught:
        glance_verifier._verify_for_delete(exist)
    self.assertEqual(caught.exception.reason,
                     'Found ImageDeletes for non-delete exist')

    self.mox.VerifyAll()
|
||||
def test_verify_delete_deleted_at_mismatch(self):
    """Differing deleted_at values should raise FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.delete = self.mox.CreateMockAnything()
    exist.deleted_at = decimal.Decimal('5.1')
    exist.delete.deleted_at = decimal.Decimal('4.1')
    self.mox.ReplayAll()

    with self.assertRaises(FieldMismatch) as caught:
        glance_verifier._verify_for_delete(exist)
    mismatch = caught.exception
    self.assertEqual(mismatch.field_name, 'deleted_at')
    self.assertEqual(mismatch.expected, decimal.Decimal('5.1'))
    self.assertEqual(mismatch.actual, decimal.Decimal('4.1'))
    self.mox.VerifyAll()
|
||||
def test_verify_for_delete_size_mismatch(self):
    """Differing deleted_at on exist vs. delete raises FieldMismatch.

    NOTE(review): despite the name, this test asserts on a deleted_at
    mismatch, not a size mismatch; the name is kept so the test id does
    not change.
    """
    exist = self.mox.CreateMockAnything()
    exist.delete = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    exist.deleted_at = decimal.Decimal('5.1')
    exist.delete.launched_at = decimal.Decimal('1.1')
    exist.delete.deleted_at = decimal.Decimal('6.1')
    self.mox.ReplayAll()

    # Use the assertRaises context manager for consistency with the
    # sibling mismatch tests; the old try/self.fail()/except form used
    # Python-2-only "except FieldMismatch, fm" syntax.
    with self.assertRaises(FieldMismatch) as cm:
        glance_verifier._verify_for_delete(exist)
    fm = cm.exception
    self.assertEqual(fm.field_name, 'deleted_at')
    self.assertEqual(fm.expected, decimal.Decimal('5.1'))
    self.assertEqual(fm.actual, decimal.Decimal('6.1'))
    self.mox.VerifyAll()
|
||||
def test_verify_should_verify_exists_for_usage_and_delete(self):
    """_verify runs both checks and marks the exist row verified."""
    exist = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
    glance_verifier._verify_for_usage(exist)
    self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete')
    glance_verifier._verify_for_delete(exist)
    exist.mark_verified()
    self.mox.ReplayAll()

    success, returned_exist = glance_verifier._verify(exist)

    self.mox.VerifyAll()
    self.assertTrue(success)
|
||||
|
||||
def test_verify_exist_marks_exist_as_failed_if_field_mismatch_exception_is_raised(self):
    """A FieldMismatch during verification marks the exist as failed."""
    exist = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
    mismatch = FieldMismatch('field', 'expected', 'actual')
    glance_verifier._verify_for_usage(exist).AndRaise(exception=mismatch)
    # The failure reason is the exception class name.
    exist.mark_failed(reason='FieldMismatch')
    self.mox.ReplayAll()

    success, returned_exist = glance_verifier._verify(exist)

    self.mox.VerifyAll()
    self.assertFalse(success)
|
||||
def test_verify_for_range_without_callback(self):
    """Each pending exist is queued on the pool with no result callback."""
    when_max = datetime.utcnow()
    pending_rows = self.mox.CreateMockAnything()
    models.ImageExists.PENDING = 'pending'
    models.ImageExists.VERIFYING = 'verifying'
    self.mox.StubOutWithMock(models.ImageExists, 'find')
    models.ImageExists.find(
        ending_max=when_max,
        status=models.ImageExists.PENDING).AndReturn(pending_rows)
    pending_rows.count().AndReturn(2)
    first_exist = self.mox.CreateMockAnything()
    second_exist = self.mox.CreateMockAnything()
    # The result set is sliced (first 1000 rows) and then iterated.
    pending_rows.__getslice__(0, 1000).AndReturn(pending_rows)
    pending_rows.__iter__().AndReturn(iter([first_exist, second_exist]))
    first_exist.save()
    second_exist.save()
    self.pool.apply_async(glance_verifier._verify, args=(first_exist,),
                          callback=None)
    self.pool.apply_async(glance_verifier._verify, args=(second_exist,),
                          callback=None)
    self.mox.ReplayAll()

    self.glance_verifier.verify_for_range(when_max)
    self.assertEqual(first_exist.status, 'verifying')
    self.assertEqual(second_exist.status, 'verifying')
    self.mox.VerifyAll()
|
||||
def test_verify_for_range_with_callback(self):
    """Each pending exist is queued with the supplied result callback."""
    callback = self.mox.CreateMockAnything()
    when_max = datetime.utcnow()
    pending_rows = self.mox.CreateMockAnything()
    models.ImageExists.PENDING = 'pending'
    models.ImageExists.VERIFYING = 'verifying'
    models.ImageExists.find(
        ending_max=when_max,
        status=models.ImageExists.PENDING).AndReturn(pending_rows)
    pending_rows.count().AndReturn(2)
    first_exist = self.mox.CreateMockAnything()
    second_exist = self.mox.CreateMockAnything()
    # The result set is sliced (first 1000 rows) and then iterated.
    pending_rows.__getslice__(0, 1000).AndReturn(pending_rows)
    pending_rows.__iter__().AndReturn(iter([first_exist, second_exist]))
    first_exist.save()
    second_exist.save()
    self.pool.apply_async(glance_verifier._verify, args=(first_exist,),
                          callback=callback)
    self.pool.apply_async(glance_verifier._verify, args=(second_exist,),
                          callback=callback)
    self.mox.ReplayAll()

    self.glance_verifier.verify_for_range(
        when_max, callback=callback)
    self.assertEqual(first_exist.status, 'verifying')
    self.assertEqual(second_exist.status, 'verifying')
    self.mox.VerifyAll()
|
||||
def test_send_verified_notification_routing_keys(self):
    """The verified notification is published once per routing key."""
    connection = self.mox.CreateMockAnything()
    exchange = self.mox.CreateMockAnything()
    exist = self.mox.CreateMockAnything()
    exist.raw = self.mox.CreateMockAnything()
    original_notification = ['monitor.info',
                             {'event_type': 'test',
                              'message_id': 'some_uuid'}]
    exist.raw.json = json.dumps(original_notification)
    self.mox.StubOutWithMock(uuid, 'uuid4')
    uuid.uuid4().AndReturn('some_other_uuid')
    self.mox.StubOutWithMock(kombu.pools, 'producers')
    self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
    routing_keys = ['notifications.info', 'monitor.info']
    expected_message = {'event_type': 'image.exists.verified.old',
                        'message_id': 'some_other_uuid',
                        'original_message_id': 'some_uuid'}
    # One acquire/declare/publish cycle is expected per routing key.
    for routing_key in routing_keys:
        producer = self.mox.CreateMockAnything()
        producer.channel = self.mox.CreateMockAnything()
        kombu.pools.producers[connection].AndReturn(producer)
        producer.acquire(block=True).AndReturn(producer)
        producer.__enter__().AndReturn(producer)
        kombu.common.maybe_declare(exchange, producer.channel)
        producer.publish(expected_message, routing_key)
        producer.__exit__(None, None, None)
    self.mox.ReplayAll()

    self.glance_verifier.send_verified_notification(
        exist, exchange, connection, routing_keys=routing_keys)
    self.mox.VerifyAll()
|
||||
def test_send_verified_notification_default_routing_key(self):
    """Without explicit keys, publish falls back to the original message's key."""
    connection = self.mox.CreateMockAnything()
    exchange = self.mox.CreateMockAnything()
    exist = self.mox.CreateMockAnything()
    exist.raw = self.mox.CreateMockAnything()
    original_notification = ['monitor.info',
                             {'event_type': 'test',
                              'message_id': 'some_uuid'}]
    exist.raw.json = json.dumps(original_notification)
    self.mox.StubOutWithMock(kombu.pools, 'producers')
    self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
    producer = self.mox.CreateMockAnything()
    producer.channel = self.mox.CreateMockAnything()
    kombu.pools.producers[connection].AndReturn(producer)
    producer.acquire(block=True).AndReturn(producer)
    producer.__enter__().AndReturn(producer)
    kombu.common.maybe_declare(exchange, producer.channel)
    self.mox.StubOutWithMock(uuid, 'uuid4')
    uuid.uuid4().AndReturn('some_other_uuid')
    expected_message = {'event_type': 'image.exists.verified.old',
                        'message_id': 'some_other_uuid',
                        'original_message_id': 'some_uuid'}
    # The default routing key is the one carried by the raw notification.
    producer.publish(expected_message, original_notification[0])
    producer.__exit__(None, None, None)
    self.mox.ReplayAll()

    self.glance_verifier.send_verified_notification(exist, exchange,
                                                    connection)
    self.mox.VerifyAll()
@ -18,12 +18,11 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import unittest
|
||||
|
||||
from stacktach import image_type
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class ImageTypeTestCase(unittest.TestCase):
|
||||
class ImageTypeTestCase(StacktachBaseTestCase):
|
||||
|
||||
# Abstractions
|
||||
def _test_get_numeric_code(self, image, os_type, os_distro, expected,
|
||||
|
@ -17,12 +17,18 @@
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
from datetime import datetime
|
||||
|
||||
import unittest
|
||||
import mox
|
||||
from stacktach.models import RawData, GlanceRawData, GenericRawData, ImageDeletes, InstanceExists, ImageExists
|
||||
from tests.unit.utils import IMAGE_UUID_1
|
||||
from stacktach import datetime_to_decimal as dt, models
|
||||
from stacktach.models import RawData, GlanceRawData, GenericRawData
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class ModelsTestCase(unittest.TestCase):
|
||||
class ModelsTestCase(StacktachBaseTestCase):
|
||||
def test_get_name_for_rawdata(self):
|
||||
self.assertEquals(RawData.get_name(), 'RawData')
|
||||
|
||||
@ -30,4 +36,86 @@ class ModelsTestCase(unittest.TestCase):
|
||||
self.assertEquals(GlanceRawData.get_name(), 'GlanceRawData')
|
||||
|
||||
def test_get_name_for_genericrawdata(self):
|
||||
self.assertEquals(GenericRawData.get_name(), 'GenericRawData')
|
||||
self.assertEquals(GenericRawData.get_name(), 'GenericRawData')
|
||||
|
||||
|
||||
class ImageDeletesTestCase(StacktachBaseTestCase):
    """Tests for ImageDeletes.find() lookup behavior.

    Subclasses StacktachBaseTestCase for consistency: the other test case
    in this module was migrated off unittest.TestCase in the same change.
    """

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_delete_should_return_delete_issued_before_given_time(self):
        """With a max time, find() filters by uuid and a deleted_at bound."""
        delete = self.mox.CreateMockAnything()
        deleted_max = datetime.utcnow()
        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
        ImageDeletes.objects.filter(
            uuid=IMAGE_UUID_1,
            deleted_at__lte=dt.dt_to_decimal(deleted_max)).AndReturn(delete)
        self.mox.ReplayAll()

        self.assertEquals(ImageDeletes.find(
            IMAGE_UUID_1, deleted_max), delete)
        self.mox.VerifyAll()

    def test_find_delete_should_return_delete_with_the_given_uuid(self):
        """With no max time, find() filters by uuid only."""
        delete = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
        ImageDeletes.objects.filter(uuid=IMAGE_UUID_1).AndReturn(delete)
        self.mox.ReplayAll()

        self.assertEquals(ImageDeletes.find(IMAGE_UUID_1, None), delete)
        self.mox.VerifyAll()
|
||||
|
||||
class ImageExistsTestCase(StacktachBaseTestCase):
    """Tests for ImageExists.find() query construction.

    Subclasses StacktachBaseTestCase for consistency: the other test case
    in this module was migrated off unittest.TestCase in the same change.
    """

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
        """find() filters by audit period end and status, ordered by id."""
        end_max = datetime.utcnow()
        status = 'pending'
        unordered_results = self.mox.CreateMockAnything()
        expected_results = [1, 2]
        related_results = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(ImageExists.objects, 'select_related')
        ImageExists.objects.select_related().AndReturn(related_results)
        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
            end_max), status=status).AndReturn(unordered_results)
        unordered_results.order_by('id').AndReturn(expected_results)
        self.mox.ReplayAll()

        results = ImageExists.find(end_max, status)

        self.mox.VerifyAll()
        self.assertEqual(results, [1, 2])
|
||||
|
||||
class InstanceExistsTestCase(StacktachBaseTestCase):
    """Tests for InstanceExists.find() query construction.

    Subclasses StacktachBaseTestCase for consistency: the other test case
    in this module was migrated off unittest.TestCase in the same change.
    """

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
        """find() filters by audit period end and status, ordered by id."""
        end_max = datetime.utcnow()
        status = 'pending'
        unordered_results = self.mox.CreateMockAnything()
        expected_results = [1, 2]
        related_results = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(InstanceExists.objects, 'select_related')
        InstanceExists.objects.select_related().AndReturn(related_results)
        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
            end_max), status=status).AndReturn(unordered_results)
        unordered_results.order_by('id').AndReturn(expected_results)
        self.mox.ReplayAll()

        results = InstanceExists.find(end_max, status)

        self.mox.VerifyAll()
        self.assertEqual(results, [1, 2])
|
@ -18,7 +18,7 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import unittest
|
||||
import json
|
||||
|
||||
import mox
|
||||
|
||||
@ -29,6 +29,8 @@ from stacktach.notification import Notification
|
||||
from stacktach.notification import NovaNotification
|
||||
from stacktach.notification import GlanceNotification
|
||||
from stacktach import db
|
||||
from stacktach import image_type
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
from tests.unit.utils import REQUEST_ID_1
|
||||
from tests.unit.utils import DECIMAL_DUMMY_TIME
|
||||
from tests.unit.utils import DUMMY_TIME
|
||||
@ -38,7 +40,7 @@ from tests.unit.utils import INSTANCE_ID_1
|
||||
from tests.unit.utils import MESSAGE_ID_1
|
||||
|
||||
|
||||
class NovaNotificationTestCase(unittest.TestCase):
|
||||
class NovaNotificationTestCase(StacktachBaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
@ -119,13 +121,13 @@ class NovaNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
raw = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(db, 'create_nova_rawdata')
|
||||
db.create_nova_rawdata(
|
||||
deployment="1",
|
||||
tenant=TENANT_ID_1,
|
||||
json=json,
|
||||
json=json_body,
|
||||
routing_key=routing_key,
|
||||
when=utils.str_time_to_unix(TIMESTAMP_1),
|
||||
publisher="compute.global.preprod-ord.ohthree.com",
|
||||
@ -134,6 +136,7 @@ class NovaNotificationTestCase(unittest.TestCase):
|
||||
host="global.preprod-ord.ohthree.com",
|
||||
instance=INSTANCE_ID_1,
|
||||
request_id=REQUEST_ID_1,
|
||||
image_type=image_type.get_numeric_code(body['payload']),
|
||||
old_state='old_state',
|
||||
old_task='old_task',
|
||||
os_architecture='os_arch',
|
||||
@ -145,12 +148,12 @@ class NovaNotificationTestCase(unittest.TestCase):
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = NovaNotification(body, deployment, routing_key, json)
|
||||
notification = NovaNotification(body, deployment, routing_key, json_body)
|
||||
self.assertEquals(notification.save(), raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class GlanceNotificationTestCase(unittest.TestCase):
|
||||
class GlanceNotificationTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
@ -174,13 +177,13 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
raw = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
|
||||
db.create_glance_rawdata(
|
||||
deployment="1",
|
||||
owner=TENANT_ID_1,
|
||||
json=json,
|
||||
json=json_body,
|
||||
routing_key=routing_key,
|
||||
when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
|
||||
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
|
||||
@ -196,7 +199,42 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json)
|
||||
json_body)
|
||||
self.assertEquals(notification.save(), raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_save_should_persist_glance_rawdata_erro_payload_to_database(self):
|
||||
body = {
|
||||
"event_type": "image.upload",
|
||||
"timestamp": "2013-06-20 17:31:57.939614",
|
||||
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
|
||||
"payload": "error_message"
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.error"
|
||||
json_body = json.dumps([routing_key, body])
|
||||
raw = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
|
||||
db.create_glance_rawdata(
|
||||
deployment="1",
|
||||
owner=None,
|
||||
json=json_body,
|
||||
routing_key=routing_key,
|
||||
when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
|
||||
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
|
||||
event="image.upload",
|
||||
service="glance-api01-r2961",
|
||||
host="global.preprod-ord.ohthree.com",
|
||||
instance=None,
|
||||
request_id='',
|
||||
image_type=None,
|
||||
status=None,
|
||||
uuid=None).AndReturn(raw)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json_body)
|
||||
self.assertEquals(notification.save(), raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -226,14 +264,12 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
|
||||
self.mox.StubOutWithMock(db, 'create_image_exists')
|
||||
self.mox.StubOutWithMock(db, 'get_image_usage')
|
||||
|
||||
created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
|
||||
db.get_image_usage(created_at__range=created_at_range,
|
||||
uuid=uuid).AndReturn(None)
|
||||
db.get_image_usage(uuid=uuid).AndReturn(None)
|
||||
db.create_image_exists(
|
||||
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
|
||||
owner=TENANT_ID_1,
|
||||
@ -247,7 +283,7 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json)
|
||||
json_body)
|
||||
notification.save_exists(raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -280,17 +316,14 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
|
||||
self.mox.StubOutWithMock(db, 'create_image_exists')
|
||||
self.mox.StubOutWithMock(db, 'get_image_usage')
|
||||
self.mox.StubOutWithMock(db, 'get_image_delete')
|
||||
|
||||
created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
|
||||
db.get_image_usage(created_at__range=created_at_range,
|
||||
uuid=uuid).AndReturn(None)
|
||||
db.get_image_delete(created_at__range=created_at_range,
|
||||
uuid=uuid).AndReturn(delete)
|
||||
db.get_image_usage(uuid=uuid).AndReturn(None)
|
||||
db.get_image_delete(uuid=uuid).AndReturn(delete)
|
||||
db.create_image_exists(
|
||||
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
|
||||
owner=TENANT_ID_1,
|
||||
@ -306,7 +339,7 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json)
|
||||
json_body)
|
||||
notification.save_exists(raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -337,15 +370,13 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
|
||||
self.mox.StubOutWithMock(db, 'create_image_exists')
|
||||
self.mox.StubOutWithMock(db, 'get_image_usage')
|
||||
self.mox.StubOutWithMock(db, 'get_image_delete')
|
||||
|
||||
created_at_range = (DECIMAL_DUMMY_TIME, DECIMAL_DUMMY_TIME+1)
|
||||
db.get_image_usage(created_at__range=created_at_range,
|
||||
uuid=uuid).AndReturn(usage)
|
||||
db.get_image_usage(uuid=uuid).AndReturn(usage)
|
||||
db.create_image_exists(
|
||||
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
|
||||
owner=TENANT_ID_1,
|
||||
@ -359,7 +390,7 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json)
|
||||
json_body)
|
||||
notification.save_exists(raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -380,7 +411,7 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
|
||||
self.mox.StubOutWithMock(db, 'create_image_usage')
|
||||
db.create_image_usage(
|
||||
@ -391,7 +422,8 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
uuid=uuid).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key, json)
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json_body)
|
||||
notification.save_usage(raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -409,7 +441,7 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "glance_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
|
||||
self.mox.StubOutWithMock(db, 'create_image_delete')
|
||||
db.create_image_delete(
|
||||
@ -418,12 +450,13 @@ class GlanceNotificationTestCase(unittest.TestCase):
|
||||
deleted_at=utils.str_time_to_unix(deleted_at)).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = GlanceNotification(body, deployment, routing_key, json)
|
||||
notification = GlanceNotification(body, deployment, routing_key,
|
||||
json_body)
|
||||
notification.save_delete(raw)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class NotificationTestCase(unittest.TestCase):
|
||||
class NotificationTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
@ -447,13 +480,13 @@ class NotificationTestCase(unittest.TestCase):
|
||||
}
|
||||
deployment = "1"
|
||||
routing_key = "generic_monitor.info"
|
||||
json = '{["routing_key", {%s}]}' % body
|
||||
json_body = json.dumps([routing_key, body])
|
||||
raw = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(db, 'create_generic_rawdata')
|
||||
db.create_generic_rawdata(
|
||||
deployment="1",
|
||||
tenant=TENANT_ID_1,
|
||||
json=json,
|
||||
json=json_body,
|
||||
routing_key=routing_key,
|
||||
when=utils.str_time_to_unix(TIMESTAMP_1),
|
||||
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
|
||||
@ -466,6 +499,6 @@ class NotificationTestCase(unittest.TestCase):
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
notification = Notification(body, deployment, routing_key, json)
|
||||
notification = Notification(body, deployment, routing_key, json_body)
|
||||
self.assertEquals(notification.save(), raw)
|
||||
self.mox.VerifyAll()
|
||||
|
839
tests/unit/test_nova_verifier.py
Normal file
839
tests/unit/test_nova_verifier.py
Normal file
@ -0,0 +1,839 @@
|
||||
# Copyright (c) 2013 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import datetime
|
||||
import decimal
|
||||
import json
|
||||
import uuid
|
||||
|
||||
import kombu.common
|
||||
import kombu.entity
|
||||
import kombu.pools
|
||||
import mox
|
||||
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
from stacktach import models
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
from utils import make_verifier_config
|
||||
from utils import INSTANCE_ID_1
|
||||
from utils import RAX_OPTIONS_1
|
||||
from utils import RAX_OPTIONS_2
|
||||
from utils import OS_DISTRO_1
|
||||
from utils import OS_DISTRO_2
|
||||
from utils import OS_ARCH_1
|
||||
from utils import OS_ARCH_2
|
||||
from utils import OS_VERSION_1
|
||||
from utils import OS_VERSION_2
|
||||
from utils import TENANT_ID_1
|
||||
from utils import TENANT_ID_2
|
||||
from utils import INSTANCE_TYPE_ID_1
|
||||
from verifier import nova_verifier
|
||||
from verifier import AmbiguousResults
|
||||
from verifier import FieldMismatch
|
||||
from verifier import NotFound
|
||||
from verifier import VerificationException
|
||||
|
||||
|
||||
class NovaVerifierTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
    """Stub every model the nova verifier touches so no DB access occurs.

    Each model class is mocked (use_mock_anything allows non-class
    attributes) and its manager replaced with a mock, then the verifier
    under test is constructed. The original code repeated the same
    two-line stanza once per model; a data-driven loop removes the
    duplication without changing behavior.
    """
    self.mox = mox.Mox()
    for model_name in ('RawData', 'Deployment', 'Lifecycle', 'Timing',
                       'RequestTracker', 'InstanceUsage',
                       'InstanceDeletes', 'InstanceReconcile',
                       'InstanceExists', 'JsonReport'):
        self.mox.StubOutWithMock(models, model_name,
                                 use_mock_anything=True)
        getattr(models, model_name).objects = self.mox.CreateMockAnything()

    self._setup_verifier()
|
||||
def _setup_verifier(self):
    """Build a NovaVerifier wired to a mock pool and reconciler."""
    self.pool = self.mox.CreateMockAnything()
    self.reconciler = self.mox.CreateMockAnything()
    verifier_config = make_verifier_config(False)
    self.verifier = nova_verifier.NovaVerifier(
        verifier_config, pool=self.pool, reconciler=self.reconciler)
|
||||
def tearDown(self):
    """Undo mox stubs and drop fixtures so state cannot leak between tests."""
    self.mox.UnsetStubs()
    self.verifier_notif = None
    self.pool = None
    self.verifier = None
|
||||
def test_verify_for_launch(self):
    """Exist and usage that agree on every compared field verify cleanly."""
    launched = decimal.Decimal('1.1')
    exist = self.mox.CreateMockAnything()
    exist.launched_at = launched
    exist.instance_type_id = INSTANCE_TYPE_ID_1
    exist.tenant = TENANT_ID_1

    exist.usage = self.mox.CreateMockAnything()
    exist.usage.launched_at = launched
    exist.usage.instance_type_id = INSTANCE_TYPE_ID_1
    exist.usage.tenant = TENANT_ID_1
    self.mox.ReplayAll()

    nova_verifier._verify_for_launch(exist)

    self.mox.VerifyAll()
|
||||
def test_verify_for_launch_launched_at_in_range(self):
    """launched_at values that differ only slightly should still verify."""
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.0')
    exist.instance_type_id = 2
    # Usage launched a fraction later -- close enough to be a match.
    exist.usage.launched_at = decimal.Decimal('1.4')
    exist.usage.instance_type_id = 2
    self.mox.ReplayAll()

    self.assertIsNone(nova_verifier._verify_for_launch(exist))

    self.mox.VerifyAll()
|
||||
def test_verify_for_launch_launched_at_missmatch(self):
    """launched_at differences beyond tolerance raise FieldMismatch.

    (Name kept as-is -- including the "missmatch" typo -- so the test id
    does not change.)
    """
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    exist.instance_type_id = 2
    exist.usage.launched_at = decimal.Decimal('2.1')
    exist.usage.instance_type_id = 2
    self.mox.ReplayAll()

    # assertRaises keeps this consistent with the other mismatch tests
    # here and avoids the Python-2-only "except FieldMismatch, fm" form.
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_launch(exist)
    fm = cm.exception
    self.assertEqual(fm.field_name, 'launched_at')
    self.assertEqual(fm.expected, decimal.Decimal('1.1'))
    self.assertEqual(fm.actual, decimal.Decimal('2.1'))

    self.mox.VerifyAll()
|
||||
def test_verify_for_launch_instance_type_id_missmatch(self):
    """Differing instance_type_id values raise FieldMismatch.

    (Name kept as-is -- including the "missmatch" typo -- so the test id
    does not change.)
    """
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    exist.instance_type_id = 2
    exist.usage.launched_at = decimal.Decimal('1.1')
    exist.usage.instance_type_id = 3
    self.mox.ReplayAll()

    # assertRaises keeps this consistent with the other mismatch tests
    # here and avoids the Python-2-only "except FieldMismatch, fm" form.
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_launch(exist)
    fm = cm.exception
    self.assertEqual(fm.field_name, 'instance_type_id')
    self.assertEqual(fm.expected, 2)
    self.assertEqual(fm.actual, 3)

    self.mox.VerifyAll()
|
||||
def test_verify_for_launch_tenant_id_mismatch(self):
    """Disagreeing tenant ids should raise FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.tenant = TENANT_ID_1
    exist.usage = self.mox.CreateMockAnything()
    exist.usage.tenant = TENANT_ID_2
    self.mox.ReplayAll()

    with self.assertRaises(FieldMismatch) as caught:
        nova_verifier._verify_for_launch(exist)
    mismatch = caught.exception

    self.assertEqual(mismatch.field_name, 'tenant')
    self.assertEqual(mismatch.expected, TENANT_ID_1)
    self.assertEqual(mismatch.actual, TENANT_ID_2)

    self.mox.VerifyAll()
|
||||
def test_verify_for_launch_rax_options_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.rax_options = RAX_OPTIONS_1
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.rax_options = RAX_OPTIONS_2
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
exception = cm.exception
|
||||
|
||||
self.assertEqual(exception.field_name, 'rax_options')
|
||||
self.assertEqual(exception.expected, RAX_OPTIONS_1)
|
||||
self.assertEqual(exception.actual, RAX_OPTIONS_2)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_os_distro_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.os_distro = OS_DISTRO_1
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.os_distro = OS_DISTRO_2
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
exception = cm.exception
|
||||
|
||||
self.assertEqual(exception.field_name, 'os_distro')
|
||||
self.assertEqual(exception.expected, OS_DISTRO_1)
|
||||
self.assertEqual(exception.actual, OS_DISTRO_2)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_os_architecture_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.os_architecture = OS_ARCH_1
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.os_architecture = OS_ARCH_2
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
exception = cm.exception
|
||||
|
||||
self.assertEqual(exception.field_name, 'os_architecture')
|
||||
self.assertEqual(exception.expected, OS_ARCH_1)
|
||||
self.assertEqual(exception.actual, OS_ARCH_2)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_os_version_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.os_version = OS_VERSION_1
|
||||
|
||||
exist.usage = self.mox.CreateMockAnything()
|
||||
exist.usage.os_version = OS_VERSION_2
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with self.assertRaises(FieldMismatch) as cm:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
exception = cm.exception
|
||||
|
||||
self.assertEqual(exception.field_name, 'os_version')
|
||||
self.assertEqual(exception.expected, OS_VERSION_1)
|
||||
self.assertEqual(exception.actual, OS_VERSION_2)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_late_usage(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
exist.launched_at = launched_at
|
||||
exist.instance_type_id = 2
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(2)
|
||||
models.InstanceUsage.find(INSTANCE_ID_1, dt.dt_from_decimal(
|
||||
launched_at)).AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
usage = self.mox.CreateMockAnything()
|
||||
results.__getitem__(0).AndReturn(usage)
|
||||
usage.launched_at = decimal.Decimal('1.1')
|
||||
usage.instance_type_id = 2
|
||||
self.mox.ReplayAll()
|
||||
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_no_usage(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
exist.instance_type_id = 2
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1) \
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
try:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
self.fail()
|
||||
except NotFound, nf:
|
||||
self.assertEqual(nf.object_type, 'InstanceUsage')
|
||||
self.assertEqual(nf.search_params, {'instance': INSTANCE_ID_1})
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_launch_late_ambiguous_usage(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.usage = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
exist.launched_at = launched_at
|
||||
exist.instance_type_id = 2
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceUsage.objects.filter(
|
||||
instance=INSTANCE_ID_1).AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
models.InstanceUsage.find(
|
||||
INSTANCE_ID_1, dt.dt_from_decimal(launched_at)).AndReturn(results)
|
||||
results.count().AndReturn(2)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
try:
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
self.fail()
|
||||
except AmbiguousResults, nf:
|
||||
self.assertEqual(nf.object_type, 'InstanceUsage')
|
||||
search_params = {'instance': INSTANCE_ID_1,
|
||||
'launched_at': decimal.Decimal('1.1')}
|
||||
self.assertEqual(nf.search_params, search_params)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
exist.delete.launched_at = decimal.Decimal('1.1')
|
||||
exist.delete.deleted_at = decimal.Decimal('5.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete_found_delete(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceDeletes.find(INSTANCE_ID_1, dt.dt_from_decimal(
|
||||
launched_at)).AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
delete = self.mox.CreateMockAnything()
|
||||
delete.launched_at = decimal.Decimal('1.1')
|
||||
delete.deleted_at = decimal.Decimal('5.1')
|
||||
results.__getitem__(0).AndReturn(delete)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete_non_delete(self):
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('1.1')
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
exist.launched_at = launched_at
|
||||
exist.deleted_at = None
|
||||
exist.audit_period_ending = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceDeletes.find(
|
||||
INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
|
||||
dt.dt_from_decimal(deleted_at)).AndReturn(results)
|
||||
results.count().AndReturn(0)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete_non_delete_found_deletes(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = None
|
||||
exist.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('1.3')
|
||||
exist.launched_at = launched_at
|
||||
exist.deleted_at = None
|
||||
exist.audit_period_ending = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceDeletes.find(
|
||||
INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
|
||||
dt.dt_from_decimal(deleted_at)).AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
try:
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.fail()
|
||||
except VerificationException, ve:
|
||||
msg = 'Found InstanceDeletes for non-delete exist'
|
||||
self.assertEqual(ve.reason, msg)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete_launched_at_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
exist.delete.launched_at = decimal.Decimal('2.1')
|
||||
exist.delete.deleted_at = decimal.Decimal('5.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
try:
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.fail()
|
||||
except FieldMismatch, fm:
|
||||
self.assertEqual(fm.field_name, 'launched_at')
|
||||
self.assertEqual(fm.expected, decimal.Decimal('1.1'))
|
||||
self.assertEqual(fm.actual, decimal.Decimal('2.1'))
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_delete_deleted_at_mismatch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.delete = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
exist.deleted_at = decimal.Decimal('5.1')
|
||||
exist.delete.launched_at = decimal.Decimal('1.1')
|
||||
exist.delete.deleted_at = decimal.Decimal('6.1')
|
||||
self.mox.ReplayAll()
|
||||
|
||||
try:
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
self.fail()
|
||||
except FieldMismatch, fm:
|
||||
self.assertEqual(fm.field_name, 'deleted_at')
|
||||
self.assertEqual(fm.expected, decimal.Decimal('5.1'))
|
||||
self.assertEqual(fm.actual, decimal.Decimal('6.1'))
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
exists.launched_at = launched_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
|
||||
recs.count().AndReturn(1)
|
||||
reconcile = self.mox.CreateMockAnything()
|
||||
reconcile.deleted_at = None
|
||||
recs[0].AndReturn(reconcile)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
nova_verifier._verify_for_launch(exists, launch=reconcile,
|
||||
launch_type='InstanceReconcile')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
nova_verifier._verify_for_delete(exists, delete=None,
|
||||
delete_type='InstanceReconcile')
|
||||
self.mox.ReplayAll()
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_deleted(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
|
||||
recs.count().AndReturn(1)
|
||||
reconcile = self.mox.CreateMockAnything()
|
||||
reconcile.deleted_at = deleted_at
|
||||
recs[0].AndReturn(reconcile)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
nova_verifier._verify_for_launch(exists, launch=reconcile,
|
||||
launch_type='InstanceReconcile')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
nova_verifier._verify_for_delete(exists, delete=reconcile,
|
||||
delete_type='InstanceReconcile')
|
||||
self.mox.ReplayAll()
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_not_launched(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
exists.launched_at = None
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(VerificationException) as cm:
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.reason, 'Exists without a launched_at')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_ambiguous_results(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
|
||||
recs.count().AndReturn(2)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(AmbiguousResults) as cm:
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_instance_not_found(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(NotFound) as cm:
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_with_reconciled_data_reconcile_not_found(self):
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = decimal.Decimal('1.1')
|
||||
deleted_at = decimal.Decimal('2.1')
|
||||
exists.launched_at = launched_at
|
||||
exists.deleted_at = deleted_at
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
|
||||
.AndReturn(results)
|
||||
results.count().AndReturn(1)
|
||||
launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
|
||||
recs = self.mox.CreateMockAnything()
|
||||
models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
|
||||
recs.count().AndReturn(0)
|
||||
self.mox.ReplayAll()
|
||||
with self.assertRaises(NotFound) as cm:
|
||||
nova_verifier._verify_with_reconciled_data(exists)
|
||||
exception = cm.exception
|
||||
self.assertEquals(exception.object_type, 'InstanceReconcile')
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_pass(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_verified')
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
nova_verifier._verify_for_delete(exist)
|
||||
exist.mark_verified()
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_no_launched_at(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = None
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
exist.mark_failed(reason="Exists without a launched_at")
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fails_reconciled_verify_uses_second_exception(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
ex1 = VerificationException('test1')
|
||||
nova_verifier._verify_for_launch(exist).AndRaise(ex1)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(VerificationException('test2'))
|
||||
exist.mark_failed(reason='test2')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_launch_fail(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
verify_exception = VerificationException('test')
|
||||
nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
exist.mark_failed(reason='test')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fail_reconcile_success(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_verified')
|
||||
verify_exception = VerificationException('test')
|
||||
nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)
|
||||
exist.mark_verified(reconciled=True)
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertTrue(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_fail_with_reconciled_data_exception(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
verify_exception = VerificationException('test')
|
||||
nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(Exception())
|
||||
exist.mark_failed(reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_delete_fail(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
verify_exception = VerificationException('test')
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
nova_verifier._verify_for_delete(exist).AndRaise(verify_exception)
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
|
||||
nova_verifier._verify_with_reconciled_data(exist)\
|
||||
.AndRaise(NotFound('InstanceReconcile', {}))
|
||||
exist.mark_failed(reason='test')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_exception_during_launch(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
nova_verifier._verify_for_launch(exist).AndRaise(Exception())
|
||||
exist.mark_failed(reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_exception_during_delete(self):
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.launched_at = decimal.Decimal('1.1')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
|
||||
self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
|
||||
self.mox.StubOutWithMock(exist, 'mark_failed')
|
||||
nova_verifier._verify_for_launch(exist)
|
||||
nova_verifier._verify_for_delete(exist).AndRaise(Exception())
|
||||
exist.mark_failed(reason='Exception')
|
||||
self.mox.ReplayAll()
|
||||
result, exists = nova_verifier._verify(exist)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_range_without_callback(self):
|
||||
when_max = datetime.datetime.utcnow()
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceExists.PENDING = 'pending'
|
||||
models.InstanceExists.VERIFYING = 'verifying'
|
||||
models.InstanceExists.find(
|
||||
ending_max=when_max, status='pending').AndReturn(results)
|
||||
results.count().AndReturn(2)
|
||||
exist1 = self.mox.CreateMockAnything()
|
||||
exist2 = self.mox.CreateMockAnything()
|
||||
results.__getslice__(0, 1000).AndReturn(results)
|
||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
||||
exist1.update_status('verifying')
|
||||
exist2.update_status('verifying')
|
||||
exist1.save()
|
||||
exist2.save()
|
||||
self.pool.apply_async(nova_verifier._verify, args=(exist1,),
|
||||
callback=None)
|
||||
self.pool.apply_async(nova_verifier._verify, args=(exist2,),
|
||||
callback=None)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.verify_for_range(when_max)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_verify_for_range_with_callback(self):
|
||||
callback = self.mox.CreateMockAnything()
|
||||
when_max = datetime.datetime.utcnow()
|
||||
results = self.mox.CreateMockAnything()
|
||||
models.InstanceExists.PENDING = 'pending'
|
||||
models.InstanceExists.VERIFYING = 'verifying'
|
||||
models.InstanceExists.find(
|
||||
ending_max=when_max, status='pending').AndReturn(results)
|
||||
results.count().AndReturn(2)
|
||||
exist1 = self.mox.CreateMockAnything()
|
||||
exist2 = self.mox.CreateMockAnything()
|
||||
results.__getslice__(0, 1000).AndReturn(results)
|
||||
results.__iter__().AndReturn([exist1, exist2].__iter__())
|
||||
exist1.update_status('verifying')
|
||||
exist2.update_status('verifying')
|
||||
exist1.save()
|
||||
exist2.save()
|
||||
self.pool.apply_async(nova_verifier._verify, args=(exist1,),
|
||||
callback=callback)
|
||||
self.pool.apply_async(nova_verifier._verify, args=(exist2,),
|
||||
callback=callback)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.verify_for_range(when_max, callback=callback)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_reconcile_failed(self):
|
||||
self.verifier.reconcile = True
|
||||
exists1 = self.mox.CreateMockAnything()
|
||||
exists2 = self.mox.CreateMockAnything()
|
||||
self.verifier.failed = [exists1, exists2]
|
||||
self.reconciler.failed_validation(exists1)
|
||||
self.reconciler.failed_validation(exists2)
|
||||
self.mox.ReplayAll()
|
||||
self.verifier.reconcile_failed()
|
||||
self.assertEqual(len(self.verifier.failed), 0)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_send_verified_notification_routing_keys(self):
|
||||
connection = self.mox.CreateMockAnything()
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.raw = self.mox.CreateMockAnything()
|
||||
exist_dict = [
|
||||
'monitor.info',
|
||||
{
|
||||
'event_type': 'test',
|
||||
'message_id': 'some_uuid'
|
||||
}
|
||||
]
|
||||
exist_str = json.dumps(exist_dict)
|
||||
exist.raw.json = exist_str
|
||||
self.mox.StubOutWithMock(uuid, 'uuid4')
|
||||
uuid.uuid4().AndReturn('some_other_uuid')
|
||||
self.mox.StubOutWithMock(kombu.pools, 'producers')
|
||||
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
|
||||
routing_keys = ['notifications.info', 'monitor.info']
|
||||
for key in routing_keys:
|
||||
producer = self.mox.CreateMockAnything()
|
||||
producer.channel = self.mox.CreateMockAnything()
|
||||
kombu.pools.producers[connection].AndReturn(producer)
|
||||
producer.acquire(block=True).AndReturn(producer)
|
||||
producer.__enter__().AndReturn(producer)
|
||||
kombu.common.maybe_declare(exchange, producer.channel)
|
||||
message = {'event_type': 'compute.instance.exists.verified.old',
|
||||
'message_id': 'some_other_uuid',
|
||||
'original_message_id': 'some_uuid'}
|
||||
producer.publish(message, key)
|
||||
producer.__exit__(None, None, None)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
self.verifier.send_verified_notification(exist, exchange, connection,
|
||||
routing_keys=routing_keys)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_send_verified_notification_default_routing_key(self):
|
||||
connection = self.mox.CreateMockAnything()
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
exist = self.mox.CreateMockAnything()
|
||||
exist.raw = self.mox.CreateMockAnything()
|
||||
exist_dict = [
|
||||
'monitor.info',
|
||||
{
|
||||
'event_type': 'test',
|
||||
'message_id': 'some_uuid'
|
||||
}
|
||||
]
|
||||
exist_str = json.dumps(exist_dict)
|
||||
exist.raw.json = exist_str
|
||||
self.mox.StubOutWithMock(kombu.pools, 'producers')
|
||||
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
|
||||
producer = self.mox.CreateMockAnything()
|
||||
producer.channel = self.mox.CreateMockAnything()
|
||||
kombu.pools.producers[connection].AndReturn(producer)
|
||||
producer.acquire(block=True).AndReturn(producer)
|
||||
producer.__enter__().AndReturn(producer)
|
||||
kombu.common.maybe_declare(exchange, producer.channel)
|
||||
self.mox.StubOutWithMock(uuid, 'uuid4')
|
||||
uuid.uuid4().AndReturn('some_other_uuid')
|
||||
message = {'event_type': 'compute.instance.exists.verified.old',
|
||||
'message_id': 'some_other_uuid',
|
||||
'original_message_id': 'some_uuid'}
|
||||
producer.publish(message, exist_dict[0])
|
||||
producer.__exit__(None, None, None)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
self.verifier.send_verified_notification(exist, exchange, connection)
|
||||
self.mox.VerifyAll()
|
@ -19,7 +19,6 @@
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import datetime
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
import requests
|
||||
@ -30,6 +29,7 @@ from stacktach import utils as stackutils
|
||||
from stacktach.reconciler import exceptions
|
||||
from stacktach.reconciler import nova
|
||||
from stacktach.reconciler import utils as rec_utils
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
from tests.unit import utils
|
||||
from tests.unit.utils import INSTANCE_ID_1
|
||||
from tests.unit.utils import TENANT_ID_1
|
||||
@ -45,7 +45,7 @@ DEFAULT_OS_VERSION = "1.1"
|
||||
DEFAULT_RAX_OPTIONS = "rax_ops"
|
||||
|
||||
|
||||
class ReconcilerTestCase(unittest.TestCase):
|
||||
class ReconcilerTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.client = self.mox.CreateMockAnything()
|
||||
@ -229,6 +229,21 @@ class ReconcilerTestCase(unittest.TestCase):
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_missing_exists_for_instance_region_not_found(self):
|
||||
launch_id = 1
|
||||
beginning_d = utils.decimal_utc()
|
||||
launch = self.mox.CreateMockAnything()
|
||||
launch.instance = INSTANCE_ID_1
|
||||
launch.launched_at = beginning_d - (60*60)
|
||||
launch.instance_type_id = 1
|
||||
models.InstanceUsage.objects.get(id=launch_id).AndReturn(launch)
|
||||
launch.deployment().AndReturn(None)
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.missing_exists_for_instance(launch_id,
|
||||
beginning_d)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation(self):
|
||||
exists = self._fake_usage(is_exists=True, mock_deployment=True)
|
||||
launched_at = exists.launched_at
|
||||
@ -344,6 +359,21 @@ class ReconcilerTestCase(unittest.TestCase):
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_failed_validation_region_not_found(self):
|
||||
beginning_d = utils.decimal_utc()
|
||||
exists = self.mox.CreateMockAnything()
|
||||
exists.instance = INSTANCE_ID_1
|
||||
launched_at = beginning_d - (60*60)
|
||||
exists.launched_at = launched_at
|
||||
exists.instance_type_id = 1
|
||||
exists.deleted_at = None
|
||||
exists.deployment().AndReturn(None)
|
||||
ex = exceptions.NotFound()
|
||||
self.mox.ReplayAll()
|
||||
result = self.reconciler.failed_validation(exists)
|
||||
self.assertFalse(result)
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_fields_match(self):
|
||||
exists = self._fake_usage(is_exists=True)
|
||||
kwargs = {'launched_at': exists.launched_at}
|
||||
@ -415,7 +445,7 @@ json_bridge_config = {
|
||||
}
|
||||
|
||||
|
||||
class NovaJSONBridgeClientTestCase(unittest.TestCase):
|
||||
class NovaJSONBridgeClientTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.client = nova.JSONBridgeClient(json_bridge_config)
|
||||
|
@ -1,14 +1,12 @@
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
from unittest import TestCase
|
||||
import mox
|
||||
from stacktach import stacklog
|
||||
import __builtin__
|
||||
from stacktach.stacklog import ExchangeLogger
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class StacklogTestCase(TestCase):
|
||||
class StacklogTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
@ -30,7 +28,7 @@ class StacklogTestCase(TestCase):
|
||||
os.remove(file)
|
||||
|
||||
|
||||
class ExchangeLoggerTestCase(TestCase):
|
||||
class ExchangeLoggerTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
@ -44,7 +42,7 @@ class ExchangeLoggerTestCase(TestCase):
|
||||
mock_logger.setLevel(logging.DEBUG)
|
||||
self.mox.StubOutClassWithMocks(logging.handlers,
|
||||
'TimedRotatingFileHandler')
|
||||
filename = "{0}.log".format(name)
|
||||
filename = "/tmp/{0}.log".format(name)
|
||||
handler = logging.handlers.TimedRotatingFileHandler(
|
||||
filename, backupCount=3, interval=1, when='midnight')
|
||||
self.mox.StubOutClassWithMocks(logging, 'Formatter')
|
||||
|
@ -20,7 +20,6 @@
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
|
||||
@ -40,9 +39,10 @@ from utils import IMAGE_UUID_1
|
||||
from stacktach import stacklog
|
||||
from stacktach import notification
|
||||
from stacktach import views
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class StacktachRawParsingTestCase(unittest.TestCase):
|
||||
class StacktachRawParsingTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
views.STACKDB = self.mox.CreateMockAnything()
|
||||
@ -101,7 +101,7 @@ class StacktachRawParsingTestCase(unittest.TestCase):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class StacktachLifecycleTestCase(unittest.TestCase):
|
||||
class StacktachLifecycleTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
views.STACKDB = self.mox.CreateMockAnything()
|
||||
@ -287,7 +287,7 @@ class StacktachLifecycleTestCase(unittest.TestCase):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class StacktachUsageParsingTestCase(unittest.TestCase):
|
||||
class StacktachUsageParsingTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
views.STACKDB = self.mox.CreateMockAnything()
|
||||
@ -830,7 +830,7 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class StacktachImageUsageParsingTestCase(unittest.TestCase):
|
||||
class StacktachImageUsageParsingTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
views.STACKDB = self.mox.CreateMockAnything()
|
||||
|
@ -18,17 +18,15 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import datetime
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
|
||||
from stacktach import db
|
||||
from stacktach import stacklog
|
||||
from stacktach import models
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class StacktachDBTestCase(unittest.TestCase):
|
||||
class StacktachDBTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.log = self.mox.CreateMockAnything()
|
||||
|
@ -18,17 +18,16 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
|
||||
from stacktach import utils as stacktach_utils
|
||||
from utils import INSTANCE_ID_1
|
||||
from utils import MESSAGE_ID_1
|
||||
from utils import REQUEST_ID_1
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class StacktachUtilsTestCase(unittest.TestCase):
|
||||
class StacktachUtilsTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
|
@ -21,7 +21,6 @@
|
||||
import datetime
|
||||
import decimal
|
||||
import json
|
||||
import unittest
|
||||
|
||||
import mox
|
||||
|
||||
@ -33,8 +32,10 @@ from utils import INSTANCE_ID_1
|
||||
from utils import INSTANCE_ID_2
|
||||
from utils import REQUEST_ID_1
|
||||
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
class StackyServerTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
|
||||
@ -344,7 +345,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
"2013-07-17 10:16:10.717219", "deployment",
|
||||
"test.start", "example.com", "state"]]
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'uuid': INSTANCE_ID_1}
|
||||
fake_request.GET = {'uuid': INSTANCE_ID_1, 'service': 'glance'}
|
||||
result = self.mox.CreateMockAnything()
|
||||
models.GlanceRawData.objects.select_related().AndReturn(result)
|
||||
result.filter(uuid=INSTANCE_ID_1).AndReturn(result)
|
||||
@ -355,7 +356,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_uuid(fake_request,'glance')
|
||||
resp = stacky_server.do_uuid(fake_request)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -377,7 +378,8 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'uuid': INSTANCE_ID_1,
|
||||
'when_min': '1.1',
|
||||
'when_max': '2.1'}
|
||||
'when_max': '2.1',
|
||||
'service': 'glance'}
|
||||
result = self.mox.CreateMockAnything()
|
||||
models.GlanceRawData.objects.select_related().AndReturn(result)
|
||||
result.filter(uuid=INSTANCE_ID_1,
|
||||
@ -390,7 +392,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_uuid(fake_request,'glance')
|
||||
resp = stacky_server.do_uuid(fake_request)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -716,6 +718,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_show(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {}
|
||||
raw = self._create_raw()
|
||||
models.RawData.objects.get(id=1).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
@ -729,11 +732,12 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_show_for_glance_rawdata(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'service':'glance'}
|
||||
raw = self._create_raw()
|
||||
models.GlanceRawData.objects.get(id=1).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_show(fake_request, 1, 'glance')
|
||||
resp = stacky_server.do_show(fake_request, 1)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -742,11 +746,12 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_show_for_generic_rawdata(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'service':'generic'}
|
||||
raw = self._create_raw()
|
||||
models.GenericRawData.objects.get(id=1).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_show(fake_request, 1, 'generic')
|
||||
resp = stacky_server.do_show(fake_request, 1)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -755,6 +760,8 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_show_should_return_empty_result_on_object_not_found_exception(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {}
|
||||
|
||||
raw = self._create_raw()
|
||||
models.RawData.objects.get(id=1).AndReturn(raw)
|
||||
self.mox.ReplayAll()
|
||||
@ -768,7 +775,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_watch_for_glance(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {}
|
||||
fake_request.GET = {'service': 'glance'}
|
||||
self.mox.StubOutWithMock(stacky_server, 'get_deployments')
|
||||
deployment1 = self.mox.CreateMockAnything()
|
||||
deployment1.id = 1
|
||||
@ -785,7 +792,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
results.__iter__().AndReturn([self._create_raw()].__iter__())
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_watch(fake_request, 0, 'glance')
|
||||
resp = stacky_server.do_watch(fake_request, 0)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
self.assertEqual(len(json_resp), 3)
|
||||
@ -875,7 +882,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
|
||||
def test_do_watch_with_event_name(self):
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'event_name': 'test.start'}
|
||||
fake_request.GET = {'event_name': 'test.start','service': 'nova'}
|
||||
self.mox.StubOutWithMock(stacky_server, 'get_deployments')
|
||||
deployment1 = self.mox.CreateMockAnything()
|
||||
deployment1.id = 1
|
||||
@ -893,7 +900,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
results.__iter__().AndReturn([self._create_raw()].__iter__())
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.do_watch(fake_request, 0, 'nova')
|
||||
resp = stacky_server.do_watch(fake_request, 0)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
self.assertEqual(len(json_resp), 3)
|
||||
@ -1276,7 +1283,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.search(fake_request, 'nova')
|
||||
resp = stacky_server.search(fake_request)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -1299,7 +1306,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.search(fake_request, 'nova')
|
||||
resp = stacky_server.search(fake_request)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
@ -1318,7 +1325,8 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
"2013-07-17 10:16:10.717219", "deployment",
|
||||
"test.start", "example.com", "active", None, None]]
|
||||
fake_request = self.mox.CreateMockAnything()
|
||||
fake_request.GET = {'field': 'tenant', 'value': 'tenant', 'limit': '2'}
|
||||
fake_request.GET = {'field': 'tenant', 'value': 'tenant', 'limit': '2',
|
||||
'service': 'nova'}
|
||||
raw1 = self._create_raw()
|
||||
raw2 = self._create_raw()
|
||||
raw3 = self._create_raw()
|
||||
@ -1330,7 +1338,7 @@ class StackyServerTestCase(unittest.TestCase):
|
||||
raw2.search_results(search_result, mox.IgnoreArg(),' ').AndReturn(search_result_2)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
resp = stacky_server.search(fake_request, 'nova')
|
||||
resp = stacky_server.search(fake_request)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
json_resp = json.loads(resp.content)
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -19,7 +19,6 @@
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import json
|
||||
import unittest
|
||||
|
||||
import kombu
|
||||
import mox
|
||||
@ -27,15 +26,22 @@ import mox
|
||||
from stacktach import db
|
||||
from stacktach import views
|
||||
import worker.worker as worker
|
||||
from tests.unit import StacktachBaseTestCase
|
||||
|
||||
|
||||
class ConsumerTestCase(unittest.TestCase):
|
||||
class ConsumerTestCase(StacktachBaseTestCase):
|
||||
def setUp(self):
|
||||
self.mox = mox.Mox()
|
||||
|
||||
def tearDown(self):
|
||||
self.mox.UnsetStubs()
|
||||
|
||||
def _test_topics(self):
|
||||
return [
|
||||
dict(queue="queue1", routing_key="monitor.info"),
|
||||
dict(queue="queue2", routing_key="monitor.error")
|
||||
]
|
||||
|
||||
def test_get_consumers(self):
|
||||
created_queues = []
|
||||
created_callbacks = []
|
||||
@ -49,15 +55,14 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
self.mox.StubOutWithMock(worker.Consumer, '_create_exchange')
|
||||
self.mox.StubOutWithMock(worker.Consumer, '_create_queue')
|
||||
consumer = worker.Consumer('test', None, None, True, {}, "nova",
|
||||
["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
self._test_topics())
|
||||
exchange = self.mox.CreateMockAnything()
|
||||
consumer._create_exchange('nova', 'topic').AndReturn(exchange)
|
||||
info_queue = self.mox.CreateMockAnything()
|
||||
error_queue = self.mox.CreateMockAnything()
|
||||
consumer._create_queue('stacktach_nova', exchange, 'monitor.info')\
|
||||
consumer._create_queue('queue1', exchange, 'monitor.info')\
|
||||
.AndReturn(info_queue)
|
||||
consumer._create_queue('stacktach_nova', exchange, 'monitor.error')\
|
||||
consumer._create_queue('queue2', exchange, 'monitor.error')\
|
||||
.AndReturn(error_queue)
|
||||
self.mox.ReplayAll()
|
||||
consumers = consumer.get_consumers(Consumer, None)
|
||||
@ -73,8 +78,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
def test_create_exchange(self):
|
||||
args = {'key': 'value'}
|
||||
consumer = worker.Consumer('test', None, None, True, args, 'nova',
|
||||
["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
self._test_topics())
|
||||
|
||||
self.mox.StubOutClassWithMocks(kombu.entity, 'Exchange')
|
||||
exchange = kombu.entity.Exchange('nova', type='topic', exclusive=False,
|
||||
@ -91,8 +95,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
exclusive=False, routing_key='routing.key',
|
||||
queue_arguments={})
|
||||
consumer = worker.Consumer('test', None, None, True, {}, 'nova',
|
||||
["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
self._test_topics())
|
||||
self.mox.ReplayAll()
|
||||
actual_queue = consumer._create_queue('name', exchange, 'routing.key',
|
||||
exclusive=False,
|
||||
@ -109,8 +112,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
exclusive=False, routing_key='routing.key',
|
||||
queue_arguments=queue_args)
|
||||
consumer = worker.Consumer('test', None, None, True, queue_args,
|
||||
'nova', ["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
'nova', self._test_topics())
|
||||
self.mox.ReplayAll()
|
||||
actual_queue = consumer._create_queue('name', exchange, 'routing.key',
|
||||
exclusive=False,
|
||||
@ -126,8 +128,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
|
||||
exchange = 'nova'
|
||||
consumer = worker.Consumer('test', None, deployment, True, {},
|
||||
exchange, ["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
exchange, self._test_topics())
|
||||
routing_key = 'monitor.info'
|
||||
message.delivery_info = {'routing_key': routing_key}
|
||||
body_dict = {u'key': u'value'}
|
||||
@ -165,7 +166,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
'rabbit_password': 'rabbit',
|
||||
'rabbit_virtual_host': '/',
|
||||
"services": ["nova"],
|
||||
"topics": {"nova": ["monitor.info", "monitor.error"]}
|
||||
"topics": {"nova": self._test_topics()}
|
||||
}
|
||||
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
@ -188,8 +189,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
exchange = 'nova'
|
||||
consumer = worker.Consumer(config['name'], conn, deployment,
|
||||
config['durable_queue'], {}, exchange,
|
||||
["monitor.info", "monitor.error"],
|
||||
"stacktach_")
|
||||
self._test_topics())
|
||||
consumer.run()
|
||||
worker.continue_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
@ -208,7 +208,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
'queue_arguments': {'x-ha-policy': 'all'},
|
||||
'queue_name_prefix': "test_name_",
|
||||
"services": ["nova"],
|
||||
"topics": {"nova": ["monitor.info", "monitor.error"]}
|
||||
"topics": {"nova": self._test_topics()}
|
||||
}
|
||||
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
|
||||
deployment = self.mox.CreateMockAnything()
|
||||
@ -232,8 +232,7 @@ class ConsumerTestCase(unittest.TestCase):
|
||||
consumer = worker.Consumer(config['name'], conn, deployment,
|
||||
config['durable_queue'],
|
||||
config['queue_arguments'], exchange,
|
||||
["monitor.info", "monitor.error"],
|
||||
"test_name_")
|
||||
self._test_topics())
|
||||
consumer.run()
|
||||
worker.continue_running().AndReturn(False)
|
||||
self.mox.ReplayAll()
|
||||
|
@ -25,7 +25,7 @@ TENANT_ID_2 = 'testtenantid2'
|
||||
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
|
||||
IMAGE_UUID_1 = "1"
|
||||
IMAGE_UUID_1 = "12345678-6352-4dbc-8271-96cc54bf14cd"
|
||||
|
||||
INSTANCE_ID_1 = "08f685d9-6352-4dbc-8271-96cc54bf14cd"
|
||||
INSTANCE_ID_2 = "515adf96-41d3-b86d-5467-e584edc61dab"
|
||||
@ -56,6 +56,14 @@ OS_VERSION_1 = "1"
|
||||
OS_VERSION_2 = "2"
|
||||
|
||||
TIMESTAMP_1 = "2013-06-20 17:31:57.939614"
|
||||
SETTLE_TIME = 5
|
||||
SETTLE_UNITS = "minutes"
|
||||
TICK_TIME = 10
|
||||
HOST = '10.0.0.1'
|
||||
PORT = '5672'
|
||||
VIRTUAL_HOST = '/'
|
||||
USERID = 'rabbit'
|
||||
PASSWORD = 'password'
|
||||
|
||||
def decimal_utc(t = datetime.datetime.utcnow()):
|
||||
return dt.dt_to_decimal(t)
|
||||
@ -137,4 +145,29 @@ def create_tracker(mox, request_id, lifecycle, start, last_timing=None,
|
||||
tracker.start=start
|
||||
tracker.last_timing=last_timing
|
||||
tracker.duration=duration
|
||||
return tracker
|
||||
return tracker
|
||||
|
||||
|
||||
class FakeVerifierConfig(object):
|
||||
def __init__(self, host, port, virtual_host, userid, password, tick_time,
|
||||
settle_time, settle_units, durable_queue, topics, notifs):
|
||||
self.host = lambda: host
|
||||
self.port = lambda: port
|
||||
self.virtual_host = lambda: virtual_host
|
||||
self.userid = lambda: userid
|
||||
self.password = lambda: password
|
||||
self.pool_size = lambda: 5
|
||||
self.tick_time = lambda: tick_time
|
||||
self.settle_time = lambda: settle_time
|
||||
self.settle_units = lambda: settle_units
|
||||
self.durable_queue = lambda: durable_queue
|
||||
self.topics = lambda: topics
|
||||
self.enable_notifications = lambda: notifs
|
||||
|
||||
|
||||
def make_verifier_config(notifs):
|
||||
topics = {'exchange': ['notifications.info']}
|
||||
config = FakeVerifierConfig(HOST, PORT, VIRTUAL_HOST, USERID,
|
||||
PASSWORD, TICK_TIME, SETTLE_TIME,
|
||||
SETTLE_UNITS, True, topics, notifs)
|
||||
return config
|
153
verifier/base_verifier.py
Normal file
153
verifier/base_verifier.py
Normal file
@ -0,0 +1,153 @@
|
||||
# Copyright (c) 2012 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import multiprocessing
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
|
||||
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir, os.pardir))
|
||||
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
|
||||
sys.path.insert(0, POSSIBLE_TOPDIR)
|
||||
|
||||
from stacktach import stacklog, message_service
|
||||
LOG = stacklog.get_logger('verifier')
|
||||
|
||||
|
||||
def _has_field(d1, d2, field1, field2=None):
|
||||
if not field2:
|
||||
field2 = field1
|
||||
|
||||
return d1.get(field1) is not None and d2.get(field2) is not None
|
||||
|
||||
|
||||
def _verify_simple_field(d1, d2, field1, field2=None):
|
||||
if not field2:
|
||||
field2 = field1
|
||||
|
||||
if not _has_field(d1, d2, field1, field2):
|
||||
return False
|
||||
else:
|
||||
if d1[field1] != d2[field2]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _verify_date_field(d1, d2, same_second=False):
|
||||
if d1 and d2:
|
||||
if d1 == d2:
|
||||
return True
|
||||
elif same_second and int(d1) == int(d2):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class Verifier(object):
|
||||
def __init__(self, config, pool=None, reconciler=None):
|
||||
self.config = config
|
||||
self.pool = pool or multiprocessing.Pool(config.pool_size())
|
||||
self.enable_notifications = config.enable_notifications()
|
||||
self.reconciler = reconciler
|
||||
self.results = []
|
||||
self.failed = []
|
||||
|
||||
def clean_results(self):
|
||||
pending = []
|
||||
finished = 0
|
||||
successful = 0
|
||||
|
||||
for result in self.results:
|
||||
if result.ready():
|
||||
finished += 1
|
||||
if result.successful():
|
||||
(verified, exists) = result.get()
|
||||
if self.reconciler and not verified:
|
||||
self.failed.append(exists)
|
||||
successful += 1
|
||||
else:
|
||||
pending.append(result)
|
||||
|
||||
self.results = pending
|
||||
errored = finished - successful
|
||||
return len(self.results), successful, errored
|
||||
|
||||
def _keep_running(self):
|
||||
return True
|
||||
|
||||
def _utcnow(self):
|
||||
return datetime.datetime.utcnow()
|
||||
|
||||
def _run(self, callback=None):
|
||||
tick_time = self.config.tick_time()
|
||||
settle_units = self.config.settle_units()
|
||||
settle_time = self.config.settle_time()
|
||||
while self._keep_running():
|
||||
with transaction.commit_on_success():
|
||||
now = self._utcnow()
|
||||
kwargs = {settle_units: settle_time}
|
||||
ending_max = now - datetime.timedelta(**kwargs)
|
||||
new = self.verify_for_range(ending_max, callback=callback)
|
||||
values = ((self.exchange(), new,) + self.clean_results())
|
||||
if self.reconciler:
|
||||
self.reconcile_failed()
|
||||
msg = "%s: N: %s, P: %s, S: %s, E: %s" % values
|
||||
LOG.info(msg)
|
||||
time.sleep(tick_time)
|
||||
|
||||
def run(self):
|
||||
if self.enable_notifications:
|
||||
exchange_name = self.exchange()
|
||||
exchange = message_service.create_exchange(
|
||||
exchange_name, 'topic',
|
||||
durable=self.config.durable_queue())
|
||||
routing_keys = self.config.topics()[exchange_name]
|
||||
|
||||
with message_service.create_connection(
|
||||
self.config.host(), self.config.port(),
|
||||
self.config.userid(), self.config.password(),
|
||||
"librabbitmq", self.config.virtual_host()) as conn:
|
||||
def callback(result):
|
||||
(verified, exist) = result
|
||||
if verified:
|
||||
self.send_verified_notification(
|
||||
exist, conn, exchange, routing_keys=routing_keys)
|
||||
|
||||
try:
|
||||
self._run(callback=callback)
|
||||
except Exception, e:
|
||||
print e
|
||||
raise e
|
||||
else:
|
||||
self._run()
|
||||
|
||||
def verify_for_range(self, ending_max, callback=None):
|
||||
pass
|
||||
|
||||
def reconcile_failed(self):
|
||||
pass
|
||||
|
||||
def exchange(self):
|
||||
pass
|
89
verifier/config.py
Normal file
89
verifier/config.py
Normal file
@ -0,0 +1,89 @@
|
||||
# Copyright (c) 2013 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
import json
|
||||
import os
|
||||
|
||||
config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
|
||||
'stacktach_verifier_config.json')
|
||||
try:
|
||||
from local_settings import *
|
||||
config_filename = STACKTACH_VERIFIER_CONFIG
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
config = None
|
||||
with open(config_filename, "r") as f:
|
||||
config = json.load(f)
|
||||
|
||||
|
||||
def enable_notifications():
|
||||
return config['enable_notifications']
|
||||
|
||||
|
||||
def topics():
|
||||
return config['rabbit']['topics']
|
||||
|
||||
|
||||
def tick_time():
|
||||
return config['tick_time']
|
||||
|
||||
|
||||
def settle_units():
|
||||
return config['settle_units']
|
||||
|
||||
|
||||
def settle_time():
|
||||
return config['settle_time']
|
||||
|
||||
|
||||
def reconcile():
|
||||
return config.get('reconcile', False)
|
||||
|
||||
|
||||
def reconciler_config():
|
||||
return config.get(
|
||||
'reconciler_config', '/etc/stacktach/reconciler_config.json')
|
||||
|
||||
def pool_size():
|
||||
return config['pool_size']
|
||||
|
||||
|
||||
def durable_queue():
|
||||
return config['rabbit']['durable_queue']
|
||||
|
||||
|
||||
def host():
|
||||
return config['rabbit']['host']
|
||||
|
||||
|
||||
def port():
|
||||
return config['rabbit']['port']
|
||||
|
||||
|
||||
def userid():
|
||||
return config['rabbit']['userid']
|
||||
|
||||
|
||||
def password():
|
||||
return config['rabbit']['password']
|
||||
|
||||
|
||||
def virtual_host():
|
||||
return config['rabbit']['virtual_host']
|
@ -1,529 +0,0 @@
|
||||
# Copyright (c) 2012 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from django.db import transaction
|
||||
import kombu.common
|
||||
import kombu.entity
|
||||
import kombu.pools
|
||||
import multiprocessing
|
||||
|
||||
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir, os.pardir))
|
||||
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
|
||||
sys.path.insert(0, POSSIBLE_TOPDIR)
|
||||
|
||||
from stacktach import stacklog
|
||||
|
||||
stacklog.set_default_logger_name('verifier')
|
||||
LOG = stacklog.get_logger()
|
||||
|
||||
from stacktach import models
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
from stacktach import reconciler
|
||||
from verifier import AmbiguousResults
|
||||
from verifier import FieldMismatch
|
||||
from verifier import NotFound
|
||||
from verifier import VerificationException
|
||||
|
||||
|
||||
def _list_exists(ending_max=None, status=None):
|
||||
params = {}
|
||||
if ending_max:
|
||||
params['audit_period_ending__lte'] = dt.dt_to_decimal(ending_max)
|
||||
if status:
|
||||
params['status'] = status
|
||||
return models.InstanceExists.objects.select_related()\
|
||||
.filter(**params).order_by('id')
|
||||
|
||||
|
||||
def _find_launch(instance, launched):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
return models.InstanceUsage.objects.filter(**params)
|
||||
|
||||
|
||||
def _find_reconcile(instance, launched):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
return models.InstanceReconcile.objects.filter(**params)
|
||||
|
||||
|
||||
def _find_delete(instance, launched, deleted_max=None):
|
||||
start = launched - datetime.timedelta(microseconds=launched.microsecond)
|
||||
end = start + datetime.timedelta(microseconds=999999)
|
||||
params = {'instance': instance,
|
||||
'launched_at__gte': dt.dt_to_decimal(start),
|
||||
'launched_at__lte': dt.dt_to_decimal(end)}
|
||||
if deleted_max:
|
||||
params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
|
||||
return models.InstanceDeletes.objects.filter(**params)
|
||||
|
||||
|
||||
def _mark_exist_verified(exist,
|
||||
reconciled=False,
|
||||
reason=None):
|
||||
if not reconciled:
|
||||
exist.status = models.InstanceExists.VERIFIED
|
||||
else:
|
||||
exist.status = models.InstanceExists.RECONCILED
|
||||
if reason is not None:
|
||||
exist.fail_reason = reason
|
||||
|
||||
exist.save()
|
||||
|
||||
|
||||
def _mark_exist_failed(exist, reason=None):
|
||||
exist.status = models.InstanceExists.FAILED
|
||||
if reason:
|
||||
exist.fail_reason = reason
|
||||
exist.save()
|
||||
|
||||
|
||||
def _has_field(d1, d2, field1, field2=None):
|
||||
if not field2:
|
||||
field2 = field1
|
||||
|
||||
return d1.get(field1) is not None and d2.get(field2) is not None
|
||||
|
||||
|
||||
def _verify_simple_field(d1, d2, field1, field2=None):
|
||||
if not field2:
|
||||
field2 = field1
|
||||
|
||||
if not _has_field(d1, d2, field1, field2):
|
||||
return False
|
||||
else:
|
||||
if d1[field1] != d2[field2]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _verify_date_field(d1, d2, same_second=False):
|
||||
if d1 and d2:
|
||||
if d1 == d2:
|
||||
return True
|
||||
elif same_second and int(d1) == int(d2):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _verify_field_mismatch(exists, launch):
|
||||
if not _verify_date_field(launch.launched_at, exists.launched_at,
|
||||
same_second=True):
|
||||
raise FieldMismatch('launched_at', exists.launched_at,
|
||||
launch.launched_at)
|
||||
|
||||
if launch.instance_type_id != exists.instance_type_id:
|
||||
raise FieldMismatch('instance_type_id', exists.instance_type_id,
|
||||
launch.instance_type_id)
|
||||
|
||||
if launch.tenant != exists.tenant:
|
||||
raise FieldMismatch('tenant', exists.tenant,
|
||||
launch.tenant)
|
||||
|
||||
if launch.rax_options != exists.rax_options:
|
||||
raise FieldMismatch('rax_options', exists.rax_options,
|
||||
launch.rax_options)
|
||||
|
||||
if launch.os_architecture != exists.os_architecture:
|
||||
raise FieldMismatch('os_architecture', exists.os_architecture,
|
||||
launch.os_architecture)
|
||||
|
||||
if launch.os_version != exists.os_version:
|
||||
raise FieldMismatch('os_version', exists.os_version,
|
||||
launch.os_version)
|
||||
|
||||
if launch.os_distro != exists.os_distro:
|
||||
raise FieldMismatch('os_distro', exists.os_distro,
|
||||
launch.os_distro)
|
||||
|
||||
|
||||
def _verify_for_launch(exist, launch=None, launch_type="InstanceUsage"):
    """Verify *exist* against its matching launch (usage) record.

    If *launch* is not supplied, use the usage record linked to the exists
    row, or else look one up by instance + launched_at. Raises NotFound when
    no usage record matches, AmbiguousResults when more than one does, and
    FieldMismatch (via _verify_field_mismatch) when fields disagree.
    *launch_type* is only used to label the raised exceptions.
    """
    if not launch and exist.usage:
        # The exists row is already linked to its usage record.
        launch = exist.usage
    elif not launch:
        if models.InstanceUsage.objects\
                .filter(instance=exist.instance).count() > 0:
            # Usage rows exist for this instance; narrow by launched_at.
            launches = _find_launch(exist.instance,
                                    dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            elif count == 0:
                raise NotFound(launch_type, query)
            launch = launches[0]
        else:
            # No usage rows at all for this instance.
            raise NotFound(launch_type, {'instance': exist.instance})

    _verify_field_mismatch(exist, launch)
|
||||
|
||||
|
||||
def _verify_for_delete(exist, delete=None, delete_type="InstanceDelete"):
    """Verify *exist* against its delete record, if any.

    Resolution order: an explicitly supplied *delete*, the delete linked to
    the exists row, or a lookup by instance + launched_at when the exists
    row says the instance was deleted. When the exists row claims no
    deletion, any delete found before the audit period's end fails
    verification. Raises NotFound, VerificationException, or FieldMismatch.
    *delete_type* is only used to label the raised exceptions.
    """
    if not delete and exist.delete:
        # We know we have a delete and we have it's id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = _find_delete(exist.instance,
                                   dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                # Zero or multiple candidate deletes: report what we
                # searched for.
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = _find_delete(exist.instance, launched_at, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Cross-check the delete record's timestamps against the exists
        # row, tolerating sub-second drift.
        if not _verify_date_field(delete.launched_at, exist.launched_at,
                                  same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)

        if not _verify_date_field(delete.deleted_at, exist.deleted_at,
                                  same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)
|
||||
|
||||
|
||||
def _verify_with_reconciled_data(exist):
    """Re-run launch and delete verification using InstanceReconcile data.

    Called after normal verification fails; reconciled rows stand in for
    both the launch and (when deleted_at is set) the delete record.
    Raises VerificationException, NotFound, AmbiguousResults, or
    FieldMismatch on failure.
    """
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        # Reconcile rows exist for this instance; narrow by launched_at.
        recs = _find_reconcile(exist.instance,
                               dt.dt_from_decimal(exist.launched_at))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    # The reconcile row doubles as the launch record...
    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    # ...and as the delete record when it carries a deleted_at.
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete,
                       delete_type="InstanceReconcile")
|
||||
|
||||
|
||||
def _attempt_reconciled_verify(exist, orig_e):
    """Second-chance verification against reconciled data.

    *orig_e* is the exception from the first verification attempt; it is
    used as the failure reason when no reconciled data exists. Returns
    True when verification against reconciled data succeeded.
    """
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        _mark_exist_verified(exist)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        _mark_exist_failed(exist, reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        _mark_exist_failed(exist, reason=str(rec_e))
    except Exception, rec_e:
        # Unexpected error: record only the exception class and log the
        # full traceback.
        _mark_exist_failed(exist, reason=rec_e.__class__.__name__)
        LOG.exception(rec_e)
    return verified
|
||||
|
||||
|
||||
def _verify(exist):
    """Verify one exists record end-to-end.

    Runs launch and delete verification, marking the record verified on
    success. On a VerificationException it falls back to reconciled data;
    on any other error it marks the record failed. Returns
    ``(verified, exist)`` so pool callbacks can act on the outcome.
    """
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        verified = True
        _mark_exist_verified(exist)
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        # Unexpected error: record the exception class and log the
        # full traceback.
        _mark_exist_failed(exist, reason=e.__class__.__name__)
        LOG.exception(e)

    return verified, exist
|
||||
|
||||
|
||||
def _send_notification(message, routing_key, connection, exchange):
    """Publish *message* on *exchange* using a pooled producer for
    *connection*."""
    producer_pool = kombu.pools.producers[connection]
    with producer_pool.acquire(block=True) as producer:
        # maybe_declare is a no-op when the exchange already exists.
        kombu.common.maybe_declare(exchange, producer.channel)
        producer.publish(message, routing_key)
|
||||
|
||||
|
||||
def send_verified_notification(exist, connection, exchange, routing_keys=None):
    """Re-emit the stored exists notification tagged as verified.

    The raw payload is JSON of the form [routing_key, envelope]. The
    envelope is republished with a fresh message_id and the
    '.verified.old' event type, preserving the original message id for
    traceability. When *routing_keys* is given, the message is sent once
    per key instead of on its original routing key.
    """
    parsed = json.loads(exist.raw.json)
    envelope = parsed[1]
    envelope['event_type'] = 'compute.instance.exists.verified.old'
    envelope['original_message_id'] = envelope['message_id']
    envelope['message_id'] = str(uuid.uuid4())
    if routing_keys is None:
        _send_notification(envelope, parsed[0], connection, exchange)
    else:
        for key in routing_keys:
            _send_notification(envelope, key, connection, exchange)
|
||||
|
||||
|
||||
def _create_exchange(name, type, exclusive=False, auto_delete=False,
                     durable=True):
    """Build a kombu Exchange, forwarding each flag to its namesake.

    Bug fix: ``exclusive`` and ``auto_delete`` were previously swapped
    when passed to ``kombu.entity.Exchange`` (``exclusive=auto_delete,
    auto_delete=exclusive``), so callers setting one flag silently got
    the other.
    """
    return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
                                 auto_delete=auto_delete, durable=durable)
|
||||
|
||||
|
||||
def _create_connection(config):
    """Build a librabbitmq BrokerConnection from config['rabbit']."""
    rabbit = config['rabbit']
    return kombu.connection.BrokerConnection(
        hostname=rabbit['host'],
        port=rabbit['port'],
        userid=rabbit['userid'],
        password=rabbit['password'],
        transport="librabbitmq",
        virtual_host=rabbit['virtual_host'])
|
||||
|
||||
|
||||
class Verifier(object):
    """Polls pending InstanceExists records and verifies them in a pool.

    Exists rows are fanned out to a multiprocessing pool (one _verify
    call per row); results are harvested by clean_results(). When
    reconciliation is enabled, rows that fail verification are retried
    through the configured Reconciler. run() loops forever; run_once()
    processes a single batch and drains it.
    """

    def __init__(self, config, pool=None, rec=None):
        # pool and rec allow dependency injection (e.g. from tests).
        self.config = config
        self.pool = pool or multiprocessing.Pool(self.config['pool_size'])
        self.reconcile = self.config.get('reconcile', False)
        self.reconciler = self._load_reconciler(config, rec=rec)
        # Outstanding AsyncResults from the worker pool.
        self.results = []
        # Exists rows that failed verification (reconciliation candidates).
        self.failed = []

    def _load_reconciler(self, config, rec=None):
        """Return *rec* if given, else build a Reconciler from its JSON
        config file when reconciliation is enabled (None otherwise)."""
        if rec:
            return rec

        if self.reconcile:
            config_loc = config.get('reconciler_config',
                                    '/etc/stacktach/reconciler_config.json')
            with open(config_loc, 'r') as rec_config_file:
                rec_config = json.load(rec_config_file)
                return reconciler.Reconciler(rec_config)

    def clean_results(self):
        """Harvest finished pool results.

        Failed-but-successful-call results are queued for reconciliation
        when enabled. Returns (pending_count, successful, errored).
        """
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    # A clean call that did not verify is a reconciliation
                    # candidate.
                    if self.reconcile and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        # Calls that raised inside the worker count as errored.
        errored = finished - successful
        return len(self.results), successful, errored

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING exists row ending before *ending_max* for
        verification; returns the number of rows found."""
        exists = _list_exists(ending_max=ending_max,
                              status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("Adding %s exists to queue." % count)
        while added < count:
            # Work in slices of 1000 to bound memory use.
            for exist in exists[0:1000]:
                # Mark VERIFYING first so the row leaves the PENDING set.
                exist.status = models.InstanceExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                # Periodic progress logging while enqueueing.
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        """Run the reconciler over failed exists rows, marking those it
        validates as verified; clears the failed list."""
        for failed_exist in self.failed:
            if self.reconciler.failed_validation(failed_exist):
                _mark_exist_verified(failed_exist, reconciled=True)
        self.failed = []

    def _keep_running(self):
        # Hook for tests/subclasses to stop the run loop.
        return True

    def _utcnow(self):
        # Hook for tests to control the clock.
        return datetime.datetime.utcnow()

    def _run(self, callback=None):
        """Main loop: each tick, verify rows older than the settle window
        inside a transaction, then sleep tick_time seconds."""
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        while self._keep_running():
            with transaction.commit_on_success():
                now = self._utcnow()
                # e.g. {'minutes': 10} -> only rows settled for 10 minutes.
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = self.verify_for_range(ending_max,
                                            callback=callback)
                values = ((new,) + self.clean_results())
                if self.reconcile:
                    self.reconcile_failed()
                msg = "N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
            time.sleep(tick_time)

    def run(self):
        """Entry point: run forever, optionally publishing a 'verified'
        notification for each record that passes."""
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    # Pool callback: publish only successfully verified rows.
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run(callback=callback)
        else:
            self._run()

    def _run_once(self, callback=None):
        """Single pass: queue one batch, then poll until the pool drains."""
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        now = self._utcnow()
        kwargs = {settle_units: settle_time}
        ending_max = now - datetime.timedelta(**kwargs)
        new = self.verify_for_range(ending_max, callback=callback)

        LOG.info("Verifying %s exist events" % new)
        while len(self.results) > 0:
            LOG.info("P: %s, F: %s, E: %s" % self.clean_results())
            if self.reconcile:
                self.reconcile_failed()
            time.sleep(tick_time)

    def run_once(self):
        """Entry point for a single verification pass; mirrors run()'s
        notification setup."""
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    # Pool callback: publish only successfully verified rows.
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run_once(callback=callback)
        else:
            self._run_once()
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: build a Verifier config from arguments and run it.
    parser = argparse.ArgumentParser(description=
                                     "Stacktach Instance Exists Verifier")
    # Bug fix: tick-time and settle-time previously lacked type=int, so
    # values passed on the command line arrived as strings and broke
    # time.sleep()/timedelta arithmetic downstream.
    parser.add_argument('--tick-time',
                        help='Time in seconds the verifier will sleep before'
                             'it will check for new exists records.',
                        type=int,
                        default=30)
    # Bug fix: type=bool treats any non-empty string (including "False")
    # as True; store_true gives a proper flag.
    parser.add_argument('--run-once',
                        help='Check database once and verify all returned'
                             'exists records, then stop',
                        action='store_true',
                        default=False)
    parser.add_argument('--settle-time',
                        help='Time the verifier will wait for records to'
                             'settle before it will verify them.',
                        type=int,
                        default=10)
    parser.add_argument('--settle-units',
                        help='Units for settle time',
                        default='minutes')
    parser.add_argument('--pool-size',
                        help='Number of processes created to verify records',
                        type=int,
                        default=10)
    args = parser.parse_args()
    config = {'tick_time': args.tick_time, 'settle_time': args.settle_time,
              'settle_units': args.settle_units, 'pool_size': args.pool_size,
              # Bug fix: Verifier.run()/run_once() read this key
              # unconditionally and raised KeyError; the CLI path has no
              # rabbit config, so notifications stay disabled.
              'enable_notifications': False}

    verifier = Verifier(config)
    if args.run_once:
        verifier.run_once()
    else:
        verifier.run()
|
172
verifier/glance_verifier.py
Normal file
172
verifier/glance_verifier.py
Normal file
@ -0,0 +1,172 @@
|
||||
# Copyright (c) 2012 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
import json
|
||||
|
||||
import os
|
||||
import sys
|
||||
import uuid
|
||||
from verifier.base_verifier import Verifier
|
||||
|
||||
|
||||
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir, os.pardir))
|
||||
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
|
||||
sys.path.insert(0, POSSIBLE_TOPDIR)
|
||||
|
||||
from stacktach import models
|
||||
from verifier import FieldMismatch, VerificationException, base_verifier
|
||||
from verifier import NotFound
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
import datetime
|
||||
from stacktach import stacklog, message_service
|
||||
LOG = stacklog.get_logger('verifier')
|
||||
|
||||
|
||||
def _verify_field_mismatch(exists, usage):
    """Raise FieldMismatch on the first field where the image usage
    record disagrees with the exists record.

    created_at is compared with one-second tolerance; owner and size
    must match exactly.
    """
    if not base_verifier._verify_date_field(
            usage.created_at, exists.created_at, same_second=True):
        raise FieldMismatch('created_at', exists.created_at,
                            usage.created_at)

    # Fixed order so the first mismatch reported is deterministic.
    for attr in ('owner', 'size'):
        expected = getattr(exists, attr)
        actual = getattr(usage, attr)
        if actual != expected:
            raise FieldMismatch(attr, expected, actual)
|
||||
|
||||
|
||||
def _verify_for_usage(exist, usage=None):
    """Verify a glance exists record against its ImageUsage record.

    If *usage* is not supplied, use the usage linked to the exists row or
    look one up by image uuid. Raises NotFound when no usage matches and
    FieldMismatch (via _verify_field_mismatch) when fields disagree.
    """
    usage_type = "ImageUsage"
    if not usage and exist.usage:
        # The exists row is already linked to its usage record.
        usage = exist.usage
    elif not usage:
        usages = models.ImageUsage.objects.filter(uuid=exist.uuid)
        usage_count = usages.count()
        if usage_count == 0:
            query = {'uuid': exist.uuid}
            raise NotFound(usage_type, query)
        # NOTE(review): unlike the nova path, multiple matches are not
        # treated as ambiguous here — the first usage row is taken.
        usage = usages[0]
    _verify_field_mismatch(exist, usage)
|
||||
|
||||
|
||||
def _verify_for_delete(exist, delete=None):
    """Verify a glance exists record against its ImageDelete record.

    Resolution order: an explicitly supplied *delete*, the delete linked
    to the exists row, or a lookup by image uuid when the exists row says
    the image was deleted. When the exists row claims no deletion, any
    delete found before the audit period's end fails verification.
    Raises NotFound, VerificationException, or FieldMismatch.
    """
    delete_type = "ImageDelete"
    if not delete and exist.delete:
        # We know we have a delete and we have it's id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.ImageDeletes.find(uuid=exist.uuid)
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                # Bug fix: this query previously referenced
                # exist.instance / exist.launched_at, which are nova
                # instance fields copy-pasted into the glance verifier;
                # image exists records are keyed by uuid.
                query = {'uuid': exist.uuid}
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.ImageDeletes.find(
                exist.uuid, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Cross-check the delete record's timestamps against the exists
        # row, tolerating sub-second drift.
        if not base_verifier._verify_date_field(
                delete.created_at, exist.created_at, same_second=True):
            raise FieldMismatch('created_at', exist.created_at,
                                delete.created_at)

        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)
|
||||
|
||||
|
||||
def _verify(exist):
    """Verify one ImageExists record end-to-end.

    Runs usage and delete verification, marking the record verified on
    success and failed (with the exception class as the reason) on any
    error. Returns ``(verified, exist)`` for pool callbacks.
    """
    verified = False
    try:
        _verify_for_usage(exist)
        _verify_for_delete(exist)

        verified = True
        exist.mark_verified()
    except Exception, e:
        # Any failure (NotFound, FieldMismatch, unexpected error) marks
        # the record failed; the traceback goes to the verifier log.
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("glance: %s" % e)

    return verified, exist
|
||||
|
||||
|
||||
class GlanceVerifier(Verifier):
    """Verifier for glance image exists records.

    Specializes the base Verifier with ImageExists queries, the
    'image.exists.verified.old' notification, and the 'glance' exchange.
    """

    def __init__(self, config, pool=None):
        super(GlanceVerifier, self).__init__(config, pool=pool)

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING image exists row ending before *ending_max*
        for verification; returns the number of rows found."""
        exists = models.ImageExists.find(
            ending_max=ending_max, status=models.ImageExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("glance: Adding %s exists to queue." % count)
        while added < count:
            # Work in slices of 1000 to bound memory use.
            for exist in exists[0:1000]:
                # Mark VERIFYING first so the row leaves the PENDING set.
                exist.status = models.ImageExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                # Periodic progress logging while enqueueing.
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "glance: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        """Re-emit the stored exists notification ([routing_key, body]
        JSON) tagged 'image.exists.verified.old' with a fresh message_id;
        fan out over *routing_keys* when given."""
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'image.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            message_service.send_notification(json_body[1], json_body[0],
                                              connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(json_body[1], key,
                                                  connection, exchange)

    def exchange(self):
        # Exchange name used when publishing verified notifications.
        return 'glance'
|
268
verifier/nova_verifier.py
Normal file
268
verifier/nova_verifier.py
Normal file
@ -0,0 +1,268 @@
|
||||
# Copyright (c) 2012 - Rackspace Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
|
||||
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir, os.pardir))
|
||||
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
|
||||
sys.path.insert(0, POSSIBLE_TOPDIR)
|
||||
|
||||
from verifier import base_verifier
|
||||
from stacktach import models
|
||||
from stacktach import datetime_to_decimal as dt
|
||||
from verifier import FieldMismatch
|
||||
from verifier import AmbiguousResults
|
||||
from verifier import NotFound
|
||||
from verifier import VerificationException
|
||||
from stacktach import stacklog, message_service
|
||||
LOG = stacklog.get_logger('verifier')
|
||||
|
||||
|
||||
def _verify_field_mismatch(exists, launch):
    """Raise FieldMismatch on the first field where the launch record
    disagrees with the exists record.

    launched_at is compared with one-second tolerance because the two
    notifications may carry differently-rounded timestamps; all other
    fields must match exactly.
    """
    if not base_verifier._verify_date_field(
            launch.launched_at, exists.launched_at, same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)

    # Fixed order so the first mismatch reported is deterministic.
    for attr in ('instance_type_id', 'tenant', 'rax_options',
                 'os_architecture', 'os_version', 'os_distro'):
        expected = getattr(exists, attr)
        actual = getattr(launch, attr)
        if actual != expected:
            raise FieldMismatch(attr, expected, actual)
|
||||
|
||||
|
||||
def _verify_for_launch(exist, launch=None,
                       launch_type="InstanceUsage"):
    """Verify *exist* against its matching launch (usage) record.

    If *launch* is not supplied, use the usage record linked to the
    exists row, or else look one up by instance + launched_at. Raises
    NotFound when no usage record matches, AmbiguousResults when more
    than one does, and FieldMismatch (via _verify_field_mismatch) when
    fields disagree. *launch_type* labels the raised exceptions.
    """
    if not launch and exist.usage:
        # The exists row is already linked to its usage record.
        launch = exist.usage
    elif not launch:
        if models.InstanceUsage.objects\
                .filter(instance=exist.instance).count() > 0:
            # Usage rows exist for this instance; narrow by launched_at.
            launches = models.InstanceUsage.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            elif count == 0:
                raise NotFound(launch_type, query)
            launch = launches[0]
        else:
            # No usage rows at all for this instance.
            raise NotFound(launch_type, {'instance': exist.instance})

    _verify_field_mismatch(exist, launch)
|
||||
|
||||
|
||||
def _verify_for_delete(exist, delete=None,
                       delete_type="InstanceDeletes"):
    """Verify *exist* against its delete record, if any.

    Resolution order: an explicitly supplied *delete*, the delete linked
    to the exists row, or a lookup by instance + launched_at when the
    exists row says the instance was deleted. When the exists row claims
    no deletion, any delete found before the audit period's end fails
    verification. Raises NotFound, VerificationException, or
    FieldMismatch. *delete_type* labels the raised exceptions.
    """
    if not delete and exist.delete:
        # We know we have a delete and we have it's id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.InstanceDeletes.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                # Zero or multiple candidate deletes: report what we
                # searched for.
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.InstanceDeletes.find(exist.instance, launched_at,
                                                  deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %s for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Cross-check the delete record's timestamps against the exists
        # row, tolerating sub-second drift.
        if not base_verifier._verify_date_field(
                delete.launched_at, exist.launched_at, same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)

        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch(
                'deleted_at', exist.deleted_at, delete.deleted_at)
|
||||
|
||||
|
||||
def _verify_with_reconciled_data(exist):
    """Re-run launch and delete verification using InstanceReconcile data.

    Called after normal verification fails; the reconcile row stands in
    for both the launch and (when deleted_at is set) the delete record.
    Raises VerificationException, NotFound, AmbiguousResults, or
    FieldMismatch on failure.
    """
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        # Reconcile rows exist for this instance; narrow by launched_at.
        recs = models.InstanceReconcile.find(exist.instance,
                                             dt.dt_from_decimal((
                                                 exist.launched_at)))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    # The reconcile row doubles as the launch record...
    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    # ...and as the delete record when it carries a deleted_at.
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete, delete_type="InstanceReconcile")
|
||||
|
||||
|
||||
def _attempt_reconciled_verify(exist, orig_e):
    """Second-chance verification against reconciled data.

    *orig_e* is the exception from the first verification attempt; it is
    used as the failure reason when no reconciled data exists. Returns
    True when verification against reconciled data succeeded.
    """
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        exist.mark_verified(reconciled=True)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        exist.mark_failed(reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        exist.mark_failed(reason=str(rec_e))
    except Exception, rec_e:
        # Unexpected error: record only the exception class and log the
        # full traceback.
        exist.mark_failed(reason=rec_e.__class__.__name__)
        LOG.exception("nova: %s" % rec_e)
    return verified
|
||||
|
||||
|
||||
def _verify(exist):
    """Verify one InstanceExists record end-to-end.

    Runs launch and delete verification, marking the record verified on
    success. On a VerificationException it falls back to reconciled
    data; on any other error it marks the record failed. Returns
    ``(verified, exist)`` so pool callbacks can act on the outcome.
    """
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        verified = True
        exist.mark_verified()
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        # Unexpected error: record the exception class and log the
        # full traceback.
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("nova: %s" % e)

    return verified, exist
|
||||
|
||||
|
||||
class NovaVerifier(base_verifier.Verifier):
    """Verifier for nova instance exists records.

    Specializes the base Verifier with InstanceExists queries, the
    'compute.instance.exists.verified.old' notification, reconciliation
    of failed rows, and the 'nova' exchange.
    """

    def __init__(self, config, pool=None, reconciler=None):
        super(NovaVerifier, self).__init__(config,
                                           pool=pool,
                                           reconciler=reconciler)

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        """Re-emit the stored exists notification ([routing_key, body]
        JSON) tagged '...verified.old' with a fresh message_id; fan out
        over *routing_keys* when given."""
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            message_service.send_notification(
                json_body[1], json_body[0], connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(
                    json_body[1], key, connection, exchange)

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING instance exists row ending before
        *ending_max* for verification; returns the number of rows found."""
        exists = models.InstanceExists.find(
            ending_max=ending_max, status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("nova: Adding %s exists to queue." % count)
        while added < count:
            # Work in slices of 1000 to bound memory use.
            for exist in exists[0:1000]:
                # Mark VERIFYING first so the row leaves the PENDING set.
                exist.update_status(models.InstanceExists.VERIFYING)
                exist.save()
                result = self.pool.apply_async(
                    _verify, args=(exist,),
                    callback=callback)
                self.results.append(result)
                added += 1
                # Periodic progress logging while enqueueing.
                if datetime.datetime.utcnow() > next_update:
                    values = ((added,) + self.clean_results())
                    msg = "nova: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        """Run the reconciler over failed exists rows; clears the list."""
        for failed_exist in self.failed:
            self.reconciler.failed_validation(failed_exist)
        self.failed = []

    def exchange(self):
        # Exchange name used when publishing verified notifications.
        return 'nova'
|
@ -17,8 +17,8 @@
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import json
|
||||
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
@ -30,10 +30,11 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
|
||||
sys.path.insert(0, POSSIBLE_TOPDIR)
|
||||
|
||||
from verifier import dbverifier
|
||||
from stacktach import reconciler
|
||||
from verifier import nova_verifier
|
||||
from verifier import glance_verifier
|
||||
import verifier.config as verifier_config
|
||||
|
||||
config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
|
||||
'stacktach_verifier_config.json')
|
||||
try:
|
||||
from local_settings import *
|
||||
config_filename = STACKTACH_VERIFIER_CONFIG
|
||||
@ -42,31 +43,47 @@ except ImportError:
|
||||
|
||||
process = None
|
||||
|
||||
processes = []
|
||||
|
||||
|
||||
def kill_time(signal, frame):
|
||||
print "dying ..."
|
||||
if process:
|
||||
for process in processes:
|
||||
process.terminate()
|
||||
print "rose"
|
||||
if process:
|
||||
for process in processes:
|
||||
process.join()
|
||||
print "bud"
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
config = None
|
||||
with open(config_filename, "r") as f:
|
||||
config = json.load(f)
|
||||
def _load_nova_reconciler():
|
||||
config_loc = verifier_config.reconciler_config()
|
||||
with open(config_loc, 'r') as rec_config_file:
|
||||
rec_config = json.load(rec_config_file)
|
||||
return reconciler.Reconciler(rec_config)
|
||||
|
||||
def make_and_start_verifier(config):
|
||||
if __name__ == '__main__':
|
||||
def make_and_start_verifier(exchange):
|
||||
# Gotta create it and run it this way so things don't get
|
||||
# lost when the process is forked.
|
||||
verifier = dbverifier.Verifier(config)
|
||||
verifier = None
|
||||
if exchange == "nova":
|
||||
reconcile = verifier_config.reconcile()
|
||||
reconciler = None
|
||||
if reconcile:
|
||||
reconciler = _load_nova_reconciler()
|
||||
verifier = nova_verifier.NovaVerifier(verifier_config,
|
||||
reconciler=reconciler)
|
||||
elif exchange == "glance":
|
||||
verifier = glance_verifier.GlanceVerifier(verifier_config)
|
||||
|
||||
verifier.run()
|
||||
|
||||
process = Process(target=make_and_start_verifier, args=(config,))
|
||||
process.start()
|
||||
for exchange in verifier_config.topics().keys():
|
||||
process = Process(target=make_and_start_verifier, args=(exchange,))
|
||||
process.start()
|
||||
processes.append(process)
|
||||
signal.signal(signal.SIGINT, kill_time)
|
||||
signal.signal(signal.SIGTERM, kill_time)
|
||||
signal.pause()
|
||||
|
@ -33,7 +33,7 @@ case "$1" in
|
||||
/sbin/start-stop-daemon --start --pidfile $PIDFILE --make-pidfile -b --exec $DAEMON $ARGS
|
||||
;;
|
||||
status)
|
||||
status_of_proc "$DAEMON" "verifier" && exit 0 || exit $?
|
||||
status_of_proc -p "${PIDFILE}" "$DAEMON" "verifier" && exit 0 || exit $?
|
||||
;;
|
||||
*)
|
||||
echo "Usage: verifier.sh {start|stop|restart|status}"
|
||||
|
@ -39,5 +39,3 @@ def deployments():
|
||||
|
||||
def topics():
|
||||
return config['topics']
|
||||
|
||||
|
||||
|
@ -33,7 +33,7 @@ case "$1" in
|
||||
/sbin/start-stop-daemon --start --pidfile $PIDFILE --make-pidfile -b --exec $DAEMON $ARGS
|
||||
;;
|
||||
status)
|
||||
status_of_proc "$DAEMON" "stacktach" && exit 0 || exit $?
|
||||
status_of_proc -p "${PIDFILE}" "$DAEMON" "stacktach" && exit 0 || exit $?
|
||||
;;
|
||||
*)
|
||||
echo "Usage: stacktach.sh {start|stop|restart|status}"
|
||||
|
@ -34,7 +34,7 @@ except ImportError:
|
||||
|
||||
from pympler.process import ProcessMemoryInfo
|
||||
|
||||
from stacktach import db
|
||||
from stacktach import db, message_service
|
||||
from stacktach import stacklog
|
||||
from stacktach import views
|
||||
|
||||
@ -44,7 +44,7 @@ LOG = stacklog.get_logger()
|
||||
|
||||
class Consumer(kombu.mixins.ConsumerMixin):
|
||||
def __init__(self, name, connection, deployment, durable, queue_arguments,
|
||||
exchange, topics, queue_name_prefix):
|
||||
exchange, topics):
|
||||
self.connection = connection
|
||||
self.deployment = deployment
|
||||
self.durable = durable
|
||||
@ -56,25 +56,24 @@ class Consumer(kombu.mixins.ConsumerMixin):
|
||||
self.total_processed = 0
|
||||
self.topics = topics
|
||||
self.exchange = exchange
|
||||
self.queue_name_prefix = queue_name_prefix
|
||||
|
||||
def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
|
||||
return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
|
||||
return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive,
|
||||
durable=self.durable,
|
||||
auto_delete=auto_delete)
|
||||
|
||||
def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
|
||||
auto_delete=False):
|
||||
return kombu.Queue(name, nova_exchange, durable=self.durable,
|
||||
auto_delete=exclusive, exclusive=auto_delete,
|
||||
queue_arguments=self.queue_arguments,
|
||||
routing_key=routing_key)
|
||||
return message_service.create_queue(
|
||||
name, nova_exchange, durable=self.durable, auto_delete=exclusive,
|
||||
exclusive=auto_delete, queue_arguments=self.queue_arguments,
|
||||
routing_key=routing_key)
|
||||
|
||||
def get_consumers(self, Consumer, channel):
|
||||
exchange = self._create_exchange(self.exchange, "topic")
|
||||
|
||||
queue_name = "%s%s" % (self.queue_name_prefix, self.exchange)
|
||||
queues = [self._create_queue(queue_name, exchange, topic)
|
||||
queues = [self._create_queue(topic['queue'], exchange,
|
||||
topic['routing_key'])
|
||||
for topic in self.topics]
|
||||
|
||||
return [Consumer(queues=queues, callbacks=[self.on_nova])]
|
||||
@ -154,7 +153,6 @@ def run(deployment_config, exchange):
|
||||
queue_arguments = deployment_config.get('queue_arguments', {})
|
||||
exit_on_exception = deployment_config.get('exit_on_exception', False)
|
||||
topics = deployment_config.get('topics', {})
|
||||
queue_name_prefix = deployment_config.get('queue_name_prefix', 'stacktach_')
|
||||
|
||||
deployment, new = db.get_or_create_deployment(name)
|
||||
|
||||
@ -177,8 +175,7 @@ def run(deployment_config, exchange):
|
||||
try:
|
||||
consumer = Consumer(name, conn, deployment, durable,
|
||||
queue_arguments, exchange,
|
||||
topics[exchange],
|
||||
queue_name_prefix)
|
||||
topics[exchange])
|
||||
consumer.run()
|
||||
except Exception as e:
|
||||
LOG.error("!!!!Exception!!!!")
|
||||
@ -199,4 +196,4 @@ POST_PROCESS_METHODS = {
|
||||
'RawData': views.post_process_rawdata,
|
||||
'GlanceRawData': views.post_process_glancerawdata,
|
||||
'GenericRawData': views.post_process_genericrawdata
|
||||
}
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user