Merge branch 'master' of git://github.com/rackerlabs/stacktach

This commit is contained in:
Andrew Melton 2013-07-15 12:18:59 -04:00
commit 7cfb7d958a
28 changed files with 3019 additions and 1415 deletions

View File

@ -0,0 +1,13 @@
{
"client_class": "JSONBridgeClient",
"client": {
"url": "http://jsonbridge.example.com:8080/query/",
"username": "bridgeuser",
"password": "super_secure_password",
"databases": {
"RegionOne": "nova-regionone",
"RegionTwo": "nova-regiontwo"
}
},
"region_mapping_loc": "etc/sample_region_mapping.json"
}

View File

@ -0,0 +1,6 @@
{
"RegionOne.dev.global": "RegionOne",
"RegionOne.dev.cell1": "RegionOne",
"RegionTwo.dev.global": "RegionTwo",
"RegionTwo.dev.cell1": "RegionTwo"
}

0
manage.py Normal file → Executable file
View File

View File

@ -30,12 +30,21 @@ from django.db.models import F
from stacktach import datetime_to_decimal as dt
from stacktach import models
from stacktach.reconciler import Reconciler
# Usages launched before the audit period that have no delete recorded
# before the period start -- in either stacktach_instancedeletes or
# stacktach_instancereconcile.  Takes the period start three times as
# positional '%s' parameters (Django raw() does not do dict substitution).
# (The older two-parameter version of this query, which ignored
# stacktach_instancereconcile, was removed as stale diff residue.)
OLD_LAUNCHES_QUERY = """
    select * from stacktach_instanceusage where
        launched_at is not null and
        launched_at < %s and
        instance not in
            (select distinct(instance)
             from stacktach_instancedeletes where
                 deleted_at < %s union
             select distinct(instance)
             from stacktach_instancereconcile where
                 deleted_at < %s);"""
reconciler = None
def _get_new_launches(beginning, ending):
@ -63,35 +72,45 @@ def _get_exists(beginning, ending):
return models.InstanceExists.objects.filter(**filters)
def _audit_launches_to_exists(launches, exists, beginning):
    """Cross-check launch records against .exists records.

    For every launch, look for an exists record for the same instance
    whose (truncated) launched_at matches.  Each miss becomes a failure
    row; if the module-level ``reconciler`` is configured, it is asked
    whether the missing exists can be reconciled, and the row's last
    column records 'Y'/'N' accordingly.

    :param launches: dict mapping instance id -> list of launch dicts
                     (each with at least 'id' and 'launched_at' keys)
    :param exists: dict mapping instance id -> list of exists dicts
    :param beginning: start of the audit period (passed to the reconciler)
    :returns: list of failure rows [type, id, message, reconciled-flag]
    """
    fails = []
    for (instance, launches) in launches.items():
        if instance in exists:
            for expected in launches:
                found = False
                for actual in exists[instance]:
                    # HACK (apmelton): Truncate the decimal because we may
                    # not have the milliseconds.
                    if int(expected['launched_at']) == \
                            int(actual['launched_at']):
                        found = True
                if not found:
                    rec = False
                    if reconciler:
                        args = (expected['id'], beginning)
                        rec = reconciler.missing_exists_for_instance(*args)
                    msg = "Couldn't find exists for launch (%s, %s)"
                    msg = msg % (instance, expected['launched_at'])
                    fails.append(['Launch', expected['id'], msg,
                                  'Y' if rec else 'N'])
        else:
            # No exists records at all for this instance; probe the
            # reconciler with the first launch for it.
            rec = False
            if reconciler:
                args = (launches[0]['id'], beginning)
                rec = reconciler.missing_exists_for_instance(*args)
            msg = "No exists for instance (%s)" % instance
            fails.append(['Launch', '-', msg, 'Y' if rec else 'N'])
    return fails
def _status_queries(exists_query):
    """Split an InstanceExists queryset into per-status querysets.

    :param exists_query: base InstanceExists queryset to partition
    :returns: 5-tuple (verified, reconciled, fail, pending, verifying)

    The stale 4-tuple ``return`` that preceded this one (diff residue
    from before RECONCILED existed) was removed: it made the real
    return statement unreachable.
    """
    verified = exists_query.filter(status=models.InstanceExists.VERIFIED)
    reconciled = exists_query.filter(status=models.InstanceExists.RECONCILED)
    fail = exists_query.filter(status=models.InstanceExists.FAILED)
    pending = exists_query.filter(status=models.InstanceExists.PENDING)
    verifying = exists_query.filter(status=models.InstanceExists.VERIFYING)
    return verified, reconciled, fail, pending, verifying
def _send_status_queries(exists_query):
@ -108,7 +127,8 @@ def _send_status_queries(exists_query):
def _audit_for_exists(exists_query):
(verified, fail, pending, verifying) = _status_queries(exists_query)
(verified, reconciled,
fail, pending, verifying) = _status_queries(exists_query)
(success, unsent, redirect,
client_error, server_error) = _send_status_queries(verified)
@ -116,6 +136,7 @@ def _audit_for_exists(exists_query):
report = {
'count': exists_query.count(),
'verified': verified.count(),
'reconciled': reconciled.count(),
'failed': fail.count(),
'pending': pending.count(),
'verifying': verifying.count(),
@ -175,8 +196,13 @@ def _launch_audit_for_period(beginning, ending):
else:
launches_dict[instance] = [l, ]
old_launches = models.InstanceUsage.objects.raw(OLD_LAUNCHES_QUERY,
[beginning, beginning])
# NOTE (apmelton)
# Django's safe substitution doesn't allow dict substitution...
# Thus, we send it 'beginning' three times...
old_launches = models.InstanceUsage.objects\
.raw(OLD_LAUNCHES_QUERY,
[beginning, beginning, beginning])
old_launches_dict = {}
for launch in old_launches:
instance = launch.instance
@ -205,7 +231,8 @@ def _launch_audit_for_period(beginning, ending):
exists_dict[instance] = [e, ]
launch_to_exists_fails = _audit_launches_to_exists(launches_dict,
exists_dict)
exists_dict,
beginning)
return launch_to_exists_fails, new_launches.count(), len(old_launches_dict)
@ -222,11 +249,11 @@ def audit_for_period(beginning, ending):
summary = {
'verifier': verify_summary,
'launch_fails': {
'total_failures': len(detail),
'launch_summary': {
'new_launches': new_count,
'old_launches': old_count
}
'old_launches': old_count,
'failures': len(detail)
},
}
details = {
@ -266,7 +293,7 @@ def store_results(start, end, summary, details):
'created': dt.dt_to_decimal(datetime.datetime.utcnow()),
'period_start': start,
'period_end': end,
'version': 2,
'version': 4,
'name': 'nova usage audit'
}
@ -276,7 +303,7 @@ def store_results(start, end, summary, details):
def make_json_report(summary, details):
    """Serialize the audit summary and failure details as a JSON report.

    The report is a JSON list: first the summary dict, then a header
    row, then every exists/launch failure row appended verbatim (each
    failure row carries a trailing 'Y'/'N' "Reconciled?" flag).

    :param summary: dict of aggregate audit counts
    :param details: dict with 'exist_fails' and 'launch_fails' row lists
    :returns: JSON string
    """
    report = [{'summary': summary},
              ['Object', 'ID', 'Error Description', 'Reconciled?']]
    report.extend(details['exist_fails'])
    report.extend(details['launch_fails'])
    return json.dumps(report)
@ -302,8 +329,20 @@ if __name__ == '__main__':
help="If set to true, report will be stored. "
"Otherwise, it will just be printed",
type=bool, default=False)
parser.add_argument('--reconcile',
help="Enabled reconciliation",
type=bool, default=False)
parser.add_argument('--reconciler_config',
help="Location of the reconciler config file",
type=str,
default='/etc/stacktach/reconciler-config.json')
args = parser.parse_args()
if args.reconcile:
with open(args.reconciler_config) as f:
reconciler_config = json.load(f)
reconciler = Reconciler(reconciler_config)
if args.utcdatetime is not None:
time = args.utcdatetime
else:

View File

@ -21,8 +21,19 @@ def get_or_create_deployment(name):
def create_rawdata(**kwargs):
    """Persist a RawData row plus its linked RawDataImageMeta row.

    Image-metadata kwargs (os_architecture/os_version/os_distro/
    rax_options) are split out of ``kwargs`` and stored on a separate
    RawDataImageMeta record pointing at the saved RawData row.

    The stale one-line ``return models.RawData(**kwargs)`` body (diff
    residue from the pre-image-meta version) was removed: it returned
    before any of the code below could run.

    :returns: the saved RawData instance
    """
    imagemeta_fields = ['os_architecture', 'os_version',
                        'os_distro', 'rax_options']
    # Partition kwargs: image metadata vs. plain RawData columns.
    imagemeta_kwargs = \
        dict((k, v) for k, v in kwargs.iteritems() if k in imagemeta_fields)
    rawdata_kwargs = \
        dict((k, v) for k, v in kwargs.iteritems() if k not in imagemeta_fields)
    rawdata = models.RawData(**rawdata_kwargs)
    rawdata.save()
    # The meta row needs the saved RawData's primary key.
    imagemeta_kwargs.update({'raw_id': rawdata.id})
    # NOTE(review): rawdata is saved via .save() but the meta row goes
    # through the save() helper -- confirm both paths are intentional.
    save(models.RawDataImageMeta(**imagemeta_kwargs))
    return rawdata
def create_lifecycle(**kwargs):
    """Return a new, unsaved models.Lifecycle built from **kwargs."""
    return models.Lifecycle(**kwargs)

View File

@ -0,0 +1,211 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: introduce per-raw image metadata.

    Forwards: creates the ``stacktach_rawdataimagemeta`` table and adds
    the four image-metadata columns (os_architecture, os_distro,
    os_version, rax_options) to both InstanceExists and InstanceUsage.
    Backwards reverses all of it.  The ``models`` dict below is South's
    frozen ORM snapshot and must not be edited by hand.
    """

    def forwards(self, orm):
        # Adding model 'RawDataImageMeta'
        db.create_table(u'stacktach_rawdataimagemeta', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('raw', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.RawData'])),
            ('os_architecture', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('os_distro', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('os_version', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('rax_options', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal(u'stacktach', ['RawDataImageMeta'])

        # Adding field 'InstanceExists.os_architecture'
        db.add_column(u'stacktach_instanceexists', 'os_architecture',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceExists.os_distro'
        db.add_column(u'stacktach_instanceexists', 'os_distro',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceExists.os_version'
        db.add_column(u'stacktach_instanceexists', 'os_version',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceExists.rax_options'
        db.add_column(u'stacktach_instanceexists', 'rax_options',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceUsage.os_architecture'
        db.add_column(u'stacktach_instanceusage', 'os_architecture',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceUsage.os_distro'
        db.add_column(u'stacktach_instanceusage', 'os_distro',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceUsage.os_version'
        db.add_column(u'stacktach_instanceusage', 'os_version',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceUsage.rax_options'
        db.add_column(u'stacktach_instanceusage', 'rax_options',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting model 'RawDataImageMeta'
        db.delete_table(u'stacktach_rawdataimagemeta')

        # Deleting field 'InstanceExists.os_architecture'
        db.delete_column(u'stacktach_instanceexists', 'os_architecture')

        # Deleting field 'InstanceExists.os_distro'
        db.delete_column(u'stacktach_instanceexists', 'os_distro')

        # Deleting field 'InstanceExists.os_version'
        db.delete_column(u'stacktach_instanceexists', 'os_version')

        # Deleting field 'InstanceExists.rax_options'
        db.delete_column(u'stacktach_instanceexists', 'rax_options')

        # Deleting field 'InstanceUsage.os_architecture'
        db.delete_column(u'stacktach_instanceusage', 'os_architecture')

        # Deleting field 'InstanceUsage.os_distro'
        db.delete_column(u'stacktach_instanceusage', 'os_distro')

        # Deleting field 'InstanceUsage.os_version'
        db.delete_column(u'stacktach_instanceusage', 'os_version')

        # Deleting field 'InstanceUsage.rax_options'
        db.delete_column(u'stacktach_instanceusage', 'rax_options')

    # South frozen ORM snapshot (auto-generated; do not edit by hand).
    models = {
        u'stacktach.deployment': {
            'Meta': {'object_name': 'Deployment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'stacktach.instancedeletes': {
            'Meta': {'object_name': 'InstanceDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
        },
        u'stacktach.instanceexists': {
            'Meta': {'object_name': 'InstanceExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
        },
        u'stacktach.instanceusage': {
            'Meta': {'object_name': 'InstanceUsage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.jsonreport': {
            'Meta': {'object_name': 'JsonReport'},
            'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'stacktach.lifecycle': {
            'Meta': {'object_name': 'Lifecycle'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
            'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.rawdata': {
            'Meta': {'object_name': 'RawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.rawdataimagemeta': {
            'Meta': {'object_name': 'RawDataImageMeta'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'stacktach.requesttracker': {
            'Meta': {'object_name': 'RequestTracker'},
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.timing': {
            'Meta': {'object_name': 'Timing'},
            'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
        }
    }

    complete_apps = ['stacktach']

View File

@ -0,0 +1,224 @@
# -*- coding: utf-8 -*-
import copy
from south.v2 import DataMigration
from stacktach.notification import Notification
from stacktach.views import NOTIFICATIONS
# Prefer the fastest available JSON implementation, falling back to the
# stdlib module.
try:
    import ujson as json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        import json

# Notification events whose payloads carry usage/image-metadata fields.
USAGE_EVENTS = [
    'compute.instance.create.start',
    'compute.instance.create.end',
    'compute.instance.rebuild.start',
    'compute.instance.rebuild.end',
    'compute.instance.resize.prep.start',
    'compute.instance.resize.prep.end',
    'compute.instance.resize.revert.start',
    'compute.instance.resize.revert.end',
    'compute.instance.finish_resize.end',
    'compute.instance.delete.end',
    'compute.instance.exists']

# Same list minus the periodic .exists event.  The list holds only
# strings, so a filtered copy is equivalent to the previous
# copy.deepcopy(...) + .remove(...) and avoids the needless deep copy.
USAGE_EVENTS_EXCEPT_EXISTS = [e for e in USAGE_EVENTS
                              if e != 'compute.instance.exists']
class Migration(DataMigration):
def _find_latest_usage_related_raw_id_for_request_id(self, orm, request_id):
rawdata = orm.RawData.objects.filter(
request_id=request_id,
event__in=USAGE_EVENTS_EXCEPT_EXISTS).order_by('id')[:1].values('id')
if rawdata.count() > 0:
return rawdata[0]['id']
return None
def _notification(self, json_message):
json_dict = json.loads(json_message)
routing_key = json_dict[0]
body = json_dict[1]
notification = NOTIFICATIONS[routing_key](body)
return notification
def forwards(self, orm):
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
print "Started inserting records in RawDataImageMeta"
rawdata_all = orm.RawData.objects.filter(event__in=USAGE_EVENTS).values('json', 'id')
for rawdata in rawdata_all:
notification = self._notification(rawdata['json'])
orm.RawDataImageMeta.objects.create(
raw_id=rawdata['id'],
os_architecture=notification.os_architecture,
os_distro=notification.os_distro,
os_version=notification.os_version,
rax_options=notification.rax_options)
print "Inserted %s records in RawDataImageMeta" % rawdata_all.count()
print "\nStarted updating records in InstanceExists"
exists = orm.InstanceExists.objects.values('raw_id')
exists_update_count = 0
for exist in exists:
image_metadata = orm.RawDataImageMeta.objects.filter(raw_id=exist['raw_id'])
if image_metadata.count() == 0:
print "RawDataImageMeta not found for InstanceExists with raw_id %s" % exist['raw_id']
continue
orm.InstanceExists.objects.filter(
raw_id=exist['raw_id']).update(
os_architecture=image_metadata[0].os_architecture,
os_distro=image_metadata[0].os_distro,
os_version=image_metadata[0].os_version,
rax_options=image_metadata[0].rax_options)
exists_update_count += 1
print "Updated %s records in InstanceExists" % exists_update_count
print "\nStarted updating records in InstacnceUsages"
usages = orm.InstanceUsage.objects.all().values('request_id')
usages_update_count = 0
for usage in usages:
raw_id = self._find_latest_usage_related_raw_id_for_request_id(orm, usage['request_id'])
if not raw_id:
print "No Rawdata entry found for a usage related event with request_id %s" % usage['request_id']
continue
image_metadata = orm.RawDataImageMeta.objects.filter(raw_id=raw_id)[0]
orm.InstanceUsage.objects.filter(
request_id=usage['request_id']).update(
os_architecture=image_metadata.os_architecture,
os_distro=image_metadata.os_distro,
os_version=image_metadata.os_version,
rax_options=image_metadata.rax_options)
usages_update_count += 1
print "Updated %s records in InstanceUsages" % usages_update_count
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {
u'stacktach.deployment': {
'Meta': {'object_name': 'Deployment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'stacktach.instancedeletes': {
'Meta': {'object_name': 'InstanceDeletes'},
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
},
u'stacktach.instanceexists': {
'Meta': {'object_name': 'InstanceExists'},
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
},
u'stacktach.instanceusage': {
'Meta': {'object_name': 'InstanceUsage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.jsonreport': {
'Meta': {'object_name': 'JsonReport'},
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'stacktach.lifecycle': {
'Meta': {'object_name': 'Lifecycle'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.rawdata': {
'Meta': {'object_name': 'RawData'},
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.rawdataimagemeta': {
'Meta': {'object_name': 'RawDataImageMeta'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'stacktach.requesttracker': {
'Meta': {'object_name': 'RequestTracker'},
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.timing': {
'Meta': {'object_name': 'Timing'},
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
}
}
complete_apps = ['stacktach']
symmetrical = True

View File

@ -0,0 +1,160 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: create the ``stacktach_instancereconcile`` table.

    InstanceReconcile rows record usage data pulled from an external
    source so that launches/deletes missing from the notification stream
    can be reconciled (see the paired model in stacktach/models.py).
    """

    def forwards(self, orm):
        # Adding model 'InstanceReconcile'
        db.create_table(u'stacktach_instancereconcile', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('row_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('row_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('instance', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
            ('launched_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
            ('deleted_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
            ('instance_type_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
            ('source', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=150, null=True, blank=True)),
        ))
        db.send_create_signal(u'stacktach', ['InstanceReconcile'])

    def backwards(self, orm):
        # Deleting model 'InstanceReconcile'
        db.delete_table(u'stacktach_instancereconcile')

    # Frozen ORM state at the time of this migration (auto-generated by
    # South; do not hand-edit field definitions here).
    models = {
        u'stacktach.deployment': {
            'Meta': {'object_name': 'Deployment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'stacktach.instancedeletes': {
            'Meta': {'object_name': 'InstanceDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
        },
        u'stacktach.instanceexists': {
            'Meta': {'object_name': 'InstanceExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
        },
        u'stacktach.instancereconcile': {
            'Meta': {'object_name': 'InstanceReconcile'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.instanceusage': {
            'Meta': {'object_name': 'InstanceUsage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.jsonreport': {
            'Meta': {'object_name': 'JsonReport'},
            'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'stacktach.lifecycle': {
            'Meta': {'object_name': 'Lifecycle'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
            'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.rawdata': {
            'Meta': {'object_name': 'RawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.rawdataimagemeta': {
            'Meta': {'object_name': 'RawDataImageMeta'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'stacktach.requesttracker': {
            'Meta': {'object_name': 'RequestTracker'},
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.timing': {
            'Meta': {'object_name': 'Timing'},
            'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
        }
    }

    complete_apps = ['stacktach']

View File

@ -0,0 +1,189 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add tenant and image-metadata columns to
    ``stacktach_instancereconcile`` (tenant, os_architecture, os_distro,
    os_version, rax_options), mirroring the columns already present on
    InstanceUsage/InstanceExists.
    """

    def forwards(self, orm):
        # Adding field 'InstanceReconcile.tenant'
        db.add_column(u'stacktach_instancereconcile', 'tenant',
                      self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceReconcile.os_architecture'
        db.add_column(u'stacktach_instancereconcile', 'os_architecture',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceReconcile.os_distro'
        db.add_column(u'stacktach_instancereconcile', 'os_distro',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceReconcile.os_version'
        db.add_column(u'stacktach_instancereconcile', 'os_version',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceReconcile.rax_options'
        db.add_column(u'stacktach_instancereconcile', 'rax_options',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'InstanceReconcile.tenant'
        db.delete_column(u'stacktach_instancereconcile', 'tenant')

        # Deleting field 'InstanceReconcile.os_architecture'
        db.delete_column(u'stacktach_instancereconcile', 'os_architecture')

        # Deleting field 'InstanceReconcile.os_distro'
        db.delete_column(u'stacktach_instancereconcile', 'os_distro')

        # Deleting field 'InstanceReconcile.os_version'
        db.delete_column(u'stacktach_instancereconcile', 'os_version')

        # Deleting field 'InstanceReconcile.rax_options'
        db.delete_column(u'stacktach_instancereconcile', 'rax_options')

    # Frozen ORM state at the time of this migration (auto-generated by
    # South; do not hand-edit field definitions here).
    models = {
        u'stacktach.deployment': {
            'Meta': {'object_name': 'Deployment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'stacktach.instancedeletes': {
            'Meta': {'object_name': 'InstanceDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
        },
        u'stacktach.instanceexists': {
            'Meta': {'object_name': 'InstanceExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
        },
        u'stacktach.instancereconcile': {
            'Meta': {'object_name': 'InstanceReconcile'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.instanceusage': {
            'Meta': {'object_name': 'InstanceUsage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.jsonreport': {
            'Meta': {'object_name': 'JsonReport'},
            'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'stacktach.lifecycle': {
            'Meta': {'object_name': 'Lifecycle'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
            'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.rawdata': {
            'Meta': {'object_name': 'RawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.rawdataimagemeta': {
            'Meta': {'object_name': 'RawDataImageMeta'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'stacktach.requesttracker': {
            'Meta': {'object_name': 'RequestTracker'},
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.timing': {
            'Meta': {'object_name': 'Timing'},
            'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
        }
    }

    complete_apps = ['stacktach']

View File

@ -59,6 +59,14 @@ class RawData(models.Model):
return "%s %s %s" % (self.event, self.instance, self.state)
class RawDataImageMeta(models.Model):
    """Image metadata captured from a notification's payload.

    Linked to the RawData row the notification was stored as; the os_*
    and rax_options values come from the payload's image_meta dict (see
    Notification in stacktach/notification.py).
    """
    # The notification this metadata was extracted from (required).
    raw = models.ForeignKey(RawData, null=False)
    os_architecture = models.TextField(null=True, blank=True)
    os_distro = models.TextField(null=True, blank=True)
    os_version = models.TextField(null=True, blank=True)
    rax_options = models.TextField(null=True, blank=True)
class Lifecycle(models.Model):
"""The Lifecycle table is the Master for a group of
Timing detail records. There is one Lifecycle row for
@ -88,6 +96,18 @@ class InstanceUsage(models.Model):
db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
os_distro = models.TextField(null=True, blank=True)
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
def deployment(self):
raws = RawData.objects.filter(request_id=self.request_id)
if raws.count() == 0:
return False
raw = raws[0]
return raw.deployment
class InstanceDeletes(models.Model):
instance = models.CharField(max_length=50, null=True,
@ -98,16 +118,44 @@ class InstanceDeletes(models.Model):
decimal_places=6, db_index=True)
raw = models.ForeignKey(RawData, null=True)
def deployment(self):
return self.raw.deployment
class InstanceReconcile(models.Model):
    """Instance usage obtained from an external source of truth.

    Rows are written by the reconciler (stacktach/reconciler) when the
    notification-derived usage records are incomplete; ``source``
    identifies where the reconciled data came from.
    """
    # Bookkeeping timestamps maintained automatically by Django.
    row_created = models.DateTimeField(auto_now_add=True)
    row_updated = models.DateTimeField(auto_now=True)
    instance = models.CharField(max_length=50, null=True,
                                blank=True, db_index=True)
    # launched_at/deleted_at are stored as unix timestamps with
    # microsecond precision (Decimal, 6 decimal places).
    launched_at = models.DecimalField(null=True, max_digits=20,
                                      decimal_places=6, db_index=True)
    deleted_at = models.DecimalField(null=True, max_digits=20,
                                     decimal_places=6, db_index=True)
    instance_type_id = models.CharField(max_length=50,
                                        null=True,
                                        blank=True,
                                        db_index=True)
    tenant = models.CharField(max_length=50, null=True, blank=True,
                              db_index=True)
    os_architecture = models.TextField(null=True, blank=True)
    os_distro = models.TextField(null=True, blank=True)
    os_version = models.TextField(null=True, blank=True)
    rax_options = models.TextField(null=True, blank=True)
    # Identifier of the external system the reconciled data came from.
    source = models.CharField(max_length=150, null=True,
                              blank=True, db_index=True)
class InstanceExists(models.Model):
PENDING = 'pending'
VERIFYING = 'verifying'
VERIFIED = 'verified'
RECONCILED = 'reconciled'
FAILED = 'failed'
STATUS_CHOICES = [
(PENDING, 'Pending Verification'),
(VERIFYING, 'Currently Being Verified'),
(VERIFIED, 'Passed Verification'),
(RECONCILED, 'Passed Verification After Reconciliation'),
(FAILED, 'Failed Verification'),
]
instance = models.CharField(max_length=50, null=True,
@ -138,6 +186,13 @@ class InstanceExists(models.Model):
send_status = models.IntegerField(null=True, default=0, db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
os_distro = models.TextField(null=True, blank=True)
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
def deployment(self):
return self.raw.deployment
class Timing(models.Model):
@ -181,3 +236,7 @@ class JsonReport(models.Model):
name = models.CharField(max_length=50, db_index=True)
version = models.IntegerField(default=1)
json = models.TextField()
def get_model_fields(model):
    """Return the Django field objects declared on *model*'s _meta."""
    meta = model._meta
    return meta.fields

85
stacktach/notification.py Normal file
View File

@ -0,0 +1,85 @@
from stacktach import utils
from stacktach import image_type
class Notification(object):
    """Convenience wrapper around a deserialized notification body.

    Pulls the commonly-used values out of the raw dict once at
    construction time and exposes the values that must be derived from
    other fields (host/service from publisher_id, etc.) as properties.

    Defect fixed: ``rawdata_kwargs`` previously listed the 'publisher'
    key twice in its dict literal; the duplicate has been removed
    (behavior is unchanged -- the last duplicate silently won).
    """

    def __init__(self, body):
        self.body = body
        self.request_id = body.get('_context_request_id', "")
        self.payload = body.get('payload', {})
        self.state = self.payload.get('state', "")
        self.old_state = self.payload.get('old_state', "")
        self.old_task = self.payload.get('old_task_state', "")
        self.task = self.payload.get('new_task_state', "")
        self.image_type = image_type.get_numeric_code(self.payload)
        # publisher_id and event_type are required; a malformed body
        # raises KeyError here rather than producing a broken record.
        self.publisher = self.body['publisher_id']
        self.event = self.body['event_type']
        image_meta = self.payload.get('image_meta', {})
        self.os_architecture = image_meta.get('org.openstack__1__architecture',
                                              '')
        self.os_distro = image_meta.get('org.openstack__1__os_distro', '')
        self.os_version = image_meta.get('org.openstack__1__os_version', '')
        self.rax_options = image_meta.get('com.rackspace__1__options', '')

    @property
    def when(self):
        """Event time as a unix timestamp (Decimal via str_time_to_unix)."""
        when = self.body.get('timestamp', None)
        if not when:
            when = self.body['_context_timestamp']  # Old way of doing it
        when = utils.str_time_to_unix(when)
        return when

    def rawdata_kwargs(self, deployment, routing_key, json):
        """Build the kwargs dict used to create a RawData row.

        :param deployment: Deployment model instance the event belongs to.
        :param routing_key: AMQP routing key the message arrived on.
        :param json: the original serialized message body.
        """
        return {
            'deployment': deployment,
            'routing_key': routing_key,
            'event': self.event,
            'publisher': self.publisher,
            'json': json,
            'state': self.state,
            'old_state': self.old_state,
            'task': self.task,
            'old_task': self.old_task,
            'image_type': self.image_type,
            'when': self.when,
            'service': self.service,
            'host': self.host,
            'instance': self.instance,
            'request_id': self.request_id,
            'tenant': self.tenant,
            'os_architecture': self.os_architecture,
            'os_distro': self.os_distro,
            'os_version': self.os_version,
            'rax_options': self.rax_options
        }

    @property
    def instance(self):
        # instance UUID's seem to hide in a lot of odd places.
        instance = self.payload.get('instance_id', None)
        instance = self.payload.get('instance_uuid', instance)
        if not instance:
            instance = self.payload.get('exception', {}).get('kwargs', {}).get('uuid')
        if not instance:
            instance = self.payload.get('instance', {}).get('uuid')
        return instance

    @property
    def host(self):
        """Host portion of publisher_id ('service.host'), or None."""
        host = None
        parts = self.publisher.split('.')
        if len(parts) > 1:
            host = ".".join(parts[1:])
        return host

    @property
    def service(self):
        """Service portion (first segment) of publisher_id."""
        parts = self.publisher.split('.')
        return parts[0]

    @property
    def tenant(self):
        """Tenant id from the payload, falling back to the context."""
        tenant = self.body.get('_context_project_id', None)
        tenant = self.payload.get('tenant_id', tenant)
        return tenant

View File

@ -0,0 +1,157 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
from stacktach import models
from stacktach.reconciler import exceptions
from stacktach.reconciler import nova
from stacktach import stacklog
# Fallback client class used by Reconciler.load_client() when the
# configured 'client_class' value is not recognized.
DEFAULT_CLIENT = nova.JSONBridgeClient
# Built-in default configuration for the reconciler.
# NOTE(review): the url below looks like a developer-specific endpoint —
# presumably deployments override this via their own config file; confirm.
CONFIG = {
    'client_class': 'JSONBridgeClient',
    'client': {
        'url': 'http://stack.dev.ramielrowe.com:8080/query/',
        'username': '',
        'password': '',
        'databases': {
            'RegionOne': 'nova',
        }
    },
    # Path to the JSON file mapping deployment names to region names.
    'region_mapping_loc': '/etc/stacktach/region_mapping.json'
}
class Reconciler(object):
    """Reconcile StackTach usage records against the nova database.

    A nova client (e.g. the JSON bridge client) looks up the authoritative
    instance row, and InstanceReconcile records are written for usage that
    the nova data can explain.
    """

    def __init__(self, config, client=None, region_mapping=None):
        self.config = config
        self.client = client or Reconciler.load_client(config)
        self.region_mapping = (region_mapping or
                               Reconciler.load_region_mapping(config))

    @classmethod
    def load_client(cls, config):
        """Instantiate the nova client named by config['client_class']."""
        if config.get('client_class') == 'JSONBridgeClient':
            return nova.JSONBridgeClient(config['client'])
        return DEFAULT_CLIENT(config['client'])

    @classmethod
    def load_region_mapping(cls, config):
        """Load the deployment-name -> region mapping from disk."""
        with open(config['region_mapping_loc']) as mapping_file:
            return json.load(mapping_file)

    def _region_for_usage(self, usage):
        """Map a usage's deployment to a region name; False when unknown."""
        deployment = usage.deployment()
        if not deployment:
            return False
        deployment_name = str(deployment.name)
        if deployment_name not in self.region_mapping:
            return False
        return self.region_mapping[deployment_name]

    def _reconcile_instance(self, usage, src, deleted_at=None):
        """Persist an InstanceReconcile row mirroring *usage*."""
        models.InstanceReconcile(
            instance=usage.instance,
            launched_at=usage.launched_at,
            deleted_at=deleted_at,
            instance_type_id=usage.instance_type_id,
            source='reconciler:%s' % src,
            tenant=usage.tenant,
            os_architecture=usage.os_architecture,
            os_distro=usage.os_distro,
            os_version=usage.os_version,
            rax_options=usage.rax_options,
        ).save()

    def _fields_match(self, exists, instance):
        """Compare an exists record to a nova instance row.

        Returns 0 on a full match, otherwise a non-zero code for the
        discrepancy found (field mismatch or deletion disagreement).
        """
        compared_fields = ('launched_at', 'instance_type_id', 'tenant',
                           'os_architecture', 'os_distro', 'os_version',
                           'rax_options')
        match_code = 0
        if any(getattr(exists, field) != instance[field]
               for field in compared_fields):
            match_code = 1
        if exists.deleted_at is not None:
            # Exists says deleted
            if (instance['deleted'] and
                    exists.deleted_at != instance['deleted_at']):
                # Nova says deleted, but times don't match
                match_code = 2
            elif not instance['deleted']:
                # Nova says not deleted
                match_code = 3
        elif instance['deleted']:
            # Exists says not deleted, but Nova says deleted
            match_code = 4
        return match_code

    def missing_exists_for_instance(self, launched_id,
                                    period_beginning):
        """Try to explain a launch that has no matching exists record.

        When nova shows the instance was deleted before the audit period
        began, an InstanceReconcile row is written and True is returned.
        """
        reconciled = False
        launch = models.InstanceUsage.objects.get(id=launched_id)
        region = self._region_for_usage(launch)
        try:
            nova_row = self.client.get_instance(region, launch.instance)
            deleted_at = nova_row['deleted_at']
            if nova_row['deleted'] and deleted_at is not None:
                # Instance has been deleted; an exists record is only
                # expected if the deletion happened within the period.
                if deleted_at < period_beginning:
                    self._reconcile_instance(launch, self.client.src_str,
                                             deleted_at=deleted_at)
                    reconciled = True
        except exceptions.NotFound:
            stacklog.info("Couldn't find instance for launch %s" % launched_id)
        return reconciled

    def failed_validation(self, exists):
        """Try to reconcile an exists record that failed validation.

        Returns True when the nova row fully agrees with the exists
        record and a reconcile row was written; otherwise logs the
        mismatch code and returns False.
        """
        reconciled = False
        region = self._region_for_usage(exists)
        try:
            nova_row = self.client.get_instance(region, exists.instance)
            match_code = self._fields_match(exists, nova_row)
            if match_code == 0:
                self._reconcile_instance(exists, self.client.src_str,
                                         deleted_at=exists.deleted_at)
                reconciled = True
            else:
                msg = "Exists %s failed reconciliation with code %s"
                msg %= (exists.id, match_code)
                stacklog.info(msg)
        except exceptions.NotFound:
            stacklog.info("Couldn't find instance for exists %s" % exists.id)
        return reconciled

View File

@ -0,0 +1,3 @@
class NotFound(Exception):
    """Raised when a nova client cannot find the requested instance.

    The message is now also passed to Exception.__init__ so that
    str(exc) and log output show it; the original stored it only on
    self.message, which left str(exc) empty.
    """

    def __init__(self, message="NotFound"):
        super(NotFound, self).__init__(message)
        # Kept for backwards compatibility with callers reading .message.
        self.message = message

View File

@ -0,0 +1,90 @@
import requests
from stacktach import utils as stackutils
from stacktach.reconciler import exceptions
from stacktach.reconciler.utils import empty_reconciler_instance
# SQL template for fetching a single instance row by uuid.
# NOTE(review): the uuid is interpolated directly into the SQL string
# (see JSONBridgeClient.get_instance) — safe only for trusted,
# well-formed uuids; the JSON bridge takes raw SQL, not parameters.
GET_INSTANCE_QUERY = "SELECT * FROM instances where uuid ='%s';"
# Maps nova instance_system_metadata keys to reconciler field names.
METADATA_MAPPING = {
    'image_org.openstack__1__architecture': 'os_architecture',
    'image_org.openstack__1__os_distro': 'os_distro',
    'image_org.openstack__1__os_version': 'os_version',
    'image_com.rackspace__1__options': 'rax_options',
}
# Quoted, comma-joined key list used in the IN (...) clause below.
METADATA_FIELDS = ["'%s'" % x for x in METADATA_MAPPING.keys()]
METADATA_FIELDS = ','.join(METADATA_FIELDS)
GET_INSTANCE_SYSTEM_METADATA = """
SELECT * FROM instance_system_metadata
WHERE instance_uuid = '%s' AND
deleted = 0 AND `key` IN (%s);
"""
# Bake the key list in now; the instance uuid stays as '%s' for later.
GET_INSTANCE_SYSTEM_METADATA %= ('%s', METADATA_FIELDS)
def _json(result):
    """Return the decoded JSON body of a requests response.

    Supports both newer requests releases (where ``.json`` is a method)
    and older ones (where ``.json`` is the already-decoded body).
    """
    decoded = result.json
    if callable(decoded):
        return decoded()
    return decoded
class JSONBridgeClient(object):
    """Nova-database client that runs SQL through a JSON bridge service."""

    # Source tag recorded on InstanceReconcile rows created via this client.
    src_str = 'json_bridge:nova_db'

    def __init__(self, config):
        self.config = config

    def _url_for_region(self, region):
        """Return the bridge endpoint for *region*'s nova database."""
        return self.config['url'] + self.config['databases'][region]

    def _do_query(self, region, query):
        """POST *query* to the bridge and return the decoded response.

        NOTE(review): TLS verification is disabled (verify=False) —
        presumably the bridge uses a self-signed cert; confirm before
        reusing this client elsewhere.
        """
        payload = {'sql': query}
        credentials = (self.config['username'], self.config['password'])
        response = requests.post(self._url_for_region(region), payload,
                                 verify=False, auth=credentials)
        return _json(response)

    def _to_reconciler_instance(self, instance, metadata=None):
        """Convert a raw nova instances row into a reconciler dict."""
        r_instance = empty_reconciler_instance()
        r_instance.update({
            'id': instance['uuid'],
            'tenant': instance['project_id'],
            'instance_type_id': str(instance['instance_type_id']),
        })
        if instance['launched_at'] is not None:
            r_instance['launched_at'] = stackutils.str_time_to_unix(
                instance['launched_at'])
        if instance['terminated_at'] is not None:
            r_instance['deleted_at'] = stackutils.str_time_to_unix(
                instance['terminated_at'])
        if instance['deleted'] != 0:
            r_instance['deleted'] = True
        if metadata is not None:
            r_instance.update(metadata)
        return r_instance

    def _get_instance_meta(self, region, uuid):
        """Fetch the image-metadata rows mapped into reconciler fields."""
        rows = self._do_query(region, GET_INSTANCE_SYSTEM_METADATA % uuid)
        metadata = {}
        for row in rows['result']:
            mapped_name = METADATA_MAPPING.get(row['key'])
            if mapped_name is not None:
                metadata[mapped_name] = row['value']
        return metadata

    def get_instance(self, region, uuid, get_metadata=False):
        """Look up one instance row; raise NotFound when it is absent.

        NOTE(review): *uuid* is interpolated straight into SQL — safe
        only as long as callers pass trusted, well-formed uuids.
        """
        rows = self._do_query(region, GET_INSTANCE_QUERY % uuid)['result']
        if not rows:
            msg = "Couldn't find instance (%s) using JSON Bridge in region (%s)"
            raise exceptions.NotFound(msg % (uuid, region))
        metadata = None
        if get_metadata:
            metadata = self._get_instance_meta(region, uuid)
        return self._to_reconciler_instance(rows[0], metadata=metadata)

View File

@ -0,0 +1,14 @@
def empty_reconciler_instance():
    """Return a fresh reconciler-instance dict with every field unset.

    String-valued metadata fields default to '' while the rest default
    to None (or False for 'deleted'); callers fill in what they know.
    """
    return {
        'id': None,
        'tenant': None,
        'launched_at': None,
        'deleted': False,
        'deleted_at': None,
        'instance_type_id': None,
        'os_architecture': '',
        'os_distro': '',
        'os_version': '',
        'rax_options': '',
    }

View File

@ -73,4 +73,10 @@ def warn(msg, name=None):
def error(msg, name=None):
    """Log *msg* at ERROR level on the named (or default) logger."""
    if name is None:
        name = default_logger_name
    get_logger(name=name).error(msg)
    # NOTE(review): this call appears twice in this view; it looks like
    # diff residue (old/new copies of the same trailing line) rather than
    # an intentional double-log — confirm against the actual file.
    get_logger(name=name).error(msg)
def info(msg, name=None):
    """Log *msg* at INFO level on the named (or default) logger."""
    logger_name = default_logger_name if name is None else name
    get_logger(name=logger_name).info(msg)

View File

@ -1,4 +1,4 @@
# Copyright (c) 2012 - Rackspace Inc.
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@ -18,900 +18,40 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import decimal
from django.utils import unittest
import datetime_to_decimal
from models import *
import test_utils
from test_utils import INSTANCE_ID_1
from test_utils import INSTANCE_ID_2
from test_utils import MESSAGE_ID_1
from test_utils import MESSAGE_ID_2
from test_utils import REQUEST_ID_1
from test_utils import REQUEST_ID_2
from test_utils import REQUEST_ID_3
from test_utils import create_raw
import utils
import views
class ViewsUtilsTestCase(unittest.TestCase):
    """Tests for the small time-parsing helper in stacktach.utils."""

    def test_str_time_to_unix(self):
        # Renamed from test_srt_time_to_unix (typo); unittest discovery
        # still picks it up via the test_ prefix, so nothing else changes.
        unix = utils.str_time_to_unix('2012-12-21 12:34:56.123')
        self.assertEqual(unix, decimal.Decimal('1356093296.123'))
class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
    """End-to-end tests for lifecycle/timing/request-tracker aggregation.

    Each test feeds raw notifications through views.aggregate_lifecycle
    and asserts on the Lifecycle, Timing and RequestTracker rows created.
    """
    def setUp(self):
        """Create a deployment plus update/start/end raws for one reboot."""
        self.deployment = Deployment(name='TestDeployment')
        self.deployment.save()
        when1 = utils.str_time_to_unix('2012-12-21 12:34:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 12:34:56.123')
        when3 = utils.str_time_to_unix('2012-12-21 12:36:56.124')
        self.update_raw = create_raw(self.deployment, when1,
                                     'compute.instance.update',
                                     host='api', service='api')
        self.start_raw = create_raw(self.deployment, when2,
                                    'compute.instance.reboot.start')
        self.end_raw = create_raw(self.deployment, when3,
                                  'compute.instance.reboot.end',
                                  old_task='reboot')
    def tearDown(self):
        """Wipe all rows so each test starts from an empty database."""
        Deployment.objects.all().delete()
        RawData.objects.all().delete()
        Lifecycle.objects.all().delete()
        Timing.objects.all().delete()
        RequestTracker.objects.all().delete()
    def assertOnLifecycle(self, lifecycle, instance, last_raw):
        """Assert a lifecycle row tracks *instance* and ends at *last_raw*."""
        self.assertEqual(lifecycle.instance, instance)
        self.assertEqual(lifecycle.last_raw.id, last_raw.id)
        self.assertEqual(lifecycle.last_state, last_raw.state)
        self.assertEqual(lifecycle.last_task_state, last_raw.old_task)
    def assertOnTiming(self, timing, lifecycle, start_raw, end_raw, diff):
        """Assert a timing row links the given raws and has duration *diff*."""
        self.assertEqual(timing.lifecycle.id, lifecycle.id)
        self.assertEqual(timing.start_raw.id, start_raw.id)
        self.assertEqual(timing.end_raw.id, end_raw.id)
        self.assertEqual(timing.start_when, start_raw.when)
        self.assertEqual(timing.end_when, end_raw.when)
        self.assertEqual(timing.diff, decimal.Decimal(diff))
    def assertOnTracker(self, tracker, request_id, lifecycle, start, diff=None):
        """Assert a request tracker row; duration only checked when given."""
        self.assertEqual(tracker.request_id, request_id)
        self.assertEqual(tracker.lifecycle.id, lifecycle.id)
        self.assertEqual(tracker.start, start)
        if diff:
            self.assertEqual(tracker.duration, diff)
    def test_aggregate_lifecycle_and_timing(self):
        """One reboot produces a single lifecycle and a single timing."""
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        lifecycles = Lifecycle.objects.select_related()\
            .filter(instance=INSTANCE_ID_1)
        self.assertEqual(len(lifecycles), 1)
        lifecycle = lifecycles[0]
        self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.start_raw)
        views.aggregate_lifecycle(self.end_raw)
        lifecycles = Lifecycle.objects.select_related()\
            .filter(instance=INSTANCE_ID_1)
        self.assertEqual(len(lifecycles), 1)
        lifecycle = lifecycles[0]
        self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.end_raw)
        timings = Timing.objects.select_related()\
            .filter(lifecycle=lifecycle)
        # NOTE(review): re-checks len(lifecycles) here; probably meant
        # len(timings) — confirm before changing.
        self.assertEqual(len(lifecycles), 1)
        timing = timings[0]
        expected_diff = self.end_raw.when - self.start_raw.when
        self.assertOnTiming(timing, lifecycle, self.start_raw, self.end_raw,
                            expected_diff)
    def test_multiple_instance_lifecycles(self):
        """Two instances get independent lifecycles and timings."""
        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
        update_raw2 = create_raw(self.deployment, when1,
                                 'compute.instance.update',
                                 instance=INSTANCE_ID_2,
                                 request_id=REQUEST_ID_2,
                                 host='api', service='api')
        start_raw2 = create_raw(self.deployment, when2,
                                'compute.instance.resize.start',
                                instance=INSTANCE_ID_2,
                                request_id=REQUEST_ID_2)
        end_raw2 = create_raw(self.deployment, when3,
                              'compute.instance.resize.end',
                              old_task='resize',
                              instance=INSTANCE_ID_2,
                              request_id=REQUEST_ID_2)
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        views.aggregate_lifecycle(update_raw2)
        views.aggregate_lifecycle(start_raw2)
        lifecycles = Lifecycle.objects.all().order_by('id')
        self.assertEqual(len(lifecycles), 2)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.start_raw)
        lifecycle2 = lifecycles[1]
        self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, start_raw2)
        views.aggregate_lifecycle(end_raw2)
        views.aggregate_lifecycle(self.end_raw)
        lifecycles = Lifecycle.objects.all().order_by('id')
        self.assertEqual(len(lifecycles), 2)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.end_raw)
        lifecycle2 = lifecycles[1]
        self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, end_raw2)
        timings = Timing.objects.all().order_by('id')
        self.assertEqual(len(timings), 2)
        timing1 = timings[0]
        expected_diff1 = self.end_raw.when - self.start_raw.when
        self.assertOnTiming(timing1, lifecycle1, self.start_raw, self.end_raw,
                            expected_diff1)
        expected_diff2 = end_raw2.when - start_raw2.when
        timing2 = timings[1]
        self.assertOnTiming(timing2, lifecycle2, start_raw2, end_raw2,
                            expected_diff2)
    def test_same_instance_multiple_timings(self):
        """Overlapping actions on one instance yield two timings."""
        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
        update_raw2 = create_raw(self.deployment, when1,
                                 'compute.instance.update',
                                 request_id=REQUEST_ID_2,
                                 host='api', service='api')
        start_raw2 = create_raw(self.deployment, when2,
                                'compute.instance.resize.start',
                                request_id=REQUEST_ID_2)
        end_raw2 = create_raw(self.deployment, when3,
                              'compute.instance.resize.end',
                              old_task='resize',
                              request_id=REQUEST_ID_2)
        # First action started
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        # Second action started, first end is late
        views.aggregate_lifecycle(update_raw2)
        views.aggregate_lifecycle(start_raw2)
        # Finally get first end
        views.aggregate_lifecycle(self.end_raw)
        # Second end
        views.aggregate_lifecycle(end_raw2)
        lifecycles = Lifecycle.objects.select_related()\
            .filter(instance=INSTANCE_ID_1)
        self.assertEqual(len(lifecycles), 1)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
        timings = Timing.objects.all().order_by('id')
        self.assertEqual(len(timings), 2)
        timing1 = timings[0]
        expected_diff1 = self.end_raw.when - self.start_raw.when
        self.assertOnTiming(timing1, lifecycle1, self.start_raw, self.end_raw,
                            expected_diff1)
        expected_diff2 = end_raw2.when - start_raw2.when
        timing2 = timings[1]
        self.assertOnTiming(timing2, lifecycle1, start_raw2, end_raw2,
                            expected_diff2)
    def test_aggregate_lifecycle_and_kpi(self):
        """A full request produces one tracker with a final duration."""
        views.aggregate_lifecycle(self.update_raw)
        lifecycles = Lifecycle.objects.select_related()\
            .filter(instance=INSTANCE_ID_1)
        self.assertEqual(len(lifecycles), 1)
        lifecycle = lifecycles[0]
        self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.update_raw)
        trackers = RequestTracker.objects.filter(request_id=REQUEST_ID_1)
        self.assertEqual(len(trackers), 1)
        tracker = trackers[0]
        self.assertOnTracker(tracker, REQUEST_ID_1, lifecycle,
                             self.update_raw.when)
        views.aggregate_lifecycle(self.start_raw)
        views.aggregate_lifecycle(self.end_raw)
        trackers = RequestTracker.objects.filter(request_id=REQUEST_ID_1)
        self.assertEqual(len(trackers), 1)
        tracker = trackers[0]
        expected_diff = self.end_raw.when-self.update_raw.when
        self.assertOnTracker(tracker, REQUEST_ID_1, lifecycle,
                             self.update_raw.when, expected_diff)
    def test_multiple_instance_kpi(self):
        """Two instances/requests each get their own tracker."""
        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
        update_raw2 = create_raw(self.deployment, when1,
                                 'compute.instance.update',
                                 instance=INSTANCE_ID_2,
                                 request_id=REQUEST_ID_2,
                                 host='api', service='api')
        start_raw2 = create_raw(self.deployment, when2,
                                'compute.instance.resize.start',
                                instance=INSTANCE_ID_2,
                                request_id=REQUEST_ID_2)
        end_raw2 = create_raw(self.deployment, when3,
                              'compute.instance.resize.end',
                              instance=INSTANCE_ID_2,
                              old_task='resize',
                              request_id=REQUEST_ID_2)
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        views.aggregate_lifecycle(self.end_raw)
        views.aggregate_lifecycle(update_raw2)
        views.aggregate_lifecycle(start_raw2)
        views.aggregate_lifecycle(end_raw2)
        lifecycles = Lifecycle.objects.all().order_by('id')
        self.assertEqual(len(lifecycles), 2)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.end_raw)
        lifecycle2 = lifecycles[1]
        self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, end_raw2)
        trackers = RequestTracker.objects.all().order_by('id')
        self.assertEqual(len(trackers), 2)
        tracker1 = trackers[0]
        expected_diff = self.end_raw.when-self.update_raw.when
        self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
                             self.update_raw.when, expected_diff)
        tracker2 = trackers[1]
        expected_diff2 = end_raw2.when-update_raw2.when
        self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle2,
                             update_raw2.when, expected_diff2)
    def test_single_instance_multiple_kpi(self):
        """Two requests on one instance share a lifecycle, not trackers."""
        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
        update_raw2 = create_raw(self.deployment, when1,
                                 'compute.instance.update',
                                 request_id=REQUEST_ID_2,
                                 host='api', service='api')
        start_raw2 = create_raw(self.deployment, when2,
                                'compute.instance.resize.start',
                                request_id=REQUEST_ID_2)
        end_raw2 = create_raw(self.deployment, when3,
                              'compute.instance.resize.end',
                              old_task='resize',
                              request_id=REQUEST_ID_2)
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        views.aggregate_lifecycle(self.end_raw)
        views.aggregate_lifecycle(update_raw2)
        views.aggregate_lifecycle(start_raw2)
        views.aggregate_lifecycle(end_raw2)
        lifecycles = Lifecycle.objects.all().order_by('id')
        self.assertEqual(len(lifecycles), 1)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
        trackers = RequestTracker.objects.all().order_by('id')
        self.assertEqual(len(trackers), 2)
        tracker1 = trackers[0]
        expected_diff1 = self.end_raw.when-self.update_raw.when
        self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
                             self.update_raw.when, expected_diff1)
        tracker2 = trackers[1]
        expected_diff2 = end_raw2.when-update_raw2.when
        self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle1,
                             update_raw2.when, expected_diff2)
    def test_single_instance_multiple_kpi_out_of_order(self):
        """Late-arriving end events still attribute durations correctly."""
        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
        update_raw2 = create_raw(self.deployment, when1,
                                 'compute.instance.update',
                                 request_id=REQUEST_ID_2,
                                 host='api', service='api')
        start_raw2 = create_raw(self.deployment, when2,
                                'compute.instance.resize.start',
                                request_id=REQUEST_ID_2)
        end_raw2 = create_raw(self.deployment, when3,
                              'compute.instance.resize.end',
                              old_task='resize',
                              request_id=REQUEST_ID_2)
        # First action started
        views.aggregate_lifecycle(self.update_raw)
        views.aggregate_lifecycle(self.start_raw)
        # Second action started, first end is late
        views.aggregate_lifecycle(update_raw2)
        views.aggregate_lifecycle(start_raw2)
        # Finally get first end
        views.aggregate_lifecycle(self.end_raw)
        # Second end
        views.aggregate_lifecycle(end_raw2)
        lifecycles = Lifecycle.objects.all().order_by('id')
        self.assertEqual(len(lifecycles), 1)
        lifecycle1 = lifecycles[0]
        self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
        trackers = RequestTracker.objects.all().order_by('id')
        self.assertEqual(len(trackers), 2)
        tracker1 = trackers[0]
        expected_diff1 = self.end_raw.when-self.update_raw.when
        self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
                             self.update_raw.when, expected_diff1)
        tracker2 = trackers[1]
        expected_diff2 = end_raw2.when-update_raw2.when
        self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle1,
                             update_raw2.when, expected_diff2)
class ViewsUsageTestCase(unittest.TestCase):
    """Unit tests for the individual usage-processing view helpers.

    Each test exercises one _process_* function against a synthetic raw
    notification and asserts on the rows it creates or updates.
    """
    def setUp(self):
        """Create the deployment used by every test's raws."""
        self.deployment = Deployment(name='TestDeployment')
        self.deployment.save()
    def tearDown(self):
        """Remove rows created during a test so cases stay independent."""
        RawData.objects.all().delete()
        InstanceUsage.objects.all().delete()
        InstanceExists.objects.all().delete()
    def test_process_new_launch_create_start(self):
        """create_start seeds a usage row with the instance type."""
        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
        json = test_utils.make_create_start_json()
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['create_start'], json=json)
        views._process_usage_for_new_launch(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.instance, INSTANCE_ID_1)
        self.assertEqual(usage.instance_type_id, '1')
        self.assertEqual(usage.request_id, REQUEST_ID_1)
    def test_process_new_launch_resize_prep_start(self):
        """resize_prep_start seeds a usage row without a type id."""
        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
        json = test_utils.make_resize_prep_start_json()
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['resize_prep_start'], json=json)
        views._process_usage_for_new_launch(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.instance, INSTANCE_ID_1)
        self.assertEqual(usage.request_id, REQUEST_ID_1)
        # The instance_type_id from resize prep notifications is the old one,
        # thus we ignore it.
        self.assertIsNone(usage.instance_type_id)
    def test_process_new_launch_resize_revert_start(self):
        """resize_revert_start also seeds a usage row without a type id."""
        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
        json = test_utils.make_resize_revert_start_json()
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['resize_revert_start'],
                         json=json)
        views._process_usage_for_new_launch(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.instance, INSTANCE_ID_1)
        self.assertEqual(usage.request_id, REQUEST_ID_1)
        # The instance_type_id from resize revert notifications is the old one,
        # thus we ignore it.
        self.assertIsNone(usage.instance_type_id)
    def test_process_updates_create_end(self):
        """create_end fills in launched_at on the pending usage row."""
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
            'instance_type_id': '1',
        }
        InstanceUsage(**values).save()
        sent = '2012-12-21 12:34:50.123'
        when = utils.str_time_to_unix(sent)
        json = test_utils.make_create_end_json(sent)
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['create_end'], json=json)
        views._process_usage_for_updates(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.launched_at, when)
    def test_process_updates_resize_finish_end(self):
        """resize_finish_end fills in launched_at for the resized usage."""
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
            'instance_type_id': '2',
        }
        InstanceUsage(**values).save()
        sent = '2012-12-21 12:34:50.123'
        when = utils.str_time_to_unix(sent)
        json = test_utils.make_resize_finish_json(sent)
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['resize_finish_end'], json=json)
        views._process_usage_for_updates(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.launched_at, when)
    def test_process_updates_revert_end(self):
        """resize_revert_end restores the old type and sets launched_at."""
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
        }
        InstanceUsage(**values).save()
        sent = '2012-12-21 12:34:50.123'
        when = utils.str_time_to_unix(sent)
        json = test_utils.make_resize_revert_end_json(sent)
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['resize_revert_end'], json=json)
        views._process_usage_for_updates(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.launched_at, when)
        self.assertEqual(usage.instance_type_id, '1')
    def test_process_updates_resize_prep_end(self):
        """resize_prep_end records the new instance type id."""
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
        }
        InstanceUsage(**values).save()
        sent = '2012-12-21 12:34:50.123'
        when = utils.str_time_to_unix(sent)
        json = test_utils.make_resize_prep_end_json(sent)
        raw = create_raw(self.deployment, when,
                         views.INSTANCE_EVENT['resize_prep_end'], json=json)
        views._process_usage_for_updates(raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertEqual(usage.instance_type_id, '2')
    def test_process_delete(self):
        """delete_end creates an InstanceDeletes row tied to the raw."""
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        deleted_str = '2012-12-21 12:34:50.123'
        deleted = utils.str_time_to_unix(deleted_str)
        json = test_utils.make_delete_end_json(launched_str, deleted_str)
        raw = create_raw(self.deployment, deleted,
                         views.INSTANCE_EVENT['delete_end'], json=json)
        views._process_delete(raw)
        delete = InstanceDeletes.objects.all()
        self.assertEqual(len(delete), 1)
        delete = delete[0]
        self.assertEqual(delete.instance, INSTANCE_ID_1)
        self.assertEqual(delete.launched_at, launched)
        self.assertEqual(delete.deleted_at, deleted)
        self.assertEqual(delete.raw.id, raw.id)
    def test_process_exists(self):
        """exists creates a PENDING InstanceExists linked to its usage."""
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
            'instance_type_id': '1',
            'launched_at': launched,
        }
        InstanceUsage(**values).save()
        exists_str = '2012-12-21 23:30:00.000'
        exists_time = utils.str_time_to_unix(exists_str)
        json = test_utils.make_exists_json(launched_str)
        raw = create_raw(self.deployment, exists_time,
                         views.INSTANCE_EVENT['exists'], json=json)
        views._process_exists(raw)
        # NOTE(review): this queries InstanceExists but assigns to 'usage';
        # it looks like it was meant to query InstanceUsage — confirm, since
        # the exists.usage.id assertion below depends on it.
        usage = InstanceExists.objects.filter(instance=INSTANCE_ID_1,
                                              launched_at = launched)[0]
        exists_rows = InstanceExists.objects.all()
        self.assertEqual(len(exists_rows), 1)
        exists = exists_rows[0]
        self.assertEqual(exists.instance, INSTANCE_ID_1)
        self.assertEqual(exists.launched_at, launched)
        self.assertEqual(exists.status, InstanceExists.PENDING)
        self.assertEqual(exists.usage.id, usage.id)
        self.assertEqual(exists.raw.id, raw.id)
        self.assertEqual(exists.message_id, MESSAGE_ID_1)
        self.assertIsNone(exists.deleted_at)
        self.assertEqual(exists.instance_type_id, '1')
    def test_process_exists_with_deleted_at(self):
        """exists with a deleted_at records the deletion time too."""
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        deleted_str = '2012-12-21 06:36:50.123'
        deleted = utils.str_time_to_unix(deleted_str)
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
            'instance_type_id': '1',
            'launched_at': launched,
        }
        InstanceUsage(**values).save()
        exists_str = '2012-12-21 23:30:00.000'
        exists_time = utils.str_time_to_unix(exists_str)
        json = test_utils.make_exists_json(launched_str, deleted_at=deleted_str)
        raw = create_raw(self.deployment, exists_time,
                         views.INSTANCE_EVENT['exists'], json=json)
        views._process_exists(raw)
        # NOTE(review): same InstanceExists-vs-InstanceUsage question as in
        # test_process_exists above — confirm the intended model.
        usage = InstanceExists.objects.filter(instance=INSTANCE_ID_1,
                                              launched_at = launched)[0]
        exists_rows = InstanceExists.objects.all()
        self.assertEqual(len(exists_rows), 1)
        exists = exists_rows[0]
        self.assertEqual(exists.instance, INSTANCE_ID_1)
        self.assertEqual(exists.launched_at, launched)
        self.assertEqual(exists.status, InstanceExists.PENDING)
        self.assertEqual(exists.usage.id, usage.id)
        self.assertEqual(exists.raw.id, raw.id)
        self.assertEqual(exists.message_id, MESSAGE_ID_1)
        self.assertEqual(exists.deleted_at, deleted)
        self.assertEqual(exists.instance_type_id, '1')
class ViewsUsageWorkflowTestCase(unittest.TestCase):
    def setUp(self):
        """Create the deployment shared by the usage-workflow tests."""
        self.deployment = Deployment(name='TestDeployment')
        self.deployment.save()
    def tearDown(self):
        """Remove rows created by a test so cases stay independent."""
        RawData.objects.all().delete()
        InstanceUsage.objects.all().delete()
        InstanceExists.objects.all().delete()
    def assertOnUsage(self, usage, instance, type_id, launched, request_id):
        """Assert the key fields of an InstanceUsage row."""
        self.assertEqual(usage.instance, instance)
        self.assertEqual(usage.instance_type_id, type_id)
        self.assertEqual(usage.launched_at, launched)
        self.assertEqual(usage.request_id, request_id)
    def test_create_workflow(self):
        """create_start then create_end yields one complete usage row."""
        created_str = '2012-12-21 06:30:50.123'
        created = utils.str_time_to_unix(created_str)
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        create_start_json = test_utils.make_create_start_json()
        create_end_json = test_utils.make_create_end_json(launched_str)
        create_start_raw = create_raw(self.deployment, created,
                                      views.INSTANCE_EVENT['create_start'],
                                      json=create_start_json)
        create_end_raw = create_raw(self.deployment, launched,
                                    views.INSTANCE_EVENT['create_end'],
                                    json=create_end_json)
        views.aggregate_usage(create_start_raw)
        views.aggregate_usage(create_end_raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertOnUsage(usage, INSTANCE_ID_1, '1', launched, REQUEST_ID_1)
    def test_create_workflow_start_late(self):
        """create_end arriving before create_start still yields one row."""
        created_str = '2012-12-21 06:30:50.123'
        created = utils.str_time_to_unix(created_str)
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        create_start_json = test_utils.make_create_start_json()
        create_end_json = test_utils.make_create_end_json(launched_str)
        create_start_raw = create_raw(self.deployment, created,
                                      views.INSTANCE_EVENT['create_start'],
                                      json=create_start_json)
        create_end_raw = create_raw(self.deployment, launched,
                                    views.INSTANCE_EVENT['create_end'],
                                    json=create_end_json)
        # End processed first, start arrives late.
        views.aggregate_usage(create_end_raw)
        views.aggregate_usage(create_start_raw)
        usages = InstanceUsage.objects.all()
        self.assertEqual(len(usages), 1)
        usage = usages[0]
        self.assertOnUsage(usage, INSTANCE_ID_1, '1', launched, REQUEST_ID_1)
    def test_resize_workflow(self):
        """prep_start/prep_end/finish_end produce old and new usage rows."""
        launched_str = '2012-12-21 06:34:50.123'
        launched = utils.str_time_to_unix(launched_str)
        values = {
            'instance': INSTANCE_ID_1,
            'request_id': REQUEST_ID_1,
            'instance_type_id': '1',
            'launched_at': launched,
        }
        InstanceUsage(**values).save()
        started_str = '2012-12-22 06:34:50.123'
        started_time = utils.str_time_to_unix(started_str)
        pre_end_str = '2012-12-22 06:36:50.123'
        prep_end_time = utils.str_time_to_unix(pre_end_str)
        finish_str = '2012-12-22 06:38:50.123'
        finish_time = utils.str_time_to_unix(finish_str)
        prep_start_json = test_utils\
            .make_resize_prep_start_json(request_id=REQUEST_ID_2)
        prep_end_json = test_utils\
            .make_resize_prep_end_json(new_instance_type_id='2',
                                       request_id=REQUEST_ID_2)
        finish_json = test_utils\
            .make_resize_finish_json(launched_at=finish_str,
                                     request_id=REQUEST_ID_2)
        prep_start_raw = create_raw(self.deployment, started_time,
                                    views.INSTANCE_EVENT['resize_prep_start'],
                                    request_id=REQUEST_ID_2,
                                    json=prep_start_json)
        prep_end_raw = create_raw(self.deployment, prep_end_time,
                                  views.INSTANCE_EVENT['resize_prep_end'],
                                  request_id=REQUEST_ID_2,
                                  json=prep_end_json)
        finish_raw = create_raw(self.deployment, finish_time,
                                views.INSTANCE_EVENT['resize_finish_end'],
                                request_id=REQUEST_ID_2,
                                json=finish_json)
        views.aggregate_usage(prep_start_raw)
        views.aggregate_usage(prep_end_raw)
        views.aggregate_usage(finish_raw)
        usages = InstanceUsage.objects.all().order_by('id')
        self.assertEqual(len(usages), 2)
        usage_before = usages[0]
        usage_after = usages[1]
        self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
                           REQUEST_ID_1)
        self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
                           REQUEST_ID_2)
def test_resize_workflow_out_of_order(self):
    """Resize where finish_end arrives before the late prep_end; the
    final usage rows must match the in-order case."""
    launched_str = '2012-12-21 06:34:50.123'
    launched = utils.str_time_to_unix(launched_str)
    # Pre-existing usage row from the original launch on flavor '1'.
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_1,
        'instance_type_id': '1',
        'launched_at': launched,
    }
    InstanceUsage(**values).save()
    started_str = '2012-12-22 06:34:50.123'
    started_time = utils.str_time_to_unix(started_str)
    pre_end_str = '2012-12-22 06:36:50.123'
    prep_end_time = utils.str_time_to_unix(pre_end_str)
    finish_str = '2012-12-22 06:38:50.123'
    finish_time = utils.str_time_to_unix(finish_str)
    prep_start_json = test_utils\
        .make_resize_prep_start_json(request_id=REQUEST_ID_2)
    prep_end_json = test_utils\
        .make_resize_prep_end_json(new_instance_type_id='2',
                                   request_id=REQUEST_ID_2)
    finish_json = test_utils\
        .make_resize_finish_json(launched_at=finish_str,
                                 request_id=REQUEST_ID_2)
    prep_start_raw = create_raw(self.deployment, started_time,
                                views.INSTANCE_EVENT['resize_prep_start'],
                                request_id=REQUEST_ID_2,
                                json=prep_start_json)
    prep_end_raw = create_raw(self.deployment, prep_end_time,
                              views.INSTANCE_EVENT['resize_prep_end'],
                              request_id=REQUEST_ID_2,
                              json=prep_end_json)
    finish_raw = create_raw(self.deployment, finish_time,
                            views.INSTANCE_EVENT['resize_finish_end'],
                            request_id=REQUEST_ID_2,
                            json=finish_json)
    # Resize Started, notification on time
    views.aggregate_usage(prep_start_raw)
    # Received finish_end, prep_end late
    views.aggregate_usage(finish_raw)
    # Finally receive the late prep_end
    views.aggregate_usage(prep_end_raw)
    usages = InstanceUsage.objects.all().order_by('id')
    self.assertEqual(len(usages), 2)
    usage_before = usages[0]
    usage_after = usages[1]
    self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
                       REQUEST_ID_1)
    self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
                       REQUEST_ID_2)
def test_resize_workflow_start_late(self):
    """Resize where prep_end arrives before the late prep_start; the
    final usage rows must match the in-order case."""
    launched_str = '2012-12-21 06:34:50.123'
    launched = utils.str_time_to_unix(launched_str)
    # Pre-existing usage row from the original launch on flavor '1'.
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_1,
        'instance_type_id': '1',
        'launched_at': launched,
    }
    InstanceUsage(**values).save()
    started_str = '2012-12-22 06:34:50.123'
    started_time = utils.str_time_to_unix(started_str)
    pre_end_str = '2012-12-22 06:36:50.123'
    prep_end_time = utils.str_time_to_unix(pre_end_str)
    finish_str = '2012-12-22 06:38:50.123'
    finish_time = utils.str_time_to_unix(finish_str)
    prep_start_json = test_utils\
        .make_resize_prep_start_json(request_id=REQUEST_ID_2)
    prep_end_json = test_utils\
        .make_resize_prep_end_json(new_instance_type_id='2',
                                   request_id=REQUEST_ID_2)
    finish_json = test_utils\
        .make_resize_finish_json(launched_at=finish_str,
                                 request_id=REQUEST_ID_2)
    prep_start_raw = create_raw(self.deployment, started_time,
                                views.INSTANCE_EVENT['resize_prep_start'],
                                request_id=REQUEST_ID_2,
                                json=prep_start_json)
    prep_end_raw = create_raw(self.deployment, prep_end_time,
                              views.INSTANCE_EVENT['resize_prep_end'],
                              request_id=REQUEST_ID_2,
                              json=prep_end_json)
    finish_raw = create_raw(self.deployment, finish_time,
                            views.INSTANCE_EVENT['resize_finish_end'],
                            request_id=REQUEST_ID_2,
                            json=finish_json)
    # prep_end processed first, then the late prep_start, then finish.
    views.aggregate_usage(prep_end_raw)
    views.aggregate_usage(prep_start_raw)
    views.aggregate_usage(finish_raw)
    usages = InstanceUsage.objects.all().order_by('id')
    self.assertEqual(len(usages), 2)
    usage_before = usages[0]
    usage_after = usages[1]
    self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
                       REQUEST_ID_1)
    self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
                       REQUEST_ID_2)
def test_resize_revert_workflow(self):
    """Revert of a resize: after revert_start/revert_end a third usage
    row must appear back on the original flavor ('1'), launched at the
    revert-end time.

    Fix: the revert_end raw was created with ``started_time``; it now
    uses ``end_time`` so the raw's timestamp matches the event it
    represents.
    """
    # Usage row from the original launch on flavor '1'.
    launched_str = '2012-12-21 06:34:50.123'
    launched = utils.str_time_to_unix(launched_str)
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_1,
        'instance_type_id': '1',
        'launched_at': launched,
    }
    InstanceUsage(**values).save()
    # Usage row from the resize (flavor '2') that is being reverted.
    resize_launched_str = '2012-12-22 06:34:50.123'
    resize_launched = utils.str_time_to_unix(resize_launched_str)
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_2,
        'instance_type_id': '2',
        'launched_at': resize_launched,
    }
    InstanceUsage(**values).save()
    started_str = '2012-12-22 06:34:50.123'
    started_time = utils.str_time_to_unix(started_str)
    end_str = '2012-12-22 06:36:50.123'
    end_time = utils.str_time_to_unix(end_str)
    start_json = test_utils\
        .make_resize_revert_start_json(request_id=REQUEST_ID_3)
    end_json = test_utils\
        .make_resize_revert_end_json(launched_at=end_str,
                                     request_id=REQUEST_ID_3)
    start_raw = create_raw(self.deployment, started_time,
                           views.INSTANCE_EVENT['resize_revert_start'],
                           request_id=REQUEST_ID_3, json=start_json)
    # BUG FIX: was started_time; revert_end occurs at end_time.
    end_raw = create_raw(self.deployment, end_time,
                         views.INSTANCE_EVENT['resize_revert_end'],
                         request_id=REQUEST_ID_3, json=end_json)
    views.aggregate_usage(start_raw)
    views.aggregate_usage(end_raw)
    usages = InstanceUsage.objects.all().order_by('id')
    self.assertEqual(len(usages), 3)
    usage_before_resize = usages[0]
    usage_after_resize = usages[1]
    usage_after_revert = usages[2]
    self.assertOnUsage(usage_before_resize, INSTANCE_ID_1, '1', launched,
                       REQUEST_ID_1)
    self.assertOnUsage(usage_after_resize, INSTANCE_ID_1, '2',
                       resize_launched, REQUEST_ID_2)
    self.assertOnUsage(usage_after_revert, INSTANCE_ID_1, '1', end_time,
                       REQUEST_ID_3)
def test_resize_revert_workflow_start_late(self):
    """Revert of a resize where revert_end is processed before the late
    revert_start; the final three usage rows must match the in-order
    case.

    Fix: the revert_end raw was created with ``started_time``; it now
    uses ``end_time`` so the raw's timestamp matches the event it
    represents.
    """
    # Usage row from the original launch on flavor '1'.
    launched_str = '2012-12-21 06:34:50.123'
    launched = utils.str_time_to_unix(launched_str)
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_1,
        'instance_type_id': '1',
        'launched_at': launched,
    }
    InstanceUsage(**values).save()
    # Usage row from the resize (flavor '2') that is being reverted.
    resize_launched_str = '2012-12-22 06:34:50.123'
    resize_launched = utils.str_time_to_unix(resize_launched_str)
    values = {
        'instance': INSTANCE_ID_1,
        'request_id': REQUEST_ID_2,
        'instance_type_id': '2',
        'launched_at': resize_launched,
    }
    InstanceUsage(**values).save()
    started_str = '2012-12-22 06:34:50.123'
    started_time = utils.str_time_to_unix(started_str)
    end_str = '2012-12-22 06:36:50.123'
    end_time = utils.str_time_to_unix(end_str)
    start_json = test_utils\
        .make_resize_revert_start_json(request_id=REQUEST_ID_3)
    end_json = test_utils\
        .make_resize_revert_end_json(launched_at=end_str,
                                     request_id=REQUEST_ID_3)
    start_raw = create_raw(self.deployment, started_time,
                           views.INSTANCE_EVENT['resize_revert_start'],
                           request_id=REQUEST_ID_3, json=start_json)
    # BUG FIX: was started_time; revert_end occurs at end_time.
    end_raw = create_raw(self.deployment, end_time,
                         views.INSTANCE_EVENT['resize_revert_end'],
                         request_id=REQUEST_ID_3, json=end_json)
    # End arrives first; the late start must not disturb the result.
    views.aggregate_usage(end_raw)
    views.aggregate_usage(start_raw)
    usages = InstanceUsage.objects.all().order_by('id')
    self.assertEqual(len(usages), 3)
    usage_before_resize = usages[0]
    usage_after_resize = usages[1]
    usage_after_revert = usages[2]
    self.assertOnUsage(usage_before_resize, INSTANCE_ID_1, '1', launched,
                       REQUEST_ID_1)
    self.assertOnUsage(usage_after_resize, INSTANCE_ID_1, '2',
                       resize_launched, REQUEST_ID_2)
    self.assertOnUsage(usage_after_revert, INSTANCE_ID_1, '1', end_time,
                       REQUEST_ID_3)
from datetime import datetime
import unittest
import db
from stacktach.datetime_to_decimal import dt_to_decimal
from stacktach.models import RawDataImageMeta
from stacktach.models import RawData
from stacktach.models import get_model_fields
class RawDataImageMetaDbTestCase(unittest.TestCase):
    """DB-level test: db.create_rawdata must persist both a RawData row
    and its companion RawDataImageMeta row."""

    def test_create_raw_data_should_populate_rawdata_and_rawdata_imagemeta(self):
        """Every RawData field round-trips; image-meta fields land on the
        separate RawDataImageMeta model pointing back at the raw."""
        deployment = db.get_or_create_deployment('deployment1')[0]
        kwargs = {
            'deployment': deployment,
            'when': dt_to_decimal(datetime.utcnow()),
            'tenant': '1', 'json': '{}', 'routing_key': 'monitor.info',
            'state': 'verifying', 'old_state': 'pending',
            'old_task': '', 'task': '', 'image_type': 1,
            'publisher': '', 'event': 'compute.instance.exists',
            'service': '', 'host': '', 'instance': '1234-5678-9012-3456',
            'request_id': '1234', 'os_architecture': 'x86', 'os_version': '1',
            'os_distro': 'windows', 'rax_options': '2'}
        rawdata = db.create_rawdata(**kwargs)
        # Compare every column except the auto-generated primary key.
        for field in get_model_fields(RawData):
            if field.name != 'id':
                self.assertEquals(getattr(rawdata, field.name),
                                  kwargs[field.name])
        raw_image_meta = RawDataImageMeta.objects.all()[0]
        self.assertEquals(raw_image_meta.raw, rawdata)
        self.assertEquals(raw_image_meta.os_architecture,
                          kwargs['os_architecture'])
        self.assertEquals(raw_image_meta.os_version, kwargs['os_version'])
        self.assertEquals(raw_image_meta.os_distro, kwargs['os_distro'])
        self.assertEquals(raw_image_meta.rax_options, kwargs['rax_options'])

View File

@ -20,7 +20,7 @@ urlpatterns = patterns('',
url(r'stacky/watch/(?P<deployment_id>\d+)/$',
'stacktach.stacky_server.do_watch'),
url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
url(r'stacky/kpi/(?P<tenant_id>\d+)/$', 'stacktach.stacky_server.do_kpi'),
url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
url(r'stacky/usage/launches/$',
'stacktach.stacky_server.do_list_usage_launches'),
url(r'stacky/usage/deletes/$',

View File

@ -9,11 +9,10 @@ from django.shortcuts import render_to_response
from stacktach import datetime_to_decimal as dt
from stacktach import db as stackdb
from stacktach import image_type
from stacktach import models
from stacktach import stacklog
from stacktach import utils
from stacktach.notification import Notification
STACKDB = stackdb
@ -26,67 +25,11 @@ def log_warn(msg):
LOG.warn(msg)
def _extract_states(payload):
    """Pull the state/task transition fields out of a notification payload.

    Missing keys default to the empty string; image_type is translated to
    its numeric code via the image_type module.
    """
    states = dict(
        state=payload.get('state', ""),
        old_state=payload.get('old_state', ""),
        old_task=payload.get('old_task_state', ""),
        task=payload.get('new_task_state', ""),
        image_type=image_type.get_numeric_code(payload),
    )
    return states
def _monitor_message(routing_key, body):
    """Build rawdata kwargs from a monitor.info/monitor.error notification.

    Splits publisher_id into service and host, then hunts for the
    instance UUID and tenant through the payload's several possible
    locations (later lookups override earlier ones).
    """
    event = body['event_type']
    publisher = body['publisher_id']
    request_id = body['_context_request_id']
    parts = publisher.split('.')
    service = parts[0]
    if len(parts) > 1:
        host = ".".join(parts[1:])
    else:
        host = None
    payload = body['payload']
    # NOTE(review): request_spec is never used below — confirm whether
    # it is dead or consumed by code outside this view.
    request_spec = payload.get('request_spec', None)
    # instance UUID's seem to hide in a lot of odd places.
    instance = payload.get('instance_id', None)
    instance = payload.get('instance_uuid', instance)
    if not instance:
        instance = payload.get('exception', {}).get('kwargs', {}).get('uuid')
    if not instance:
        instance = payload.get('instance', {}).get('uuid')
    # tenant: context project id, overridden by an explicit payload value.
    tenant = body.get('_context_project_id', None)
    tenant = payload.get('tenant_id', tenant)
    resp = dict(host=host, instance=instance, publisher=publisher,
                service=service, event=event, tenant=tenant,
                request_id=request_id)
    resp.update(_extract_states(payload))
    return resp
def _compute_update_message(routing_key, body):
    """Build rawdata kwargs from a compute-node update RPC message.

    These messages carry no publisher or instance; host/service/tenant
    come from the RPC args and the event is the RPC method name.
    """
    args = body['args']
    resp = dict(host=args['host'],
                instance=None,
                publisher=None,
                service=args['service_name'],
                event=body['method'],
                tenant=args.get('_context_project_id', None),
                request_id=body['_context_request_id'])
    resp.update(_extract_states(body.get('payload', {})))
    return resp
# routing_key : handler
# Maps AMQP routing keys to the parser producing rawdata kwargs; the
# empty key catches compute-node update messages sent without one.
HANDLERS = {'monitor.info':_monitor_message,
            'monitor.error':_monitor_message,
            '':_compute_update_message}

# routing_key : Notification class used to wrap incoming message bodies.
NOTIFICATIONS = {
    'monitor.info': Notification,
    'monitor.error': Notification}
def start_kpi_tracking(lifecycle, raw):
@ -250,6 +193,12 @@ def _process_usage_for_new_launch(raw, body):
usage.launched_at = utils.str_time_to_unix(payload['launched_at'])
usage.tenant = payload['tenant_id']
image_meta = payload.get('image_meta', {})
usage.rax_options = image_meta.get('com.rackspace__1__options', '')
usage.os_architecture = image_meta.get('org.openstack__1__architecture',
'')
usage.os_version = image_meta.get('org.openstack__1__os_version', '')
usage.os_distro = image_meta.get('org.openstack__1__os_distro', '')
STACKDB.save(usage)
@ -277,6 +226,13 @@ def _process_usage_for_updates(raw, body):
usage.instance_type_id = payload['new_instance_type_id']
usage.tenant = payload['tenant_id']
image_meta = payload.get('image_meta', {})
usage.rax_options = image_meta.get('com.rackspace__1__options', '')
usage.os_architecture = image_meta.get('org.openstack__1__architecture',
'')
usage.os_version = image_meta.get('org.openstack__1__os_version', '')
usage.os_distro = image_meta.get('org.openstack__1__os_distro', '')
STACKDB.save(usage)
@ -321,6 +277,13 @@ def _process_exists(raw, body):
values['usage'] = usage
values['raw'] = raw
values['tenant'] = payload['tenant_id']
image_meta = payload.get('image_meta', {})
values['rax_options'] = image_meta.get('com.rackspace__1__options', '')
os_arch = image_meta.get('org.openstack__1__architecture', '')
values['os_architecture'] = os_arch
os_version = image_meta.get('org.openstack__1__os_version', '')
values['os_version'] = os_version
values['os_distro'] = image_meta.get('org.openstack__1__os_distro', '')
deleted_at = payload.get('deleted_at')
if deleted_at and deleted_at != '':
@ -370,22 +333,12 @@ def process_raw_data(deployment, args, json_args):
routing_key, body = args
record = None
handler = HANDLERS.get(routing_key, None)
if handler:
values = handler(routing_key, body)
notification = NOTIFICATIONS[routing_key](body)
if notification:
values = notification.rawdata_kwargs(deployment, routing_key, json_args)
if not values:
return record
values['deployment'] = deployment
try:
when = body['timestamp']
except KeyError:
when = body['_context_timestamp'] # Old way of doing it
values['when'] = utils.str_time_to_unix(when)
values['routing_key'] = routing_key
values['json'] = json_args
record = STACKDB.create_rawdata(**values)
STACKDB.save(record)
return record

View File

@ -0,0 +1,180 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from decimal import Decimal
import unittest
from stacktach.notification import Notification
from tests.unit.utils import REQUEST_ID_1, TENANT_ID_1, INSTANCE_ID_1
class NotificationTestCase(unittest.TestCase):
    """Tests for Notification.rawdata_kwargs: mapping a raw notification
    message body onto the keyword arguments used to create a RawData row."""

    def test_rawdata_kwargs(self):
        """Fully-populated message maps every field through."""
        message = {
            'event_type': 'compute.instance.create.start',
            'publisher_id': 'compute.cpu1-n01.example.com',
            '_context_request_id': REQUEST_ID_1,
            '_context_project_id': TENANT_ID_1,
            'timestamp': '2013-06-12 06:30:52.790476',
            'payload': {
                'instance_id': INSTANCE_ID_1,
                'state': 'active',
                'old_state': 'building',
                'old_task_state': 'build',
                "new_task_state": 'rebuild_spawning',
                'image_meta': {
                    'image_type': 'base',
                    'org.openstack__1__architecture': 'x64',
                    'org.openstack__1__os_distro': 'com.microsoft.server',
                    'org.openstack__1__os_version': '2008.2',
                    'com.rackspace__1__options': '36'
                }
            }
        }
        kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
        self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
        self.assertEquals(kwargs['deployment'], '1')
        self.assertEquals(kwargs['routing_key'], 'monitor.info')
        self.assertEquals(kwargs['tenant'], TENANT_ID_1)
        self.assertEquals(kwargs['json'], 'json')
        self.assertEquals(kwargs['state'], 'active')
        self.assertEquals(kwargs['old_state'], 'building')
        self.assertEquals(kwargs['old_task'], 'build')
        self.assertEquals(kwargs['task'], 'rebuild_spawning')
        self.assertEquals(kwargs['image_type'], 1)
        self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
        self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
        self.assertEquals(kwargs['event'], 'compute.instance.create.start')
        self.assertEquals(kwargs['request_id'], REQUEST_ID_1)

    def test_rawdata_kwargs_missing_image_meta(self):
        """Absent OS metadata keys in image_meta must not break mapping."""
        message = {
            'event_type': 'compute.instance.create.start',
            'publisher_id': 'compute.cpu1-n01.example.com',
            '_context_request_id': REQUEST_ID_1,
            '_context_project_id': TENANT_ID_1,
            'timestamp': '2013-06-12 06:30:52.790476',
            'payload': {
                'instance_id': INSTANCE_ID_1,
                'state': 'active',
                'old_state': 'building',
                'old_task_state': 'build',
                "new_task_state": 'rebuild_spawning',
                'image_meta': {
                    'image_type': 'base',
                }
            }
        }
        kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
        self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
        self.assertEquals(kwargs['deployment'], '1')
        self.assertEquals(kwargs['routing_key'], 'monitor.info')
        self.assertEquals(kwargs['tenant'], TENANT_ID_1)
        self.assertEquals(kwargs['json'], 'json')
        self.assertEquals(kwargs['state'], 'active')
        self.assertEquals(kwargs['old_state'], 'building')
        self.assertEquals(kwargs['old_task'], 'build')
        self.assertEquals(kwargs['task'], 'rebuild_spawning')
        self.assertEquals(kwargs['image_type'], 1)
        self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
        self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
        self.assertEquals(kwargs['event'], 'compute.instance.create.start')
        self.assertEquals(kwargs['request_id'], REQUEST_ID_1)

    def test_rawdata_kwargs_for_message_with_no_host(self):
        """publisher_id without a '.' yields host None."""
        message = {
            'event_type': 'compute.instance.create.start',
            'publisher_id': 'compute',
            '_context_request_id': REQUEST_ID_1,
            '_context_project_id': TENANT_ID_1,
            'timestamp': '2013-06-12 06:30:52.790476',
            'payload': {
                'instance_id': INSTANCE_ID_1,
                'state': 'active',
                'old_state': 'building',
                'old_task_state': 'build',
                "new_task_state": 'rebuild_spawning',
                'image_meta': {
                    'image_type': 'base',
                    'org.openstack__1__architecture': 'x64',
                    'org.openstack__1__os_distro': 'com.microsoft.server',
                    'org.openstack__1__os_version': '2008.2',
                    'com.rackspace__1__options': '36'
                }
            }
        }
        kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
        self.assertEquals(kwargs['host'], None)
        self.assertEquals(kwargs['deployment'], '1')
        self.assertEquals(kwargs['routing_key'], 'monitor.info')
        self.assertEquals(kwargs['tenant'], TENANT_ID_1)
        self.assertEquals(kwargs['json'], 'json')
        self.assertEquals(kwargs['state'], 'active')
        self.assertEquals(kwargs['old_state'], 'building')
        self.assertEquals(kwargs['old_task'], 'build')
        self.assertEquals(kwargs['task'], 'rebuild_spawning')
        self.assertEquals(kwargs['image_type'], 1)
        self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
        self.assertEquals(kwargs['publisher'], 'compute')
        self.assertEquals(kwargs['event'], 'compute.instance.create.start')
        self.assertEquals(kwargs['request_id'], REQUEST_ID_1)

    def test_rawdata_kwargs_for_message_with_exception(self):
        """A payload carrying an exception dict still maps cleanly."""
        message = {
            'event_type': 'compute.instance.create.start',
            'publisher_id': 'compute.cpu1-n01.example.com',
            '_context_request_id': REQUEST_ID_1,
            '_context_project_id': TENANT_ID_1,
            'timestamp': '2013-06-12 06:30:52.790476',
            'payload': {
                'exception': {'kwargs':{'uuid': INSTANCE_ID_1}},
                'instance_id': INSTANCE_ID_1,
                'state': 'active',
                'old_state': 'building',
                'old_task_state': 'build',
                "new_task_state": 'rebuild_spawning',
                'image_meta': {
                    'image_type': 'base',
                    'org.openstack__1__architecture': 'x64',
                    'org.openstack__1__os_distro': 'com.microsoft.server',
                    'org.openstack__1__os_version': '2008.2',
                    'com.rackspace__1__options': '36'
                }
            }
        }
        kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
        self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
        self.assertEquals(kwargs['deployment'], '1')
        self.assertEquals(kwargs['routing_key'], 'monitor.info')
        self.assertEquals(kwargs['tenant'], TENANT_ID_1)
        self.assertEquals(kwargs['json'], 'json')
        self.assertEquals(kwargs['state'], 'active')
        self.assertEquals(kwargs['old_state'], 'building')
        self.assertEquals(kwargs['old_task'], 'build')
        self.assertEquals(kwargs['task'], 'rebuild_spawning')
        self.assertEquals(kwargs['image_type'], 1)
        self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
        self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
        self.assertEquals(kwargs['event'], 'compute.instance.create.start')
        self.assertEquals(kwargs['request_id'], REQUEST_ID_1)

View File

@ -0,0 +1,471 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import unittest
import mox
import requests
from stacktach import models
from stacktach import reconciler
from stacktach import utils as stackutils
from stacktach.reconciler import exceptions
from stacktach.reconciler import nova
from stacktach.reconciler import utils as rec_utils
from tests.unit import utils
from tests.unit.utils import INSTANCE_ID_1
from tests.unit.utils import TENANT_ID_1
# Cell name -> region used by the Reconciler under test.
region_mapping = {
    'RegionOne.prod.cell1': 'RegionOne',
    'RegionTwo.prod.cell1': 'RegionTwo',
}

# Canned OS/image metadata shared by the fake usages below.
DEFAULT_OS_ARCH = 'os_arch'
DEFAULT_OS_DISTRO = 'os_dist'
DEFAULT_OS_VERSION = "1.1"
DEFAULT_RAX_OPTIONS = "rax_ops"
class ReconcilerTestCase(unittest.TestCase):
def setUp(self):
    """Create a Reconciler wired to a mock client and stub out every
    stacktach model's manager so no real DB is touched."""
    self.mox = mox.Mox()
    self.client = self.mox.CreateMockAnything()
    self.client.src_str = 'mocked_client'
    self.reconciler = reconciler.Reconciler({},
                                            client=self.client,
                                            region_mapping=region_mapping)
    # Replace each model class and its .objects manager with mocks.
    self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
    models.RawData.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'Deployment', use_mock_anything=True)
    models.Deployment.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'Lifecycle', use_mock_anything=True)
    models.Lifecycle.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'Timing', use_mock_anything=True)
    models.Timing.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'RequestTracker',
                             use_mock_anything=True)
    models.RequestTracker.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'InstanceUsage',
                             use_mock_anything=True)
    models.InstanceUsage.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'InstanceReconcile',
                             use_mock_anything=True)
    models.InstanceReconcile.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'InstanceDeletes',
                             use_mock_anything=True)
    models.InstanceDeletes.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'InstanceExists',
                             use_mock_anything=True)
    models.InstanceExists.objects = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
    models.JsonReport.objects = self.mox.CreateMockAnything()
def tearDown(self):
    # Restore every class stubbed out in setUp().
    self.mox.UnsetStubs()
def _fake_usage(self, is_exists=False, is_deleted=False,
                mock_deployment=False):
    """Build a mock usage/exists record launched one hour ago.

    is_exists adds a deleted_at of None (exists-style record);
    is_deleted overrides deleted_at with 'now'; mock_deployment
    records a deployment() expectation returning RegionOne.prod.cell1.
    """
    usage = self.mox.CreateMockAnything()
    usage.id = 1
    beginning_d = utils.decimal_utc()
    usage.instance = INSTANCE_ID_1
    launched_at = beginning_d - (60*60)
    usage.launched_at = launched_at
    usage.instance_type_id = 1
    usage.tenant = TENANT_ID_1
    usage.os_architecture = DEFAULT_OS_ARCH
    usage.os_distro = DEFAULT_OS_DISTRO
    usage.os_version = DEFAULT_OS_VERSION
    usage.rax_options = DEFAULT_RAX_OPTIONS
    if is_exists:
        usage.deleted_at = None
    if is_deleted:
        usage.deleted_at = beginning_d
    if mock_deployment:
        deployment = self.mox.CreateMockAnything()
        deployment.name = 'RegionOne.prod.cell1'
        usage.deployment().AndReturn(deployment)
    return usage
def _fake_reconciler_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
                              deleted_at=None, deleted=False,
                              instance_type_id=1, tenant=TENANT_ID_1,
                              os_arch=DEFAULT_OS_ARCH,
                              os_distro=DEFAULT_OS_DISTRO,
                              os_verison=DEFAULT_OS_VERSION,
                              rax_options=DEFAULT_RAX_OPTIONS):
    """Build the dict a reconciler client would return for an instance.

    NOTE(review): 'os_verison' is a typo for 'os_version'; left as-is
    because callers outside this view may pass it by keyword.
    """
    instance = rec_utils.empty_reconciler_instance()
    instance.update({
        'id': uuid,
        'launched_at': launched_at,
        'deleted_at': deleted_at,
        'deleted': deleted,
        'instance_type_id': instance_type_id,
        'tenant': tenant,
        'os_architecture': os_arch,
        'os_distro': os_distro,
        'os_version': os_verison,
        'rax_options': rax_options,
    })
    return instance
def test_load_client_json_bridge(self):
    """Explicit JSONBridgeClient class name constructs that client."""
    mock_config = self.mox.CreateMockAnything()
    config = {'client_class': 'JSONBridgeClient', 'client': mock_config}
    nova.JSONBridgeClient(mock_config)
    self.mox.ReplayAll()
    reconciler.Reconciler.load_client(config)
    self.mox.VerifyAll()
def test_load_client_no_class_loads_default_class(self):
    """Missing client_class falls back to JSONBridgeClient."""
    mock_config = self.mox.CreateMockAnything()
    config = {'client': mock_config}
    nova.JSONBridgeClient(mock_config)
    self.mox.ReplayAll()
    reconciler.Reconciler.load_client(config)
    self.mox.VerifyAll()
def test_load_client_incorrect_class_loads_default_class(self):
    """Unknown client_class value also falls back to JSONBridgeClient."""
    mock_config = self.mox.CreateMockAnything()
    config = {'client_class': 'BadConfigValue', 'client': mock_config}
    nova.JSONBridgeClient(mock_config)
    self.mox.ReplayAll()
    reconciler.Reconciler.load_client(config)
    self.mox.VerifyAll()
def test_region_for_launch(self):
    """A deployment name present in region_mapping resolves to its region."""
    launch = self.mox.CreateMockAnything()
    deployment = self.mox.CreateMockAnything()
    deployment.name = 'RegionOne.prod.cell1'
    launch.deployment().AndReturn(deployment)
    self.mox.ReplayAll()
    region = self.reconciler._region_for_usage(launch)
    self.assertEqual('RegionOne', region)
    self.mox.VerifyAll()
def test_region_for_launch_no_mapping(self):
    """An unmapped deployment name yields a falsy region."""
    launch = self.mox.CreateMockAnything()
    deployment = self.mox.CreateMockAnything()
    deployment.name = 'RegionOne.prod.cell2'
    launch.deployment().AndReturn(deployment)
    self.mox.ReplayAll()
    region = self.reconciler._region_for_usage(launch)
    self.assertFalse(region)
    self.mox.VerifyAll()
def test_region_for_launch_no_raws(self):
    """No deployment available at all yields a falsy region."""
    launch = self.mox.CreateMockAnything()
    launch.deployment()
    self.mox.ReplayAll()
    region = self.reconciler._region_for_usage(launch)
    self.assertFalse(region)
    self.mox.VerifyAll()
def test_missing_exists_for_instance(self):
    """When nova reports the instance deleted before the audit period,
    an InstanceReconcile row is created and saved."""
    launch = self._fake_usage(mock_deployment=True)
    launched_at = launch.launched_at
    deleted_at = launched_at + (60*30)
    period_beginning = deleted_at + 1
    models.InstanceUsage.objects.get(id=launch.id).AndReturn(launch)
    rec_inst = self._fake_reconciler_instance(deleted=True,
                                              deleted_at=deleted_at)
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
    # Expected kwargs for the reconcile row written from nova's answer.
    reconcile_vals = {
        'instance': launch.instance,
        'launched_at': launch.launched_at,
        'deleted_at': deleted_at,
        'instance_type_id': launch.instance_type_id,
        'source': 'reconciler:mocked_client',
        'tenant': TENANT_ID_1,
        'os_architecture': DEFAULT_OS_ARCH,
        'os_distro': DEFAULT_OS_DISTRO,
        'os_version': DEFAULT_OS_VERSION,
        'rax_options': DEFAULT_RAX_OPTIONS,
    }
    result = self.mox.CreateMockAnything()
    models.InstanceReconcile(**reconcile_vals).AndReturn(result)
    result.save()
    self.mox.ReplayAll()
    result = self.reconciler.missing_exists_for_instance(launch.id,
                                                         period_beginning)
    self.assertTrue(result)
    self.mox.VerifyAll()
def test_missing_exists_for_instance_not_found(self):
    """If the client raises NotFound, nothing is reconciled."""
    launch_id = 1
    beginning_d = utils.decimal_utc()
    launch = self.mox.CreateMockAnything()
    launch.instance = INSTANCE_ID_1
    launch.launched_at = beginning_d - (60*60)
    launch.instance_type_id = 1
    models.InstanceUsage.objects.get(id=launch_id).AndReturn(launch)
    deployment = self.mox.CreateMockAnything()
    launch.deployment().AndReturn(deployment)
    deployment.name = 'RegionOne.prod.cell1'
    ex = exceptions.NotFound()
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndRaise(ex)
    self.mox.ReplayAll()
    result = self.reconciler.missing_exists_for_instance(launch_id,
                                                         beginning_d)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_failed_validation(self):
    """Matching, non-deleted nova data reconciles a failed exists row."""
    exists = self._fake_usage(is_exists=True, mock_deployment=True)
    launched_at = exists.launched_at
    rec_inst = self._fake_reconciler_instance(launched_at=launched_at)
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
    # Expected kwargs for the reconcile row written from nova's answer.
    reconcile_vals = {
        'instance': exists.instance,
        'launched_at': exists.launched_at,
        'deleted_at': exists.deleted_at,
        'instance_type_id': exists.instance_type_id,
        'source': 'reconciler:mocked_client',
        'tenant': TENANT_ID_1,
        'os_architecture': DEFAULT_OS_ARCH,
        'os_distro': DEFAULT_OS_DISTRO,
        'os_version': DEFAULT_OS_VERSION,
        'rax_options': DEFAULT_RAX_OPTIONS,
    }
    result = self.mox.CreateMockAnything()
    models.InstanceReconcile(**reconcile_vals).AndReturn(result)
    result.save()
    self.mox.ReplayAll()
    result = self.reconciler.failed_validation(exists)
    self.assertTrue(result)
    self.mox.VerifyAll()
def test_failed_validation_deleted(self):
    """Matching deleted_at on both sides also reconciles."""
    exists = self._fake_usage(is_exists=True, is_deleted=True,
                              mock_deployment=True)
    launched_at = exists.launched_at
    deleted_at = exists.deleted_at
    rec_inst = self._fake_reconciler_instance(launched_at=launched_at,
                                              deleted=True,
                                              deleted_at=deleted_at)
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
    # Expected kwargs for the reconcile row written from nova's answer.
    reconcile_vals = {
        'instance': exists.instance,
        'launched_at': exists.launched_at,
        'deleted_at': exists.deleted_at,
        'instance_type_id': exists.instance_type_id,
        'source': 'reconciler:mocked_client',
        'tenant': TENANT_ID_1,
        'os_architecture': DEFAULT_OS_ARCH,
        'os_distro': DEFAULT_OS_DISTRO,
        'os_version': DEFAULT_OS_VERSION,
        'rax_options': DEFAULT_RAX_OPTIONS,
    }
    result = self.mox.CreateMockAnything()
    models.InstanceReconcile(**reconcile_vals).AndReturn(result)
    result.save()
    self.mox.ReplayAll()
    result = self.reconciler.failed_validation(exists)
    self.assertTrue(result)
    self.mox.VerifyAll()
def test_failed_validation_deleted_not_matching(self):
    """Mismatched deleted_at values must not reconcile."""
    beginning_d = utils.decimal_utc()
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = beginning_d - (60*60)
    exists.launched_at = launched_at
    exists.instance_type_id = 1
    exists.deleted_at = beginning_d
    deployment = self.mox.CreateMockAnything()
    exists.deployment().AndReturn(deployment)
    deployment.name = 'RegionOne.prod.cell1'
    # Nova reports deletion one second later than the exists row.
    rec_inst = self._fake_reconciler_instance(launched_at=launched_at,
                                              deleted=True,
                                              deleted_at=beginning_d+1)
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
    self.mox.ReplayAll()
    result = self.reconciler.failed_validation(exists)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_failed_validation_deleted_not_deleted_from_client(self):
    """Exists row deleted but nova still active: must not reconcile."""
    beginning_d = utils.decimal_utc()
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = beginning_d - (60*60)
    exists.launched_at = launched_at
    exists.instance_type_id = 1
    exists.deleted_at = beginning_d
    deployment = self.mox.CreateMockAnything()
    exists.deployment().AndReturn(deployment)
    deployment.name = 'RegionOne.prod.cell1'
    rec_inst = self._fake_reconciler_instance(launched_at=launched_at)
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndReturn(rec_inst)
    self.mox.ReplayAll()
    result = self.reconciler.failed_validation(exists)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_failed_validation_not_found(self):
    """Client NotFound during validation must not reconcile."""
    beginning_d = utils.decimal_utc()
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = beginning_d - (60*60)
    exists.launched_at = launched_at
    exists.instance_type_id = 1
    exists.deleted_at = None
    deployment = self.mox.CreateMockAnything()
    exists.deployment().AndReturn(deployment)
    deployment.name = 'RegionOne.prod.cell1'
    ex = exceptions.NotFound()
    self.client.get_instance('RegionOne', INSTANCE_ID_1).AndRaise(ex)
    self.mox.ReplayAll()
    result = self.reconciler.failed_validation(exists)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_fields_match(self):
    """Match code 0: launched_at agrees and neither side is deleted."""
    exists = self._fake_usage(is_exists=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 0)
    self.mox.VerifyAll()
def test_fields_match_field_with_deleted(self):
    """Match code 0: both sides deleted with agreeing timestamps."""
    exists = self._fake_usage(is_exists=True, is_deleted=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at, deleted=True,
        deleted_at=exists.deleted_at)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 0)
    self.mox.VerifyAll()
def test_fields_match_field_miss_match(self):
    """Match code 1: launched_at values disagree."""
    exists = self._fake_usage(is_exists=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at + 1)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 1)
    self.mox.VerifyAll()
def test_fields_match_field_with_deleted_miss_match(self):
    """Match code 2: both deleted but deleted_at values disagree."""
    exists = self._fake_usage(is_exists=True, is_deleted=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at, deleted=True,
        deleted_at=exists.deleted_at + 1)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 2)
    self.mox.VerifyAll()
def test_fields_match_field_not_deleted_in_nova(self):
    """Match code 3: exists row is deleted, client instance is not."""
    exists = self._fake_usage(is_exists=True, is_deleted=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 3)
    self.mox.VerifyAll()
def test_fields_match_field_not_deleted_in_exists(self):
    """Match code 4: client instance is deleted, exists row is not."""
    exists = self._fake_usage(is_exists=True)
    instance = self._fake_reconciler_instance(
        launched_at=exists.launched_at, deleted=True,
        deleted_at=exists.launched_at + 1)
    self.mox.ReplayAll()
    self.assertEqual(self.reconciler._fields_match(exists, instance), 4)
    self.mox.VerifyAll()
# Minimal JSON-bridge client configuration shared by the tests below.
json_bridge_config = dict(
    url='http://json_bridge.example.com/query/',
    username='user',
    password='pass',
    databases={'RegionOne': 'nova'},
)
class NovaJSONBridgeClientTestCase(unittest.TestCase):
    """Tests for nova.JSONBridgeClient with requests.post stubbed out."""

    def setUp(self):
        self.mox = mox.Mox()
        self.client = nova.JSONBridgeClient(json_bridge_config)
        self.mox.StubOutWithMock(requests, 'post')

    def tearDown(self):
        self.mox.UnsetStubs()

    def mock_for_query(self, database, query, results):
        # Record one expected POST to the bridge and the JSON payload it
        # should answer with.
        endpoint = json_bridge_config['url'] + database
        credentials = (json_bridge_config['username'],
                       json_bridge_config['password'])
        mock_response = self.mox.CreateMockAnything()
        requests.post(endpoint, {'sql': query}, auth=credentials,
                      verify=False).AndReturn(mock_response)
        mock_response.json().AndReturn({'result': results})

    def _fake_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
                       terminated_at=None, deleted=0, instance_type_id=1,
                       project_id=TENANT_ID_1):
        # Row shaped like what the bridge returns from nova's instances
        # table.
        return dict(uuid=uuid,
                    launched_at=launched_at,
                    terminated_at=terminated_at,
                    deleted=deleted,
                    instance_type_id=instance_type_id,
                    project_id=project_id)

    def test_get_instance(self):
        launched = str(datetime.datetime.utcnow() -
                       datetime.timedelta(minutes=5))
        terminated = str(datetime.datetime.utcnow())
        rows = [self._fake_instance(launched_at=launched,
                                    terminated_at=terminated,
                                    deleted=True)]
        self.mock_for_query('nova', nova.GET_INSTANCE_QUERY % INSTANCE_ID_1,
                            rows)
        self.mox.ReplayAll()
        instance = self.client.get_instance('RegionOne', INSTANCE_ID_1)
        self.assertIsNotNone(instance)
        self.assertEqual(instance['id'], INSTANCE_ID_1)
        self.assertEqual(instance['instance_type_id'], '1')
        # Timestamps should come back converted to unix decimals.
        self.assertEqual(instance['launched_at'],
                         stackutils.str_time_to_unix(launched))
        self.assertEqual(instance['deleted_at'],
                         stackutils.str_time_to_unix(terminated))
        self.assertTrue(instance['deleted'])
        self.mox.VerifyAll()

    def test_get_instance_not_found(self):
        # An empty result set should surface as NotFound.
        self.mock_for_query('nova', nova.GET_INSTANCE_QUERY % INSTANCE_ID_1,
                            [])
        self.mox.ReplayAll()
        self.assertRaises(exceptions.NotFound, self.client.get_instance,
                          'RegionOne', INSTANCE_ID_1)
        self.mox.VerifyAll()

View File

@ -26,6 +26,10 @@ import mox
import utils
from utils import INSTANCE_ID_1
from utils import OS_VERSION_1
from utils import OS_ARCH_1
from utils import OS_DISTRO_1
from utils import RAX_OPTIONS_1
from utils import MESSAGE_ID_1
from utils import REQUEST_ID_1
from utils import TENANT_ID_1
@ -49,124 +53,6 @@ class StacktachRawParsingTestCase(unittest.TestCase):
self.assertTrue(key in resp, msg='%s not in response' % key)
self.assertEqual(resp[key], kwargs[key])
def test_monitor_message(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active',
old_state='building',
old_task='build')
def test_monitor_message_no_host(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host=None, instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1, state='active',
old_state='building', old_task='build')
def test_monitor_message_exception(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'exception': {'kwargs':{'uuid': INSTANCE_ID_1}},
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active', old_state='building',
old_task='build')
def test_monitor_message_exception(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance': {'uuid': INSTANCE_ID_1},
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active', old_state='building',
old_task='build')
def test_compute_update_message(self):
body = {
'_context_request_id': REQUEST_ID_1,
'method': 'some_method',
'args': {
'host': 'compute',
'service_name': 'compute',
'_context_project_id': TENANT_ID_1
},
'payload': {
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
}
}
resp = views._compute_update_message(None, body)
print resp
self.assertOnHandlerResponse(resp, publisher=None, instance=None,
host='compute', tenant=TENANT_ID_1,
event='some_method',
request_id=REQUEST_ID_1, state='active',
old_state='building', old_task='build')
def test_process_raw_data(self):
deployment = self.mox.CreateMockAnything()
when = '2013-1-25 13:38:23.123'
@ -175,22 +61,25 @@ class StacktachRawParsingTestCase(unittest.TestCase):
}
args = ('monitor.info', dict)
json_args = json.dumps(args)
old_info_handler = views.HANDLERS['monitor.info']
views.HANDLERS['monitor.info'] = lambda key, mess: {'host': 'api'}
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S.%f")),
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%d %H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
raw = self.mox.CreateMockAnything()
views.STACKDB.create_rawdata(**raw_values).AndReturn(raw)
views.STACKDB.save(raw)
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
self.mox.VerifyAll()
views.HANDLERS['monitor.info'] = old_info_handler
views.NOTIFICATIONS['monitor.info'] = old_info_handler
def test_process_raw_data_old_timestamp(self):
deployment = self.mox.CreateMockAnything()
@ -199,24 +88,25 @@ class StacktachRawParsingTestCase(unittest.TestCase):
'_context_timestamp': when,
}
args = ('monitor.info', dict)
json_args = json.dumps(args)
old_info_handler = views.HANDLERS['monitor.info']
views.HANDLERS['monitor.info'] = lambda key, mess: {'host': 'api'}
json_args = json.dumps(args[1])
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S.%f")),
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%dT%H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
raw = self.mox.CreateMockAnything()
views.STACKDB.create_rawdata(**raw_values).AndReturn(raw)
views.STACKDB.save(raw)
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
self.mox.VerifyAll()
views.HANDLERS['monitor.info'] = old_info_handler
views.NOTIFICATIONS['monitor.info'] = old_info_handler
class StacktachLifecycleTestCase(unittest.TestCase):
def setUp(self):
@ -421,7 +311,8 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
stacklog.get_logger(name=name).AndReturn(self.log)
def test_process_usage_for_new_launch_create_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.create.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -430,11 +321,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.instance_type_id, '1')
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -443,11 +339,15 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.instance_type_id, '1')
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -457,11 +357,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -471,11 +376,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1,'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.revert.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -485,12 +395,19 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1, 'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
@ -500,12 +417,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -513,12 +438,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Success"
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -527,12 +460,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_error_message(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Error"
event = 'compute.instance.create.end'
when_time = DUMMY_TIME
@ -547,8 +488,13 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'type_id': INSTANCE_TYPE_ID_1, 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'type_id': INSTANCE_TYPE_ID_1,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.revert.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -557,12 +503,21 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_prep_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'new_type_id': INSTANCE_TYPE_ID_2, 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'new_type_id': INSTANCE_TYPE_ID_2,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -570,6 +525,10 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_2)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
@ -649,7 +608,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
notif = utils.create_nova_notif(launched=str(launch_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1)
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
@ -668,7 +631,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'instance_type_id': '1',
'usage': usage,
'raw': raw,
'tenant': TENANT_ID_1
'tenant': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1
}
exists = self.mox.CreateMockAnything()
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
@ -709,7 +676,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
deleted=str(deleted_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1)
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
@ -734,7 +705,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'usage': usage,
'delete': delete,
'raw': raw,
'tenant': TENANT_ID_1
'tenant': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1
}
exists = self.mox.CreateMockAnything()
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)

View File

@ -129,9 +129,6 @@ class StacktachDBTestCase(unittest.TestCase):
self.assertEqual(returned, object)
self.mox.VerifyAll()
def test_create_rawdata(self):
self._test_db_create_func(models.RawData, db.create_rawdata)
def test_create_lifecycle(self):
self._test_db_create_func(models.Lifecycle, db.create_lifecycle)

View File

@ -1,4 +1,4 @@
# Copyright (c) 2012 - Rackspace Inc.
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@ -21,6 +21,7 @@
import datetime
import decimal
import json
import time
import unittest
import uuid
@ -28,15 +29,21 @@ import kombu.common
import kombu.entity
import kombu.pools
import mox
import multiprocessing
from stacktach import datetime_to_decimal as dt
from stacktach import models
from utils import INSTANCE_ID_1
from utils import RAX_OPTIONS_1
from utils import RAX_OPTIONS_2
from utils import OS_DISTRO_1
from utils import OS_DISTRO_2
from utils import OS_ARCH_1
from utils import OS_ARCH_2
from utils import OS_VERSION_1
from utils import OS_VERSION_2
from utils import TENANT_ID_1
from utils import TENANT_ID_2
from utils import INSTANCE_TYPE_ID_1
from verifier import dbverifier
from verifier import AmbiguousResults
from verifier import FieldMismatch
@ -64,14 +71,58 @@ class VerifierTestCase(unittest.TestCase):
self.mox.StubOutWithMock(models, 'InstanceDeletes',
use_mock_anything=True)
models.InstanceDeletes.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'InstanceReconcile',
use_mock_anything=True)
models.InstanceReconcile.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'InstanceExists',
use_mock_anything=True)
models.InstanceExists.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
models.JsonReport.objects = self.mox.CreateMockAnything()
self._setup_verifier()
def _setup_verifier(self):
self.config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": False,
}
self.pool = self.mox.CreateMockAnything()
self.reconciler = self.mox.CreateMockAnything()
self.verifier = dbverifier.Verifier(self.config,
pool=self.pool,
rec=self.reconciler)
self.config_notif = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": True,
"rabbit": {
"durable_queue": False,
"host": "10.0.0.1",
"port": 5672,
"userid": "rabbit",
"password": "rabbit",
"virtual_host": "/",
"exchange_name": "stacktach",
}
}
self.pool_notif = self.mox.CreateMockAnything()
self.reconciler_notif = self.mox.CreateMockAnything()
self.verifier_notif = dbverifier.Verifier(self.config_notif,
pool=self.pool_notif,
rec=self.reconciler)
def tearDown(self):
self.mox.UnsetStubs()
self.verifier = None
self.pool = None
self.verifier_notif = None
self.pool_notif = None
def test_verify_for_launch(self):
exist = self.mox.CreateMockAnything()
@ -159,6 +210,78 @@ class VerifierTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_verify_for_launch_rax_options_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.rax_options = RAX_OPTIONS_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.rax_options = RAX_OPTIONS_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'rax_options')
self.assertEqual(exception.expected, RAX_OPTIONS_1)
self.assertEqual(exception.actual, RAX_OPTIONS_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_distro_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_distro = OS_DISTRO_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_distro = OS_DISTRO_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_distro')
self.assertEqual(exception.expected, OS_DISTRO_1)
self.assertEqual(exception.actual, OS_DISTRO_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_architecture_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_architecture = OS_ARCH_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_architecture = OS_ARCH_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_architecture')
self.assertEqual(exception.expected, OS_ARCH_1)
self.assertEqual(exception.actual, OS_ARCH_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_version_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_version = OS_VERSION_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_version = OS_VERSION_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_version')
self.assertEqual(exception.expected, OS_VERSION_1)
self.assertEqual(exception.actual, OS_VERSION_2)
self.mox.VerifyAll()
def test_verify_for_launch_late_usage(self):
exist = self.mox.CreateMockAnything()
exist.usage = None
@ -358,6 +481,157 @@ class VerifierTestCase(unittest.TestCase):
self.assertEqual(fm.actual, decimal.Decimal('6.1'))
self.mox.VerifyAll()
def test_verify_with_reconciled_data(self):
exists = self.mox.CreateMockAnything()
exists.instance = INSTANCE_ID_1
launched_at = decimal.Decimal('1.1')
exists.launched_at = launched_at
results = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
.AndReturn(results)
results.count().AndReturn(1)
launched_min = decimal.Decimal('1')
launched_max = decimal.Decimal('1.999999')
filter = {
'instance': INSTANCE_ID_1,
'launched_at__gte': launched_min,
'launched_at__lte': launched_max
}
recs = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
recs.count().AndReturn(1)
reconcile = self.mox.CreateMockAnything()
reconcile.deleted_at = None
recs[0].AndReturn(reconcile)
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
dbverifier._verify_for_launch(exists, launch=reconcile,
launch_type='InstanceReconcile')
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
dbverifier._verify_for_delete(exists, delete=None,
delete_type='InstanceReconcile')
self.mox.ReplayAll()
dbverifier._verify_with_reconciled_data(exists)
self.mox.VerifyAll()
def test_verify_with_reconciled_data_deleted(self):
exists = self.mox.CreateMockAnything()
exists.instance = INSTANCE_ID_1
launched_at = decimal.Decimal('1.1')
deleted_at = decimal.Decimal('2.1')
exists.launched_at = launched_at
exists.deleted_at = deleted_at
results = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
.AndReturn(results)
results.count().AndReturn(1)
launched_min = decimal.Decimal('1')
launched_max = decimal.Decimal('1.999999')
filter = {
'instance': INSTANCE_ID_1,
'launched_at__gte': launched_min,
'launched_at__lte': launched_max
}
recs = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
recs.count().AndReturn(1)
reconcile = self.mox.CreateMockAnything()
reconcile.deleted_at = deleted_at
recs[0].AndReturn(reconcile)
self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
dbverifier._verify_for_launch(exists, launch=reconcile,
launch_type='InstanceReconcile')
self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
dbverifier._verify_for_delete(exists, delete=reconcile,
delete_type='InstanceReconcile')
self.mox.ReplayAll()
dbverifier._verify_with_reconciled_data(exists)
self.mox.VerifyAll()
def test_verify_with_reconciled_data_not_launched(self):
exists = self.mox.CreateMockAnything()
exists.instance = INSTANCE_ID_1
exists.launched_at = None
self.mox.ReplayAll()
with self.assertRaises(VerificationException) as cm:
dbverifier._verify_with_reconciled_data(exists)
exception = cm.exception
self.assertEquals(exception.reason, 'Exists without a launched_at')
self.mox.VerifyAll()
def test_verify_with_reconciled_data_ambiguous_results(self):
exists = self.mox.CreateMockAnything()
exists.instance = INSTANCE_ID_1
launched_at = decimal.Decimal('1.1')
deleted_at = decimal.Decimal('2.1')
exists.launched_at = launched_at
exists.deleted_at = deleted_at
results = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
.AndReturn(results)
results.count().AndReturn(1)
launched_min = decimal.Decimal('1')
launched_max = decimal.Decimal('1.999999')
filter = {
'instance': INSTANCE_ID_1,
'launched_at__gte': launched_min,
'launched_at__lte': launched_max
}
recs = self.mox.CreateMockAnything()
models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
recs.count().AndReturn(2)
self.mox.ReplayAll()
with self.assertRaises(AmbiguousResults) as cm:
dbverifier._verify_with_reconciled_data(exists)
exception = cm.exception
self.assertEquals(exception.object_type, 'InstanceReconcile')
self.mox.VerifyAll()
def test_verify_with_reconciled_data_instance_not_found(self):
    """No reconcile rows for the instance at all raises NotFound."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    # The instance-wide existence check comes back empty ...
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(0)
    self.mox.ReplayAll()
    # ... so no range query is ever issued; NotFound is raised directly.
    with self.assertRaises(NotFound) as cm:
        dbverifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    self.assertEquals(exception.object_type, 'InstanceReconcile')
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_reconcile_not_found(self):
    """Reconcile rows exist for the instance, but none in the launch second."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    # Instance-wide check succeeds ...
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(1)
    # ... but the per-second window query returns nothing.
    launched_min = decimal.Decimal('1')
    launched_max = decimal.Decimal('1.999999')
    filter = {
        'instance': INSTANCE_ID_1,
        'launched_at__gte': launched_min,
        'launched_at__lte': launched_max
    }
    recs = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(**filter).AndReturn(recs)
    recs.count().AndReturn(0)
    self.mox.ReplayAll()
    with self.assertRaises(NotFound) as cm:
        dbverifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    self.assertEquals(exception.object_type, 'InstanceReconcile')
    self.mox.VerifyAll()
def test_verify_pass(self):
exist = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.1')
@ -369,7 +643,8 @@ class VerifierTestCase(unittest.TestCase):
dbverifier._verify_for_delete(exist)
dbverifier._mark_exist_verified(exist)
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertTrue(result)
self.mox.VerifyAll()
def test_verify_no_launched_at(self):
@ -381,8 +656,29 @@ class VerifierTestCase(unittest.TestCase):
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
dbverifier._mark_exist_failed(exist,
reason="Exists without a launched_at")
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
dbverifier._verify_with_reconciled_data(exist)\
.AndRaise(NotFound('InstanceReconcile', {}))
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertFalse(result)
self.mox.VerifyAll()
def test_verify_fails_reconciled_verify_uses_second_exception(self):
    """When both primary and reconciled verification fail, the exists row
    is marked failed with the SECOND (reconciled) failure's reason."""
    exist = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
    ex1 = VerificationException('test1')
    dbverifier._verify_for_launch(exist).AndRaise(ex1)
    self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
    self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
    # Reconciled verification also fails, with a different message ...
    dbverifier._verify_with_reconciled_data(exist)\
        .AndRaise(VerificationException('test2'))
    # ... and that second message is the one recorded as the reason.
    dbverifier._mark_exist_failed(exist, reason='test2')
    self.mox.ReplayAll()
    result, exists = dbverifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_launch_fail(self):
@ -394,9 +690,48 @@ class VerifierTestCase(unittest.TestCase):
self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
verify_exception = VerificationException('test')
dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
dbverifier._verify_with_reconciled_data(exist)\
.AndRaise(NotFound('InstanceReconcile', {}))
dbverifier._mark_exist_failed(exist, reason='test')
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertFalse(result)
self.mox.VerifyAll()
def test_verify_fail_reconcile_success(self):
    """Primary verification fails, but reconciled data verifies cleanly:
    the exists row is marked verified with reconciled=True."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
    self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
    verify_exception = VerificationException('test')
    dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
    # Reconciled verification raises nothing -> success path.
    dbverifier._verify_with_reconciled_data(exist)
    dbverifier._mark_exist_verified(exist, reconciled=True)
    self.mox.ReplayAll()
    result, exists = dbverifier._verify(exist)
    self.assertTrue(result)
    self.mox.VerifyAll()
def test_verify_fail_with_reconciled_data_exception(self):
    """An unexpected exception during reconciled verification marks the
    exists row failed with the exception class name as the reason."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(dbverifier, '_verify_for_launch')
    self.mox.StubOutWithMock(dbverifier, '_verify_for_delete')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_failed')
    self.mox.StubOutWithMock(dbverifier, '_mark_exist_verified')
    verify_exception = VerificationException('test')
    dbverifier._verify_for_launch(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
    # Generic Exception (not a VerificationException/NotFound) ...
    dbverifier._verify_with_reconciled_data(exist)\
        .AndRaise(Exception())
    # ... is reported via its class name.
    dbverifier._mark_exist_failed(exist, reason='Exception')
    self.mox.ReplayAll()
    result, exists = dbverifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_delete_fail(self):
@ -409,9 +744,13 @@ class VerifierTestCase(unittest.TestCase):
verify_exception = VerificationException('test')
dbverifier._verify_for_launch(exist)
dbverifier._verify_for_delete(exist).AndRaise(verify_exception)
self.mox.StubOutWithMock(dbverifier, '_verify_with_reconciled_data')
dbverifier._verify_with_reconciled_data(exist)\
.AndRaise(NotFound('InstanceReconcile', {}))
dbverifier._mark_exist_failed(exist, reason='test')
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertFalse(result)
self.mox.VerifyAll()
def test_verify_exception_during_launch(self):
@ -424,7 +763,8 @@ class VerifierTestCase(unittest.TestCase):
dbverifier._verify_for_launch(exist).AndRaise(Exception())
dbverifier._mark_exist_failed(exist, reason='Exception')
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertFalse(result)
self.mox.VerifyAll()
def test_verify_exception_during_delete(self):
@ -438,11 +778,11 @@ class VerifierTestCase(unittest.TestCase):
dbverifier._verify_for_delete(exist).AndRaise(Exception())
dbverifier._mark_exist_failed(exist, reason='Exception')
self.mox.ReplayAll()
dbverifier._verify(exist)
result, exists = dbverifier._verify(exist)
self.assertFalse(result)
self.mox.VerifyAll()
def test_verify_for_range_without_callback(self):
pool = self.mox.CreateMockAnything()
when_max = datetime.datetime.utcnow()
results = self.mox.CreateMockAnything()
models.InstanceExists.objects.select_related().AndReturn(results)
@ -461,17 +801,113 @@ class VerifierTestCase(unittest.TestCase):
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist1.save()
exist2.save()
pool.apply_async(dbverifier._verify, args=(exist1,), callback=None)
pool.apply_async(dbverifier._verify, args=(exist2,), callback=None)
self.pool.apply_async(dbverifier._verify, args=(exist1,),
callback=None)
self.pool.apply_async(dbverifier._verify, args=(exist2,),
callback=None)
self.mox.ReplayAll()
dbverifier.verify_for_range(pool, when_max)
self.verifier.verify_for_range(when_max)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.mox.VerifyAll()
def test_clean_results_full(self):
    """clean_results() handles all four result states in one pass:
    pending, errored, verified, and failed-verification.

    Bug fix: the final assertion previously compared ``failed[0]`` against
    ``result_failed_verification`` (the async result wrapper), but
    clean_results() appends the unpacked exists object from ``get()`` —
    see the sibling test_clean_results_fail_verification, which asserts
    ``failed_exists``. mox's MockAnything loose equality masked the
    mismatch; assert the correct object.
    """
    self.verifier.reconcile = True
    # Still running -> stays in results.
    result_not_ready = self.mox.CreateMockAnything()
    result_not_ready.ready().AndReturn(False)
    # Finished but the worker raised -> counts as errored.
    result_unsuccessful = self.mox.CreateMockAnything()
    result_unsuccessful.ready().AndReturn(True)
    result_unsuccessful.successful().AndReturn(False)
    # Finished, worker verified the exists row.
    result_successful = self.mox.CreateMockAnything()
    result_successful.ready().AndReturn(True)
    result_successful.successful().AndReturn(True)
    result_successful.get().AndReturn((True, None))
    # Finished, worker ran cleanly but verification failed -> queued
    # for reconciliation.
    result_failed_verification = self.mox.CreateMockAnything()
    result_failed_verification.ready().AndReturn(True)
    result_failed_verification.successful().AndReturn(True)
    failed_exists = self.mox.CreateMockAnything()
    result_failed_verification.get().AndReturn((False, failed_exists))
    self.verifier.results = [result_not_ready,
                             result_unsuccessful,
                             result_successful,
                             result_failed_verification]
    self.mox.ReplayAll()
    (result_count, success_count, errored) = self.verifier.clean_results()
    self.assertEqual(result_count, 1)
    self.assertEqual(success_count, 2)
    self.assertEqual(errored, 1)
    self.assertEqual(len(self.verifier.results), 1)
    self.assertEqual(self.verifier.results[0], result_not_ready)
    self.assertEqual(len(self.verifier.failed), 1)
    # clean_results appends the exists object, not the async result.
    self.assertEqual(self.verifier.failed[0], failed_exists)
    self.mox.VerifyAll()
def test_clean_results_pending(self):
    """A result that is not ready stays queued and nothing is counted."""
    self.verifier.reconcile = True
    pending_result = self.mox.CreateMockAnything()
    pending_result.ready().AndReturn(False)
    self.verifier.results = [pending_result]
    self.mox.ReplayAll()
    result_count, success_count, errored = self.verifier.clean_results()
    # One result remains pending; no successes, no errors.
    self.assertEqual(result_count, 1)
    self.assertEqual(success_count, 0)
    self.assertEqual(errored, 0)
    self.assertEqual(len(self.verifier.results), 1)
    self.assertEqual(self.verifier.results[0], pending_result)
    self.assertEqual(len(self.verifier.failed), 0)
    self.mox.VerifyAll()
def test_clean_results_successful(self):
    """A completed, verified result is drained and counted as a success."""
    self.verifier.reconcile = True
    done_result = self.mox.CreateMockAnything()
    done_result.ready().AndReturn(True)
    done_result.successful().AndReturn(True)
    done_result.get().AndReturn((True, None))
    self.verifier.results = [done_result]
    self.mox.ReplayAll()
    result_count, success_count, errored = self.verifier.clean_results()
    self.assertEqual(result_count, 0)
    self.assertEqual(success_count, 1)
    self.assertEqual(errored, 0)
    # Drained from both queues: nothing pending, nothing failed.
    self.assertEqual(len(self.verifier.results), 0)
    self.assertEqual(len(self.verifier.failed), 0)
    self.mox.VerifyAll()
def test_clean_results_unsuccessful(self):
    """A finished result whose worker raised counts as errored and is
    dropped without being queued for reconciliation."""
    self.verifier.reconcile = True
    result_unsuccessful = self.mox.CreateMockAnything()
    result_unsuccessful.ready().AndReturn(True)
    result_unsuccessful.successful().AndReturn(False)
    self.verifier.results = [result_unsuccessful]
    self.mox.ReplayAll()
    (result_count, success_count, errored) = self.verifier.clean_results()
    self.assertEqual(result_count, 0)
    self.assertEqual(success_count, 0)
    self.assertEqual(errored, 1)
    self.assertEqual(len(self.verifier.results), 0)
    # Worker errors are not reconciliation candidates.
    self.assertEqual(len(self.verifier.failed), 0)
    self.mox.VerifyAll()
def test_clean_results_fail_verification(self):
    """A result whose worker ran cleanly but did not verify queues the
    exists object onto verifier.failed for later reconciliation."""
    self.verifier.reconcile = True
    result_failed_verification = self.mox.CreateMockAnything()
    result_failed_verification.ready().AndReturn(True)
    result_failed_verification.successful().AndReturn(True)
    failed_exists = self.mox.CreateMockAnything()
    # (verified=False, exists) -> exists is appended to failed.
    result_failed_verification.get().AndReturn((False, failed_exists))
    self.verifier.results = [result_failed_verification]
    self.mox.ReplayAll()
    (result_count, success_count, errored) = self.verifier.clean_results()
    self.assertEqual(result_count, 0)
    # The worker itself succeeded, so it still counts as a success.
    self.assertEqual(success_count, 1)
    self.assertEqual(errored, 0)
    self.assertEqual(len(self.verifier.results), 0)
    self.assertEqual(len(self.verifier.failed), 1)
    self.assertEqual(self.verifier.failed[0], failed_exists)
    self.mox.VerifyAll()
def test_verify_for_range_with_callback(self):
callback = self.mox.CreateMockAnything()
pool = self.mox.CreateMockAnything()
when_max = datetime.datetime.utcnow()
results = self.mox.CreateMockAnything()
models.InstanceExists.objects.select_related().AndReturn(results)
@ -490,14 +926,28 @@ class VerifierTestCase(unittest.TestCase):
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist1.save()
exist2.save()
pool.apply_async(dbverifier._verify, args=(exist1,), callback=callback)
pool.apply_async(dbverifier._verify, args=(exist2,), callback=callback)
self.pool.apply_async(dbverifier._verify, args=(exist1,),
callback=callback)
self.pool.apply_async(dbverifier._verify, args=(exist2,),
callback=callback)
self.mox.ReplayAll()
dbverifier.verify_for_range(pool, when_max, callback=callback)
self.verifier.verify_for_range(when_max, callback=callback)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.mox.VerifyAll()
def test_reconcile_failed(self):
    """reconcile_failed() hands each failed exists to the reconciler in
    order and then empties the failed list."""
    self.verifier.reconcile = True
    first_failed = self.mox.CreateMockAnything()
    second_failed = self.mox.CreateMockAnything()
    self.verifier.failed = [first_failed, second_failed]
    self.reconciler.failed_validation(first_failed)
    self.reconciler.failed_validation(second_failed)
    self.mox.ReplayAll()
    self.verifier.reconcile_failed()
    self.assertEqual(len(self.verifier.failed), 0)
    self.mox.VerifyAll()
def test_send_verified_notification_default_routing_key(self):
connection = self.mox.CreateMockAnything()
exchange = self.mox.CreateMockAnything()
@ -570,140 +1020,183 @@ class VerifierTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_run_notifications(self):
config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": True,
"rabbit": {
"durable_queue": False,
"host": "10.0.0.1",
"port": 5672,
"userid": "rabbit",
"password": "rabbit",
"virtual_host": "/",
"exchange_name": "stacktach"
}
}
self.mox.StubOutWithMock(multiprocessing, 'Pool')
pool = self.mox.CreateMockAnything()
multiprocessing.Pool(2).AndReturn(pool)
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
exchange = self.mox.CreateMockAnything()
dbverifier._create_exchange('stacktach', 'topic', durable=False)\
.AndReturn(exchange)
self.mox.StubOutWithMock(dbverifier, '_create_connection')
conn = self.mox.CreateMockAnything()
dbverifier._create_connection(config).AndReturn(conn)
dbverifier._create_connection(self.config_notif).AndReturn(conn)
conn.__enter__().AndReturn(conn)
self.mox.StubOutWithMock(dbverifier, '_run')
dbverifier._run(config, pool, callback=mox.IgnoreArg())
self.mox.StubOutWithMock(self.verifier_notif, '_run')
self.verifier_notif._run(callback=mox.Not(mox.Is(None)))
conn.__exit__(None, None, None)
self.mox.ReplayAll()
dbverifier.run(config)
self.verifier_notif.run()
self.mox.VerifyAll()
def test_run_notifications_with_routing_keys(self):
config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": True,
"rabbit": {
"durable_queue": False,
"host": "10.0.0.1",
"port": 5672,
"userid": "rabbit",
"password": "rabbit",
"virtual_host": "/",
"exchange_name": "stacktach",
}
}
self.mox.StubOutWithMock(multiprocessing, 'Pool')
pool = self.mox.CreateMockAnything()
multiprocessing.Pool(2).AndReturn(pool)
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
exchange = self.mox.CreateMockAnything()
dbverifier._create_exchange('stacktach', 'topic', durable=False) \
.AndReturn(exchange)
self.mox.StubOutWithMock(dbverifier, '_create_connection')
conn = self.mox.CreateMockAnything()
dbverifier._create_connection(config).AndReturn(conn)
dbverifier._create_connection(self.config_notif).AndReturn(conn)
conn.__enter__().AndReturn(conn)
self.mox.StubOutWithMock(dbverifier, '_run')
dbverifier._run(config, pool, callback=mox.IgnoreArg())
self.mox.StubOutWithMock(self.verifier_notif, '_run')
self.verifier_notif._run(callback=mox.Not(mox.Is(None)))
conn.__exit__(None, None, None)
self.mox.ReplayAll()
dbverifier.run(config)
self.verifier_notif.run()
self.mox.VerifyAll()
def test_run_no_notifications(self):
config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": False,
}
self.mox.StubOutWithMock(multiprocessing, 'Pool')
pool = self.mox.CreateMockAnything()
multiprocessing.Pool(2).AndReturn(pool)
self.mox.StubOutWithMock(dbverifier, '_run')
dbverifier._run(config, pool)
self.mox.StubOutWithMock(self.verifier, '_run')
self.verifier._run()
self.mox.ReplayAll()
dbverifier.run(config)
self.verifier.run()
self.mox.VerifyAll()
def test_run_once_notifications(self):
config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": True,
"rabbit": {
"durable_queue": False,
"host": "10.0.0.1",
"port": 5672,
"userid": "rabbit",
"password": "rabbit",
"virtual_host": "/",
"exchange_name": "stacktach"
}
}
self.mox.StubOutWithMock(multiprocessing, 'Pool')
pool = self.mox.CreateMockAnything()
multiprocessing.Pool(2).AndReturn(pool)
self.mox.StubOutWithMock(dbverifier, '_create_exchange')
exchange = self.mox.CreateMockAnything()
dbverifier._create_exchange('stacktach', 'topic', durable=False) \
.AndReturn(exchange)
self.mox.StubOutWithMock(dbverifier, '_create_connection')
conn = self.mox.CreateMockAnything()
dbverifier._create_connection(config).AndReturn(conn)
dbverifier._create_connection(self.config_notif).AndReturn(conn)
conn.__enter__().AndReturn(conn)
self.mox.StubOutWithMock(dbverifier, '_run_once')
dbverifier._run_once(config, pool, callback=mox.IgnoreArg())
self.mox.StubOutWithMock(self.verifier_notif, '_run_once')
self.verifier_notif._run_once(callback=mox.Not(mox.Is(None)))
conn.__exit__(None, None, None)
self.mox.ReplayAll()
dbverifier.run_once(config)
self.verifier_notif.run_once()
self.mox.VerifyAll()
def test_run_once_no_notifications(self):
config = {
"tick_time": 30,
"settle_time": 5,
"settle_units": "minutes",
"pool_size": 2,
"enable_notifications": False,
}
self.mox.StubOutWithMock(multiprocessing, 'Pool')
pool = self.mox.CreateMockAnything()
multiprocessing.Pool(2).AndReturn(pool)
self.mox.StubOutWithMock(dbverifier, '_run_once')
dbverifier._run_once(config, pool)
self.mox.StubOutWithMock(self.verifier, '_run_once')
self.verifier._run_once()
self.mox.ReplayAll()
dbverifier.run_once(config)
self.verifier.run_once()
self.mox.VerifyAll()
def test_run_full_no_notifications(self):
    """One full run() loop iteration without notifications: the settle
    window is applied, verification is queued with no callback, results
    are drained, failures reconciled, then the loop sleeps and exits."""
    self.verifier.reconcile = True
    self.mox.StubOutWithMock(self.verifier, '_keep_running')
    self.verifier._keep_running().AndReturn(True)
    start = datetime.datetime.utcnow()
    self.mox.StubOutWithMock(self.verifier, '_utcnow')
    self.verifier._utcnow().AndReturn(start)
    # ending_max = now - settle window from config.
    settle_time = self.config['settle_time']
    settle_units = self.config['settle_units']
    settle_offset = {settle_units: settle_time}
    ending_max = start - datetime.timedelta(**settle_offset)
    self.mox.StubOutWithMock(self.verifier, 'verify_for_range')
    # No notifications configured -> callback is None.
    self.verifier.verify_for_range(ending_max, callback=None)
    self.mox.StubOutWithMock(self.verifier, 'reconcile_failed')
    result1 = self.mox.CreateMockAnything()
    result2 = self.mox.CreateMockAnything()
    self.verifier.results = [result1, result2]
    # Both async results finish successfully and verify.
    result1.ready().AndReturn(True)
    result1.successful().AndReturn(True)
    result1.get().AndReturn((True, None))
    result2.ready().AndReturn(True)
    result2.successful().AndReturn(True)
    result2.get().AndReturn((True, None))
    self.verifier.reconcile_failed()
    self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
    time.sleep(self.config['tick_time'])
    # Second _keep_running() returns False to end the loop.
    self.verifier._keep_running().AndReturn(False)
    self.mox.ReplayAll()
    self.verifier.run()
    self.mox.VerifyAll()
def test_run_full(self):
    """One full run() loop iteration with notifications enabled: same as
    the no-notification case but verify_for_range must receive a
    non-None callback (the notification sender)."""
    self.verifier_notif.reconcile = True
    self.mox.StubOutWithMock(self.verifier_notif, '_keep_running')
    self.verifier_notif._keep_running().AndReturn(True)
    start = datetime.datetime.utcnow()
    self.mox.StubOutWithMock(self.verifier_notif, '_utcnow')
    self.verifier_notif._utcnow().AndReturn(start)
    # ending_max = now - settle window from config.
    settle_time = self.config['settle_time']
    settle_units = self.config['settle_units']
    settle_offset = {settle_units: settle_time}
    ending_max = start - datetime.timedelta(**settle_offset)
    self.mox.StubOutWithMock(self.verifier_notif, 'verify_for_range')
    # Notifications configured -> callback must not be None.
    self.verifier_notif.verify_for_range(ending_max,
                                         callback=mox.Not(mox.Is(None)))
    self.mox.StubOutWithMock(self.verifier_notif, 'reconcile_failed')
    result1 = self.mox.CreateMockAnything()
    result2 = self.mox.CreateMockAnything()
    self.verifier_notif.results = [result1, result2]
    result1.ready().AndReturn(True)
    result1.successful().AndReturn(True)
    result1.get().AndReturn((True, None))
    result2.ready().AndReturn(True)
    result2.successful().AndReturn(True)
    result2.get().AndReturn((True, None))
    self.verifier_notif.reconcile_failed()
    self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
    time.sleep(self.config['tick_time'])
    # Second _keep_running() returns False to end the loop.
    self.verifier_notif._keep_running().AndReturn(False)
    self.mox.ReplayAll()
    self.verifier_notif.run()
    self.mox.VerifyAll()
def test_run_once_full_no_notifications(self):
    """run_once() without notifications performs a single verification
    pass (callback=None), drains results, reconciles, and sleeps once —
    no _keep_running loop is involved."""
    self.verifier.reconcile = True
    start = datetime.datetime.utcnow()
    self.mox.StubOutWithMock(self.verifier, '_utcnow')
    self.verifier._utcnow().AndReturn(start)
    # ending_max = now - settle window from config.
    settle_time = self.config['settle_time']
    settle_units = self.config['settle_units']
    settle_offset = {settle_units: settle_time}
    ending_max = start - datetime.timedelta(**settle_offset)
    self.mox.StubOutWithMock(self.verifier, 'verify_for_range')
    self.verifier.verify_for_range(ending_max, callback=None)
    result1 = self.mox.CreateMockAnything()
    result2 = self.mox.CreateMockAnything()
    self.verifier.results = [result1, result2]
    result1.ready().AndReturn(True)
    result1.successful().AndReturn(True)
    result1.get().AndReturn((True, None))
    result2.ready().AndReturn(True)
    result2.successful().AndReturn(True)
    result2.get().AndReturn((True, None))
    self.mox.StubOutWithMock(self.verifier, 'reconcile_failed')
    self.verifier.reconcile_failed()
    self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
    time.sleep(self.config['tick_time'])
    self.mox.ReplayAll()
    self.verifier.run_once()
    self.mox.VerifyAll()
def test_run_once_full(self):
    """run_once() with notifications enabled: single pass, but the
    verification callback must be supplied (non-None)."""
    self.verifier_notif.reconcile = True
    start = datetime.datetime.utcnow()
    self.mox.StubOutWithMock(self.verifier_notif, '_utcnow')
    self.verifier_notif._utcnow().AndReturn(start)
    # ending_max = now - settle window from config.
    settle_time = self.config['settle_time']
    settle_units = self.config['settle_units']
    settle_offset = {settle_units: settle_time}
    ending_max = start - datetime.timedelta(**settle_offset)
    self.mox.StubOutWithMock(self.verifier_notif, 'verify_for_range')
    self.verifier_notif.verify_for_range(ending_max,
                                         callback=mox.Not(mox.Is(None)))
    result1 = self.mox.CreateMockAnything()
    result2 = self.mox.CreateMockAnything()
    self.verifier_notif.results = [result1, result2]
    result1.ready().AndReturn(True)
    result1.successful().AndReturn(True)
    result1.get().AndReturn((True, None))
    result2.ready().AndReturn(True)
    result2.successful().AndReturn(True)
    result2.get().AndReturn((True, None))
    self.mox.StubOutWithMock(self.verifier_notif, 'reconcile_failed')
    self.verifier_notif.reconcile_failed()
    self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
    time.sleep(self.config['tick_time'])
    self.mox.ReplayAll()
    self.verifier_notif.run_once()
    self.mox.VerifyAll()

View File

@ -40,6 +40,18 @@ REQUEST_ID_1 = 'req-611a4d70-9e47-4b27-a95e-27996cc40c06'
REQUEST_ID_2 = 'req-a951dec0-52ee-425d-9f56-d68bd1ad00ac'
REQUEST_ID_3 = 'req-039a33f7-5849-4406-8166-4db8cd085f52'
RAX_OPTIONS_1 = '1'
RAX_OPTIONS_2 = '2'
OS_DISTRO_1 = "linux"
OS_DISTRO_2 = "selinux"
OS_ARCH_1 = "x86"
OS_ARCH_2 = "x64"
OS_VERSION_1 = "1"
OS_VERSION_2 = "2"
def decimal_utc(t=None):
    """Convert datetime *t* to a decimal timestamp; default is now (UTC).

    Bug fix: the original default ``t=datetime.datetime.utcnow()`` is
    evaluated once at import time, so every default call returned the
    same stale timestamp. A None sentinel makes the default per-call.
    """
    if t is None:
        t = datetime.datetime.utcnow()
    return dt.dt_to_decimal(t)
@ -48,29 +60,29 @@ def decimal_utc(t = datetime.datetime.utcnow()):
def create_nova_notif(request_id=None, instance=INSTANCE_ID_1, type_id='1',
launched=None, deleted=None, new_type_id=None,
message_id=MESSAGE_ID_1, audit_period_beginning=None,
audit_period_ending=None, tenant_id = None):
audit_period_ending=None, tenant_id=None,
rax_options=None, os_architecture=None,
os_version=None, os_distro=None):
notif = ['', {
'message_id': message_id,
'payload': {
'image_meta': {},
'instance_id': instance,
'instance_type_id': type_id,
}
}
}]
if request_id:
notif[1]['_context_request_id'] = request_id
if launched:
notif[1]['payload']['launched_at'] = launched
if deleted:
notif[1]['payload']['deleted_at'] = deleted
if new_type_id:
notif[1]['payload']['new_instance_type_id'] = new_type_id
if audit_period_beginning:
notif[1]['payload']['audit_period_beginning'] = audit_period_beginning
if audit_period_ending:
notif[1]['payload']['audit_period_ending'] = audit_period_ending
if tenant_id:
notif[1]['payload']['tenant_id'] = tenant_id
notif[1]['_context_request_id'] = request_id
notif[1]['payload']['launched_at'] = launched
notif[1]['payload']['deleted_at'] = deleted
notif[1]['payload']['new_instance_type_id'] = new_type_id
notif[1]['payload']['audit_period_beginning'] = audit_period_beginning
notif[1]['payload']['audit_period_ending'] = audit_period_ending
notif[1]['payload']['tenant_id'] = tenant_id
notif[1]['payload']['image_meta']['com.rackspace__1__options'] = rax_options
notif[1]['payload']['image_meta']['org.openstack__1__architecture'] = os_architecture
notif[1]['payload']['image_meta']['org.openstack__1__os_distro'] = os_distro
notif[1]['payload']['image_meta']['org.openstack__1__os_version'] = os_version
return notif

View File

@ -23,7 +23,7 @@ import datetime
import json
import os
import sys
from time import sleep
import time
import uuid
from django.db import transaction
@ -44,6 +44,7 @@ LOG = stacklog.get_logger()
from stacktach import models
from stacktach import datetime_to_decimal as dt
from stacktach import reconciler
from verifier import AmbiguousResults
from verifier import FieldMismatch
from verifier import NotFound
@ -69,6 +70,15 @@ def _find_launch(instance, launched):
return models.InstanceUsage.objects.filter(**params)
def _find_reconcile(instance, launched):
    """Return InstanceReconcile rows for *instance* whose launched_at
    falls within the same whole second as *launched*."""
    # Truncate to the start of the second, then span its 999999 microseconds.
    second_start = launched - datetime.timedelta(
        microseconds=launched.microsecond)
    second_end = second_start + datetime.timedelta(microseconds=999999)
    return models.InstanceReconcile.objects.filter(
        instance=instance,
        launched_at__gte=dt.dt_to_decimal(second_start),
        launched_at__lte=dt.dt_to_decimal(second_end))
def _find_delete(instance, launched, deleted_max=None):
start = launched - datetime.timedelta(microseconds=launched.microsecond)
end = start + datetime.timedelta(microseconds=999999)
@ -80,8 +90,16 @@ def _find_delete(instance, launched, deleted_max=None):
return models.InstanceDeletes.objects.filter(**params)
def _mark_exist_verified(exist):
exist.status = models.InstanceExists.VERIFIED
def _mark_exist_verified(exist,
reconciled=False,
reason=None):
if not reconciled:
exist.status = models.InstanceExists.VERIFIED
else:
exist.status = models.InstanceExists.RECONCILED
if reason is not None:
exist.fail_reason = reason
exist.save()
@ -135,11 +153,28 @@ def _verify_field_mismatch(exists, launch):
raise FieldMismatch('tenant', exists.tenant,
launch.tenant)
if launch.rax_options != exists.rax_options:
raise FieldMismatch('rax_options', exists.rax_options,
launch.rax_options)
def _verify_for_launch(exist):
if exist.usage:
if launch.os_architecture != exists.os_architecture:
raise FieldMismatch('os_architecture', exists.os_architecture,
launch.os_architecture)
if launch.os_version != exists.os_version:
raise FieldMismatch('os_version', exists.os_version,
launch.os_version)
if launch.os_distro != exists.os_distro:
raise FieldMismatch('os_distro', exists.os_distro,
launch.os_distro)
def _verify_for_launch(exist, launch=None, launch_type="InstanceUsage"):
if not launch and exist.usage:
launch = exist.usage
else:
elif not launch:
if models.InstanceUsage.objects\
.filter(instance=exist.instance).count() > 0:
launches = _find_launch(exist.instance,
@ -150,23 +185,22 @@ def _verify_for_launch(exist):
'launched_at': exist.launched_at
}
if count > 1:
raise AmbiguousResults('InstanceUsage', query)
raise AmbiguousResults(launch_type, query)
elif count == 0:
raise NotFound('InstanceUsage', query)
raise NotFound(launch_type, query)
launch = launches[0]
else:
raise NotFound('InstanceUsage', {'instance': exist.instance})
raise NotFound(launch_type, {'instance': exist.instance})
_verify_field_mismatch(exist, launch)
def _verify_for_delete(exist):
def _verify_for_delete(exist, delete=None, delete_type="InstanceDelete"):
delete = None
if exist.delete:
if not delete and exist.delete:
# We know we have a delete and we have it's id
delete = exist.delete
else:
elif not delete:
if exist.deleted_at:
# We received this exists before the delete, go find it
deletes = _find_delete(exist.instance,
@ -178,7 +212,7 @@ def _verify_for_delete(exist):
'instance': exist.instance,
'launched_at': exist.launched_at
}
raise NotFound('InstanceDelete', query)
raise NotFound(delete_type, query)
else:
# We don't know if this is supposed to have a delete or not.
# Thus, we need to check if we have a delete for this instance.
@ -190,7 +224,7 @@ def _verify_for_delete(exist):
deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
deletes = _find_delete(exist.instance, launched_at, deleted_at_max)
if deletes.count() > 0:
reason = 'Found InstanceDeletes for non-delete exist'
reason = 'Found %ss for non-delete exist' % delete_type
raise VerificationException(reason)
if delete:
@ -205,6 +239,54 @@ def _verify_for_delete(exist):
delete.deleted_at)
def _verify_with_reconciled_data(exist):
    """Verify an exists record against InstanceReconcile data instead of
    the usual usage/delete records.

    Raises VerificationException when exist has no launched_at, NotFound
    when no matching reconcile row exists, and AmbiguousResults when more
    than one row matches the launch second.
    """
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    # Cheap existence check before the narrower per-second lookup.
    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        recs = _find_reconcile(exist.instance,
                               dt.dt_from_decimal(exist.launched_at))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            # Multiple reconcile rows in the same second; cannot pick one.
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    # The reconcile row doubles as the delete record when it has a
    # deleted_at; otherwise verify the no-delete case.
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete,
                       delete_type="InstanceReconcile")
def _attempt_reconciled_verify(exist, orig_e):
    """Fallback verification against reconciled data after a primary
    verification failure.

    *orig_e* is the exception from the primary verification; its message
    becomes the failure reason when no reconciled data is found. Returns
    True when the reconciled data verifies the exists record.
    """
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        _mark_exist_verified(exist, reconciled=True)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        _mark_exist_failed(exist, reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        _mark_exist_failed(exist, reason=str(rec_e))
    except Exception, rec_e:
        # Unexpected error: record the class name and log the traceback.
        _mark_exist_failed(exist, reason=rec_e.__class__.__name__)
        LOG.exception(rec_e)
    return verified
def _verify(exist):
verified = False
try:
@ -216,8 +298,9 @@ def _verify(exist):
verified = True
_mark_exist_verified(exist)
except VerificationException, e:
_mark_exist_failed(exist, reason=str(e))
except VerificationException, orig_e:
# Something is wrong with the InstanceUsage record
verified = _attempt_reconciled_verify(exist, orig_e)
except Exception, e:
_mark_exist_failed(exist, reason=e.__class__.__name__)
LOG.exception(e)
@ -225,54 +308,6 @@ def _verify(exist):
return verified, exist
results = []
def verify_for_range(pool, ending_max, callback=None):
    """Queue verification of all PENDING exists records up to *ending_max*.

    Each record is flagged VERIFYING and submitted to *pool*; async
    results are accumulated in the module-level ``results`` list.
    Progress is logged roughly every 30 seconds. Returns the number of
    records queued.
    """
    exists = _list_exists(ending_max=ending_max,
                          status=models.InstanceExists.PENDING)
    count = exists.count()
    added = 0
    update_interval = datetime.timedelta(seconds=30)
    next_update = datetime.datetime.utcnow() + update_interval
    LOG.info("Adding %s exists to queue." % count)
    while added < count:
        # Work in slices of 1000 to bound memory; marking rows VERIFYING
        # removes them from the PENDING queryset for the next slice.
        for exist in exists[0:1000]:
            exist.status = models.InstanceExists.VERIFYING
            exist.save()
            result = pool.apply_async(_verify, args=(exist,),
                                      callback=callback)
            results.append(result)
            added += 1
            if datetime.datetime.utcnow() > next_update:
                # N: new, P: pending, S: successful, E: errored
                values = ((added,) + clean_results())
                msg = "N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
                next_update = datetime.datetime.utcnow() + update_interval
    return count
def clean_results():
    """Prune finished async results from the module-level ``results`` list.

    Returns a tuple (pending_count, successful, errored).
    """
    global results

    pending = []
    finished = 0
    successful = 0

    for result in results:
        if result.ready():
            finished += 1
            if result.successful():
                successful += 1
        else:
            # Still running: keep it for the next sweep.
            pending.append(result)

    results = pending
    # Anything finished but not successful raised in the worker.
    errored = finished - successful
    return len(results), successful, errored
def _send_notification(message, routing_key, connection, exchange):
with kombu.pools.producers[connection].acquire(block=True) as producer:
kombu.common.maybe_declare(exchange, producer.channel)
@ -309,81 +344,154 @@ def _create_connection(config):
return kombu.connection.BrokerConnection(**conn_params)
def _run(config, pool, callback=None):
tick_time = config['tick_time']
settle_units = config['settle_units']
settle_time = config['settle_time']
while True:
with transaction.commit_on_success():
now = datetime.datetime.utcnow()
kwargs = {settle_units: settle_time}
ending_max = now - datetime.timedelta(**kwargs)
new = verify_for_range(pool, ending_max, callback=callback)
class Verifier(object):
msg = "N: %s, P: %s, S: %s, E: %s" % ((new,) + clean_results())
LOG.info(msg)
sleep(tick_time)
def __init__(self, config, pool=None, rec=None):
self.config = config
self.pool = pool or multiprocessing.Pool(self.config['pool_size'])
self.reconcile = self.config.get('reconcile', False)
self.reconciler = self._load_reconciler(config, rec=rec)
self.results = []
self.failed = []
def _load_reconciler(self, config, rec=None):
if rec:
return rec
def run(config):
pool = multiprocessing.Pool(config['pool_size'])
if self.reconcile:
config_loc = config.get('reconciler_config',
'/etc/stacktach/reconciler_config.json')
with open(config_loc, 'r') as rec_config_file:
rec_config = json.load(rec_config_file)
return reconciler.Reconciler(rec_config)
if config['enable_notifications']:
exchange = _create_exchange(config['rabbit']['exchange_name'],
'topic',
durable=config['rabbit']['durable_queue'])
routing_keys = None
if config['rabbit'].get('routing_keys') is not None:
routing_keys = config['rabbit']['routing_keys']
def clean_results(self):
pending = []
finished = 0
successful = 0
with _create_connection(config) as conn:
def callback(result):
(verified, exist) = result
if verified:
send_verified_notification(exist, conn, exchange,
routing_keys=routing_keys)
for result in self.results:
if result.ready():
finished += 1
if result.successful():
(verified, exists) = result.get()
if self.reconcile and not verified:
self.failed.append(exists)
successful += 1
else:
pending.append(result)
_run(config, pool, callback=callback)
else:
_run(config, pool)
self.results = pending
errored = finished - successful
return len(self.results), successful, errored
def verify_for_range(self, ending_max, callback=None):
exists = _list_exists(ending_max=ending_max,
status=models.InstanceExists.PENDING)
count = exists.count()
added = 0
update_interval = datetime.timedelta(seconds=30)
next_update = datetime.datetime.utcnow() + update_interval
LOG.info("Adding %s exists to queue." % count)
while added < count:
for exist in exists[0:1000]:
exist.status = models.InstanceExists.VERIFYING
exist.save()
result = self.pool.apply_async(_verify, args=(exist,),
callback=callback)
self.results.append(result)
added += 1
if datetime.datetime.utcnow() > next_update:
values = ((added,) + self.clean_results())
msg = "N: %s, P: %s, S: %s, E: %s" % values
LOG.info(msg)
next_update = datetime.datetime.utcnow() + update_interval
return count
def _run_once(config, pool, callback=None):
tick_time = config['tick_time']
settle_units = config['settle_units']
settle_time = config['settle_time']
now = datetime.datetime.utcnow()
kwargs = {settle_units: settle_time}
ending_max = now - datetime.timedelta(**kwargs)
new = verify_for_range(pool, ending_max, callback=callback)
def reconcile_failed(self):
for failed_exist in self.failed:
self.reconciler.failed_validation(failed_exist)
self.failed = []
LOG.info("Verifying %s exist events" % new)
while len(results) > 0:
LOG.info("P: %s, F: %s, E: %s" % clean_results())
sleep(tick_time)
def _keep_running(self):
return True
def _utcnow(self):
return datetime.datetime.utcnow()
def run_once(config):
pool = multiprocessing.Pool(config['pool_size'])
def _run(self, callback=None):
tick_time = self.config['tick_time']
settle_units = self.config['settle_units']
settle_time = self.config['settle_time']
while self._keep_running():
with transaction.commit_on_success():
now = self._utcnow()
kwargs = {settle_units: settle_time}
ending_max = now - datetime.timedelta(**kwargs)
new = self.verify_for_range(ending_max,
callback=callback)
values = ((new,) + self.clean_results())
if self.reconcile:
self.reconcile_failed()
msg = "N: %s, P: %s, S: %s, E: %s" % values
LOG.info(msg)
time.sleep(tick_time)
if config['enable_notifications']:
exchange = _create_exchange(config['rabbit']['exchange_name'],
'topic',
durable=config['rabbit']['durable_queue'])
routing_keys = None
if config['rabbit'].get('routing_keys') is not None:
routing_keys = config['rabbit']['routing_keys']
def run(self):
if self.config['enable_notifications']:
exchange = _create_exchange(self.config['rabbit']['exchange_name'],
'topic',
durable=self.config['rabbit']['durable_queue'])
routing_keys = None
if self.config['rabbit'].get('routing_keys') is not None:
routing_keys = self.config['rabbit']['routing_keys']
with _create_connection(config) as conn:
def callback(result):
(verified, exist) = result
if verified:
send_verified_notification(exist, conn, exchange,
routing_keys=routing_keys)
with _create_connection(self.config) as conn:
def callback(result):
(verified, exist) = result
if verified:
send_verified_notification(exist, conn, exchange,
routing_keys=routing_keys)
_run_once(config, pool, callback=callback)
else:
_run_once(config, pool)
self._run(callback=callback)
else:
self._run()
def _run_once(self, callback=None):
tick_time = self.config['tick_time']
settle_units = self.config['settle_units']
settle_time = self.config['settle_time']
now = self._utcnow()
kwargs = {settle_units: settle_time}
ending_max = now - datetime.timedelta(**kwargs)
new = self.verify_for_range(ending_max, callback=callback)
LOG.info("Verifying %s exist events" % new)
while len(self.results) > 0:
LOG.info("P: %s, F: %s, E: %s" % self.clean_results())
if self.reconcile:
self.reconcile_failed()
time.sleep(tick_time)
def run_once(self):
if self.config['enable_notifications']:
exchange = _create_exchange(self.config['rabbit']['exchange_name'],
'topic',
durable=self.config['rabbit']['durable_queue'])
routing_keys = None
if self.config['rabbit'].get('routing_keys') is not None:
routing_keys = self.config['rabbit']['routing_keys']
with _create_connection(self.config) as conn:
def callback(result):
(verified, exist) = result
if verified:
send_verified_notification(exist, conn, exchange,
routing_keys=routing_keys)
self._run_once(callback=callback)
else:
self._run_once()
if __name__ == '__main__':
@ -413,7 +521,8 @@ if __name__ == '__main__':
config = {'tick_time': args.tick_time, 'settle_time': args.settle_time,
'settle_units': args.settle_units, 'pool_size': args.pool_size}
verifier = Verifier(config)
if args.run_once:
run_once(config)
verifier.run_once()
else:
run(config)
verifier.run()

View File

@ -59,7 +59,13 @@ if __name__ == '__main__':
with open(config_filename, "r") as f:
config = json.load(f)
process = Process(target=dbverifier.run, args=(config, ))
def make_and_start_verifier(config):
# Gotta create it and run it this way so things don't get
# lost when the process is forked.
verifier = dbverifier.Verifier(config)
verifier.run()
process = Process(target=make_and_start_verifier, args=(config,))
process.start()
signal.signal(signal.SIGINT, kill_time)
signal.signal(signal.SIGTERM, kill_time)

View File

@ -55,7 +55,8 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
durable=self.durable, auto_delete=auto_delete)
durable=self.durable,
auto_delete=auto_delete)
def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
auto_delete=False):