From c1e8e305e1ae67766b12e8e87b5ad26230f14ee7 Mon Sep 17 00:00:00 2001 From: root Date: Mon, 11 Feb 2013 15:10:34 -0600 Subject: [PATCH 1/4] requests working --- migrations/001_base.sql | 61 ++++++++++ migrations/002_task.sql | 62 ++++++++++ migrations/002_task_indexes.sql | 31 +++++ migrations/003_populate_task_and_image.py | 63 ++++++++++ reports/requests.py | 140 ++++++++++++++++++++++ stacktach/datetime_to_decimal.py | 2 + stacktach/models.py | 3 + stacktach/views.py | 15 ++- worker/worker.py | 30 +++-- 9 files changed, 391 insertions(+), 16 deletions(-) create mode 100644 migrations/001_base.sql create mode 100644 migrations/002_task.sql create mode 100644 migrations/002_task_indexes.sql create mode 100644 migrations/003_populate_task_and_image.py create mode 100644 reports/requests.py diff --git a/migrations/001_base.sql b/migrations/001_base.sql new file mode 100644 index 0000000..5f81c41 --- /dev/null +++ b/migrations/001_base.sql @@ -0,0 +1,61 @@ +BEGIN; +CREATE TABLE `stacktach_deployment` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL +) +; +CREATE TABLE `stacktach_rawdata` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `deployment_id` integer NOT NULL, + `tenant` varchar(50), + `json` longtext NOT NULL, + `routing_key` varchar(50), + `state` varchar(20), + `old_state` varchar(20), + `old_task` varchar(30), + `when` numeric(20, 6) NOT NULL, + `publisher` varchar(100), + `event` varchar(50), + `service` varchar(50), + `host` varchar(100), + `instance` varchar(50), + `request_id` varchar(50) +) +; +ALTER TABLE `stacktach_rawdata` ADD CONSTRAINT `deployment_id_refs_id_362370d` FOREIGN KEY (`deployment_id`) REFERENCES `stacktach_deployment` (`id`); +CREATE TABLE `stacktach_lifecycle` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `instance` varchar(50), + `last_state` varchar(50), + `last_task_state` varchar(50), + `last_raw_id` integer +) +; +ALTER TABLE `stacktach_lifecycle` ADD CONSTRAINT 
`last_raw_id_refs_id_d5fb17d3` FOREIGN KEY (`last_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_timing` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `start_raw_id` integer, + `end_raw_id` integer, + `start_when` numeric(20, 6), + `end_when` numeric(20, 6), + `diff` numeric(20, 6) +) +; +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `lifecycle_id_refs_id_4255ead8` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `start_raw_id_refs_id_c32dfe04` FOREIGN KEY (`start_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `end_raw_id_refs_id_c32dfe04` FOREIGN KEY (`end_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_requesttracker` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `request_id` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `last_timing_id` integer, + `start` numeric(20, 6) NOT NULL, + `duration` numeric(20, 6) NOT NULL, + `completed` bool NOT NULL +) +; +ALTER TABLE `stacktach_requesttracker` ADD CONSTRAINT `lifecycle_id_refs_id_e457729` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_requesttracker` ADD CONSTRAINT `last_timing_id_refs_id_f0827cca` FOREIGN KEY (`last_timing_id`) REFERENCES `stacktach_timing` (`id`); +COMMIT; diff --git a/migrations/002_task.sql b/migrations/002_task.sql new file mode 100644 index 0000000..be9a43b --- /dev/null +++ b/migrations/002_task.sql @@ -0,0 +1,62 @@ +BEGIN; +CREATE TABLE `stacktach_deployment` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL +) +; +CREATE TABLE `stacktach_rawdata` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `deployment_id` integer NOT NULL, + `tenant` varchar(50), + `json` longtext NOT NULL, + `routing_key` varchar(50), + `state` varchar(20), + `old_state` 
varchar(20), + `old_task` varchar(30), + `task` varchar(30), + `when` numeric(20, 6) NOT NULL, + `publisher` varchar(100), + `event` varchar(50), + `service` varchar(50), + `host` varchar(100), + `instance` varchar(50), + `request_id` varchar(50) +) +; +ALTER TABLE `stacktach_rawdata` ADD CONSTRAINT `deployment_id_refs_id_362370d` FOREIGN KEY (`deployment_id`) REFERENCES `stacktach_deployment` (`id`); +CREATE TABLE `stacktach_lifecycle` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `instance` varchar(50), + `last_state` varchar(50), + `last_task_state` varchar(50), + `last_raw_id` integer +) +; +ALTER TABLE `stacktach_lifecycle` ADD CONSTRAINT `last_raw_id_refs_id_d5fb17d3` FOREIGN KEY (`last_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_timing` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `start_raw_id` integer, + `end_raw_id` integer, + `start_when` numeric(20, 6), + `end_when` numeric(20, 6), + `diff` numeric(20, 6) +) +; +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `lifecycle_id_refs_id_4255ead8` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `start_raw_id_refs_id_c32dfe04` FOREIGN KEY (`start_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `end_raw_id_refs_id_c32dfe04` FOREIGN KEY (`end_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_requesttracker` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `request_id` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `last_timing_id` integer, + `start` numeric(20, 6) NOT NULL, + `duration` numeric(20, 6) NOT NULL, + `completed` bool NOT NULL +) +; +ALTER TABLE `stacktach_requesttracker` ADD CONSTRAINT `lifecycle_id_refs_id_e457729` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_requesttracker` ADD 
CONSTRAINT `last_timing_id_refs_id_f0827cca` FOREIGN KEY (`last_timing_id`) REFERENCES `stacktach_timing` (`id`); +COMMIT; diff --git a/migrations/002_task_indexes.sql b/migrations/002_task_indexes.sql new file mode 100644 index 0000000..69a7c0f --- /dev/null +++ b/migrations/002_task_indexes.sql @@ -0,0 +1,31 @@ +BEGIN; +CREATE INDEX `stacktach_rawdata_4ac6801` ON `stacktach_rawdata` (`deployment_id`); +CREATE INDEX `stacktach_rawdata_2207f86d` ON `stacktach_rawdata` (`tenant`); +CREATE INDEX `stacktach_rawdata_2192f43a` ON `stacktach_rawdata` (`routing_key`); +CREATE INDEX `stacktach_rawdata_355bfc27` ON `stacktach_rawdata` (`state`); +CREATE INDEX `stacktach_rawdata_b716e0bb` ON `stacktach_rawdata` (`old_state`); +CREATE INDEX `stacktach_rawdata_8182be12` ON `stacktach_rawdata` (`old_task`); +CREATE INDEX `stacktach_rawdata_1c149b74` ON `stacktach_rawdata` (`task`); +CREATE INDEX `stacktach_rawdata_feaed089` ON `stacktach_rawdata` (`when`); +CREATE INDEX `stacktach_rawdata_878a2906` ON `stacktach_rawdata` (`publisher`); +CREATE INDEX `stacktach_rawdata_a90f9116` ON `stacktach_rawdata` (`event`); +CREATE INDEX `stacktach_rawdata_52c5ef6b` ON `stacktach_rawdata` (`service`); +CREATE INDEX `stacktach_rawdata_38dbea87` ON `stacktach_rawdata` (`host`); +CREATE INDEX `stacktach_rawdata_888b756a` ON `stacktach_rawdata` (`instance`); +CREATE INDEX `stacktach_rawdata_792812e8` ON `stacktach_rawdata` (`request_id`); +CREATE INDEX `stacktach_lifecycle_888b756a` ON `stacktach_lifecycle` (`instance`); +CREATE INDEX `stacktach_lifecycle_9b2555fd` ON `stacktach_lifecycle` (`last_state`); +CREATE INDEX `stacktach_lifecycle_67421a0e` ON `stacktach_lifecycle` (`last_task_state`); +CREATE INDEX `stacktach_lifecycle_dcf9e5f3` ON `stacktach_lifecycle` (`last_raw_id`); +CREATE INDEX `stacktach_timing_52094d6e` ON `stacktach_timing` (`name`); +CREATE INDEX `stacktach_timing_9f222e6b` ON `stacktach_timing` (`lifecycle_id`); +CREATE INDEX `stacktach_timing_efab905a` ON 
`stacktach_timing` (`start_raw_id`); +CREATE INDEX `stacktach_timing_c8bb8daf` ON `stacktach_timing` (`end_raw_id`); +CREATE INDEX `stacktach_timing_4401d15e` ON `stacktach_timing` (`diff`); +CREATE INDEX `stacktach_requesttracker_792812e8` ON `stacktach_requesttracker` (`request_id`); +CREATE INDEX `stacktach_requesttracker_9f222e6b` ON `stacktach_requesttracker` (`lifecycle_id`); +CREATE INDEX `stacktach_requesttracker_ce616a96` ON `stacktach_requesttracker` (`last_timing_id`); +CREATE INDEX `stacktach_requesttracker_29f4f2ea` ON `stacktach_requesttracker` (`start`); +CREATE INDEX `stacktach_requesttracker_8eb45f9b` ON `stacktach_requesttracker` (`duration`); +CREATE INDEX `stacktach_requesttracker_e490d511` ON `stacktach_requesttracker` (`completed`); +COMMIT; diff --git a/migrations/003_populate_task_and_image.py b/migrations/003_populate_task_and_image.py new file mode 100644 index 0000000..01b6619 --- /dev/null +++ b/migrations/003_populate_task_and_image.py @@ -0,0 +1,63 @@ +import datetime +import json +import sys + +sys.path.append("/stacktach") + +from stacktach import datetime_to_decimal as dt +from stacktach import image_type +from stacktach import models + + +if __name__ != '__main__': + sys.exit(1) + +states = {} + + +def fix_chunk(hours, length): + now = datetime.datetime.utcnow() + start = now - datetime.timedelta(hours=hours+length) + end = now - datetime.timedelta(hours=hours) + dstart = dt.dt_to_decimal(start) + dend = dt.dt_to_decimal(end) + + done = 0 + updated = 0 + block = 0 + print "Hours ago (%d to %d) %d - %d" % (hours + length, hours, dstart, dend) + updates = models.RawData.objects.filter(event='compute.instance.update', + when__gt=dstart, when__lte=dend)\ + .only('task', 'image_type', 'json') + + for raw in updates: + queue, body = json.loads(raw.json) + payload = body.get('payload', {}) + task = payload.get('new_task_state', None) + + if task != None and task != 'None': + states[task] = states.get(task, 0) + 1 + raw.task = task + + 
image_type_num = image_type.get_numeric_code(payload) + updated += 1 + raw.save() + + done += 1 + if done >= 10000: + block += 1 + done = 0 + print "# 10k blocks processed: %d (events %d)" % \ + (block, updated) + updated = 0 + + for kv in states.iteritems(): + print "%s = %d" % kv + +for day in xrange(2, 90): + hours = day * 24 + steps = 12 + chunk = 24 / steps + for x in xrange(steps): + fix_chunk(hours, chunk) + hours += chunk diff --git a/reports/requests.py b/reports/requests.py new file mode 100644 index 0000000..b12ce9c --- /dev/null +++ b/reports/requests.py @@ -0,0 +1,140 @@ +import datetime +import json +import sys + +sys.path.append("/stacktach") + +from stacktach import datetime_to_decimal as dt +from stacktach import image_type +from stacktach import models + + +if __name__ != '__main__': + sys.exit(1) + +hours = 0 +length = 24 + +now = datetime.datetime.utcnow() +start = now - datetime.timedelta(hours=hours+length) +end = now - datetime.timedelta(hours=hours) + +dnow = dt.dt_to_decimal(now) +dstart = dt.dt_to_decimal(start) +dend = dt.dt_to_decimal(end) + +codes = {} + +# Get all the instances that have changed in the last N hours ... +updates = models.RawData.objects.filter(event='compute.instance.update', + when__gt=dstart, when__lte=dend)\ + .values('instance').distinct() + +expiry = 60 * 60 # 1 hour +cmds = ['create', 'rebuild', 'rescue', 'resize', 'snapshot'] + +failures = {} +tenant_issues = {} + +for uuid_dict in updates: + uuid = uuid_dict['instance'] + + # All the unique Request ID's for this instance during that timespan. 
+ reqs = models.RawData.objects.filter(instance=uuid, + when__gt=dstart, when__lte=dend) \ + .values('request_id').distinct() + + + for req_dict in reqs: + report = False + req = req_dict['request_id'] + raws = models.RawData.objects.filter(request_id=req)\ + .exclude(event='compute.instance.exists')\ + .order_by('when') + + start = None + err = None + + operation = None + platform = 0 + tenant = 0 + dump = False + + for raw in raws: + if not start: + start = raw.when + if 'error' in raw.routing_key: + err = raw + report = True + + if raw.tenant: + if tenant > 0 and raw.tenant != tenant: + print "Conflicting tenant ID", raw.tenant, tenant + tenant = raw.tenant + + for cmd in cmds: + if cmd in raw.event: + operation = cmd + break + + if raw.image_type > 0: + platform = raw.image_type + + if dump: + print " %s %s T:%s %s %s %s %s %s"\ + % (raw.id, raw.routing_key, raw.tenant, + raw.service, raw.host, raw.deployment.name, + raw.event, dt.dt_from_decimal(raw.when)) + if raw.event == 'compute.instance.update': + print " State: %s->%s, Task %s->%s" % \ + (raw.old_state, raw.state, raw.old_task, raw.task) + + if not start: + continue + + end = raw.when + diff = end - start + + if diff > 3600: + report = True + + if report: + print "------", uuid, "----------" + print " Req:", req + print " Duration: %.2f minutes" % (diff / 60) + print " Operation:", operation + print " Platform:", image_type.readable(platform) + key = (operation, platform) + failures[key] = failures.get(key, 0) + 1 + tenant_issues[tenant] = tenant_issues.get(tenant, 0) + 1 + + if err: + queue, body = json.loads(err.json) + payload = body['payload'] + print "Error. 
EventID: %s, Tenant %s, Service %s, Host %s, "\ + "Deployment %s, Event %s, When %s"\ + % (err.id, err.tenant, err.service, err.host, err.deployment.name, + err.event, dt.dt_from_decimal(err.when)) + exc = payload.get('exception') + if exc: + print exc + code = exc.get('kwargs', {}).get('code') + if code: + codes[code] = codes.get(code, 0) + 1 + +print "-- Failures by operation by platform --" +for failure, count in failures.iteritems(): + operation, platform = failure + readable = image_type.readable(platform) + text = "n/a" + if readable: + text = ", ".join(readable) + print "%s on %s = %d" % (operation, text, count) + +print "-- Errors by Tenant --" +for tenant, count in tenant_issues.iteritems(): + print "T %s = %d" % (tenant, count) + +print "-- Return code counts --" +for k, v in codes.iteritems(): + print k, v diff --git a/stacktach/datetime_to_decimal.py b/stacktach/datetime_to_decimal.py index 96c8ea3..1781cb0 100644 --- a/stacktach/datetime_to_decimal.py +++ b/stacktach/datetime_to_decimal.py @@ -12,6 +12,8 @@ def dt_to_decimal(utc): def dt_from_decimal(dec): + if dec == None: + return "n/a" integer = int(dec) micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(1000000) diff --git a/stacktach/models.py b/stacktach/models.py index 9fde6b2..38acaec 100644 --- a/stacktach/models.py +++ b/stacktach/models.py @@ -38,6 +38,9 @@ class RawData(models.Model): blank=True, db_index=True) old_task = models.CharField(max_length=30, null=True, blank=True, db_index=True) + task = models.CharField(max_length=30, null=True, + blank=True, db_index=True) + image_type = models.IntegerField(null=True, default=0, db_index=True) when = models.DecimalField(max_digits=20, decimal_places=6, db_index=True) publisher = models.CharField(max_length=100, null=True, diff --git a/stacktach/views.py b/stacktach/views.py index c01a922..b2ac66b 100644 --- a/stacktach/views.py +++ b/stacktach/views.py @@ -1,23 +1,26 @@ # Copyright 2012 - Dark Secret Software Inc. 
+import datetime +import json +import pprint + from django import db from django import http from django.shortcuts import render_to_response from django import template -from stacktach import models from stacktach import datetime_to_decimal as dt - -import datetime -import json -import pprint +from stacktach import image_type +from stacktach import models def _extract_states(payload): return { 'state' : payload.get('state', ""), 'old_state' : payload.get('old_state', ""), - 'old_task' : payload.get('old_task_state', "") + 'old_task' : payload.get('old_task_state', ""), + 'task' : payload.get('new_task_state', ""), + 'image_type' : image_type.get_numeric_code(payload) } diff --git a/worker/worker.py b/worker/worker.py index 9a3165f..ae16b04 100644 --- a/worker/worker.py +++ b/worker/worker.py @@ -23,6 +23,7 @@ import kombu.connection import kombu.entity import kombu.mixins import logging +import sys import time from pympler.process import ProcessMemoryInfo @@ -35,6 +36,8 @@ LOG = logging.getLogger(__name__) LOG.setLevel(logging.DEBUG) handler = logging.handlers.TimedRotatingFileHandler('worker.log', when='h', interval=6, backupCount=4) +formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') +handler.setFormatter(formatter) LOG.addHandler(handler) @@ -138,13 +141,20 @@ def run(deployment_config): virtual_host=virtual_host) while True: - LOG.debug("Processing on '%s'" % name) - with kombu.connection.BrokerConnection(**params) as conn: - try: - consumer = NovaConsumer(name, conn, deployment, durable) - consumer.run() - except Exception as e: - LOG.exception("name=%s, exception=%s. Reconnecting in 5s" % - (name, e)) - time.sleep(5) - LOG.debug("Completed processing on '%s'" % name) + try: + LOG.debug("Processing on '%s'" % name) + with kombu.connection.BrokerConnection(**params) as conn: + try: + consumer = NovaConsumer(name, conn, deployment, durable) + consumer.run() + except Exception as e: + LOG.exception("name=%s, exception=%s. 
Reconnecting in 5s" % + (name, e)) + time.sleep(5) + LOG.debug("Completed processing on '%s'" % name) + except: + e = sys.exc_info()[0] + msg = "Uncaught exception: deployment=%s, exception=%s. Retrying in 5s" + LOG.exception(msg % (name, e)) + time.sleep(5) + From eb834bcc9d05047379db99d10a56db7d15198d36 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Mon, 11 Feb 2013 18:53:22 -0600 Subject: [PATCH 2/4] prettytable integrated and error breakdown --- migrations/002_delta.sql | 2 + migrations/003_delta.sql | 2 + migrations/003_image_type.sql | 63 +++++++++++ migrations/003_image_type_indexes.sql | 32 ++++++ migrations/003_populate_task_and_image.py | 2 +- reports/requests.py | 127 ++++++++++++++++++---- stacktach/image_type.py | 58 ++++++++++ 7 files changed, 261 insertions(+), 25 deletions(-) create mode 100644 migrations/002_delta.sql create mode 100644 migrations/003_delta.sql create mode 100644 migrations/003_image_type.sql create mode 100644 migrations/003_image_type_indexes.sql create mode 100644 stacktach/image_type.py diff --git a/migrations/002_delta.sql b/migrations/002_delta.sql new file mode 100644 index 0000000..eb89393 --- /dev/null +++ b/migrations/002_delta.sql @@ -0,0 +1,2 @@ +ALTER TABLE stacktach_rawdata ADD task VARCHAR(30); +CREATE INDEX `stacktach_rawdata_1c149b74` ON `stacktach_rawdata` (`task`); diff --git a/migrations/003_delta.sql b/migrations/003_delta.sql new file mode 100644 index 0000000..8c949bf --- /dev/null +++ b/migrations/003_delta.sql @@ -0,0 +1,2 @@ +ALTER TABLE stacktach_rawdata ADD image_type integer; +CREATE INDEX `stacktach_rawdata_cfde77eb` ON `stacktach_rawdata` (`image_type`); diff --git a/migrations/003_image_type.sql b/migrations/003_image_type.sql new file mode 100644 index 0000000..133d383 --- /dev/null +++ b/migrations/003_image_type.sql @@ -0,0 +1,63 @@ +BEGIN; +CREATE TABLE `stacktach_deployment` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL +) +; +CREATE TABLE 
`stacktach_rawdata` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `deployment_id` integer NOT NULL, + `tenant` varchar(50), + `json` longtext NOT NULL, + `routing_key` varchar(50), + `state` varchar(20), + `old_state` varchar(20), + `old_task` varchar(30), + `task` varchar(30), + `image_type` integer, + `when` numeric(20, 6) NOT NULL, + `publisher` varchar(100), + `event` varchar(50), + `service` varchar(50), + `host` varchar(100), + `instance` varchar(50), + `request_id` varchar(50) +) +; +ALTER TABLE `stacktach_rawdata` ADD CONSTRAINT `deployment_id_refs_id_362370d` FOREIGN KEY (`deployment_id`) REFERENCES `stacktach_deployment` (`id`); +CREATE TABLE `stacktach_lifecycle` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `instance` varchar(50), + `last_state` varchar(50), + `last_task_state` varchar(50), + `last_raw_id` integer +) +; +ALTER TABLE `stacktach_lifecycle` ADD CONSTRAINT `last_raw_id_refs_id_d5fb17d3` FOREIGN KEY (`last_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_timing` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `name` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `start_raw_id` integer, + `end_raw_id` integer, + `start_when` numeric(20, 6), + `end_when` numeric(20, 6), + `diff` numeric(20, 6) +) +; +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `lifecycle_id_refs_id_4255ead8` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `start_raw_id_refs_id_c32dfe04` FOREIGN KEY (`start_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +ALTER TABLE `stacktach_timing` ADD CONSTRAINT `end_raw_id_refs_id_c32dfe04` FOREIGN KEY (`end_raw_id`) REFERENCES `stacktach_rawdata` (`id`); +CREATE TABLE `stacktach_requesttracker` ( + `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, + `request_id` varchar(50) NOT NULL, + `lifecycle_id` integer NOT NULL, + `last_timing_id` integer, + `start` numeric(20, 6) NOT NULL, + `duration` numeric(20, 
6) NOT NULL, + `completed` bool NOT NULL +) +; +ALTER TABLE `stacktach_requesttracker` ADD CONSTRAINT `lifecycle_id_refs_id_e457729` FOREIGN KEY (`lifecycle_id`) REFERENCES `stacktach_lifecycle` (`id`); +ALTER TABLE `stacktach_requesttracker` ADD CONSTRAINT `last_timing_id_refs_id_f0827cca` FOREIGN KEY (`last_timing_id`) REFERENCES `stacktach_timing` (`id`); +COMMIT; diff --git a/migrations/003_image_type_indexes.sql b/migrations/003_image_type_indexes.sql new file mode 100644 index 0000000..ada6b63 --- /dev/null +++ b/migrations/003_image_type_indexes.sql @@ -0,0 +1,32 @@ +BEGIN; +CREATE INDEX `stacktach_rawdata_4ac6801` ON `stacktach_rawdata` (`deployment_id`); +CREATE INDEX `stacktach_rawdata_2207f86d` ON `stacktach_rawdata` (`tenant`); +CREATE INDEX `stacktach_rawdata_2192f43a` ON `stacktach_rawdata` (`routing_key`); +CREATE INDEX `stacktach_rawdata_355bfc27` ON `stacktach_rawdata` (`state`); +CREATE INDEX `stacktach_rawdata_b716e0bb` ON `stacktach_rawdata` (`old_state`); +CREATE INDEX `stacktach_rawdata_8182be12` ON `stacktach_rawdata` (`old_task`); +CREATE INDEX `stacktach_rawdata_1c149b74` ON `stacktach_rawdata` (`task`); +CREATE INDEX `stacktach_rawdata_cfde77eb` ON `stacktach_rawdata` (`image_type`); +CREATE INDEX `stacktach_rawdata_feaed089` ON `stacktach_rawdata` (`when`); +CREATE INDEX `stacktach_rawdata_878a2906` ON `stacktach_rawdata` (`publisher`); +CREATE INDEX `stacktach_rawdata_a90f9116` ON `stacktach_rawdata` (`event`); +CREATE INDEX `stacktach_rawdata_52c5ef6b` ON `stacktach_rawdata` (`service`); +CREATE INDEX `stacktach_rawdata_38dbea87` ON `stacktach_rawdata` (`host`); +CREATE INDEX `stacktach_rawdata_888b756a` ON `stacktach_rawdata` (`instance`); +CREATE INDEX `stacktach_rawdata_792812e8` ON `stacktach_rawdata` (`request_id`); +CREATE INDEX `stacktach_lifecycle_888b756a` ON `stacktach_lifecycle` (`instance`); +CREATE INDEX `stacktach_lifecycle_9b2555fd` ON `stacktach_lifecycle` (`last_state`); +CREATE INDEX `stacktach_lifecycle_67421a0e` ON 
`stacktach_lifecycle` (`last_task_state`); +CREATE INDEX `stacktach_lifecycle_dcf9e5f3` ON `stacktach_lifecycle` (`last_raw_id`); +CREATE INDEX `stacktach_timing_52094d6e` ON `stacktach_timing` (`name`); +CREATE INDEX `stacktach_timing_9f222e6b` ON `stacktach_timing` (`lifecycle_id`); +CREATE INDEX `stacktach_timing_efab905a` ON `stacktach_timing` (`start_raw_id`); +CREATE INDEX `stacktach_timing_c8bb8daf` ON `stacktach_timing` (`end_raw_id`); +CREATE INDEX `stacktach_timing_4401d15e` ON `stacktach_timing` (`diff`); +CREATE INDEX `stacktach_requesttracker_792812e8` ON `stacktach_requesttracker` (`request_id`); +CREATE INDEX `stacktach_requesttracker_9f222e6b` ON `stacktach_requesttracker` (`lifecycle_id`); +CREATE INDEX `stacktach_requesttracker_ce616a96` ON `stacktach_requesttracker` (`last_timing_id`); +CREATE INDEX `stacktach_requesttracker_29f4f2ea` ON `stacktach_requesttracker` (`start`); +CREATE INDEX `stacktach_requesttracker_8eb45f9b` ON `stacktach_requesttracker` (`duration`); +CREATE INDEX `stacktach_requesttracker_e490d511` ON `stacktach_requesttracker` (`completed`); +COMMIT; diff --git a/migrations/003_populate_task_and_image.py b/migrations/003_populate_task_and_image.py index 01b6619..eb5eb4d 100644 --- a/migrations/003_populate_task_and_image.py +++ b/migrations/003_populate_task_and_image.py @@ -39,7 +39,7 @@ def fix_chunk(hours, length): states[task] = states.get(task, 0) + 1 raw.task = task - image_type_num = image_type.get_numeric_code(payload) + raw.image_type = image_type.get_numeric_code(payload, raw.image_type) updated += 1 raw.save() diff --git a/reports/requests.py b/reports/requests.py index b12ce9c..7c3aced 100644 --- a/reports/requests.py +++ b/reports/requests.py @@ -2,6 +2,8 @@ import datetime import json import sys +import prettytable + sys.path.append("/stacktach") from stacktach import datetime_to_decimal as dt @@ -34,6 +36,9 @@ expiry = 60 * 60 # 1 hour cmds = ['create', 'rebuild', 'rescue', 'resize', 'snapshot'] failures = {} 
+causes = {} +error_messages = {} +successes = {} tenant_issues = {} for uuid_dict in updates: @@ -55,10 +60,10 @@ for uuid_dict in updates: start = None err = None - operation = None + operation = "n/a" platform = 0 tenant = 0 - dump = False + cell = "n/a" for raw in raws: if not start: @@ -75,20 +80,12 @@ for uuid_dict in updates: for cmd in cmds: if cmd in raw.event: operation = cmd + cell = raw.deployment.name break if raw.image_type > 0: platform = raw.image_type - if dump: - print " %s %s T:%s %s %s %s %s %s"\ - % (raw.id, raw.routing_key, raw.tenant, - raw.service, raw.host, raw.deployment.name, - raw.event, dt.dt_from_decimal(raw.when)) - if raw.event == 'compute.instance.update': - print " State: %s->%s, Task %s->%s" % \ - (raw.old_state, raw.state, raw.old_task, raw.task) - if not start: continue @@ -98,13 +95,16 @@ for uuid_dict in updates: if diff > 3600: report = True - if report: + key = (operation, platform, cell) + if not report: + successes[key] = successes.get(key, 0) + 1 + else: print "------", uuid, "----------" print " Req:", req print " Duration: %.2f minutes" % (diff / 60) print " Operation:", operation print " Platform:", image_type.readable(platform) - key = (operation, platform) + cause = "> %d min" % (expiry / 60) failures[key] = failures.get(key, 0) + 1 tenant_issues[tenant] = tenant_issues.get(tenant, 0) + 1 @@ -117,24 +117,103 @@ for uuid_dict in updates: err.event, dt.dt_from_decimal(err.when)) exc = payload.get('exception') if exc: - print exc + # group the messages ... + exc_str = str(exc) + print exc_str + error_messages[exc_str] = error_messages.get(exc_str, 0) + 1 + + # extract the code, if any ... 
code = exc.get('kwargs', {}).get('code') if code: codes[code] = codes.get(code, 0) + 1 + cause = code + cause_key = (key, cause) + causes[cause_key] = causes.get(cause_key, 0) + 1 -print "-- Failures by operation by platform --" -for failure, count in failures.iteritems(): - operation, platform = failure + +def dump_breakdown(totals, label): + p = prettytable.PrettyTable(["Category", "Count"]) + for k, v in totals.iteritems(): + p.add_row([k, v]) + print label + p.sortby = 'Count' + print p + + +def dump_summary(info, label): + print "-- %s by operation by cell by platform --" % (label,) + p = prettytable.PrettyTable(["Operation", "Cell", "Platform", "Count"]) + total = 0 + op_totals = {} + cell_totals = {} + platform_totals = {} + for key, count in info.iteritems(): + operation, platform, cell = key + readable = image_type.readable(platform) + text = "n/a" + if readable: + text = ", ".join(readable) + op_totals[operation] = op_totals.get(operation, 0) + count + cell_totals[cell] = cell_totals.get(cell, 0) + count + platform_totals[text] = platform_totals.get(text, 0) + count + + p.add_row([operation, cell, text, count]) + total += count + p.sortby = 'Count' + print p + + dump_breakdown(op_totals, "Total %s by Operation" % label) + dump_breakdown(cell_totals, "Total %s by Cell" % label) + dump_breakdown(platform_totals, "Total %s by Platform" % label) + + print + + return total + + +print +print "SUMMARY" +print +good = dump_summary(successes, "Success") +bad = dump_summary(failures, "Failures") +print "=====================================================" +print "Total Success: %d Total Failure: %d" % (good, bad) +print + +print "-- Errors by Tenant --" +p = prettytable.PrettyTable(["Tenant", "Count"]) +for tenant, count in tenant_issues.iteritems(): + p.add_row([tenant, count]) +p.sortby = 'Count' +print p + +print +print "-- Return code counts --" +p = prettytable.PrettyTable(["Return Code", "Count"]) +for k, v in codes.iteritems(): + p.add_row([k, v]) 
+p.sortby = 'Count' +print p + +print +print "-- Cause breakdown --" +p = prettytable.PrettyTable(["Cause", "Operation", "Cell", "Platform", "Count"]) +for cause_key, count in causes.iteritems(): + key, cause = cause_key + operation, platform, cell = key readable = image_type.readable(platform) text = "n/a" if readable: text = ", ".join(readable) - print "%s on %s = %d" % (operation, text, count) + p.add_row([cause, operation, cell, text, count]) +p.sortby = 'Count' +print p -print "-- Errors by Tenant --" -for tenant, count in tenant_issues.iteritems(): - print "T %s = %d" % (tenant, count) +print +print "-- Error Message Counts --" +p = prettytable.PrettyTable(["Count", "Message"]) +for k, v in error_messages.iteritems(): + p.add_row([v, k[:80]]) +p.sortby = 'Count' +print p -print "-- Return code counts --" -for k, v in codes.iteritems(): - print k, v diff --git a/stacktach/image_type.py b/stacktach/image_type.py new file mode 100644 index 0000000..33af6ad --- /dev/null +++ b/stacktach/image_type.py @@ -0,0 +1,58 @@ +BASE_IMAGE = 0x1 +SNAPSHOT_IMAGE = 0x2 +LINUX_IMAGE = 0x10 + +OS_UBUNTU = 0x100 +OS_DEBIAN = 0x200 +OS_CENTOS = 0x400 +OS_RHEL = 0x800 + + +def isset(num, flag): + return num & flag > 0 + + +flags = {'base' : BASE_IMAGE, + 'snapshot' : SNAPSHOT_IMAGE, + 'linux' : LINUX_IMAGE, + 'ubuntu' : OS_UBUNTU, + 'debian' : OS_DEBIAN, + 'centos' : OS_CENTOS, + 'rhel' : OS_RHEL} + + +def readable(num): + result = [] + for k, v in flags.iteritems(): + if isset(num, v): + result.append(k) + return result + + +def get_numeric_code(payload, default=0): + meta = payload.get('image_meta', {}) + if default == None: + default = 0 + num = default + + image_type = meta.get('image_type', '') + if image_type == 'base': + num |= BASE_IMAGE + if image_type == 'snapshot': + num |= SNAPSHOT_IMAGE + + os_type = meta.get('os_type', '') + if os_type == 'linux': + num |= LINUX_IMAGE + + os_distro = meta.get('os_distro', '') + if os_distro == 'ubuntu': + num |= OS_UBUNTU + if 
os_distro == 'debian': + num |= OS_DEBIAN + if os_distro == 'centos': + num |= OS_CENTOS + if os_distro == 'rhel': + num |= OS_RHEL + + return num From 56f65969e179b14a449eedf41b8c440633f3494c Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Mon, 11 Feb 2013 19:53:20 -0600 Subject: [PATCH 3/4] Specify end date on command line --- reports/requests.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/reports/requests.py b/reports/requests.py index 7c3aced..48daeea 100644 --- a/reports/requests.py +++ b/reports/requests.py @@ -1,6 +1,7 @@ import datetime import json import sys +import time import prettytable @@ -14,14 +15,24 @@ from stacktach import models if __name__ != '__main__': sys.exit(1) +yesterday = datetime.datetime.utcnow().date() - datetime.timedelta(days=1) +if len(sys.argv) == 2: + try: + t = time.strptime(sys.argv[1], "%Y-%m-%d") + yesterday = datetime.datetime(*t[:6]) + except Exception, e: + print e + print "Usage: python requests.py YYYY-MM-DD (the end date)" + sys.exit(1) + hours = 0 length = 24 -now = datetime.datetime.utcnow() -start = now - datetime.timedelta(hours=hours+length) -end = now - datetime.timedelta(hours=hours) +start = datetime.datetime(year=yesterday.year, month=yesterday.month, day=yesterday.day) +end = start + datetime.timedelta(hours=length-1, minutes=59, seconds=59) + +print "Generating report for %s to %s" % (start, end) -dnow = dt.dt_to_decimal(now) dstart = dt.dt_to_decimal(start) dend = dt.dt_to_decimal(end) @@ -30,7 +41,7 @@ codes = {} # Get all the instances that have changed in the last N hours ... 
updates = models.RawData.objects.filter(event='compute.instance.update', when__gt=dstart, when__lte=dend)\ - .values('instance').distinct() + .values('instance').distinct() expiry = 60 * 60 # 1 hour cmds = ['create', 'rebuild', 'rescue', 'resize', 'snapshot'] From 9d40ce24dd8b2ca934b66dfa8fb1ffd30f9ecbfd Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Tue, 12 Feb 2013 14:04:54 -0600 Subject: [PATCH 4/4] tigher summary report with percentiles on times --- migrations/003_populate_task_and_image.py | 2 +- reports/pretty.py | 166 ++++++++++++++++++++++ reports/requests.py | 44 ++++-- stacktach/datetime_to_decimal.py | 13 ++ stacktach/image_type.py | 4 +- 5 files changed, 219 insertions(+), 10 deletions(-) create mode 100644 reports/pretty.py diff --git a/migrations/003_populate_task_and_image.py b/migrations/003_populate_task_and_image.py index eb5eb4d..dd09d79 100644 --- a/migrations/003_populate_task_and_image.py +++ b/migrations/003_populate_task_and_image.py @@ -54,7 +54,7 @@ def fix_chunk(hours, length): for kv in states.iteritems(): print "%s = %d" % kv -for day in xrange(2, 90): +for day in xrange(0, 90): hours = day * 24 steps = 12 chunk = 24 / steps diff --git a/reports/pretty.py b/reports/pretty.py new file mode 100644 index 0000000..d177867 --- /dev/null +++ b/reports/pretty.py @@ -0,0 +1,166 @@ +import datetime +import json +import sys +import time + +import prettytable + +sys.path.append("/stacktach") + +from stacktach import datetime_to_decimal as dt +from stacktach import image_type +from stacktach import models + + +if __name__ != '__main__': + sys.exit(1) + +yesterday = datetime.datetime.utcnow().date() - datetime.timedelta(days=1) +if len(sys.argv) == 2: + try: + t = time.strptime(sys.argv[1], "%Y-%m-%d") + yesterday = datetime.datetime(*t[:6]) + except Exception, e: + print e + print "Usage: python requests.py YYYY-MM-DD (the end date)" + sys.exit(1) + +percentile = 90 +hours = 24 + +start = datetime.datetime(year=yesterday.year, 
month=yesterday.month, + day=yesterday.day) +end = start + datetime.timedelta(hours=hours-1, minutes=59, seconds=59) + +print "Generating report for %s to %s" % (start, end) + +dstart = dt.dt_to_decimal(start) +dend = dt.dt_to_decimal(end) + +codes = {} + +# Get all the instances that have changed in the last N hours ... +updates = models.RawData.objects.filter(event='compute.instance.update', + when__gt=dstart, when__lte=dend)\ + .values('instance').distinct() + +expiry = 60 * 60 # 1 hour +cmds = ['create', 'rebuild', 'rescue', 'resize', 'snapshot'] + +failures = {} +durations = {} +attempts = {} + +for uuid_dict in updates: + uuid = uuid_dict['instance'] + + # All the unique Request ID's for this instance during that timespan. + reqs = models.RawData.objects.filter(instance=uuid, + when__gt=dstart, when__lte=dend) \ + .values('request_id').distinct() + + + for req_dict in reqs: + report = False + req = req_dict['request_id'] + raws = models.RawData.objects.filter(request_id=req)\ + .exclude(event='compute.instance.exists')\ + .order_by('when') + + start = None + err = None + + operation = "aux" + image_type_num = 0 + + for raw in raws: + if not start: + start = raw.when + if 'error' in raw.routing_key: + err = raw + report = True + + for cmd in cmds: + if cmd in raw.event: + operation = cmd + break + + if raw.image_type: + image_type_num |= raw.image_type + + image = "?" + if image_type.isset(image_type_num, image_type.BASE_IMAGE): + image = "base" + if image_type.isset(image_type_num, image_type.SNAPSHOT_IMAGE): + image = "snap" + + if not start: + continue + + end = raw.when + diff = end - start + + if diff > 3600: + report = True + + key = (operation, image) + + # Track durations for all attempts, good and bad ... + _durations = durations.get(key, []) + _durations.append(diff) + durations[key] = _durations + + attempts[key] = attempts.get(key, 0) + 1 + + if report: + failures[key] = failures.get(key, 0) + 1 + +# Print the results ... 
+cols = ["Operation", "Image", "Min*", "Max*", "Avg*", + "Requests", "# Fail", "Fail %"] +p = prettytable.PrettyTable(cols) +for c in cols[2:]: + p.align[c] = 'r' +p.sortby = cols[0] + +pct = (float(100 - percentile) / 2.0) / 100.0 +print "* Using %d-th percentile for results (+/-%.1f%% cut)" % \ + (percentile, pct * 100.0) +total = 0 +failure_total = 0 +for key, count in attempts.iteritems(): + total += count + operation, image = key + + failure_count = failures.get(key, 0) + failure_total += failure_count + failure_percentage = float(failure_count) / float(count) + _failure_percentage = "%.1f%%" % (failure_percentage * 100.0) + + # N-th % of durations ... + _values = durations[key] + _values.sort() + _outliers = int(float(len(_values)) * pct) + if _outliers > 0: + before = len(_values) + _values = _values[_outliers:-_outliers] + print "culling %d -> %d" % (before, len(_values)) + _min = 99999999 + _max = 0 + _total = 0.0 + for value in _values: + _min = min(_min, value) + _max = max(_max, value) + _total += float(value) + _avg = float(_total) / float(len(_values)) + _fmin = dt.sec_to_str(_min) + _fmax = dt.sec_to_str(_max) + _favg = dt.sec_to_str(_avg) + + p.add_row([operation, image, _fmin, _fmax, _favg, count, + failure_count, _failure_percentage]) +print p + +print "Total: %d, Failures: %d, Failure Rate: %.1f%%" % \ + (total, failure_total, + (float(failure_total)/float(total)) * 100.0) diff --git a/reports/requests.py b/reports/requests.py index 48daeea..942fed8 100644 --- a/reports/requests.py +++ b/reports/requests.py @@ -26,9 +26,10 @@ if len(sys.argv) == 2: sys.exit(1) hours = 0 -length = 24 +length = 6 -start = datetime.datetime(year=yesterday.year, month=yesterday.month, day=yesterday.day) +start = datetime.datetime(year=yesterday.year, month=yesterday.month, + day=yesterday.day) end = start + datetime.timedelta(hours=length-1, minutes=59, seconds=59) print "Generating report for %s to %s" % (start, end) @@ -48,6 +49,7 @@ cmds = ['create', 'rebuild', 
'rescue', 'resize', 'snapshot'] failures = {} causes = {} +durations = {} error_messages = {} successes = {} tenant_issues = {} @@ -71,10 +73,10 @@ for uuid_dict in updates: start = None err = None - operation = "n/a" + operation = "aux" platform = 0 tenant = 0 - cell = "n/a" + cell = "unk" for raw in raws: if not start: @@ -107,6 +109,17 @@ for uuid_dict in updates: report = True key = (operation, platform, cell) + + # Track durations for all attempts, good and bad ... + duration_min, duration_max, duration_count, duration_total = \ + durations.get(key, (9999999, 0, 0, 0)) + duration_min = min(duration_min, diff) + duration_max = max(duration_max, diff) + duration_count += 1 + duration_total += diff + durations[key] = (duration_min, duration_max, duration_count, + duration_total) + if not report: successes[key] = successes.get(key, 0) + 1 else: @@ -124,14 +137,16 @@ for uuid_dict in updates: payload = body['payload'] print "Error. EventID: %s, Tenant %s, Service %s, Host %s, "\ "Deployment %s, Event %s, When %s"\ - % (err.id, err.tenant, err.service, err.host, err.deployment.name, + % (err.id, err.tenant, err.service, err.host, + err.deployment.name, err.event, dt.dt_from_decimal(err.when)) exc = payload.get('exception') if exc: # group the messages ... exc_str = str(exc) print exc_str - error_messages[exc_str] = error_messages.get(exc_str, 0) + 1 + error_messages[exc_str] = \ + error_messages.get(exc_str, 0) + 1 # extract the code, if any ... 
def sec_to_str(sec):
    """Format a duration in seconds as a short human-readable string.

    < 1 minute -> "42s"; < 1 hour -> "M:SSs"; otherwise "HH:MM:SS".
    Fractional input is truncated to whole seconds.
    """
    sec = int(sec)
    if sec < 60:
        return "%ds" % sec
    # `//` keeps explicit integer division; the old `/` only worked by
    # accident of Python 2 semantics and yields floats on Python 3.
    minutes = sec // 60
    sec = sec % 60
    if minutes < 60:
        return "%d:%02ds" % (minutes, sec)
    hours = minutes // 60
    minutes = minutes % 60
    return "%02d:%02d:%02d" % (hours, minutes, sec)
@@ def get_numeric_code(payload, default=0): if image_type == 'snapshot': num |= SNAPSHOT_IMAGE - os_type = meta.get('os_type', '') + os_type = meta.get('os_type', payload.get('os_type', '')) if os_type == 'linux': num |= LINUX_IMAGE