Changes to the config system.

This patch addresses the need for a global config.
Any module needing config access can now simply import the config module.
To facilitate this, there were many minor changes to config use throughout artifice,
as well as changes to classes that were previously passed a config on creation.
There are also some changes to constants so that the new config system can work with them more easily.

Change-Id: I6e9b4cbf0ff30683dc13e37f13334f7ed7ee7add
This commit is contained in:
adriant 2014-04-01 17:24:14 +13:00
parent bd2493be20
commit ab75581297
11 changed files with 135 additions and 89 deletions

View File

@ -1,6 +1,6 @@
import flask import flask
from flask import Flask, Blueprint from flask import Flask, Blueprint
from artifice import interface, database from artifice import interface, database, config
from artifice.sales_order import RatesFile from artifice.sales_order import RatesFile
from artifice.models import UsageEntry, SalesOrder, Tenant, billing from artifice.models import UsageEntry, SalesOrder, Tenant, billing
import sqlalchemy import sqlalchemy
@ -25,36 +25,31 @@ Session = None
app = Blueprint("main", __name__) app = Blueprint("main", __name__)
config = None
invoicer = None invoicer = None
DEFAULT_TIMEZONE = "Pacific/Auckland" DEFAULT_TIMEZONE = "Pacific/Auckland"
current_region = "Wellington" # FIXME
def get_app(conf): def get_app(conf):
actual_app = Flask(__name__) actual_app = Flask(__name__)
actual_app.register_blueprint(app, url_prefix="/") actual_app.register_blueprint(app, url_prefix="/")
global engine config.setup_config(conf)
engine = create_engine(conf["main"]["database_uri"], poolclass=NullPool)
global config global engine
config = conf engine = create_engine(config.main["database_uri"], poolclass=NullPool)
global Session global Session
Session = scoped_session(lambda: create_session(bind=engine)) Session = scoped_session(lambda: create_session(bind=engine))
global invoicer global invoicer
module, kls = config["main"]["export_provider"].split(":") module, kls = config.main["export_provider"].split(":")
# TODO: Try/except block # TODO: Try/except block
invoicer = getattr(importlib.import_module(module), kls) invoicer = getattr(importlib.import_module(module), kls)
if config["main"].get("timezone"): if config.main.get("timezone"):
global DEFAULT_TIMEZONE global DEFAULT_TIMEZONE
DEFAULT_TIMEZONE = config["main"]["timezone"] DEFAULT_TIMEZONE = config.main["timezone"]
return actual_app return actual_app
@ -154,7 +149,7 @@ def run_usage_collection():
session = Session() session = Session()
artifice = interface.Artifice(config) artifice = interface.Artifice()
db = database.Database(session) db = database.Database(session)
tenants = artifice.tenants tenants = artifice.tenants
@ -197,7 +192,7 @@ def generate_sales_order(tenant, session, end, rates):
# and will probably result in the CSV exporter being changed. # and will probably result in the CSV exporter being changed.
billable = billing.build_billable(usage, session) billable = billing.build_billable(usage, session)
session.close() session.close()
exporter = invoicer(start, end, config["export_config"], rates) exporter = invoicer(start, end, config.export_config, rates)
exporter.bill(billable) exporter.bill(billable)
exporter.close() exporter.close()
return {"id": tenant.id, return {"id": tenant.id,
@ -263,7 +258,7 @@ def run_sales_order_generation():
# Handled like this for a later move to Celery distributed workers # Handled like this for a later move to Celery distributed workers
resp = {"tenants": []} resp = {"tenants": []}
rates = RatesFile(config['export_config']) rates = RatesFile(config.export_config)
for tenant in tenant_query: for tenant in tenant_query:
resp['tenants'].append(generate_sales_order(tenant, session, end, rates)) resp['tenants'].append(generate_sales_order(tenant, session, end, rates))

20
artifice/config.py Normal file
View File

@ -0,0 +1,20 @@
# This is simply a namespace for global config storage
main = None
export_config = None
auth = None
ceilometer = None
transformers = None
def setup_config(conf):
global main
main = conf['main']
global export_config
export_config = conf['export_config']
global auth
auth = conf['auth']
global ceilometer
ceilometer = conf['ceilometer']
global transformers
transformers = conf['transformers']

View File

@ -12,15 +12,15 @@ date_format = "%Y-%m-%dT%H:%M:%S"
other_date_format = "%Y-%m-%dT%H:%M:%S.%f" other_date_format = "%Y-%m-%dT%H:%M:%S.%f"
# VM states: # VM states:
active = 1 states = {'active': 1,
building = 2 'building': 2,
paused = 3 'paused': 3,
suspended = 4 'suspended': 4,
stopped = 5 'stopped': 5,
rescued = 6 'rescued': 6,
resized = 7 'resized': 7,
soft_deleted = 8 'soft_deleted': 8,
deleted = 9 'deleted': 9,
error = 10 'error': 10,
shelved = 11 'shelved': 11,
shelved_offloaded = 12 'shelved_offloaded': 12}

View File

@ -1,9 +1,12 @@
from novaclient.v1_1 import client from novaclient.v1_1 import client
import config
def flavor_name(f_id): def flavor_name(f_id):
# TODO get from config: nova = client.Client(
nova = client.Client("admin", "openstack", "demo", config.auth['username'],
"http://localhost:5000/v2.0", config.auth['password'],
config.auth['default_tenant'],
config.auth['end_point'],
service_type="compute") service_type="compute")
return nova.flavors.get(f_id).name return nova.flavors.get(f_id).name

View File

@ -4,6 +4,7 @@ import auth
from ceilometerclient.v2.client import Client as ceilometer from ceilometerclient.v2.client import Client as ceilometer
from artifice.models import resources from artifice.models import resources
from constants import date_format from constants import date_format
import config
def add_dates(start, end): def add_dates(start, end):
@ -23,21 +24,20 @@ def add_dates(start, end):
class Artifice(object): class Artifice(object):
"""Produces billable artifacts""" """Produces billable artifacts"""
def __init__(self, config): def __init__(self):
super(Artifice, self).__init__() super(Artifice, self).__init__()
self.config = config
# This is the Keystone client connection, which provides our # This is the Keystone client connection, which provides our
# OpenStack authentication # OpenStack authentication
self.auth = auth.Keystone( self.auth = auth.Keystone(
username=config["openstack"]["username"], username=config.auth["username"],
password=config["openstack"]["password"], password=config.auth["password"],
tenant_name=config["openstack"]["default_tenant"], tenant_name=config.auth["default_tenant"],
auth_url=config["openstack"]["authentication_url"] auth_url=config.auth["end_point"]
) )
self.ceilometer = ceilometer( self.ceilometer = ceilometer(
self.config["ceilometer"]["host"], config.ceilometer["host"],
# Uses a lambda as ceilometer apparently wants # Uses a lambda as ceilometer apparently wants
# to use it as a callable? # to use it as a callable?
token=lambda: self.auth.auth_token token=lambda: self.auth.auth_token

View File

@ -1,6 +1,7 @@
import datetime import datetime
import constants import constants
import helpers import helpers
import config
class TransformerValidationError(Exception): class TransformerValidationError(Exception):
@ -42,10 +43,11 @@ class Uptime(Transformer):
required_meters = ['state', 'flavor'] required_meters = ['state', 'flavor']
def _transform_usage(self, meters, start, end): def _transform_usage(self, meters, start, end):
# this NEEDS to be moved to a config file # get tracked states from config
tracked_states = [constants.active, constants.building, tracked = config.transformers['uptime']['tracked_states']
constants.paused, constants.rescued,
constants.resized] tracked_states = {constants.states[i] for i in tracked}
usage_dict = {} usage_dict = {}
state = meters['state'] state = meters['state']

View File

@ -1,17 +1,27 @@
--- ---
ceilometer: main:
host: http://localhost:8777/ region: Wellington
invoice_object: timezone: Pacific/Auckland
export_provider: billing.csv_invoice:Csv
database_uri: postgres://artifice:123456@localhost:5432/artifice
export_config:
delimiter: ',' delimiter: ','
output_file: '%(tenant)s-%(start)s-%(end)s.csv' output_file: '%(tenant)s-%(start)s-%(end)s.csv'
output_path: ./ output_path: ./
rates: rates:
file: /etc/artifice/csv_rates.csv file: /etc/artifice/csv_rates.csv
main: auth:
export_provider: billing.csv_invoice:Csv end_point: http://localhost:35357/v2.0
database_uri: postgres://artifice:123456@localhost:5432/artifice
openstack:
authentication_url: http://localhost:35357/v2.0
default_tenant: demo default_tenant: demo
username: admin username: admin
password: openstack password: openstack
ceilometer:
host: http://localhost:8777/
transformers:
uptime:
tracked_states:
- active
- building
- paused
- rescued
- resized

View File

@ -4,11 +4,10 @@ from sqlalchemy.orm import sessionmaker, scoped_session,create_session
from sqlalchemy.pool import NullPool from sqlalchemy.pool import NullPool
from artifice.models import Resource, Tenant, UsageEntry, SalesOrder, Base from artifice.models import Resource, Tenant, UsageEntry, SalesOrder, Base
from artifice import config
from .constants import DATABASE_NAME, PG_DATABASE_URI, MY_DATABASE_URI
from .constants import config as test_config
DATABASE_NAME = "test_artifice"
PG_DATABASE_URI = "postgresql://aurynn:postgres@localhost/%s" % DATABASE_NAME
MY_DATABASE_URI = "mysql://root:password@localhost/%s" % DATABASE_NAME
def setUp(): def setUp():
subprocess.call(["/usr/bin/createdb","%s" % DATABASE_NAME]) subprocess.call(["/usr/bin/createdb","%s" % DATABASE_NAME])
@ -21,6 +20,10 @@ def setUp():
mysql_engine.dispose() mysql_engine.dispose()
pg_engine.dispose() pg_engine.dispose()
# setup test config:
config.setup_config(test_config)
def tearDown(): def tearDown():
mysql_engine = create_engine(MY_DATABASE_URI, poolclass=NullPool) mysql_engine = create_engine(MY_DATABASE_URI, poolclass=NullPool)

View File

@ -1,19 +1,17 @@
from . import PG_DATABASE_URI
DATABASE_NAME = "test_artifice"
PG_DATABASE_URI = "postgresql://aurynn:postgres@localhost/%s" % DATABASE_NAME
MY_DATABASE_URI = "mysql://root:password@localhost/%s" % DATABASE_NAME
config = { config = {
"ceilometer": {
"host": "http://localhost:8777/"
},
"main": { "main": {
"region": "Wellington",
"timezone": "Pacific/Auckland",
"export_provider": "tests.mock_exporter:MockExporter", "export_provider": "tests.mock_exporter:MockExporter",
"database_uri": PG_DATABASE_URI "database_uri": PG_DATABASE_URI
}, },
"openstack": {
"username": "admin",
"authentication_url": "http://localhost:35357/v2.0",
"password": "openstack",
"default_tenant": "demo"
},
"export_config": { "export_config": {
"output_path": "./", "output_path": "./",
"delimiter": ",", "delimiter": ",",
@ -22,7 +20,21 @@ config = {
"file": "examples/test_rates.csv" "file": "examples/test_rates.csv"
} }
}, },
"artifice": {} "auth": {
"end_point": "http://localhost:35357/v2.0",
"username": "admin",
"password": "openstack",
"default_tenant": "demo"
},
"ceilometer": {
"host": "http://localhost:8777/"
},
"transformers": {
"uptime": {
"tracked_states": ["active", "building",
"paused", "rescued", "resized"]
}
}
} }
# from test data: # from test data:

View File

@ -1,8 +1,8 @@
import mock import mock
from artifice import interface, models from artifice import interface, models, config
from artifice.models import billing from artifice.models import billing
from .data_samples import RESOURCES, MAPPINGS from .data_samples import RESOURCES, MAPPINGS
from .constants import config, TENANTS, AUTH_TOKEN from .constants import TENANTS, AUTH_TOKEN
from datetime import timedelta from datetime import timedelta
import json import json
@ -21,14 +21,14 @@ def get_usage(sqlmock, Keystone):
return data return data
interface.Meter.get_meter = get_meter interface.Meter.get_meter = get_meter
artifice = interface.Artifice(config) artifice = interface.Artifice()
artifice.auth.tenants.list.return_value = TENANTS artifice.auth.tenants.list.return_value = TENANTS
Keystone.assert_called_with( Keystone.assert_called_with(
username=config["openstack"]["username"], username=config.auth["username"],
password=config["openstack"]["password"], password=config.auth["password"],
tenant_name=config["openstack"]["default_tenant"], tenant_name=config.auth["default_tenant"],
auth_url=config["openstack"]["authentication_url"] auth_url=config.auth["end_point"]
) )
tenants = None tenants = None
tenants = artifice.tenants tenants = artifice.tenants

View File

@ -1,6 +1,7 @@
import artifice.transformers import artifice.transformers
from artifice.transformers import TransformerValidationError from artifice.transformers import TransformerValidationError
import artifice.constants as constants from artifice import constants
from artifice.constants import states
import unittest import unittest
import mock import mock
import datetime import datetime
@ -78,8 +79,8 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.t0, 'counter_volume': constants.active}, {'timestamp': testdata.t0, 'counter_volume': states['active']},
{'timestamp': testdata.t1, 'counter_volume': constants.active} {'timestamp': testdata.t1, 'counter_volume': states['active']}
]), ]),
} }
@ -99,8 +100,8 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.t0, 'counter_volume': constants.stopped}, {'timestamp': testdata.t0, 'counter_volume': states['stopped']},
{'timestamp': testdata.t1, 'counter_volume': constants.stopped} {'timestamp': testdata.t1, 'counter_volume': states['stopped']}
]), ]),
} }
@ -119,9 +120,9 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.t0, 'counter_volume': constants.active}, {'timestamp': testdata.t0, 'counter_volume': states['active']},
{'timestamp': testdata.t0_30, 'counter_volume': constants.stopped}, {'timestamp': testdata.t0_30, 'counter_volume': states['stopped']},
{'timestamp': testdata.t1, 'counter_volume': constants.stopped} {'timestamp': testdata.t1, 'counter_volume': states['stopped']}
]), ]),
} }
@ -141,9 +142,9 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor2}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor2},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.t0, 'counter_volume': constants.active}, {'timestamp': testdata.t0, 'counter_volume': states['active']},
{'timestamp': testdata.t0_30, 'counter_volume': constants.active}, {'timestamp': testdata.t0_30, 'counter_volume': states['active']},
{'timestamp': testdata.t1, 'counter_volume': constants.active} {'timestamp': testdata.t1, 'counter_volume': states['active']}
]), ]),
} }
@ -162,8 +163,8 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.t0_10, 'counter_volume': constants.active}, {'timestamp': testdata.t0_10, 'counter_volume': states['active']},
{'timestamp': testdata.t1, 'counter_volume': constants.active}, {'timestamp': testdata.t1, 'counter_volume': states['active']},
]), ]),
} }
@ -186,9 +187,9 @@ class UptimeTransformerTests(unittest.TestCase):
{'timestamp': testdata.t1, 'counter_volume': testdata.flavor}, {'timestamp': testdata.t1, 'counter_volume': testdata.flavor},
]), ]),
'state': TestMeter([ 'state': TestMeter([
{'timestamp': testdata.tpre, 'counter_volume': constants.active}, {'timestamp': testdata.tpre, 'counter_volume': states['active']},
{'timestamp': testdata.t0_10, 'counter_volume': constants.active}, {'timestamp': testdata.t0_10, 'counter_volume': states['active']},
{'timestamp': testdata.t1, 'counter_volume': constants.active}, {'timestamp': testdata.t1, 'counter_volume': states['active']},
]), ]),
} }