Merge "use configured max_retries and retry_interval for database connection"
commit fea37703be
@@ -77,10 +77,13 @@ class Connection(base.Connection):
     )
 
     def __init__(self, url):
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        # Set max_retries to 0, since oslo.db in certain cases may attempt
+        # to retry making the db connection retried max_retries ^ 2 times
+        # in failure case and db reconnection has already been implemented
+        # in storage.__init__.get_connection_from_config function
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)
 
     def upgrade(self):
        # NOTE(gordc): to minimise memory, only import migration when needed
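The new __init__ body builds a private copy of the [database] options and forces max_retries to 0 in that copy only, so oslo.db's EngineFacade does not add its own reconnect loop on top of the retry handling in storage.__init__.get_connection_from_config. A minimal sketch of that local-copy pattern, using a plain dict and a stand-in facade class rather than cfg.CONF and the real oslo.db API:

# Illustrative stand-ins only: shared_db_options and FakeEngineFacade are
# not part of ceilometer or oslo.db.
shared_db_options = {
    'connection': 'sqlite://',
    'max_retries': 10,      # operator-configured value, still used elsewhere
    'retry_interval': 10,
}


class FakeEngineFacade(object):
    """Records the keyword options it receives, like EngineFacade would."""
    def __init__(self, url, **kwargs):
        self.url = url
        self.options = kwargs


def make_facade(url):
    options = dict(shared_db_options)   # copy, do not mutate the shared dict
    options['max_retries'] = 0          # the caller handles reconnection
    return FakeEngineFacade(url, **options)


facade = make_facade('sqlite://')
assert facade.options['max_retries'] == 0
assert shared_db_options['max_retries'] == 10   # shared value untouched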
@@ -85,10 +85,13 @@ class Connection(base.Connection):
     )
 
     def __init__(self, url):
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        # Set max_retries to 0, since oslo.db in certain cases may attempt
+        # to retry making the db connection retried max_retries ^ 2 times
+        # in failure case and db reconnection has already been implemented
+        # in storage.__init__.get_connection_from_config function
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)
 
     def upgrade(self):
        # NOTE(gordc): to minimise memory, only import migration when needed
@@ -88,21 +88,24 @@ class StorageBadAggregate(Exception):
     code = 400
 
 
-# Convert retry_interval secs to msecs for retry decorator
-@retrying.retry(wait_fixed=cfg.CONF.database.retry_interval * 1000,
-                stop_max_attempt_number=cfg.CONF.database.max_retries
-                if cfg.CONF.database.max_retries >= 0
-                else None)
 def get_connection_from_config(conf, purpose=None):
-    if conf.database_connection:
-        conf.set_override('connection', conf.database_connection,
-                          group='database')
-    namespace = 'ceilometer.metering.storage'
-    url = conf.database.connection
-    if purpose:
-        namespace = 'ceilometer.%s.storage' % purpose
-        url = getattr(conf.database, '%s_connection' % purpose) or url
-    return get_connection(url, namespace)
+    retries = conf.database.max_retries
+
+    # Convert retry_interval secs to msecs for retry decorator
+    @retrying.retry(wait_fixed=conf.database.retry_interval * 1000,
+                    stop_max_attempt_number=retries if retries >= 0 else None)
+    def _inner():
+        if conf.database_connection:
+            conf.set_override('connection', conf.database_connection,
+                              group='database')
+        namespace = 'ceilometer.metering.storage'
+        url = conf.database.connection
+        if purpose:
+            namespace = 'ceilometer.%s.storage' % purpose
+            url = getattr(conf.database, '%s_connection' % purpose) or url
+        return get_connection(url, namespace)
+
+    return _inner()
 
 
 def get_connection(url, namespace):
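The important part of this hunk is where the decorator arguments are evaluated. With @retrying.retry applied at module level, cfg.CONF.database.retry_interval and max_retries are read at import time, typically before the configuration files have been parsed, so the defaults get baked into the decorator. Wrapping an inner function inside get_connection_from_config defers that read until the function is called with a loaded config. A rough, self-contained illustration of the call-time pattern (FakeConf, connect_with_retries and the attempt counter are invented for the example; only the retrying.retry call with wait_fixed/stop_max_attempt_number mirrors the real code):

import retrying


class FakeConf(object):
    # Stand-in for conf.database; values an operator might configure.
    retry_interval = 1      # seconds
    max_retries = 5


conf = FakeConf()
attempts = {'count': 0}


def connect_with_retries():
    retries = conf.max_retries

    # Decorator arguments are evaluated here, at call time, so they pick up
    # whatever the configuration holds when the connection is attempted.
    @retrying.retry(wait_fixed=conf.retry_interval * 1000,
                    stop_max_attempt_number=retries if retries >= 0 else None)
    def _inner():
        attempts['count'] += 1
        if attempts['count'] < 3:
            raise IOError('db not reachable yet')
        return 'connected after %d attempts' % attempts['count']

    return _inner()


print(connect_with_retries())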
@@ -221,11 +221,9 @@ class Connection(base.Connection):
         # to retry making the db connection retried max_retries ^ 2 times
         # in failure case and db reconnection has already been implemented
         # in storage.__init__.get_connection_from_config function
-        cfg.CONF.set_override('max_retries', 0, group='database')
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)
 
     def upgrade(self):
        # NOTE(gordc): to minimise memory, only import migration when needed
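This driver previously disabled oslo.db's retries with cfg.CONF.set_override, which changes the value of [database]/max_retries for the whole process, including the retry wrapper above that is supposed to honour the operator's setting. Copying the options into a local dict keeps the override private to the facade. A small sketch of the difference using oslo.config directly (the option registration here is illustrative, not ceilometer's real option setup):

from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_opts([cfg.IntOpt('max_retries', default=10),
                    cfg.IntOpt('retry_interval', default=10)],
                   group='database')
CONF([])   # parse an empty command line so the defaults are in effect

# New approach: override only a local copy; the shared value stays intact.
options = dict(CONF.database.items())
options['max_retries'] = 0
assert CONF.database.max_retries == 10

# Old approach: set_override mutates the shared ConfigOpts, so every later
# reader of [database]/max_retries in this process now sees 0.
CONF.set_override('max_retries', 0, group='database')
assert CONF.database.max_retries == 0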