use configured max_retries and retry_interval for database connection
We use the retrying decorator for the database connection, but because decoration happens at import time the decorator can only see the default values of max_retries and retry_interval. This patch moves the decorated code into an inner function, so retrying picks up the configured values at run time.

We also set max_retries to 0 to work around a rare oslo.db corner case, but overriding it globally is not the right behavior. For example, the collector connects to both the metering and the event databases; if one uses sqlalchemy and the other MongoDB, then when the MongoDB connection is lost at serving time max_retries is already zero, so safe_mongo_call does not work as expected. The right fix for the sqlalchemy problem is to enforce max_retries only in the options passed to the connection's __init__ method.

Note: this patch also applies the enforcement to the alarm and event sqlalchemy implementations.

ref: https://review.openstack.org/#/c/127128/9

Change-Id: Ia8bec410c1cabe850c8d033daef5a5a0ddae8954
Closes-Bug: #1421485
commit bf221170da
parent 21bde474f4
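For context only (not part of the patch), below is a minimal standalone sketch of the import-time binding problem the commit message describes: decorator arguments are evaluated once when the module is imported, so a module-level @retrying.retry captures whatever value exists at that point, while building the decorated function inside the caller defers the lookup to run time. MAX_RETRIES is a purely illustrative stand-in for cfg.CONF.database.max_retries; the function names are hypothetical.

import retrying

# Illustrative stand-in for cfg.CONF.database.max_retries; at import time the
# option still holds its default because the config file has not been read yet.
MAX_RETRIES = 1


# Decorator arguments are bound here, at import time, so this function always
# stops after a single attempt, whatever value is configured later.
@retrying.retry(stop_max_attempt_number=MAX_RETRIES, wait_fixed=10)
def connect_import_time():
    raise IOError("db unreachable")


def connect_run_time():
    # Reading the value inside the function picks up the configured value at
    # call time; this mirrors the pattern the patch applies in
    # get_connection_from_config() with conf.database.max_retries.
    retries = MAX_RETRIES

    @retrying.retry(stop_max_attempt_number=retries, wait_fixed=10)
    def _inner():
        raise IOError("db unreachable")

    return _inner()


MAX_RETRIES = 5  # e.g. the operator's setting, loaded after import
# connect_import_time() still gives up after one attempt;
# connect_run_time() now retries up to five times.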
@@ -77,10 +77,13 @@ class Connection(base.Connection):
     )

     def __init__(self, url):
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        # Set max_retries to 0, since oslo.db in certain cases may attempt
+        # to retry making the db connection retried max_retries ^ 2 times
+        # in failure case and db reconnection has already been implemented
+        # in storage.__init__.get_connection_from_config function
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)

     def upgrade(self):
         # NOTE(gordc): to minimise memory, only import migration when needed
@@ -85,10 +85,13 @@ class Connection(base.Connection):
     )

     def __init__(self, url):
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        # Set max_retries to 0, since oslo.db in certain cases may attempt
+        # to retry making the db connection retried max_retries ^ 2 times
+        # in failure case and db reconnection has already been implemented
+        # in storage.__init__.get_connection_from_config function
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)

     def upgrade(self):
         # NOTE(gordc): to minimise memory, only import migration when needed
@@ -88,21 +88,24 @@ class StorageBadAggregate(Exception):
     code = 400


-# Convert retry_interval secs to msecs for retry decorator
-@retrying.retry(wait_fixed=cfg.CONF.database.retry_interval * 1000,
-                stop_max_attempt_number=cfg.CONF.database.max_retries
-                if cfg.CONF.database.max_retries >= 0
-                else None)
 def get_connection_from_config(conf, purpose=None):
-    if conf.database_connection:
-        conf.set_override('connection', conf.database_connection,
-                          group='database')
-    namespace = 'ceilometer.metering.storage'
-    url = conf.database.connection
-    if purpose:
-        namespace = 'ceilometer.%s.storage' % purpose
-        url = getattr(conf.database, '%s_connection' % purpose) or url
-    return get_connection(url, namespace)
+    retries = conf.database.max_retries
+
+    # Convert retry_interval secs to msecs for retry decorator
+    @retrying.retry(wait_fixed=conf.database.retry_interval * 1000,
+                    stop_max_attempt_number=retries if retries >= 0 else None)
+    def _inner():
+        if conf.database_connection:
+            conf.set_override('connection', conf.database_connection,
+                              group='database')
+        namespace = 'ceilometer.metering.storage'
+        url = conf.database.connection
+        if purpose:
+            namespace = 'ceilometer.%s.storage' % purpose
+            url = getattr(conf.database, '%s_connection' % purpose) or url
+        return get_connection(url, namespace)
+
+    return _inner()


 def get_connection(url, namespace):
@@ -221,11 +221,9 @@ class Connection(base.Connection):
         # to retry making the db connection retried max_retries ^ 2 times
         # in failure case and db reconnection has already been implemented
         # in storage.__init__.get_connection_from_config function
-        cfg.CONF.set_override('max_retries', 0, group='database')
-        self._engine_facade = db_session.EngineFacade(
-            url,
-            **dict(cfg.CONF.database.items())
-        )
+        options = dict(cfg.CONF.database.items())
+        options['max_retries'] = 0
+        self._engine_facade = db_session.EngineFacade(url, **options)

     def upgrade(self):
         # NOTE(gordc): to minimise memory, only import migration when needed
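One note on the hunk above: the old code silenced oslo.db's own retry loop by mutating the global option with cfg.CONF.set_override('max_retries', 0, group='database'), which is what starved safe_mongo_call of retries when another backend in the same process read that option later. A rough, self-contained sketch of the difference, using a locally registered stand-in option rather than the real oslo.db one (the conf object, option defaults, and prints are illustrative only):

from oslo_config import cfg

conf = cfg.ConfigOpts()
# Illustrative stand-in for oslo.db's 'max_retries' option.
conf.register_opts([cfg.IntOpt('max_retries', default=10)], group='database')

# Old behaviour: a global override leaks to every consumer of the option...
conf.set_override('max_retries', 0, group='database')
print(conf.database.max_retries)   # 0 -- a MongoDB backend reading this would
                                   # give up reconnecting immediately
conf.clear_override('max_retries', group='database')

# New behaviour: copy the options and adjust only what is handed to oslo.db's
# EngineFacade, leaving the global value untouched.
options = dict(conf.database.items())
options['max_retries'] = 0
print(conf.database.max_retries)   # still 10 for other backends
print(options['max_retries'])      # 0 only for the sqlalchemy engine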