Improve way of cache backend selection
At the moment we don't provide any option other than using the memcached backend. With that we also hardcode the list of packages that should be installed inside the virtualenv for the selected backend. Adding the bmemcached requirement to oslo_cache.memcache_pool [1] gives us the opportunity to refactor this bit of deployment and allows more flexibility in backend selection and requirements installation for it. [1] https://review.opendev.org/c/openstack/oslo.cache/+/854628 Depends-On: https://review.opendev.org/c/openstack/openstack-ansible/+/858981/4 Change-Id: I2810208301fb31eeeabf31e4b38add7f8aa3e00b
This commit is contained in:
parent
3b4fc2e9aa
commit
0a24c61e3e
@ -357,6 +357,8 @@ keystone_memcached_servers: "{{ memcached_servers }}"
|
||||
# The cache_servers default backend is memcached, so this variable
|
||||
# should point to a list of memcached servers.
|
||||
# If empty, caching is disabled.
|
||||
keystone_cache_backend: "{{ openstack_cache_backend | default('oslo_cache.memcache_pool') }}"
|
||||
keystone_cache_backend_map: "{{ openstack_cache_backend_map | default(_keystone_cache_backend_map) }}"
|
||||
keystone_cache_servers: "{{ keystone_memcached_servers.split(',') }}"
|
||||
|
||||
## LDAP Section
|
||||
@ -572,8 +574,7 @@ keystone_pip_packages:
|
||||
- ldappool
|
||||
- osprofiler
|
||||
- PyMySQL
|
||||
- pymemcache
|
||||
- python-memcached
|
||||
- "{{ _keystone_cache_backend_package }}"
|
||||
- python-openstackclient
|
||||
- systemd-python
|
||||
- uWSGI
|
||||
|
10
releasenotes/notes/cache_backend-3ac67f78fa111445.yaml
Normal file
10
releasenotes/notes/cache_backend-3ac67f78fa111445.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
---
|
||||
features:
|
||||
- |
|
||||
New variables have been added to manage used cache backends:
|
||||
|
||||
* ``openstack_cache_backend``: defines the driver that will be used for
|
||||
caching.
|
||||
Default: oslo_cache.memcache_pool
|
||||
* ``openstack_cache_backend_map``: maps selected backend to
|
||||
the oslo driver that should be installed and configured for it.
|
@ -43,13 +43,15 @@ transport_url = {{ keystone_oslomsg_notify_transport }}://{% for host in keyston
|
||||
|
||||
{% if keystone_cache_servers | length > 0 %}
|
||||
[cache]
|
||||
backend = oslo_cache.memcache_pool
|
||||
backend = {{ keystone_cache_backend }}
|
||||
{% if keystone_cache_backend in keystone_cache_backend_map['dogpile'] %}
|
||||
# FIXME(lbragstad): Some strange behaviors have been reported when using
|
||||
# multiple memcached instances with backend_argument. This has been documented
|
||||
# in https://bugs.launchpad.net/oslo.cache/+bug/1743036
|
||||
# For the time being, memcache_servers works with a single memcached instance
|
||||
# and multiple instances.
|
||||
memcache_servers = {{ keystone_cache_servers | join(',') }}
|
||||
{% endif %}
|
||||
config_prefix = cache.keystone
|
||||
enabled = true
|
||||
{% endif %}
|
||||
|
@ -37,3 +37,22 @@ uwsgi_keystone_services: |-
|
||||
|
||||
_keystone_is_first_play_host: "{{ (keystone_services['keystone-wsgi-public']['group'] in group_names and inventory_hostname == ((groups[keystone_services['keystone-wsgi-public']['group']] | intersect(ansible_play_hosts)) | list)[0]) | bool }}"
|
||||
_keystone_is_last_play_host: "{{ (keystone_services['keystone-wsgi-public']['group'] in group_names and inventory_hostname == ((groups[keystone_services['keystone-wsgi-public']['group']] | intersect(ansible_play_hosts)) | list)[-1]) | bool }}"
|
||||
_keystone_cache_backend_map:
|
||||
dogpile:
|
||||
- oslo_cache.memcache_pool
|
||||
- dogpile.cache.pymemcache
|
||||
- dogpile.cache.memcached
|
||||
- dogpile.cache.bmemcached
|
||||
mongo:
|
||||
- oslo_cache.mongo
|
||||
etcd3gw:
|
||||
- oslo_cache.etcd3gw
|
||||
|
||||
_keystone_cache_backend_package: |-
|
||||
{% set oslo = namespace(backend='dogpile') %}
|
||||
{% for key, value in _keystone_cache_backend_map.items() %}
|
||||
{% if keystone_cache_backend in value %}
|
||||
{% set oslo.backend = key %}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
oslo.cache[{{ oslo.backend }}]
|
||||
|
Loading…
Reference in New Issue
Block a user