openstack-ansible/playbooks/os-cinder-install.yml

---
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

- name: Prepare MQ services
  hosts: cinder_all
  gather_facts: no
  user: root
  environment: "{{ deployment_environment_variables | default({}) }}"
  vars_files:
    - "defaults/{{ install_method }}_install.yml"
  tags:
    - cinder
  tasks:
    - name: Configure oslo messaging rpc vhost/user
      include: common-tasks/oslomsg-rpc-vhost-user.yml
      static: no
      vars:
        rpc_user: "{{ cinder_oslomsg_rpc_userid }}"
        rpc_password: "{{ cinder_oslomsg_rpc_password }}"
        rpc_vhost: "{{ cinder_oslomsg_rpc_vhost }}"
      when:
        - groups[cinder_oslomsg_rpc_host_group] | length > 0
      run_once: yes
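
    # The RPC and notification transports are configured independently so
    # that, for example, notifications can be routed to a different vhost
    # or messaging cluster than the one carrying RPC traffic.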
    - name: Configure oslo messaging notify vhost/user
      include: common-tasks/oslomsg-notify-vhost-user.yml
      static: no
      vars:
        notify_user: "{{ cinder_oslomsg_notify_userid }}"
        notify_password: "{{ cinder_oslomsg_notify_password }}"
        notify_vhost: "{{ cinder_oslomsg_notify_vhost }}"
      when:
        - "cinder_ceilometer_enabled | bool"
        - groups[cinder_oslomsg_notify_host_group] | length > 0
      run_once: yes
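
# The cinder services are deployed in stages: schedulers first, then the
# volume and backup services, and the API last. The exclusions in each
# host pattern ensure that a host belonging to several cinder groups is
# only deployed to once.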
- name: Install cinder scheduler services
  include: common-playbooks/cinder.yml
  vars:
    cinder_hosts: "cinder_scheduler:!cinder_api"
    cinder_serial: "{{ cinder_scheduler_serial | default(['1', '100%']) }}"

- name: Install cinder volume services
  include: common-playbooks/cinder.yml
  vars:
    cinder_hosts: "cinder_volume:!cinder_scheduler:!cinder_api"
    cinder_serial: "{{ cinder_backend_serial | default(['1', '100%']) }}"

- name: Install cinder backup services
  include: common-playbooks/cinder.yml
  vars:
    cinder_hosts: "cinder_backup:!cinder_volume:!cinder_scheduler:!cinder_api"
    cinder_serial: "{{ cinder_backend_serial | default(['1', '100%']) }}"

- name: Install cinder API services
  include: common-playbooks/cinder.yml
  vars:
    cinder_hosts: "cinder_api"
    cinder_serial: "{{ cinder_api_serial | default(['1', '100%']) }}"

# These facts are set against the deployment host to ensure that
# they are fast to access. This is done in preference to setting
# them against each target as the hostvars extraction will take
# a long time if executed against a large inventory.
- name: Refresh local facts after all software changes are made
  hosts: cinder_all
  gather_facts: no
  user: root
  environment: "{{ deployment_environment_variables | default({}) }}"
  vars_files:
    - "defaults/{{ install_method }}_install.yml"
  tags:
    - cinder
  tasks:
    - name: refresh local facts
      setup:
        filter: ansible_local
        gather_subset: "!all"
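
    # Gathering only the 'ansible_local' subset keeps this fast: it
    # re-reads the facts stored under /etc/ansible/facts.d on each host
    # without running full fact collection.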

    # This variable contains the values of the local fact set for the cinder
    # venv tag for all hosts in the 'cinder_all' host group.
    - name: Gather software version list
      set_fact:
        cinder_all_software_versions: "{{ (groups['cinder_all'] | map('extract', hostvars, ['ansible_local', 'openstack_ansible', 'cinder', 'venv_tag'])) | list }}"
      delegate_to: localhost
      run_once: yes

    # This variable outputs a boolean value which is True when
    # cinder_all_software_versions contains a list of defined
    # values. If they are not defined, it means that not all
    # hosts have their software deployed yet.
    - name: Set software deployed fact
      set_fact:
        cinder_all_software_deployed: "{{ (cinder_all_software_versions | select('defined')) | list == cinder_all_software_versions }}"
      delegate_to: localhost
      run_once: yes

    # This variable outputs a boolean when all the values in
    # cinder_all_software_versions are the same and the software
    # has been deployed to all hosts in the group.
    - name: Set software updated fact
      set_fact:
        cinder_all_software_updated: "{{ ((cinder_all_software_versions | unique) | length == 1) and (cinder_all_software_deployed | bool) }}"
      delegate_to: localhost
      run_once: yes
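
    # Illustrative (hypothetical) example: with venv tags of
    # ['16.0.0', '16.0.0', '16.0.0'] across three hosts, both booleans
    # are True; if any entry were undefined or a different tag, the
    # corresponding boolean would be False and the restart plays below
    # would be skipped.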

- name: Restart cinder agents to ensure new RPC object version is used
  hosts: cinder_backup,cinder_volume,cinder_scheduler
  gather_facts: no
  serial: "{{ cinder_backend_serial | default(['1', '100%']) }}"
  user: root
  environment: "{{ deployment_environment_variables | default({}) }}"
  vars_files:
    - "defaults/{{ install_method }}_install.yml"
  tags:
    - cinder
  tasks:
    - name: Execute cinder service reload
      include: common-tasks/restart-service.yml
      vars:
        service_name: "{{ item }}"
        service_action: "reloaded"
        service_fact: "cinder"
      with_items:
        - "cinder-scheduler"
        - "cinder-backup"
        - "cinder-volume"
      when:
        - "cinder_all_software_updated | bool"
        - "ansible_local['openstack_ansible']['cinder']['need_service_restart'] | bool"

- name: Restart cinder API to ensure new RPC object version is used
  hosts: cinder_api
  gather_facts: no
  serial: "{{ cinder_api_serial | default(['1', '100%']) }}"
  user: root
  environment: "{{ deployment_environment_variables | default({}) }}"
  vars_files:
    - "defaults/{{ install_method }}_install.yml"
  tags:
    - cinder
  tasks:
    # In order to ensure that the service restart does not
    # cause an unexpected outage, we drain the load balancer
    # back end for this container.
    - include: common-tasks/haproxy-endpoint-manage.yml
      vars:
        haproxy_state: disabled
      when:
        - "cinder_all_software_updated | bool"
        - "ansible_local['openstack_ansible']['cinder']['need_service_restart'] | bool"
        - "groups['cinder_api'] | length > 1"

    - name: Execute cinder service restart
      include: common-tasks/restart-service.yml
      vars:
        service_name: "cinder-api"
        service_action: "restarted"
        service_fact: "cinder"
      when:
        - "cinder_all_software_updated | bool"
        - "ansible_local['openstack_ansible']['cinder']['need_service_restart'] | bool"

    # Now that service restart is done, we can set
    # the load balancer back end for this container
    # to available again.
    - include: common-tasks/haproxy-endpoint-manage.yml
      vars:
        haproxy_state: enabled
      when: "groups['cinder_api'] | length > 1"
- name: Perform online database migrations
  hosts: cinder_api[0]
  gather_facts: no
  user: root
  environment: "{{ deployment_environment_variables | default({}) }}"
  vars_files:
    - "defaults/{{ install_method }}_install.yml"
  tags:
    - cinder
  tasks:
    - name: Perform online data migrations
      command: "{{ cinder_bin }}/cinder-manage db online_data_migrations"
      become: yes
      become_user: "{{ cinder_system_user_name }}"
      when:
        - "cinder_all_software_updated | bool"
        - "ansible_local['openstack_ansible']['cinder']['need_online_data_migrations'] | bool"
      changed_when: false
      register: data_migrations

    - name: Disable the online migrations requirement
      ini_file:
        dest: "/etc/ansible/facts.d/openstack_ansible.fact"
        section: cinder
        option: need_online_data_migrations
        value: False
      when:
        - data_migrations is succeeded
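
# Typical invocation (from the deployment host):
#   openstack-ansible os-cinder-install.yml
# or, as part of a wider run, using the tag defined above:
#   openstack-ansible setup-openstack.yml --tags cinder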