kolla-ansible/tests/run.yml
Commit d8733b290e by Mark Goddard: CI: Use upper constraints when installing clients
Clients are starting to release versions that don't support Python 2.
The ironic scenario is currently failing on stable branches for this
reason.

Use upper constraints to avoid installing these new versions on stable
branches.

Change-Id: I4f91b53cbf2297d70da4b54d6c402c1427aacdd9
2020-02-25 11:21:06 +00:00

---
- hosts: all
any_errors_fatal: true
tasks:
# NOTE(yoctozepto): ensure we pick up fact changes from pre
- name: Refresh facts
setup:
# NOTE(yoctozepto): setting vars as facts for all to have them around in all the plays
- name: set facts for commonly used variables
vars:
# NOTE(yoctozepto): needed here to use in other facts too
openstack_core_enabled: "{{ scenario not in ['bifrost', 'mariadb'] }}"
set_fact:
kolla_inventory_path: "/etc/kolla/inventory"
logs_dir: "/tmp/logs"
kolla_ansible_src_dir: "{{ ansible_env.PWD }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
kolla_ansible_local_src_dir: "{{ zuul.executor.work_root }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
infra_dockerhub_mirror: "http://{{ zuul_site_mirror_fqdn }}:8082/"
ceph_ansible_src_dir: "{{ ansible_env.PWD }}/src/github.com/ceph/ceph-ansible"
need_build_image: false
build_image_tag: "change_{{ zuul.change | default('none') }}"
openstack_core_enabled: "{{ openstack_core_enabled }}"
openstack_core_tested: "{{ scenario in ['core', 'ceph-ansible', 'zun', 'cells', 'swift', 'linuxbridge'] }}"
dashboard_enabled: "{{ openstack_core_enabled }}"
# TODO(mgoddard): Remove when previous_release is ussuri.
playbook_python_version: "{{ '2' if is_upgrade and previous_release == 'train' else '3' }}"
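    # Swift typically stores data on XFS-backed filesystems, so the xfsprogs
    # package is needed when preparing the test disks below.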
- name: Install xfsprogs package for Swift filesystems
become: true
package:
name: xfsprogs
when: scenario == 'swift'
- name: Prepare disks for a storage service
script: "setup_disks.sh {{ disk_type }}"
when: scenario in ['ceph-ansible', 'zun', 'swift']
become: true
vars:
disk_type: "{{ ceph_storetype if scenario in ['ceph-ansible'] else scenario }}"
ceph_storetype: "{{ hostvars[inventory_hostname].get('ceph_osd_storetype') }}"
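# The setup and deployment steps below run on the primary node only; later
# plays fan back out to all hosts for sanity checks.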
- hosts: primary
any_errors_fatal: true
tasks:
    - name: detect whether images need to be built
set_fact:
need_build_image: true
when:
- item.project.short_name == "kolla"
- item.branch == zuul.branch
with_items: "{{ zuul['items'] }}"
- name: enable building images on AArch64
set_fact:
need_build_image: true
when:
- ansible_architecture == 'aarch64'
# NOTE(yoctozepto): required to template template_overrides.j2 for Zuul
- name: Include kolla Zuul vars if building new images
include_vars:
file: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/vars/zuul.yml"
      when: need_build_image  # kolla sources are only available (and used) when building images
# NOTE(mgoddard): This only affects the remote copy of the repo, not the
# one on the executor.
    - name: check out the previous kolla-ansible branch
shell:
cmd: |
git checkout stable/{{ previous_release | lower }}
echo "kolla-ansible checked out to:"
git log --pretty=oneline -1
chdir: "{{ kolla_ansible_src_dir }}"
when: is_upgrade
- name: ensure /etc/kolla exists
file:
path: "/etc/kolla"
state: "directory"
mode: 0777
become: true
# NOTE(yoctozepto): required to customize kolla to use local mirrors
- name: Template template_overrides.j2
template:
src: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/templates/template_overrides.j2"
dest: /etc/kolla/template_overrides.j2
      when: need_build_image  # kolla sources are only available (and used) when building images
- name: Ensure /etc/docker exists
file:
path: "/etc/docker"
state: directory
become: true
# TODO(mnasiadka): remove cinder and glance in Victoria
- name: Ensure configuration directories exist
file:
path: "/etc/kolla/config/{{ item }}"
state: directory
loop:
- cinder
- glance
- neutron
- nova
- bifrost
- swift
- name: generate configuration files
template:
src: "{{ kolla_ansible_local_src_dir }}/{{ item.src }}"
dest: "{{ item.dest }}"
become: "{{ item.become | default(false) }}"
vars:
is_previous_release: "{{ is_upgrade }}"
with_items:
# Ansible inventory
- src: "tests/templates/inventory.j2"
dest: "{{ kolla_inventory_path }}"
# globals.yml
- src: "tests/templates/globals-default.j2"
dest: /etc/kolla/globals.yml
# nova-compute.conf
- src: "tests/templates/nova-compute-overrides.j2"
dest: /etc/kolla/config/nova/nova-compute.conf
when: "{{ openstack_core_enabled }}"
# neutron.conf
- src: "tests/templates/neutron-server-overrides.j2"
dest: /etc/kolla/config/neutron.conf
when: "{{ openstack_core_enabled }}"
# bifrost/dib.yml
- src: "tests/templates/bifrost-dib-overrides.j2"
dest: /etc/kolla/config/bifrost/dib.yml
when: "{{ scenario == 'bifrost' }}"
# ironic.conf
- src: "tests/templates/ironic-overrides.j2"
dest: /etc/kolla/config/ironic.conf
when: "{{ scenario == 'ironic' }}"
# Ceph-Ansible inventory
- src: "tests/templates/ceph-inventory.j2"
dest: /etc/kolla/ceph-inventory
when: "{{ scenario == 'ceph-ansible' }}"
# ceph-ansible.yml
- src: "tests/templates/ceph-ansible.j2"
dest: /etc/kolla/ceph-ansible.yml
when: "{{ scenario == 'ceph-ansible' }}"
### TODO(mnasiadka): Remove following ceph-ansible block in Victoria
# external ceph: glance-api.conf on upgrade from Train
- src: "tests/templates/ceph-ansible-upgrade/glance-api.conf.j2"
dest: /etc/kolla/config/glance/glance-api.conf
when: "{{ scenario == 'ceph-ansible' and is_previous_release and previous_release == 'train' }}"
# external ceph: cinder-volume.conf on upgrade from Train
- src: "tests/templates/ceph-ansible-upgrade/cinder-volume.conf.j2"
dest: /etc/kolla/config/cinder/cinder-volume.conf
when: "{{ scenario == 'ceph-ansible' and is_previous_release and previous_release == 'train' }}"
# external ceph: cinder-backup.conf on upgrade from Train
- src: "tests/templates/ceph-ansible-upgrade/cinder-backup.conf.j2"
dest: /etc/kolla/config/cinder/cinder-backup.conf
when: "{{ scenario == 'ceph-ansible' and is_previous_release and previous_release == 'train' }}"
### TODO(mnasiadka): End of remove block for ceph-ansible in Victoria
when: item.when | default(true)
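    # NOTE: the "when" keys inside the with_items entries above are plain item
    # data, evaluated through the task-level "item.when | default(true)"
    # condition, which is why they are written as templated strings.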
- block:
- name: ensure ironic config directory exists
file:
path: /etc/kolla/config/ironic
state: directory
mode: 0777
- name: download Ironic Python Agent (IPA) images
get_url:
url: "https://tarballs.openstack.org/ironic-python-agent/tinyipa/files/{{ item.src }}"
dest: "/etc/kolla/config/ironic/{{ item.dest }}"
with_items:
- src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.gz"
dest: ironic-agent.initramfs
- src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.vmlinuz"
dest: ironic-agent.kernel
when: scenario == "ironic"
- name: ensure /etc/ansible exists
file:
path: /etc/ansible
state: directory
become: true
- name: install kolla-ansible and dependencies
vars:
# Test latest ansible version on Ubuntu, minimum supported on others.
ansible_version_constraint: "{{ '==2.9.*' if base_distro == 'ubuntu' else '==2.8.*' }}"
pip:
name:
- "{{ kolla_ansible_src_dir }}"
- "ansible{{ ansible_version_constraint }}"
- "ara<1.0.0"
# TODO(mgoddard): Always use pip3 when previous_release is ussuri.
executable: "pip{{ playbook_python_version }}"
become: true
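    # ARA (ARA Records Ansible) captures per-task results from the
    # kolla-ansible runs; its callback plugin path is discovered below and
    # wired into ansible.cfg so results can be collected with the job logs.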
# TODO(mgoddard): Always use python3 when previous_release is ussuri.
- name: get ARA callback plugin path
command: "python{{ playbook_python_version }} -m ara.setup.callback_plugins"
changed_when: false
register: ara_callback_plugins
- name: template ansible.cfg
template:
src: "{{ kolla_ansible_local_src_dir }}/tests/templates/ansible.cfg.j2"
dest: /etc/ansible/ansible.cfg
become: true
- name: copy passwords.yml file
copy:
src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
dest: /etc/kolla/passwords.yml
remote_src: true
- name: generate passwords
command: kolla-genpwd
- name: slurp kolla passwords
slurp:
src: /etc/kolla/passwords.yml
register: passwords_yml
- name: write out kolla SSH private key
copy:
content: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.private_key }}"
dest: ~/.ssh/id_rsa_kolla
mode: 0600
- name: authorise kolla public key for zuul user
authorized_key:
user: "{{ ansible_env.USER }}"
key: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.public_key }}"
# Delegate to each host in turn. If more tasks require execution on all
# hosts in future, break out into a separate play.
with_inventory_hostnames:
- all
delegate_to: "{{ item }}"
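    # The kolla SSH keypair from passwords.yml is written out and authorised on
    # every host so that test steps needing node-to-node SSH access can use it.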
- name: Record the running state of the environment as seen by the setup module
shell:
cmd: ansible all -i {{ kolla_inventory_path }} -e ansible_user={{ ansible_user }} -m setup > /tmp/logs/ansible/initial-setup
- name: Set facts for actions
set_fact:
# NOTE(yoctozepto): no support for upgrades for now
docker_image_tag: "{{ build_image_tag if need_build_image else zuul.branch | basename }}"
docker_image_prefix: "{{ 'primary:4000/lokolla/' if need_build_image else 'kolla/' }}"
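    # When images are built locally they are pulled from a registry on the
    # primary node (primary:4000); otherwise published kolla/ images matching
    # the branch are used.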
# NOTE(mgoddard): We are using the script module here and later to ensure
# we use the local copy of these scripts, rather than the one on the remote
# host, which could be checked out to a previous release (in an upgrade
# job).
- name: Run setup_gate.sh script
script:
cmd: setup_gate.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
BASE_DISTRO: "{{ base_distro }}"
BASE_DISTRO_MAJOR_VERSION: "{{ ansible_distribution_major_version }}"
INSTALL_TYPE: "{{ install_type }}"
BUILD_IMAGE: "{{ need_build_image }}"
TAG: "{{ build_image_tag }}"
KOLLA_SRC_DIR: "{{ ansible_env.HOME }}/src/opendev.org/openstack/kolla"
SCENARIO: "{{ scenario }}"
UPPER_CONSTRAINTS: "{{ ansible_env.HOME }}/src/opendev.org/openstack/requirements/upper-constraints.txt"
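    # UPPER_CONSTRAINTS points setup_gate.sh at the requirements repository's
    # upper-constraints.txt, so stable branches do not install client releases
    # that have dropped Python 2 support (see the commit message above).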
- name: Run init-swift.sh script
script:
cmd: init-swift.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
KOLLA_SWIFT_BASE_IMAGE: "{{ docker_image_prefix }}{{ base_distro }}-{{ install_type }}-swift-base:{{ docker_image_tag }}"
# NOTE(yoctozepto): no IPv6 for now
STORAGE_NODES: "{{ groups['all'] | map('extract', hostvars,
['ansible_'+api_interface_name, 'ipv4', 'address'])
| join(' ') }}"
when: scenario == 'swift'
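    # init-swift.sh builds the Swift account/container/object rings, using the
    # swift-base image, before the services are deployed.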
# At this point we have generated all necessary configuration, and are
# ready to deploy the control plane services. Control flow now depends on
# the scenario being exercised.
# Deploy ceph-ansible on ceph-ansible scenarios
- block:
- name: Run deploy-ceph-ansible.sh script
script:
cmd: deploy-ceph-ansible.sh
executable: /bin/bash
chdir: "{{ ceph_ansible_src_dir }}"
environment:
BASE_DISTRO: "{{ base_distro }}"
BASE_DISTRO_MAJOR_VERSION: "{{ ansible_distribution_major_version }}"
- name: Ensure required kolla config directories exist
file:
state: directory
name: "/etc/kolla/config/{{ item.name }}"
mode: 0777
with_items: "{{ ceph_ansible_services }}"
- name: copy ceph.conf to enabled services
copy:
src: "/etc/ceph/ceph.conf"
dest: "/etc/kolla/config/{{ item.name }}/ceph.conf"
remote_src: True
with_items: "{{ ceph_ansible_services }}"
- name: copy keyrings to enabled services
copy:
remote_src: True
src: "/etc/ceph/{{ item.keyring }}"
dest: "/etc/kolla/config/{{ item.name }}/{{ item.keyring }}"
with_items: "{{ ceph_ansible_services }}"
become: True
vars:
ceph_ansible_services:
- { name: 'cinder/cinder-volume', keyring: "ceph.client.cinder.keyring" }
- { name: 'cinder/cinder-backup', keyring: "ceph.client.cinder.keyring" }
- { name: 'cinder/cinder-backup', keyring: "ceph.client.cinder-backup.keyring" }
- { name: 'glance', keyring: "ceph.client.glance.keyring" }
- { name: 'nova', keyring: "ceph.client.nova.keyring" }
- { name: 'nova', keyring: "ceph.client.cinder.keyring" }
when: scenario == "ceph-ansible"
# Deploy control plane. For upgrade jobs this is the previous release.
- block:
- name: Run deploy.sh script
script:
cmd: deploy.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
TLS_ENABLED: "{{ tls_enabled }}"
# NOTE(yoctozepto): this is nice as the first step after the deployment
# because it waits for the services to stabilize well enough so that
# the dashboard is able to show the login prompt
- name: Run test-dashboard.sh script
script:
cmd: test-dashboard.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
TLS_ENABLED: "{{ tls_enabled }}"
when: dashboard_enabled
- name: Run init-core-openstack.sh script
script:
cmd: init-core-openstack.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: openstack_core_tested or scenario in ['ironic', 'scenario_nfv', 'zun']
- name: Run test-core-openstack.sh script
script:
cmd: test-core-openstack.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
SCENARIO: "{{ scenario }}"
when: openstack_core_tested
- name: Run test-zun.sh script
script:
cmd: test-zun.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == 'zun'
- name: Run test-swift.sh script
script:
cmd: test-swift.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == 'swift'
- name: Run test-scenario-nfv.sh script
script:
cmd: test-scenario-nfv.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "scenario_nfv"
- name: Run test-ironic.sh script
script:
cmd: test-ironic.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "ironic"
- name: Run test-masakari.sh script
script:
cmd: test-masakari.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "masakari"
- name: Run test-mariadb.sh script
script:
cmd: test-mariadb.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "mariadb"
when: scenario != "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
any_errors_fatal: true
tasks:
- name: Pre-upgrade sanity checks
block:
- name: Run pre-upgrade check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run pre-upgrade check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
# Using script rather than shell here because check-logs.sh does not
# exist in Stein branch.
- name: Run check-logs.sh script
script:
cmd: check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: is_upgrade
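# For upgrade jobs, the checks above validate the previous-release deployment
# before the configuration is regenerated and the control plane is upgraded to
# the code under test in the play below.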
- hosts: primary
any_errors_fatal: true
tasks:
# Upgrade: update config.
- block:
# NOTE(mgoddard): This only affects the remote copy of the repo, not the
# one on the executor.
        - name: check out the current kolla-ansible branch
shell:
cmd: |
git checkout {{ zuul.branch }}
echo "kolla-ansible checked out to:"
git log --pretty=oneline -1
chdir: "{{ kolla_ansible_src_dir }}"
# TODO(mnasiadka): Remove in Victoria
        - name: Remove external Ceph config files that are no longer needed
file:
name: "{{ item }}"
state: absent
with_items:
- /etc/kolla/config/glance/glance-api.conf
- /etc/kolla/config/cinder/cinder-volume.conf
- /etc/kolla/config/cinder/cinder-backup.conf
- name: Generate configuration files
template:
src: "{{ kolla_ansible_local_src_dir }}/{{ item.src }}"
dest: "{{ item.dest }}"
vars:
is_previous_release: false
with_items:
# Ansible inventory
- src: "tests/templates/inventory.j2"
dest: "{{ kolla_inventory_path }}"
# globals.yml
- src: "tests/templates/globals-default.j2"
dest: /etc/kolla/globals.yml
# nova-compute.conf
- src: "tests/templates/nova-compute-overrides.j2"
dest: /etc/kolla/config/nova/nova-compute.conf
when: item.when | default(true)
# TODO(mgoddard): Remove this block when previous_release is ussuri.
- block:
- name: remove ansible and ARA for python 2
pip:
name:
- ansible
- ara
executable: pip2
state: absent
become: true
- name: install ansible and ARA for python 3
vars:
# Test latest ansible version on Ubuntu, minimum supported on others.
ansible_version_constraint: "{{ '==2.9.*' if base_distro == 'ubuntu' else '==2.8.*' }}"
pip:
name:
- "ansible{{ ansible_version_constraint }}"
- "ara<1.0.0"
executable: pip3
become: true
- name: get ARA callback plugin path
command: "python3 -m ara.setup.callback_plugins"
changed_when: false
register: ara_callback_plugins
- name: template ansible.cfg
template:
src: "{{ kolla_ansible_local_src_dir }}/tests/templates/ansible.cfg.j2"
dest: /etc/ansible/ansible.cfg
become: true
- name: upgrade kolla-ansible
pip:
name: "{{ kolla_ansible_src_dir }}"
executable: pip3
become: true
# Update passwords.yml to include any new passwords added in this
# release.
- name: move passwords.yml to passwords.yml.old
command: mv /etc/kolla/passwords.yml /etc/kolla/passwords.yml.old
- name: copy passwords.yml file
copy:
src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
dest: /etc/kolla/passwords.yml
remote_src: true
- name: generate new passwords
command: kolla-genpwd
- name: merge old and new passwords
command: >-
kolla-mergepwd
--old /etc/kolla/passwords.yml.old
--new /etc/kolla/passwords.yml
--final /etc/kolla/passwords.yml
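        # kolla-genpwd fills in any empty password values in the new
        # passwords.yml; kolla-mergepwd then re-applies the values from the old
        # file so existing credentials survive the upgrade while newly added
        # passwords are kept.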
# Perform an upgrade to the in-development code.
- name: Run upgrade.sh script
shell:
cmd: tests/upgrade.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
# NOTE(yoctozepto): this is nice as the first step after the upgrade
# because it waits for the services to stabilize well enough so that
# the dashboard is able to show the login prompt
- name: Run test-dashboard.sh script
shell:
cmd: tests/test-dashboard.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: dashboard_enabled
- name: Run test-core-openstack.sh script
shell:
cmd: tests/test-core-openstack.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
environment:
SCENARIO: "{{ scenario }}"
when: openstack_core_tested
when: is_upgrade
# Bifrost testing.
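    # Bifrost provides a standalone Ironic environment for provisioning bare
    # metal hosts; the scripts below deploy it, exercise it and upgrade it in
    # place.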
- block:
- name: Run deploy-bifrost.sh script
shell:
cmd: tests/deploy-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run test-bifrost.sh script
shell:
cmd: tests/test-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run upgrade-bifrost.sh script
shell:
cmd: tests/upgrade-bifrost.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when: scenario == "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
any_errors_fatal: true
tasks:
- name: Post-deploy/upgrade sanity checks
block:
- name: Run check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-logs.sh script
shell:
cmd: tests/check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- hosts: primary
any_errors_fatal: true
tasks:
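    # reconfigure.sh re-runs kolla-ansible in reconfigure mode against the
    # existing configuration, which doubles as an idempotence check for
    # non-upgrade jobs.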
- name: Run reconfigure.sh script
script:
cmd: reconfigure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when:
- not is_upgrade
- scenario != "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
any_errors_fatal: true
tasks:
- name: Post-reconfigure sanity checks
block:
- name: Run check-failure.sh script
shell:
cmd: tests/check-failure.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-config.sh script
shell:
cmd: tests/check-config.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
- name: Run check-logs.sh script
shell:
cmd: tests/check-logs.sh
executable: /bin/bash
chdir: "{{ kolla_ansible_src_dir }}"
when:
- not is_upgrade
- scenario != "bifrost"