# kolla-ansible/tests/run.yml
---
- hosts: all
  tasks:
    # NOTE(yoctozepto): setting vars as facts for all to have them around in all the plays
    - set_fact:
        kolla_inventory_path: "/etc/kolla/inventory"
        logs_dir: "/tmp/logs"
        kolla_ansible_src_dir: "{{ ansible_env.PWD }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
        kolla_ansible_local_src_dir: "{{ zuul.executor.work_root }}/src/{{ zuul.project.canonical_hostname }}/openstack/kolla-ansible"
        need_build_image: false
        build_image_tag: "change_{{ zuul.change | default('none') }}"
        is_upgrade: "{{ 'upgrade' in scenario }}"
        is_ceph: "{{ 'ceph' in scenario }}"
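    # setup_disks.sh receives either the Ceph OSD store type or 'cinder-lvm'
    # and prepares the corresponding backing disks.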
    - name: Prepare disks for Ceph or LVM
      script: "setup_disks.sh {{ disk_type }}"
      when: scenario == "cinder-lvm" or is_ceph
      become: true
      vars:
        disk_type: "{{ ceph_storetype if is_ceph else 'cinder-lvm' }}"
        ceph_storetype: "{{ hostvars[inventory_hostname].get('ceph_osd_storetype') }}"
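# Configuration generation and deployment are driven from the primary node.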
- hosts: primary
  tasks:
    # FIXME: in multi node env, api_interface may be different on each node.
    - name: detect api_interface_name variable
      vars:
        ansible_interface_name: "ansible_{{ item.replace('-', '_') }}"
        api_interface_address: "{{ hostvars[inventory_hostname]['nodepool']['private_ipv4'] }}"
      set_fact:
        api_interface_name: "{{ item }}"
        api_interface_address: "{{ api_interface_address }}"
      when:
        - hostvars[inventory_hostname][ansible_interface_name]['ipv4'] is defined
        - hostvars[inventory_hostname][ansible_interface_name]['ipv4']['address'] == api_interface_address
      with_items: "{{ ansible_interfaces }}"
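    # Build images locally only when the Zuul items include a kolla change on
    # the same branch; otherwise need_build_image stays false.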
    - name: detect whether to build images
      set_fact:
        need_build_image: true
      when:
        - item.project.short_name == "kolla"
        - item.branch == zuul.branch
      with_items: "{{ zuul['items'] }}"
    # NOTE(yoctozepto): required to template template_overrides.j2 for Zuul
    - name: Include kolla Zuul vars if building new images
      include_vars:
        file: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/vars/zuul.yml"
      when: need_build_image  # only then kolla sources are available (and used)
    # NOTE(mgoddard): This only affects the remote copy of the repo, not the
    # one on the executor.
    - name: checkout the previous kolla-ansible branch
      shell:
        cmd: |
          git checkout stable/{{ previous_release | lower }}
          echo "kolla-ansible checked out to:"
          git log --pretty=oneline -1
        chdir: "{{ kolla_ansible_src_dir }}"
      when: is_upgrade
    - name: ensure /etc/kolla exists
      file:
        path: "/etc/kolla"
        state: "directory"
        mode: 0777
      become: true
    # NOTE(yoctozepto): required to customize kolla to use local mirrors
    - name: Template template_overrides.j2
      template:
        src: "{{ zuul.executor.work_root }}/src/opendev.org/openstack/kolla/tests/templates/template_overrides.j2"
        dest: /etc/kolla/template_overrides.j2
      when: need_build_image  # only then kolla sources are available (and used)
    # Use the initial repo to generate config files. For upgrade jobs, this
    # repo is only available on the remote node, so use the remote-template
    # role.
    - name: generate configuration files
      include_role:
        role: remote-template
      vars:
        is_previous_release: "{{ is_upgrade }}"
        infra_dockerhub_mirror: "http://{{ zuul_site_mirror_fqdn }}:8082/"
        # Role variables.
        remote_template_src: "{{ kolla_ansible_src_dir }}/{{ item.src }}"
        remote_template_dest: "{{ item.dest }}"
        remote_template_become: "{{ item.become | default(false) }}"
      with_items:
        # Docker daemon.json
        - src: "tests/templates/docker_daemon.json.j2"
          dest: "/etc/docker/daemon.json"
          become: true
        # Ansible inventory
        - src: "tests/templates/inventory.j2"
          dest: "{{ kolla_inventory_path }}"
        # globals.yml
        - src: "tests/templates/globals-default.j2"
          dest: /etc/kolla/globals.yml
        # nova-compute.conf
        - src: "tests/templates/nova-compute-overrides.j2"
          dest: /etc/kolla/config/nova/nova-compute.conf
          when: "{{ scenario != 'bifrost' }}"
        # ceph.conf
        - src: "tests/templates/ceph-overrides.j2"
          dest: /etc/kolla/config/ceph.conf
          when: "{{ is_ceph }}"
        # bifrost/dib.yml
        - src: "tests/templates/bifrost-dib-overrides.j2"
          dest: /etc/kolla/config/bifrost/dib.yml
          when: "{{ scenario == 'bifrost' }}"
        - src: "tests/templates/ironic-overrides.j2"
          dest: /etc/kolla/config/ironic.conf
          when: "{{ scenario == 'ironic' }}"
      when: item.when | default(true)
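    # The ironic scenario additionally needs Ironic Python Agent (IPA) deploy
    # images staged under /etc/kolla/config/ironic before deployment.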
    - block:
        - name: ensure ironic config directory exists
          file:
            path: /etc/kolla/config/ironic
            state: directory
            mode: 0777
        - name: download Ironic Python Agent (IPA) images
          get_url:
            url: "https://tarballs.openstack.org/ironic-python-agent/tinyipa/files/{{ item.src }}"
            dest: "/etc/kolla/config/ironic/{{ item.dest }}"
          with_items:
            - src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.gz"
              dest: ironic-agent.initramfs
            - src: "tinyipa-{{ zuul.branch | replace('/', '-') }}.vmlinuz"
              dest: ironic-agent.kernel
      when: scenario == "ironic"
    - name: install kolla-ansible requirements
      pip:
        requirements: "{{ kolla_ansible_src_dir }}/requirements.txt"
      become: true
    - name: copy passwords.yml file
      copy:
        src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
        dest: /etc/kolla/passwords.yml
        remote_src: true
    - name: generate passwords
      shell: "{{ kolla_ansible_src_dir }}/tools/generate_passwords.py"
    - name: slurp kolla passwords
      slurp:
        src: /etc/kolla/passwords.yml
      register: passwords_yml
    - name: write out kolla SSH private key
      copy:
        content: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.private_key }}"
        dest: ~/.ssh/id_rsa_kolla
        mode: 0600
    - name: authorise kolla public key for zuul user
      authorized_key:
        user: "{{ ansible_env.USER }}"
        key: "{{ (passwords_yml.content | b64decode | from_yaml).kolla_ssh_key.public_key }}"
      # Delegate to each host in turn. If more tasks require execution on all
      # hosts in future, break out into a separate play.
      with_inventory_hostnames:
        - all
      delegate_to: "{{ item }}"
    # NOTE(mgoddard): We are using the script module here and later to ensure
    # we use the local copy of these scripts, rather than the one on the remote
    # host, which could be checked out to a previous release (in an upgrade
    # job).
    - name: Run setup_gate.sh script
      script:
        cmd: ../tools/setup_gate.sh
        executable: /bin/bash
        chdir: "{{ kolla_ansible_src_dir }}"
      environment:
        BASE_DISTRO: "{{ base_distro }}"
        INSTALL_TYPE: "{{ install_type }}"
        BUILD_IMAGE: "{{ need_build_image }}"
        TAG: "{{ build_image_tag }}"
        KOLLA_SRC_DIR: "{{ ansible_env.HOME }}/src/opendev.org/openstack/kolla"
        ACTION: "{{ scenario }}"
    # At this point we have generated all necessary configuration, and are
    # ready to deploy the control plane services. Control flow now depends on
    # the scenario being exercised.

    # Deploy control plane. For upgrade jobs this is the previous release.
    - block:
        - name: Run deploy.sh script
          script:
            cmd: deploy.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          environment:
            ACTION: "{{ scenario }}"
        - name: Run test-openstack.sh script
          script:
            cmd: test-openstack.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          environment:
            ACTION: "{{ scenario }}"
          when: scenario not in ['ironic', 'scenario_nfv']
        - name: Run test-zun.sh script
          shell:
            cmd: tests/test-zun.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          when: "'zun' in scenario"
        - name: Run test-scenario-nfv.sh script
          script:
            cmd: test-scenario-nfv.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          when: scenario == "scenario_nfv"
        - name: Run test-ironic.sh script
          script:
            cmd: test-ironic.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          when: scenario == "ironic"
        - name: Run test-masakari.sh script
          script:
            cmd: test-masakari.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          when: scenario == "masakari"
        - name: Run reconfigure.sh script
          script:
            cmd: reconfigure.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          environment:
            ACTION: "{{ scenario }}"
          when: not is_upgrade
      when: scenario != "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
  tasks:
    - name: Pre-upgrade sanity checks
      block:
        - name: Run pre-upgrade check-failure.sh script
          shell:
            cmd: tests/check-failure.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
        - name: Run pre-upgrade check-config.sh script
          shell:
            cmd: tests/check-config.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
        # Using script rather than shell here because check-logs.sh does not
        # exist in Stein branch.
        - name: Run check-logs.sh script
          script:
            cmd: check-logs.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
      when: is_upgrade
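# Upgrade jobs switch the remote checkout to the tested branch and upgrade the
# deployed services; bifrost jobs deploy, test and upgrade bifrost instead.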
- hosts: primary
  tasks:
    # Upgrade: update config.
    - block:
        # NOTE(mgoddard): This only affects the remote copy of the repo, not the
        # one on the executor.
        - name: checkout the current kolla-ansible branch
          shell:
            cmd: |
              git checkout {{ zuul.branch }}
              echo "kolla-ansible checked out to:"
              git log --pretty=oneline -1
            chdir: "{{ kolla_ansible_src_dir }}"
        # Use the new kolla-ansible repo to generate config files.
        # This is the branch checked out on the executor, so we can use
        # template directly rather than the remote-template role.
        - name: Generate configuration files
          template:
            src: "{{ kolla_ansible_local_src_dir }}/{{ item.src }}"
            dest: "{{ item.dest }}"
          vars:
            is_previous_release: false
          with_items:
            # Ansible inventory
            - src: "tests/templates/inventory.j2"
              dest: "{{ kolla_inventory_path }}"
            # globals.yml
            - src: "tests/templates/globals-default.j2"
              dest: /etc/kolla/globals.yml
            # nova-compute.conf
            - src: "tests/templates/nova-compute-overrides.j2"
              dest: /etc/kolla/config/nova/nova-compute.conf
            # ceph.conf
            - src: "tests/templates/ceph-overrides.j2"
              dest: /etc/kolla/config/ceph.conf
              when: "{{ is_ceph }}"
          when: item.when | default(true)
        - name: upgrade kolla-ansible requirements
          pip:
            requirements: "{{ kolla_ansible_src_dir }}/requirements.txt"
          become: true
        # Update passwords.yml to include any new passwords added in this
        # release.
        - name: move passwords.yml to passwords.yml.old
          command: mv /etc/kolla/passwords.yml /etc/kolla/passwords.yml.old
        - name: copy passwords.yml file
          copy:
            src: "{{ kolla_ansible_src_dir }}/etc/kolla/passwords.yml"
            dest: /etc/kolla/passwords.yml
            remote_src: true
        - name: generate new passwords
          shell: "{{ kolla_ansible_src_dir }}/tools/generate_passwords.py"
        - name: merge old and new passwords
          shell: >-
            {{ kolla_ansible_src_dir }}/tools/merge_passwords.py
            --old /etc/kolla/passwords.yml.old
            --new /etc/kolla/passwords.yml
            --final /etc/kolla/passwords.yml
        # Perform an upgrade to the in-development code.
        - name: Run upgrade.sh script
          shell:
            cmd: tests/upgrade.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          environment:
            ACTION: "{{ scenario }}"
        - name: Run test-openstack.sh script
          shell:
            cmd: tests/test-openstack.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          environment:
            ACTION: "{{ scenario }}"
        - name: Run test-zun.sh script
          shell:
            cmd: tests/test-zun.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
          when: "'zun' in scenario"
      when: is_upgrade
    # Bifrost testing.
    - block:
        - name: Run deploy-bifrost.sh script
          shell:
            cmd: tests/deploy-bifrost.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
        - name: Run test-bifrost.sh script
          shell:
            cmd: tests/test-bifrost.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
        - name: Run upgrade-bifrost.sh script
          shell:
            cmd: tests/upgrade-bifrost.sh
            executable: /bin/bash
            chdir: "{{ kolla_ansible_src_dir }}"
      when: scenario == "bifrost"
# NOTE(yoctozepto): each host checks itself
- hosts: all
  tasks:
    - name: Run check-failure.sh script
      shell:
        cmd: tests/check-failure.sh
        executable: /bin/bash
        chdir: "{{ kolla_ansible_src_dir }}"
    - name: Run check-config.sh script
      shell:
        cmd: tests/check-config.sh
        executable: /bin/bash
        chdir: "{{ kolla_ansible_src_dir }}"
    - name: Run check-logs.sh script
      shell:
        cmd: tests/check-logs.sh
        executable: /bin/bash
        chdir: "{{ kolla_ansible_src_dir }}"