Enhanced deployment scripts to support baremetal multi-node deployments

Additionally, aligned Treasuremap and airshipctl to use the same
environment variable "SITE" for the site name.

Change-Id: I50494b50691a40efe68996cd11ccede6517b23d6
Signed-off-by: James Gu <james.gu@att.com>
James Gu 2021-03-19 13:16:46 -07:00
parent edd0b84d84
commit 1342ab708f
5 changed files with 43 additions and 31 deletions
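
The variables touched below are ordinary environment variables, so a
multi-node baremetal run can presumably be driven by exporting them before
invoking the deployment scripts. A minimal sketch based on the defaults that
appear in this change (the extra worker name and the site value are
illustrative, not part of the commit):

    # Hypothetical invocation; SITE and WORKER_NODE are the variables touched by this change.
    export SITE="my-baremetal-site"      # was SITE_NAME before this commit
    export WORKER_NODE="node03 node04"   # space-separated list, default "node03"
    export TIMEOUT=7200                  # pre-existing polling timeout, default 3600 seconds
    # ...then run the deployment scripts as usual.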


@@ -20,4 +20,4 @@
default_zuul_dir: "{{ ansible_user_dir }}/{{ zuul.project.src_dir }}"
environment:
AIRSHIP_CONFIG_PHASE_REPO_URL: "{{ remote_work_dir | default(local_src_dir) | default(default_zuul_dir) }}"
-SITE_NAME: "{{ site | default('test-site') }}"
+SITE: "{{ site | default('test-site') }}"


@@ -18,7 +18,7 @@ export USER_NAME=${USER:-"ubuntu"}
IMAGE_DIR=${IMAGE_DIR:-"/srv/images"}
CLEANUP_SERVE_DIR=${CLEANUP_SERVE_DIR:-"false"}
-SITE_NAME=${SITE_NAME:-test-site}
+SITE=${SITE:-test-site}
# List of phases to run to build images.
IMAGE_PHASE_PLANS=${IMAGE_PHASE_PLANS:-"iso"}
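
The default relies on standard shell parameter expansion; a quick generic
illustration of the `${VAR:-default}` pattern used throughout these scripts
(not part of the change):

    SITE=${SITE:-test-site}
    echo "$SITE"     # prints "test-site" unless SITE was already exported
    export SITE=prod-site
    SITE=${SITE:-test-site}
    echo "$SITE"     # prints "prod-site"; the fallback is ignored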


@@ -20,14 +20,16 @@ export KUBECONFIG_TARGET_CONTEXT=${KUBECONFIG_TARGET_CONTEXT:-"target-cluster"}
# TODO (dukov) this is needed due to sushy tools inserts cdrom image to
# all vms. This can be removed once sushy tool is fixed
-echo "Ensure all cdrom images are ejected."
-for vm in $(sudo virsh list --all --name |grep -v ${EPHEMERAL_DOMAIN_NAME})
-do
-sudo virsh domblklist $vm |
-awk 'NF==2 {print $1}' |
-grep -v Target |
-xargs -I{} sudo virsh change-media $vm {} --eject || :
-done
+if type "virsh" > /dev/null; then
+echo "Ensure all cdrom images are ejected."
+for vm in $(sudo virsh list --all --name |grep -v ${EPHEMERAL_DOMAIN_NAME})
+do
+sudo virsh domblklist $vm |
+awk 'NF==2 {print $1}' |
+grep -v Target |
+xargs -I{} sudo virsh change-media $vm {} --eject || :
+done
+fi
echo "Create target k8s cluster resources"
airshipctl phase run controlplane-ephemeral --debug
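
Wrapping the cleanup in an `if type "virsh"` check is what lets the same
script run on baremetal hosts where libvirt is absent. A generic sketch of
the guard pattern (the messages are illustrative):

    # Only touch libvirt when the virsh CLI is actually installed.
    if type "virsh" > /dev/null 2>&1; then
        echo "virsh found: ejecting stale cdrom images from VMs"
    else
        echo "virsh not found: assuming baremetal nodes, skipping VM cleanup"
    fi

The extra `2>&1` in the sketch just silences type's "not found" message; the
script itself relies only on the exit code.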


@@ -18,32 +18,42 @@ set -e
export TIMEOUT=${TIMEOUT:-3600}
export KUBECONFIG=${KUBECONFIG:-"$HOME/.airship/kubeconfig"}
export KUBECONFIG_TARGET_CONTEXT=${KUBECONFIG_TARGET_CONTEXT:-"target-cluster"}
-WORKER_NODE="node03"
+WORKER_NODE=${WORKER_NODE:-"node03"}
EPHEMERAL_DOMAIN_NAME="air-ephemeral"
-echo "Stop ephemeral node"
-sudo virsh destroy air-ephemeral
# all vms. This can be removed once sushy tool is fixed
+if type "virsh" > /dev/null; then
+for vm in $(sudo virsh list --all --name --state-running |grep ${EPHEMERAL_DOMAIN_NAME})
+do
+echo "Stop ephemeral node '$vm'"
+sudo virsh destroy $vm
+done
+fi
node_timeout () {
end=$(($(date +%s) + $TIMEOUT))
+for worker in $WORKER_NODE
+do
while true; do
-if (kubectl --request-timeout 20s --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT get $1 $WORKER_NODE | grep -qw $2) ; then
-if [ "$1" = "node" ]; then
-kubectl --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT label nodes $WORKER_NODE node-role.kubernetes.io/worker=""
-fi
-echo -e "\nGet $1 status"
-kubectl --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT get $1
-break
-else
-now=$(date +%s)
-if [ $now -gt $end ]; then
-echo -e "\n$1 is not ready before TIMEOUT."
-exit 1
-fi
-echo -n .
-sleep 15
+if (kubectl --request-timeout 20s --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT get $1 $worker | grep -qw $2) ; then
+if [ "$1" = "node" ]; then
+kubectl --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT label nodes $worker node-role.kubernetes.io/worker=""
+fi
+echo -e "\nGet $1 status"
+kubectl --kubeconfig $KUBECONFIG --context $KUBECONFIG_TARGET_CONTEXT get $1
+break
+else
+now=$(date +%s)
+if [ $now -gt $end ]; then
+echo -e "\n$1 is not ready before TIMEOUT."
+exit 1
+fi
+echo -n .
+sleep 15
+fi
+done
done
}
echo "Deploy worker node"


@@ -36,7 +36,7 @@ export SYSTEM_REBOOT_DELAY=30
export AIRSHIP_CONFIG_PRIMARY_REPO_BRANCH=${BRANCH:-"master"}
# the git repo url or local file system path to a cloned repo, e.g., /home/stack/airshipctl
export AIRSHIP_CONFIG_PRIMARY_REPO_URL=${REPO:-"https://review.opendev.org/airship/airshipctl"}
-export AIRSHIP_SITE_NAME="airshipctl/manifests/site/az-test-site"
+export SITE="airshipctl/manifests/site/az-test-site"
export AIRSHIP_CONFIG_MANIFEST_DIRECTORY=${remote_work_dir}
export AIRSHIP_CONFIG_CA_DATA=$(cat tools/deployment/certificates/airship_config_ca_data| base64 -w0)
export AIRSHIP_CONFIG_EPHEMERAL_IP=${IP_Ephemeral:-"10.23.25.101"}
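
Since each value here is an environment override with a default, a run
against a different clone, branch, or site can presumably be configured
without editing the script itself (the values below are illustrative):

    # Hypothetical overrides picked up by the exports above.
    export REPO="/home/stack/airshipctl"              # local clone instead of the Gerrit URL
    export BRANCH="my-feature-branch"
    export SITE="airshipctl/manifests/site/my-site"   # path-style site reference used by this script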