Global refactoring of CI scripts
The global refactoring includes:
* drop Icehouse support;
* improve nodepool's slave-installation script;
* remove deprecated jobs;
* create a config template for the old integration tests;
* create config files for PyPI;
* improve and refactor functions.sh and the test-execution scripts.
Change-Id: Id6b6c9dd5f3f1bf500e0b2172f8cfc82a4fc9a62
This commit is contained in:
parent b6fc1f82ba
commit 22275a32f5
@@ -33,17 +33,17 @@ providers:
password: 'nova'
auth-url: 'http://172.18.168.42:5000/v2.0'
project-id: 'ci'
max-servers: 5
max-servers: 6
boot-timeout: 120
pool: public
networks:
- net-id: 'CI_LAB_PRIVATE_NETWORK_ID'
- net-id: 'NEUTRON_LAB_PRIVATE_NETWORK_ID'
images:
- name: trusty-neutron
base-image: 'ubuntu-14.04'
min-ram: 2048
private-key: '/etc/nodepool/id_dsa'
setup: prepare_node_bare.sh
setup: prepare_node.sh
- name: ui-neutron
base-image: 'ubuntu-12.04'
min-ram: 4096
@@ -54,17 +54,17 @@ providers:
password: 'nova'
auth-url: 'http://172.18.168.43:5000/v2.0'
project-id: 'ci'
max-servers: 5
max-servers: 6
boot-timeout: 120
pool: public
networks:
- net-id: 'STACK_SAHARA_PRIVATE_NETWORK_ID'
- net-id: 'NOVA_NET_LAB_PRIVATE_NETWORK_ID'
images:
- name: trusty-nova
base-image: 'ubuntu-14.04'
min-ram: 2048
private-key: '/etc/nodepool/id_dsa'
setup: prepare_node_bare.sh
setup: prepare_node.sh

targets:
- name: sahara-gate
@@ -17,59 +17,55 @@
# limitations under the License.

HOSTNAME=$1
SUDO=$2
THIN=$3
SUDO='true'
THIN='true'
MYSQL_PASS=MYSQL_ROOT_PASSWORD

sudo hostname $HOSTNAME
wget https://git.openstack.org/cgit/openstack-infra/system-config/plain/install_puppet.sh
sudo bash -xe install_puppet.sh
sudo git clone https://review.openstack.org/p/openstack-infra/system-config.git \
    /root/config
sudo /bin/bash /root/config/install_modules.sh
#if [ -z "$NODEPOOL_SSH_KEY" ] ; then
sudo puppet apply --modulepath=/root/config/modules:/etc/puppet/modules \
    -e "class {'openstack_project::single_use_slave': sudo => $SUDO, thin => $THIN, enable_unbound => false, }"
#else
#  sudo puppet apply --modulepath=/root/config/modules:/etc/puppet/modules \
#  -e "class {'openstack_project::single_use_slave': install_users => false, sudo => $SUDO, thin => $THIN, ssh_key => '$NODEPOOL_SSH_KEY', }"
#fi

sudo mkdir -p /opt/git
#sudo -i python /opt/nodepool-scripts/cache_git_repos.py

# The APT_PACKAGES variable is used for installing packages via apt-get
# The PIP_PACKAGES variable is used for installing packages via pip
APT_PACKAGES="mysql-server libpq-dev libmysqlclient-dev"
# RabbitMQ for distributed Sahara mode
APT_PACKAGES+=" rabbitmq-server"
# Required libraries
APT_PACKAGES+=" libxslt1-dev libffi-dev"
# Required packages for DIB
APT_PACKAGES+=" qemu kpartx"
# pep8-trunk job requirements
APT_PACKAGES+=" gettext"
# Glance-client is required for diskimage-integration jobs
PIP_PACKAGES="python-glanceclient==0.16"
# Requirements for Sahara
PIP_PACKAGES+=" mysql-python"
# Requirements for Cloudera plugin
PIP_PACKAGES+=" cm-api"

echo "mysql-server mysql-server/root_password select $MYSQL_PASS" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password_again select $MYSQL_PASS" | sudo debconf-set-selections
sudo apt-get -y install mysql-server libpq-dev libmysqlclient-dev

sudo apt-get install -y $APT_PACKAGES
# Remove ccache because it's useless for single-use nodes and may cause problems
sudo apt-get remove -y ccache

mysql -uroot -p$MYSQL_PASS -Bse "create database sahara"
mysql -uroot -p$MYSQL_PASS -Bse "CREATE USER 'sahara-citest'@'localhost' IDENTIFIED BY 'sahara-citest'"
mysql -uroot -p$MYSQL_PASS -Bse "GRANT ALL ON sahara.* TO 'sahara-citest'@'localhost'"
mysql -uroot -p$MYSQL_PASS -Bse "flush privileges"
sudo service mysql stop

# install RabbitMQ for distributed Sahara mode
sudo apt-get install rabbitmq-server -y

# install required libraries
sudo apt-get install libxslt1-dev libffi-dev -y

# Remove ccache because it's useless for single-use nodes and may cause problems
sudo apt-get remove -y ccache

# glance-client is required for diskimage-integration jobs
sudo pip install python-glanceclient==0.16
sudo apt-get install qemu kpartx -y

# install Sahara requirements
sudo pip install mysql-python psycopg2
sudo pip install $PIP_PACKAGES
cd /tmp && git clone https://git.openstack.org/openstack/sahara
cd sahara && sudo pip install -U -r requirements.txt

# install requirements for Cloudera plugin
sudo pip install cm-api

# pep8-trunk job requirements
sudo apt-get install gettext -y
cd /home/jenkins && rm -rf /tmp/sahara

# Java tarball for diskimage jobs
sudo wget --no-check-certificate --no-cookies --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" \
@@ -77,13 +73,33 @@ sudo wget --no-check-certificate --no-cookies --header "Cookie: gpw_e24=http%3A%

pushd /home/jenkins
sudo git clone https://git.openstack.org/openstack/tempest
pushd tempest && sudo pip install -U -r requirements.txt && popd
# temporary comment
#pushd tempest && sudo pip install -U -r requirements.txt && popd
RELEASE_DIB="0.1.29"
sudo git clone https://git.openstack.org/openstack/diskimage-builder
sudo git --git-dir=/home/jenkins/diskimage-builder/.git --work-tree=/home/jenkins/diskimage-builder/ checkout $RELEASE_DIB
sudo chown -R jenkins:jenkins /home/jenkins
popd

# create simple openrc file
if [[ "$HOSTNAME" =~ neutron ]]; then
    OPENSTACK_HOST="172.18.168.42"
    HOST="c1"
    USE_NEUTRON=true
else
    OPENSTACK_HOST="172.18.168.43"
    HOST="c2"
    USE_NEUTRON=false
fi
echo "export OS_USERNAME=ci-user
export OS_TENANT_NAME=ci
export OS_PASSWORD=nova
export OPENSTACK_HOST=$OPENSTACK_HOST
export HOST=$HOST
export USE_NEUTRON=$USE_NEUTRON
export OS_AUTH_URL=http://$OPENSTACK_HOST:5000/v2.0/
" > /home/jenkins/ci_openrc

sudo su - jenkins -c "echo '
JENKINS_PUBLIC_KEY' >> /home/jenkins/.ssh/authorized_keys"
sync
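The MySQL setup above is fully non-interactive: the root password is pre-seeded through debconf so apt-get never prompts, which is what lets the whole slave build run unattended. The same pattern in isolation (a minimal sketch; MYSQL_ROOT_PASSWORD is the same placeholder the script uses):

    #!/bin/bash -xe
    # Pre-seed the MySQL root password so mysql-server installs without prompting
    MYSQL_PASS=MYSQL_ROOT_PASSWORD  # placeholder, substituted by the CI tooling
    echo "mysql-server mysql-server/root_password select $MYSQL_PASS" | sudo debconf-set-selections
    echo "mysql-server mysql-server/root_password_again select $MYSQL_PASS" | sudo debconf-set-selections
    sudo apt-get install -y mysql-server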
@@ -1,23 +0,0 @@
-#!/bin/bash -xe
-
-# Copyright (C) 2011-2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-#
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-HOSTNAME=$1
-SUDO='true'
-THIN='true'
-
-./prepare_node.sh "$HOSTNAME" "$SUDO" "$THIN"
config/sahara/.pydistutils.cfg (new file, 2 lines)
@@ -0,0 +1,2 @@
+[easy_install]
+index_url = http://172.18.168.44/simple/
config/sahara/itest.conf.sample (new file, 67 lines)
@@ -0,0 +1,67 @@
+[COMMON]
+OS_USERNAME = ci-user
+OS_PASSWORD = nova
+OS_TENANT_NAME = ci
+OS_TENANT_ID = tenantid
+OS_AUTH_URL = http://127.0.0.1:5000/v2.0
+SAHARA_HOST = localhost
+FLAVOR_ID = 20
+CLUSTER_CREATION_TIMEOUT = 60
+CLUSTER_NAME = name
+FLOATING_IP_POOL = public
+NEUTRON_ENABLED = false
+INTERNAL_NEUTRON_NETWORK = private
+JOB_LAUNCH_TIMEOUT = 15
+HDFS_INITIALIZATION_TIMEOUT = 10
+
+[VANILLA]
+IMAGE_NAME = vanilla_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_CINDER_TEST = False
+SKIP_CLUSTER_CONFIG_TEST = False
+SKIP_EDP_TEST = False
+SKIP_MAP_REDUCE_TEST = True
+SKIP_SWIFT_TEST = True
+SKIP_SCALING_TEST = False
+SKIP_TRANSIENT_CLUSTER_TEST = True
+ONLY_TRANSIENT_CLUSTER_TEST = False
+
+[VANILLA_TWO]
+IMAGE_NAME = vanilla_two_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_CINDER_TEST = False
+SKIP_MAP_REDUCE_TEST = True
+SKIP_SWIFT_TEST = True
+SKIP_SCALING_TEST = False
+SKIP_EDP_TEST = False
+
+[HDP]
+IMAGE_NAME = hdp_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_CINDER_TEST = False
+SKIP_MAP_REDUCE_TEST = True
+SKIP_SWIFT_TEST = True
+SKIP_SCALING_TEST = False
+SKIP_EDP_TEST = False
+
+[HDP2]
+IMAGE_NAME = hdp_two_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_SWIFT_TEST = True
+SKIP_SCALING_TEST = False
+SKIP_EDP_TEST = False
+
+[CDH]
+IMAGE_NAME = cdh_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_CINDER_TEST = False
+SKIP_MAP_REDUCE_TEST = True
+SKIP_SWIFT_TEST = True
+SKIP_SCALING_TEST = False
+SKIP_EDP_TEST = False
+
+[SPARK]
+IMAGE_NAME = spark_image
+SKIP_ALL_TESTS_FOR_PLUGIN = False
+SKIP_SCALING_TEST = False
+SKIP_EDP_TEST = False
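This file is a template: write_tests_conf in slave-scripts/functions-common.sh (added later in this commit) copies it and overwrites the [COMMON] options per job. A sketch of that flow, with example values:

    # Job-side materialization of the template (values are examples)
    cp config/sahara/itest.conf.sample /tmp/itest.conf
    insert_config_value /tmp/itest.conf COMMON OS_USERNAME ci-user
    insert_config_value /tmp/itest.conf COMMON NEUTRON_ENABLED true
    insert_config_value /tmp/itest.conf COMMON CLUSTER_NAME c1-12345-abcd1234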
config/sahara/pip.conf (new file, 7 lines)
@@ -0,0 +1,7 @@
+[global]
+timeout = 60
+index-url = http://172.18.168.44/simple/
+extra-index-url = https://pypi.python.org/simple/
+download-cache = /home/jenkins/.pip/cache/
+[install]
+use-mirrors = true
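pip.conf sends package lookups to the lab-local mirror first and falls back to upstream PyPI. A quick smoke test for the mirror (assumed reachable from the slaves; the address comes from the file above):

    curl -sf http://172.18.168.44/simple/ > /dev/null \
        && echo "PyPI mirror reachable" \
        || echo "mirror down; pip will fall back to https://pypi.python.org/simple/"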
@@ -13,7 +13,7 @@ network:
clusters:
- plugin_name: spark
plugin_version: 1.0.0
image: %IMAGE_NAME%
image: %spark_image%
node_group_templates:
- name: master
flavor_id: '20'
@@ -36,7 +36,7 @@ clusters:
HDFS:
dfs.replication: 1
cluster:
name: %CLUSTER_NAME%
name: %cluster_name%
scenario:
- run_jobs
edp_jobs_flow: spark_edp
@@ -13,7 +13,7 @@ network:
clusters:
- plugin_name: vanilla
plugin_version: 2.6.0
image: %IMAGE_NAME%
image: %vanilla_two_six_image%
node_group_templates:
- name: worker-dn-nm
flavor_id: '20'
@@ -67,7 +67,7 @@ clusters:
node_configs:
*ng_configs
cluster:
name: %CLUSTER_NAME%
name: %cluster_name%
cluster_template:
name: vanilla-2-6-0
node_group_templates:
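The placeholders switch from the generic %IMAGE_NAME%/%CLUSTER_NAME% to names that match shell variables (%spark_image%, %vanilla_two_six_image%, %cluster_name%), so a single helper can fill any of them by variable name. The substitution this enables boils down to (see insert_scenario_value in functions-common.sh below):

    spark_image="c1-sahara-spark-ubuntu-12345"  # example value
    sed -i "s/%spark_image%/${spark_image}/g" sahara-test-config-spark.yaml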
@@ -106,7 +106,7 @@ projects:
jobs:
- name: ^.*$
parameter-function: set_ci_tenant
branch: ^(master|stable/juno|stable/icehouse)$
branch: ^(master|stable/juno)$
- name: gate-sahara-pep8-trunk
voting: false
- name: ^gate-sahara-.*$
@@ -114,6 +114,10 @@ jobs:
- '^sahara/.*$'
- '^tools/.*$'
- '^[^\/]*$'
- name: dib-neutron-direct-vanilla_1-fedora-aio
voting: false
- name: dib-neutron-heat-vanilla_2.6-fedora-scenario
voting: false
- name: gate-sahara-neutron-heat-vanilla_2.4
branch: ^(stable/juno)$
- name: gate-saharaclient-neutron-heat-vanilla_2.4
@@ -37,6 +37,6 @@ def single_use_node(item, job, params):

def set_ci_tenant(item, job, params):
    single_use_node(item, job, params)
    params['CI_LAB_TENANT_ID'] = '-CI_LAB_TENANT_ID-'
    params['STACK_SAHARA_TENANT_ID'] = '-STACK_SAHARA_TENANT_ID-'
    params['NEUTRON_LAB_TENANT_ID'] = '-NEUTRON_LAB_TENANT_ID-'
    params['NOVA_NET_LAB_TENANT_ID'] = '-NOVA_NET_LAB_TENANT_ID-'
    params['CLUSTER_HASH'] = str(uuid.uuid4()).split('-')[0]
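CLUSTER_HASH is the first dash-separated group of a UUID4, i.e. eight hex characters, giving each triggered job a short unique suffix for cluster names. The shell equivalent, for illustration:

    CLUSTER_HASH=$(uuidgen | cut -d '-' -f 1)   # e.g. 9f1c2b3a
    echo "$HOST-$ZUUL_CHANGE-$CLUSTER_HASH"     # the cluster-name shape dib.sh builds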
@@ -4,9 +4,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh hdp_2"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh hdp_2

properties:
- zeromq-event
@@ -23,9 +23,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh {plugin} {os}"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh {plugin} {os}

properties:
- zeromq-event
@@ -42,9 +42,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh {plugin}"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh {plugin}

properties:
- zeromq-event
@@ -61,9 +61,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh spark"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh spark

properties:
- zeromq-event
@@ -80,9 +80,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh vanilla_2.4 {os}"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh vanilla_2.4 {os}

properties:
- zeromq-event
@@ -99,9 +99,9 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/dib.sh vanilla_2.6 {os}"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/dib.sh vanilla_2.6 {os}

properties:
- zeromq-event
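Besides switching the builders from escaped one-line strings to literal blocks (- shell: |), these hunks change the FUNCTION_PATH contract: it used to point at the single functions.sh file and now points at the slave-scripts directory, from which dib.sh sources the split helpers:

    # Old contract (functions.sh, removed at the end of this commit):
    #   . $FUNCTION_PATH
    # New contract:
    . $FUNCTION_PATH/functions-common.sh
    . $FUNCTION_PATH/functions-dib.sh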
@@ -1,23 +0,0 @@
-- job:
-    name: sahara-pypimirror
-    node: master
-    project-type: freestyle
-    description: "<b>This job is managed by Jenkins Job Builder, do not edit it through WebUI.\
-        \n<p>Please use this repository to make changes: <a href=\"https://git.openstack.org/stackforge/sahara-ci-config\">https://git.openstack.org/stackforge/sahara-ci-config</a></b>\
-        \n<p><b>Title</b>: Pypi mirror updater<br/>\
-        \n<b>Description</b>: This job creates/updates Pypi mirror used for testing with all Sahara requirements from all Sahara branches (based on openstack-infra jeepyb scripts). Mirror url:\
-        \n<a href=\"http://sahara.mirantis.net/pypi/\">http://sahara.mirantis.net/pypi</a><br/>\
-        \n<b>Maintainer</b>: Sergey Kolekonov<br/>"
-    defaults: global
-    disabled: true
-    concurrent: false
-
-    triggers:
-        - timed: '0 */3 * * *'
-
-    builders:
-        - shell: "cd /opt/ci/pypi-mirror/pypi-mirror && tox -e venv -- run-mirror -c mirror.yaml"
-
-    publishers:
-        - email:
-            recipients: elastic-hadoop-eng@mirantis.com
@@ -16,9 +16,9 @@

builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/gate-ui-tests.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts ./sahara-ci-config/slave-scripts/gate-ui-tests.sh

publishers:
- console-log
@@ -31,12 +31,7 @@
- unstable: true
- failure: true
- aborted: true
- trigger-parameterized-builds:
    - project: "integration-cleanup"
      predefined-parameters:
        "PREV_BUILD=$BUILD_NUMBER-$ZUUL_CHANGE-$ZUUL_PATCHSET\
        \nPREV_JOB=$JOB_NAME\
        \nHOST_NAME=$NODE_NAME"
- trigger-cleanup

- project:
name: sahara-dashboard
@@ -4,19 +4,14 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config /tmp/sahara-ci-config\
\nFUNCTION_PATH=/tmp/sahara-ci-config/slave-scripts/functions.sh bash -x /tmp/sahara-ci-config/slave-scripts/tempest.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=$WORKSPACE/sahara-ci-config/slave-scripts bash $WORKSPACE/sahara-ci-config/slave-scripts/tempest.sh
properties:
- zeromq-event
- build-blocker:
    blocking-jobs:
      - "tempest-sahara-tests"
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -29,19 +24,14 @@
concurrent: false
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/tempest.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=$WORKSPACE/sahara-ci-config/slave-scripts bash $WORKSPACE/sahara-ci-config/slave-scripts/tempest.sh
properties:
- zeromq-event
- build-blocker:
    blocking-jobs:
      - "tempest-saharaclient-tests"
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -51,19 +41,13 @@
- job-template:
name: 'gate-saharaclient-neutron-{plugin-neutron}'
defaults: global
concurrent: true
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config /tmp/sahara-ci-config\
\nFUNCTION_PATH=/tmp/sahara-ci-config/slave-scripts/functions.sh bash -x /tmp/sahara-ci-config/slave-scripts/gate-saharaclient.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=$WORKSPACE/sahara-ci-config/slave-scripts bash $WORKSPACE/sahara-ci-config/slave-scripts/gate-saharaclient.sh
properties:
- zeromq-event
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -73,19 +57,13 @@
- job-template:
name: 'gate-saharaclient-nova-{plugin-nova_network}'
defaults: global
concurrent: true
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config /tmp/sahara-ci-config\
\nFUNCTION_PATH=/tmp/sahara-ci-config/slave-scripts/functions.sh bash -x /tmp/sahara-ci-config/slave-scripts/gate-saharaclient.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=$WORKSPACE/sahara-ci-config/slave-scripts bash $WORKSPACE/sahara-ci-config/slave-scripts/gate-saharaclient.sh
properties:
- zeromq-event
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -95,19 +73,13 @@
- job-template:
name: 'gate-sahara-neutron-{plugin-neutron}'
defaults: global
concurrent: true
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/gate-sahara.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts bash sahara-ci-config/slave-scripts/gate-sahara.sh
properties:
- zeromq-event
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -117,19 +89,13 @@
- job-template:
name: 'gate-sahara-nova-{plugin-nova_network}'
defaults: global
concurrent: true
builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nFUNCTION_PATH=sahara-ci-config/slave-scripts/functions.sh bash -x sahara-ci-config/slave-scripts/gate-sahara.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    FUNCTION_PATH=sahara-ci-config/slave-scripts bash sahara-ci-config/slave-scripts/gate-sahara.sh
properties:
- zeromq-event
wrappers:
- timeout:
    timeout: 120
    fail: true
- timestamps
publishers:
- sahara-logs
- console-log
@@ -166,7 +132,6 @@
\n<p>Please use this repository to make changes: <a href=\"https://git.openstack.org/stackforge/sahara-ci-config\">https://git.openstack.org/stackforge/sahara-ci-config</a></b>\
\n<p>This job destroys vms which were not deleted after integration tests"
disabled: false
concurrent: false
node: 'master'

parameters:
@@ -184,9 +149,10 @@
description:

builders:
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nbash sahara-ci-config/slave-scripts/integration-cleanup.sh"
- shell: |
    rm -rf sahara-ci-config
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    bash sahara-ci-config/slave-scripts/integration-cleanup.sh

- job:
name: gate-sahara-pep8-trunk
@@ -196,14 +162,13 @@
\n<p>Please use this repository to make changes: <a href=\"https://git.openstack.org/stackforge/sahara-ci-config\">https://git.openstack.org/stackforge/sahara-ci-config</a></b>\
\n<p>This job runs pep8 check using trunk version of hacking"
disabled: false
concurrent: true
node: trusty-neutron || trusty-nova

builders:
- gerrit-git-prep
- shell: "rm -rf sahara-ci-config\
\ngit clone https://git.openstack.org/stackforge/sahara-ci-config\
\nbash -x sahara-ci-config/slave-scripts/gate-sahara-pep8-trunk.sh"
- shell: |
    git clone https://git.openstack.org/stackforge/sahara-ci-config
    bash sahara-ci-config/slave-scripts/gate-sahara-pep8-trunk.sh
properties:
- zeromq-event
publishers:
@@ -21,4 +21,4 @@
- pollscm: "* * * * * "

builders:
- shell: "bash $WORKSPACE/slave-scripts/update_config.sh"
- shell: "$WORKSPACE/slave-scripts/update_config.sh"

@@ -20,4 +20,4 @@
- timed: '0 6 * * *'

builders:
- shell: "bash -e /opt/ci/jenkins-jobs/sahara-ci-config/slave-scripts/update_pool.sh"
- shell: "/opt/ci/jenkins-jobs/sahara-ci-config/slave-scripts/update_pool.sh"
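With the bash prefix dropped, both scripts are executed directly and therefore need the executable bit; the mode change is recorded below for dib.sh and the new function files, and presumably applies to these scripts as well:

    # Assumption: exec bits are set in the same commit (not visible in this excerpt)
    chmod +x slave-scripts/update_config.sh slave-scripts/update_pool.sh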
slave-scripts/dib.sh (369 lines changed; Normal file → Executable file)
@@ -1,123 +1,39 @@
#!/bin/bash
#!/bin/bash -xe

. $FUNCTION_PATH
# source CI credentials
. /home/jenkins/ci_openrc
# source functions
. $FUNCTION_PATH/functions-common.sh
. $FUNCTION_PATH/functions-dib.sh

check_openstack_host

check_error_code() {
    if [ "$1" != "0" -o ! -f "$2" ]; then
        echo "$2 image doesn't build"
        exit 1
    fi
}

register_vanilla_image() {
    # 1 - hadoop version, 2 - username, 3 - image name
    case "$1" in
        1)
            glance image-create --name $3 --file $3.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'="${2}"
            ;;
        2.4)
            glance image-create --name $3 --file $3.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.4.1'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'="${2}"
            ;;
        2.6)
            glance image-create --name $3 --file $3.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.6.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'="${2}"
            ;;
    esac
}

register_hdp_image() {
    # 1 - hadoop version, 2 - username, 3 - image name
    case "$1" in
        1)
            glance image-create --name $3 --file $3.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_1.3.2'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'="${2}"
            ;;
        2)
            glance image-create --name $3 --file $3.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_2.0.6'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'="${2}"
            ;;
    esac
}

register_cdh_image() {
    # 1 - username, 2 - image name
    glance image-create --name $2 --file $2.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_5.3.0'='True' --property '_sahara_tag_5'='True' --property '_sahara_tag_cdh'='True' --property '_sahara_username'="${1}"
}

register_spark_image() {
    # 1 - username, 2 - image name
    glance image-create --name $2 --file $2.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' --property '_sahara_tag_spark'='True' --property '_sahara_tag_1.0.0'='True' --property '_sahara_username'="${1}"
}

delete_image() {
    glance image-delete $1
}

upload_image() {
    # 1 - plugin, 2 - username, 3 - image name
    delete_image $3

    case "$1" in
        vanilla-1)
            register_vanilla_image "1" "$2" "$3"
            ;;
        vanilla-2.4)
            register_vanilla_image "2.4" "$2" "$3"
            ;;
        vanilla-2.6)
            register_vanilla_image "2.6" "$2" "$3"
            ;;
        hdp1)
            register_hdp_image "1" "$2" "$3"
            ;;
        hdp2)
            register_hdp_image "2" "$2" "$3"
            ;;
        cdh)
            register_cdh_image "$2" "$3"
            ;;
        spark)
            register_spark_image "$2" "$3"
            ;;
    esac
}

rename_image() {
    # 1 - source image, 2 - target image
    glance image-update $1 --name $2
}

ENGINE_TYPE=$(echo $JOB_NAME | awk -F '-' '{ print $3 }')

plugin="$1"
CLUSTER_HASH=${CLUSTER_HASH:-$RANDOM}
cluster_name="$HOST-$ZUUL_CHANGE-$CLUSTER_HASH"

SAHARA_PATH="/tmp/sahara"
# default (deprecated) config file for integration tests
tests_config_file="$SAHARA_PATH/sahara/tests/integration/configs/itest.conf"
tests_config_file_template="$sahara_templates_configs_path/itest.conf.sample"
sahara_conf_path="$SAHARA_PATH/etc/sahara/sahara.conf"

engine=$(echo $JOB_NAME | awk -F '-' '{ print $3 }')
job_type="$1"
image_type=${2:-ubuntu}
hadoop_version=1
GERRIT_CHANGE_NUMBER=$ZUUL_CHANGE
SKIP_CINDER_TEST=True
SKIP_CLUSTER_CONFIG_TEST=True
SKIP_EDP_TEST=False
SKIP_MAP_REDUCE_TEST=True
SKIP_SWIFT_TEST=True
SKIP_SCALING_TEST=True
SKIP_TRANSIENT_TEST=True
SKIP_ONLY_TRANSIENT_TEST=False
SKIP_ALL_TESTS_FOR_PLUGIN=False
VANILLA_IMAGE=$HOST-sahara-vanilla-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_1
VANILLA_TWO_IMAGE=$HOST-sahara-vanilla-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2
HDP_IMAGE=$HOST-sahara-hdp-centos-${GERRIT_CHANGE_NUMBER}-hadoop_1
HDP_TWO_IMAGE=$HOST-sahara-hdp-centos-${GERRIT_CHANGE_NUMBER}-hadoop_2
SPARK_IMAGE=$HOST-sahara-spark-ubuntu-${GERRIT_CHANGE_NUMBER}
CDH_IMAGE=$HOST-${image_type}-cdh-${GERRIT_CHANGE_NUMBER}
TESTS_CONFIG_FILE='sahara/tests/integration/configs/itest.conf'

if [[ "$ENGINE_TYPE" == 'heat' ]]
then
    HEAT_JOB=True
    echo "Heat detected"
fi
# Image names
vanilla_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_1
vanilla_two_four_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_2.4
vanilla_two_six_image=$HOST-sahara-vanilla-${image_type}-${ZUUL_CHANGE}-hadoop_2.6
hdp_image=$HOST-sahara-hdp-centos-${ZUUL_CHANGE}-hadoop_1
hdp_two_image=$HOST-sahara-hdp-centos-${ZUUL_CHANGE}-hadoop_2
spark_image=$HOST-sahara-spark-ubuntu-${ZUUL_CHANGE}
cdh_image=$HOST-${image_type}-cdh-${ZUUL_CHANGE}

case $plugin in
# Clone Sahara
git clone https://review.openstack.org/openstack/sahara $SAHARA_PATH

case $job_type in
vanilla*)
    # Up local HTTPServer with Java source
    pushd /home/jenkins
    python -m SimpleHTTPServer 8000 > /dev/null &
    popd
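Both the old ENGINE_TYPE and the new engine variable read the infrastructure engine out of the third dash-separated field of the Jenkins job name, for example:

    JOB_NAME="dib-neutron-heat-vanilla_2.6-fedora-scenario"  # a job from layout.yaml above
    echo "$JOB_NAME" | awk -F '-' '{ print $3 }'             # prints: heat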
@@ -128,195 +44,108 @@ case $plugin in
        username=${image_type}
    fi

    hadoop_version=$(echo $plugin | awk -F '_' '{print $2}')
    hadoop_version=$(echo $job_type | awk -F '_' '{print $2}')
    case $hadoop_version in
        1)
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_1_image_name=${VANILLA_IMAGE} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 1
            check_error_code $? ${VANILLA_IMAGE}.qcow2
            upload_image "vanilla-1" "${username}" ${VANILLA_IMAGE}
            PLUGIN_TYPE=vanilla1
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_1_image_name=${vanilla_image} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 1
            check_error_code $? ${vanilla_image}.qcow2
            upload_image "vanilla-1" "${username}" ${vanilla_image}
            insert_config_value $tests_config_file_template VANILLA SKIP_CINDER_TEST True
            insert_config_value $tests_config_file_template VANILLA SKIP_CLUSTER_CONFIG_TEST True
            insert_config_value $tests_config_file_template VANILLA SKIP_SCALING_TEST True
            plugin=vanilla1
            ;;
        2.4)
            VANILLA_TWO_IMAGE=$HOST-sahara-vanilla-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.4
            [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Vanilla 2.4 plugin is not supported in stable/icehouse" && exit 0
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_2_4_image_name=${VANILLA_TWO_IMAGE} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.4
            check_error_code $? ${VANILLA_TWO_IMAGE}.qcow2
            upload_image "vanilla-2.4" "${username}" ${VANILLA_TWO_IMAGE}
            hadoop_version=2-4
            PLUGIN_TYPE=vanilla2
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_2_4_image_name=${vanilla_two_four_image} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.4
            check_error_code $? ${vanilla_two_four_image}.qcow2
            upload_image "vanilla-2.4" "${username}" ${vanilla_two_four_image}
            DISTRIBUTE_MODE=True
            insert_config_value $tests_config_file_template VANILLA_TWO SKIP_CINDER_TEST True
            insert_config_value $tests_config_file_template VANILLA_TWO SKIP_CLUSTER_CONFIG_TEST True
            insert_config_value $tests_config_file_template VANILLA_TWO SKIP_SCALING_TEST True
            plugin=vanilla2
            ;;
        2.6)
            VANILLA_TWO_IMAGE=$HOST-sahara-vanilla-${image_type}-${GERRIT_CHANGE_NUMBER}-hadoop_2.6
            [ "$ZUUL_BRANCH" == "stable/icehouse" -o "$ZUUL_BRANCH" == "stable/juno" ] && echo "Vanilla 2.6 plugin is not supported in stable/icehouse and stable/juno" && exit 0
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_2_6_image_name=${VANILLA_TWO_IMAGE} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.6
            check_error_code $? ${VANILLA_TWO_IMAGE}.qcow2
            upload_image "vanilla-2.6" "${username}" ${VANILLA_TWO_IMAGE}
            hadoop_version=2-6
            PLUGIN_TYPE=vanilla2
            # Skipping hive job check for fedora and centos images because it's causing the test failure
            if [ "$image_type" != "ubuntu" ] ; then
                SKIP_EDP_JOB_TYPES=Hive
            fi
            TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-vanilla-2.6.yaml"
            sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_vanilla_hadoop_2_6_image_name=${vanilla_two_six_image} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p vanilla -i $image_type -v 2.6
            check_error_code $? ${vanilla_two_six_image}.qcow2
            upload_image "vanilla-2.6" "${username}" ${vanilla_two_six_image}
            DISTRIBUTE_MODE=True
            tests_config_file="$sahara_templates_configs_path/sahara-test-config-vanilla-2.6.yaml"
            insert_scenario_value $tests_config_file vanilla_two_six_image
            ;;
    esac
    ;;

spark)
    # Up local HTTPServer with Java source
    pushd /home/jenkins
    python -m SimpleHTTPServer 8000 > /dev/null &
    popd

    image_type="ubuntu"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_spark_image_name=${SPARK_IMAGE} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p "spark"
    check_error_code $? ${SPARK_IMAGE}.qcow2
    [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Tests for Spark plugin is not implemented in stable/icehouse" && exit 0
    upload_image "spark" "ubuntu" ${SPARK_IMAGE}
    PLUGIN_TYPE=$plugin
    [[ "$JOB_NAME" =~ scenario ]] && TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-spark.yaml"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ubuntu_spark_image_name=${spark_image} JAVA_DOWNLOAD_URL='http://127.0.0.1:8000/jdk-7u51-linux-x64.tar.gz' SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p "spark"
    check_error_code $? ${spark_image}.qcow2
    upload_image "spark" "ubuntu" ${spark_image}
    if [[ "$JOB_NAME" =~ scenario ]]; then
        tests_config_file="$sahara_templates_configs_path/sahara-test-config-spark.yaml"
        insert_scenario_value $tests_config_file spark_image
    else
        insert_config_value $tests_config_file_template SPARK SKIP_CINDER_TEST True
        insert_config_value $tests_config_file_template SPARK SKIP_CLUSTER_CONFIG_TEST True
        insert_config_value $tests_config_file_template SPARK SKIP_SCALING_TEST True
        plugin=spark
    fi
    insert_config_value $sahara_conf_path DEFAULT plugins spark
    ;;

hdp_1)
    image_type="centos"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_hdp_hadoop_1_image_name=${HDP_IMAGE} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p hdp -v 1
    check_error_code $? ${HDP_IMAGE}.qcow2
    upload_image "hdp1" "root" ${HDP_IMAGE}
    PLUGIN_TYPE="hdp1"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" centos_hdp_hadoop_1_image_name=${hdp_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p hdp -v 1
    check_error_code $? ${hdp_image}.qcow2
    upload_image "hdp1" "root" ${hdp_image}
    insert_config_value $tests_config_file_template HDP SKIP_CINDER_TEST True
    insert_config_value $tests_config_file_template HDP SKIP_CLUSTER_CONFIG_TEST True
    insert_config_value $tests_config_file_template HDP SKIP_SCALING_TEST True
    plugin=hdp1
    ;;

hdp_2)
    image_type="centos"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" ${image_type}_hdp_hadoop_2_image_name=${HDP_TWO_IMAGE} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p hdp -v 2
    check_error_code $? ${HDP_TWO_IMAGE}.qcow2
    upload_image "hdp2" "root" ${HDP_TWO_IMAGE}
    hadoop_version="2"
    PLUGIN_TYPE="hdp2"
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" centos_hdp_hadoop_2_image_name=${hdp_two_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p hdp -v 2
    check_error_code $? ${hdp_two_image}.qcow2
    upload_image "hdp2" "root" ${hdp_two_image}
    DISTRIBUTE_MODE=True
    insert_config_value $tests_config_file_template HDP2 SKIP_CINDER_TEST True
    insert_config_value $tests_config_file_template HDP2 SKIP_CLUSTER_CONFIG_TEST True
    insert_config_value $tests_config_file_template HDP2 SKIP_SCALING_TEST True
    plugin=hdp2
    ;;

cdh)
    [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "CDH plugin is not supported in stable/icehouse" && exit 0
    if [ "${image_type}" == 'centos' ]; then
        username='cloud-user'
    else
        username='ubuntu'
    fi
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" cloudera_5_3_${image_type}_image_name=${CDH_IMAGE} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p cloudera -i $image_type -v 5.3
    check_error_code $? ${CDH_IMAGE}.qcow2
    upload_image "cdh" ${username} ${CDH_IMAGE}
    hadoop_version="2"
    PLUGIN_TYPE=$plugin
    sudo DIB_REPO_PATH="/home/jenkins/diskimage-builder" cloudera_5_3_${image_type}_image_name=${cdh_image} SIM_REPO_PATH=$WORKSPACE bash -x diskimage-create/diskimage-create.sh -p cloudera -i $image_type -v 5.3
    check_error_code $? ${cdh_image}.qcow2
    upload_image "cdh" ${username} ${cdh_image}
    insert_config_value $sahara_conf_path DEFAULT plugins cdh
    insert_config_value $tests_config_file_template CDH SKIP_CINDER_TEST True
    insert_config_value $tests_config_file_template CDH SKIP_CLUSTER_CONFIG_TEST True
    insert_config_value $tests_config_file_template CDH SKIP_SCALING_TEST True
    insert_config_value $tests_config_file_template CDH CM_REPO_LIST_URL "http://$OPENSTACK_HOST/cdh-repo/cm.list"
    insert_config_value $tests_config_file_template CDH CDH_REPO_LIST_URL "http://$OPENSTACK_HOST/cdh-repo/cdh.list"
    plugin=cdh
    ;;
esac

# This shortened value is used in the cluster name, because with the full $image_type the name would exceed the 64-character limit.
image_os="uos"
if [ "$image_type" == "centos" ]; then
    image_os="cos"
elif [ "$image_type" == "fedora" ]; then
    image_os="fos"
cd $SAHARA_PATH
if [ "$ZUUL_BRANCH" != "master" ]; then
    git checkout "$ZUUL_BRANCH"
    sudo pip install -U -r requirements.txt
fi

cd /tmp/
TOX_LOG=/tmp/sahara/.tox/venv/log/venv-1.log

create_database

sudo rm -rf sahara
git clone https://review.openstack.org/openstack/sahara
cd sahara
[ "$ZUUL_BRANCH" == "stable/icehouse" ] && sudo pip install -U -r requirements.txt
sudo pip install .

enable_pypi

write_sahara_main_conf etc/sahara/sahara.conf
start_sahara etc/sahara/sahara.conf

cd /tmp/sahara

CLUSTER_NAME="$HOST-$CLUSTER_HASH-$ZUUL_CHANGE"
write_tests_conf

run_tests

print_python_env /tmp/sahara

mv /tmp/sahara/logs $WORKSPACE

if [ "$FAILURE" != 0 ]; then
    exit 1
fi

if [[ "$STATUS" != 0 ]]
then
    if [[ "${plugin}" =~ vanilla ]]; then
        if [ "${hadoop_version}" == "1" ]; then
            delete_image $VANILLA_IMAGE
        else
            delete_image $VANILLA_TWO_IMAGE
        fi
    fi
    if [ "${plugin}" == "hdp_1" ]; then
        delete_image $HDP_IMAGE
    fi
    if [ "${plugin}" == "hdp_2" ]; then
        delete_image $HDP_TWO_IMAGE
    fi
    if [ "${plugin}" == "cdh" ]; then
        delete_image $CDH_IMAGE
    fi
    if [ "${plugin}" == "spark" ]; then
        delete_image $SPARK_IMAGE
    fi
    exit 1
fi

if [ "$ZUUL_PIPELINE" == "check" -o "$ZUUL_BRANCH" != "master" ]
then
    if [[ "${plugin}" =~ vanilla ]]; then
        if [ "${hadoop_version}" == "1" ]; then
            delete_image $VANILLA_IMAGE
        else
            delete_image $VANILLA_TWO_IMAGE
        fi
    fi
    if [ "${plugin}" == "hdp_1" ]; then
        delete_image $HDP_IMAGE
    fi
    if [ "${plugin}" == "hdp_2" ]; then
        delete_image $HDP_TWO_IMAGE
    fi
    if [ "${plugin}" == "cdh" ]; then
        delete_image $CDH_IMAGE
    fi
    if [ "${plugin}" == "spark" ]; then
        delete_image $SPARK_IMAGE
    fi
else
    if [[ "${plugin}" =~ vanilla ]]; then
        hadoop_version=$(echo $plugin | awk -F '_' '{print $2}')
        if [ "${hadoop_version}" == "1" ]; then
            delete_image ${image_type}_vanilla_1_latest
            rename_image $VANILLA_IMAGE ${image_type}_vanilla_1_latest
        else
            delete_image ${image_type}_vanilla_${hadoop_version}_latest
            rename_image $VANILLA_TWO_IMAGE ${image_type}_vanilla_${hadoop_version}_latest
        fi
    fi
    if [ "${plugin}" == "hdp_1" ]; then
        delete_image sahara_hdp_1_latest
        rename_image $HDP_IMAGE sahara_hdp_1_latest
    fi
    if [ "${plugin}" == "hdp_2" ]; then
        delete_image sahara_hdp_2_latest
        rename_image $HDP_TWO_IMAGE sahara_hdp_2_latest
    fi
    if [ "${plugin}" == "cdh" ]; then
        delete_image ${image_type}_cdh_latest
        rename_image $CDH_IMAGE ${image_type}_cdh_latest
    fi
    if [ "${plugin}" == "spark" ]; then
        delete_image sahara_spark_latest
        rename_image $SPARK_IMAGE sahara_spark_latest
    fi
fi
write_sahara_main_conf "$sahara_conf_path" "$engine"
write_tests_conf "$tests_config_file" "$cluster_name"
start_sahara "$sahara_conf_path" && run_tests_for_dib_image "$tests_config_file" "$plugin"
print_python_env
cleanup_image "$job_type" "$image_type"
slave-scripts/functions-common.sh (new executable file, 157 lines)
@@ -0,0 +1,157 @@
+#!/bin/bash -xe
+
+sahara_templates_configs_path=$WORKSPACE/sahara-ci-config/config/sahara
+
+enable_pypi() {
+    mkdir -p ~/.pip
+    export PIP_USE_MIRRORS=True
+    cp $sahara_templates_configs_path/pip.conf ~/.pip/pip.conf
+    cp $sahara_templates_configs_path/.pydistutils.cfg ~/.pydistutils.cfg
+}
+
+conf_has_option() {
+    local file=$1
+    local section=$2
+    local option=$3
+    local line
+
+    line=$(sed -ne "/^\[$section\]/,/^\[.*\]/ { /^$option[ \t]*=/ p; }" "$file")
+    [ -n "$line" ]
+}
+
+insert_config_value() {
+    local file=$1
+    local section=$2
+    local option=$3
+    local value=$4
+
+    [[ -z $section || -z $option ]] && return
+
+    if ! grep -q "^\[$section\]" "$file" 2>/dev/null; then
+        # Add section at the end
+        echo -e "\n[$section]" >>"$file"
+    fi
+    if ! conf_has_option "$file" "$section" "$option"; then
+        # Add it
+        sed -i -e "/^\[$section\]/ a\\
+$option = $value
+" "$file"
+    else
+        local sep=$(echo -ne "\x01")
+        # Replace it
+        sed -i -e '/^\['${section}'\]/,/^\[.*\]/ s'${sep}'^\('${option}'[ \t]*=[ \t]*\).*$'${sep}'\1'"${value}"${sep} "$file"
+    fi
+}
+
+insert_scenario_value() {
+    local config=$1
+    local value=$2
+    sed -i "s/%${value}%/${!value}/g" $config
+}
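insert_config_value is a small self-contained INI editor: it appends the section if it is missing, adds the option if absent (conf_has_option does the section-scoped lookup), and rewrites the value in place otherwise. insert_scenario_value uses bash indirect expansion (${!value}), so a placeholder's name doubles as the name of the variable holding its value. Usage, as the rest of this commit calls them:

    insert_config_value sahara.conf DEFAULT use_neutron true   # add or overwrite
    cluster_name="c1-12345-abcd1234"
    insert_scenario_value test-config.yaml cluster_name        # %cluster_name% -> value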
+
+write_sahara_main_conf() {
+    local conf_path=$1
+    local engine=$2
+    insert_config_value $conf_path DEFAULT infrastructure_engine $engine
+    insert_config_value $conf_path DEFAULT use_identity_api_v3 true
+    insert_config_value $conf_path DEFAULT use_neutron $USE_NEUTRON
+    insert_config_value $conf_path DEFAULT min_transient_cluster_active_time 30
+    insert_config_value $conf_path DEFAULT node_domain ci
+    insert_config_value $conf_path database connection mysql://sahara-citest:sahara-citest@localhost/sahara?charset=utf8
+    insert_config_value $conf_path keystone_authtoken auth_uri http://$OPENSTACK_HOST:5000/v2.0/
+    insert_config_value $conf_path keystone_authtoken identity_uri http://$OPENSTACK_HOST:35357/
+    insert_config_value $conf_path keystone_authtoken admin_user $OS_USERNAME
+    insert_config_value $conf_path keystone_authtoken admin_password $OS_PASSWORD
+    insert_config_value $conf_path keystone_authtoken admin_tenant_name $OS_TENANT_NAME
+
+    echo "----------- sahara.conf -----------"
+    cat $conf_path
+    echo "--------------- end ---------------"
+}
+
+print_python_env() {
+    [ -f $SAHARA_PATH/.tox/integration/bin/pip ] && $SAHARA_PATH/.tox/integration/bin/pip freeze > $WORKSPACE/logs/python-integration-env.txt
+    [ -f $SAHARA_PATH/.tox/scenario/bin/pip ] && $SAHARA_PATH/.tox/scenario/bin/pip freeze > $WORKSPACE/logs/python-scenario-env.txt
+    pip freeze > $WORKSPACE/logs/python-system-env.txt
+}
+
+failure() {
+    local reason=$1
+    print_python_env
+    echo "$reason"
+    exit 1
+}
+
+start_sahara() {
+    local conf_path=$1
+    local conf_dir=$(dirname $1)
+    mkdir $WORKSPACE/logs
+    sahara-db-manage --config-file $conf_path upgrade head || failure "Command 'sahara-db-manage' failed"
+    if [ "$DISTRIBUTE_MODE" == "True" ]; then
+        screen -dmS sahara-api /bin/bash -c "PYTHONUNBUFFERED=1 sahara-api --config-dir $conf_dir -d --log-file $WORKSPACE/logs/sahara-log-api.txt"
+        sleep 2
+        screen -dmS sahara-engine_1 /bin/bash -c "PYTHONUNBUFFERED=1 sahara-engine --config-dir $conf_dir -d --log-file $WORKSPACE/logs/sahara-log-engine-1.txt"
+        screen -dmS sahara-engine_2 /bin/bash -c "PYTHONUNBUFFERED=1 sahara-engine --config-dir $conf_dir -d --log-file $WORKSPACE/logs/sahara-log-engine-2.txt"
+    else
+        screen -dmS sahara-all /bin/bash -c "PYTHONUNBUFFERED=1 sahara-all --config-dir $conf_dir -d --log-file $WORKSPACE/logs/sahara-log.txt"
+    fi
+
+    api_responding_timeout=30
+    if ! timeout ${api_responding_timeout} sh -c "while ! curl -s http://127.0.0.1:8386/v1.1/ 2>/dev/null | grep -q 'Authentication required' ; do sleep 1; done"; then
+        failure "Sahara API failed to respond within ${api_responding_timeout} seconds"
+    fi
+}
+
+write_tests_conf() {
+    local test_conf=$1
+    local cluster_name=$2
+    local addr=$(ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}')
+    if [ "$USE_NEUTRON" == "true" ]; then
+        NETWORK="neutron"
+        TENANT_ID=$NEUTRON_LAB_TENANT_ID
+    else
+        NETWORK="nova-network"
+        TENANT_ID=$NOVA_NET_LAB_TENANT_ID
+    fi
+    if [[ "$JOB_NAME" =~ scenario ]]; then
+        insert_scenario_value $test_conf OS_USERNAME
+        insert_scenario_value $test_conf OS_PASSWORD
+        insert_scenario_value $test_conf OS_TENANT_NAME
+        insert_scenario_value $test_conf OPENSTACK_HOST
+        insert_scenario_value $test_conf NETWORK
+        insert_scenario_value $test_conf TENANT_ID
+        insert_scenario_value $test_conf cluster_name
+    else
+        cp $sahara_templates_configs_path/itest.conf.sample $test_conf
+        insert_config_value $test_conf COMMON OS_USERNAME $OS_USERNAME
+        insert_config_value $test_conf COMMON OS_PASSWORD $OS_PASSWORD
+        insert_config_value $test_conf COMMON OS_TENANT_NAME $OS_TENANT_NAME
+        insert_config_value $test_conf COMMON OS_TENANT_ID $TENANT_ID
+        insert_config_value $test_conf COMMON OS_AUTH_URL $OS_AUTH_URL
+        insert_config_value $test_conf COMMON NEUTRON_ENABLED $USE_NEUTRON
+        insert_config_value $test_conf COMMON SAHARA_HOST $addr
+        insert_config_value $test_conf COMMON CLUSTER_NAME $cluster_name
+    fi
+
+    echo "----------- tests config -----------"
+    cat $test_conf
+    echo "---------------- end ---------------"
+}
+
+run_tests() {
+    local config=$1
+    local plugin=$2
+    local concurrency=${3:-"1"}
+    echo "Integration tests are started"
+    export PYTHONUNBUFFERED=1
+    if [[ "$JOB_NAME" =~ scenario ]]
+    then
+        # Temporarily use an additional log file, due to the wrong status code from tox scenario tests
+        # tox -e scenario $config || failure "Integration tests have failed"
+        tox -e scenario $config | tee tox.log
+        STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
+        [ "$STATUS" != "0" ] && failure "Integration tests have failed"
+    else
+        tox -e integration -- $plugin --concurrency=$concurrency || failure "Integration tests have failed"
+    fi
+}
slave-scripts/functions-dib.sh (new executable file, 113 lines)
@@ -0,0 +1,113 @@
+#!/bin/bash -xe
+
+CUR_IMAGE=none
+
+register_new_image() {
+    local image_name=$1
+    local image_properties=$2
+    glance image-create --name $1 --file $1.qcow2 --disk-format qcow2 --container-format bare --is-public=true --property '_sahara_tag_ci'='True' $image_properties
+}
+
+delete_image() {
+    # "|| true" here, to avoid producing an error code in case of a missing image
+    glance image-delete $1 || true
+}
+
+upload_image() {
+    local plugin=$1
+    local username=$2
+    local image=$3
+    delete_image "$image"
+
+    case "$plugin" in
+        vanilla-1)
+            image_properties="--property '_sahara_tag_1.2.1'='True' --property '_sahara_tag_1.1.2'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'=${username}"
+            ;;
+        vanilla-2.4)
+            image_properties="--property '_sahara_tag_2.4.1'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'=${username}"
+            ;;
+        vanilla-2.6)
+            image_properties="--property '_sahara_tag_2.6.0'='True' --property '_sahara_tag_vanilla'='True' --property '_sahara_username'=${username}"
+            ;;
+        hdp1)
+            image_properties="--property '_sahara_tag_1.3.2'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'=${username}"
+            ;;
+        hdp2)
+            image_properties="--property '_sahara_tag_2.0.6'='True' --property '_sahara_tag_hdp'='True' --property '_sahara_username'=${username}"
+            ;;
+        cdh)
+            image_properties="--property '_sahara_tag_5.3.0'='True' --property '_sahara_tag_5'='True' --property '_sahara_tag_cdh'='True' --property '_sahara_username'=${username}"
+            ;;
+        spark)
+            image_properties="--property '_sahara_tag_spark'='True' --property '_sahara_tag_1.0.0'='True' --property '_sahara_username'=${username}"
+            ;;
+    esac
+    register_new_image "$image" "$image_properties"
+    CUR_IMAGE="$image"
+}
+
+rename_image() {
+    # 1 - source image, 2 - target image
+    glance image-update $1 --name $2
+}
+
+check_error_code() {
+    if [ "$1" != "0" -o ! -f "$2" ]; then
+        echo "$2 image wasn't built"
+        exit 1
+    fi
+}
+
+failure() {
+    local reason=$1
+    echo "$reason"
+    print_python_env
+    delete_image "$CUR_IMAGE"
+    exit 1
+}
+
+cleanup_image() {
+    local job_type=$1
+    local os=$2
+    if [ "$ZUUL_PIPELINE" == "check" -o "$ZUUL_BRANCH" != "master" ]; then
+        delete_image "$CUR_IMAGE"
+    else
+        case $job_type in
+            vanilla*)
+                hadoop_version=$(echo $job_type | awk -F '_' '{print $2}')
+                delete_image ${os}_vanilla_${hadoop_version}_latest
+                rename_image "$CUR_IMAGE" ${os}_vanilla_${hadoop_version}_latest
+                ;;
+            hdp_1)
+                delete_image sahara_hdp_1_latest
+                rename_image "$CUR_IMAGE" sahara_hdp_1_latest
+                ;;
+            hdp_2)
+                delete_image sahara_hdp_2_latest
+                rename_image "$CUR_IMAGE" sahara_hdp_2_latest
+                ;;
+            cdh)
+                delete_image ${os}_cdh_latest
+                rename_image "$CUR_IMAGE" ${os}_cdh_latest
+                ;;
+            spark)
+                delete_image sahara_spark_latest
+                rename_image "$CUR_IMAGE" sahara_spark_latest
+                ;;
+        esac
+    fi
+}
+
+run_tests_for_dib_image() {
+    local config=$1
+    local plugin=$2
+    echo "Integration tests are started"
+    export PYTHONUNBUFFERED=1
+    if [[ "$JOB_NAME" =~ scenario ]]; then
+        tox -e scenario $config | tee tox.log
+        STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
+        [ "$STATUS" != "0" ] && failure "Integration tests have failed"
+    else
+        tox -e integration -- $plugin --concurrency=1 || failure "Integration tests have failed"
+    fi
+}
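Together these helpers give dib.sh a compact image lifecycle: upload_image registers the freshly built image with plugin-specific tags and records it in CUR_IMAGE, failure() deletes it on error, and cleanup_image either drops it (check pipeline or stable branches) or promotes it to the *_latest name. Roughly, for one plugin:

    upload_image "spark" "ubuntu" "$spark_image"            # tag and register
    run_tests_for_dib_image "$tests_config_file" "$plugin"  # failure() cleans up on error
    cleanup_image "$job_type" "$image_type"                 # promote to sahara_spark_latest or delete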
@ -1,313 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
TMP_LOG=/tmp/tox.log
|
||||
LOG_FILE=/tmp/tox_log.txt
|
||||
BUILD_ID=dontKill
|
||||
TIMEOUT=60
|
||||
export ADDR=$(ifconfig eth0| awk -F ' *|:' '/inet addr/{print $4}')
|
||||
|
||||
# This function determines the OpenStack host by checking the slave's internal address (second octet)
check_openstack_host() {
    NETWORK=$(ifconfig eth0 | awk -F ' *|:' '/inet addr/{print $4}' | awk -F . '{print $2}')
    export OS_USERNAME=ci-user
    export OS_TENANT_NAME=ci
    export OS_PASSWORD=nova
    if [ "$NETWORK" == "0" ]; then
        export OPENSTACK_HOST="172.18.168.42"
        export HOST="c1"
        export TENANT_ID="$CI_LAB_TENANT_ID"
        export USE_NEUTRON=true
    else
        export OPENSTACK_HOST="172.18.168.43"
        export HOST="c2"
        export TENANT_ID="$STACK_SAHARA_TENANT_ID"
        export USE_NEUTRON=false
    fi
    export OS_AUTH_URL=http://$OPENSTACK_HOST:5000/v2.0/
}

create_database() {
    mysql -usahara-citest -psahara-citest -Bse "DROP DATABASE IF EXISTS sahara"
    mysql -usahara-citest -psahara-citest -Bse "create database sahara"
}

enable_pypi() {
    mkdir ~/.pip
    export PIP_USE_MIRRORS=True
    echo "
[global]
timeout = 60
index-url = http://172.18.168.44/simple/
extra-index-url = https://pypi.python.org/simple/
download-cache = /home/jenkins/.pip/cache/
[install]
use-mirrors = true
" > ~/.pip/pip.conf
    echo "
[easy_install]
index_url = http://172.18.168.44/simple/
" > ~/.pydistutils.cfg
}
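
# With the pip.conf above in place, installs resolve against the lab mirror
# at 172.18.168.44 first and fall back to upstream PyPI; for example
# (package name illustrative):
pip install python-saharaclient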

write_sahara_main_conf() {
    conf_path=$1
    echo "[DEFAULT]
" >> $conf_path
    if [ "$HEAT_JOB" == "True" ]
    then
        echo "infrastructure_engine=heat
" >> $conf_path
    else
        echo "infrastructure_engine=direct
" >> $conf_path
    fi
    if [ "$PLUGIN_TYPE" == "cdh" ]
    then
        echo "plugins=cdh
" >> $conf_path
    elif [ "$PLUGIN_TYPE" == "spark" ]
    then
        echo "plugins=spark
" >> $conf_path
    elif [ "$TEMPEST" == "True" ]; then
        echo "plugins=fake
" >> $conf_path
    fi
    echo "os_auth_host=$OPENSTACK_HOST
os_auth_port=5000
os_admin_username=ci-user
os_admin_password=nova
os_admin_tenant_name=ci
use_identity_api_v3=true
use_neutron=$USE_NEUTRON
min_transient_cluster_active_time=30
node_domain = nl
[database]
connection=mysql://sahara-citest:sahara-citest@localhost/sahara?charset=utf8
[keystone_authtoken]
auth_uri=http://$OPENSTACK_HOST:5000/v2.0/
identity_uri=http://$OPENSTACK_HOST:35357/
admin_user=ci-user
admin_password=nova
admin_tenant_name=ci" >> $conf_path

    echo "----------- sahara.conf -----------"
    cat $conf_path
    echo "----------- end of sahara.conf -----------"
}

start_sahara() {
    conf_path=$1
    conf_dir=$(dirname $1)
    mkdir logs
    if [ "$ZUUL_BRANCH" == "stable/icehouse" ]
    then
        sahara_bin=sahara-api
    else
        sahara_bin=sahara-all
    fi
    sahara-db-manage --config-file $conf_path upgrade head
    status=$?
    if [[ "$status" != 0 ]]
    then
        echo "Command 'sahara-db-manage' failed"
        exit 1
    fi
    if [ "$ZUUL_BRANCH" == "master" -a \( "$PLUGIN_TYPE" == "vanilla2" -a "$hadoop_version" == "2-6" -o "$PLUGIN_TYPE" == "hdp2" -o "$PLUGIN_TYPE" == "transient" \) -o "$hadoop_version" == "2-4" ]; then
        screen -dmS sahara-api /bin/bash -c "PYTHONUNBUFFERED=1 sahara-api --config-dir $conf_dir -d --log-file logs/sahara-log-api.txt"
        sleep 2
        screen -dmS sahara-engine_1 /bin/bash -c "PYTHONUNBUFFERED=1 sahara-engine --config-dir $conf_dir -d --log-file logs/sahara-log-engine-1.txt"
        screen -dmS sahara-engine_2 /bin/bash -c "PYTHONUNBUFFERED=1 sahara-engine --config-dir $conf_dir -d --log-file logs/sahara-log-engine-2.txt"
    else
        screen -dmS $sahara_bin /bin/bash -c "PYTHONUNBUFFERED=1 $sahara_bin --config-dir $conf_dir -d --log-file logs/sahara-log.txt"
    fi

    api_responding_timeout=30
    FAILURE=0
    if ! timeout ${api_responding_timeout} sh -c "while ! curl -s http://127.0.0.1:8386/v1.1/ 2>/dev/null | grep -q 'Authentication required' ; do sleep 1; done"; then
        echo "Sahara API failed to respond within ${api_responding_timeout} seconds"
        FAILURE=1
    fi
}
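
# Quick manual check that the detached services came up (illustrative, not
# part of this change): the API answering "Authentication required" on port
# 8386 is exactly what the curl loop above waits for.
screen -ls                            # should list sahara-api / sahara-engine_* sessions
curl -s http://127.0.0.1:8386/v1.1/   # an auth error here means the API is up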

insert_scenario_value() {
    value=$1
    sed -i "s/%${value}%/${!value}/g" $TESTS_CONFIG_FILE
}
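
# How the ${!value} indirection above works, in isolation (file contents are
# illustrative): the argument is expanded twice, once literally for the
# %PLACEHOLDER% token and once indirectly for the variable of the same name.
OS_USERNAME=ci-user
TESTS_CONFIG_FILE=/tmp/demo-scenario.yaml
echo "username: %OS_USERNAME%" > $TESTS_CONFIG_FILE
insert_scenario_value OS_USERNAME
cat $TESTS_CONFIG_FILE                # -> username: ci-user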

write_tests_conf() {
    if [[ "$JOB_NAME" =~ scenario ]]; then
        case $PLUGIN_TYPE in
            vanilla2)
                IMAGE_NAME="$VANILLA_TWO_IMAGE"
                ;;
            spark)
                IMAGE_NAME="$SPARK_IMAGE"
                ;;
        esac
        [ "$USE_NEUTRON" == "true" ] && NETWORK="neutron"
        [ "$USE_NEUTRON" == "false" ] && NETWORK="nova-network"
        insert_scenario_value OS_USERNAME
        insert_scenario_value OS_PASSWORD
        insert_scenario_value OS_TENANT_NAME
        insert_scenario_value OPENSTACK_HOST
        insert_scenario_value NETWORK
        insert_scenario_value CLUSTER_NAME
        insert_scenario_value TENANT_ID
        insert_scenario_value IMAGE_NAME
    else
        echo "[COMMON]
OS_USERNAME = 'ci-user'
OS_PASSWORD = 'nova'
OS_TENANT_NAME = 'ci'
OS_TENANT_ID = '$TENANT_ID'
OS_AUTH_URL = 'http://$OPENSTACK_HOST:5000/v2.0'
SAHARA_HOST = '$ADDR'
FLAVOR_ID = '20'
CLUSTER_CREATION_TIMEOUT = $TIMEOUT
CLUSTER_NAME = '$CLUSTER_NAME'
FLOATING_IP_POOL = 'public'
NEUTRON_ENABLED = $USE_NEUTRON
INTERNAL_NEUTRON_NETWORK = 'private'
JOB_LAUNCH_TIMEOUT = 15
HDFS_INITIALIZATION_TIMEOUT = 10

[VANILLA]
IMAGE_NAME = '$VANILLA_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_CINDER_TEST = '$SKIP_CINDER_TEST'
SKIP_CLUSTER_CONFIG_TEST = $SKIP_CLUSTER_CONFIG_TEST
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_MAP_REDUCE_TEST = $SKIP_MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
SKIP_TRANSIENT_CLUSTER_TEST = $SKIP_TRANSIENT_TEST
ONLY_TRANSIENT_CLUSTER_TEST = $SKIP_ONLY_TRANSIENT_TEST

[VANILLA_TWO]
IMAGE_NAME = '$VANILLA_TWO_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_CINDER_TEST = '$SKIP_CINDER_TEST'
SKIP_MAP_REDUCE_TEST = $SKIP_MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_EDP_JOB_TYPES = $SKIP_EDP_JOB_TYPES
" >> $TESTS_CONFIG_FILE

        if [ "$PLUGIN_TYPE" == "transient" ]; then
            if [ "$ZUUL_BRANCH" == "master" ]; then
                echo "HADOOP_VERSION = '2.6.0'
" >> $TESTS_CONFIG_FILE
            elif [[ "$ZUUL_BRANCH" =~ juno ]]; then
                echo "HADOOP_VERSION = '2.4.1'
" >> $TESTS_CONFIG_FILE
            fi
        fi

        if [ "$PLUGIN_TYPE" == "vanilla2" -a \( "$hadoop_version" == "2-4" -o "$hadoop_version" == "2-6" \) ]; then
            if [ "$hadoop_version" == "2-4" ]; then
                version="2.4.1"
            else
                version="2.6.0"
            fi
            echo "HADOOP_VERSION = '${version}'
HADOOP_EXAMPLES_JAR_PATH = '/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-${version}.jar'
" >> $TESTS_CONFIG_FILE
        fi

        echo "[HDP]
IMAGE_NAME = '$HDP_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_CINDER_TEST = '$SKIP_CINDER_TEST'
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_MAP_REDUCE_TEST = $SKIP_MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST

[HDP2]
IMAGE_NAME = '$HDP_TWO_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST

[CDH]
IMAGE_NAME = '$CDH_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_MAP_REDUCE_TEST = $SKIP_MAP_REDUCE_TEST
SKIP_SWIFT_TEST = $SKIP_SWIFT_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
SKIP_CINDER_TEST = $SKIP_CINDER_TEST
SKIP_EDP_TEST = $SKIP_EDP_TEST
CM_REPO_LIST_URL = 'http://$OPENSTACK_HOST/cdh-repo/cm.list'
CDH_REPO_LIST_URL = 'http://$OPENSTACK_HOST/cdh-repo/cdh.list'

[SPARK]
IMAGE_NAME = '$SPARK_IMAGE'
SKIP_ALL_TESTS_FOR_PLUGIN = $SKIP_ALL_TESTS_FOR_PLUGIN
SKIP_EDP_TEST = $SKIP_EDP_TEST
SKIP_SCALING_TEST = $SKIP_SCALING_TEST
" >> $TESTS_CONFIG_FILE
    fi
}

run_tests() {
    if [ "$FAILURE" = 0 ]; then
        echo "Integration tests are started"
        export PYTHONUNBUFFERED=1
        case $PLUGIN_TYPE in
            hdp1)
                if [ "$ZUUL_BRANCH" == "stable/icehouse" ]
                then
                    tox -e integration -- hdp --concurrency=1
                    STATUS=$?
                else
                    tox -e integration -- hdp1 --concurrency=1
                    STATUS=$?
                fi
                ;;
            hdp2)
                tox -e integration -- hdp2 --concurrency=1
                STATUS=$?
                ;;
            vanilla1)
                tox -e integration -- vanilla1 --concurrency=1
                STATUS=$?
                ;;
            vanilla2)
                if [[ "$JOB_NAME" =~ scenario ]]; then
                    tox -e scenario $TESTS_CONFIG_FILE | tee tox.log
                    STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
                else
                    tox -e integration -- vanilla2 --concurrency=1
                    STATUS=$?
                fi
                ;;
            transient)
                tox -e integration -- transient --concurrency=3
                STATUS=$?
                ;;
            cdh)
                tox -e integration -- cdh --concurrency=1
                STATUS=$?
                ;;
            spark)
                if [[ "$JOB_NAME" =~ scenario ]]; then
                    tox -e scenario $TESTS_CONFIG_FILE | tee tox.log
                    STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')
                else
                    tox -e integration -- spark --concurrency=1
                    STATUS=$?
                fi
                ;;
        esac
    fi
}

print_python_env() {
    sahara_workspace=$1
    cd $sahara_workspace
    .tox/integration/bin/pip freeze > logs/python-integration-env.txt
    pip freeze > logs/python-system-env.txt
}
0
slave-scripts/gate-sahara-pep8-trunk.sh
Normal file → Executable file
187
slave-scripts/gate-sahara.sh
Normal file → Executable file
@ -1,130 +1,99 @@
#!/bin/bash
#!/bin/bash -xe

. $FUNCTION_PATH
# source CI credentials
. /home/jenkins/ci_openrc
# source main functions
. $FUNCTION_PATH/functions-common.sh

check_openstack_host

sudo pip install .

WORKSPACE=${1:-$WORKSPACE}
ENGINE_TYPE=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')
JOB_TYPE=$(echo $JOB_NAME | awk -F '-' '{ print $5 }')

hadoop_version=1
CLUSTER_HASH=${CLUSTER_HASH:-$RANDOM}
SKIP_CINDER_TEST=False
SKIP_CLUSTER_CONFIG_TEST=False
SKIP_EDP_TEST=False
SKIP_MAP_REDUCE_TEST=True
SKIP_SWIFT_TEST=True
SKIP_SCALING_TEST=False
SKIP_TRANSIENT_TEST=True
SKIP_ONLY_TRANSIENT_TEST=False
SKIP_ALL_TESTS_FOR_PLUGIN=False
HDP_IMAGE=sahara_hdp_1_latest
HDP_TWO_IMAGE=sahara_hdp_2_latest
VANILLA_IMAGE=ubuntu_vanilla_1_latest
VANILLA_TWO_IMAGE=ubuntu_vanilla_2.4_latest
VANILLA_TWO_SIX_IMAGE=ubuntu_vanilla_2.6_latest
SPARK_IMAGE=sahara_spark_latest
HEAT_JOB=False
TESTS_CONFIG_FILE="$WORKSPACE/sahara/tests/integration/configs/itest.conf"
cluster_name="$HOST-$ZUUL_CHANGE-$CLUSTER_HASH"

if [[ "$ENGINE_TYPE" == 'heat' ]]
then
HEAT_JOB=True
echo "Heat detected"
fi
SAHARA_PATH=${1:-$WORKSPACE}
sahara_conf_path=$SAHARA_PATH/etc/sahara/sahara.conf
# default (deprecated) config file for integration tests
tests_config_file="$SAHARA_PATH/sahara/tests/integration/configs/itest.conf"
tests_config_file_template="$sahara_templates_configs_path/itest.conf.sample"

case $JOB_TYPE in
job_type=$(echo $JOB_NAME | awk -F '-' '{ print $5 }')
engine_type=$(echo $JOB_NAME | awk -F '-' '{ print $4 }')

# Image names
hdp_image=sahara_hdp_1_latest
hdp_two_image=sahara_hdp_2_latest
vanilla_image=ubuntu_vanilla_1_latest
vanilla_two_four_image=ubuntu_vanilla_2.4_latest
vanilla_two_six_image=ubuntu_vanilla_2.6_latest
spark_image=sahara_spark_latest
cdh_centos_image=centos_cdh_latest
cdh_ubuntu_image=ubuntu_cdh_latest

case $job_type in
hdp_1)
PLUGIN_TYPE=hdp1
echo "HDP1 detected"
plugin=hdp1
insert_config_value $tests_config_file_template HDP IMAGE_NAME $hdp_image
;;
hdp_2)
PLUGIN_TYPE=hdp2
hadoop_version=2
echo "HDP2 detected"
DISTRIBUTE_MODE=True
plugin=hdp2
insert_config_value $tests_config_file_template HDP2 IMAGE_NAME $hdp_two_image
;;
vanilla*)
hadoop_version=$(echo $JOB_TYPE | awk -F '_' '{ print $2}')
if [ "$hadoop_version" == "1" ]; then
PLUGIN_TYPE=vanilla1
echo "Vanilla detected"
else
PLUGIN_TYPE=vanilla2
if [ "$hadoop_version" == "2.4" ]; then
hadoop_version=2-4
[ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Vanilla 2.4 plugin is not supported in stable/icehouse" && exit 0
else
hadoop_version=2-6
VANILLA_TWO_IMAGE=$VANILLA_TWO_SIX_IMAGE
[ "$ZUUL_BRANCH" == "stable/icehouse" -o "$ZUUL_BRANCH" == "stable/juno" ] && echo "Vanilla 2.6 plugin is not supported in stable/icehouse and stable/juno" && exit 0
TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-vanilla-2.6.yaml"
fi
echo "Vanilla2 detected"
fi
vanilla_1)
plugin=vanilla1
insert_config_value $tests_config_file_template VANILLA IMAGE_NAME $vanilla_image
;;
vanilla_2.4)
DISTRIBUTE_MODE=True
plugin=vanilla2
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_four_image
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_VERSION 2.4.1
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_EXAMPLES_JAR_PATH "/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.4.1.jar"
;;
vanilla_2.6)
DISTRIBUTE_MODE=True
tests_config_file="$sahara_templates_configs_path/sahara-test-config-vanilla-2.6.yaml"
insert_scenario_value $tests_config_file vanilla_two_six_image
;;
transient)
PLUGIN_TYPE=transient
SKIP_EDP_TEST=False
SKIP_TRANSIENT_TEST=False
SKIP_ONLY_TRANSIENT_TEST=True
SKIP_TRANSIENT_JOB=True
TRANSIENT_JOB=True
[ "$ZUUL_BRANCH" == "master" ] && VANILLA_TWO_IMAGE=$VANILLA_TWO_SIX_IMAGE
[ "$HEAT_JOB" == "True" ] && [ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Heat_Transient plugin is not supported in stable/icehouse" && exit 0
echo "Transient detected"
plugin=transient
concurrency=3
insert_config_value $tests_config_file_template VANILLA_TWO SKIP_TRANSIENT_CLUSTER_TEST False
insert_config_value $tests_config_file_template VANILLA_TWO ONLY_TRANSIENT_CLUSTER_TEST True
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_four_image
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_VERSION 2.4.1
else
DISTRIBUTE_MODE=True
insert_config_value $tests_config_file_template VANILLA_TWO IMAGE_NAME $vanilla_two_six_image
insert_config_value $tests_config_file_template VANILLA_TWO HADOOP_VERSION 2.6.0
fi
;;
cdh*)
os_version=$(echo $JOB_TYPE | awk -F '_' '{ print $2}')
if [ "$os_version" == "centos" ]; then
CDH_IMAGE=centos_cdh_latest
hadoop_version=2c
plugin=cdh
insert_config_value $sahara_conf_path DEFAULT plugins cdh
if [[ "$job_type" =~ centos ]]; then
insert_config_value $tests_config_file_template CDH IMAGE_NAME $cdh_centos_image
else
CDH_IMAGE=ubuntu_cdh_latest
hadoop_version=2u
insert_config_value $tests_config_file_template CDH IMAGE_NAME $cdh_ubuntu_image
fi
SKIP_SCALING_TEST=True
PLUGIN_TYPE=cdh
[ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "CDH plugin is not supported in stable/icehouse" && exit 0
echo "CDH detected"
insert_config_value $tests_config_file_template CDH SKIP_SCALING_TEST True
;;
spark)
PLUGIN_TYPE=spark
SKIP_EDP_TEST=False
SKIP_SCALING_TEST=False
[ "$ZUUL_BRANCH" == "stable/icehouse" ] && echo "Spark plugin is not supported in stable/icehouse" && exit 0
[[ "$JOB_NAME" =~ scenario ]] && TESTS_CONFIG_FILE="$WORKSPACE/sahara-ci-config/config/sahara/sahara-test-config-spark.yaml"
echo "Spark detected"
plugin=spark
insert_config_value $sahara_conf_path DEFAULT plugins spark
if [ "$ZUUL_BRANCH" == "stable/juno" ]; then
insert_config_value $tests_config_file_template SPARK IMAGE_NAME $spark_image
else
tests_config_file="$sahara_templates_configs_path/sahara-test-config-spark.yaml"
insert_scenario_value $tests_config_file spark_image
fi
;;
esac
echo "$plugin detected"

cd $WORKSPACE
[ "$ZUUL_BRANCH" == "stable/icehouse" ] && sudo pip install -U -r requirements.txt

TOX_LOG=$WORKSPACE/.tox/venv/log/venv-1.log

create_database
[ "$ZUUL_BRANCH" != "master" ] && sudo pip install -U -r requirements.txt
sudo pip install .
enable_pypi

write_sahara_main_conf etc/sahara/sahara.conf
start_sahara etc/sahara/sahara.conf

cd $WORKSPACE

CLUSTER_NAME="$HOST-$CLUSTER_HASH-$ZUUL_CHANGE"
write_tests_conf

run_tests

print_python_env $WORKSPACE

if [ "$FAILURE" != 0 ]; then
exit 1
fi

if [[ "$STATUS" != 0 ]]
then
exit 1
fi
write_sahara_main_conf "$sahara_conf_path" "$engine_type"
write_tests_conf "$tests_config_file" "$cluster_name"
start_sahara "$sahara_conf_path" && run_tests "$tests_config_file" "$plugin" "$concurrency"
print_python_env
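
# Note on the recurring shebang change: "bash -xe" makes the gate scripts
# trace every command (-x) and abort on the first non-zero exit (-e), e.g.:
#
#   #!/bin/bash -xe
#   false              # -e stops the script here; -x has already echoed it
#   echo "unreached"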
20
slave-scripts/gate-saharaclient.sh
Normal file → Executable file
@ -1,12 +1,18 @@
#!/bin/bash
#!/bin/bash -xe

sudo rm -rf /tmp/sahara
git clone https://git.openstack.org/openstack/sahara /tmp/sahara
cd /tmp/sahara
# prepare test dependencies
tox -e integration --notest
if [ "$ZUUL_BRANCH" != "master" ]; then
git checkout "$ZUUL_BRANCH"
sudo pip install -U -r requirements.txt
fi

# change sahara-client
.tox/integration/bin/pip install $WORKSPACE
if [[ "$JOB_NAME" =~ scenario ]]; then
tox -e scenario --notest
.tox/scenario/bin/pip install $WORKSPACE
else
tox -e integration --notest
.tox/integration/bin/pip install $WORKSPACE
fi

bash -x /tmp/sahara-ci-config/slave-scripts/gate-sahara.sh /tmp/sahara
bash -x $WORKSPACE/sahara-ci-config/slave-scripts/gate-sahara.sh /tmp/sahara
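
# The pattern above, in isolation: pre-build the tox virtualenv without
# running tests, then overwrite the released client with the change under
# review so the suite exercises the patched code ($WORKSPACE holds the
# reviewed python-saharaclient checkout).
tox -e scenario --notest
.tox/scenario/bin/pip install $WORKSPACE
.tox/scenario/bin/pip freeze | grep saharaclient   # confirm the local install won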
6
slave-scripts/gate-ui-tests.sh
Normal file → Executable file
@ -1,8 +1,6 @@
#!/bin/bash -e
#!/bin/bash -xe

. $FUNCTION_PATH

check_openstack_host
. $FUNCTION_PATH/functions-common.sh

sudo iptables -F
if [ ! -d saharadashboard ]
29
slave-scripts/integration-cleanup.sh
Normal file → Executable file
@ -1,31 +1,30 @@
#!/bin/bash
#!/bin/bash -e

cd sahara-ci-config/slave-scripts
sleep 20

source $JENKINS_HOME/credentials
set -x
JOB_TYPE=$(echo $PREV_JOB | awk -F '-' '{ print $1 }')
HOST=$(echo $HOST_NAME | awk -F '-' '{ print $2 }')
if [ "$HOST" == "neutron" ]; then
export os_auth_url="http://$OPENSTACK_HOST_CI_LAB:5000/v2.0"
export os_image_endpoint="http://$OPENSTACK_HOST_CI_LAB:8004/v1/$CI_LAB_TENANT_ID"
HOST="c1"
job_type=$(echo $PREV_JOB | awk -F '-' '{ print $1 }')
if [[ "$HOST_NAME" =~ neutron ]]; then
export os_auth_url="http://$OPENSTACK_HOST_NEUTRON_LAB:5000/v2.0"
export os_image_endpoint="http://$OPENSTACK_HOST_NEUTRON_LAB:8004/v1/$NEUTRON_LAB_TENANT_ID"
host="c1"
else
export os_auth_url="http://$OPENSTACK_HOST_SAHARA_STACK:5000/v2.0"
export os_image_endpoint="http://$OPENSTACK_HOST_SAHARA_STACK:8004/v1/$STACK_SAHARA_TENANT_ID"
HOST="c2"
export os_auth_url="http://$OPENSTACK_HOST_NOVA_NET_LAB:5000/v2.0"
export os_image_endpoint="http://$OPENSTACK_HOST_NOVA_NET_LAB:8004/v1/$NOVA_NET_LAB_TENANT_ID"
host="c2"
fi
if [[ $(echo $PREV_JOB | awk -F '-' '{ print $2 }') =~ ui ]]; then
python cleanup.py cleanup .*$PREV_BUILD-selenium.*
elif [ $JOB_TYPE == "tempest" ]; then
elif [ "$job_type" == "tempest" ]; then
python cleanup.py cleanup .*sahara-cluster.*
else
ENGINE=$(echo $PREV_JOB | awk -F '-' '{ print $4 }')
if [ $ENGINE == 'heat' ]
engine=$(echo $PREV_JOB | awk -F '-' '{ print $4 }')
if [ "$engine" == "heat" ]
then
python cleanup.py cleanup-heat .*$HOST-$CLUSTER_HASH-$CHANGE_NUMBER.*
python cleanup.py cleanup-heat .*$host-$CHANGE_NUMBER-$CLUSTER_HASH.*
else
python cleanup.py cleanup .*$HOST-$CLUSTER_HASH-$CHANGE_NUMBER.*
python cleanup.py cleanup .*$host-$CHANGE_NUMBER-$CLUSTER_HASH.*
fi
fi
87
slave-scripts/tempest.sh
Normal file → Executable file
@ -1,10 +1,14 @@
#!/bin/bash
#!/bin/bash -xe

. $FUNCTION_PATH
# source CI credentials
. /home/jenkins/ci_openrc
# source main functions
. $FUNCTION_PATH/functions-common.sh

PROJECT=$(echo $JOB_NAME | awk -F '-' '{ print $2 }')
project=$(echo $JOB_NAME | awk -F '-' '{ print $2 }')
image_id=$(glance image-list | grep ubuntu-test-image | awk '{print $2}')

if [ "$PROJECT" == "sahara" ]; then
if [ "$project" == "sahara" ]; then
SAHARA_PATH="$WORKSPACE"
git clone http://github.com/openstack/python-saharaclient /tmp/saharaclient
cd /tmp/saharaclient
@ -15,58 +19,43 @@ else
git clone http://github.com/openstack/sahara $SAHARA_PATH
sudo pip install .
fi

check_openstack_host

TEMPEST=True
IMAGE_ID=$(glance image-list | grep ubuntu-test-image | awk '{print $2}')
sahara_conf_path=$SAHARA_PATH/etc/sahara/sahara.conf

cd /home/jenkins

cp -r $SAHARA_PATH/sahara/tests/tempest tempest/

cd tempest
# create tempest conf file
insert_config_value etc/tempest.conf DEFAULT lock_path /tmp
insert_config_value etc/tempest.conf identity admin_password $OS_PASSWORD
insert_config_value etc/tempest.conf identity admin_tenant_name $OS_TENANT_NAME
insert_config_value etc/tempest.conf identity admin_username $OS_USERNAME
insert_config_value etc/tempest.conf identity password $OS_PASSWORD
insert_config_value etc/tempest.conf identity tenant_name $OS_TENANT_NAME
insert_config_value etc/tempest.conf identity username $OS_USERNAME
insert_config_value etc/tempest.conf identity uri "http://$OPENSTACK_HOST:5000/v2.0/"
insert_config_value etc/tempest.conf identity uri_v3 "http://$OPENSTACK_HOST:5000/v3/"
insert_config_value etc/tempest.conf service_available neutron $USE_NEUTRON
insert_config_value etc/tempest.conf service_available sahara true

echo "[DEFAULT]
lock_path = /tmp
# create tests file
[ "$USE_NEUTRON" == "true" ] && tenant_id=$NEUTRON_LAB_TENANT_ID
[ "$USE_NEUTRON" == "false" ] && tenant_id=$NOVA_NET_LAB_TENANT_ID
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing flavor_id 2
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing sahara_url "http://localhost:8386/v1.1/$tenant_id"
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing ssh_username ubuntu
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing floating_ip_pool public
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing private_network private
insert_config_value tempest/scenario/data_processing/etc/sahara_tests.conf data_processing fake_image_id $image_id

[identity]
admin_password = nova
admin_tenant_name = ci
admin_username = ci-user
password = nova
tenant_name = ci
uri = http://$OPENSTACK_HOST:5000/v2.0/
uri_v3 = http://$OPENSTACK_HOST:5000/v3/
username = ci-user

[service_available]
neutron = $USE_NEUTRON
sahara = true
" > etc/tempest.conf

echo "[data_processing]
flavor_id=2
ssh_username=ubuntu
floating_ip_pool=public
private_network=private
fake_image_id=$IMAGE_ID
" > tempest/scenario/data_processing/etc/sahara_tests.conf

create_database
enable_pypi

sudo pip install $SAHARA_PATH/.
write_sahara_main_conf $SAHARA_PATH/etc/sahara/sahara.conf
start_sahara $SAHARA_PATH/etc/sahara/sahara.conf

tox -e all -- tempest.scenario.data_processing.client_tests | tee tox.log
insert_config_value $sahara_conf_path DEFAULT plugins fake
write_sahara_main_conf $sahara_conf_path "direct"
start_sahara $sahara_conf_path
# Temporarily use an additional log file, due to the wrong status code from tox scenario tests
# tox -e all -- tempest.scenario.data_processing.client_tests || failure "Tempest tests have failed"
tox -e all -- tempest.scenario.data_processing.client_tests | tee tox.log
STATUS=$(grep "\ -\ Failed" tox.log | awk '{print $3}')

mv logs $WORKSPACE
print_python_env $WORKSPACE

if [ "$STATUS" != "0" ]
then
exit 1
fi
[ "$STATUS" != "0" ] && failure "Tempest tests have failed"
print_python_env
10
slave-scripts/update_config.sh
Normal file → Executable file
@ -1,8 +1,8 @@
#!/bin/bash
#!/bin/bash -e

source $JENKINS_HOME/credentials
sed "s%-CI_LAB_TENANT_ID-%$CI_LAB_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
sed "s%-STACK_SAHARA_TENANT_ID-%$STACK_SAHARA_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
sed "s%-NEUTRON_LAB_TENANT_ID-%$NEUTRON_LAB_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py
sed "s%-NOVA_NET_LAB_TENANT_ID-%$NOVA_NET_LAB_TENANT_ID%g" -i $WORKSPACE/config/zuul/openstack_functions.py

sudo su - jenkins -c "cat $WORKSPACE/slave-scripts/credentials.conf > /etc/jenkins_jobs/credentials.conf"
sudo su - zuul -c "cat $WORKSPACE/config/zuul/zuul.conf > /etc/zuul/zuul.conf"
@ -12,8 +12,8 @@ sudo su - zuul -c "cat $WORKSPACE/config/zuul/logging.conf > /etc/zuul/logging.c
sudo su - zuul -c "cat $WORKSPACE/config/zuul/openstack_functions.py > /etc/zuul/openstack_functions.py"
sudo service zuul reload

sed "s%- net-id: 'CI_LAB_PRIVATE_NETWORK_ID'%- net-id: '$CI_LAB_PRIVATE_NETWORK_ID'%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sed "s%- net-id: 'STACK_SAHARA_PRIVATE_NETWORK_ID'%- net-id: '$STACK_SAHARA_PRIVATE_NETWORK_ID'%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sed "s%- net-id: 'NEUTRON_LAB_PRIVATE_NETWORK_ID'%- net-id: '$NEUTRON_LAB_PRIVATE_NETWORK_ID'%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sed "s%- net-id: 'NOVA_NET_LAB_PRIVATE_NETWORK_ID'%- net-id: '$NOVA_NET_LAB_PRIVATE_NETWORK_ID'%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sed "s%apikey: JENKINS_API_KEY%apikey: $JENKINS_API_KEY%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sed "s%credentials-id: CREDENTIALS_ID%credentials-id: $CREDENTIALS_ID%g" -i $WORKSPACE/config/nodepool/sahara.yaml
sudo su - nodepool -c "cat $WORKSPACE/config/nodepool/sahara.yaml > /etc/nodepool/nodepool.yaml"
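
# The substitution pattern above, in isolation (values illustrative): "%" is
# used as the sed delimiter so that IDs or paths containing "/" cannot break
# the expression.
NEUTRON_LAB_TENANT_ID=abc123
echo "tenant: -NEUTRON_LAB_TENANT_ID-" > /tmp/demo_functions.py
sed "s%-NEUTRON_LAB_TENANT_ID-%$NEUTRON_LAB_TENANT_ID%g" -i /tmp/demo_functions.py
cat /tmp/demo_functions.py            # -> tenant: abc123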
2
slave-scripts/update_pool.sh
Normal file → Executable file
@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/bash -e

for i in $(nodepool-client list | grep ci-lab | awk -F '|' '{ print $2 }')
do