Added slave scripts to puppet.

Added all of the files from slave_scripts to jenkins_slave.
Includes the changes I submitted in https://review.openstack.org/6063
Changed the jobs to use these rather than the ones from openstack-ci.

Change-Id: I4bd9e78b3d9c0cbbc6a0b17fc95c60458b06a0f7
This commit is contained in:
Monty Taylor 2012-04-01 21:02:25 -07:00
parent 59b1f43e53
commit 17b2bda1a0
29 changed files with 631 additions and 9 deletions

View File

@ -6,6 +6,6 @@
<selector class="hudson.plugins.copyartifact.StatusBuildSelector"/> <selector class="hudson.plugins.copyartifact.StatusBuildSelector"/>
</hudson.plugins.copyartifact.CopyArtifact> </hudson.plugins.copyartifact.CopyArtifact>
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/copy-bundle.sh</command> <command>/usr/local/jenkins/slave_scripts/copy-bundle.sh</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>
</builders> </builders>

View File

@ -1,4 +1,4 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/run-cover.sh</command> <command>/usr/local/jenkins/slave_scripts/run-cover.sh</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,4 +1,4 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/run-docs.sh</command> <command>/usr/local/jenkins/slave_scripts/run-docs.sh</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,3 +1,3 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/gerrit-git-prep.sh <%= site %> </command> <command>/usr/local/jenkins/slave_scripts/gerrit-git-prep.sh <%= site %> </command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -17,6 +17,6 @@ mkdir build
#export DO_UPLOAD=&quot;no&quot; #export DO_UPLOAD=&quot;no&quot;
export PROJECT=&quot;<%= project %>&quot; export PROJECT=&quot;<%= project %>&quot;
export GERRIT_REFNAME=$BRANCH export GERRIT_REFNAME=$BRANCH
/bin/bash -xe ~/openstack-ci/slave_scripts/create-ppa-package.sh /usr/local/jenkins/slave_scripts/create-ppa-package.sh
</command> </command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,3 +1,3 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/run-tox.sh 26</command> <command>/usr/local/jenkins/slave_scripts/run-tox.sh 26</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,3 +1,3 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/run-tox.sh 26</command> <command>/usr/local/jenkins/slave_scripts/run-tox.sh 26</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,3 +1,3 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/create-tarball.sh</command> <command>/usr/local/jenkins/slave_scripts/create-tarball.sh</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -1,3 +1,3 @@
<hudson.tasks.Shell> <hudson.tasks.Shell>
<command>bash -x ~/openstack-ci/slave_scripts/build-bundle.sh</command> <command>/usr/local/jenkins/slave_scripts/build-bundle.sh</command>
</hudson.tasks.Shell> </hudson.tasks.Shell>

View File

@ -0,0 +1,5 @@
#!/bin/bash -xe
# Copy the rsyslog output for every host involved in a baremetal
# deployment into the Jenkins workspace so it can be archived as a
# build artifact.
#
# Required environment:
#   HEAD_HOST     - hostname of the head node
#   COMPUTE_HOSTS - comma-separated list of compute node hostnames
#   WORKSPACE     - Jenkins workspace directory (logs/ must already exist)

# Host list is intentionally unquoted: we rely on word splitting.
for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
    cp "/var/log/orchestra/rsyslog/$host/syslog" "$WORKSPACE/logs/$host-syslog.txt"
done

View File

@ -0,0 +1,44 @@
#!/bin/bash -xe
# Reset every baremetal test host to a pristine root filesystem, wait for
# the head node to return, pre-seed caches, then kick off a multi-node
# devstack deployment via build_bm_multi.sh.
#
# Required environment:
#   HEAD_HOST     - hostname of the head node
#   COMPUTE_HOSTS - comma-separated list of compute node hostnames
#   IMAGE_URLS    - comma-separated list of image tarball URLs to pre-fetch
WORKSPACE=`pwd`
mkdir -p logs
rm -f logs/*
# Run from the directory containing this script so lvm-kexec-reset.sh
# (shipped alongside it) can be found below.
cd `dirname "$0"`
echo "Jenkins: resetting hosts..."
for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
scp lvm-kexec-reset.sh root@$host:/var/tmp/
ssh root@$host /var/tmp/lvm-kexec-reset.sh
sudo rm -f /var/log/orchestra/rsyslog/$host/syslog
done
# Have rsyslog reopen log files we rm'd from under it
sudo restart rsyslog
# wait for the host to come up (2 ping responses or timeout after 5 minutes)
# NOTE(review): ping.py appears to exit on the first reply, not two —
# confirm which behavior is intended.
echo "Jenkins: Waiting for head host to return after reset..."
sleep 10
if ! timeout 300 ./ping.py $HEAD_HOST; then
echo "Jenkins: ERROR: Head node did not come back up after reset"
exit 1
fi
echo "Jenkins: Pre-populating PIP cache"
for host in $HEAD_HOST ${COMPUTE_HOSTS//,/ }; do
scp -r ~/cache/pip root@$host:/var/cache/pip
done
echo "Jenkins: Caching images."
cd ~/devstack
source stackrc
for image_url in ${IMAGE_URLS//,/ }; do
# Downloads the image (uec ami+aki style), then extracts it.
IMAGE_FNAME=`echo "$image_url" | python -c "import sys; print sys.stdin.read().split('/')[-1]"`
# NOTE(review): IMAGE_NAME is computed but never used below — verify
# whether it is still needed.
IMAGE_NAME=`echo "$IMAGE_FNAME" | python -c "import sys; print sys.stdin.read().split('.tar.gz')[0].split('.tgz')[0]"`
if [ ! -f files/$IMAGE_FNAME ]; then
wget -c $image_url -O files/$IMAGE_FNAME
fi
done
echo "Jenkins: Executing build_bm_multi.sh."
./tools/build_bm_multi.sh

View File

@ -0,0 +1,22 @@
#!/bin/bash -xe
# Re-enable network booting for each baremetal test node in cobbler and
# reboot it, so each node reinstalls its operating system from scratch.
sudo cobbler sync
# Enable netboot on every node first, then reboot them all, preserving
# the original edit-everything-then-reboot-everything ordering.
for node in baremetal{1..9}
do
    sudo cobbler system edit --netboot-enabled=Y --name="$node"
done
for node in baremetal{1..9}
do
    sudo cobbler system reboot --name="$node"
done

View File

@ -0,0 +1,29 @@
#!/bin/bash -xe
# Build a pip bundle of each remote branch's dependencies and stash it
# under jenkins_venvs/BRANCH/.cache.bundle for later test jobs to reuse.
# Make sure there is a location on this builder to cache pip downloads
mkdir -p ~/cache/pip
export PIP_DOWNLOAD_CACHE=~/cache/pip
# Start with a clean slate
rm -fr jenkins_venvs
mkdir -p jenkins_venvs
# Build a venv for every known branch
for branch in $(git branch -r | grep "origin/" | grep -v HEAD | sed "s/origin\///")
do
    echo "Building venv for $branch"
    git checkout "$branch"
    mkdir -p "jenkins_venvs/$branch"
    bundle="jenkins_venvs/$branch/.cache.bundle"
    if [ -f tools/pip-requires ] ; then
        if [ -f tools/test-requires ] ; then
            pip bundle "$bundle" -r tools/pip-requires -r tools/test-requires
        else
            pip bundle "$bundle" -r tools/pip-requires
        fi
    else
        # Project without requirements files: bundle the common basics.
        pip bundle "$bundle" distribute openstack.nose_plugin
    fi
done
git checkout master

View File

@ -0,0 +1,32 @@
#!/bin/bash -xe
# Build a relocatable virtualenv plus a pip bundle for every remote
# branch of the project and archive them under jenkins_venvs/BRANCH/.
# Make sure there is a location on this builder to cache pip downloads
mkdir -p ~/cache/pip
export PIP_DOWNLOAD_CACHE=~/cache/pip
# Start with a clean slate
rm -fr jenkins_venvs
mkdir -p jenkins_venvs
# Update the list of remote refs to pick up new branches
git remote update
# Build a venv for every known branch
for branch in $(git branch -r | grep "origin/" | grep -v HEAD | sed "s/origin\///")
do
    echo "Building venv for $branch"
    git checkout "$branch"
    mkdir -p "jenkins_venvs/$branch"
    python tools/install_venv.py
    # Rewrite absolute paths so the venv can be moved to a slave later.
    virtualenv --relocatable .venv
    if [ -e tools/test-requires ]
    then
        pip bundle .cache.bundle -r tools/pip-requires -r tools/test-requires
    else
        pip bundle .cache.bundle -r tools/pip-requires
    fi
    tar cvfz "jenkins_venvs/$branch/venv.tgz" .venv .cache.bundle
    rm -fr .venv
    mv .cache.bundle "jenkins_venvs/$branch/"
done
git checkout master

View File

@ -0,0 +1,15 @@
#!/bin/bash -xe
# Fetch the pre-built pip bundle for the branch under test into the
# workspace; discard it again if this change touches the requirements
# files, so tox resolves dependencies from scratch instead.
# Support jobs, such as nova-docs, which are not yet triggered by gerrit
if [ "x$GERRIT_BRANCH" = "x" ] ; then
    GERRIT_BRANCH=master
fi
mv "jenkins_venvs/$GERRIT_BRANCH/.cache.bundle" .
rm -fr jenkins_venvs
# Rewritten from the ambiguous, obsolescent test(1) "-a"/"-o" operators;
# this preserves the original (A && B) || C grouping explicitly.
if { [ -f tools/test-requires ] &&
     [ "$(git diff HEAD^1 tools/test-requires 2>/dev/null | wc -l)" -gt 0 ]; } ||
   [ "$(git diff HEAD^1 tools/pip-requires 2>/dev/null | wc -l)" -gt 0 ]
then
    rm .cache.bundle
fi

View File

@ -0,0 +1,43 @@
#!/bin/bash -xe
# Unpack the pre-built virtualenv (and/or pip bundle) for the branch
# under test into the workspace, fixing up the venv's absolute paths
# for its new location.  If the change modifies the requirements files,
# rebuild the venv from scratch instead.
if [ -n "$GERRIT_REFNAME" ]
then
GERRIT_BRANCH=$GERRIT_REFNAME
fi
# Support jobs, such as nova-docs, which are not yet triggered by gerrit
if [ "x$GERRIT_BRANCH" = "x" ] ; then
GERRIT_BRANCH=master
fi
mv jenkins_venvs/$GERRIT_BRANCH/venv.tgz .
rm -fr jenkins_venvs
tar xfz venv.tgz
rm venv.tgz
if [ -f .cache.bundle ] ; then
# A bundle was archived alongside the venv: rebuild a fresh venv from it.
rm -rf .venv
virtualenv --no-site-packages .venv
pip install --upgrade -E .venv file://`pwd`/.cache.bundle
rm .cache.bundle
else
# No bundle: patch the unpacked venv's absolute paths to point at the
# current workspace instead of the builder it was created on.
perl -MCwd -ple '$CWD=Cwd::abs_path();s,^VIRTUAL_ENV=.*,VIRTUAL_ENV="$CWD/.venv",' -i .venv/bin/activate
# This gets easy-install.pth as well as $PROJECT.pth. Examples for glance:
# easy-install.pth looks like: /home/jenkins/workspace/glance-venv/.venv/src/keystone
# $PROJECT.pth looks like: /home/jenkins/workspace/glance-venv
perl -MCwd -ple '$CWD=Cwd::abs_path();s,^/.*/workspace/[^/]+(/.*)$,$CWD$1,' -i .venv/lib/python2.7/site-packages/*.pth
# This is needed for the keystone install for glance
perl -MCwd -ple '$CWD=Cwd::abs_path();s,^/.*/.venv/src/(.*),$CWD/.venv/src/$1,' -i .venv/lib/python2.7/site-packages/*.egg-link
# This is needed for the keystone install for glance
perl -MCwd -ple '$CWD=Cwd::abs_path();s,/.*/.venv/src/(.*),$CWD/.venv/src/$1,' -i .venv/bin/*
fi
if [ -f tools/test-requires ] ; then
TEST_REQUIRES='-r tools/test-requires'
fi
# If the requirements changed in this commit, throw the venv away and
# rebuild it so the new requirements take effect.
# NOTE(review): "-a"/"-o" in test(1) are obsolescent and group as
# (A && B) || C here — confirm that grouping is the intent.
if [ -f tools/test-requires -a \
`git diff HEAD^1 tools/test-requires 2>/dev/null | wc -l` -gt 0 -o \
`git diff HEAD^1 tools/pip-requires 2>/dev/null | wc -l` -gt 0 ]
then
rm -rf .venv
virtualenv --no-site-packages .venv
pip -E .venv install --upgrade -r tools/pip-requires $TEST_REQUIRES
fi

View File

@ -0,0 +1,78 @@
#!/bin/bash -xe
# Build a source package from the tarball in build/dist/ using the
# project's bzr packaging branch, and upload it to the PPA matching
# the branch being built.
#
# Required environment:
#   PROJECT        - project name (e.g. nova)
#   GERRIT_REFNAME - branch name; selects the target PPA
#   BUILD_NUMBER   - Jenkins build number, used in the package version
# Optional: OPENSTACK_RELEASE, BZR_BRANCH, PACKAGING_REVNO, series,
#           EXTRAVERSION, DO_UPLOAD
if [ -z "$PROJECT" ]
then
echo '$PROJECT not set.'
exit 1
fi
# Map the branch to a destination PPA; anything else is a no-op build.
case "$GERRIT_REFNAME" in
master)
export PPAS="ppa:openstack-ppa/bleeding-edge"
;;
milestone-proposed)
export PPAS="ppa:openstack-ppa/milestone-proposed"
;;
*)
echo "No PPA defined for branch $GERRIT_REFNAME"
exit 0
esac
HUDSON=http://localhost:8080/
# We keep packaging for openstack trunk in lp:~o-u-p/$project/ubuntu
# For a release (diablo, essex), it's in lp:~o-u-p/$project/$release
OPENSTACK_RELEASE=${OPENSTACK_RELEASE:-ubuntu}
BZR_BRANCH=${BZR_BRANCH:-lp:~openstack-ubuntu-packagers/$PROJECT/${OPENSTACK_RELEASE}}
PPAS=${PPAS:-ppa:$PROJECT-core/trunk}
PACKAGING_REVNO=${PACKAGING_REVNO:--1}
series=${series:-lucid}
cd build
# Derive the upstream version from the tarball name:
# dist/PROJECT-VERSION.tar.gz -> VERSION
tarball="$(echo dist/$PROJECT*.tar.gz)"
version="${tarball%.tar.gz}"
version="${version#*$PROJECT-}"
base_version=$version
if [ -n "${EXTRAVERSION}" ]
then
# Splice EXTRAVERSION in just before the "~" suffix of the version.
version="${version%~*}${EXTRAVERSION}~${version#*~}"
fi
tar xvzf "${tarball}"
echo ln -s "${tarball}" "${PROJECT}_${version}.orig.tar.gz"
ln -s "${tarball}" "${PROJECT}_${version}.orig.tar.gz"
# Overlay packaging
# (Intentionally using the natty branch. For these PPA builds, we don't need to diverge
# (yet, at least), so it makes the branch management easier this way.
# Note: Doing a checkout and deleting .bzr afterwards instead of just doing an export,
# because export refuses to overlay over an existing directory, so this was easier.
# (We need to not have the .bzr in there, otherwise vcsversion.py might get overwritten)
echo bzr checkout -r ${PACKAGING_REVNO} --lightweight $BZR_BRANCH $PROJECT-*
bzr checkout -r ${PACKAGING_REVNO} --lightweight $BZR_BRANCH $PROJECT-*
cd $PROJECT-*
# Record the packaging revision for the changelog, then strip VCS
# metadata out of the source tree.
if [ -d .git ]
then
PACKAGING_REVNO="$(git log --oneline | wc -l)"
rm -rf .git
else
PACKAGING_REVNO="$(bzr revno --tree)"
rm -rf .bzr
fi
# Please don't change this. It's the only way I'll get notified
# if an upload fails.
export DEBFULLNAME="Soren Hansen"
export DEBEMAIL="soren@openstack.org"
buildno=$BUILD_NUMBER
pkgversion="${version}-0ubuntu0~${series}${buildno}"
# Add a changelog entry and build a signed source package.
dch -b --force-distribution --v "${pkgversion}" "Automated PPA build. Packaging revision: ${PACKAGING_REVNO}." -D $series
dpkg-buildpackage -rfakeroot -S -sa -nc -k32EE128C
if ! [ "$DO_UPLOAD" = "no" ]
then
for ppa in $PPAS
do
dput --force $ppa "../${PROJECT}_${pkgversion}_source.changes"
done
fi
cd ..

View File

@ -0,0 +1,84 @@
#!/bin/bash -xe
# Build an sdist tarball for $PROJECT and rename it to carry a snapshot
# version derived from the milestone file in the refs/meta/openstack
# branch, the date, and the commit count.
#
# Required environment:
#   PROJECT        - project name
#   GERRIT_REFNAME - branch being built
# Optional: SEPARATOR, datestamp, revno
# In case we start doing something more sophisticated with other refs
# later (such as tags).
BRANCH=$GERRIT_REFNAME
if [ $BRANCH == "milestone-proposed" ]
then
REVNOPREFIX="r"
fi
if [[ $BRANCH =~ ^stable/.*$ ]]
then
NOMILESTONE="true"
fi
# Should be ~ if tarball version is the one we're working *toward*. (By far preferred!)
# Should be + if tarball version is already released and we're moving forward after it.
SEPARATOR=${SEPARATOR:-'~'}
if [ -z "$PROJECT" ]
then
echo '$PROJECT not set.'
exit 1
fi
# Print the next snapshot version on stdout, e.g. "2012.1~20120401.r123".
find_next_version() {
datestamp="${datestamp:-$(date +%Y%m%d)}"
# Fetch gerrit's meta refs, which carry the per-branch milestone file.
git fetch origin +refs/meta/*:refs/remotes/meta/*
milestonever="$(git show meta/openstack/release:${BRANCH})"
# NOTE(review): under "bash -e" a failing "git show" aborts at the
# assignment above, so this $? check may never see a failure — verify
# the missing-milestone fallback actually triggers.
if [ $? != 0 ]
then
if [ "$NOMILESTONE" = "true" ]
then
milestonever=""
else
echo "Milestone file ${BRANCH} not found. Bailing out." >&2
exit 1
fi
fi
version="$milestonever"
if [ -n "$version" ]
then
version="${version}~"
fi
# Commit count stands in for a monotonically increasing revision number.
revno="${revno:-$(git log --oneline | wc -l)}"
version="$(printf %s%s.%s%d "$version" "$datestamp" "$REVNOPREFIX" "$revno")"
printf "%s" "$version"
}
snapshotversion=$(find_next_version)
rm -f dist/*.tar.gz
if [ -f setup.py ] ; then
# Try tox and cached bundles first
if [ -e ".cache.bundle" ] ; then
if [ -f tox.ini ] ; then
# Prefer the jenkins-specific tox env when the project defines one.
if tox --showconfig | grep testenv | grep jenkinsvenv >/dev/null 2>&1
then
tox -ejenkinsvenv python setup.py sdist
else
tox -evenv python setup.py sdist
fi
else
# No tox.ini: build a venv from the bundle by hand and use it.
rm -rf .venv
mv .cache.bundle .cache.pybundle
virtualenv --no-site-packages .venv
.venv/bin/pip install .cache.pybundle
rm .cache.pybundle
tools/with_venv.sh python setup.py sdist
fi
# Try old style venv's second
elif [ -d .venv -a -f tools/with_venv.sh ] ; then
tools/with_venv.sh python setup.py sdist
# Last but not least, just make a tarball
else
python setup.py sdist
fi
# There should only be one, so this should be safe.
tarball=$(echo dist/*.tar.gz)
echo mv "$tarball" "dist/$(basename $tarball .tar.gz)${SEPARATOR}${snapshotversion}.tar.gz"
mv "$tarball" "dist/$(basename $tarball .tar.gz)${SEPARATOR}${snapshotversion}.tar.gz"
fi

View File

@ -0,0 +1,35 @@
#!/bin/bash -xe
SITE=$1
if [ -z "$SITE" ]
then
echo "The site name (eg 'openstack') must be the first argument."
exit 1
fi
if [ -z "$GERRIT_NEWREV" ] && [ -z "$GERRIT_REFSPEC" ]
then
echo "This job may only be triggered by Gerrit."
exit 1
fi
if [[ ! -e .git ]]
then
git clone https://review.$SITE.org/p/$GERRIT_PROJECT .
fi
git remote update || git remote update # attempt to work around bug #925790
git reset --hard
git clean -x -f -d -q
if [ ! -z "$GERRIT_REFSPEC" ]
then
git checkout $GERRIT_BRANCH
git reset --hard remotes/origin/$GERRIT_BRANCH
git clean -x -f -d -q
git fetch https://review.$SITE.org/p/$GERRIT_PROJECT $GERRIT_REFSPEC
git merge FETCH_HEAD
else
git checkout $GERRIT_NEWREV
git reset --hard $GERRIT_NEWREV
git clean -x -f -d -q
fi

View File

@ -0,0 +1,7 @@
#!/bin/bash -x
# Roll this machine back to a pristine root filesystem: keep the current
# root as last_root (for postmortem), snapshot a fresh root from
# orig_root, and reboot in place via kexec.
lvremove -f /dev/main/last_root
lvrename /dev/main/root last_root
lvcreate -L20G -s -n root /dev/main/orig_root
# Reuse the currently running kernel's command line for the new boot.
APPEND="`cat /proc/cmdline`"
kexec -l /vmlinuz --initrd=/initrd.img --append="$APPEND"
# Detach and delay the kexec so the ssh session that invoked this
# script can disconnect cleanly before the machine reboots.
nohup bash -c "sleep 2; kexec -e" </dev/null >/dev/null 2>&1 &

View File

@ -0,0 +1,11 @@
#!/usr/bin/env python
# Wait until a host answers an ICMP echo request.
#
# Usage: ping.py HOSTNAME
#
# Runs "ping HOSTNAME" and exits 0 as soon as a reply ("bytes from")
# appears.  Exits 1 if ping terminates without ever replying (e.g.
# unresolvable host) -- the original spun forever on readline() EOF.
# Callers impose the overall deadline externally (e.g. timeout(1)).
import sys
from subprocess import PIPE, Popen

proc = Popen(["ping", sys.argv[1]], stdout=PIPE)
while True:
    line = proc.stdout.readline()
    if not line:
        # EOF: ping exited (or closed stdout) without a successful reply.
        proc.wait()
        sys.exit(1)
    if b'bytes from' in line:
        proc.terminate()
        sys.exit(0)

View File

@ -0,0 +1,32 @@
#!/bin/bash -xe
# Import updated translations (po.tgz from Launchpad) into the project
# tree and propose them to Gerrit on the launchpad/translations topic.
#
# Required environment:
#   PROJECT - project name; po.tgz must be present in the workspace.
git config user.name "OpenStack Jenkins"
git config user.email "jenkins@openstack.org"
# See if there is an open change in the launchpad/translations topic
# If so, amend the commit with new changes since then
# (Bug fix: the query previously used the literal string
# "project:openstack/PROJECT" instead of the $PROJECT variable.)
previous=$(ssh -p 29418 review.openstack.org gerrit query --current-patch-set status:open "project:openstack/$PROJECT" topic:launchpad/translations | grep "^ number:" | awk '{print $2}')
if [ "x${previous}" != "x" ] ; then
    git review -d "${previous}"
    amend="--amend"
fi
tar xvfz po.tgz
rm po.tgz
# Copy each language's .po file into place, but only for languages the
# project already tracks.
for f in po/*po ; do
    lang=$(echo "$f" | cut -f2 -d/ | cut -f1 -d.)
    if [ -d "$PROJECT/locale/$lang" ] ; then
        cp "$f" "$PROJECT/locale/$lang/LC_MESSAGES/$PROJECT.po"
    fi
done
python setup.py extract_messages
git add "$PROJECT/locale/$PROJECT.pot"
python setup.py update_catalog
for f in po/*po ; do
    lang=$(echo "$f" | cut -f2 -d/ | cut -f1 -d.)
    if [ -d "$PROJECT/locale/$lang" ] ; then
        git add "$PROJECT/locale/$lang/LC_MESSAGES/$PROJECT.po"
    fi
done
# ${amend} is intentionally unquoted: it must expand to nothing when
# there is no prior change to amend.
git commit ${amend} -m "Imported Translations from Launchpad"
git review -t launchpad/translations

View File

@ -0,0 +1,25 @@
#!/bin/bash -xe
# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#
if [ -f .cache.bundle ]
then
    venv=jenkinscover
else
    venv=cover
fi
# Capture tox's exit status explicitly: under "bash -e" a bare failing
# tox would abort the script before the pip freeze below could run
# (and "result=$?" would never see the failure).
result=0
tox -e"$venv" || result=$?
echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
".tox/$venv/bin/pip" freeze
echo "======================================================================"
exit "$result"

View File

@ -0,0 +1,27 @@
#!/bin/bash -xe
# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#
if [ -f .cache.bundle ]
then
    venv=jenkinsvenv
else
    venv=venv
fi
mkdir -p doc/build
export HUDSON_PUBLISH_DOCS=1
# Capture tox's exit status explicitly: under "bash -e" a bare failing
# tox would abort the script before the pip freeze below could run
# (and "result=$?" would never see the failure).
result=0
tox -e"$venv" -- python setup.py build_sphinx || result=$?
echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
".tox/$venv/bin/pip" freeze
echo "======================================================================"
exit "$result"

View File

@ -0,0 +1,38 @@
#!/bin/bash -xe
# If a bundle file is present, call tox with the jenkins version of
# the test environment so it is used. Otherwise, use the normal
# (non-bundle) test environment. Also, run pip freeze on the
# resulting environment at the end so that we have a record of exactly
# what packages we ended up testing.
#
# Usage: run-tox.sh PYTHONVERSION
#
# Where PYTHONVERSION is the numeric version identifier used as a suffix
# in the tox.ini file. E.g., "26" or "27" for "py26"/"jenkins26" or
# "py27"/"jenkins27" respectively.
version=$1
if [ -z "$version" ]
then
    echo "The tox environment python version (eg '27') must be the first argument."
    exit 1
fi
if [ -f .cache.bundle ]
then
    venv=jenkins$version
else
    venv=py$version
fi
# Capture tox's exit status explicitly: under "bash -e" a bare failing
# tox would abort the script before the pip freeze below could run
# (and "result=$?" would never see the failure).
result=0
tox -e"$venv" || result=$?
echo "Begin pip freeze output from test virtualenv:"
echo "======================================================================"
".tox/$venv/bin/pip" freeze
echo "======================================================================"
exit "$result"

View File

@ -0,0 +1,10 @@
#!/bin/bash -xe
# Refresh the local pip download cache by installing every package
# devstack knows about into a throwaway virtualenv.
mkdir -p ~/cache/pip
VENV=$(mktemp -d)
virtualenv --no-site-packages "$VENV"
cd "$VENV"
. bin/activate
# $(cat ...) is intentionally unquoted: each listed package name must
# become a separate argument to pip.
PIP_DOWNLOAD_CACHE=~/cache/pip pip install $(cat ~/devstack/files/pips/*)
cd
rm -fr "$VENV"

View File

@ -0,0 +1,12 @@
#!/bin/bash -xe
# wait_for_nova.sh URL
# Poll URL with curl until it responds successfully.
URL=$1
echo "Jenkins: Waiting for Nova to start on infrastructure node"
RET=7
while [ "$RET" != 0 ]; do
    # "|| RET=$?" is required: under "bash -e" a bare failing curl would
    # abort the whole script instead of letting the loop retry.
    RET=0
    curl -s "$URL" >/dev/null || RET=$?
    sleep 1
done
echo "Jenkins: Nova is running."

View File

@ -0,0 +1,21 @@
#!/bin/bash -xe
# wait_for_puppet.sh LOGFILE HOSTNAME [HOSTNAME...]
# Poll LOGFILE until every listed host has logged a finished puppet
# catalog run.
FINISH_RE="puppet-agent\[.*\]: Finished catalog run in .* seconds"
LOGFILE=$1
shift
HOSTS=$@
echo "Jenkins: Waiting for puppet to complete on all nodes"
DONE=0
while [ $DONE != 1 ]; do
    DONE=1
    # $HOSTS intentionally unquoted: it is a space-separated list.
    for hostname in $HOSTS
    do
        # "|| DONE=0" handles the no-match-yet case and also keeps a
        # failing grep from tripping "bash -e".
        grep -q "$hostname $FINISH_RE" "$LOGFILE" || DONE=0
    done
    sleep 5
done
echo "Jenkins: Puppet is complete."

View File

@ -205,4 +205,56 @@ define jenkinsuser($ensure = present, $ssh_key) {
], ],
} }
file { '/usr/local/jenkins':
owner => 'root',
group => 'root',
mode => 755,
ensure => 'directory',
}
file { '/usr/local/jenkins/slave_scripts':
owner => 'root',
group => 'root',
mode => 755,
ensure => 'directory',
require => File['/usr/local/jenkins'],
}
$slave_scripts = [
'slave_scripts/baremetal-archive-logs.sh',
'slave_scripts/baremetal-deploy.sh',
'slave_scripts/baremetal-os-install.sh',
'slave_scripts/build-bundle.sh',
'slave_scripts/build-venv.sh',
'slave_scripts/copy-bundle.sh',
'slave_scripts/copy-venv.sh',
'slave_scripts/create-ppa-package.sh',
'slave_scripts/create-tarball.sh',
'slave_scripts/gerrit-git-prep.sh',
'slave_scripts/lvm-kexec-reset.sh',
'slave_scripts/ping.py',
'slave_scripts/propose_translations.sh',
'slave_scripts/run-cover.sh',
'slave_scripts/run-docs.sh',
'slave_scripts/run-tox.sh',
'slave_scripts/update-pip-cache.sh',
'slave_scripts/wait_for_nova.sh',
'slave_scripts/wait_for_puppet.sh',
]
file { $slave_scripts:
name => "/usr/local/jenkins/slave_scripts/${name}",
owner => 'root',
group => 'root',
mode => 750,
ensure => 'present',
require => File['/usr/local/jenkins/slave_scripts'],
source => [
"puppet:///modules/jenkins_slave/${name}",
],
}
} }