Initial commit

This commit is contained in:
Vitaly Parakhin 2015-04-02 19:07:14 +03:00
parent 188bbaf41e
commit 986785ed6a
15 changed files with 1697 additions and 0 deletions

21
config/common.cfg Normal file
View File

@ -0,0 +1,21 @@
# Base directory under which all mirrors are stored
MIRROR_ROOT="/var/www/mirror"
# Directory that receives the sync log files
LOG_ROOT="/var/log/mirror-sync"
# Try to autodetect the Fuel version when running on a Fuel master node
if hash fuel 2>/dev/null; then
# 'fuel --fuel-version' prints a "release: 'X.Y'" line; strip the quotes
FUEL_VERSION="$(fuel --fuel-version 2>&1 | awk -v sq="'" '/^release:/ { gsub(sq,""); print $2 }')"
# Use the matching fuel-main branch when it exists upstream, otherwise master
if wget -nv "https://raw.githubusercontent.com/stackforge/fuel-main/${FUEL_VERSION}/config.mk" -O /dev/null; then
FUEL_BRANCH="$FUEL_VERSION"
else
FUEL_BRANCH=master
fi
else
# Not on a Fuel master node: fall back to static defaults
echo "Cannot detect current Fuel version, using defaults. Please configure settings in config/common.cfg"
FUEL_VERSION=6.1
FUEL_BRANCH=master
fi

View File

@ -0,0 +1,41 @@
# MOS Ubuntu "updates only" mirror configuration
# (updates/security/holdback suites; the base mosX.Y suite is not synced).
# Source common config (provides MIRROR_ROOT, LOG_ROOT, FUEL_VERSION, FUEL_BRANCH)
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
# Sync source: rsync host, rsync module path, and HTTP-visible path
UPSTREAM="osci-mirror-kha.kha.mirantis.net"
UPSTREAM_DIR="mirror-sync/mos/ubuntu"
UPSTREAM_DIR_HTTP="mos/ubuntu"
# NOTE! Partial repository sync feature requires apt support on your OS!
# PARTIAL_UPSTREAM=1 : build a partial mirror containing only packages required by MOS
# PARTIAL_UPSTREAM=0 : mirror the full upstream repository
PARTIAL_UPSTREAM=0
# Sync destination
LOCAL_DIR="${MIRROR_ROOT:-"/var/www/mirror"}/mos-ubuntu"
# What parts to sync
I18Ns=( en ) # Translations; do not remove English — it may break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( mos${FUEL_VERSION}-updates mos${FUEL_VERSION}-security mos${FUEL_VERSION}-holdback )
# Components to fetch from each dist
declare -A DIST_COMPONENTs
DIST_COMPONENTs["mos${FUEL_VERSION}-updates"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-security"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-holdback"]="main restricted"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="no" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client update)
FETCH_INDICES="no" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

42
config/mos-ubuntu.cfg Normal file
View File

@ -0,0 +1,42 @@
# MOS Ubuntu full mirror configuration (base suite plus updates/security/holdback).
# Source common config (provides MIRROR_ROOT, LOG_ROOT, FUEL_VERSION, FUEL_BRANCH)
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
# Sync source: rsync host, rsync module path, and HTTP-visible path
UPSTREAM="osci-mirror-kha.kha.mirantis.net"
UPSTREAM_DIR="mirror-sync/mos/ubuntu"
UPSTREAM_DIR_HTTP="mos/ubuntu"
# NOTE! Partial repository sync feature requires apt support on your OS!
# PARTIAL_UPSTREAM=1 : build a partial mirror containing only packages required by MOS
# PARTIAL_UPSTREAM=0 : mirror the full upstream repository
PARTIAL_UPSTREAM=0
# Sync destination
LOCAL_DIR="${MIRROR_ROOT:-"/var/www/mirror"}/mos-ubuntu"
# What parts to sync
I18Ns=( en ) # Translations; do not remove English — it may break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( mos${FUEL_VERSION} mos${FUEL_VERSION}-updates mos${FUEL_VERSION}-security mos${FUEL_VERSION}-holdback )
# Components to fetch from each dist
declare -A DIST_COMPONENTs
DIST_COMPONENTs["mos${FUEL_VERSION}"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-updates"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-security"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-holdback"]="main restricted"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="no" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client update)
FETCH_INDICES="no" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

44
config/ubuntu.cfg Normal file
View File

@ -0,0 +1,44 @@
# Upstream Ubuntu (trusty) mirror configuration; builds a partial mirror with
# only the packages required by MOS (PARTIAL_UPSTREAM=1).
# Source common config (provides MIRROR_ROOT, LOG_ROOT, FUEL_VERSION, FUEL_BRANCH)
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
# Sync source: rsync host, rsync module path, and HTTP-visible path
UPSTREAM="ubuntu-mirror.telesys.org.ua"
UPSTREAM_DIR="ubuntu"
UPSTREAM_DIR_HTTP=${UPSTREAM_DIR}
# Sync destination
LOCAL_DIR="${MIRROR_ROOT:-"/var/www/mirror"}/ubuntu"
# NOTE! Partial repository sync feature requires apt support on your OS!
# PARTIAL_UPSTREAM=1 : build a partial mirror containing only packages required by MOS
# PARTIAL_UPSTREAM=0 : mirror the full upstream repository
PARTIAL_UPSTREAM=1
# Destination for the partial upstream mirror
PARTIAL_UPSTREAM_PATH="${MIRROR_ROOT:-"/var/www/mirror"}/ubuntu-part"
# What parts to sync
I18Ns=( en ) # Translations; do not remove English — it may break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( trusty trusty-updates trusty-security )
# Components to fetch from each dist
declare -A DIST_COMPONENTs
DIST_COMPONENTs["trusty"]="main multiverse universe"
DIST_COMPONENTs["trusty-updates"]="main multiverse universe"
DIST_COMPONENTs["trusty-security"]="main multiverse universe"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="yes" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client update)
FETCH_INDICES="yes" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

612
deb-mirror Executable file
View File

@ -0,0 +1,612 @@
#!/bin/bash
# deb-mirror: mirror a Debian/Ubuntu package repository from an rsync upstream.
# Usage: deb-mirror [config-file]
# Source separate config file if given
CONFIG_FILE="$1"
[[ -r "$CONFIG_FILE" ]] && . "$CONFIG_FILE"
# Sync source
UPSTREAM=${UPSTREAM:-""}
UPSTREAM_DIR=${UPSTREAM_DIR:-""}
# Sync destination
LOCAL_DIR=${LOCAL_DIR:-""}
#declare -A DIST_COMPONENTs
# Optional fetch configuration (each may be preset by the config file)
FETCH_I18N=${FETCH_I18N:-"yes"} # Fetch translations
FETCH_SOURCES=${FETCH_SOURCES:-"no"} # Fetch packages sources
FETCH_CONTENTS=${FETCH_CONTENTS:-"no"} # Fetch 'Contents' file for distro
# was ':=' here — ':-' for consistency with the other defaults
FETCH_INSTALLER=${FETCH_INSTALLER:-"no"} # Fetch separate 'debian-installer'
FETCH_DIFF=${FETCH_DIFF:-"no"} # Fetch diffs of 'Packages'
FETCH_INDICES=${FETCH_INDICES:-"yes"} # Fetch indices
# Misc
ARCH_ALL_IS_MANDATORY=${ARCH_ALL_IS_MANDATORY:-"no"}
#------------------------------------------------------------------------------#
# Compression suffixes a repository may publish its index files with
POSSIBLE_COMPRESSIONS=( gz bz2 xz lzma )
BINROOT="$(dirname "$(readlink -f "$0")")"
VERSION="0.1"
# Helper libraries: logging, rsync wrappers, dpkg metadata parsing, checksums
. "$BINROOT/util/msgs.sh"
. "$BINROOT/util/rsync.sh"
. "$BINROOT/util/dpkg.sh"
. "$BINROOT/util/checksum.sh"
# Fail fast on incomplete configuration
[[ -n "$UPSTREAM" ]] || fatal "UPSTREAM is not defined in config"
[[ -n "$UPSTREAM_DIR" ]] || fatal "UPSTREAM_DIR is not defined in config"
[[ -n "$LOCAL_DIR" ]] || fatal "LOCAL_DIR is not defined in config"
[[ -n "${ARCHs[@]}" ]] || fatal "ARCHs is not defined in config"
[[ -n "${DISTs[@]}" ]] || fatal "DISTs is not defined in config"
#------------------------------------------------------------------------------#
# Checks if 'value' contained within 'array'
# USAGE: contains 'what' 'where'
# $1 -- value to find in array
# $2.. -- array elements to search (expand with "${arr[@]}")
contains()
{
local needle="$1" item
shift
for item in "$@"; do
if [[ "$item" == "$needle" ]]; then
return 0
fi
done
return 1
}
# SIGINT (Ctrl-C) handler: report the abort via fatal() from util/msgs.sh.
on_SIGINT()
{
fatal "Got user interrupt, aborting"
# Safety net in case fatal() returns instead of exiting
exit 1
}
#------------------------------------------------------------------------------#
# MAIN()
#------------------------------------------------------------------------------#
# Trap user abort
trap "on_SIGINT" INT
info "Started $0 $*"
# Dump the effective configuration for debugging
debug "Upstream source is: $UPSTREAM::$UPSTREAM_DIR"
debug "Local dir is: $LOCAL_DIR"
debug "Architectures to sync: ${ARCHs[@]}"
debug "Dists to sync: ${DISTs[@]}"
debug "FETCH_I18N: $FETCH_I18N "
debug "FETCH_SOURCES: $FETCH_SOURCES "
debug "FETCH_CONTENTS: $FETCH_CONTENTS "
debug "FETCH_INSTALLER: $FETCH_INSTALLER "
debug "FETCH_DIFF: $FETCH_DIFF "
debug "FETCH_INDICES: $FETCH_INDICES "
debug "ARCH_ALL_IS_MANDATORY: $ARCH_ALL_IS_MANDATORY"
debug "POSSIBLE_COMPRESSIONS: ${POSSIBLE_COMPRESSIONS[@]}"
debug "BINROOT: $BINROOT "
debug "PARTIAL_UPSTREAM: $PARTIAL_UPSTREAM "
debug "PARTIAL_UPSTREAM_PATH: $PARTIAL_UPSTREAM_PATH"
# Create the local mirror skeleton
mkdir -p $LOCAL_DIR/dists
mkdir -p $LOCAL_DIR/pool
# Array of Packages files, that contains package descriptions
packages_pool_files=()
sources_pool_files=()
# Wait until the upstream mirror finishes its own update, to avoid
# syncing a half-updated repository
debug_job_start "Checking if upstream mirror update is in progress..."
while rsync_file_exists "Archive-Update-in-Progress*"; do
info "'Archive-Update-in-Progress' file found on upstream mirror. Sleeping for 20 seconds"
sleep 20
done ; debug_job_ok
################################################################################
# Stage 1
# Download metainformation files
################################################################################
for dist in "${DISTs[@]}"; do
info "Fetching dist '$dist' lists"
############################################################################
# Phase 1: Check if we have aliased distro and create necessary symlinks
# aliases is specified after '@'-sign in dist name, separated by commas
# For example: 'wheezy@testing,somealias' means dist 'wheezy' with symlinks
# 'testing' and 'somealias' pointing to it
############################################################################
# TODO: get aliases from Release suite
# NOTE(review): unquoted, so '[ -n ... ]' is always true; harmless here
# because without an '@' the alias loop below is effectively a no-op.
if [ -n ${dist#*@} ]; then
normal_dist="${dist%%@*}"
for dist_alias in `echo ${dist#*@} | tr ',' ' '`; do
# Skip the alias that equals the dist itself (no '@' present)
if [[ "$dist_alias" == "$normal_dist" ]]; then
continue
fi
if [ ! -L $LOCAL_DIR/dists/$dist_alias ]; then
debug "Creating dist alias '$dist_alias' -> '$normal_dist'"
ln -s "$normal_dist" "$LOCAL_DIR/dists/$dist_alias" || \
error "Error creating alias for $normal_dist ($dist_alias)"
fi
done
# From here on work with the real dist name only
dist="$normal_dist"
unset normal_dist
fi
############################################################################
# Phase 2: Create distribution dir
############################################################################
mkdir -p $LOCAL_DIR/dists/$dist/
############################################################################
# Phase 3: Fetch Release files
# InRelease uses new scheme of inline Release signing
# Old scheme implies separate 'Release' and 'Release.gpg' files
############################################################################
debug "Fetching Release files"
for rel_file in InRelease Release Release.gpg; do
fetch "/dists/$dist/$rel_file" "$LOCAL_DIR/dists/$dist/"
done
release_file="$LOCAL_DIR/dists/$dist/InRelease"
# Check InRelease file: drop it when it will expire before our next update
if [ -f "$release_file" ]; then
# BUGFIX: parse Valid-Until of the InRelease file we just fetched;
# the original read a hardcoded /srv/mirror/debian/... path instead.
inrl_valid=$(date -d "`grep Valid-Until "$release_file" | awk '{$1=""; print $0}'`" +%s)
now=$(date +%s)
# Less than 24 hours of validity left -> fall back to Release/Release.gpg
if [[ $(( $now - $inrl_valid )) -gt -86400 ]]; then
info "InRelease file will expire before the next update, removing it..."
rm -f "$release_file"
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
else
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
[ -f "$release_file" ] || fatal "Unable to find release file for dist $dist"
debug "Got Release file '$release_file'"
############################################################################
# Phase 4: check release signature
############################################################################
# BUGFIX: a quoted right-hand side of '=~' is matched as a literal string,
# so the original '[[ ... =~ ".*InRelease$" ]]' never matched and signature
# verification was silently skipped. Use glob matches instead.
if [[ "$release_file" == *InRelease ]]; then
gpg --verify "$release_file" || \
fatal "Failed to check signature for $release_file"
elif [[ "$release_file" == *Release ]]; then
gpg --verify "${release_file}.gpg" "${release_file}" || \
fatal "Failed to check signature for $release_file"
fi
############################################################################
# Phase 5: Determine which components and arches to download
# Case A: If we have user specified component list, and hasn't found any
# in distro description, then blindly use user given values
# Case B: If we have no user specified component list, try to get them from
# repository Release file, if it fails - bail out
# Case C: If we have both, calculate intersection of them
############################################################################
debug "Calculating arches/components to fetch from dist"
components=`get_dist_components $release_file "${DIST_COMPONENTs[$dist]}"`
arches=`get_dist_architectures $release_file ${ARCHs[@]}`
# Phase 6: Fork components into binary_components
# That allows us to add special sub-components specific to binary components
# such as 'debian-installer'
binary_components="$components"
############################################################################
# Phase 7: Check if we must fetch 'debian-installer' sub-components and add
# them to the binary_components list if needed
############################################################################
if [[ "$FETCH_INSTALLER" = "yes" ]]; then
for component in $components; do
if rsync_file_exists "dists/$dist/$component/debian-installer"; then
debug "Adding debian-installer '$component/debian-installer'"
binary_components="$binary_components $component/debian-installer"
else
error "Not found debian-installer at '$component/debian-installer'"
fi
done
fi
############################################################################
# Phase 8: Fetch binary components 'Packages' indexes and diffs
############################################################################
debug "Will fetch binary components: $binary_components"
for component in $binary_components; do
info "Fetching component '$component' binary package lists"
# Component path
comp_path="dists/$dist/$component"
# Create component dir
mkdir -p "$LOCAL_DIR/$comp_path"
# First, fetch binary packages lists
for arch in $arches; do
arch_path="$comp_path/binary-$arch"
# Check if the remote dir exists
if ! rsync_file_exists "$arch_path"; then
# Missing 'all' architecture is a non-critical error
if [[ "$arch" = "all" ]] && [[ "$ARCH_ALL_IS_MANDATORY" != "yes" ]]; then
debug "Missing 'all' architecture in $dist/$component"
continue
fi
fatal "Arch '$arch' in '$dist/$component' doesn't exist"
fi
# Prepare component dir
mkdir -p "$LOCAL_DIR/$arch_path"
to_fetch=()
# List of files that we want to dl (Packages plus all compressed variants)
to_fetch+=( "$arch_path/Release" )
to_fetch+=( "$arch_path/Packages" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$arch_path/Packages.$ext" )
done
# Check if we want a Packages.diff files Index too
if [[ "$FETCH_DIFF" = "yes" ]] && \
rsync_file_exists "$arch_path/Packages.diff/Index"; then
to_fetch+=( `rsync_ls "$arch_path/Packages.diff/*"` )
fi
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify all fetched files
for file in ${fetched_files[@]}; do
# Skip checking of diffs, they are mentioned in Index file
# Validate only Index file
if [[ "`dirname $file`" = "$LOCAL_DIR/$arch_path/Packages.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
# Check file by Release file's checksum
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Make sure that we have at least one valid packages list
packages_file=`guess_filename "$LOCAL_DIR/$arch_path/Packages"`
if [[ -z "$packages_file" ]]; then
fatal "Failed to find Packages file at $arch_path"
fi
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$arch_path/Packages.diff/Index"`
if [[ "$FETCH_DIFF" = "yes" ]] && [[ -n "$diff_index" ]]; then
# The 'SHA1-Patches' section of the Index lists "<sha1> <size> <name>"
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$arch_path/Packages.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$arch_path/Packages.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
# A broken diff chain is unusable; drop all diffs and rely on
# the full Packages file instead
error "Checksum failed on file $arch_path/Packages.diff/$diff, removing all diffs"
rm -rf "$LOCAL_DIR/$arch_path/Packages.diff"
break
fi
debug_job_ok
done
fi
# Parse package file and add packages from it to dl list
packages_pool_files+=( "$packages_file" )
done
done
############################################################################
# Phase 9: Fetch additional stuff for components, i18n, sources, 'Contents'
############################################################################
for component in $components; do
comp_path="dists/$dist/$component"
mkdir -p "$LOCAL_DIR/$comp_path"
# Second, the i18n packages
# NOTE(review): this is printed even when FETCH_I18N != yes
info "Fetching section '$component' i18n"
if [[ "$FETCH_I18N" = "yes" ]]; then
mkdir -p "$LOCAL_DIR/$comp_path/i18n/"
to_fetch=()
to_fetch+=( "$comp_path/i18n/Index" )
for i18n in ${I18Ns[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n" )
# Translation files may have diffs too
to_fetch+=( "$comp_path/i18n/Translation-$i18n.diff/*" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
# Skip checking of diffs, except it's Index file
if [[ "`dirname $file`" = "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/Index"`
if [[ -n "$diff_index" ]]; then
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
fatal "Checksum failed on file $comp_path/i18n/Translation-$i18n.diff/$diff"
fi
debug_job_ok
done
fi
done
fi
# Third is the Sources
if [[ "$FETCH_SOURCES" = "yes" ]]; then
info "Fetching component '$component' source package lists"
mkdir -p "$LOCAL_DIR/$comp_path/source/"
to_fetch=()
to_fetch+=( "$comp_path/source/Release" )
to_fetch+=( "$comp_path/source/Sources" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/source/Sources.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
sources_file=`guess_filename "$LOCAL_DIR/$comp_path/source/Sources"`
if [[ -z "$sources_file" ]]; then
fatal "Failed to find Sources file at $LOCAL_DIR/$comp_path/source"
fi
# Parse sources file and add packages from it to dl list
sources_pool_files+=( "$sources_file" )
fi
# Fetch the component contents packs
if [[ "$FETCH_CONTENTS" = "yes" ]]; then
info "Fetching component '$component' content lists"
to_fetch=()
for arch in $arches; do
to_fetch+=( "$comp_path/Contents-$arch" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/Contents-$arch.$ext" )
done
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
# If our component is "main", make link in the root of distribution
if [[ "$component" = "main" ]]; then
for arch in $arches; do
if [[ -e "$LOCAL_DIR/dists/$dist/$component/Contents-$arch.gz" ]]; then
debug "Creating link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
ln -sf main/Contents-$arch.gz $LOCAL_DIR/dists/$dist/Contents-$arch.gz
else
debug "Deleting link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
rm -f "$LOCAL_DIR/dists/$dist/Contents-$arch.gz"
fi
done
fi
fi
done
done
# Get the indices (override files used later for partial-mirror generation)
if [[ "$FETCH_INDICES" = "yes" ]]; then
info "Fetching indices"
mkdir -p "$LOCAL_DIR/indices/"
for dist in "${DISTs[@]}"; do
fetch "/indices/override.$dist.*" "$LOCAL_DIR/indices/"
done
fi
################################################################################
# Stage 2
# Download pool of packages
################################################################################
info "Parsing package and sources files:"
info "${packages_pool_files[@]}"
info "${sources_pool_files[@]}"
# Temp files: list of pool paths to download, checksum list, rsync log, scratch
files_to_dl_list=`mktemp --suffix="-deb-mirror"`
# File that contains md5sums of deb pkgs
deb_md5=`mktemp --suffix="-deb-mirror-md5"`
rsync_log=`mktemp --suffix="-deb-mirror-rslog"`
sort_temp=`mktemp --suffix="-deb-mirror-sort"`
# parsePackages.py emits pool paths on stdout and checksums on stderr
$BINROOT/util/parsePackages.py ${packages_pool_files[@]} > "$files_to_dl_list" \
2> "$deb_md5" && \
$BINROOT/util/parseSources.py ${sources_pool_files[@]} >> "$files_to_dl_list" || \
fatal "Unable to create list of packages to fetch"
# De-duplicate both lists (checksums keyed by file name in column 3)
sort -u "$files_to_dl_list" > "$sort_temp" ; mv "$sort_temp" "$files_to_dl_list"
sort -u -k 3,3 "$deb_md5" > "$sort_temp" ; mv "$sort_temp" "$deb_md5"
# If partial mirroring is enabled, get the list of partial mirror packages
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
info "Resolving dependencies for partial mirror"
[ -f /usr/bin/apt-get ] || fatal "APT not found on this system, creation of partial repository will fail"
# Detect kernel version of debian-installer from fuel-main's config.mk
fuel_config_temp=`mktemp --suffix="-fuel-config-temp"`
if ! wget -nv https://raw.githubusercontent.com/stackforge/fuel-main/${FUEL_BRANCH}/config.mk -O $fuel_config_temp; then
fatal "Failed to fetch Fuel config"
fi
# Pull UBUNTU_* settings out of the makefile ("VAR?=value" / "VAR:=value")
export UBUNTU_RELEASE=`awk -F '[:=?]' '/^UBUNTU_RELEASE\>/ {print $NF}' $fuel_config_temp`
export UBUNTU_NETBOOT_FLAVOR=`awk -F '[:=?]' '/^UBUNTU_NETBOOT_FLAVOR\>/ {print $NF}' $fuel_config_temp`
export UBUNTU_ARCH=`awk -F '[:=?]' '/^UBUNTU_ARCH\>/ {print $NF}' $fuel_config_temp`
INITRD_DIR="/dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/"
mkdir -p "$LOCAL_DIR/$INITRD_DIR"
fetch "/$INITRD_DIR/initrd.gz" "$LOCAL_DIR/$INITRD_DIR"
# The installer initrd carries its kernel modules under lib/modules/<ver>/
UBUNTU_INSTALLER_KERNEL_VERSION=`zcat "$LOCAL_DIR/$INITRD_DIR/initrd.gz" | cpio --list 'lib/modules/*/kernel' 2>/dev/null | cut -d"/" -f 3`
debug "Detected debian-installer kernel version: "$UBUNTU_INSTALLER_KERNEL_VERSION
rm -f "$fuel_config_temp"
# Generate list of MOS dependencies via an isolated APT state dir
export apt_altstate=`mktemp -d --suffix="-apt-altstate"`
export FUEL_BRANCH
$BINROOT/util/partial_ubuntu.sh || fatal "Cannot calculate list of dependencies"
# Create download lists for deb and udeb; udebs for other kernels than the
# installer's are filtered out
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/deb "$deb_md5" > $apt_altstate/deb_md5
grep "\.udeb$" "$files_to_dl_list" | egrep -v "generic|virtual" > $apt_altstate/udeb_nonkernel
grep "\.udeb$" "$files_to_dl_list" | egrep "generic|virtual" | grep $UBUNTU_INSTALLER_KERNEL_VERSION > $apt_altstate/udeb_kernel
cat $apt_altstate/udeb_nonkernel $apt_altstate/udeb_kernel | sort -u > $apt_altstate/udeb
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/udeb "$deb_md5" > $apt_altstate/udeb_md5
# Replace the full download/checksum lists with the partial ones
cat $apt_altstate/netboot.list $apt_altstate/udeb $apt_altstate/deb > "$files_to_dl_list"
cat $apt_altstate/netboot_md5.list $apt_altstate/udeb_md5 $apt_altstate/deb_md5 > "$deb_md5"
rm -rf "$apt_altstate"
fi # "$PARTIAL_UPSTREAM" = "1"
info "Downloading pool files"
rsync --verbose --out-format="%i %n" --stats \
--recursive --perms --links --times --hard-links --sparse --safe-links \
--exclude=".tmp/" --exclude=".temp/" --exclude=".~tmp~/" \
--files-from="$files_to_dl_list" \
--bwlimit=5192 \
"${UPSTREAM}::${UPSTREAM_DIR}/" "$LOCAL_DIR" | tee "$rsync_log"
# BUGFIX: '$?' after a pipeline is the status of the LAST command (tee),
# which nearly always succeeds — rsync failures went undetected.
# PIPESTATUS[0] holds rsync's own exit code.
rsync_status=${PIPESTATUS[0]}
# --files-from="$files_to_dl_list" \--block-size=8192
#--max-delete=40000 --delay-updates --delete --delete-after \
# Check if rsync was ok
if [[ $rsync_status != 0 ]]; then
rm "$files_to_dl_list"
fatal "Failed to sync all package files, see log for details"
#error "Failed to sync all package files, see log for details"
else
info "Primary sync successfully completed"
fi
# Let's check new file MD5sums
# rsync itemized output marks newly transferred files with '>f'
fresh_files=`egrep "^>f......... .*" "$rsync_log" | awk '{print $2}'`
for fresh_file in $fresh_files; do
# check_file() comes from util/checksum.sh; verifies against $deb_md5 records
check_file "$deb_md5" "$LOCAL_DIR" "$fresh_file"
if [[ $? != 0 ]]; then
rm "$deb_md5"
rm "$rsync_log"
fatal "MD5sum check failed for file $LOCAL_DIR/$fresh_file"
fi
done
rm "$deb_md5"
rm "$rsync_log"
# Now iterate through all downloaded files and check if any of them are symlink
# download neccessary files if needed
# Yeah, some times section can contain a metainfo for symlink to file in
# diffirent section that is no longer exists in there, so it will be wiped as
# unused
wayback="`pwd`"
cd "$LOCAL_DIR/"
pool_current_files=`mktemp --suffix d-m_got`
pool_required_files=`mktemp --suffix d-m_req`
# Create lists of files that we got and that we need
find pool -type f -or -type l | sort -u > $pool_current_files
cat $files_to_dl_list | grep "^pool" | sort -u > $pool_required_files
cd "$wayback"
info "Cleaning up pool files"
# Clean obsolete files: present locally but not referenced by any Packages list
obsolete_files=`comm -3 -2 "$pool_current_files" "$pool_required_files"`
for file in $obsolete_files; do
debug_job_start "Deleting '$LOCAL_DIR/$file'"
rm "$LOCAL_DIR/$file" && debug_job_ok || debug_job_err
done
info "Doublechecking that required pool files exists"
# Required but absent files mean the sync is incomplete -> abort
missing_files=`comm -3 -1 "$pool_current_files" "$pool_required_files"`
if [[ -n "$missing_files" ]]; then
error "Some files are missing after sync!!!:"
error "$missing_files"
fatal "Aborting due to missing files"
fi
rm "$files_to_dl_list"
rm "$pool_required_files"
rm "$pool_current_files"
# Timestamp
echo "Updated at: `date`" > $LOCAL_DIR/.lastupdate
# If partial mirroring is enabled, build the partial repository from the
# full mirror we just synced.
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
# Prepare directory structure for partial repository
info "Generating partial mirror"
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/main
mkdir -p ${PARTIAL_UPSTREAM_PATH}/indices
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/debian-installer/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/${INITRD_DIR}
temp_dir=`mktemp -d --suffix="-reposync"`
# Flatten the pool: copy all debs/udebs into one dir, then rsync into place.
# BUGFIX: quote the -name patterns — unquoted '*.deb' would be expanded by
# the shell if a matching file exists in the current directory.
find $LOCAL_DIR/pool/ -name '*.deb' -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/main
rm -f ${temp_dir}/*
find ${LOCAL_DIR}/pool/ -name '*.udeb' -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
rm -rf ${temp_dir}
rsync -a --delete ${LOCAL_DIR}/${INITRD_DIR}/ ${PARTIAL_UPSTREAM_PATH}/${INITRD_DIR}
# Generate "indices" folder
cat $LOCAL_DIR/indices/*extra* | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.extra.main
cat $LOCAL_DIR/indices/*.debian-installer | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main.debian-installer
pushd $LOCAL_DIR/indices/
ls --ignore="*extra*" --ignore="*src" --ignore="*debian-installer" --quoting-style=shell | xargs cat | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main
popd
# Generate Release file
cat <<EOF > ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/Release
Architectures: amd64
Codename: ${UBUNTU_RELEASE}
Components: main
Date: `date`
Description: Ubuntu ${UBUNTU_RELEASE} partial mirror
Label: Ubuntu
Origin: Ubuntu
Suite: ${UBUNTU_RELEASE}
EOF
# Build partial mirror
info "Generating metadata for partial mirror"
export BINROOT
# BUGFIX: 'failure' is not defined anywhere in this codebase; use the
# fatal() helper used everywhere else so errors actually abort the run.
$BINROOT/util/regenerate_ubuntu_repo ${PARTIAL_UPSTREAM_PATH} ${UBUNTU_RELEASE} || fatal "Failed to generate partial mirror"
# The full mirror is only an intermediate product in partial mode
rm -rf $LOCAL_DIR
fi # "$PARTIAL_UPSTREAM" = "1"
info "Done"

11
mirror-sync Executable file
View File

@ -0,0 +1,11 @@
#!/bin/bash
# Entry point: run the MOS updates-only sync and the partial Ubuntu sync.
BINROOT="$(dirname "$(readlink -f "$0")")"
# Shared settings (MIRROR_ROOT, LOG_ROOT, ...)
. "$BINROOT/config/common.cfg"
mkdir -p "${MIRROR_ROOT}"
mkdir -p "${LOG_ROOT}"
"$BINROOT/deb-mirror" "$BINROOT/config/mos-ubuntu-updatesonly.cfg"
"$BINROOT/deb-mirror" "$BINROOT/config/ubuntu.cfg"

41
util/checksum.sh Normal file
View File

@ -0,0 +1,41 @@
# check_file CHECKSUM_FILE ROOT FILE
# Verify ROOT/FILE against every checksum record for it in CHECKSUM_FILE
# (lines of the form "<TYPE> <checksum> <path>").
# Returns 0 when all known checksums match (or when no record is found),
# 1 on the first mismatch.
check_file()
{
# BUGFIX: declare everything local — the original leaked all of these
# into the caller's scope.
local checksum_file=$1
local root=$2
local file_to_check=$3
local file_records file_record
local expected_checksum_type expected_checksum actual_checksum
# Split records on newlines only; 'local IFS' restores the caller's IFS on
# return (the original overwrote the global IFS permanently).
local IFS=$'\n'
debug_job_start "Checking checksum of file '$root/$file_to_check'"
# BUGFIX: -F/-- treat the path as a literal string; '.' and '+' are common
# in package file names and were previously interpreted as regex.
file_records=`grep -F -- "$file_to_check" "$checksum_file" | sort -u`
[[ -z "$file_records" ]] && echo -n "checksums not found..." \
&& debug_job_skip && return 0
for file_record in $file_records; do
expected_checksum_type=`echo $file_record | awk '{print $1}'`
expected_checksum=`echo $file_record | awk '{print $2}'`
# Checksum type names are matched case-insensitively
shopt -s nocasematch
if [[ $expected_checksum_type == "MD5" ]]; then
echo -n "MD5..."
actual_checksum=`md5sum "$root/$file_to_check" | head -c 32`
elif [[ $expected_checksum_type == "SHA1" ]] || [[ $expected_checksum_type == "SHA" ]]; then
echo -n "SHA1..."
actual_checksum=`sha1sum "$root/$file_to_check" | head -c 40`
elif [[ $expected_checksum_type == "SHA256" ]]; then
echo -n "SHA256..."
actual_checksum=`sha256sum "$root/$file_to_check" | head -c 64`
elif [[ $expected_checksum_type == "NONE" ]]; then
echo -n "NONE..."
actual_checksum=$expected_checksum
else
# BUGFIX: unknown checksum type — skip it instead of comparing
# against a stale value left over from the previous iteration.
shopt -u nocasematch
continue
fi
shopt -u nocasematch
[[ "$expected_checksum" != "$actual_checksum" ]] && debug_job_err && return 1
done
debug_job_ok
return 0
}

289
util/dpkg-scanpackages Executable file
View File

@ -0,0 +1,289 @@
#!/usr/bin/perl
#
# dpkg-scanpackages
#
# Copyright © 2006-2012 Guillem Jover <guillem@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
use warnings;
use strict;
use IO::Handle;
use IO::File;
use Getopt::Long qw(:config posix_default bundling no_ignorecase);
use Dpkg ();
use Dpkg::Gettext;
use Dpkg::ErrorHandling;
#use Dpkg::Util qw(:list);
use Dpkg::Control;
use Dpkg::Version;
use Dpkg::Checksums;
use Dpkg::Compression::FileHandle;
use Dpkg::IPC;
textdomain('dpkg-dev');
# Do not pollute STDOUT with info messages
report_options(info_fh => \*STDERR);
# Bookkeeping for override processing: maintainers that already matched,
# maintainers that were rewritten, and override entries with no package.
my (@samemaint, @changedmaint);
my @spuriousover;
# Scanned packages keyed by name; each value is a list of control records.
my %packages;
# Package names that received an override entry.
my %overridden;
# Defaults and handlers for the command-line options below.
my %options = (help => sub { usage(); exit 0; },
version => \&version,
type => undef,
arch => undef,
multiversion => 0,
'extra-override'=> undef,
medium => undef,
);
# Getopt::Long specification matching %options keys.
my @options_spec = (
'help|?',
'version',
'type|t=s',
'arch|a=s',
'multiversion|m!',
'extra-override|e=s',
'medium|M=s',
);
# Print the program version (localized) and exit successfully.
sub version {
    printf _g("Debian %s version.\n"), $Dpkg::PROGNAME;
    exit;
}
# Print the usage/help text (localized) to stdout.
sub usage {
    printf _g(
"Usage: %s [<option>...] <binary-path> [<override-file> [<path-prefix>]] > Packages
Options:
  -t, --type <type>        scan for <type> packages (default is 'deb').
  -a, --arch <arch>        architecture to scan for.
  -m, --multiversion       allow multiple versions of a single package.
  -e, --extra-override <file>
                           use extra override file.
  -M, --medium <medium>    add X-Medium field for dselect multicd access method
  -?, --help               show this help message.
      --version            show the version.
"), $Dpkg::PROGNAME;
}
# Load a Debian override file and apply priority/section/maintainer overrides
# to the records already collected in %packages.  Override lines are
#   <package> <priority> <section> [<maintainer> | <old> => <new>]
# Packages named in the override but absent from the archive are remembered
# in @spuriousover; maintainer mismatches in @changedmaint/@samemaint.
sub load_override
{
    my $override = shift;
    my $comp_file = Dpkg::Compression::FileHandle->new(filename => $override);

    while (<$comp_file>) {
        s/\#.*//;    # strip comments
        s/\s+$//;    # strip trailing whitespace
        next unless $_;

        my ($p, $priority, $section, $maintainer) = split(/\s+/, $_, 4);
        if (not defined($packages{$p})) {
            push(@spuriousover, $p);
            next;
        }

        for my $package (@{$packages{$p}}) {
            if ($maintainer) {
                if ($maintainer =~ m/(.+?)\s*=\>\s*(.+)/) {
                    my $oldmaint = $1;
                    my $newmaint = $2;
                    my $debmaint = $$package{Maintainer};
                    # BUG FIX: this used none() from Dpkg::Util, but the
                    # "use Dpkg::Util qw(:list)" import is commented out
                    # above, which made this branch die at runtime with an
                    # undefined-subroutine error.  Plain grep in boolean
                    # context is equivalent and needs no extra module.
                    if (not grep { $debmaint eq $_ } split m{\s*//\s*}, $oldmaint) {
                        push(@changedmaint,
                             sprintf(_g(' %s (package says %s, not %s)'),
                                     $p, $$package{Maintainer}, $oldmaint));
                    } else {
                        $$package{Maintainer} = $newmaint;
                    }
                } elsif ($$package{Maintainer} eq $maintainer) {
                    push(@samemaint, " $p ($maintainer)");
                } else {
                    warning(_g('unconditional maintainer override for %s'), $p);
                    $$package{Maintainer} = $maintainer;
                }
            }
            $$package{Priority} = $priority;
            $$package{Section} = $section;
        }
        $overridden{$p} = 1;
    }
    close($comp_file);
}
# Apply an "extra override" file.  Each line reads
#   <package>[/<arch>] <field> <value>
# and sets <field> to <value> on every collected record of <package>.
# Entries restricted to an architecture other than --arch are skipped.
sub load_override_extra
{
    my $extra_override = shift;
    my $fh = Dpkg::Compression::FileHandle->new(filename => $extra_override);

    while (<$fh>) {
        s/\#.*//;
        s/\s+$//;
        next unless $_;

        my ($pr, $field, $value) = split(/\s+/, $_, 3);
        my ($p, $parch) = split(/\//, $pr, 2);

        if (defined $options{arch} and defined $parch) {
            next if $options{arch} ne $parch;
        }
        next unless defined $packages{$p};

        $$_{$field} = $value for @{$packages{$p}};
    }
    close($fh);
}
# --- Option parsing ----------------------------------------------------------
# Any warning from GetOptions (unknown option, bad argument) becomes a fatal
# usage error.
{
    local $SIG{__WARN__} = sub { usageerr($_[0]) };
    GetOptions(\%options, @options_spec);
}
if (not (@ARGV >= 1 and @ARGV <= 3)) {
    usageerr(_g('one to three arguments expected'));
}

my $type = defined($options{type}) ? $options{type} : 'deb';
my $arch = $options{arch};

# find(1) predicates: with --arch restrict to "*_all.<type>" and
# "*_<arch>.<type>"; otherwise take every "*.<type>" file.
my @find_args;
if ($options{arch}) {
    @find_args = ('(', '-name', "*_all.$type", '-o',
                  '-name', "*_${arch}.$type", ')');
}
else {
    @find_args = ('-name', "*.$type");
}

my ($binarydir, $override, $pathprefix) = @ARGV;
if (not -d $binarydir) {
    error(_g('binary dir %s not found'), $binarydir);
}
if (defined $override and not -e $override) {
    error(_g('override file %s not found'), $override);
}
$pathprefix //= '';

# --- Archive scan ------------------------------------------------------------
# Walk the archive (following symlinks) and collect the control data of every
# matching .deb into %packages.
my $find_h = IO::Handle->new();
open($find_h, '-|', 'find', '-L', "$binarydir/", @find_args, '-print')
    or syserr(_g("couldn't open %s for reading"), $binarydir);
FILE:
while (<$find_h>) {
    chomp;
    my $fn = $_;

    # Extract the control file with dpkg-deb and parse it into a record.
    my $output;
    my $pid = spawn(exec => [ 'dpkg-deb', '-I', $fn, 'control' ],
                    to_pipe => \$output);
    my $fields = Dpkg::Control->new(type => CTRL_INDEX_PKG);
    $fields->parse($output, $fn)
        or error(_g("couldn't parse control information from %s"), $fn);
    wait_child($pid, no_check => 1);
    if ($?) {
        warning(_g("\`dpkg-deb -I %s control' exited with %d, skipping package"),
                $fn, $?);
        next;
    }
    defined($fields->{'Package'})
        or error(_g('no Package field in control file of %s'), $fn);
    my $p = $fields->{'Package'};

    # Without --multiversion keep only the newest version of each package;
    # duplicates are reported and the older data dropped.
    if (defined($packages{$p}) and not $options{multiversion}) {
        foreach (@{$packages{$p}}) {
            if (version_compare_relation($fields->{'Version'}, REL_GT,
                                         $_->{'Version'}))
            {
                warning(_g('package %s (filename %s) is repeat but newer version;'),
                        $p, $fn);
                warning(_g('used that one and ignored data from %s!'),
                        $_->{Filename});
                $packages{$p} = [];
            } else {
                warning(_g('package %s (filename %s) is repeat;'), $p, $fn);
                warning(_g('ignored that one and using data from %s!'),
                        $_->{Filename});
                next FILE;
            }
        }
    }
    warning(_g('package %s (filename %s) has Filename field!'), $p, $fn)
        if defined($fields->{'Filename'});

    # Record the archive location, checksums and size of this .deb.
    $fields->{'Filename'} = "$pathprefix$fn";
    my $sums = Dpkg::Checksums->new();
    $sums->add_from_file($fn);
    foreach my $alg (checksums_get_list()) {
        if ($alg eq 'md5') {
            $fields->{'MD5sum'} = $sums->get_checksum($fn, $alg);
        } else {
            $fields->{$alg} = $sums->get_checksum($fn, $alg);
        }
    }
    $fields->{'Size'} = $sums->get_size($fn);
    $fields->{'X-Medium'} = $options{medium} if defined $options{medium};

    push @{$packages{$p}}, $fields;
}
close($find_h);

# --- Overrides and output ----------------------------------------------------
load_override($override) if defined $override;
load_override_extra($options{'extra-override'}) if defined $options{'extra-override'};

my @missingover=();
my $records_written = 0;
for my $p (sort keys %packages) {
    if (defined($override) and not defined($overridden{$p})) {
        push(@missingover,$p);
    }
    for my $package (@{$packages{$p}}) {
        # NOTE(review): relies on Dpkg::Control stringification producing the
        # full control stanza -- confirm against the Dpkg::Control version in
        # use.
        print("$package\n") or syserr(_g('failed when writing stdout'));
        $records_written++;
    }
}
close(STDOUT) or syserr(_g("couldn't close stdout"));

# Summary diagnostics (stderr via warning/info).
if (@changedmaint) {
    warning(_g('Packages in override file with incorrect old maintainer value:'));
    warning($_) foreach (@changedmaint);
}
if (@samemaint) {
    warning(_g('Packages specifying same maintainer as override file:'));
    warning($_) foreach (@samemaint);
}
if (@missingover) {
    warning(_g('Packages in archive but missing from override file:'));
    warning(' %s', join(' ', @missingover));
}
if (@spuriousover) {
    warning(_g('Packages in override file but not in archive:'));
    warning(' %s', join(' ', @spuriousover));
}
info(_g('Wrote %s entries to output Packages file.'), $records_written);

176
util/dpkg.sh Normal file
View File

@ -0,0 +1,176 @@
#!/bin/bash
# Guess filename based on POSSIBLE_COMPRESSIONS variable.
# Cycles through candidates (myname myname.gz myname.bz2 myname.xz etc...)
# and prints the first one that exists in the filesystem (empty if none).
# $1 -- base filename
guess_filename()
{
    local file="$1"
    local to_return=""
    local ext

    if [[ -f "$file" ]]; then
        # Exact name exists -- no guessing needed.
        to_return="$file"
    else
        # FIX: quote the array expansion so extensions containing glob
        # characters or whitespace cannot be mangled by the shell.
        for ext in "${POSSIBLE_COMPRESSIONS[@]}"; do
            if [[ -f "$file.$ext" ]]; then
                to_return="$file.$ext"
                break
            fi
        done
    fi
    echo "$to_return"
}
# Determines if file is compressed, and uncompresses it into stdout.
# Compression is detected from file(1) output, not from the extension.
# $1 -- file to cat
# $2 -- when anything but "false", try guess_filename() on a missing file
#       (default: "false", i.e. do not guess)
read_file()
{
    local file="$1"
    # BUG FIX: the original default was "${2:-'false'}", which expands to the
    # five characters 'false' INCLUDING the quotes, so the comparison below
    # never matched and guessing was silently enabled by default.
    local try_to_guess="${2:-false}"

    if [[ ! -f "$file" ]]; then
        if [[ "$try_to_guess" = "false" ]]; then
            return
        fi
        file=$(guess_filename "$file")
        [[ -f "$file" ]] || return
    fi

    case $(file "$file") in
        *gzip*)
            zcat "$file"
            return;;
        *bzip2*)
            bzcat "$file"
            return;;
        *XZ*)
            xzcat "$file"
            return;;
        *text*)
            # Must be plain text
            cat "$file"
            return;;
    esac
}
# Gets distro components from a Release file.
# $1 -- path to Release file
# $2 -- optional user component list; when given, only components present in
#       BOTH lists are printed (intersection)
get_dist_components()
{
    local dist_components=( $(read_file "$1" | egrep "^Components: " | cut -d' ' -f'2-') )
    local user_components=${2:-""}
    local to_return=""
    local ucomp

    if [[ -z "$user_components" ]]; then
        # BUG FIX: 'echo "$dist_components"' printed only the FIRST array
        # element; emit the whole component list instead.
        echo "${dist_components[@]}"
    elif [[ -z "${dist_components[*]}" ]]; then
        # Release file advertises nothing -- fall back to the user's list.
        echo "$user_components"
    else
        for ucomp in $user_components; do
            if contains "$ucomp" "${dist_components[@]}"; then
                to_return="${to_return} $ucomp"
            fi
        done
        echo $to_return
    fi
}
# Gets distro arches from a Release file, filtered by a user arch list.
# $1 -- path to Release file
# $2... -- user arch list
get_dist_architectures()
{
    local dist_arches=( $(read_file "$1" | egrep "^Architectures: " | cut -d' ' -f'2-') )
    # BUG FIX: the original used ( $* ), which also swept $1 (the Release
    # file path) into the user arch list; take only the arguments after it.
    local user_arches=( "${@:2}" )
    local to_return=""
    local arch

    # Keep only user arches advertised by the Release file.
    for arch in "${user_arches[@]}"; do
        if contains "$arch" "${dist_arches[@]}"; then
            to_return="${to_return} $arch"
        fi
        # Special case: 'all' is valid even though Release does not list it.
        if [[ "$arch" = "all" ]]; then
            to_return="${to_return} $arch"
        fi
    done
    echo $to_return
}
# Checks dist file validity against the MD5 data of a Release file.
# $1 -- full path to the Release file
# $2 -- path to the target file, relative to the repository root
# Returns 0 when the file matches (or no checksum data is available),
# 1 when size or md5sum disagree or the file is missing.
pkg_file_valid()
{
    local release="$1"
    local pkg="$2"

    # A Release file without an MD5Sum section cannot be used for
    # verification; treat the file as OK.  (Paths are quoted now -- the
    # original broke on paths containing whitespace.)
    if ! egrep -i '^MD5Sum:\s*$' "$release" &> /dev/null; then
        debug "Release file '$release' doesn't contain MD5 info"
        return 0
    fi

    local dist_base
    dist_base=$(dirname "$release")
    local pkg_path="$dist_base/$pkg"
    # Find this file's " <md5> <size> <path>" line (no UUOC).
    local pkg_line
    pkg_line=$(egrep -i "^ [0-9a-f]{32}\s+[0-9]+\s+$pkg\s*$" "$release")

    # Check if we found the file's md5 string; if not, return all ok.
    # TODO: make option to raise error on missing md5sum
    if [[ -z "$pkg_line" ]]; then
        error "Can't find md5sum for '$pkg' in '$release', skipping"
        return 0
    fi

    local expected_md5sum=$(echo "$pkg_line" | awk '{print $1}')
    local expected_size=$(echo "$pkg_line" | awk '{print $2}')
    local size=""
    local md5sum=""

    # FIX: test existence first so stat/md5sum are never run on (and never
    # print errors about) a missing file.
    if [[ -e "$pkg_path" ]]; then
        size=$(stat -c%s "$pkg_path")
        md5sum=$(md5sum "$pkg_path" | awk '{print $1}')
        if [[ "$size" = "$expected_size" && "$md5sum" = "$expected_md5sum" ]]; then
            debug "File '$pkg' checked by '$release' is OK"
            return 0
        fi
    fi

    error "File '$pkg_path' checked by '$release' is BAD"
    debug "File details:"
    debug "size = $size, expected $expected_size"
    debug "md5sum = $md5sum, expected $expected_md5sum"
    return 1
}
# DEPRECATED
# Parse a Debian 'Packages' index into an associative array, one field per key.
# $1 -- path to the Packages file (may be compressed, see read_file)
# NOTE(review): the while-loop runs in a pipeline subshell, so 'entry' and
# 'to_return' never reach the caller -- the function effectively returns
# nothing.  Kept only for reference.
# NOTE(review): "entry[$param]=#${line#*: }" stores every value with a
# leading '#', which looks like a leftover edit; left untouched because the
# function is deprecated and unused.
parse_pkg_file()
{
    local file="$1"
    local to_return=()
    local line
    declare -A entry
    read_file "$file" | \
    while read line; do
        if [[ -z "$line" ]]; then
            continue
        fi
        param=${line%%:*}
        entry[$param]=#${line#*: }
        # #echo "${entry[md5sum]} ${entry[size]} ${entry[filename]}"
        # entry=()
    done
}

116
util/msgs.sh Normal file
View File

@ -0,0 +1,116 @@
#!/bin/bash
# Generic message display and job-control helpers.
#
# Tunables (may be preset in the caller's environment):
#   DEBUG    -- "yes" enables debug messages and child command output
#   QUIET    -- "yes" suppresses console output (the log file is still written)
#   LOG_FILE -- file to append timestamped, color-stripped log lines to
DEBUG=${DEBUG:-"no"}
QUIET=${QUIET:-"no"}
# If no LOG_FILE set, discard log output
LOG_FILE=${LOG_FILE:-"/dev/null"}
################################################################################
# Magic FD manipulations
################################################################################
# Log sink: read stdin line by line, prefix each line with a timestamp,
# strip terminal color escape sequences, and append it to $LOG_FILE.
_log()
{
    local entry
    while IFS='' read -r entry; do
        printf '%s %s\n' "$(date)" "$entry" \
            | sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g" >> "$LOG_FILE"
    done
}
# FD 5 -- main output FD.
# Everything written to FD 5 is tee'd into the _log helper (timestamped,
# appended to $LOG_FILE); with QUIET=yes the console copy is discarded.
if [[ "$QUIET" = "yes" ]]; then
    exec 5> >(tee -a >(_log) > /dev/null)
else
    exec 5> >(tee -a >(_log))
fi
# Suppress child commands' stdout/stderr unless DEBUG=yes, in which case
# both are appended to the main FD.
if [[ "$DEBUG" = "no" ]]; then
    exec 1>/dev/null
    exec 2>/dev/null
else
    exec 1>&5
    exec 2>&5
fi
# FD 3 -- pretty messages FD.
# Prettified user-facing messages go here; by default an alias of FD 5.
exec 3>&5
################################################################################
# Simple messaging functions
################################################################################
# Accumulates every message passed to error() (inspected by callers).
msgs_errors=()
# Print one pretty message line (" * <text>") to the message FD (3).
msg()
{
    printf ' * %s\n' "$*" 1>&3
}
# Print a debug message only when DEBUG mode is on (status 1 otherwise).
debug()
{
    if [[ "$DEBUG" = "yes" ]]; then
        msg "DEBUG: $*"
    else
        return 1
    fi
}

# Print an informational message (msg() formatting inlined).
info()
{
    echo " * INFO: $*" 1>&3
}
# Print an error message and remember it in the msgs_errors array.
error()
{
    echo " * ERROR: $*" 1>&3
    msgs_errors+=( "$*" )
}
# Print a fatal message and terminate the whole script.
# $1 -- message
# $2 -- optional exit code (default: 1)
# BUG FIX: the original '([ ! -z $2 ] && exit $2) || exit 1' ran "exit $2"
# inside a SUBSHELL; the subshell's non-zero status then triggered '|| exit 1',
# so every custom non-zero exit code was collapsed to 1.
fatal()
{
    msg "FATAL: $1"
    exit "${2:-1}"
}
################################################################################
# Job control functions
################################################################################
# Name of the job currently being reported on.
msgs_jobname=""

# Begin a job line: prints "<name>..." with no newline; the verdict is
# appended later by job_ok/job_err/job_skip.
job_start()
{
    msgs_jobname="$1"
    printf '%b' "${msgs_jobname}..." 1>&3
    #logger -t "$TAG" "$msgs_jobname"
}
# Finish the current job line with a green "OK".
job_ok()
{
    printf '\e[0;32mOK\e[0m\n' 1>&3
    #logger -t "$TAG" "$msgs_jobname... OK !"
}

# Finish the current job line with a red "FAIL!" and record the failure in
# the global 'errors' string (one "\n"-separated entry per failed job).
job_err()
{
    printf '\e[0;31mFAIL!\e[0m\n' 1>&3
    #logger -t "$TAG" "$msgs_jobname... FAILED !"
    errors+="$msgs_jobname have failed\n"
}

# Finish the current job line with a yellow "SKIPPED!!".
job_skip()
{
    printf '\e[0;33mSKIPPED!!\e[0m\n' 1>&3
    #logger -t "$TAG" "$msgs_jobname... SKIPPED !"
}
# DEBUG-gated variants of the job_* helpers.  Each forwards to its job_*
# counterpart when DEBUG is "yes" and otherwise does nothing, returning
# status 1 (same as the original short-circuit form).
debug_job_start()
{
    if [[ "$DEBUG" = "yes" ]]; then
        job_start "$*"
    else
        return 1
    fi
}

debug_job_ok()
{
    if [[ "$DEBUG" = "yes" ]]; then
        job_ok
    else
        return 1
    fi
}

debug_job_err()
{
    if [[ "$DEBUG" = "yes" ]]; then
        job_err
    else
        return 1
    fi
}

debug_job_skip()
{
    if [[ "$DEBUG" = "yes" ]]; then
        job_skip
    else
        return 1
    fi
}

35
util/parsePackages.py Executable file
View File

@ -0,0 +1,35 @@
#!/usr/bin/python
# This script parses contents of given 'Package' files, and creates rsync
# command line to synchronize mirror
import re
import sys
# Regex to parse
regex=re.compile("^(?P<param>[a-z0-9]+): (?P<value>.*)$", re.IGNORECASE)
for pkgfile in sys.argv[1:]:
if pkgfile.endswith(".gz"):
import gzip
file = gzip.open(pkgfile)
elif pkgfile.endswith(".bz2"):
import bz2
file = bz2.BZ2File(pkgfile)
else:
file = open(pkgfile)
# Current package
pkg={}
for line in file:
# If we have a blank line - it's means that we're on package separator
# Print the information about current package and clear current package info
if line == "\n":
sys.stdout.write(pkg["filename"] + "\n")
if "md5sum" in pkg:
sys.stderr.write("MD5 " + pkg["md5sum"] + " " + pkg["filename"] + "\n")
pkg={}
m = regex.match(line)
if m:
pkg[m.group("param").lower()] = m.group("value")

44
util/parseSources.py Executable file
View File

@ -0,0 +1,44 @@
#!/usr/bin/python
# This script parses contents of given 'Source' files, and creates rsync
# command line to synchronize mirror
import re
import sys
# Regex to parse
regex=re.compile("^(?P<param>[a-zA-Z0-9_-]+):\s?(?P<value>.*)$")
files_regex=re.compile("(?P<md5>[a-f0-9]{32}) [0-9]+ (?P<filename>.*)")
for pkgfile in sys.argv[1:]:
if pkgfile.endswith(".gz"):
import gzip
file = gzip.open(pkgfile)
elif pkgfile.endswith(".bz2"):
import bz2
file = bz2.BZ2File(pkgfile)
else:
file = open(pkgfile)
pkg={}
cur_param=""
for line in file:
if line == "\n":
#print("----------------------------------------------------")
basedir=pkg['directory']
files=files_regex.findall(pkg['files'])
for md5, file in files:
print basedir + "/" + file
pkg={}
continue
m = regex.match(line)
if m:
cur_param = m.group("param").lower()
pkg[cur_param] = m.group("value")
elif line.startswith(" "):
# We got a multiliner continuation
pkg[cur_param] += line.lstrip()
else:
print "IMMPOSSIBIRUUUU!!!!"
sys.exit(999)

92
util/partial_ubuntu.sh Executable file
View File

@ -0,0 +1,92 @@
#!/bin/bash
# Compute the subset of the upstream Ubuntu archive required by MOS/Fuel.
# Resolves requirements-deb.txt against a disposable apt state and produces:
#   $apt_altstate/deb                -- pool-relative paths of needed packages
#   $apt_altstate/netboot.list and netboot_md5.list -- netboot installer images
#
# Environment:
#   FUEL_BRANCH  -- fuel-main branch providing requirements-deb.txt
#   apt_altstate -- scratch directory for the alternate apt state.
#                   NOTE(review): the mktemp initialisation below is commented
#                   out, so this variable must be supplied by the caller; if
#                   it is empty, the derived paths degrade to system locations
#                   such as /var/lib/apt/lists -- confirm callers export it.
if [ -z "$FUEL_BRANCH" ]; then
    echo "`basename $0`: FUEL_BRANCH is not defined!"
    exit 1
fi
#apt_altstate=`mktemp -d --suffix="-apt-altstate"`
# Layout of the throw-away apt state tree.
apt_lists_dir="$apt_altstate/var/lib/apt/lists"
apt_cache_dir="$apt_altstate/var/cache/apt"
null_dpkg_status="$apt_altstate/var/lib/dpkg/status"
apt_alt_etc="$apt_altstate/etc/apt"
mkdir -p "$apt_lists_dir"
mkdir -p "$apt_cache_dir"
mkdir -p "$apt_alt_etc/trusted.gpg.d/"
mkdir -p "$apt_alt_etc/preferences.d/"
mkdir -p "${null_dpkg_status%/*}"
# An empty dpkg status file makes apt consider every package uninstalled.
touch "${null_dpkg_status}"
cp -a /usr/share/keyrings/ubuntu*.gpg "$apt_alt_etc/trusted.gpg.d/"
# Point every apt invocation at the alternate state tree.
apt_altstate_opts="-o APT::Get::AllowUnauthenticated=1"
apt_altstate_opts="${apt_altstate_opts} -o Dir=${apt_altstate}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::State::Lists=${apt_lists_dir}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::State::status=${null_dpkg_status}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::Cache=${apt_cache_dir}"
# Register the plain Ubuntu mirror dists as apt sources.
if ! source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/../config/ubuntu.cfg"; then
    echo "`basename $0`: cannot read config for Ubuntu, please create one!"
    exit 1
fi
for dist in ${DISTs[@]}; do
    echo deb http://${UPSTREAM}/${UPSTREAM_DIR} $dist "${DIST_COMPONENTs[$dist]}" >> ${apt_alt_etc}/sources.list
done
# Register the MOS dists as apt sources as well (this config overwrites the
# DISTs/UPSTREAM variables set above).
if ! source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/../config/mos-ubuntu.cfg"; then
    echo "`basename $0`: cannot read config for MOS Ubuntu, please create one!"
    exit 1
fi
for dist in ${DISTs[@]}; do
    echo deb http://${UPSTREAM}/${UPSTREAM_DIR_HTTP} $dist "${DIST_COMPONENTs[$dist]}" >> ${apt_alt_etc}/sources.list
done
# Prefer packages from $UPSTREAM over anything else (a pin above 1000 wins
# even over newer versions elsewhere).
cat <<EOF > ${apt_alt_etc}/preferences
Package: *
Pin: origin ${UPSTREAM}
Pin-Priority: 1001
EOF
if ! apt-get $apt_altstate_opts update; then
    echo "`basename $0`: failed to populate alt apt state!"
    exit 1
fi
# Fetch the package list Fuel requires for the selected branch.
if ! wget -nv https://raw.githubusercontent.com/stackforge/fuel-main/${FUEL_BRANCH}/requirements-deb.txt -O $apt_altstate/requirements-deb.txt; then
    echo "`basename $0`: failed to fetch requirements-deb.txt"
    exit 1
fi
echo "Processing Fuel dependencies..."
has_apt_errors=''
while read pkg; do
    downloads_list="$apt_altstate/downloads_${pkg}.list"
    # --print-uris resolves the full dependency closure without downloading.
    if ! apt-get $apt_altstate_opts --print-uris --yes -qq install $pkg >"${downloads_list}" 2>>"$apt_altstate/apt-errors.log"; then
        echo "package $pkg can not be installed" >>$apt_altstate/apt-errors.log
        # run apt-get once more to get a verbose error message
        apt-get $apt_altstate_opts --print-uris --yes install $pkg >>$apt_altstate/apt-errors.log 2>&1 || true
        has_apt_errors='yes'
    fi
    # Keep only the quoted URI out of each "'uri' name size md5" line.
    sed -i "${downloads_list}" -n -e "s/^'\([^']\+\)['].*$/\1/p"
done < $apt_altstate/requirements-deb.txt
if [ -n "$has_apt_errors" ]; then
    echo "`basename $0`some packages are not installable" >&2
    cat < $apt_altstate/apt-errors.log >&2
    exit 1
fi
# Prepare list of upstream packages to download: drop URIs already served by
# $UPSTREAM and reduce the rest to pool-relative paths.
cat $apt_altstate/downloads_*.list | grep -v ${UPSTREAM} | perl -p -e 's/^.*?pool/pool/' | sort -u > $apt_altstate/deb
rm -f $apt_altstate/downloads_*.list
# Netboot installer images to mirror; checksums are unknown at this point,
# hence the NONE placeholders in the md5 list.
NETBOOT_FILES="linux initrd.gz"
for dload in $NETBOOT_FILES; do
    echo dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/${dload} >> $apt_altstate/netboot.list
    echo NONE NONE dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/${dload} >> $apt_altstate/netboot_md5.list
done
exit 0

59
util/regenerate_ubuntu_repo Executable file
View File

@ -0,0 +1,59 @@
#!/bin/bash
# Regenerate the Packages/Packages.gz indices and the Release file of a local
# Ubuntu repository after its pool has been modified.
# Based on the method described here:
# http://troubleshootingrange.blogspot.com/2012/09/hosting-simple-apt-repository-on-centos.html
#
# Arguments:
#   $1 -- repository root (contains dists/ and pool/)
#   $2 -- distribution name under dists/
# Environment:
#   BINROOT -- root of this tool checkout, providing util/dpkg-scanpackages.
#              NOTE(review): assumed to be exported by the caller -- confirm.
set -e
ARCH=amd64
REPO_PATH=$1
REPONAME=$2
BINDIR=${REPO_PATH}/dists/${REPONAME}/main
# Override files consumed by dpkg-scanpackages (paths relative to the repo).
binoverride=indices/override.${REPONAME}.main
binoverride_udeb=indices/override.${REPONAME}.main.debian-installer
extraoverride=indices/override.${REPONAME}.extra.main
# Preserve the first 8 header lines of the existing Release file.
release_header=`head -8 ${REPO_PATH}/dists/${REPONAME}/Release`
package_deb=${BINDIR}/binary-${ARCH}/Packages
package_udeb=${BINDIR}/debian-installer/binary-${ARCH}/Packages
cd ${REPO_PATH}
# Scan *.deb packages
$BINROOT/util/dpkg-scanpackages -m --extra-override ${extraoverride} -a ${ARCH} pool/main ${binoverride} > ${package_deb}.tmp 2>/dev/null
gzip -9c ${package_deb}.tmp > ${package_deb}.gz.tmp
# Scan *.udeb packages
$BINROOT/util/dpkg-scanpackages -t udeb -m -a ${ARCH} pool/debian-installer ${binoverride_udeb} > ${package_udeb}.tmp 2>/dev/null
gzip -9c ${package_udeb}.tmp > ${package_udeb}.gz.tmp
# Replace original files with new ones
mv --backup -f ${package_deb}.tmp ${package_deb}
mv --backup -f ${package_deb}.gz.tmp ${package_deb}.gz
mv --backup -f ${package_udeb}.tmp ${package_udeb}
mv --backup -f ${package_udeb}.gz.tmp ${package_udeb}.gz
# Generate release file
cd ${REPO_PATH}/dists/${REPONAME}
echo "$release_header" > Release.tmp
# Generate hashes: for every index file under main/, append
# " <hash> <size> <path>" lines grouped into one section per algorithm.
c1=(MD5Sum: SHA1: SHA256: SHA512:)
c2=(md5 sha1 sha256 sha512)
i=0
while [ $i -lt ${#c1[*]} ]; do
    echo ${c1[i]}
    for hashme in `find main -type f \( -not -name "*~" -name "Package*" -o -name "Release*" \)`; do
        # openssl prints "ALG(file)= <hash>"; keep only the hash itself.
        ohash=`openssl dgst -${c2[$i]} ${hashme}`
        chash="${ohash##* }"
        size=`stat -c %s ${hashme}`
        echo " ${chash} ${size} ${hashme}"
    done
    i=$(( $i + 1));
done >> Release.tmp
mv --backup -f Release.tmp Release

74
util/rsync.sh Normal file
View File

@ -0,0 +1,74 @@
#!/bin/bash
# Check whether a file or directory exists on the remote rsync mirror.
# $1 -- path relative to the upstream module root
# Returns rsync's exit status (0 when the path is listable).
rsync_file_exists()
{
    /usr/bin/rsync --no-motd --list-only "${UPSTREAM}::${UPSTREAM_DIR}/$1" &> /dev/null
}
# Fetches list of files from remote rsync repo by given mask
# $1 -- file mask (relative to the upstream module root)
# Prints the matching file paths on one line; directory entries are filtered
# out and symlink targets ("-> target") are stripped.
rsync_ls()
{
    local to_return=()
    local mask="$1"
    # rsync listing format: "mode size date time path [-> target]"; the awk
    # program blanks the first four columns, leaving only the path.
    # NOTE(review): 'files' is not declared local and leaks into the caller.
    files=`/usr/bin/rsync --no-motd --list-only \
    --relative --recursive --no-implied-dirs \
    --perms --links --times --hard-links --sparse --safe-links \
    "${UPSTREAM}::${UPSTREAM_DIR}/$mask" | \
    grep -v "^d" | sed -e "s/->.*//g" | awk '{$1=$2=$3=$4=""}1'`
    for file in $files; do
        to_return+=( "$file" )
    done
    echo "${to_return[@]}"
    return 0
}
# Rsync wrapper function: fetch one path from the upstream mirror.
# $1 -- source path relative to the upstream module root
# $2 -- local destination path (parent directory is created if needed)
# $3... -- extra rsync options
# Returns rsync's exit status.
fetch()
{
    # FIX: variables are now local (they used to leak into the caller), the
    # dirname argument is quoted, and extra options are kept as array
    # elements instead of being re-split from "$*".
    local src_path="$1"
    local dst_path="$2"
    shift 2
    local opt_args=( "$@" )
    local dst_dir

    # Create the destination directory if needed.
    dst_dir=$(dirname "$dst_path")
    [[ -d "$dst_dir" ]] || mkdir -p "$dst_dir"

    debug_job_start "Fetching '$src_path' to '$dst_path' with params '${opt_args[*]}'"
    /usr/bin/rsync --no-motd --perms --links --times --hard-links --sparse --safe-links \
        "${opt_args[@]}" \
        "${UPSTREAM}::${UPSTREAM_DIR}/$src_path" "$dst_path"
    local rsync_ec="$?"

    if [[ $rsync_ec = 0 ]]; then
        debug_job_ok
    else
        debug_job_err
    fi
    return $rsync_ec
}
# Fetches all files to the specified root
# $1 -- local root; every file is stored under it by its relative path
# $* -- (remaining) files to fetch, relative to the upstream module root
# NOTE(review): the 'fetched' array is populated but never echoed or
# returned, so callers cannot observe it -- presumably only the debug log
# output is used; confirm before relying on a result.
fetch_all()
{
    local root="$1"; shift
    local fetched=()
    local rsync_out=""
    # Feed the file list to rsync on stdin via --files-from; --out-format
    # prints each transferred name so it can be logged below.
    # NOTE(review): --no-motd is passed twice; harmless.
    rsync_out=` echo $* | tr ' ' '\n' | \
    rsync --no-motd --relative --out-format='%n' --files-from=- \
    --no-implied-dirs --no-motd \
    --perms --links --times --hard-links --sparse \
    "${UPSTREAM}::${UPSTREAM_DIR}/" "$root" 2> /dev/null`
    for line in $rsync_out; do
        debug "Fetched file $LOCAL_DIR/$line"
        fetched+=( "$LOCAL_DIR/$line" )
    done
}