packetary/deb-mirror
Sergey Kulanov f2bccfc0b3 Bump version to 7.0. Update code from upstream repo
1. Bump version to 7.0

2. Use perestroika v5 repo schema

3. Fixing pool permissions after rsync
This change corrects the permissions of the pool folder so that it can
be consumed as a repository mirror via nginx.

4. Add packages (deb, rpm) specs

Related-bug: #1478118
Related-bug: #1476561
Change-Id: Iada03ddab5b7d551f13692cb1c87a634c2f10947
2015-08-11 17:41:01 +03:00

#!/bin/bash
# Source separate config file if given
CONFIG_FILE="$1"
[[ -r "$CONFIG_FILE" ]] && . "$CONFIG_FILE"
# Sync source
UPSTREAM=${UPSTREAM:-""}
UPSTREAM_DIR=${UPSTREAM_DIR:-""}
# Sync destination
LOCAL_DIR=${LOCAL_DIR:-""}
#declare -A DIST_COMPONENTs
# Optional fetch configuration
FETCH_I18N=${FETCH_I18N:-"yes"} # Fetch translations
FETCH_SOURCES=${FETCH_SOURCES:-"no"} # Fetch packages sources
FETCH_CONTENTS=${FETCH_CONTENTS:-"no"} # Fetch 'Contents' file for distro
FETCH_INSTALLER=${FETCH_INSTALLER:-"no"} # Fetch separate 'debian-installer'
FETCH_DIFF=${FETCH_DIFF:-"no"} # Fetch diffs of 'Packages'
FETCH_INDICES=${FETCH_INDICES:-"yes"} # Fetch indices
# Misc
ARCH_ALL_IS_MANDATORY=${ARCH_ALL_IS_MANDATORY:-"no"}
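# The config file is passed as the first argument and sourced if readable.
# Example config (illustrative values):
#   UPSTREAM="archive.ubuntu.com"
#   UPSTREAM_DIR="ubuntu"
#   LOCAL_DIR="/srv/mirror/ubuntu"
#   ARCHs=( amd64 i386 all )
#   DISTs=( "trusty" "trusty-updates" )
#   declare -A DIST_COMPONENTs
#   DIST_COMPONENTs["trusty"]="main universe"
#   DIST_COMPONENTs["trusty-updates"]="main universe"
#   I18Ns=( en )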
#------------------------------------------------------------------------------#
POSSIBLE_COMPRESSIONS=( gz bz2 xz lzma )
BINROOT=$(dirname "$(readlink -f "$0")")
. "$BINROOT/util/msgs.sh"
. "$BINROOT/util/rsync.sh"
. "$BINROOT/util/dpkg.sh"
. "$BINROOT/util/checksum.sh"
[[ -n "$UPSTREAM" ]] || fatal "UPSTREAM is not defined in config"
[[ -n "$UPSTREAM_DIR" ]] || fatal "UPSTREAM_DIR is not defined in config"
[[ -n "$LOCAL_DIR" ]] || fatal "LOCAL_DIR is not defined in config"
[[ -n "${ARCHs[@]}" ]] || fatal "ARCHs is not defined in config"
[[ -n "${DISTs[@]}" ]] || fatal "DISTs is not defined in config"
#------------------------------------------------------------------------------#
# Checks if 'value' contained within 'array'
# USAGE: contains 'what' 'where'
# $1 -- value to find in array
# $2 -- array to search
contains()
{
local e
for e in "${@:2}"; do [[ "$e" = "$1" ]] && return 0; done
return 1
}
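# Example: contains "amd64" "${ARCHs[@]}" && echo "amd64 is mirrored"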
on_SIGINT()
{
fatal "Got user interrupt, aborting"
exit 1
}
#------------------------------------------------------------------------------#
# MAIN()
#------------------------------------------------------------------------------#
# Trap user abort
trap "on_SIGINT" INT
info "Started $0 $*"
debug "Upstream source is: $UPSTREAM::$UPSTREAM_DIR"
debug "Local dir is: $LOCAL_DIR"
debug "Architectures to sync: ${ARCHs[@]}"
debug "Dists to sync: ${DISTs[@]}"
debug "FETCH_I18N: $FETCH_I18N "
debug "FETCH_SOURCES: $FETCH_SOURCES "
debug "FETCH_CONTENTS: $FETCH_CONTENTS "
debug "FETCH_INSTALLER: $FETCH_INSTALLER "
debug "FETCH_DIFF: $FETCH_DIFF "
debug "FETCH_INDICES: $FETCH_INDICES "
debug "ARCH_ALL_IS_MANDATORY: $ARCH_ALL_IS_MANDATORY"
debug "POSSIBLE_COMPRESSIONS: ${POSSIBLE_COMPRESSIONS[@]}"
debug "BINROOT: $BINROOT "
debug "PARTIAL_UPSTREAM: $PARTIAL_UPSTREAM "
debug "PARTIAL_UPSTREAM_PATH: $PARTIAL_UPSTREAM_PATH"
# Create dirs
mkdir -p $LOCAL_DIR/dists
mkdir -p $LOCAL_DIR/pool
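# The local mirror follows the standard Debian archive layout:
#   $LOCAL_DIR/dists/<dist>/...  - Release files and package indices
#   $LOCAL_DIR/pool/...          - the package files themselves
#   $LOCAL_DIR/indices/...       - override files (when FETCH_INDICES=yes)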
# Arrays of Packages/Sources index files that contain package descriptions
packages_pool_files=()
sources_pool_files=()
if rsync_file_exists "."; then
info "Upstream mirror $UPSTREAM supports rsync protocol"
else
fatal "Upstream mirror $UPSTREAM does not support rsync protocol, aborting"
fi
debug_job_start "Checking if upstream mirror update is in progress..."
while rsync_file_exists "Archive-Update-in-Progress*"; do
info "'Archive-Update-in-Progress' file found on upstream mirror. Sleeping for 20 seconds"
sleep 20
done ; debug_job_ok
################################################################################
# Stage 1
# Download metainformation files
################################################################################
for dist in "${DISTs[@]}"; do
info "Fetching dist '$dist' lists"
############################################################################
# Phase 1: Check if we have aliased distro and create necessary symlinks
# Aliases are specified after the '@' sign in the dist name, separated by commas.
# For example: 'wheezy@testing,somealias' means dist 'wheezy' with symlinks
# 'testing' and 'somealias' pointing to it
############################################################################
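# Example result for 'wheezy@testing': dists/testing becomes a symlink to dists/wheezy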
# TODO: get aliases from Release suite
if [[ "$dist" == *@* ]]; then
normal_dist="${dist%%@*}"
for dist_alias in `echo ${dist#*@} | tr ',' ' '`; do
if [[ "$dist_alias" == "$normal_dist" ]]; then
continue
fi
if [ ! -L $LOCAL_DIR/dists/$dist_alias ]; then
debug "Creating dist alias '$dist_alias' -> '$normal_dist'"
ln -s "$normal_dist" "$LOCAL_DIR/dists/$dist_alias" || \
error "Error creating alias for $normal_dist ($dist_alias)"
fi
done
dist="$normal_dist"
unset normal_dist
fi
############################################################################
# Phase 2: Create distribution dir
############################################################################
mkdir -p $LOCAL_DIR/dists/$dist/
############################################################################
# Phase 3: Fetch Release files
# InRelease uses the new scheme of inline Release signing;
# the old scheme uses separate 'Release' and 'Release.gpg' files
############################################################################
debug "Fetching Release files"
for rel_file in InRelease Release Release.gpg; do
fetch "/dists/$dist/$rel_file" "$LOCAL_DIR/dists/$dist/"
done
release_file="$LOCAL_DIR/dists/$dist/InRelease"
# Check InRelease file
if [ -f "$release_file" ]; then
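# The Valid-Until header in InRelease looks like, e.g.: "Valid-Until: Sat, 22 Aug 2015 12:34:56 UTC"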
inrl_valid=$(date -d "`grep Valid-Until "$release_file" | awk '{$1=""; print $0}'`" +%s)
now=$(date +%s)
if [[ $(( $now - $inrl_valid )) -gt -86400 ]]; then
info "InRelease file will expire before the next update, removing it..."
rm -f "$release_file"
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
else
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
[ -f "$release_file" ] || fatal "Unable to find release file for dist $dist"
debug "Got Release file '$release_file'"
############################################################################
# Phase 4: check release signature
############################################################################
if [[ "$release_file" =~ ".*InRelease$" ]]; then
gpg --verify "$release_file" || \
fatal "Failed to check signature for $release_file"
elif [[ "$release_file" =~ Release$ ]]; then
gpg --verify "${release_file}.gpg" "${release_file}" || \
fatal "Failed to check signature for $release_file"
fi
############################################################################
# Phase 5: Determine which components and arches to download
# Case A: If we have a user-specified component list and found none in the
# distro description, blindly use the user-given values
# Case B: If we have no user-specified component list, try to get it from the
# repository Release file; if that fails, bail out
# Case C: If we have both, use their intersection
############################################################################
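# Example (Case C): DIST_COMPONENTs[$dist]="main universe" while the Release file
# lists "main restricted universe multiverse" -> only "main universe" is fetched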
debug "Calculating arches/components to fetch from dist"
components=`get_dist_components $release_file "${DIST_COMPONENTs[$dist]}"`
arches=`get_dist_architectures $release_file ${ARCHs[@]}`
# Phase 6: Fork components into binary_components
# That allows us to add special sub-components specific to binary components
# such as 'debian-installer'
binary_components="$components"
############################################################################
# Phase 7: Check if we must fetch 'debian-installer' sub-components and add
# them to the binary_components list if needed
############################################################################
if [[ "$FETCH_INSTALLER" = "yes" ]]; then
for component in $components; do
if rsync_file_exists "dists/$dist/$component/debian-installer"; then
debug "Adding debian-installer '$component/debian-installer'"
binary_components="$binary_components $component/debian-installer"
else
error "Not found debian-installer at '$component/debian-installer'"
fi
done
fi
############################################################################
# Phase 8: Fetch binary components 'Packages' indexes and diffs
############################################################################
debug "Will fetch binary components: $binary_components"
for component in $binary_components; do
info "Fetching component '$component' binary package lists"
# Component path
comp_path="dists/$dist/$component"
# Create component dir
mkdir -p "$LOCAL_DIR/$comp_path"
# First, fetch binary packages lists
for arch in $arches; do
arch_path="$comp_path/binary-$arch"
# Check if the remote dir exists
if ! rsync_file_exists "$arch_path"; then
# A missing 'all' architecture is a non-critical error
if [[ "$arch" = "all" ]] && [[ "$ARCH_ALL_IS_MANDATORY" != "yes" ]]; then
debug "Missing 'all' architecture in $dist/$component"
continue
fi
fatal "Arch '$arch' in '$dist/$component' doesn't exist"
fi
# Prepare component dir
mkdir -p "$LOCAL_DIR/$arch_path"
to_fetch=()
# List of files that we want to dl
to_fetch+=( "$arch_path/Release" )
to_fetch+=( "$arch_path/Packages" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$arch_path/Packages.$ext" )
done
# Check if we also want the Packages.diff Index file
if [[ "$FETCH_DIFF" = "yes" ]] && \
rsync_file_exists "$arch_path/Packages.diff/Index"; then
to_fetch+=( `rsync_ls "$arch_path/Packages.diff/*"` )
fi
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify all fetched files
for file in ${fetched_files[@]}; do
# Skip checking of diffs; they are covered by the Index file,
# so validate only the Index file itself
if [[ "`dirname $file`" = "$LOCAL_DIR/$arch_path/Packages.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
# Check file by Release file's checksum
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Make sure that we have at least one valid packages list
packages_file=`guess_filename "$LOCAL_DIR/$arch_path/Packages"`
if [[ -z "$packages_file" ]]; then
fatal "Failed to find Packages file at $arch_path"
fi
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$arch_path/Packages.diff/Index"`
if [[ "$FETCH_DIFF" = "yes" ]] && [[ -n "$diff_index" ]]; then
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$arch_path/Packages.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$arch_path/Packages.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
error "Checksum failed on file $arch_path/Packages.diff/$diff, removing all diffs"
rm -rf "$LOCAL_DIR/$arch_path/Packages.diff"
break
fi
debug_job_ok
done
fi
# Remember this Packages file; it is parsed in Stage 2 to build the download list
packages_pool_files+=( "$packages_file" )
done
done
############################################################################
# Phase 9: Fetch additional stuff for components, i18n, sources, 'Contents'
############################################################################
for component in $components; do
comp_path="dists/$dist/$component"
mkdir -p "$LOCAL_DIR/$comp_path"
# Second, the i18n packages
info "Fetching section '$component' i18n"
if [[ "$FETCH_I18N" = "yes" ]]; then
mkdir -p "$LOCAL_DIR/$comp_path/i18n/"
to_fetch=()
to_fetch+=( "$comp_path/i18n/Index" )
for i18n in ${I18Ns[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n" )
# Translation files may have diffs too
to_fetch+=( "$comp_path/i18n/Translation-$i18n.diff/*" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
# Skip checking of diffs, except for the Index file
if [[ "`dirname $file`" = "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/Index"`
if [[ -n "$diff_index" ]]; then
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
fatal "Checksum failed on file $comp_path/i18n/Translation-$i18n.diff/$diff"
fi
debug_job_ok
done
fi
done
fi
# Third is the Sources
if [[ "$FETCH_SOURCES" = "yes" ]]; then
info "Fetching component '$component' source package lists"
mkdir -p "$LOCAL_DIR/$comp_path/source/"
to_fetch=()
to_fetch+=( "$comp_path/source/Release" )
to_fetch+=( "$comp_path/source/Sources" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/source/Sources.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
sources_file=`guess_filename "$LOCAL_DIR/$comp_path/source/Sources"`
if [[ -z "$sources_file" ]]; then
fatal "Failed to find Sources file at $LOCAL_DIR/$comp_path/source"
fi
# Remember this Sources file; it is parsed in Stage 2 to build the download list
sources_pool_files+=( "$sources_file" )
fi
# Fetch the component contents packs
if [[ "$FETCH_CONTENTS" = "yes" ]]; then
info "Fetching component '$component' content lists"
to_fetch=()
for arch in $arches; do
to_fetch+=( "$comp_path/Contents-$arch" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/Contents-$arch.$ext" )
done
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
# If our component is "main", make a link in the root of the distribution
if [[ "$component" = "main" ]]; then
for arch in $arches; do
if [[ -e "$LOCAL_DIR/dists/$dist/$component/Contents-$arch.gz" ]]; then
debug "Creating link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
ln -sf main/Contents-$arch.gz $LOCAL_DIR/dists/$dist/Contents-$arch.gz
else
debug "Deleting link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
rm -f "$LOCAL_DIR/dists/$dist/Contents-$arch.gz"
fi
done
fi
fi
done
done
# Get the indices
if [[ "$FETCH_INDICES" = "yes" ]]; then
info "Fetching indices"
mkdir -p "$LOCAL_DIR/indices/"
for dist in "${DISTs[@]}"; do
fetch "/indices/override.$dist.*" "$LOCAL_DIR/indices/"
done
fi
################################################################################
# Stage 2
# Download pool of packages
################################################################################
info "Parsing package and sources files:"
info "${packages_pool_files[@]}"
info "${sources_pool_files[@]}"
files_to_dl_list=`mktemp --suffix="-deb-mirror"`
# File that contains md5sums of deb pkgs
deb_md5=`mktemp --suffix="-deb-mirror-md5"`
rsync_log=`mktemp --suffix="-deb-mirror-rslog"`
sort_temp=`mktemp --suffix="-deb-mirror-sort"`
$BINROOT/util/parsePackages.py ${packages_pool_files[@]} > "$files_to_dl_list" \
2> "$deb_md5" && \
$BINROOT/util/parseSources.py ${sources_pool_files[@]} >> "$files_to_dl_list" || \
fatal "Unable to create list of packages to fetch"
sort -u "$files_to_dl_list" > "$sort_temp" ; mv "$sort_temp" "$files_to_dl_list"
sort -u -k 3,3 "$deb_md5" > "$sort_temp" ; mv "$sort_temp" "$deb_md5"
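# parsePackages.py/parseSources.py are expected to print pool-relative file paths
# on stdout; parsePackages.py also writes checksum lines to stderr with the file
# path in the third field (hence the 'sort -u -k 3,3' above and the "$3 in arr"
# awk joins used for the partial mirror below)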
# If partial mirroring is enabled, get the list of partial mirror packages
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
info "Resolving dependencies for partial mirror"
# Detect kernel version of debian-installer
fuel_config=$BINROOT/config/config.mk
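# config.mk is assumed to hold make-style assignments, e.g. 'UBUNTU_RELEASE?=trusty'
# (illustrative); the awk calls below take the value after the ':', '=' or '?' separator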
export UBUNTU_RELEASE=`grep -w ^UBUNTU_RELEASE $fuel_config | awk -F '[:=?]' '{print $NF}'`
export UBUNTU_NETBOOT_FLAVOR=`grep -w ^UBUNTU_NETBOOT_FLAVOR $fuel_config | awk -F '[:=?]' '{print $NF}'`
export UBUNTU_KERNEL_FLAVOR=`grep -w ^UBUNTU_KERNEL_FLAVOR $fuel_config | awk -F '[:=?]' '{print $NF}'`
export UBUNTU_ARCH=`grep -w ^UBUNTU_ARCH $fuel_config | awk -F '[:=?]' '{print $NF}'`
INITRD_DIR="/dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/"
mkdir -p "$LOCAL_DIR/$INITRD_DIR"
fetch "/$INITRD_DIR/initrd.gz" "$LOCAL_DIR/$INITRD_DIR"
export UBUNTU_INSTALLER_KERNEL_VERSION=`zcat "$LOCAL_DIR/$INITRD_DIR/initrd.gz" | cpio --list 'lib/modules/*/kernel' 2>/dev/null | cut -d"/" -f 3`
debug "Detected debian-installer kernel version: "$UBUNTU_INSTALLER_KERNEL_VERSION
# Generate list of MOS dependencies
export apt_altstate=`mktemp -d --suffix="-apt-altstate"`
export BINROOT
export FUEL_VERSION
if [[ "$DOCKER_MODE" = "true" ]]; then
( docker ps -a | grep fuel-createmirror ) && docker rm -f fuel-createmirror
# docker pull ubuntu:latest
docker -D run -d --name=fuel-createmirror --net=host -a stdout -a stderr -t \
-e UBUNTU_RELEASE=$UBUNTU_RELEASE -e UBUNTU_NETBOOT_FLAVOR=$UBUNTU_NETBOOT_FLAVOR \
-e UBUNTU_INSTALLER_KERNEL_VERSION=$UBUNTU_INSTALLER_KERNEL_VERSION -e UBUNTU_KERNEL_FLAVOR=$UBUNTU_KERNEL_FLAVOR \
-e RSYNC_PROXY=$RSYNC_PROXY -e FUEL_VERSION=$FUEL_VERSION -e http_proxy=$http_proxy \
-e UBUNTU_ARCH=$UBUNTU_ARCH -e BINROOT=$BINROOT \
-e apt_altstate=$apt_altstate -v $BINROOT:$BINROOT:rw -v $apt_altstate:$apt_altstate:rw ubuntu:latest
dockerctl shell fuel-createmirror $BINROOT/util/partial_ubuntu.sh || fatal "Cannot calculate list of dependencies"
# cleanup ubuntu container
docker rm -f fuel-createmirror
else
$BINROOT/util/partial_ubuntu.sh || fatal "Cannot calculate list of dependencies"
fi
# Create download lists for deb and udeb
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/deb "$deb_md5" > $apt_altstate/deb_md5
grep "\.udeb$" "$files_to_dl_list" | egrep -v "generic|virtual" > $apt_altstate/udeb_nonkernel
grep "\.udeb$" "$files_to_dl_list" | egrep "generic|virtual" | grep $UBUNTU_INSTALLER_KERNEL_VERSION > $apt_altstate/udeb_kernel
cat $apt_altstate/udeb_nonkernel $apt_altstate/udeb_kernel | sort -u > $apt_altstate/udeb
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/udeb "$deb_md5" > $apt_altstate/udeb_md5
cat $apt_altstate/netboot.list $apt_altstate/udeb $apt_altstate/deb > "$files_to_dl_list"
cat $apt_altstate/netboot_md5.list $apt_altstate/udeb_md5 $apt_altstate/deb_md5 > "$deb_md5"
rm -rf "$apt_altstate"
fi # "$PARTIAL_UPSTREAM" = "1"
info "Downloading pool files"
rsync --verbose --out-format="%i %n" --stats \
--recursive --perms --copy-links --times --hard-links --sparse --safe-links \
--exclude=".tmp/" --exclude=".temp/" --exclude=".~tmp~/" \
--files-from="$files_to_dl_list" \
--bwlimit=5192 \
"${UPSTREAM}::${UPSTREAM_DIR}/" "$LOCAL_DIR" | tee "$rsync_log"
# fix directory permissions for pool files
find "$LOCAL_DIR" -type d -exec chmod 755 {} \;
# --files-from="$files_to_dl_list" \--block-size=8192
#--max-delete=40000 --delay-updates --delete --delete-after \
# Check if rsync was ok
if [[ $? != 0 ]]; then
rm "$files_to_dl_list"
fatal "Failed to sync all package files, see log for details"
#error "Failed to sync all package files, see log for details"
else
info "Primary sync successfully completed"
fi
# Let's check new file MD5sums
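# With --out-format="%i %n", lines starting with '>f' mark regular files received from
# the remote side, e.g. '>f+++++++++ pool/main/a/acl/acl_2.2.52-1_amd64.deb' (illustrative path)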
fresh_files=`egrep "^>f......... .*" "$rsync_log" | awk '{print $2}'`
for fresh_file in $fresh_files; do
check_file "$deb_md5" "$LOCAL_DIR" "$fresh_file"
if [[ $? != 0 ]]; then
rm "$deb_md5"
rm "$rsync_log"
fatal "MD5sum check failed for file $LOCAL_DIR/$fresh_file"
fi
done
rm "$deb_md5"
rm "$rsync_log"
# Now iterate through all downloaded files, check whether any of them are
# symlinks, and download the necessary files if needed.
# Sometimes a section contains metainfo for a symlink to a file in a different
# section that no longer exists there, so such a file would otherwise be wiped
# as unused
wayback="`pwd`"
cd "$LOCAL_DIR/"
pool_current_files=`mktemp --suffix d-m_got`
pool_required_files=`mktemp --suffix d-m_req`
# Create lists of files that we got and that we need
find pool -type f -or -type l | sort -u > $pool_current_files
cat $files_to_dl_list | grep "^pool" | sort -u > $pool_required_files
cd "$wayback"
info "Cleaning up pool files"
# Clean obsolete files
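# comm -3 -2 prints lines found only in the first list (present on disk but not required);
# comm -3 -1 below prints lines found only in the second list (required but missing)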
obsolete_files=`comm -3 -2 "$pool_current_files" "$pool_required_files"`
for file in $obsolete_files; do
debug_job_start "Deleting '$LOCAL_DIR/$file'"
rm "$LOCAL_DIR/$file" && debug_job_ok || debug_job_err
done
info "Doublechecking that required pool files exists"
missing_files=`comm -3 -1 "$pool_current_files" "$pool_required_files"`
if [[ -n "$missing_files" ]]; then
error "Some files are missing after sync!!!:"
error "$missing_files"
fatal "Aborting due to missing files"
fi
rm "$files_to_dl_list"
rm "$pool_required_files"
rm "$pool_current_files"
# Timestamp
echo "Updated at: `date`" > $LOCAL_DIR/.lastupdate
# If partial mirroring is enabled, build the partial mirror from the fetched packages
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
# The netboot image URI used by Nailgun differs from the one used in this script,
# see https://bugs.launchpad.net/bugs/1461927 for details
PARTIAL_INITRD_DIR="/dists/${UBUNTU_RELEASE}/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/"
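# i.e. the same image path as INITRD_DIR above, but under dists/${UBUNTU_RELEASE}/
# rather than dists/${UBUNTU_RELEASE}-updates/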
# Prepare directory structure for partial repository
info "Generating partial mirror"
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/main
mkdir -p ${PARTIAL_UPSTREAM_PATH}/indices
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/debian-installer/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/${PARTIAL_INITRD_DIR}
temp_dir=`mktemp -d --suffix="-reposync"`
find $LOCAL_DIR/pool/ -name "*.deb" -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/main
rm -f ${temp_dir}/*
find ${LOCAL_DIR}/pool/ -name "*.udeb" -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
rm -rf ${temp_dir}
rsync -a --delete ${LOCAL_DIR}/${INITRD_DIR}/ ${PARTIAL_UPSTREAM_PATH}/${PARTIAL_INITRD_DIR}
find ${PARTIAL_UPSTREAM_PATH} -type d -print0 | xargs -0 chmod 755
# Generate "indices" folder
cat $LOCAL_DIR/indices/*extra* | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.extra.main
cat $LOCAL_DIR/indices/*.debian-installer | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main.debian-installer
pushd $LOCAL_DIR/indices/
ls --ignore="*extra*" --ignore="*src" --ignore="*debian-installer" --quoting-style=shell | xargs cat | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main
popd
# Generate Release file
cat <<EOF > ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/Release
Architectures: amd64
Codename: ${UBUNTU_RELEASE}
Components: main
Date: `date`
Description: Ubuntu ${UBUNTU_RELEASE} partial mirror
Label: Ubuntu
Origin: Ubuntu
Suite: ${UBUNTU_RELEASE}
EOF
# Build partial mirror
info "Generating metadata for partial mirror"
info "Applying fix for upstream dpkg-scanpackages"
patch -N /usr/bin/dpkg-scanpackages < $BINROOT/util/dpkg.patch
export BINROOT
$BINROOT/util/regenerate_ubuntu_repo ${PARTIAL_UPSTREAM_PATH} ${UBUNTU_RELEASE} || fatal "Failed to generate partial mirror"
rm -rf $LOCAL_DIR
fi # "$PARTIAL_UPSTREAM" = "1"
info "Done"