[DATABASE] Fixes archive name parsing issues
Added a parser for archive names to cover the situation where an archive name can appear in two different formats:

  1) <database name>.<namespace>.<table name | all>.<date-time>.tar.gz
  2) <database name>.<namespace>.<table name | all>.<backup mode>.<date-time>.tar.gz

The first format is the one in use at the moment; the second format is recommended for future use.

Change-Id: I6b631b3b938c0a0242c5a8870284995b2cd8f27b
This commit is contained in:
parent b5c285ae98
commit a50d3da394
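For illustration, two hypothetical archive names, one per format (the exact timestamp layout is an assumption based on the backup script's date stamp):

  mariadb.openstack.all.2021-07-20T05:00:03Z.tar.gz          # format 1 (current)
  mariadb.openstack.all.manual.2021-07-20T05:00:03Z.tar.gz   # format 2 (recommended)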
helm-toolkit/Chart.yaml:

@@ -15,7 +15,7 @@ apiVersion: v1
 appVersion: v1.0.0
 description: OpenStack-Helm Helm-Toolkit
 name: helm-toolkit
-version: 0.2.40
+version: 0.2.41
 home: https://docs.openstack.org/openstack-helm
 icon: https://www.openstack.org/themes/openstack/images/project-mascots/OpenStack-Helm/OpenStack_Project_OpenStackHelm_vertical.png
 sources:
helm-toolkit/templates/scripts/_backup_main.sh.tpl:

@@ -253,6 +253,16 @@ store_backup_remotely() {
     return 1
 }
 
+function get_archive_date(){
+  # get_archive_date function returns correct archive date
+  # for different formats of archives' names
+  # the old one: <database name>.<namespace>.<table name | all>.<date-time>.tar.gz
+  # the new one: <database name>.<namespace>.<table name | all>.<backup mode>.<date-time>.tar.gz
+  local A_FILE="$1"
+  awk -F. '{print $(NF-2)}' <<< ${A_FILE} | tr -d "Z"
+}
+
 # This function takes a list of archives' names as an input
 # and creates a hash table where keys are number of seconds
 # between current date and archive date (see seconds_difference),
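The new parser counts dot-separated fields from the end of the name rather than the front, so it does not need to know which of the two formats it is handling: in both, the <date-time> field is the third from the end, just before "tar" and "gz". A quick check with hypothetical archive names:

  get_archive_date "mariadb.openstack.all.2021-07-20T05:00:03Z.tar.gz"
  # -> 2021-07-20T05:00:03   (old format: $(NF-2) is field 4 of 6)
  get_archive_date "mariadb.openstack.all.manual.2021-07-20T05:00:03Z.tar.gz"
  # -> 2021-07-20T05:00:03   (new format: $(NF-2) is field 5 of 7)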
@@ -271,21 +281,6 @@ store_backup_remotely() {
 # possible case, when we have several backups of the same date. E.g.
 # one manual, and one automatic.
 
-function get_archive_date(){
-  # get_archive_date function returns correct archive date
-  # for different formats of archives' names
-  # the old one: <database name>.<namespace>.<table name | all>.<date-time>.tar.gz
-  # the new one: <database name>.<namespace>.<table name | all>.<backup mode>.<date-time>.tar.gz
-  local A_FILE="$1"
-  local A_DATE=""
-  if [[ -z ${BACK_UP_MODE} ]]; then
-    A_DATE=$( awk -F/ '{print $NF}' <<< ${ARCHIVE_FILE} | cut -d'.' -f 4 | tr -d "Z")
-  else
-    A_DATE=$( awk -F/ '{print $NF}' <<< ${ARCHIVE_FILE} | cut -d'.' -f 5 | tr -d "Z")
-  fi
-  echo ${A_DATE}
-}
-
 declare -A fileTable
 create_hash_table() {
   unset fileTable
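The removed variant had to be told which format it was parsing (via BACK_UP_MODE) because it counted fields from the front, and it read the globals ARCHIVE_FILE and BACK_UP_MODE instead of its own A_FILE argument. With the new naming format, counting from the front picks up the backup mode instead of the date (hypothetical name again):

  cut -d'.' -f 4 <<< "mariadb.openstack.all.manual.2021-07-20T05:00:03Z.tar.gz"
  # -> manual   (field 4 from the front is now the backup mode, not the date)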
@@ -328,33 +323,6 @@ function get_backup_prefix() {
   done
 }
 
-remove_old_local_archives() {
-  if [[ -d $ARCHIVE_DIR ]]; then
-    count=0
-    SECONDS_TO_KEEP=$((${LOCAL_DAYS_TO_KEEP}*86400))
-    log INFO "${DB_NAME}_backup" "Deleting backups older than ${LOCAL_DAYS_TO_KEEP} days"
-    # We iterate over the hash table, checking the delta in seconds (hash keys),
-    # and minimum number of backups we must have in place. List of keys has to be sorted.
-    for INDEX in $(tr " " "\n" <<< ${!FILETABLE[@]} | sort -n -); do
-      ARCHIVE_FILE=${FILETABLE[${INDEX}]}
-      if [[ ${INDEX} -le ${SECONDS_TO_KEEP} || ${count} -lt ${LOCAL_DAYS_TO_KEEP} ]]; then
-        ((count++))
-        log INFO "${DB_NAME}_backup" "Keeping file(s) ${ARCHIVE_FILE}."
-      else
-        log INFO "${DB_NAME}_backup" "Deleting file(s) ${ARCHIVE_FILE}."
-        rm -rf $ARCHIVE_FILE
-        if [[ $? -ne 0 ]]; then
-          # Log error but don't exit so we can finish the script
-          # because at this point we haven't sent backup to RGW yet
-          log ERROR "${DB_NAME}_backup" "Failed to cleanup local backup. Cannot remove some of ${ARCHIVE_FILE}"
-        fi
-      fi
-    done
-  else
-    log WARN "${DB_NAME}_backup" "The local backup directory ${ARCHIVE_DIR} does not exist."
-  fi
-}
-
 remove_old_local_archives() {
   SECONDS_TO_KEEP=$(( $((${LOCAL_DAYS_TO_KEEP}))*86400))
   log INFO "${DB_NAME}_backup" "Deleting backups older than ${LOCAL_DAYS_TO_KEEP} days (${SECONDS_TO_KEEP} seconds)"
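The keep/delete rule in the variant removed above reads: keep an archive if it is younger than the cutoff, or if fewer than LOCAL_DAYS_TO_KEEP archives have been kept so far (the hash keys are ages in seconds, iterated in ascending order, newest first). A standalone illustration with made-up ages:

  LOCAL_DAYS_TO_KEEP=3
  SECONDS_TO_KEEP=$((LOCAL_DAYS_TO_KEEP*86400))   # 259200 seconds = 3 days
  count=0
  for INDEX in 3600 86400 259201 400000; do       # archive ages, sorted ascending
    if [[ ${INDEX} -le ${SECONDS_TO_KEEP} || ${count} -lt ${LOCAL_DAYS_TO_KEEP} ]]; then
      ((count++))
      echo "keep   ${INDEX}"
    else
      echo "delete ${INDEX}"
    fi
  done
  # -> keep 3600, keep 86400, keep 259201 (only two kept so far), delete 400000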
@@ -414,10 +382,12 @@ remove_old_remote_archives() {
 
   # Cleanup now that we're done.
   for fd in ${BACKUP_FILES} ${DB_BACKUP_FILES}; do
     if [[ -f ${fd} ]]; then
       rm -f ${fd}
+    else
+      log WARN "${DB_NAME}_backup" "Can not delete a temporary file ${fd}"
     fi
   done
 }
 
 # Main function to backup the databases. Calling functions need to supply:
@@ -517,8 +487,12 @@ backup_databases() {
   #Only delete the old archive after a successful archive
   if [[ "$REMOTE_DAYS_TO_KEEP" -gt 0 ]]; then
     prepare_list_of_remote_backups
-    create_hash_table $(cat $DB_BACKUP_FILES)
-    remove_old_remote_archives
+    get_backup_prefix $(cat $DB_BACKUP_FILES)
+    for ((i=0; i<${#PREFIXES[@]}; i++)); do
+      echo "Working with prefix: ${PREFIXES[i]}"
+      create_hash_table $(cat $DB_BACKUP_FILES | grep ${PREFIXES[i]})
+      remove_old_remote_archives
+    done
  fi
 
 echo "=================================================================="
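get_backup_prefix itself is outside this hunk; the loop assumes it fills the PREFIXES array with the unique "<database name>.<namespace>" prefixes found in the backup list, so that retention is applied per database rather than across all of them. A minimal sketch of that assumed behavior (hypothetical, not the actual implementation):

  get_backup_prefix() {
    PREFIXES=($(printf '%s\n' "$@" | awk -F. '{print $1"."$2}' | sort -u))
  }
  get_backup_prefix mariadb.openstack.all.2021-07-20T05:00:03Z.tar.gz \
                    postgresql.osh-infra.all.manual.2021-07-20T05:00:03Z.tar.gz
  echo "${PREFIXES[@]}"   # -> mariadb.openstack postgresql.osh-infra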
releasenotes/notes/helm-toolkit.yaml:

@@ -47,4 +47,5 @@ helm-toolkit:
     - 0.2.38 Minor change to display archive directory with files in sub-directory
     - 0.2.39 Removed tillerVersion from Chart to pass helm3 linting
     - 0.2.40 Revert chart naming for subchart compatibility
+    - 0.2.41 Database B/R - archive name parser added
 ...