diff --git a/helm-toolkit/Chart.yaml b/helm-toolkit/Chart.yaml
index 6c71208e9..6393d6e00 100644
--- a/helm-toolkit/Chart.yaml
+++ b/helm-toolkit/Chart.yaml
@@ -15,7 +15,7 @@ apiVersion: v1
 appVersion: v1.0.0
 description: OpenStack-Helm Helm-Toolkit
 name: helm-toolkit
-version: 0.2.40
+version: 0.2.41
 home: https://docs.openstack.org/openstack-helm
 icon: https://www.openstack.org/themes/openstack/images/project-mascots/OpenStack-Helm/OpenStack_Project_OpenStackHelm_vertical.png
 sources:
diff --git a/helm-toolkit/templates/scripts/db-backup-restore/_backup_main.sh.tpl b/helm-toolkit/templates/scripts/db-backup-restore/_backup_main.sh.tpl
index db1291566..d3fe4fdee 100755
--- a/helm-toolkit/templates/scripts/db-backup-restore/_backup_main.sh.tpl
+++ b/helm-toolkit/templates/scripts/db-backup-restore/_backup_main.sh.tpl
@@ -253,6 +253,16 @@ store_backup_remotely() {
   return 1
 }
 
+
+function get_archive_date(){
+# get_archive_date function returns correct archive date
+# for different formats of archives' names
+# the old one: <...>.<...>.<...>.<date-time>.tar.gz
+# the new one: <...>.<...>.<...>.<...>.<date-time>.tar.gz
+  local A_FILE="$1"
+  awk -F. '{print $(NF-2)}' <<< ${A_FILE} | tr -d "Z"
+}
+
 # This function takes a list of archives' names as an input
 # and creates a hash table where keys are number of seconds
 # between current date and archive date (see seconds_difference),
@@ -271,21 +281,6 @@ store_backup_remotely() {
 # possible case, when we have several backups of the same date. E.g.
 # one manual, and one automatic.
 
-function get_archive_date(){
-# get_archive_date function returns correct archive date
-# for different formats of archives' names
-# the old one: <...>.<...>.<...>.<date-time>.tar.gz
-# the new one: <...>.<...>.<...>.<...>.<date-time>.tar.gz
-local A_FILE="$1"
-local A_DATE=""
-if [[ -z ${BACK_UP_MODE} ]]; then
-  A_DATE=$( awk -F/ '{print $NF}' <<< ${ARCHIVE_FILE} | cut -d'.' -f 4 | tr -d "Z")
-else
-  A_DATE=$( awk -F/ '{print $NF}' <<< ${ARCHIVE_FILE} | cut -d'.' -f 5 | tr -d "Z")
-fi
-echo ${A_DATE}
-}
-
 declare -A fileTable
 create_hash_table() {
   unset fileTable
@@ -328,33 +323,6 @@ function get_backup_prefix() {
   done
 }
 
-remove_old_local_archives() {
-  if [[ -d $ARCHIVE_DIR ]]; then
-    count=0
-    SECONDS_TO_KEEP=$((${LOCAL_DAYS_TO_KEEP}*86400))
-    log INFO "${DB_NAME}_backup" "Deleting backups older than ${LOCAL_DAYS_TO_KEEP} days"
-    # We iterate over the hash table, checking the delta in seconds (hash keys),
-    # and minimum number of backups we must have in place. List of keys has to be sorted.
-    for INDEX in $(tr " " "\n" <<< ${!FILETABLE[@]} | sort -n -); do
-      ARCHIVE_FILE=${FILETABLE[${INDEX}]}
-      if [[ ${INDEX} -le ${SECONDS_TO_KEEP} || ${count} -lt ${LOCAL_DAYS_TO_KEEP} ]]; then
-        ((count++))
-        log INFO "${DB_NAME}_backup" "Keeping file(s) ${ARCHIVE_FILE}."
-      else
-        log INFO "${DB_NAME}_backup" "Deleting file(s) ${ARCHIVE_FILE}."
-        rm -rf $ARCHIVE_FILE
-        if [[ $? -ne 0 ]]; then
-          # Log error but don't exit so we can finish the script
-          # because at this point we haven't sent backup to RGW yet
-          log ERROR "${DB_NAME}_backup" "Failed to cleanup local backup. Cannot remove some of ${ARCHIVE_FILE}"
-        fi
-      fi
-    done
-  else
-    log WARN "${DB_NAME}_backup" "The local backup directory ${$ARCHIVE_DIR} does not exist."
-  fi
-}
-
 remove_old_local_archives() {
   SECONDS_TO_KEEP=$(( $((${LOCAL_DAYS_TO_KEEP}))*86400))
   log INFO "${DB_NAME}_backup" "Deleting backups older than ${LOCAL_DAYS_TO_KEEP} days (${SECONDS_TO_KEEP} seconds)"
@@ -414,10 +382,12 @@ remove_old_remote_archives() {
 
   # Cleanup now that we're done.
   for fd in ${BACKUP_FILES} ${DB_BACKUP_FILES}; do
-  if [[ -f fd ]]; then
-    rm -f fd
-  else
-    log WARN "${DB_NAME}_backup" "Can not delete a temporary file ${fd}"
+    if [[ -f fd ]]; then
+      rm -f fd
+    else
+      log WARN "${DB_NAME}_backup" "Can not delete a temporary file ${fd}"
+    fi
+  done
 }
 
 # Main function to backup the databases. Calling functions need to supply:
@@ -517,8 +487,12 @@ backup_databases() {
     #Only delete the old archive after a successful archive
    if [[ "$REMOTE_DAYS_TO_KEEP" -gt 0 ]]; then
      prepare_list_of_remote_backups
-      create_hash_table $(cat $DB_BACKUP_FILES)
-      remove_old_remote_archives
+      get_backup_prefix $(cat $DB_BACKUP_FILES)
+      for ((i=0; i<${#PREFIXES[@]}; i++)); do
+        echo "Working with prefix: ${PREFIXES[i]}"
+        create_hash_table $(cat $DB_BACKUP_FILES | grep ${PREFIXES[i]})
+        remove_old_remote_archives
+      done
     fi
 
     echo "=================================================================="
diff --git a/releasenotes/notes/helm-toolkit.yaml b/releasenotes/notes/helm-toolkit.yaml
index e38016fe1..322ecc01a 100644
--- a/releasenotes/notes/helm-toolkit.yaml
+++ b/releasenotes/notes/helm-toolkit.yaml
@@ -47,4 +47,5 @@ helm-toolkit:
   - 0.2.38 Minor change to display archive directory with files in sub-directory
   - 0.2.39 Removed tillerVersion from Chart to pass helm3 linting
   - 0.2.40 Revert chart naming for subchart compatibility
+  - 0.2.41 Database B/R - archive name parser added
 ...
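
For reviewers, a minimal standalone sketch of what the added parser does (not part of the patch; the archive names below are hypothetical examples that only mirror the dot-separated layout, with the date-time as the third field from the end):

#!/bin/bash
# Sketch only: reproduces the get_archive_date helper added above and runs it
# against two made-up archive names, one per naming format.

get_archive_date(){
  local A_FILE="$1"
  # The date-time is always the third dot-separated field from the end
  # (the last two fields are "tar" and "gz"), so one awk expression covers
  # both the old and the new name layouts.
  awk -F. '{print $(NF-2)}' <<< "${A_FILE}" | tr -d "Z"
}

# Old-style name: date-time is field 4, as the removed `cut -d'.' -f 4` branch assumed.
get_archive_date "mariadb.openstack.all.2023-01-01T01:02:03Z.tar.gz"
# prints: 2023-01-01T01:02:03

# New-style name: one extra field, date-time is field 5 (formerly `cut -d'.' -f 5`).
get_archive_date "mariadb.openstack.all.manual.2023-01-02T04:05:06Z.tar.gz"
# prints: 2023-01-02T04:05:06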