[Bps-public-commit] postgresql_auto_backup_s3 branch update-for-hostedrt-backups updated. a766818ca129249d31fa95a12b975b0dbf697bea

BPS Git Server git at git.bestpractical.com
Tue Mar 15 00:18:48 UTC 2022


This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
the project "postgresql_auto_backup_s3".

The branch, update-for-hostedrt-backups has been updated
       via  a766818ca129249d31fa95a12b975b0dbf697bea (commit)
      from  f9a2f7dfc2a264b6ba12541adeb39e56cb808c21 (commit)

Those revisions listed above that are new to this repository have
not appeared in any other notification email, so we list those
revisions in full below.

- Log -----------------------------------------------------------------
commit a766818ca129249d31fa95a12b975b0dbf697bea
Author: Blaine Motsinger <blaine at bestpractical.com>
Date:   Mon Mar 14 19:07:43 2022 -0500

    Update s3cmd commands to aws-cli
    
    aws-cli is the recommended tool from AWS and supports input from
    STDIN for s3 cp commands.
    
    This commit also updates the backup rotation logic which was not
    completed in the original version.

diff --git a/pg_backup_rotated.sh b/pg_backup_rotated.sh
index f0f2421..506a630 100644
--- a/pg_backup_rotated.sh
+++ b/pg_backup_rotated.sh
@@ -82,10 +82,10 @@ function perform_backups()
         do
                 echo "$DATABASE"
 
-                if ! ( pg_dump -Fp -h "$HOSTNAME" -U "$USERNAME" "$DATABASE" --table=sessions --schema-only; pg_dump -Fp -h "$HOSTNAME" -U "$USERNAME" "$DATABASE" --exclude-table=sessions ) | gzip --stdout | s3cmd --reduced-redundancy put - $FINAL_BACKUP_DIR"$DATABASE".sql.gz.in_progress --no-encrypt; then
+                if ! ( pg_dump -Fp -h "$HOSTNAME" -U "$USERNAME" "$DATABASE" --table=sessions --schema-only; pg_dump -Fp -h "$HOSTNAME" -U "$USERNAME" "$DATABASE" --exclude-table=sessions ) | gzip --stdout | aws s3 cp - $FINAL_BACKUP_DIR"$DATABASE".sql.gz.in_progress; then
                         echo "[!!ERROR!!] Failed to produce backup of database $DATABASE" 1>&2
                 else
-                        s3cmd mv $FINAL_BACKUP_DIR"$DATABASE".sql.gz.in_progress $FINAL_BACKUP_DIR"$DATABASE".sql.gz
+                        aws s3 mv $FINAL_BACKUP_DIR"$DATABASE".sql.gz.in_progress $FINAL_BACKUP_DIR"$DATABASE".sql.gz
                 fi
  
         done
@@ -100,7 +100,9 @@ DAY_OF_MONTH=`date +%d`
 if [[ "$DAY_OF_MONTH" -eq "1" ]];
 then
         # Delete all expired monthly directories
-        find $BACKUP_DIR -maxdepth 1 -name "*-monthly" -exec rm -rf '{}' ';'
+        for backup in $(aws s3 ls $BACKUP_DIR | awk '{print $4}' | grep '\-monthly'); do
+                aws s3 rm $BACKUP_DIR"$backup"
+        done
  
         perform_backups "-monthly"
  
@@ -110,13 +112,19 @@ fi
 # WEEKLY BACKUPS
  
 DAY_OF_WEEK=`date +%u` #1-7 (Monday-Sunday)
-EXPIRED_DAYS=`expr $((($WEEKS_TO_KEEP * 7) + 1))`
  
 if [[ "$DAY_OF_WEEK" -eq "$DAY_OF_WEEK_TO_KEEP" ]];
 then
-        # Delete all expired weekly directories
-        find $BACKUP_DIR -maxdepth 1 -mtime +$EXPIRED_DAYS -name "*-weekly" -exec rm -rf '{}' ';'
- 
+        weekly_deldate=`date +"%Y%m%d" --date="$(($WEEKS_TO_KEEP * 7)) days ago"`
+
+        for backup in $(aws s3 ls $BACKUP_DIR | awk '{print $4}' | grep '\-weekly'); do
+                weekly_backup_date=${backup:0:8}
+
+                if [[ "$weekly_backup_date" -le "$weekly_deldate" ]]; then
+                        aws s3 rm $BACKUP_DIR"$backup"
+                fi
+        done
+
         perform_backups "-weekly"
  
         exit 0;
@@ -124,16 +132,14 @@ fi
  
 # DAILY BACKUPS
  
-# Delete daily backups $DAYS_TO_KEEP days old or more
-#find $BACKUP_DIR -maxdepth 1 -mtime +$DAYS_TO_KEEP -name "*-daily" -exec rm -rf '{}' ';'
+daily_deldate=`date +"%Y%m%d" --date="$DAYS_TO_KEEP days ago"`
 
-#set $deldate to date minus $DAYS_TO_KEEP
-deldate=`date +"%Y%m%d" --date="$DAYS_TO_KEEP days ago"`
+for backup in $(aws s3 ls $BACKUP_DIR | awk '{print $4}' | grep '\-daily'); do
+        backup_date=${backup:0:8}
 
-#remove leading bucket name from s3 object displayed by "s3cmd ls $bucketname"
-#compare first 8 chars of s3 object name (minus bucket name) with $deldate, 
-#if first 8 chars of s3 object name::int <= $deldate then delete those s3 objects
-s3cmd ls $BACKUP_DIR | awk -v bucket="$BACKUP_DIR" -v deldate="$deldate" '{gsub(bucket,"",$4);if (substr($4,0,9)<=deldate) system("s3cmd info " bucket$4)}' 
-#replace s3cmd info by s3cmd del
+        if [[ "$backup_date" -le "$daily_deldate" ]]; then
+                aws s3 rm $BACKUP_DIR"$backup"
+        fi
+done
 
 perform_backups "-daily"
-----------------------------------------------------------------------

Summary of changes:
 pg_backup_rotated.sh | 38 ++++++++++++++++++++++----------------
 1 file changed, 22 insertions(+), 16 deletions(-)


hooks/post-receive
-- 
postgresql_auto_backup_s3


More information about the Bps-public-commit mailing list