From: mwolson_admin Date: Wed, 25 Jun 2008 07:11:13 +0000 (-0400) Subject: More backup script revamp. X-Git-Url: http://git.hcoop.net/clinton/scripts.git/commitdiff_plain/37839f02d2abe6c5639730ce59e4f16f9508695c?hp=6cc4523aaf00b4b630dc6bed91a7c1f5098b1148 More backup script revamp. - Split common s3-specific functions into s3.common. - Make new simple script s3-move to move a single file over to S3. - Replace uses of copy_over with $MOVE_OVER. - Replace one use of catsync in an xargs command to use $MOVE_OVER instead. --- diff --git a/hcoop-backup b/hcoop-backup index 10bdc9a..0bfb73e 100755 --- a/hcoop-backup +++ b/hcoop-backup @@ -15,67 +15,12 @@ PATH=$PATH:/bin:/usr/bin:/sbin:/usr/sbin #COMPRESS_PROG=bzip2 COMPRESS_EXT=.gz COMPRESS_PROG=gzip -# units for BWLIMIT are KB/s -BWLIMIT=325 -# units for CHUNKSIZE are MB -CHUNKSIZE=5000 - -IFS=$'\n' - -SCRIPTDIR=$(dirname $0) KEYFILE=/etc/backup-encryption-key -BUCKET=hcoop.net-backups -BACKUPDIR=full BACKUPTMP=/var/backups/hcoop-backup-testing -SUBDIR=$(date +%Y.%m.%d) - -export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key) -export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key - -function s3_cmd () { - # $1: command (get|put|ls|rm) - # $2: remote file - # $3: local file - local cmd=$1 - shift - local bwarg - if test "$cmd" = "put"; then - bwarg="${BWLIMIT}K"; - else - bwarg= - fi - $SCRIPTDIR/s3 $cmd $BUCKET "$1" "$2" $bwarg -} - -function copy_over () { - # Move file to its offsite destination - # $1: file, $2: relative directory (optional) - if test -z "$1" || test -n "$3"; then - echo "Bad programming" - exit 1 - fi - local FILE=$1 - local DEST=$BACKUPDIR/$SUBDIR - if test -n "$2"; then - DEST=$DEST/$2 - fi - split -d -b ${CHUNKSIZE}m $FILE ${FILE}. - for i in ${FILE}.*; do - s3_cmd put $DEST/$i $i - rm -f $i - done - rm -f $FILE -} - -function prune_old_backups () { - local oldpwd=$PWD - cd $BACKUPDIR - find . 
-mindepth 1 -maxdepth 1 -type d -ctime +7 \ - -execdir rm -fr '{}' \; || true - rm -rf $SUBDIR - mkdir -p $SUBDIR - cd $oldpwd -} + +MOVE_OVER=$(dirname $0)/s3-move + +IFS=$'\n' #prune_old_backups @@ -160,7 +105,7 @@ F=hcoop.backup.tar$COMPRESS_EXT.aescrypt tar clpf - --ignore-failed-read --no-recursion -C / -T backupfiles | \ $COMPRESS_PROG | \ ccrypt -k $KEYFILE -e > $F -copy_over $F +$MOVE_OVER $F # Acquire lock before messing with spamd COUNT=0 @@ -183,10 +128,7 @@ tar clpf - --ignore-failed-read -C / /var/local/lib/spamd | \ $COMPRESS_PROG | \ ccrypt -k $KEYFILE -e > $F.new rm -f $LOCK -copy_over $F.new .. - -test -s $BACKUPDIR/$F.new && \ - mv $BACKUPDIR/$F.new $BACKUPDIR/$F +$MOVE_OVER $F vos listvol deleuze | \ tail -n +2 | \ @@ -206,15 +148,14 @@ cat volumes | \ "F={}.dump$COMPRESS_EXT.aescrypt ; vos dump -id {} -localauth -clone | $COMPRESS_PROG | ccrypt -k $KEYFILE -e > \$F ; - < \$F catsync -b $BWLIMIT $BACKUPDIR/$SUBDIR/\$F ; - rm -f \$F" + $MOVE_OVER \$F" echo backing up databases F=databases.tar$COMPRESS_EXT.aescrypt tar -C /var/backups/databases/ -cf - . | \ $COMPRESS_PROG | \ ccrypt -k $KEYFILE -e > $F -copy_over $F +$MOVE_OVER $F grep '[a-z/]' complain && \ mail -a 'From: The Backup Program ' \ diff --git a/s3-move b/s3-move new file mode 100755 index 0000000..c8e42ae --- /dev/null +++ b/s3-move @@ -0,0 +1,5 @@ +#!/bin/bash -e + +. $(dirname $0)/s3.common + +move_over $@ diff --git a/s3.common b/s3.common new file mode 100644 index 0000000..51d3637 --- /dev/null +++ b/s3.common @@ -0,0 +1,63 @@ +# Common functions for dealing with Amazon S3. 
+ +# units for BWLIMIT are KB/s +BWLIMIT=325 +# units for CHUNKSIZE are MB +CHUNKSIZE=5000 + +BUCKET=hcoop.net-backups +BACKUPDIR=full +SUBDIR=$(date +%Y.%m.%d) +S3CMD=$(dirname $0)/s3 + +IFS=$'\n' + +export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key) +export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key + +function s3_cmd () { + # $1: command (get|put|ls|rm) + # $2: remote file + # $3: local file + local cmd=$1 + shift + local bwarg + if test "$cmd" = "put"; then + bwarg="${BWLIMIT}K"; + else + bwarg= + fi + $S3CMD $cmd $BUCKET "$1" "$2" $bwarg +} + +function move_over () { + # Move file to its offsite destination + # $1: file, $2: relative directory (optional) + if test -z "$1" || test -n "$3"; then + echo "Bad programming" + exit 1 + fi + local FILE=$1 + local DEST=$BACKUPDIR/$SUBDIR + if test -n "$2"; then + DEST=$DEST/$2 + fi + split -d -b ${CHUNKSIZE}m $FILE ${FILE}. + for i in ${FILE}.*; do + s3_cmd put $DEST/$i $i + rm -f $i + done + rm -f $FILE +} + + +function prune_old_backups () { + # Implement me + local oldpwd=$PWD + cd $BACKUPDIR + find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \ + -execdir rm -fr '{}' \; || true + rm -rf $SUBDIR + mkdir -p $SUBDIR + cd $oldpwd +}