From bac711939993e274678c79f9ad2e2f0be0fe2243 Mon Sep 17 00:00:00 2001
From: mwolson_admin
Date: Wed, 25 Jun 2008 02:35:55 -0400
Subject: [PATCH] hcoop-backup: More work.

- Split files at ~5GB before transferring, due to Amazon S3 limitations.
- Make s3_cmd easier to use.
- Temporarily comment out backup pruning.
---
 hcoop-backup | 25 +++++++++++++++++++------
 1 file changed, 19 insertions(+), 6 deletions(-)

diff --git a/hcoop-backup b/hcoop-backup
index 6308105..6200a65 100755
--- a/hcoop-backup
+++ b/hcoop-backup
@@ -17,6 +17,8 @@ COMPRESS_EXT=.gz
 COMPRESS_PROG=gzip
 # units for BWLIMIT are KB/s
 BWLIMIT=325
+# units for CHUNKSIZE are MB
+CHUNKSIZE=5000
 
 IFS=$'\n'
 
@@ -32,10 +34,17 @@ export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
 
 function s3_cmd () {
     # $1: command (get|put|ls|rm)
-    # $2: bucket
-    # $3: destination
-    # $4: source file
-    $SCRIPTDIR/s3 $1 $2 $3 $4 ${BWLIMIT}K
+    # $2: remote file
+    # $3: local file
+    local cmd=$1
+    shift
+    local bwarg
+    if test "$cmd" = "put"; then
+        bwarg="${BWLIMIT}K";
+    else
+        bwarg=
+    fi
+    $SCRIPTDIR/s3 $cmd $BUCKET "$1" "$2" $bwarg
 }
 
 function copy_over () {
@@ -50,7 +59,11 @@ function copy_over () {
     if test -n "$2"; then
         DEST=$DEST/$2
     fi
-    s3_cmd put $BUCKET $DEST/$FILE $FILE
+    split -d -b ${CHUNKSIZE}m $FILE ${FILE}.
+    for i in ${FILE}.*; do
+        s3_cmd put $DEST/$i $i
+        rm -f $i
+    done
     rm -f $FILE
 }
 
@@ -64,7 +77,7 @@ function prune_old_backups () {
     cd $oldpwd
 }
 
-prune_old_backups
+#prune_old_backups
 
 mkdir -p $BACKUPTMP
 cd $BACKUPTMP
-- 
2.20.1
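
Note (editorial, not part of the patch above): with CHUNKSIZE=5000 and
`split -d -b ${CHUNKSIZE}m`, each upload is broken into numbered pieces
named FILE.00, FILE.01, and so on, each at most ~5 GB. The patch only
covers the upload side; a restore would have to fetch every piece from
the bucket and concatenate them in order. A minimal sketch, assuming
the pieces of a hypothetical archive named backup.tar.gz have already
been downloaded into the current directory:

    # Hypothetical restore step (not in the script): the shell expands
    # the glob in sorted order, and split's numeric suffixes sort the
    # same way they were written, so concatenation rebuilds the file.
    cat backup.tar.gz.* > backup.tar.gz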