X-Git-Url: http://git.hcoop.net/clinton/scripts.git/blobdiff_plain/37839f02d2abe6c5639730ce59e4f16f9508695c..86a0f2e2d64c035e30dc4dc9634de8b1fb1e5c39:/s3.common

diff --git a/s3.common b/s3.common
index 51d3637..0365beb 100644
--- a/s3.common
+++ b/s3.common
@@ -1,3 +1,4 @@
+# -*- Shell-Script -*-
 # Common functions for dealing with Amazon S3.
 
 # units for BWLIMIT are KB/s
@@ -7,7 +8,6 @@ CHUNKSIZE=5000
 BUCKET=hcoop.net-backups
 BACKUPDIR=full
-SUBDIR=$(date +%Y.%m.%d)
 S3CMD=$(dirname $0)/s3
 
 IFS=$'\n'
@@ -31,28 +31,31 @@ function s3_cmd () {
 }
 
 function move_over () {
-    # Move file to its offsite destination
-    # $1: file, $2: relative directory (optional)
-    if test -z "$1" || test -n "$3"; then
+    # Move file to its offsite destination.
+    # Expects the file to come from STDIN.
+    # $1: date subdirectory
+    # $2: filename
+    if test -z "$2" || test -n "$3"; then
 	echo "Bad programming"
 	exit 1
     fi
-    local FILE=$1
-    local DEST=$BACKUPDIR/$SUBDIR
-    if test -n "$2"; then
-	DEST=$DEST/$2
-    fi
-    split -d -b ${CHUNKSIZE}m $FILE ${FILE}.
-    for i in ${FILE}.*; do
-	s3_cmd put $DEST/$i $i
+    local subdir=$1
+    local file=$2
+    local dest=$BACKUPDIR/$subdir
+    local ret
+    split -d -b ${CHUNKSIZE}m - ${file}.
+#    for i in ${file}.*; do
+    for i in *.[0-9][0-9]; do
+	echo "Transferring $i to S3 ..."
+	s3_cmd put $dest/$i $i
 	rm -f $i
     done
-    rm -f $FILE
 }
 
 function prune_old_backups () {
     # Implement me
+    local subdir=$1
     local oldpwd=$PWD
     cd $BACKUPDIR
     find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
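
The move_over rewrite is the heart of this diff: instead of taking a file that
already exists on disk, the function now reads the backup stream from STDIN,
splits it into CHUNKSIZE-megabyte pieces (5000 MB, presumably to stay under
S3's 5 GB single-object limit of the time), uploads each piece with the s3
wrapper, and removes each piece locally as it goes. A minimal caller sketch
under those assumptions follows; the tar invocation, the /home path, and the
home.tar.gz filename are illustrative, not part of this commit:

    # Hypothetical caller. The date subdirectory is now the caller's
    # responsibility, since this commit removes SUBDIR from s3.common.
    subdir=$(date +%Y.%m.%d)

    # Stream the archive straight into move_over; split(1) reads "-"
    # (stdin) and writes numbered chunks home.tar.gz.00, .01, ...,
    # which the for loop then uploads and deletes one by one.
    tar czf - /home | move_over $subdir home.tar.gz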
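Nothing in this file shows a restore path, so the following reassembly step is
an assumption about how the numbered chunks would be consumed, not code from
the repository. It relies only on a documented property of split -d: numeric
suffixes sort lexically in creation order, so a glob plus cat recovers the
original byte stream.

    # Hypothetical restore, after fetching the chunks back from
    # $BUCKET/$BACKUPDIR/<subdir>/ (the fetch step is not shown here):
    cat home.tar.gz.[0-9][0-9] > home.tar.gz
    tar xzf home.tar.gz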
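The prune_old_backups hunk is cut off at the trailing backslash, so the
continuation of the find command is unknown. Purely as a guess at the stated
intent (the function carries an "Implement me" comment and selects
directories under $BACKUPDIR older than seven days), a completion might look
like this; it is illustrative only and not from this commit:

    # Hypothetical completion -- delete week-old local backup
    # directories, then return to the original working directory:
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -exec rm -rf '{}' \;
    cd $oldpwd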