+++ /dev/null
# -*- Shell-Script -*-
# Common functions for dealing with Amazon S3.

# Upload bandwidth cap handed to the s3 helper; units are KB/s.
BWLIMIT=325
# Size of each uploaded chunk; units are MB.
CHUNKSIZE=5000

BUCKET=hcoop.net-backups
BACKUPDIR=full
# The s3 helper script lives next to this script; quote $0 so the
# path survives whitespace.
S3CMD=$(dirname "$0")/s3

# Split only on newlines (filenames with spaces survive unquoted use below).
IFS=$'\n'

export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
# NOTE(review): unlike the access key above, this exports the *path* to
# the secret key file rather than its contents -- confirm the s3 helper
# really expects a file name here; otherwise this should be $(cat ...) too.
export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
-
function s3_cmd () {
    # Run the s3 helper against $BUCKET.
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file (get/put only)
    local cmd=$1
    shift
    # Only uploads are bandwidth-limited.  Use an array so that no
    # empty argument is passed to the helper for the other commands.
    local -a bwarg=()
    if test "$cmd" = "put"; then
        bwarg=("${BWLIMIT}K")
    fi
    # Quote every expansion so paths containing whitespace survive.
    "$S3CMD" "$cmd" "$BUCKET" "$1" "$2" "${bwarg[@]}"
}
-
function move_over () {
    # Split a stream into chunks and move each chunk to its offsite
    # S3 destination.  Expects the data to come from STDIN.
    # $1: date subdirectory
    # $2: filename (used as the chunk prefix and remote name)
    # Returns non-zero if any chunk failed to upload.
    if test -z "$2" || test -n "$3"; then
        echo "Bad programming"
        exit 1
    fi
    local subdir=$1
    local file=$2
    local dest=$BACKUPDIR/$subdir
    local ret=0
    # split -d produces numeric two-digit suffixes: ${file}.00, .01, ...
    split -d -b ${CHUNKSIZE}m - "${file}."
    # Match only the chunks we just created; the old pattern
    # *.[0-9][0-9] also swept up any unrelated file ending in two digits.
    for i in "${file}".[0-9][0-9]; do
        test -e "$i" || continue
        echo "Transferring $i to S3 ..."
        if s3_cmd put "$dest/$i" "$i"; then
            rm -f -- "$i"
        else
            # Keep the chunk on disk when the upload fails so it can be
            # retried instead of being silently deleted.
            echo "Failed to transfer $i; keeping local copy" >&2
            ret=1
        fi
    done
    return $ret
}
-
-
function prune_old_backups () {
    # Delete first-level backup directories older than 7 days, then
    # wipe and re-create an empty directory for the current run.
    # $1: date subdirectory to re-create
    local subdir=$1
    local oldpwd=$PWD
    # Refuse to prune anything if we cannot enter the backup root;
    # otherwise the find/rm below would run in the wrong directory.
    cd "$BACKUPDIR" || return 1
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    # BUG FIX: the original referenced the unset uppercase $SUBDIR here,
    # so the subdirectory was never actually wiped or re-created.
    if test -n "$subdir"; then
        rm -rf -- "$subdir"
        mkdir -p -- "$subdir"
    fi
    cd "$oldpwd"
}