# Common functions for dealing with Amazon S3.

# Units for BWLIMIT are KB/s.
BWLIMIT=325
# Units for CHUNKSIZE are MB; 5000 stays under S3's 5GB single-PUT limit.
CHUNKSIZE=5000

BUCKET=hcoop.net-backups
BACKUPDIR=full
SUBDIR=$(date +%Y.%m.%d)
S3CMD=$(dirname "$0")/s3

# Split words on newlines only, so loops over command output treat
# file names containing spaces as single words.
IFS=$'\n'

# Note: the access key ID is read into the environment, while the secret
# is exported as a file path; the s3 helper is assumed to read the secret
# from that file rather than from the variable itself.
export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
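# A sketch of provisioning the key files above (the key text itself is a
# placeholder and stays elided):
#   install -m 600 /dev/null ~mwolson_admin/.amazon/access.key
#   install -m 600 /dev/null ~mwolson_admin/.amazon/secret.key
# then paste the access key ID and secret key into those files, keeping
# them readable only by the owner.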

function s3_cmd () {
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file
    local cmd=$1
    shift
    local bwarg
    if test "$cmd" = "put"; then
        bwarg="${BWLIMIT}K"
    else
        bwarg=
    fi
    # bwarg stays unquoted so it disappears entirely when empty.
    "$S3CMD" "$cmd" "$BUCKET" "$1" "$2" $bwarg
}
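# Usage sketch for s3_cmd, following the argument comments above (the
# file names are hypothetical):
#   s3_cmd put "$BACKUPDIR/$SUBDIR/db.tar.gz" db.tar.gz  # upload, rate-limited
#   s3_cmd get "$BACKUPDIR/$SUBDIR/db.tar.gz" db.tar.gz  # download
#   s3_cmd ls "$BACKUPDIR/$SUBDIR/"                      # list remote files
#   s3_cmd rm "$BACKUPDIR/$SUBDIR/db.tar.gz"             # delete remote file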

function move_over () {
    # Move a file to its offsite destination in CHUNKSIZE pieces,
    # deleting the local copy as it goes.
    # $1: file, $2: relative directory (optional)
    if test -z "$1" || test -n "$3"; then
        echo "move_over: usage: move_over FILE [SUBDIR]" >&2
        exit 1
    fi
    local FILE=$1
    local DEST=$BACKUPDIR/$SUBDIR
    if test -n "$2"; then
        DEST=$DEST/$2
    fi
    split -d -b "${CHUNKSIZE}m" "$FILE" "$FILE".
    for i in "$FILE".*; do
        s3_cmd put "$DEST/$i" "$i"
        rm -f "$i"
    done
    rm -f "$FILE"
}
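# Usage sketch for move_over (the dump name is hypothetical):
#   pg_dumpall | gzip > databases.sql.gz
#   move_over databases.sql.gz databases
# split -d names the pieces databases.sql.gz.00, .01, ..., so after
# fetching every chunk with s3_cmd get, the original can be rebuilt with:
#   cat databases.sql.gz.* > databases.sql.gz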

function prune_old_backups () {
    # Remove local backup directories more than a week old, then
    # recreate today's directory from scratch.
    local oldpwd=$PWD
    cd "$BACKUPDIR" || return 1
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    rm -rf "$SUBDIR"
    mkdir -p "$SUBDIR"
    cd "$oldpwd"
}
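
# A minimal nightly driver built from these helpers (a sketch: the
# library file name, archive name, and source directory are assumptions):
#   . "$(dirname "$0")/common"
#   prune_old_backups
#   tar -czf home.tar.gz /home
#   move_over home.tar.gz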