s3: Wait longer after failure, pick up any failed pieces later.
[hcoop/scripts.git] / s3.common
CommitLineData
# -*- Shell-Script -*-
# Common functions for dealing with Amazon S3.

# Maximum upload bandwidth, in KB/s.
BWLIMIT=325
# Size of each uploaded chunk, in MB.
CHUNKSIZE=5000

BUCKET=hcoop.net-backups
BACKUPDIR=full
# The s3 helper script lives next to the calling script; quote $0 so a
# path containing spaces still resolves.
S3CMD=$(dirname "$0")/s3

# Split only on newlines so unquoted expansions of filenames containing
# spaces stay intact.
IFS=$'\n'

# NOTE(review): the access key id is exported by *value*, but the secret
# key is exported as a *path* to the key file — this asymmetry appears
# intentional (the s3 helper presumably reads the file itself); confirm
# against the s3 script before "fixing" it.
export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
17
function s3_cmd () {
    # Run the s3 helper against $BUCKET, rate-limiting uploads.
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file
    local cmd=$1
    shift
    # Only uploads are bandwidth-limited; everything else gets no limit arg.
    local bwarg=
    if test "$cmd" = "put"; then
        bwarg="${BWLIMIT}K"
    fi
    # ${bwarg:+...} appends the limit only when set, so quoting every
    # expansion is safe and no empty trailing argument is ever passed.
    "$S3CMD" "$cmd" "$BUCKET" "$1" "$2" ${bwarg:+"$bwarg"}
}
32
function move_over () {
    # Move a file to its offsite destination in bucket-sized chunks.
    # Expects the file's contents to come from STDIN.
    # $1: date subdirectory
    # $2: filename
    if test -z "$2" || test -n "$3"; then
        echo "Bad programming" >&2
        exit 1
    fi
    local subdir=$1
    local file=$2
    local dest=$BACKUPDIR/$subdir
    split -d -b ${CHUNKSIZE}m - "${file}."
    # Deliberately glob *all* numbered chunks, not just ${file}.*, so
    # pieces left behind by an earlier failed run get retried as well.
    local i
    for i in *.[0-9][0-9]; do
        echo "Transferring $i to S3 ..."
        # Delete the chunk only after a successful upload; a failed
        # piece must survive on disk for the next run to pick up.
        if s3_cmd put "$dest/$i" "$i"; then
            rm -f "$i"
        fi
    done
}
54
55
function prune_old_backups () {
    # Remove backup directories older than a week, then recreate the
    # given subdirectory empty for the next run.
    # $1: date subdirectory to reset
    local subdir=$1
    local oldpwd=$PWD
    cd "$BACKUPDIR" || return 1
    # Best-effort cleanup of week-old snapshots; a find failure must not
    # abort the backup run.
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    # Bug fix: this used $SUBDIR, which was never set, so the reset was a
    # silent no-op (and mkdir errored with no operand).  Use the local
    # $subdir; ${subdir:?} refuses to run `rm -rf` on an empty path.
    rm -rf -- "${subdir:?}"
    mkdir -p -- "$subdir"
    cd "$oldpwd" || return 1
}