01a4497dc6b9aec915a2559f2eb26af08dd63079
[clinton/scripts.git] / s3.common
1 # Common functions for dealing with Amazon S3.
2
3 # units for BWLIMIT are KB/s
4 BWLIMIT=325
5 # units for CHUNKSIZE are MB
6 CHUNKSIZE=5000
7
8 BUCKET=hcoop.net-backups
9 BACKUPDIR=full
10 S3CMD=$(dirname $0)/s3
11
12 IFS=$'\n'
13
14 export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
15 export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
16
function s3_cmd () {
    # Run the s3 helper against $BUCKET, throttling uploads.
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file (only for get/put)
    local cmd=$1
    shift
    local bwarg=
    if test "$cmd" = "put"; then
        # Only uploads are rate-limited; BWLIMIT is in KB/s.
        bwarg="${BWLIMIT}K"
    fi
    # Pass through only the arguments actually supplied (the old code
    # always sent "$2", handing ls/rm a spurious empty argument).
    # $bwarg is intentionally unquoted so an empty value disappears
    # instead of becoming an empty argument.
    "$S3CMD" "$cmd" "$BUCKET" "$@" $bwarg
}
31
function move_over () {
    # Move a file to its offsite S3 destination.
    # Expects the file's contents to come from STDIN; the data is split
    # into CHUNKSIZE-MB pieces, each uploaded and then deleted locally.
    # $1: date subdirectory
    # $2: filename
    # $3: relative directory (optional)
    if test -z "$1" || test -z "$2" || test -n "$4"; then
        echo "Bad programming" >&2
        exit 1
    fi
    local subdir=$1
    local file=$2
    local dest=$BACKUPDIR/$subdir
    if test -n "$3"; then
        # BUG FIX: this used "$2" (the filename, always non-empty), so
        # the destination wrongly became $BACKUPDIR/$subdir/$file/…
        dest=$dest/$3
    fi
    # Chunks are named <file>.00, <file>.01, ...
    split -d -b ${CHUNKSIZE}m - "${file}."
    local i
    for i in "${file}".*; do
        s3_cmd put "$dest/$i" "$i"
        rm -f -- "$i"
    done
}
54
55
function prune_old_backups () {
    # Delete local backup staging directories older than 7 days and
    # recreate a fresh, empty one for this run.
    # $1: date subdirectory to (re)create under $BACKUPDIR
    local subdir=$1
    local oldpwd=$PWD
    if test -z "$subdir"; then
        echo "prune_old_backups: missing subdirectory argument" >&2
        return 1
    fi
    # Bail out rather than running the destructive find/rm below in
    # the wrong directory (the old code ignored cd failure).
    cd "$BACKUPDIR" || return 1
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    # BUG FIX: this used $SUBDIR (undefined); the local is lowercase,
    # so the directory for the current run was never recreated.
    rm -rf "$subdir"
    mkdir -p "$subdir"
    cd "$oldpwd"
}
66 }