s3.common: Get rid of third argument to move_over.
[hcoop/scripts.git] / s3.common
CommitLineData
# Common functions for dealing with Amazon S3.

# Upload bandwidth cap handed to the s3 wrapper; units are KB/s.
BWLIMIT=325
# Size of each uploaded chunk; units are MB.
CHUNKSIZE=5000

BUCKET=hcoop.net-backups
BACKUPDIR=full
# The s3 wrapper script living next to this script; quote $0 in case
# the script path contains spaces.
S3CMD=$(dirname "$0")/s3

# Split only on newlines so filenames containing spaces survive
# unquoted expansions below.
IFS=$'\n'

export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
# NOTE(review): unlike the access key above, this exports the *path* to the
# secret-key file rather than its contents -- confirm the s3 wrapper really
# expects a path here.
export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key
function s3_cmd () {
    # Run the s3 wrapper against $BUCKET.
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file
    local cmd=$1
    shift
    local -a args=("$cmd" "$BUCKET" "$1" "$2")
    # Only uploads are bandwidth-limited; BWLIMIT is in KB/s.
    if test "$cmd" = "put"; then
        args+=("${BWLIMIT}K")
    fi
    "$S3CMD" "${args[@]}"
}
31
function move_over () {
    # Move a file to its offsite destination in CHUNKSIZE-MB pieces.
    # Expects the file contents to come from STDIN.
    # $1: date subdirectory
    # $2: filename
    # Exits the script on misuse (wrong argument count).
    if test -z "$2" || test -n "$3"; then
        echo "Bad programming" >&2
        exit 1
    fi
    local subdir=$1
    local file=$2
    local dest=$BACKUPDIR/$subdir
    # Chunk stdin into ${file}.00, ${file}.01, ... in the current directory.
    split -d -b "${CHUNKSIZE}m" - "${file}."
    # Upload each chunk, then delete the local copy to bound disk usage.
    for i in "${file}".*; do
        s3_cmd put "$dest/$i" "$i"
        rm -f -- "$i"
    done
}
50
51
function prune_old_backups () {
    # Delete backup subdirectories older than 7 days, then re-create an
    # empty directory for the next backup.
    # $1: date subdirectory to (re-)create under $BACKUPDIR
    local subdir=$1
    local oldpwd=$PWD
    # Refuse to run a recursive delete with no/bad target directory.
    test -n "$subdir" || return 1
    cd "$BACKUPDIR" || return 1
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    # BUGFIX: was the undefined uppercase $SUBDIR, so rm/mkdir ran with no
    # operand and the fresh backup directory was never created.
    rm -rf -- "$subdir"
    mkdir -p -- "$subdir"
    cd "$oldpwd"
}