# Common functions for dealing with Amazon S3.

# units for BWLIMIT are KB/s
BWLIMIT=325
# units for CHUNKSIZE are MB
CHUNKSIZE=5000

BUCKET=hcoop.net-backups
BACKUPDIR=full
SUBDIR=$(date +%Y.%m.%d)
S3CMD=$(dirname $0)/s3

# Split words only on newlines, so that filenames containing spaces
# survive the unquoted expansions below.
IFS=$'\n'

# The access key ID is read into the environment; the secret key is
# passed as the *path* to the key file, which the s3 wrapper script is
# expected to read itself (presumably keeping the secret out of the
# environment and process listings).
export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
export S3_SECRET_ACCESS_KEY=~mwolson_admin/.amazon/secret.key

function s3_cmd () {
    # $1: command (get|put|ls|rm)
    # $2: remote file
    # $3: local file
    local cmd=$1
    shift
    local bwarg
    if test "$cmd" = "put"; then
        bwarg="${BWLIMIT}K"
    else
        bwarg=
    fi
    $S3CMD $cmd $BUCKET "$1" "$2" $bwarg
}
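
# Usage sketch for s3_cmd (file names and date are hypothetical; the
# remote layout follows the $BACKUPDIR/$SUBDIR convention used below):
#   s3_cmd put full/2008.01.01/home.tar.gz.00 home.tar.gz.00
#   s3_cmd get full/2008.01.01/home.tar.gz.00 home.tar.gz.00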

function move_over () {
    # Split a file arriving on STDIN into chunks and move each chunk
    # to its offsite destination as it is produced.
    # $1: filename, $2: relative directory (optional)
    if test -z "$1" || test -n "$3"; then
        echo "move_over: expected 1 or 2 arguments" >&2
        exit 1
    fi
    local FILE=$1
    local DEST=$BACKUPDIR/$SUBDIR
    if test -n "$2"; then
        DEST=$DEST/$2
    fi
    # split reads STDIN ("-") and writes numeric-suffixed chunks
    # (FILE.00, FILE.01, ...) into the current directory.
    split -d -b ${CHUNKSIZE}m - "${FILE}."
    for i in ${FILE}.*; do
        s3_cmd put "$DEST/$i" "$i"
        rm -f "$i"
    done
}
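
# Usage sketch (archive name and source path are hypothetical): stream a
# tarball to S3 in ${CHUNKSIZE}MB pieces without staging the whole file
# locally first:
#   tar -czf - /home/someuser | move_over someuser.tar.gz user-backups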

function prune_old_backups () {
    # Remove local staging directories more than a week old, then
    # recreate today's directory. Remote copies on S3 are not pruned.
    local oldpwd=$PWD
    cd $BACKUPDIR
    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
        -execdir rm -fr '{}' \; || true
    rm -rf $SUBDIR
    mkdir -p $SUBDIR
    cd $oldpwd
}
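
# Remote pruning is left unimplemented above. A minimal sketch, assuming
# the s3 wrapper's "rm" command accepts a single remote path and that the
# chunk names for a given day are known (both are assumptions; the
# wrapper's interface beyond get/put is not shown here):
#   for chunk in home.tar.gz.00 home.tar.gz.01; do
#       s3_cmd rm $BACKUPDIR/2008.01.01/$chunk
#   done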