+# -*- Shell-Script -*-
# Common functions for dealing with Amazon S3.
# units for BWLIMIT are KB/s
BUCKET=hcoop.net-backups
BACKUPDIR=full
-SUBDIR=$(date +%Y.%m.%d)
S3CMD=$(dirname $0)/s3
IFS=$'\n'
}
function move_over () {
- # Move file to its offsite destination
- # $1: file, $2: relative directory (optional)
- if test -z "$1" || test -n "$3"; then
+ # Move file to its offsite destination.
+ # Expects the file to come from STDIN.
+ # $1: date subdirectory
+ # $2: filename
+ if test -z "$2" || test -n "$3"; then  # require $2 (filename) and reject a third argument
    echo "Bad programming"
    exit 1
    fi
- local FILE=$1
- local DEST=$BACKUPDIR/$SUBDIR
- if test -n "$2"; then
- DEST=$DEST/$2
- fi
- split -d -b ${CHUNKSIZE}m $FILE ${FILE}.
- for i in ${FILE}.*; do
    s3_cmd put $DEST/$i $i
+ local subdir=$1
+ local file=$2
+ local dest=$BACKUPDIR/$subdir
+ local ret  # NOTE(review): 'ret' is never used in this hunk — confirm it is needed before keeping
+ split -d -b ${CHUNKSIZE}m - ${file}.  # chunk STDIN into ${file}.00, ${file}.01, ... (CHUNKSIZE is set outside this hunk — TODO confirm)
+# for i in ${file}.*; do
+ for i in *.[0-9][0-9]; do  # NOTE(review): matches ANY two-digit-suffixed file in cwd, not only ${file}.* chunks — verify no stray files can match
+ echo "Transferring $i to S3 ..."
+ s3_cmd put $dest/$i $i  # s3_cmd presumably wraps $S3CMD — defined outside this hunk; verify
    rm -f $i
    done
- rm -f $FILE
}
function prune_old_backups () {
# Implement me
+ local subdir=$1
local oldpwd=$PWD
cd $BACKUPDIR
find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \