# run this script as root, on deleuze
#
-exec >& /var/log/backup-to-megacz.com-log
+exec >& /var/log/backup-to-s3-log
PATH=$PATH:/bin:/usr/bin:/sbin:/usr/sbin
#COMPRESS_EXT=.bz2
COMPRESS_PROG=gzip
# units for BWLIMIT are KB/s
BWLIMIT=325
+# units for CHUNKSIZE are MB (split's "m" suffix, i.e. MiB)
+CHUNKSIZE=5000
IFS=$'\n'
+SCRIPTDIR=$(dirname "$0")
KEYFILE=/etc/backup-encryption-key
-BACKUPDIR=/afs/megacz.com/private/hcoop-backup
-BACKUPTMP=/var/backups/hcoop-backup
-SUBDIR=`date +%Y.%m.%d`
-
-#SYNC_CMD="rsync --bwlimit=$BWLIMIT --remove-source-files"
+BUCKET=hcoop.net-backups
+BACKUPDIR=full
+BACKUPTMP=/var/backups/hcoop-backup-testing
+SUBDIR=$(date +%Y.%m.%d)
+
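+# Credentials for the s3 helper script. Both env vars are assumed to
+# carry the key text itself (hence the cat), not a path to a key file.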
+export S3_ACCESS_KEY_ID=$(cat ~mwolson_admin/.amazon/access.key)
+export S3_SECRET_ACCESS_KEY=$(cat ~mwolson_admin/.amazon/secret.key)
+
+function s3_cmd () {
+    # $1: command (get|put|ls|rm)
+    # $2: remote file
+    # $3: local file (only used for get/put)
+    local cmd=$1
+    shift
+    local bwarg
+    if test "$cmd" = "put"; then
+        bwarg="${BWLIMIT}K"
+    else
+        bwarg=
+    fi
+    $SCRIPTDIR/s3 $cmd $BUCKET "$1" "$2" $bwarg
+}
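+# Example use (a sketch; the file names are hypothetical):
+#   s3_cmd put $BACKUPDIR/$SUBDIR/home.tar.gz.00 home.tar.gz.00
+#   s3_cmd get $BACKUPDIR/$SUBDIR/home.tar.gz.00 home.tar.gz.00
+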
function copy_over () {
    # Move file to its offsite destination
+    # $1: file to upload; $2: optional subdirectory under $DEST
+    local FILE=$1
    if test -n "$2"; then
        DEST=$DEST/$2
    fi
-    < $FILE catsync -b $BWLIMIT $DEST/$FILE
+    # Upload in CHUNKSIZE-MB pieces, presumably to stay within S3's
+    # per-object size limit; drop each chunk as soon as it is stored.
+    split -d -b ${CHUNKSIZE}m $FILE ${FILE}.
+    for i in ${FILE}.*; do
+        s3_cmd put $DEST/$i $i
+        rm -f $i
+    done
    rm -f $FILE
}
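+
+# Restore sketch (assuming the chunk names copy_over produces): fetch
+# each piece with "s3_cmd get", then reassemble in order, e.g.
+#   cat home.tar.gz.* > home.tar.gz
+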
-cd $BACKUPDIR
-find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
- -execdir rm -fr '{}' \; || true
-rm -rf $SUBDIR
-mkdir -p $SUBDIR
+function prune_old_backups () {
+    # Delete local backups older than a week, then recreate today's
+    # directory from scratch.
+    local oldpwd=$PWD
+    cd $BACKUPDIR
+    find . -mindepth 1 -maxdepth 1 -type d -ctime +7 \
+        -execdir rm -fr '{}' \; || true
+    rm -rf $SUBDIR
+    mkdir -p $SUBDIR
+    cd $oldpwd
+}
+
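+# prune_old_backups is left disabled while this S3 version is tested;
+# note that BACKUPDIR is now the remote prefix ("full") rather than a
+# local path, so the find/rm above would not prune anything useful.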
+#prune_old_backups
+
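+# Work from the local scratch area; the rest of the script presumably
+# builds archives here for copy_over to upload and remove.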
mkdir -p $BACKUPTMP
cd $BACKUPTMP