- Split files into ~5GB chunks before transferring, to stay under Amazon S3's single-upload size limit.
- Simplify the s3_cmd interface: callers now pass only the remote and local paths, the bucket is filled in automatically, and the bandwidth limit is applied only to uploads (see the usage sketch below).
- Temporarily comment out backup pruning.
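
As a rough usage sketch of the reworked pieces (file names and the "nightly" prefix are illustrative; BUCKET, BWLIMIT, CHUNKSIZE, and SCRIPTDIR are defined elsewhere in the script):

    # split a large archive into 5000 MB pieces: backup.tar.gz.00, .01, ...
    split -d -b ${CHUNKSIZE}m backup.tar.gz backup.tar.gz.

    # upload one piece; the bandwidth limit is added automatically for put
    s3_cmd put nightly/backup.tar.gz.00 backup.tar.gz.00

    # downloads take the same two arguments but skip the bandwidth limit
    s3_cmd get nightly/backup.tar.gz.00 backup.tar.gz.00

Restoring a split archive means fetching each piece and concatenating them in order (cat backup.tar.gz.* > backup.tar.gz); that step is not part of this change.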
COMPRESS_PROG=gzip
# units for BWLIMIT are KB/s
BWLIMIT=325
+# units for CHUNKSIZE are MB
+CHUNKSIZE=5000
IFS=$'\n'
function s3_cmd () {
# $1: command (get|put|ls|rm)
- # $2: bucket
- # $3: destination
- # $4: source file
- $SCRIPTDIR/s3 $1 $2 $3 $4 ${BWLIMIT}K
+ # $2: remote file
+ # $3: local file
+ local cmd=$1
+ shift
+ local bwarg
+ if test "$cmd" = "put"; then
+ bwarg="${BWLIMIT}K";
+ else
+ bwarg=
+ fi
+ $SCRIPTDIR/s3 $cmd $BUCKET "$1" "$2" $bwarg
}
function copy_over () {
if test -n "$2"; then
DEST=$DEST/$2
fi
- s3_cmd put $BUCKET $DEST/$FILE $FILE
+ split -d -b ${CHUNKSIZE}m $FILE ${FILE}.
+ for i in ${FILE}.*; do
+ s3_cmd put $DEST/$i $i
+ rm -f $i
+ done
rm -f $FILE
}
cd $oldpwd
}
-prune_old_backups
+#prune_old_backups
mkdir -p $BACKUPTMP
cd $BACKUPTMP