2 # basic amazon s3 operations
3 # Licensed under the terms of the GNU GPL v2
4 # Copyright 2007 Victor Lowther <victor.lowther@gmail.com>
# Path to the curl binary.  Overridable from the environment so the script
# is not welded to one machine's filesystem layout.
CURL="${CURL:-/home/mwolson_admin/bin/curl}"
# Companion HMAC helper script, expected to live next to this script.
# NOTE(review): "$0" is quoted so paths containing spaces survive dirname.
HMAC="${HMAC:-$(dirname "$0")/s3-hmac}"
# print a message and bail.
# $@ = message to print; exits the script with status 1.
die() {
  # diagnostics belong on stderr so they do not pollute piped output.
  echo "$@" >&2
  exit 1
}

# check to see if the variable name passed exists and holds a value.
# $1 = NAME of an environment variable (indirect expansion); die if unset/empty.
check_or_die() {
  [[ ${!1} ]] || die "Environment variable ${1} is not set."
}
# check to see if we have all the needed S3 variables defined.
# Dies unless S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY are set, the latter
# names an existing file, and that file is exactly 40 bytes (the length of
# an S3 secret access key).
check_s3() {
  local x sak
  for x in S3_ACCESS_KEY_ID S3_SECRET_ACCESS_KEY; do
    check_or_die "${x}"
  done
  [[ -f ${S3_SECRET_ACCESS_KEY} ]] || die "S3_SECRET_ACCESS_KEY must point to a file!"
  # reading via redirection makes wc print only the byte count, with no
  # filename to strip afterwards.
  sak="$(wc -c < "${S3_SECRET_ACCESS_KEY}")"
  (( sak == 40 )) || \
    die "S3 Secret Access Key is not exactly 40 bytes long. Please fix it."
}
# check to see if our external dependencies exist.
# $@ = program names that must be on $PATH; reports every missing one,
# then dies if any were missing.
check_dep() {
  local res=0
  while [[ $# -ne 0 ]]; do
    # 'command -v' is the POSIX replacement for the deprecated 'which'.
    command -v "${1}" >/dev/null 2>&1 || { res=1; echo "${1} not found." >&2; }
    shift
  done
  (( res == 0 )) || die "aborting."
}
# make sure the hmac helper exists and is runnable before anything else.
if [[ ! -f ${HMAC} || ! -x ${HMAC} ]]; then
  die "hmac script not found or not executable."
fi

check_dep openssl date cat grep
56 # $1 = string to url encode
58 # we don't urlencode everything, just enough stuff.
76 # accept input on stdin, put it on stdout.
77 # patches accepted to get more stuff
## basic S3 functionality. x-amz-header functionality is not implemented.
# make an S3 signature string, which will be output on stdout.
# $1 = HTTP verb
# $2 = date string, must be in UTC
# $3 = bucket name, if any
# $4 = resource path, if any
# $5 = content md5, if any
# $6 = content MIME type, if any
# $7 = canonicalized headers, if any
# signature string will be output on stdout
s3_signature_string() {
  local verr="Must pass a verb to s3_signature_string!"
  # ${1:?${verr}} expands the message; plain ${1:?verr} would print the
  # literal word "verr" instead of the error text.
  local verb="${1:?${verr}}"
  local bucket="${3}"
  local resource="${4}"
  local derr="Must pass a date to s3_signature_string!"
  local date="${2:?${derr}}"
  local md5="${5}"
  local mime="${6}"
  local headers="${7}"
  # StringToSign layout: VERB\nMD5\nTYPE\nDATE\n<headers><bucket><resource>
  printf "%s\n%s\n%s\n%s\n%s%s%s" \
    "${verb}" "${md5}" "${mime}" "${date}" \
    "${headers}" "${bucket}" "${resource}" | \
    $HMAC sha1 "${S3_SECRET_ACCESS_KEY}" | openssl base64 -e -a
}
# cheesy, but it is the best way to have multiple headers.
# Emits each argument as a curl config-file "header" directive, suitable
# for feeding to curl via -K.
curl_headers() {
  # each arg passed will be output on its own line
  while (( $# )); do
    echo "header = \"${1}\""
    shift
  done
}
# invoke curl to do all the heavy HTTP lifting
# $1 = method (one of GET, PUT, HEAD, or DELETE)
# $2 = remote bucket.
# $3 = remote name.
# $4 = local name (file to upload, or file to save the download into).
# $5 = bandwidth limit.
s3_curl() {
  # stdopts was previously leaked as a global; declare it local too.
  local bucket remote date sig md5 arg inout headers stdopts
  # header handling is kinda fugly, but it works.
  bucket="${2:+/${2}}/" # slashify the bucket
  remote="$(urlenc "${3}")" # if you don't, strange things may happen.
  stdopts="--connect-timeout 10 --fail --silent"
  [[ $CURL_S3_DEBUG == true ]] && stdopts="${stdopts} --show-error --fail"
  [[ ${5} ]] && stdopts="${stdopts} --limit-rate ${5}"
  case "${1}" in
    GET) arg="-o" inout="${4:--}" # stdout if no $4
         ;;
    PUT) [[ ${2} ]] || die "PUT can has bucket?"
         if [[ ! ${3} ]]; then
           # creating a bucket: no body, so force the verb and a zero length.
           arg="-X PUT"
           headers[${#headers[@]}]="Content-Length: 0"
         elif [[ -f ${4} ]]; then
           # uploading a file: sign its md5 and let curl stream it with -T.
           md5="$(openssl dgst -md5 -binary "${4}" | openssl base64 -e -a)"
           arg="-T" inout="${4}"
           headers[${#headers[@]}]="Expect: 100-continue"
         else
           die "Cannot write non-existing file ${4}"
         fi
         ;;
    DELETE) arg="-X DELETE"
            ;;
    HEAD) arg="-I" # headers only; s3_test depends on this verb working.
          ;;
    *) die "Unknown verb ${1}. It probably would not have worked anyways." ;;
  esac
  date="$(TZ=UTC date '+%a, %e %b %Y %H:%M:%S %z')"
  sig=$(s3_signature_string "${1}" "${date}" "${bucket}" "${remote}" "${md5}")

  headers[${#headers[@]}]="Authorization: AWS ${S3_ACCESS_KEY_ID}:${sig}"
  headers[${#headers[@]}]="Date: ${date}"
  [[ ${md5} ]] && headers[${#headers[@]}]="Content-MD5: ${md5}"
  # -K <(...) feeds the accumulated headers to curl as a config file.
  $CURL ${arg} "${inout}" ${stdopts} -K <(curl_headers "${headers[@]}") \
    "http://s3.amazonaws.com${bucket}${remote}"
}
# put a file into S3.
# $1 = remote bucket to put it into
# $2 = remote name to put
# $3 = file to put.  This must be present if $2 is.
# $4 = bandwidth limit.
s3_put() {
  s3_curl PUT "${1}" "${2}" "${3:-${2}}" "${4}"
}
# fetch a file from S3.
# $1 = bucket to get file from
# $2 = remote file to get
# $3 = local file to get into. Will be overwritten if it exists.
#      If this contains a path, that path must exist before calling this.
# $4 = bandwidth limit.
s3_get() {
  s3_curl GET "${1}" "${2}" "${3:-${2}}" "${4}"
}
# test whether a remote file exists.
# same args as s3_get, but uses the HEAD verb instead of the GET verb.
s3_test() {
  s3_curl HEAD "${1}" "${2}" >/dev/null
}
# list the buckets we own.
# Hideously ugly, but it works well enough.
s3_buckets() {
  s3_get | grep -o '<Name>[^>]*</Name>' | sed 's/<[^>]*>//g' | xmldec
}
# list the contents of a bucket.
# this will only return the first thousand entries, alas
# Maybe some kind soul can fix this without writing an XML parser in bash?
# Also need to add xml entity handling.
# $1 = bucket to list; returns 1 without contacting S3 if it is missing.
s3_list() {
  [[ ${1} ]] || return 1
  s3_get "${1}" | grep -o '<Key>[^>]*</Key>' | sed 's/<[^>]*>//g' | xmldec
}
# remove one item from a bucket.
# $1 = bucket to delete from
# $2 = item to delete
s3_delete() {
  s3_curl DELETE "${1}" "${2}"
}
# delete everything in a bucket.
# because this uses s3_list, it suffers from the same flaws.
# $1 = bucket to delete everything from
s3_rmrf() {
  local f
  # IFS= and -r keep keys with leading spaces or backslashes intact.
  s3_list "${1}" | while IFS= read -r f; do
    s3_delete "${1}" "${f}"
  done
}
# command dispatch: the first argument selects the operation, the
# remaining arguments are handed to the matching s3_* helper.
case "${1}" in
  put) shift; s3_put "$@" ;;
  get) shift; s3_get "$@" ;;
  rm) shift; s3_delete "$@" ;;
  ls) shift; s3_list "$@" ;;
  test) shift; s3_test "$@" ;;
  buckets) s3_buckets ;;
  rmrf) shift; s3_rmrf "$@" ;;
  *) die "Unknown command ${1}."
     ;;
esac