#!/bin/sh
# To the extent possible under law, Viktor Szakats
# has waived all copyright and related or neighboring rights to this
# script.
# CC0 - https://creativecommons.org/publicdomain/zero/1.0/
# SPDX-License-Identifier: CC0-1.0
# THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Upload a file to Amazon AWS S3 (and compatible) using Signature Version 4
#
# docs:
# https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
# https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
#
# requires:
# curl, openssl 1.x or newer, GNU sed, LF EOLs in this file
# shellcheck disable=SC2317
# shellcheck disable=SC3040
set -o errexit -o nounset; [ -n "${BASH:-}${ZSH_NAME:-}" ] && set -o pipefail
fileLocal="${1:-example-local-file.ext}"
bucket="${2:-example-bucket}"  # AWS S3 bucket or full URL (with ending slash): https://localhost[:9000]/[bucket/]
region="${3:-}"
storageClass="${4:-STANDARD}"  # or 'REDUCED_REDUNDANCY'
SSE="${5:-AES256}"; [ "${SSE}" = 'none' ] && SSE=''  # Server-side encryption: 'AES256' (default) or 'none'
my_openssl() {
  if [ -f /usr/local/opt/openssl@1.1/bin/openssl ]; then
    /usr/local/opt/openssl@1.1/bin/openssl "$@"
  elif [ -f /usr/local/opt/openssl/bin/openssl ]; then
    /usr/local/opt/openssl/bin/openssl "$@"
  else
    openssl "$@"
  fi
}
my_sed() {
  if command -v gsed > /dev/null 2>&1; then
    gsed "$@"
  else
    sed "$@"
  fi
}
awsStringSign4() {
  kSecret="AWS4$1"
  kDate=$(printf '%s' "$2" | my_openssl dgst -sha256 -hex -mac HMAC -macopt "key:${kSecret}" 2>/dev/null | my_sed 's/^.* //')
  kRegion=$(printf '%s' "$3" | my_openssl dgst -sha256 -hex -mac HMAC -macopt "hexkey:${kDate}" 2>/dev/null | my_sed 's/^.* //')
  kService=$(printf '%s' "$4" | my_openssl dgst -sha256 -hex -mac HMAC -macopt "hexkey:${kRegion}" 2>/dev/null | my_sed 's/^.* //')
  kSigning=$(printf 'aws4_request' | my_openssl dgst -sha256 -hex -mac HMAC -macopt "hexkey:${kService}" 2>/dev/null | my_sed 's/^.* //')
  signedString=$(printf '%s' "$5" | my_openssl dgst -sha256 -hex -mac HMAC -macopt "hexkey:${kSigning}" 2>/dev/null | my_sed 's/^.* //')
  printf '%s' "${signedString}"
}
iniGet() {
  # based on: https://stackoverflow.com/questions/22550265/read-certain-key-from-certain-section-of-ini-file-sed-awk#comment34321563_22550640
  printf '%s' "$(my_sed \
    -n -E "/\[$2\]/,/\[.*\]/{/$3/s/(.*)=[ \\t]*(.*)/\2/p}" "$1")"
}
# Initialize access keys
if [ -z "${AWS_CONFIG_FILE:-}" ]; then
  if [ -z "${AWS_ACCESS_KEY_ID:-}" ]; then
    >&2 echo '! AWS_CONFIG_FILE or AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY envvars not set.'
    exit 1
  else
    awsAccess="${AWS_ACCESS_KEY_ID}"
    awsSecret="${AWS_SECRET_ACCESS_KEY}"
    awsRegion="${AWS_DEFAULT_REGION:-eu-west-1}"
  fi
else
  awsProfile='default'
  # Read standard aws-cli configuration file
  # pointed to by the envvar AWS_CONFIG_FILE
  awsAccess="$(iniGet "${AWS_CONFIG_FILE}" "${awsProfile}" 'aws_access_key_id')"
  awsSecret="$(iniGet "${AWS_CONFIG_FILE}" "${awsProfile}" 'aws_secret_access_key')"
  awsRegion="$(iniGet "${AWS_CONFIG_FILE}" "${awsProfile}" 'region')"
fi
# Initialize defaults
if [ -z "${region}" ]; then
  region="${awsRegion}"
fi
>&2 echo "! Uploading..." "${fileLocal}" "->" "${bucket}" "${region}" "${storageClass}"
>&2 echo "! | $(uname) | $(my_openssl version) | $(my_sed --version | head -1) |"
# Initialize helper variables
httpReq='PUT'
authType='AWS4-HMAC-SHA256'
service='s3'
if [ "${bucket#https://*}" != "${bucket}" ] || \
   [ "${bucket#http://*}" != "${bucket}" ]; then
  fullUrl="${bucket}"
else
  fullUrl="https://${bucket}.${service}.${region}.amazonaws.com/"
fi
pathRemote="$(printf '%s' "${fullUrl}" | sed -E 's|^https?://||g' | grep -o -E '/.*$' | cut -c 2-)"
hostport="$(printf '%s' "${fullUrl}" | sed -E -e 's|^https?://||g' -e 's|/.*$||')"
dateValueS=$(date -u +'%Y%m%d')
dateValueL=$(date -u +'%Y%m%dT%H%M%SZ')
if command -v file >/dev/null 2>&1; then
  contentType="$(file --brief --mime-type "${fileLocal}")"
else
  contentType='application/octet-stream'
fi
# Try to URL-encode the filename we pass
# based on: https://gist.github.com/jaytaylor/5a90c49e0976aadfe0726a847ce58736?permalink_comment_id=4043195#gistcomment-4043195
# Very curl version dependent.
fileRemote="$({ curl --silent --get / --data-urlencode "=${fileLocal}" --write-out '%{url}' 2>/dev/null || true; } | cut -c 3- | sed 's/+/%20/g')"
if [ -z "${fileRemote}" ] || [ "${fileRemote}" = '/' ]; then | |
# Needs trurl | |
fileRemote="$({ trurl --accept-space "file:///${fileLocal}" 2>/dev/null || true; } | cut -c 9-)" | |
if [ -z "${fileRemote}" ]; then | |
# Needs python3 | |
fileRemote="$({ printf '%s' "${fileLocal}" | python3 \ | |
-c 'import sys; import urllib.parse as ul; sys.stdout.write(ul.quote_plus(sys.stdin.read()))' 2>/dev/null || true; } | sed 's/+/%20/g')" | |
if [ -z "${fileRemote}" ]; then | |
# Last resort, that will probably not work as expected, but better than an empty string | |
fileRemote="${fileLocal}" | |
fi | |
fi | |
fi | |
# 0. Hash the file to be uploaded | |
if [ -f "${fileLocal}" ]; then | |
payloadHash=$(my_openssl dgst -sha256 -hex < "${fileLocal}" 2>/dev/null | my_sed 's/^.* //') | |
else | |
>&2 echo "! File not found: '${fileLocal}'" | |
exit 1 | |
fi | |
# 1. Create canonical request | |
# NOTE: order significant in ${headerList} and ${canonicalRequest} | |
if [ -n "${SSE}" ]; then | |
headerList='content-type;host;x-amz-content-sha256;x-amz-date;x-amz-server-side-encryption;x-amz-storage-class' | |
ssehdr="\ | |
x-amz-server-side-encryption:${SSE} | |
" | |
else | |
headerList='content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class' | |
ssehdr='' | |
fi | |
canonicalRequest="\ | |
${httpReq} | |
/${pathRemote}${fileRemote} | |
content-type:${contentType} | |
host:${hostport} | |
x-amz-content-sha256:${payloadHash} | |
x-amz-date:${dateValueL} | |
${ssehdr}x-amz-storage-class:${storageClass} | |
${headerList} | |
${payloadHash}" | |
# Hash it
canonicalRequestHash=$(printf '%s' "${canonicalRequest}" | my_openssl dgst -sha256 -hex 2>/dev/null | my_sed 's/^.* //')
# 2. Create string to sign
stringToSign="\
${authType}
${dateValueL}
${dateValueS}/${region}/${service}/aws4_request
${canonicalRequestHash}"
# 3. Sign the string
signature=$(awsStringSign4 "${awsSecret}" "${dateValueS}" "${region}" "${service}" "${stringToSign}")
# Upload
curl --silent --location --proto-redir =https --request "${httpReq}" --upload-file "${fileLocal}" \
  --header "Content-Type: ${contentType}" \
  --header "Host: ${hostport}" \
  --header "X-Amz-Content-SHA256: ${payloadHash}" \
  --header "X-Amz-Date: ${dateValueL}" \
  --header "X-Amz-Server-Side-Encryption: ${SSE}" \
  --header "X-Amz-Storage-Class: ${storageClass}" \
  --header "Authorization: ${authType} Credential=${awsAccess}/${dateValueS}/${region}/${service}/aws4_request, SignedHeaders=${headerList}, Signature=${signature}" \
  "${fullUrl}${fileRemote}"
return
# Examples
cat > 'test.xml' <<EOF
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<test>
</test>
EOF
export AWS_ACCESS_KEY_ID='<example-id>'
export AWS_SECRET_ACCESS_KEY='<example-key>'
./s3-upload-aws4.sh 'test.xml' 'http://localhost:9000/example-bucket/' 'eu-west-1' '' 'none'
./s3-upload-aws4.sh 'test.xml' 'http://localhost:9000/example-bucket/' 'eu-west-1' ''
./s3-upload-aws4.sh 'test.xml' 'example-bucket' 'eu-west-1' ''
OpenSSL 1.0.2h 3 May 2016
InvalidRequest
Missing required header for this request: x-amz-content-sha256
(RequestId/HostId: 0E9858A0CC8E8DA78mNfWwJMsBjEMZtGeMv1QBaPPLfUyVlYCFJyy5wea2PRCdygUfb1OkjlNhKXWgIVGavoh71ck2w=)
I've fixed some minor (but unpleasant) portability issues after testing the script in different environments. None of those should affect the basic logic though; it works here when tested on macOS/Linux (with the eu-central-1 region).
I got your script running on OSX after installing gnu-sed with brew install gnu-sed and putting it into the PATH with export PATH="/usr/local/opt/gnu-sed/libexec/gnubin:$PATH".
Also, I needed to put --insecure into the curl command, otherwise curl would not run because it was missing a cert.
Anyway, now I get the following error:
<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>AuthorizationHeaderMalformed</Code><Message>The authorization header is malformed; the Credential is mal-formed; expecting "<YOUR-AKID>/YYYYMMDD/REGION/SERVICE/aws4_request".</Message><RequestId>...</RequestId><HostId>...</HostId></Error>
Is the script still working for you?
@vszakats, does this script still work for you? I'm constantly getting SignatureDoesNotMatch on a Mac. I've swapped out sed for cut because sed wasn't working on my Mac.
I'm also getting the SignatureDoesNotMatch error on Mac OS X, but I think it's due to a difference in openssl on Mac vs Linux. I get this error:
unknown option '-mac'
options are
<snip>
[a list of all the options for openssl]
- The script does work here on macOS with the required OpenSSL (1.x) and GNU sed tools installed: brew install openssl@1.1 gnu-sed (openssl may work as well). gsed should normally be sym-linked to /usr/local/bin/gsed after brew install, and /usr/local/bin is normally in $PATH after a Homebrew install (a quick check is sketched after this list).
- As for --insecure, I'm intentionally not using it: to remain secure, try using either the system curl or brew install curl; both should be aware of the required root certificate, as offered by macOS.
- As for AuthorizationHeaderMalformed, something may be wrong with the AWS credentials, not exactly sure what. You may try to set them via AWS_ACCESS_KEY and AWS_SECRET_KEY, also making sure they are valid for the given region/bucket.
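A quick, hedged way to confirm which tools the script's my_openssl/my_sed helpers will pick up (the paths assume a default Homebrew prefix under /usr/local):
# report the openssl the script prefers, the GNU sed it looks for, and the curl in use
/usr/local/opt/openssl@1.1/bin/openssl version 2>/dev/null || openssl version
command -v gsed >/dev/null 2>&1 && gsed --version | head -1
curl --version | head -1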
Example test.sh:
#!/bin/sh
export AWS_CONFIG_FILE='./my-aws-config'
echo '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<test></test>' > test.xml
# May need: `chmod +x s3-upload-aws4.sh`
if ./s3-upload-aws4.sh test.xml 'my-test-bucket' 'eu-central-1' 'REDUCED_REDUNDANCY'; then
  echo 'OK'
else
  echo "Failed ($?)"
fi
rm -f test.xml
where ./my-aws-config is:
[default]
aws_access_key_id = AZZZZZZZZZZZZZZZZZZZ
aws_secret_access_key = zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz
Vincent,
This is brilliant, thanks so much for sharing this! Worked like a charm.
One little gotcha I'd like to point out for anyone else doing a copy-and-paste of the above code: watch out for how the empty lines of the canonicalRequest are (or should I say can be? maybe it's just me?) rather mysteriously stripped out when you paste, like so:
canonicalRequest="\
${httpReq}
/${fileRemote}
content-type:${contentType}
host:${bucket}${baseUrl}
x-amz-content-sha256:${payloadHash}
x-amz-date:${dateValueL}
x-amz-server-side-encryption:AES256
x-amz-storage-class:${storageClass}
${headerList}
${payloadHash}"
As per the original code, it SHOULD be:
canonicalRequest="\
${httpReq}
/${fileRemote}
content-type:${contentType}
host:${bucket}${baseUrl}
x-amz-content-sha256:${payloadHash}
x-amz-date:${dateValueL}
x-amz-server-side-encryption:AES256
x-amz-storage-class:${storageClass}
${headerList}
${payloadHash}"
The lack of those empty lines will cause you to get a SignatureDoesNotMatch.
No such issue if you copy from Raw. :)
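A minimal sanity check for a pasted copy (a hedged sketch using the variable name from the script): count the empty lines inside the assembled canonical request; with the layout above there should be exactly two, one for the empty query string and one terminating the header block.
# run right after canonicalRequest is assembled; expected output: 2
printf '%s\n' "${canonicalRequest}" | grep -c '^$'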
This was a very awesome and kind share, and the comments too. Much thanks.
Thank you for this script. This is very helpful. However, I noticed that if a number appears anywhere in the name of the S3 bucket, I get an error message "No AWSAccessKey was presented." Could someone verify whether that's happening for you as well? I'm running Ubuntu 18.04 with OpenSSL 1.1.1
Turns out, you just have to wait a day for an s3 bucket to be fully created.
Awesome. Information on how to access GCS using AWS4 is rare, and this script is a great source of learning. Thanks!!
What's necessary to convert this into a download script? It's not only changing PUT to GET :(
This is the only solution that worked for me. I tried so many scripts but this one really worked (I made the change as advised in the comments and added the empty lines in canonicalRequest).
Big thanks to @vszakats! 👍 💯
Improvements in my gist:
- Follow the documented common names of the AWS CLI supported environment variables.
- When AWS_CONFIG_FILE is unset, use the environment variable AWS_DEFAULT_REGION if the region is not passed, as the AWS CLI would do.
I use it like that:
unset AWS_PROFILE
export AWS_ACCESS_KEY_ID=${id}
export AWS_SECRET_ACCESS_KEY=${secret}
export AWS_DEFAULT_REGION=${region}
cat > test-file.out << EOF
0123456789
EOF
./s3-upload-aws4.sh test-file.out ${bucket_name}
Thanks @denist-huma! I've updated this Gist with your changes.
@vszakats 👍
I also set the baseUrl=".${service}.${region}.amazonaws.com" to get regional buckets working right for me, or else I got denied:
$ ../../scripts/s3-put-test-file.sh test-unversioned-delete-bucket
Uploading test-file.out -> test-unversioned-delete-bucket-2dr26-oi5rr eu-west-2 STANDARD
| Linux | OpenSSL 1.1.1f 31 Mar 2020 | sed (GNU sed) 4.7 |
<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>AccessDenied</Code><Message>Access Denied</Message><RequestId>...
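For reference, a hedged illustration of the regional virtual-hosted endpoint that baseUrl change produces (bucket and region names are placeholders):
bucket='my-test-bucket'; service='s3'; region='eu-west-2'
echo "https://${bucket}.${service}.${region}.amazonaws.com/"
# -> https://my-test-bucket.s3.eu-west-2.amazonaws.com/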
LGTM, committed it as well, thanks!
The request signature we calculated does not match the signature you provided. Check your key and signing method.
@vszakats
Hello friend, can you explain how it works? I'm new to ssh.
I kept getting an error about the signature not matching. Seemed to be caused by a forward slash in the fileLocal path - "build/image.png". The error went away once I moved the file to the current working directory, making the path just "image.png"
I also had the same problem; it turned out to be the URL encoding. curl --silent --get / --data-urlencode "=${fileLocal}" --write-out '%{url}' doesn't seem to work in all versions of curl.
After an annoyingly long time of trial and error: for me, curl 7.68 and 8.0 fail, but curl 7.79 seems to work.
An easy fix would be to just be careful with file naming and not put fancy characters in there that need URL encoding.
@lebuni Thanks for the info. It's a twisted hack, though I wonder why it is breaking/unbreaking. And if curl would be willing to add a local test + fix to stabilize it.
This might also work, where trurl is available: trurl --accept-space "file:///${fileLocal}" | cut -c 9-
Added trurl and python3 fallback. (only spot-tested)
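A hedged spot-check of the three encoders for a name containing a space (my file.png is a made-up example; with a curl where the --data-urlencode trick works, all three commands should print my%20file.png):
name='my file.png'
# 1) curl trick (curl version dependent)
curl --silent --get / --data-urlencode "=${name}" --write-out '%{url}\n' 2>/dev/null | cut -c 3- | sed 's/+/%20/g'
# 2) trurl
trurl --accept-space "file:///${name}" 2>/dev/null | cut -c 9-
# 3) python3
printf '%s' "${name}" | python3 -c 'import sys; import urllib.parse as ul; sys.stdout.write(ul.quote_plus(sys.stdin.read()))' | sed 's/+/%20/g'; echo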
Getting the following error, anyone else getting it?
./s3-upload-aws4.sh 'test.xml' 'purestylebackup' 'us-east-1' ''
! Uploading... test.xml -> purestylebackup us-east-1 STANDARD
openssl: symbol lookup error: openssl: undefined symbol: Camellia_set_key, version OPENSSL_1_1_0
! | Linux | | sed (GNU sed) 4.7 |
* Trying 52.216.220.162:443...
* Connected to purestylebackup.s3.us-east-1.amazonaws.com (52.216.220.162) port 443 (#0)
* ALPN, offering h2
* ALPN, offering http/1.1
* successfully set certificate verify locations:
* CAfile: /etc/ssl/certs/ca-certificates.crt
* CApath: /etc/ssl/certs
* TLSv1.3 (OUT), TLS handshake, Client hello (1):
* TLSv1.3 (IN), TLS handshake, Server hello (2):
* TLSv1.2 (IN), TLS handshake, Certificate (11):
* TLSv1.2 (IN), TLS handshake, Server key exchange (12):
* TLSv1.2 (IN), TLS handshake, Server finished (14):
* TLSv1.2 (OUT), TLS handshake, Client key exchange (16):
* TLSv1.2 (OUT), TLS change cipher, Change cipher spec (1):
* TLSv1.2 (OUT), TLS handshake, Finished (20):
* TLSv1.2 (IN), TLS handshake, Finished (20):
* SSL connection using TLSv1.2 / ECDHE-RSA-AES128-GCM-SHA256
* ALPN, server accepted to use http/1.1
* Server certificate:
* subject: CN=s3.amazonaws.com
* start date: Jul 10 00:00:00 2023 GMT
* expire date: Jun 21 23:59:59 2024 GMT
* subjectAltName: host "purestylebackup.s3.us-east-1.amazonaws.com" matched cert's "*.s3.us-east-1.amazonaws.com"
* issuer: C=US; O=Amazon; CN=Amazon RSA 2048 M01
* SSL certificate verify ok.
> PUT /test.xml HTTP/1.1
> Host: purestylebackup.s3.us-east-1.amazonaws.com
> User-Agent: curl/7.74.0
> Accept: */*
> Content-Type: text/xml
> X-Amz-Date: 20230907T073626Z
> X-Amz-Server-Side-Encryption: AES256
> X-Amz-Storage-Class: STANDARD
> Authorization: AWS4-HMAC-SHA256 Credential=REDACTEDBYME/20230907/us-east-1/s3/aws4_request, SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date;x-amz-server-side-encryption;x-amz-storage-class, Signature=
> Content-Length: 71
> Expect: 100-continue
>
* Mark bundle as not supporting multiuse
< HTTP/1.1 400 Bad Request
< x-amz-request-id: XJSNFSHJZ30HBPZT
< x-amz-id-2: po0Cws+bAQUCgPv4kXlSpu7eAuD8kByq3zHhicgCZwGicW+rhVMXMGvQYnCeeIHABxSHPyvkZAY=
< Content-Type: application/xml
< Transfer-Encoding: chunked
< Date: Thu, 07 Sep 2023 07:36:26 GMT
< Server: AmazonS3
< Connection: close
<
<?xml version="1.0" encoding="UTF-8"?>
* Closing connection 0
* TLSv1.2 (OUT), TLS alert, close notify (256):
If you have curl version >= 7.75.0, you can upload files to S3 like this:
#!/bin/sh
FILE="$1"
S3_ACCESS_KEY="<ACCESS_KEY>"
S3_SECRET_KEY="<SECRET_KEY>"
S3_BUCKET="<BUCKET_NAME>"
S3_REGION="<REGION>"
curl --progress-bar -X PUT \
  --user "${S3_ACCESS_KEY}":"${S3_SECRET_KEY}" \
  --aws-sigv4 "aws:amz:${S3_REGION}:s3" \
  --upload-file "${FILE}" \
  "https://${S3_BUCKET}.s3.${S3_REGION}.amazonaws.com"
That's correct. There have been fixes for this in later curl versions up to recently (v8.5.0), so the newer the curl, the better this works.
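The same built-in signing also covers the download direction asked about earlier in this thread; a hedged sketch (curl 7.75.0 or newer, all names below are placeholders):
#!/bin/sh
S3_ACCESS_KEY='<ACCESS_KEY>'
S3_SECRET_KEY='<SECRET_KEY>'
S3_BUCKET='<BUCKET_NAME>'
S3_REGION='<REGION>'
OBJECT_KEY='test.xml'
# GET the object; curl derives the SigV4 signature itself, no manual canonical request needed
curl --fail --silent --show-error \
  --user "${S3_ACCESS_KEY}:${S3_SECRET_KEY}" \
  --aws-sigv4 "aws:amz:${S3_REGION}:s3" \
  --output "${OBJECT_KEY}" \
  "https://${S3_BUCKET}.s3.${S3_REGION}.amazonaws.com/${OBJECT_KEY}"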
It's weird how it works for me on Linux but the same script does not work on Mac: signature error.
I've found the problem; AWS apparently started to require the payload hash to be specified a second time in the 'canonical request', as the x-amz-content-sha256: header.
[Also applied cleanups to be more tolerant of various OpenSSL versions, and it will now also pick the correct Homebrew openssl/GNU sed on macOS (they need to be installed).]
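For reference, that header's value is simply the hex SHA-256 of the request payload, the same digest step 0 of the script already computes; a minimal sketch (example-local-file.ext is a placeholder):
# hex SHA-256 of the payload; goes into both the x-amz-content-sha256 header
# and the hashed-payload line of the canonical request
openssl dgst -sha256 -hex < example-local-file.ext | sed 's/^.* //'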