find-exposed-resources.sh

This commit is contained in:
mgeeky 2019-12-05 15:41:29 +01:00
parent 7e0bb74f06
commit a98678255a
2 changed files with 108 additions and 0 deletions

View File

@ -244,8 +244,11 @@ drwxr-xr-x 3 root root 4096 lis 4 16:18 home
- **`exfiltrateLambdaTasksDirectory.py`** - Script that creates an in-memory ZIP file from the entire directory `$LAMBDA_TASK_ROOT` (typically `/var/task`) and sends it out in a form of HTTP(S) POST request, within an `exfil` parameter. To be used for exfiltrating AWS Lambda's entire source code. - **`exfiltrateLambdaTasksDirectory.py`** - Script that creates an in-memory ZIP file from the entire directory `$LAMBDA_TASK_ROOT` (typically `/var/task`) and sends it out in a form of HTTP(S) POST request, within an `exfil` parameter. To be used for exfiltrating AWS Lambda's entire source code.
- **`find-exposed-resources.sh`** - A simple script that enumerates some of the AWS resources that may be publicly exposed, which would count as a security misconfiguration.
- **`get-session-creds-in-config-format.sh`** - Calls `aws sts assume-role` using MFA token in order to then retrieve session credentials and reformat it into `~/.aws/credentials` file format. Having that it's easy to copy-and-paste that script's output into credentials file. Then tools such as _s3tk_ that are unable to process MFA tokens may just use preconfigured profile creds. - **`get-session-creds-in-config-format.sh`** - Calls `aws sts assume-role` using MFA token in order to then retrieve session credentials and reformat it into `~/.aws/credentials` file format. Having that it's easy to copy-and-paste that script's output into credentials file. Then tools such as _s3tk_ that are unable to process MFA tokens may just use preconfigured profile creds.
- **`identifyS3Bucket.rb`** - This script attempts to identify passed name whether it resolves to a valid AWS S3 Bucket via different means. This script may come handy when revealing S3 buckets hidden behind HTTP proxies. - **`identifyS3Bucket.rb`** - This script attempts to identify passed name whether it resolves to a valid AWS S3 Bucket via different means. This script may come handy when revealing S3 buckets hidden behind HTTP proxies.
- **`pentest-ec2-instance`** - A set of utilities for quick starting, ssh-ing and stopping of a single temporary EC2 instance intended to be used for Web out-of-band tests (SSRF, reverse-shells, dns/http/other daemons). - **`pentest-ec2-instance`** - A set of utilities for quick starting, ssh-ing and stopping of a single temporary EC2 instance intended to be used for Web out-of-band tests (SSRF, reverse-shells, dns/http/other daemons).

View File

@ -0,0 +1,105 @@
#!/bin/bash
#
# This script attempts to quickly enumerate some of the exposed resources
# available given a set of AWS credentials.
# Based on excellent work of Scott Piper:
#   https://duo.com/blog/beyond-s3-exposed-resources-on-aws
#

if [ $# -lt 1 ]; then
  echo "Usage: ./find-exposed-resources.sh <profile-name> [region]"
  echo ""
  echo "If region is not specified, will enumerate all regions."
  exit 1
fi

# AWS CLI profile used for every API call (required first argument).
PROFILE="$1"

# Optional region; an empty value means "enumerate all regions".
REGION="${2:-}"
# Handler invoked on SIGINT (Ctrl-C): report the interruption and abort
# the entire run with a non-zero status.
ctrl_c() {
  printf '%s\n' "[!] User interrupted script execution."
  exit 1
}
trap ctrl_c INT
# Wrapper around the AWS CLI that injects the selected profile (and region,
# when one is set) into every invocation; all remaining arguments are passed
# through verbatim and pagination is disabled.
# Globals: PROFILE (read), REGION (read)
# FIX: quote "$@", "$REGION" and "$PROFILE" so arguments containing spaces
# (e.g. --query expressions) are not word-split (SC2086/SC2068).
function _aws() {
  if [[ -n "$REGION" ]]; then
    #echo "aws --region $REGION --profile $PROFILE $* --no-paginate"
    aws --region "$REGION" --profile "$PROFILE" "$@" --no-paginate
  else
    #echo "aws --profile $PROFILE $* --no-paginate"
    aws --profile "$PROFILE" "$@" --no-paginate
  fi
}
# Report EBS snapshots owned by this account that are restorable by anyone
# ("all"). Prints the raw CLI response under a header; stays silent when the
# response contains an empty result list (detected via the '": []' marker).
ebs_snapshots() {
  local listing
  listing=$(_aws ec2 describe-snapshots --owner-id self --restorable-by-user-ids all)
  if grep -q '": \[\]' <<<"$listing"; then
    return
  fi
  printf '%s\n' "---[ Public EBS Snapshots" "$listing" ""
}
# Report RDS DB snapshots that are shared publicly. Prints the raw CLI
# response under a header; stays silent when the response contains an empty
# result list (detected via the '": []' marker).
rds_snapshots() {
  local listing
  listing=$(_aws rds describe-db-snapshots --snapshot-type public)
  if grep -q '": \[\]' <<<"$listing"; then
    return
  fi
  printf '%s\n' "---[ Public RDS Snapshots" "$listing" ""
}
# Report AMIs owned by this account that are launchable by anyone ("all").
# Prints the raw CLI response under a header; stays silent when the response
# contains an empty result list (detected via the '": []' marker).
function ami_images() {
  out=$(_aws ec2 describe-images --owners self --executable-users all)
  if ! echo "$out" | grep -q '": \[\]'; then
    # BUGFIX: header previously read "Public RDS Snapshots" (copy-paste error).
    echo "---[ Public AMI Images"
    echo "$out"
    echo
  fi
}
# Enumerate every S3 bucket in the account and report whether its bucket
# policy makes it public. Public buckets are highlighted in red, private ones
# in blue. Buckets with no policy at all make the status call fail, which is
# treated as "not public".
# FIX: quote "$bucket" (SC2086) and scope loop variables with `local`.
function s3_buckets() {
  echo "---[ Public S3 Buckets"
  local bucket pub
  for bucket in $(_aws s3api list-buckets --query 'Buckets[*].Name' --output text)
  do
    # Missing bucket policy -> CLI error -> default to 'false'.
    pub=$(_aws s3api get-bucket-policy-status --bucket "$bucket" --query 'PolicyStatus.IsPublic' 2> /dev/null || echo 'false')
    echo -n "IsPublic:"
    if [[ "$pub" == "true" ]]; then
      echo -en "\e[91m"   # red: publicly accessible
    else
      echo -en "\e[34m"   # blue: private
    fi
    echo -e "$pub\e[39m - Bucket: \e[93m$bucket\e[39m"
  done
  echo
}
# ---------------------------------------------------------------------------
# Driver: run the region-scoped checks either in the single region the user
# asked for, or across every region visible to the account; then check S3
# buckets (a global service) once at the end.
# ---------------------------------------------------------------------------
if [[ "$REGION" == "" ]]; then
  # BUGFIX: the region listing previously used a bare `aws` call, which
  # ignored the selected --profile (wrong or missing credentials); route it
  # through _aws. Also only fetch the list when we actually iterate regions.
  regions=$(_aws ec2 describe-regions --query 'Regions[*].RegionName' --output text)
  for region in $regions
  do
    REGION="$region"
    echo "=================== Region: $region ======================"
    echo
    ebs_snapshots
    rds_snapshots
    ami_images
  done
  echo
else
  echo "=================== Region: $REGION ======================"
  echo
  ebs_snapshots
  rds_snapshots
  ami_images
  echo
fi

s3_buckets