diff --git a/README.md b/README.md
index 992a170..544af34 100644
--- a/README.md
+++ b/README.md
@@ -39,6 +39,12 @@ output all ec2 public IPs from all profiles in ~/.aws/config
 ### aws-route53-saml2aws-all-account-backup.sh
 Exports all route53 zones across all AWS accounts
 
+### aws-s3-dl-list-uris-multithread.py
+Downloads a user-provided list of s3 objects using multithreading (100 concurrent downloads at a time)
+
+### aws-s3-find-public-objects-in-s3-buckets.py
+Given a file with a list of s3 buckets, enumerates every object in each bucket and outputs which objects are publicly accessible.
+
 ### generate_lambdaguard_report_all_profiles.sh
 Generates a lambdaguard report for all profiles listed in ~/.aws/config
 
diff --git a/working-scripts/aws-list-all-public-objects-in-s3-buckets.py b/working-scripts/aws-s3-find-public-objects-in-s3-buckets.py
similarity index 90%
rename from working-scripts/aws-list-all-public-objects-in-s3-buckets.py
rename to working-scripts/aws-s3-find-public-objects-in-s3-buckets.py
index 038c864..d27138d 100644
--- a/working-scripts/aws-list-all-public-objects-in-s3-buckets.py
+++ b/working-scripts/aws-s3-find-public-objects-in-s3-buckets.py
@@ -1,7 +1,11 @@
 # place all bucket names, one per line in a file called bucket_names.txt
 # run the script (the script assumes you are using the default aws profile)
 # all public objects will be written to a file called public-objects.txt
-
+#
+# This script assumes your list of buckets is in a file named "bucket_names.txt"
+# and that you have a default AWS profile configured in ~/.aws/config
+# usage: python3 aws-s3-find-public-objects-in-s3-buckets.py
+#
 import boto3
 import os
 import threading
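For context on the renamed script, below is a minimal sketch of how an ACL-based public-object check could look with boto3. It is not the repository's actual implementation: the `bucket_names.txt` and `public-objects.txt` file names come from the script's header comments above, while the use of `get_object_acl` and the `AllUsers` grantee URI are assumptions about how "publicly accessible" is determined.

```python
import boto3

# Grantee URI that S3 uses for "everyone" (anonymous access).
ALL_USERS_URI = "http://acs.amazonaws.com/groups/global/AllUsers"

s3 = boto3.client("s3")  # uses the default profile / credential chain

# Read bucket names, one per line, skipping blanks.
with open("bucket_names.txt") as f:
    buckets = [line.strip() for line in f if line.strip()]

with open("public-objects.txt", "w") as out:
    for bucket in buckets:
        # Page through every object in the bucket.
        for page in s3.get_paginator("list_objects_v2").paginate(Bucket=bucket):
            for obj in page.get("Contents", []):
                acl = s3.get_object_acl(Bucket=bucket, Key=obj["Key"])
                # Treat any READ/FULL_CONTROL grant to AllUsers as public.
                public = any(
                    g.get("Grantee", {}).get("URI") == ALL_USERS_URI
                    and g.get("Permission") in ("READ", "FULL_CONTROL")
                    for g in acl.get("Grants", [])
                )
                if public:
                    out.write(f"s3://{bucket}/{obj['Key']}\n")
```

The sketch is single-threaded for clarity; the actual script imports `threading`, which would matter for large buckets where one `GetObjectAcl` call per object dominates the runtime.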