def check_s3_buckets(names, threads, cverbose=True):
    """
    Checks for open and restricted Amazon S3 buckets
    """
    if cverbose:
        print("[+] Checking for S3 buckets")
    pname = ['AWS S3 Buckets ', 1]

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of correctly formatted urls
    candidates = []

    # Take each mutated keyword and craft a url with the correct format
    for name in names:
        candidates.append('{}.{}'.format(name, S3_URL))

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(candidates, pname, cverbose, use_ssl=False,
                        callback=print_s3_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time, cverbose)

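# NOTE: utils.get_url_batch is the "batch HTTP processor" that every check in
# this module hands its candidate URLs to; it lives in the project's utils
# module and is not shown here. Purely as a hedged sketch of the contract the
# calls above assume (the name, body, and defaults below are illustrative,
# not the actual utils code; the verbose variants also pass a progress label
# and verbosity flag positionally, omitted here), it behaves roughly like:
def _sketch_get_url_batch(candidates, use_ssl=True, callback=print,
                          threads=5, redir=True):
    """Illustrative sketch only: fetch each candidate URL concurrently and
    pass every reply object to the supplied callback."""
    from concurrent.futures import ThreadPoolExecutor
    import requests

    proto = 'https' if use_ssl else 'http'
    urls = [f'{proto}://{candidate}' for candidate in candidates]

    with ThreadPoolExecutor(max_workers=threads) as executor:
        # Each worker performs a plain GET; redirect handling is controlled
        # by the redir flag, matching the redir=False calls in this module.
        replies = list(executor.map(
            lambda url: requests.get(url, allow_redirects=redir), urls))

    for reply in replies:
        callback(reply)
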
def check_awsapps(names, threads, nameserver, cverbose=True):
    """
    Checks for existence of AWS Apps (i.e. WorkDocs, WorkMail, Connect, etc.)
    """
    if cverbose:
        print("[+] Checking for AWS Apps")
    pname = ['AWS Apps ', 2]

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = []

    # Initialize the list of valid hostnames
    valid_names = []

    # Take each mutated keyword and craft a domain name to look up
    for name in names:
        candidates.append('{}.{}'.format(name, APPS_URL))

    # AWS Apps use DNS sub-domains. First, see which are valid.
    valid_names = utils.fast_dns_lookup(candidates, nameserver, pname,
                                        cverbose, threads=threads)

    for name in valid_names:
        if cverbose:
            utils.printc("    App Found: https://{}\n".format(name), 'orange')

    # Stop the timer
    utils.stop_timer(start_time, cverbose)

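# NOTE: utils.fast_dns_lookup is defined elsewhere in this project. As a
# minimal sketch of the behaviour the DNS-based checks assume (the name,
# body, and the use of dnspython >= 2 are assumptions, not the actual utils
# code; some variants also pass a progress label and verbosity flag, omitted
# here), it could look something like this:
def _sketch_fast_dns_lookup(candidates, nameserver, threads=5):
    """Illustrative sketch only: resolve candidates against a specific
    nameserver and return the subset that actually exist in DNS."""
    from concurrent.futures import ThreadPoolExecutor
    import dns.exception
    import dns.resolver

    res = dns.resolver.Resolver()
    res.nameservers = [nameserver]

    def resolves(name):
        # A successful A-record lookup means the sub-domain is registered
        try:
            res.resolve(name, 'A')
            return True
        except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer,
                dns.resolver.NoNameservers, dns.exception.Timeout):
            return False

    with ThreadPoolExecutor(max_workers=threads) as executor:
        hits = list(executor.map(resolves, candidates))

    return [name for name, hit in zip(candidates, hits) if hit]
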
def check_appspot(names, threads):
    """
    Checks for Google App Engine sites running on appspot.com
    """
    print("[+] Checking for Google App Engine apps")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of correctly formatted urls
    candidates = []

    # Take each mutated keyword and craft a url with the correct format
    for name in names:
        # App Engine project names cannot include a period. We'll exclude
        # those from the global candidates list
        if '.' not in name:
            candidates.append(f'{name}.{APPSPOT_URL}')

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(candidates, use_ssl=False,
                        callback=print_appspot_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

def check_azure_vms(names, nameserver, threads):
    """
    Checks for Azure Virtual Machines
    """
    print("[+] Checking for Azure Virtual Machines")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Pull the regions from a config file
    regions = azure_regions.REGIONS

    print("[*] Testing across {} regions defined in the config file".format(
        len(regions)))

    for region in regions:
        # Initialize the list of domain names to look up
        candidates = [name + '.' + region + '.' + VM_URL for name in names]

        # Azure VMs use DNS sub-domains. If it resolves, it is registered.
        utils.fast_dns_lookup(candidates, nameserver,
                              callback=print_vm_response,
                              threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

def check_fbrtdb(names, threads):
    """
    Checks for Google Firebase RTDB
    """
    print("[+] Checking for Google Firebase Realtime Databases")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of correctly formatted urls
    candidates = []

    # Take each mutated keyword and craft a url with the correct format
    for name in names:
        # Firebase RTDB names cannot include a period. We'll exclude
        # those from the global candidates list
        if '.' not in name:
            candidates.append(f'{name}.{FBRTDB_URL}/.json')

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(candidates, use_ssl=True,
                        callback=print_fbrtdb_response,
                        threads=threads,
                        redir=False)

    # Stop the timer
    utils.stop_timer(start_time)

def brute_force_containers(storage_accounts, brute_list, threads):
    """
    Attempts to find public Blob Containers in valid Storage Accounts

    Here is the URL format to list Azure Blob Container contents:
    <account>.blob.core.windows.net/<container>/?restype=container&comp=list
    """

    # We have a list of valid DNS names that might not be worth scraping,
    # such as disabled accounts or authentication required. Let's quickly
    # weed those out.
    print(f"[*] Checking {len(storage_accounts)} accounts for status before brute-forcing")
    valid_accounts = []
    # Iterate over a copy: removing items from the list we are looping over
    # would silently skip the element after each removal.
    for account in list(storage_accounts):
        try:
            reply = requests.get(f'https://{account}/')
            if 'Server failed to authenticate the request' in reply.reason:
                storage_accounts.remove(account)
            elif 'The specified account is disabled' in reply.reason:
                storage_accounts.remove(account)
            else:
                valid_accounts.append(account)
        except requests.exceptions.ConnectionError as error_msg:
            print(f"    [!] Connection error on https://{account}:")
            print(error_msg)

    # Read the brute force file into memory
    clean_names = utils.get_brute(brute_list, mini=3)

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    print(f"[*] Brute-forcing container names in {len(valid_accounts)} storage accounts")
    for account in valid_accounts:
        print(f"[*] Brute-forcing {len(clean_names)} container names in {account}")

        # Initialize the list of correctly formatted urls
        candidates = []

        # Take each mutated keyword and craft a url with the correct format
        for name in clean_names:
            candidates.append(f'{account}/{name}/?restype=container&comp=list')

        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, use_ssl=True,
                            callback=print_container_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

def check_storage_accounts(names, threads, nameserver, cverbose=True):
    """
    Checks storage account names
    """
    if cverbose:
        print("[+] Checking for Azure Storage Accounts")
    pname = ['Azure Valid Names ', 3]

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = []

    # Initialize the list of valid hostnames
    valid_names = []

    # Take each mutated keyword and craft a domain name to look up.
    # As Azure Storage Accounts can contain only letters and numbers,
    # discard those not matching to save time on the DNS lookups.
    regex = re.compile('[^a-zA-Z0-9]')

    names = list(
        set([
            name.replace('www.', '').replace('www-', '')
            .replace('.com', '').replace('.net', '').replace('.gov', '')
            for name in names
        ]))

    for name in names:
        if not re.search(regex, name):
            candidates.append('{}.{}'.format(name, BLOB_URL))

    if candidates:
        # Azure Storage Accounts use DNS sub-domains. First, see which are valid.
        valid_names = utils.fast_dns_lookup(candidates, nameserver, pname,
                                            cverbose, threads=threads)

    if valid_names:
        pname[0] = 'Azure Storage Acts'
        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(valid_names, pname, cverbose, use_ssl=False,
                            callback=print_account_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time, cverbose)

    # de-dupe the results and return
    return list(set(valid_names))

def check_azure_databases(names, nameserver):
    """
    Checks for Azure Databases
    """
    print("[+] Checking for Azure Databases")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = [name + '.' + DATABASE_URL for name in names]

    # Azure databases use DNS sub-domains. If it resolves, it is registered.
    utils.fast_dns_lookup(candidates, nameserver,
                          callback=print_database_response)

    # Stop the timer
    utils.stop_timer(start_time)

def check_azure_websites(names, nameserver, threads):
    """
    Checks for Azure Websites (PaaS)
    """
    print("[+] Checking for Azure Websites")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = [name + '.' + WEBAPP_URL for name in names]

    # Azure Websites use DNS sub-domains. If it resolves, it is registered.
    utils.fast_dns_lookup(candidates, nameserver,
                          callback=print_website_response,
                          threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

def check_awsapps(names, threads, nameserver):
    """
    Checks for existence of AWS Apps (i.e. WorkDocs, WorkMail, Connect, etc.)
    """
    data = {
        'platform': 'aws',
        'msg': 'AWS App Found:',
        'target': '',
        'access': ''
    }

    print("[+] Checking for AWS Apps")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = []

    # Initialize the list of valid hostnames
    valid_names = []

    # Take each mutated keyword and craft a domain name to look up
    for name in names:
        candidates.append(f'{name}.{APPS_URL}')

    # AWS Apps use DNS sub-domains. First, see which are valid.
    valid_names = utils.fast_dns_lookup(candidates, nameserver,
                                        threads=threads)

    for name in valid_names:
        data['target'] = f'https://{name}'
        data['access'] = 'protected'
        utils.fmt_output(data)

    # Stop the timer
    utils.stop_timer(start_time)

def check_storage_accounts(names, threads, nameserver):
    """
    Checks storage account names
    """
    print("[+] Checking for Azure Storage Accounts")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = []

    # Initialize the list of valid hostnames
    valid_names = []

    # Take each mutated keyword and craft a domain name to look up.
    # As Azure Storage Accounts can contain only letters and numbers,
    # discard those not matching to save time on the DNS lookups.
    regex = re.compile('[^a-zA-Z0-9]')
    for name in names:
        if not re.search(regex, name):
            candidates.append(f'{name}.{BLOB_URL}')

    # Azure Storage Accounts use DNS sub-domains. First, see which are valid.
    valid_names = utils.fast_dns_lookup(candidates, nameserver,
                                        threads=threads)

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(valid_names, use_ssl=False,
                        callback=print_account_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

    # de-dupe the results and return
    return list(set(valid_names))

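# Usage sketch (hedged; the driver that wires these checks together is not
# shown in this section): the de-duped account names returned above are the
# natural input to brute_force_containers below. The nameserver value is an
# example, not a project default:
#
#     accounts = check_storage_accounts(names, threads=5,
#                                       nameserver='8.8.8.8')
#     if accounts:
#         brute_force_containers(accounts, brute_list, threads=5)
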
def check_appspot(names, threads, cverbose=True):
    """
    Checks for Google App Engine sites running on appspot.com
    """
    if cverbose:
        print("[+] Checking for Google App Engine apps")
    pname = ['Google Engine Apps', 9]

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of correctly formatted urls
    candidates = []

    names = list(
        set([
            name.replace('www.', '').replace('www-', '')
            .replace('.com', '').replace('.net', '').replace('.gov', '')
            for name in names
        ]))

    # Take each mutated keyword and craft a url with the correct format
    for name in names:
        # App Engine project names cannot include a period. We'll exclude
        # those from the global candidates list
        if '.' not in name:
            candidates.append('{}.{}'.format(name, APPSPOT_URL))

    if candidates:
        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, pname, cverbose, use_ssl=False,
                            callback=print_appspot_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time, cverbose)

def check_awsapps(names, threads, nameserver):
    """
    Checks for existence of AWS Apps (i.e. WorkDocs, WorkMail, Connect, etc.)
    """
    print("[+] Checking for AWS Apps")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of domain names to look up
    candidates = []

    # Initialize the list of valid hostnames
    valid_names = []

    # Take each mutated keyword and craft a domain name to look up
    for name in names:
        candidates.append('{}.{}'.format(name, APPS_URL))

    # AWS Apps use DNS sub-domains. First, see which are valid.
    valid_names = utils.fast_dns_lookup(candidates, nameserver,
                                        threads=threads)

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(valid_names, use_ssl=False,
                        callback=print_awsapps_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

    # de-dupe the results and return
    return list(set(valid_names))

def check_gcp_buckets(names, threads):
    """
    Checks for open and restricted Google Cloud buckets
    """
    print("[+] Checking for Google buckets")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Initialize the list of correctly formatted urls
    candidates = []

    # Take each mutated keyword and craft a url with the correct format
    for name in names:
        candidates.append(f'{GCP_URL}/{name}')

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(candidates, use_ssl=False,
                        callback=print_bucket_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

def check_functions(names, brute_list, quickscan, threads):
    """
    Checks for Google Cloud Functions running on cloudfunctions.net

    This is a two-part process. First, we want to find region/project combos
    that have existing Cloud Functions. The URL for a function looks like
    this:
    https://[ZONE]-[PROJECT-ID].cloudfunctions.net/[FUNCTION-NAME]

    We look for a 302 in [ZONE]-[PROJECT-ID].cloudfunctions.net. That means
    there are some functions defined in that region. Then, we brute force a
    list of possible function names there.

    See gcp_regions.py to define which regions to check. The tool currently
    defaults to only 1 region, so you should really modify it for best results.
    """
    print("[+] Checking for project/zones with Google Cloud Functions.")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Pull the regions from a config file
    regions = gcp_regions.REGIONS

    print(f"[*] Testing across {len(regions)} regions defined in the config file")

    for region in regions:
        # Initialize the list of initial URLs to check
        candidates = [region + '-' + name + '.' + FUNC_URL for name in names]

        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, use_ssl=False,
                            callback=print_functions_response1,
                            threads=threads,
                            redir=False)

    # Return from function if we have not found any valid combos
    if not HAS_FUNCS:
        utils.stop_timer(start_time)
        return

    # Also bail out if doing a quick scan
    if quickscan:
        return

    # If we did find something, we'll use the brute list. This will allow
    # people to provide a separate fuzzing list if they choose.
    print(f"[*] Brute-forcing function names in {len(HAS_FUNCS)} project/region combos")

    # Load brute list in memory, based on allowed chars/etc
    brute_strings = utils.get_brute(brute_list)

    # The global was built in a previous function. We only want to brute force
    # project/region combos that we know have existing functions defined
    for func in HAS_FUNCS:
        print(f"[*] Brute-forcing {len(brute_strings)} function names in {func}")

        # Initialize the list of initial URLs to check. Strip out the HTTP
        # protocol first, as that is handled in the utility
        func = func.replace("http://", "")

        # Noticed weird behaviour with functions when a slash is not appended.
        # Works for some, but not others. However, appending a slash seems to
        # get consistent results. Might need further validation.
        candidates = [func + brute + '/' for brute in brute_strings]

        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, use_ssl=False,
                            callback=print_functions_response2,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

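# NOTE: print_functions_response1 and the HAS_FUNCS global are defined
# elsewhere in this file and are not shown in this section. A minimal sketch
# of the 302 detection the docstring above describes (an assumption about the
# real callback, not its actual body) might be:
def _sketch_functions_response1(reply):
    """Illustrative sketch only: a 302 from [ZONE]-[PROJECT-ID] means some
    functions exist there, so remember the combo for later brute-forcing."""
    if reply.status_code == 302:
        # reply.url carries the region/project combo that responded
        HAS_FUNCS.append(reply.url)
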
def brute_force_containers(storage_accounts, brute_list, threads):
    """
    Attempts to find public Blob Containers in valid Storage Accounts

    Here is the URL format to list Azure Blob Container contents:
    <account>.blob.core.windows.net/<container>/?restype=container&comp=list
    """

    # We have a list of valid DNS names that might not be worth scraping,
    # such as disabled accounts or authentication required. Let's quickly
    # weed those out.
    print("[*] Checking {} accounts for status before brute-forcing".format(
        len(storage_accounts)))
    valid_accounts = []
    # Iterate over a copy: removing items from the list we are looping over
    # would silently skip the element after each removal.
    for account in list(storage_accounts):
        reply = requests.get('https://{}/'.format(account))
        if 'Server failed to authenticate the request' in reply.reason:
            storage_accounts.remove(account)
        elif 'The specified account is disabled' in reply.reason:
            storage_accounts.remove(account)
        else:
            valid_accounts.append(account)

    # Read the brute force file into memory
    with open(brute_list, encoding="utf8", errors="ignore") as infile:
        names = infile.read().splitlines()

    # Clean up the names to be usable for containers
    banned_chars = re.compile('[^a-z0-9-]')
    clean_names = []
    for name in names:
        name = name.lower()
        name = banned_chars.sub('', name)
        if 63 >= len(name) >= 3:
            if name not in clean_names:
                clean_names.append(name)

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    print("[*] Brute-forcing container names in {} storage accounts".format(
        len(valid_accounts)))
    for account in valid_accounts:
        print("[*] Brute-forcing {} container names in {}".format(
            len(clean_names), account))

        # Initialize the list of correctly formatted urls
        candidates = []

        # Take each mutated keyword and craft a url with the correct format
        for name in clean_names:
            candidates.append('{}/{}/?restype=container&comp=list'.format(
                account, name))

        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, use_ssl=True,
                            callback=print_container_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

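# NOTE: Other variants in this section call utils.get_brute(brute_list,
# mini=3) instead of inlining the cleanup above. Judging from that inline
# version, a sketch of the helper (the signature is inferred from the calls
# in this section; the body is an assumption) could look like:
def _sketch_get_brute(brute_list, mini=3, maxi=63):
    """Illustrative sketch only: read a wordlist and return unique,
    lowercased names with banned characters stripped and length bounded."""
    import re

    # Container names allow only lowercase letters, digits, and hyphens
    banned_chars = re.compile('[^a-z0-9-]')
    clean_names = []
    with open(brute_list, encoding='utf8', errors='ignore') as infile:
        for line in infile:
            name = banned_chars.sub('', line.strip().lower())
            if maxi >= len(name) >= mini and name not in clean_names:
                clean_names.append(name)
    return clean_names
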
def brute_force_containers(storage_accounts, brute_list, threads,
                           cverbose=True):
    """
    Attempts to find public Blob Containers in valid Storage Accounts

    Here is the URL format to list Azure Blob Container contents:
    <account>.blob.core.windows.net/<container>/?restype=container&comp=list
    """

    # We have a list of valid DNS names that might not be worth scraping,
    # such as disabled accounts or authentication required. Let's quickly
    # weed those out.
    if cverbose:
        print("[*] Checking {} accounts for status before brute-forcing".format(
            len(storage_accounts)))
    pname = ['Azure Containers', 4]
    valid_accounts = []
    # Iterate over a copy: removing items from the list we are looping over
    # would silently skip the element after each removal.
    for account in list(storage_accounts):
        reply = requests.get('https://{}/'.format(account))
        if 'Server failed to authenticate the request' in reply.reason:
            storage_accounts.remove(account)
        elif 'The specified account is disabled' in reply.reason:
            storage_accounts.remove(account)
        else:
            valid_accounts.append(account)

    # Read the brute force file into memory
    clean_names = utils.get_brute(brute_list, mini=3)

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    if cverbose:
        print("[*] Brute-forcing container names in {} storage accounts".format(
            len(valid_accounts)))
    for c, account in enumerate(valid_accounts):
        pname[1] = '4.{}'.format(c)
        if cverbose:
            print("[*] Brute-forcing {} container names in {}".format(
                len(clean_names), account))

        # Initialize the list of correctly formatted urls
        candidates = []

        # Take each mutated keyword and craft a url with the correct format
        for name in clean_names:
            candidates.append('{}/{}/?restype=container&comp=list'.format(
                account, name))

        # Send the valid names to the batch HTTP processor
        utils.get_url_batch(candidates, pname, cverbose, use_ssl=True,
                            callback=print_container_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time, cverbose)