def check_azure_vms(names, nameserver):
    """
    Checks for Azure Virtual Machines

    Azure VMs are exposed as region-specific DNS sub-domains; any candidate
    hostname that resolves is a registered VM. Every region listed in the
    config module is swept in turn.
    """
    print("[+] Checking for Azure Virtual Machines")

    # Timer so elapsed time can be reported at the end
    start_time = utils.start_timer()

    # Regions come from the azure_regions config module
    regions = azure_regions.REGIONS
    print("[*] Testing across {} regions defined in the config file".format(
        len(regions)))

    for region in regions:
        # One candidate hostname per mutated keyword for this region
        candidates = ['{}.{}.{}'.format(name, region, VM_URL)
                      for name in names]

        # A successful DNS resolution means the VM name is registered
        utils.fast_dns_lookup(candidates, nameserver,
                              callback=print_vm_response)

    utils.stop_timer(start_time)
def brute_force_containers(storage_accounts, brute_list, threads):
    """
    Attempts to find public Blob Containers in valid Storage Accounts

    Here is the URL format to list Azure Blog Container contents:
    <account>.blob.core.windows.net/<container>/?restype=container&comp=list
    """
    # Read the brute force file into memory. Explicit encoding so the
    # wordlist decodes the same way on every platform.
    with open(brute_list, encoding='utf-8') as infile:
        names = infile.read().splitlines()

    print("[+] Brute-forcing {} container names in each valid account".format(
        len(names)))

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    for account in storage_accounts:
        # Take each mutated keyword and craft a container-listing URL
        # with the correct format
        candidates = ['{}/{}/?restype=container&comp=list'.format(account,
                                                                  name)
                      for name in names]

        # Send the candidate URLs to the batch HTTP processor
        utils.get_url_batch(candidates, use_ssl=True,
                            callback=print_container_response,
                            threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)
def check_azure_databases(names, nameserver):
    """
    Checks for Azure Databases

    Azure databases are exposed as DNS sub-domains; any candidate hostname
    that resolves is a registered database.
    """
    print("[+] Checking for Azure Databases")

    # Timer so elapsed time can be reported at the end
    start_time = utils.start_timer()

    # One candidate hostname per mutated keyword
    candidates = ['{}.{}'.format(name, DATABASE_URL) for name in names]

    # A successful DNS resolution means the database name is registered
    utils.fast_dns_lookup(candidates, nameserver,
                          callback=print_database_response)

    utils.stop_timer(start_time)
def check_s3_buckets(names, threads):
    """
    Checks for open and restricted Amazon S3 buckets
    """
    print("[+] Checking for S3 buckets")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # Take each mutated keyword and craft a url with the correct format
    candidates = ['{}.{}'.format(name, S3_URL) for name in names]

    # Send the candidate urls to the batch HTTP processor
    utils.get_url_batch(candidates, use_ssl=False,
                        callback=print_s3_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)
def check_storage_accounts(names, threads, nameserver):
    """
    Checks storage account names

    Returns the list of candidate hostnames that resolved via DNS, so
    callers can run follow-up checks (e.g. container brute-forcing).
    """
    print("[+] Checking for Azure Storage Accounts")

    # Start a counter to report on elapsed time
    start_time = utils.start_timer()

    # As Azure Storage Accounts can contain only letters and numbers,
    # discard mutations with other characters to save time on the DNS
    # lookups.
    regex = re.compile('[^a-zA-Z0-9]')
    candidates = ['{}.{}'.format(name, BLOB_URL)
                  for name in names if not regex.search(name)]

    # Azure Storage Accounts use DNS sub-domains. First, see which are valid.
    valid_names = utils.fast_dns_lookup(candidates, nameserver)

    # Send the valid names to the batch HTTP processor
    utils.get_url_batch(valid_names, use_ssl=False,
                        callback=print_account_response,
                        threads=threads)

    # Stop the timer
    utils.stop_timer(start_time)

    return valid_names