def _process_source(self) -> Block:
    """Resolve the source's ASN list to IPs via RADB.

    Returns:
        Block: IPs resolved from the ASNs, or an empty Block if the
        source could not be retrieved (best-effort source).
    """
    try:
        # Get the source data
        asn_list = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    new_ips = get_ips_radb(asn_list, self.args.threads, self.excludes)
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Parse the AWS ip-ranges JSON feed into a Block of IPs/CIDRs.

    Returns:
        Block: normalized IP prefixes, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        aws_ips = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Feed shape: {"prefixes": [{"ip_prefix": "..."}, ...]}
    ips_raw = (n['ip_prefix'] for n in aws_ips['prefixes'])
    # Also drop '' results from fix_ip (previously only the raw value was
    # filtered), matching the Oracle source's behavior
    fixed = (fix_ip(ip) for ip in ips_raw if ip != '')
    ips = {ip for ip in fixed if ip != ''}
    return Block(ips=ips)
def _process_source(self) -> Block:
    """Resolve the source's ASN list to IPs via the BGPView API.

    Returns:
        Block: IPs resolved from the ASNs, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        asn_list = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    new_ips = get_ips_bgpview(
        asn_list, self.args.threads, self.args.exclude, self.headers, self.timeout)
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Parse an external hostname list file into a Block of hosts.

    Returns:
        Block: non-empty hostnames from the source, or an empty Block
        on fetch failure.
    """
    try:
        # Get the source data
        hostnames = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    print("[*]\tParsing external source: %s..." % self._file)
    new_hosts = {h for h in hostnames if h != ''}
    return Block(hosts=new_hosts)
def _process_source(self) -> Block:
    """Parse the Tor exit-node list into a Block of exit IPs.

    Returns:
        Block: exit-node IPs, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        tor_ips = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Relevant lines look like: "ExitAddress 1.2.3.4 <timestamp>"
    exit_lines = (l.strip() for l in tor_ips if 'ExitAddress' in l)
    exit_addresses = (l.split(' ')[1] for l in exit_lines)
    # Drop '' results from fix_ip (previously unfiltered), matching the
    # other IP sources
    fixed = (fix_ip(addr) for addr in exit_addresses)
    return Block(ips={ip for ip in fixed if ip != ''})
def _process_source(self) -> Block:
    """Parse an external IP list file into a Block of IPs.

    Returns:
        Block: normalized IPs, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        ips = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    print("[*]\tParsing external source: %s..." % self._file)
    # Also drop '' results from fix_ip (previously only the raw value was
    # filtered), matching the Oracle source's behavior
    fixed = (fix_ip(ip) for ip in ips if ip != '')
    new_ips = {ip for ip in fixed if ip != ''}
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Parse an external User-Agent list file into a Block of agents.

    Returns:
        Block: non-empty agent strings, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        agents = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    print("[*]\tParsing external source: %s..." % self._file)
    new_agents = {a for a in agents if a != ''}
    return Block(agents=new_agents)
def _process_source(self) -> Block:
    """Parse the Microsoft O365 endpoints JSON feed.

    The feed mixes URLs and IPs per service area, so this source updates
    ``self.host_list`` and ``self.ip_list`` in place and returns an empty
    Block rather than new data.

    Returns:
        Block: always empty; results are tracked on ``self``.
    """
    try:
        # Get the source data
        o365_networks = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()

    # Loop over the JSON objects; each entry may carry 'ips', 'urls',
    # both, or neither (removed the unused o365_ips/o365_urls locals)
    for network in o365_networks:
        # If we have URLs, lets document them
        for url in network.get('urls', []):
            # Fix wildcard URL's to work with regex ('*' glob -> '.*')
            if url.startswith('*'):
                url = '.' + url
            url = '^%s$' % url  # Add regex style to host
            if url not in self.host_list and url != '':
                self.host_list.append(url)

        # If we have IPs, lets document them
        for ip in network.get('ips', []):
            if ':' in ip:
                continue  # Ignore v6
            # Convert /31 and /32 CIDRs to single IP
            # (regex literals are now raw strings; '\.' in a normal string
            # is an invalid escape sequence)
            ip = re.sub(r'/3[12]', '', ip)
            # Convert lower-bound CIDRs into /24 by default
            # This is assuming that if a portion of the net
            # was seen, we want to avoid the full netblock
            ip = re.sub(r'\.[0-9]{1,3}/(2[456789]|30)', '.0/24', ip)
            # Check if the current IP/CIDR has been seen
            if ip not in self.ip_list and ip != '':
                # Keep track of all things added
                self.ip_list.append(ip)

    return Block()
def _process_source(self) -> Block:
    """Add static hostnames gathered from malware kits and other sources.

    Returns:
        Block: non-empty hostnames, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        hostnames = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Add IPs obtained via Malware Kit's and other sources
    print(
        "[*]\tAdding static Hostnames obtained via Malware Kit's and other sources..."
    )
    return Block(hosts={h for h in hostnames if h != ''})
def _process_source(self) -> Block:
    """Resolve an ASN list via both RADB and BGPView and union the results.

    Returns:
        Block: the combined IP set, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        asn_list = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Normalize ASNs (e.g. "as1234" -> "AS1234") before querying
    asn_list = [x.upper() for x in asn_list]
    ips = get_ips_radb(
        asn_list, self.threads, self.excludes) | get_ips_bgpview(
            asn_list, self.threads, self.excludes, self.headers, self.timeout)
    return Block(ips=ips)
def _process_source(self) -> Block:
    """Add static IPs gathered from malware kits and other sources.

    Returns:
        Block: normalized IPs, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        ips = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Add IPs obtained via Malware Kit's and other sources
    print(
        "[*]\tAdding static IPs obtained via Malware Kit's and other sources..."
    )
    # Also drop '' results from fix_ip (previously only the raw value was
    # filtered), matching the Oracle source's behavior
    fixed = (fix_ip(ip) for ip in ips if ip != '')
    new_ips = {ip for ip in fixed if ip != ''}
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Scrape Azure subnet CIDRs from the downloaded XML/HTML source.

    Returns:
        Block: normalized subnets, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        azure_subnets = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()

    # Relevant lines look like: <IpRange Subnet="x.x.x.x/yy" ...>
    # Compile once instead of per line
    quoted_value = re.compile(r'"(.+?)"')

    def get_ip(html):
        # NOTE(review): assumes every 'IpRange Subnet' line contains a
        # quoted value; a non-matching line would raise AttributeError
        # here — confirm against the feed format
        return quoted_value.search(html.strip()).group(1)

    subnets = (s for s in azure_subnets if 'IpRange Subnet' in s)
    new_ips_raw = (get_ip(h) for h in subnets)
    # Also drop '' results from fix_ip (previously only the raw value was
    # filtered), matching the Oracle source's behavior
    fixed = (fix_ip(ip) for ip in new_ips_raw if ip != '')
    new_ips = {ip for ip in fixed if ip != ''}
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Parse Oracle Cloud's public IP ranges JSON into a Block.

    Returns:
        Block: normalized CIDRs, or an empty Block on fetch failure.
    """
    try:
        # Get the source data
        oracle_networks = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    # Feed shape: {"regions": [{"cidrs": [{"cidr": "..."}, ...]}, ...]}
    new_ips: Set[str] = set()
    for region in oracle_networks['regions']:
        for cidr in region['cidrs']:
            ip = fix_ip(cidr['cidr'])
            if ip != '':
                new_ips.add(ip)
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Resolve GoogleCloud netblocks via DNS TXT records into IPs.

    Returns:
        Block: IPs pulled from every referenced netblock, or an empty
        Block on fetch failure.
    """
    try:
        # Get the source data
        google_netblocks = self._get_source()
    except Exception:
        # BUG FIX: this previously returned the Block *class*
        # (`return Block`), not an instance — every caller expecting a
        # Block object would break on a fetch failure.
        # Also narrowed from a bare `except:`.
        return Block()

    def get_netblock_ip(entry):
        # Only 'ip4:<cidr>' entries are kept; everything else maps to ''
        if 'ip4' not in entry:
            return ''
        return fix_ip(entry.split(':')[-1])

    def pull_netblock(netblock):
        # Query each GoogleCloud netblock's TXT record
        netblock_ips = self.resolver.query(netblock, 'txt')
        netblock_ips = netblock_ips.response.answer[0][0].strings[0].decode(
            'utf-8')
        ips_gen = (get_netblock_ip(block) for block in netblock_ips.split(' '))
        return [ip for ip in ips_gen if ip != '']

    # Get netblocks: the 'include:<name>' entries in the top-level record
    netblocks = (n.split(':')[-1] for n in google_netblocks.split(' ')
                 if 'include' in n)
    # Pull and parse IPs from every netblock, flattened into one set
    new_ips = {ip for nb in netblocks for ip in pull_netblock(nb)}
    return Block(ips=new_ips)
def _process_source(self) -> Block:
    """Parse @curi0usJack's .htaccess data (YAML) into IPs and user-agents.

    Returns:
        Block: IPs (minus excluded groups) plus user-agents, or an empty
        Block on fetch failure.
    """
    try:
        # Get the source data
        htaccess_file = self._get_source()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate
        return Block()
    exclude = self.args.exclude
    ips_list: Set[str] = set()
    # NOTE(review): `load` here is yaml.load with a full Loader on fetched
    # data — consider yaml.safe_load if the source is not fully trusted
    htaccess = load(htaccess_file, Loader=Loader)
    # Keys are underscore-joined tags (e.g. "tor_exits"); skip any group
    # whose tag matches a user-supplied exclusion
    for name, obj in htaccess['ips'].items():
        names = name.split('_')
        if not any(n in exclude for n in names):
            ips_list |= set(obj)
    agent_list = set(htaccess['useragents'])
    return Block(ips=ips_list, agents=agent_list)
def __call__(self, args, block: Block) -> None:
    """Render the merged Block into a finished Apache redirect.rules file.

    Writes header comments, the sink-hole rules for IPs/hosts/user-agents,
    a final catch-all RewriteRule, and commented example rules the operator
    can adapt for file/path redirection and proxying.

    :param args:  parsed CLI namespace; only ``args.destination`` is read here
    :param block: merged blacklist data (IPs, hostnames, user-agents)
    """
    ips, hosts, agents = block.to_list()

    #> ----------------------------------------------------------------------------
    # Initialize redirect.rules file
    # Add header comments to the redirect.rules file headers
    self.add_comment("\n")
    self.add_comment("\n\n")

    # Add updated/modified comments from @curi0usJack's .htaccess
    self.add_comment(" Note: This currently requires Apache 2.4+\n")
    self.add_comment("\n")
    self.add_comment(" Example Usage:\n")
    self.add_comment(" Save file as /etc/apache2/redirect.rules\n")
    self.add_comment(
        " Within your site's Apache conf file (in /etc/apache2/sites-avaiable/),\n"
    )
    self.add_comment(" put the following statement near the bottom:\n")
    self.add_comment(" \tInclude /etc/apache2/redirect.rules\n")
    self.add_comment("\n\n")

    # Add a note to the user to keep the protocol used when setting the redirect target
    self.add_comment("\n")
    self.add_comment(
        " If modifying the 'REDIR_TARGET' value, please ensure to include the protocol\n"
    )
    # NOTE(review): this literal was split across lines in the extracted
    # source; reconstructed as a single string
    self.add_comment(" e.g. https://google.com OR http://my.domain/test.txt\n")
    self.add_comment("\n")

    self.write_redirect_header(args.destination)
    # NOTE(review): the boolean arguments appear to tell each writer
    # whether the *following* rule group is empty — confirm against
    # write_ip_rules / write_agent_rules
    self.write_ip_rules(ips, len(agents) == 0)
    self.write_agent_rules(agents, len(hosts) == 0)
    self.write_host_rules(hosts, True)

    # Final sink-hole: anything not matched above gets redirected
    self.write('RewriteRule\t\t\t^.*$\t\t\t${REDIR_TARGET} [L,R=302]\n')

    #> -----------------------------------------------------------------------------
    # Rule clean up and file finalization

    # Now that we have written the sink-hole rules, let's add some example rules for
    # the user to reference/use for file/path handling and a catch-all redirection

    # Handle redirection of a file/path to its final file/path destination
    self.add_comment(
        " Redirect a file/path to a target backend file/path\n")
    self.add_comment(
        " -> Example: Redirect displayed path to raw path to grab 'example.zip'\n"
    )
    self.add_comment(
        " -> Note: This should come after all IP/Host/User-Agent blacklisting\n\n"
    )
    self.add_comment(
        " RewriteRule\t\t\t\t^/test/files/example.zip(.*)$\t\t/example.zip\t[L,R=302]\n\n"
    )

    # Handle redirection for a file/path to another server
    self.add_comment(
        " Redirect and proxy a file/path to another system's file/path\n")
    self.add_comment(
        " -> Example: Redirect and proxy displayed path to another system via the same path\n"
    )
    self.add_comment(
        " -> Note: You can also specify the URI explicitly as needed\n")
    self.add_comment(
        " -> Note: This should come after all IP/Host/User-Agent blacklisting\n\n"
    )
    self.add_comment(
        " RewriteRule\t\t\t\t^/test/files/example.zip(.*)$\t\thttps://192.168.10.10%{REQUEST_URI}\t[P]\n\n"
    )

    # Create a final, catch-all redirection
    self.add_comment(" Catch-all redirect\n")
    self.add_comment(
        " -> Example: Catch anything other than '/example.zip' and redirect\n"
    )
    self.add_comment(
        " -> Note: This should be the last item in the redirect.rules file as a final catch-all\n\n"
    )
    self.add_comment(
        " RewriteRule\t\t\t\t^((?!\\/example\\.zip).)*$\t\t${REDIR_TARGET}\t[L,R=302]\n"
    )
print_header() # Start timer start = time.perf_counter() # Print exclusion count # Only show count in case a large list was passed in excludes = get_exclusions(args.exclude_file) if len(excludes) > 0: print('[+]\tExclusion List: %d' % len(excludes)) print('[*]\tFull exclusion list can be found at the end of the') print(' \tredirect.rules file.\n') http_headers = {'User-Agent': args.user_agent} blocklist = Block() #> ----------------------------------------------------------------------------- # Write @curi0usJack's .htaccess rules: https://gist.github.com/curi0usJack/971385e8334e189d93a6cb4671238b10 if 'htaccess' not in excludes: # Exclude keyword source = Source('htaccess', [http_headers, args.timeout, args]) blocklist |= source.process_data() #> ----------------------------------------------------------------------------- # Add static User-Agent list # __static__ # Exclude keywords if all(x not in excludes for x in ['agents', 'user-agents', 'static']): source = Source('user-agents', []) blocklist |= source.process_data()