def test_remove_all_matching_multiple(tmpdir):
    """Removing by a shared alias should drop every entry that carries it."""
    path = tmpdir.mkdir("etc").join("hosts")
    path.write("1.2.3.4\tfoo-1 foo\n"
               "2.3.4.5\tfoo-2 foo\n")
    entries = Hosts(path=path.strpath)
    entries.remove_all_matching(name="foo")
    entries.write()
    # both lines carried the alias "foo", so the written file must be empty
    assert not path.read()
def set_hosts(domains):
    """Map every name in *domains* to 127.0.0.1 in the system hosts file.

    Raises a ClickException with a sudo hint when the hosts file is not
    writable by the current user.
    """
    try:
        system_hosts = Hosts()
        loopback_entry = HostsEntry(entry_type='ipv4',
                                    address='127.0.0.1',
                                    names=domains)
        system_hosts.add([loopback_entry])
        system_hosts.write()
    except UnableToWriteHosts:
        raise ClickException(
            'Unable to write to hosts file. Please call command with "sudo".')
def test_remove_all_matching_failure(tmpdir):
    """Calling remove_all_matching() with no criteria must raise ValueError."""
    with pytest.raises(ValueError):
        path = tmpdir.mkdir("etc").join("hosts")
        path.write("1.2.3.4\tfoo-1 foo\n"
                   "2.3.4.5\tfoo-2 foo\n")
        entries = Hosts(path=path.strpath)
        # neither an address nor a name is supplied -> ValueError expected
        entries.remove_all_matching()
        entries.write()
def test_gke():
    """End-to-end smoke test: authenticate to a GKE cluster with a service
    account, pin the cluster endpoint as 'kubernetes' in the hosts file for
    SSL hostname verification, then list all pod IPs via the Kubernetes API.

    Side effects: writes the cluster CA cert to ./cluster_ca_cert and
    rewrites the system hosts file (requires appropriate privileges).
    """
    project_id = "wn-cloud-275704"
    zone = "us-central1-c"
    cluster_id = "wn-cloud-portal-qa"
    # Use a service account configured in GCP console,
    # authenticating with a JSON key
    credentials = service_account.Credentials \
        .from_service_account_file('wn-cloud-275704-24c84de6f442.json')
    print('Authentication done------------------------------>')
    # Get cluster details
    cluster_manager_client = ClusterManagerClient(credentials=credentials)
    cluster = cluster_manager_client.get_cluster(project_id=project_id,
                                                 zone=zone,
                                                 cluster_id=cluster_id)
    print('cluster info received------------------------------>')
    # Save cluster certificate for SSL verification.
    # BUG FIX: b64decode() returns bytes, so the file must be opened in
    # binary mode ('wb'); the original opened it in text mode, which raises
    # TypeError on write. The context manager also guarantees the handle is
    # closed even if the write fails.
    cert = base64.b64decode(cluster.master_auth.cluster_ca_certificate)
    cert_filename = 'cluster_ca_cert'
    with open(cert_filename, 'wb') as cert_file:
        cert_file.write(cert)
    # Configure hostname for SSL verification
    hosts = Hosts()
    hosts.add([
        HostsEntry(entry_type='ipv4',
                   address=cluster.endpoint,
                   names=['kubernetes'])
    ])
    hosts.write()
    # Get a token with the scopes required by GKE
    kubeconfig_creds = credentials.with_scopes([
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/userinfo.email'
    ])
    auth_req = google.auth.transport.requests.Request()
    kubeconfig_creds.refresh(auth_req)
    configuration = client.Configuration()
    # 'kubernetes' resolves to the cluster endpoint via the hosts entry above
    configuration.host = "https://kubernetes"
    configuration.ssl_ca_cert = cert_filename
    kubeconfig_creds.apply(configuration.api_key)
    client.Configuration.set_default(configuration)
    v1 = client.CoreV1Api()
    print("Listing pods with their IPs:")
    pods = v1.list_pod_for_all_namespaces(watch=False)
    for i in pods.items:
        print("%s\t%s\t%s" %
              (i.status.pod_ip, i.metadata.namespace, i.metadata.name))
def test_hosts_write_to_custom_path(tmpdir):
    """A Hosts instance read from one path can be written to another path."""
    source_file = tmpdir.mkdir("etc").join("hosts")
    source_file.write("6.6.6.6\texample.com\n")
    hosts = Hosts(path=source_file.strpath)
    target_file = tmpdir.mkdir("tmp").join("hosts2")
    hosts.write(path=target_file.strpath)
    # re-read from the alternate location to prove the entry was persisted
    reloaded = Hosts(path=target_file.strpath)
    assert reloaded.count() == 1
    assert hosts.exists(address='6.6.6.6', names=['example.com'])
def write_hosts(ip_address, domain_name, alias_name):
    """Point *domain_name* and *alias_name* at *ip_address* in the OS hosts file.

    Any pre-existing entries for the domain are removed first, so repeated
    calls replace rather than accumulate.
    """
    # pick the platform-specific hosts file location
    hosts_path = ("C:\\Windows\\System32\\drivers\\etc\\hosts"
                  if platform.system() == 'Windows' else "/etc/hosts")
    hosts = Hosts(path=hosts_path)
    hosts.remove_all_matching(name=domain_name)
    entry = HostsEntry(entry_type='ipv4',
                       address=ip_address,
                       names=[domain_name, alias_name])
    hosts.add([entry])
    hosts.write()
def add_host(ipaddr, domain):
    """Add an ipv4 entry mapping *domain* to *ipaddr* in /etc/hosts.

    The domain named by the ENDPOINT_URL environment variable is left
    untouched. Verbose output is gated on the module-level ``args``.
    """
    hosts = Hosts(path='/etc/hosts')
    if domain != os.environ.get('ENDPOINT_URL'):
        if args.verbose > 0:
            print(f"===={domain}")
        domain = remove_port(domain)
        entry = HostsEntry(entry_type='ipv4', address=ipaddr, names=[domain])
        # force + allow_address_duplication: overwrite/stack entries freely
        hosts.add([entry], force=True, allow_address_duplication=True)
        hosts.write()
        if args.verbose > 0:
            print(f"Modify hosts file -> {ipaddr} {domain}")
def test_exception_raised_when_unable_to_write_hosts(tmpdir):
    """Writing to a read-only hosts file raises UnableToWriteHosts.

    Skipped implicitly when running as root, who can write regardless of
    the file's mode bits.
    """
    if get_username() != 'root':
        path = tmpdir.mkdir("etc").join("hosts")
        path.write("127.0.0.1\tlocalhost\n")
        hosts = Hosts(path=path.strpath)
        os.chmod(path.strpath, 0o444)  # make the file read-only
        entry = HostsEntry(entry_type='ipv4',
                           address='123.123.123.123',
                           names=['test.example.com'])
        hosts.add(entries=[entry])
        with pytest.raises(exception.UnableToWriteHosts):
            hosts.write()
def test_remove_existing_entry_using_name_only(tmpdir):
    """An entry can be removed by one of its names alone; unrelated
    entries, comments and blank lines survive the removal."""
    # two newlines intentionally follow the comment, see issue #11
    entries = '1.2.3.4 example.com example\n# this is a comment\n\n3.4.5.6 random.com'
    path = tmpdir.mkdir("etc").join("hosts")
    path.write(entries)
    hosts = Hosts(path=path.strpath)
    assert hosts.exists(address='1.2.3.4')
    assert hosts.exists(names=['example.com'])
    hosts.remove_all_matching(name='example.com')
    assert not hosts.exists(names=['example.com'])
    hosts.write()
    written = open(path.strpath).read()
    assert '# this is a comment\n' in written
    assert '3.4.5.6\trandom.com' in written
def test_write_will_create_path_if_missing():
    """Hosts.write() creates the backing file when the path declared at
    construction time does not yet exist.

    Improvements over the original: the path lives in a private temporary
    directory instead of a hard-coded '/tmp/<timestamp>' name, which avoids
    collisions between concurrent runs and works on platforms without /tmp,
    and cleanup happens in a finally block so no file is leaked when an
    assertion or write fails.
    """
    import shutil
    import tempfile

    tmp_dir = tempfile.mkdtemp()
    hosts_path = os.path.join(tmp_dir, 'testwrite')
    try:
        hosts = Hosts(path=hosts_path)
        entry = HostsEntry.str_to_hostentry('1.2.3.4 example.com example.org')
        hosts.add(entries=[entry])
        hosts.write()
        # re-read from disk to prove the file was actually created
        hosts2 = Hosts(path=hosts_path)
        assert hosts2.exists(address='1.2.3.4')
    finally:
        # always clean up, even if an assertion above fails
        shutil.rmtree(tmp_dir, ignore_errors=True)
def test_add_comments(tmpdir):
    """Comment entries can be added, queried, written and re-read."""
    path = tmpdir.mkdir("etc").join("hosts")
    path.write('127.0.0.1 example.com example2.com\n#existing comment')
    hosts = Hosts(path=path.strpath)
    assert hosts.count() == 2  # 1 address and 1 comment
    prefixed = HostsEntry(entry_type='comment',
                          comment='# an example comment')
    unprefixed = HostsEntry(entry_type='comment',
                            comment='another example comment')
    hosts.add(entries=[prefixed, unprefixed], force=True)
    assert hosts.count() == 4  # 1 address and 3 comments
    assert not hosts.exists(address='3.4.5.6')
    assert hosts.exists(comment='# an example comment')
    # a '# ' prefix is applied to comments that lack one
    assert hosts.exists(comment='# another example comment')
    assert hosts.exists(names=['example.com'])
    # check the entries can be written and then read correctly
    hosts.write()
    reloaded = Hosts(path=path.strpath)
    assert reloaded.count() == 4  # 1 address and 3 comments
def test_existing_comments_and_blanks_are_preserved(tmpdir):
    """Comments and blank lines already present in the file must survive
    the addition of a new entry."""
    path = tmpdir.mkdir("etc").join("hosts")
    path.write("6.6.6.6\texample.com\n# A test comment\n\n")
    hosts = Hosts(path=path.strpath)
    entry = HostsEntry(entry_type='ipv4',
                       address='82.132.132.132',
                       names=['something.com', 'example'])
    hosts.add(entries=[entry], force=False)
    result = hosts.write()
    # the pre-existing comment and blank line were written back out
    assert result.get('comments_written') == 1
    assert result.get('blanks_written') == 1
def test_existing_ipv6_addresses_are_preserved(tmpdir):
    """Adding an ipv4 entry must not disturb existing ipv6 entries."""
    path = tmpdir.mkdir("etc").join("hosts")
    path.write("fe80::1\tlocalhost\n6.6.6.6\texample.com\n# A test comment\n\n")
    hosts = Hosts(path=path.strpath)
    entry = HostsEntry(entry_type='ipv4',
                       address='82.132.132.132',
                       names=['something.com', 'example'])
    hosts.add(entries=[entry], force=False)
    result = hosts.write()
    # one ipv6 kept, two ipv4 (existing + new), comment and blank intact
    assert result.get('ipv6_entries_written') == 1
    assert result.get('ipv4_entries_written') == 2
    assert result.get('comments_written') == 1
    assert result.get('blanks_written') == 1
def getLinks(self):
    """Grab all links from web server homepage i.e. http://IP:PORT/ and look
    for .htb domain names. If a .htb domain is found, add the hostname to the
    /etc/hosts file and then proceed to fuzz the hostname for virtual hostname
    routing using wfuzz. If a valid sub-hostname is found, add the domain to
    the /etc/hosts file as well using python_hosts library merge_names
    parameter.(Thanks for adding this feature! @jonhadfield)"""

    # Small helper: run a shell command and return its raw stdout bytes.
    def cmdline(command):
        process = Popen(args=command, stdout=PIPE, shell=True)
        return process.communicate()[0]

    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    http_ports = np.http_ports
    # colourised "[+]" prefixes for console output (sty colour codes)
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    cmd_info_orange = "[" + fg.li_yellow + "+" + fg.rs + "]"
    c = config_parser.CommandParser(
        f"{os.path.expanduser('~')}/.config/autorecon/config.yaml", self.target)
    if len(http_ports) != 0:
        if not os.path.exists(c.getPath("web", "webDir")):
            os.makedirs(c.getPath("web", "webDir"))
        for hp in http_ports:
            try:
                url = f"""http://{self.target}:{hp}"""
                wfuzzReport = c.getPath("web", "wfuzzReport", port=hp)
                page = requests.get(url, verify=False, timeout=(5, 30))
                data = page.text
                soup = BeautifulSoup(data, "html.parser")
                # links = []
                htb = [".htb"]
                source_domain_name = []
                # scan text nodes containing ".htb" for hostname-shaped strings
                for link in soup.find_all(text=lambda x: ".htb" in x):
                    matches = re.findall(
                        r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3}",
                        link)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
                # also scan <img src=...> attributes for .htb hostnames
                for link in soup.find_all('img'):
                    src_matches = link.get('src')
                    matches = re.findall(
                        r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3}",
                        src_matches)
                    for x in matches:
                        if any(s in x for s in htb):
                            source_domain_name.append(x)
            except requests.exceptions.ConnectionError as ce_error:
                print("Connection Error: ", ce_error)
                continue
            except requests.exceptions.Timeout as t_error:
                print("Connection Timeout Error: ", t_error)
                continue
            except requests.exceptions.RequestException as req_err:
                print("Some Ambiguous Exception:", req_err)
                continue
            # merge source-scraped hostnames with any passed in on self
            if source_domain_name and self.hostnames:
                all_hostnames = list(
                    set(source_domain_name).union(set(self.hostnames)))
            if source_domain_name and not self.hostnames:
                all_hostnames = source_domain_name
            if self.hostnames and not source_domain_name:
                all_hostnames = self.hostnames
            # NOTE(review): if both source_domain_name and self.hostnames are
            # empty, all_hostnames is never assigned and the next line raises
            # NameError — confirm callers guarantee at least one is non-empty.
            if all_hostnames:
                vhostnames = [
                    i.lower() for i in sorted(set(all_hostnames))
                ]
                vhost_log = open(c.getPath("web", "vhostnames"), "a+")
                for vh in vhostnames:
                    vhost_log.write(vh)
                vhost_log.close()
                print(
                    f"""{cmd_info_orange} {fg.li_magenta}Found{fg.rs} {fg.cyan}{vhostnames}{fg.rs} in {fg.li_red}The Source!{fg.rs} http://{self.target}:{hp}"""
                )
                print(
                    f"""{cmd_info} {fg.li_magenta}Adding{fg.rs} {fg.li_cyan} {vhostnames}{fg.rs} to /etc/hosts file"""
                )
                # merge_names=True merges new names into an existing entry
                # for the same address instead of replacing it
                hosts = Hosts(path="/etc/hosts")
                new_entry = HostsEntry(entry_type="ipv4",
                                       address=self.target,
                                       names=vhostnames)
                hosts.add([new_entry], merge_names=True)
                hosts.write()
                # base domains (exactly one dot) are the vhost-fuzzing targets
                base_domain_name = []
                for d in vhostnames:
                    self.htb_source_domains.append(d)
                    if d.count('.') == 1:
                        base_domain_name.append(d)
                try:
                    import wfuzz
                    from tqdm import tqdm
                    tk5 = c.getPath("wordlists", "top5Ksubs")
                    print(
                        f"""{cmd_info} wfuzz -z file,{tk5} -u {base_domain_name[0]}:{hp} -H 'Host: FUZZ.{base_domain_name[0]}:{hp}'"""
                    )
                    print(
                        f"{fg.li_yellow}Wfuzz's STDOUT is Hidden to prevent filling up Terminal. Desired Response Codes are unpredictable during initial fuzz session. {fg.rs} STDOUT will be written to {fg.li_magenta}{wfuzzReport}{fg.rs}"
                    )
                    str_domain = f"""{base_domain_name[0]}:{hp}"""
                    fuzz_domain = f"""FUZZ.{base_domain_name[0]}:{hp}"""
                    # progress bar sized to the subdomain wordlist length
                    wordlist_lines = 4997
                    with tqdm(total=wordlist_lines) as pbar:
                        for r in wfuzz.fuzz(
                                url=str_domain,
                                hc=[404, 400],
                                payloads=[("file", dict(fn=tk5))],
                                headers=[("Host", fuzz_domain)],
                                printer=(wfuzzReport, "raw"),
                        ):
                            # print(r)
                            pbar.update()
                            pbar.set_description_str(
                                desc=f"{fg.li_yellow}wfuzz{fg.rs}")
                            # pass
                except Exception as e:
                    print(e)
                if os.path.exists(wfuzzReport):
                    # count occurrences of each response size in the report;
                    # rare sizes (< 5 hits) indicate a distinct vhost response
                    awk_print = "awk '{print $6}'"
                    check_occurances = f"""sed -n -e 's/^.*C=//p' {wfuzzReport} | grep -v "Warning:" | {awk_print} | sort | uniq -c"""
                    response_num = [
                        i.strip() for i in cmdline(
                            check_occurances).decode("utf-8").split("\n")
                    ]
                    res_filt = [
                        i.split() for i in sorted(set(response_num))
                    ]
                    # NOTE(review): 'c' here shadows the CommandParser above;
                    # harmless only because the parser is not used again after
                    # this point in the method.
                    filt2arr = [
                        c for c in res_filt if len(c) != 0 and int(c[0]) < 5
                    ]
                    status_code = []
                    if len(filt2arr) != 0 and (len(filt2arr) < 5):
                        # print(filt2arr)
                        for htprc in filt2arr:
                            status_code.append(htprc[1])
                    if len(status_code) != 0 and len(status_code) <= 5:
                        for _ in status_code:
                            # print(status_code)
                            awk_print = "awk '{print $9}'"
                            get_domain_cmd = f"""grep '{_} Ch' {wfuzzReport} | {awk_print}"""
                            get_domains = (check_output(
                                get_domain_cmd,
                                shell=True,
                                stderr=STDOUT).rstrip().decode(
                                    "utf-8").replace('"', ""))
                            subdomains = []
                            if get_domains is not None:
                                subdomains.append(get_domains)
                                sub_d = "{}.{}".format(
                                    subdomains[0], base_domain_name[0])
                                print(
                                    f"""{cmd_info_orange}{fg.li_blue} Found Subdomain!{fg.rs} {fg.li_green}{sub_d}{fg.rs}"""
                                )
                                print(
                                    f"""{cmd_info}{fg.li_magenta} Adding{fg.rs} {fg.li_cyan}{sub_d}{fg.rs} to /etc/hosts file"""
                                )
                                hosts = Hosts(path="/etc/hosts")
                                new_entry = HostsEntry(
                                    entry_type="ipv4",
                                    address=self.target,
                                    names=[sub_d],
                                )
                                hosts.add([new_entry], merge_names=True)
                                hosts.write()
                                self.htb_source_domains.append(sub_d)
# NOTE(review): this fragment starts mid-way through a parser.add_argument(...)
# call — the opening of that call (and the argparse.ArgumentParser setup) lies
# outside this chunk; the leading keyword arguments below belong to it.
dest='unset',
    action='store_true',
    help=
    'Unset development host (without this parameter the dev host will be set).'
)
parser.add_argument('-p',
                    '--path_to_hosts_file',
                    dest='path_to_hosts_file',
                    help='Path to hosts file, defaults to autodetect.')
# Get cli args
args = parser.parse_args()
args_dict = vars(args)
###############################################################################
# SET HOSTS
###############################################################################
# '{{ FS_DOMAIN }}' placeholders are presumably substituted by a templating
# step (e.g. Ansible/Jinja2) before this script runs — TODO confirm.
hosts = Hosts(path=args_dict['path_to_hosts_file'])
if args_dict['unset']:
    # remove every entry matching the development domain
    hosts.remove_all_matching(name='{{ FS_DOMAIN }}')
    print('Removed development host.')
else:
    # 0.0.0.0 black-holes the domain and its www. variant locally
    new_entry = HostsEntry(entry_type='ipv4',
                           address='0.0.0.0',
                           names=['www.{{ FS_DOMAIN }}', '{{ FS_DOMAIN }}'])
    hosts.add([new_entry])
    print('Added development host.')
hosts.write()
def Scan(self):
    """Harvest candidate hostnames for the target from the nmap top-ports
    report (and from sslscan certificate output when SSL ports are open),
    add them to /etc/hosts, and attempt a DNS zone transfer with dig when a
    DNS port is open.

    Side effects: reads the nmap report file, shells out to sslscan/dig,
    creates report directories, appends to self.redirect_hostname, and
    rewrites /etc/hosts. Exits the process if the nmap report is missing.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssl_ports = np.ssl_ports
    cmd_info = "[" + fg.green + "+" + fg.rs + "]"
    # substrings the hostname regex matches but which are not real
    # hostnames (file extensions, common TLD false-positives)
    ignore = [
        ".nse",
        ".php",
        ".html",
        ".png",
        ".js",
        ".org",
        ".versio",
        ".com",
        ".gif",
        ".asp",
        ".aspx",
        ".jpg",
        ".jpeg",
        ".txt",
        ".cgi",
    ]
    dns = []
    try:
        with open(
                f"{self.target}-Report/nmap/top-ports-{self.target}.nmap",
                "r") as nm:
            for line in nm:
                # normalise separators so hostnames embedded in cert
                # fields are exposed to the regex
                new = (line.replace("=", " ").replace("/", " ").replace(
                    "commonName=", "").replace("/organizationName=", " "))
                matches = re.findall(
                    r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}",
                    new)
                for x in matches:
                    if not any(s in x for s in ignore):
                        dns.append(x)
                if "|_http-title: Did not follow redirect to http:" in line:
                    split_line = line.split()
                    last_word = split_line[-1]
                    redirect_domain = (last_word.replace(
                        "http://", "").replace("/", "").replace("'", ""))
                    # BUG FIX: the original f-string referenced the
                    # undefined name 'redirectDomain', raising NameError
                    # whenever a redirect was detected.
                    print(
                        f"{self.target} is redirecting to: {redirect_domain}, adding {redirect_domain} to /etc/hosts file"
                    )
                    dns.append(redirect_domain)
                    self.redirect_hostname.append(redirect_domain)
        sdns = sorted(set(dns))
        tmpdns = list(sdns)
    except FileNotFoundError as fnf_error:
        print(fnf_error)
        exit()
    ################# SSLSCAN #######################
    if len(ssl_ports) == 0:
        # no SSL ports: the nmap-derived names are all we have
        tmpdns2 = list(tmpdns)
        allsortedhostnameslist = sorted(set(tmpdns2))
    else:
        if not os.path.exists(f"{self.target}-Report/webSSL"):
            os.makedirs(f"{self.target}-Report/webSSL")
        if not os.path.exists(f"{self.target}-Report/aquatone"):
            os.makedirs(f"{self.target}-Report/aquatone")
        for sslport in ssl_ports:
            sslscanCMD = f"sslscan https://{self.target}:{sslport} | tee {self.target}-Report/webSSL/sslscan-color-{self.target}-{sslport}.log"
            print(cmd_info, sslscanCMD)
            call(sslscanCMD, shell=True)
            sslscanFile = (
                f"{self.target}-Report/webSSL/sslscan-color-{self.target}-{sslport}.log"
            )
            if os.path.exists(sslscanFile):
                # pull CN and subjectAltName hostnames out of the scan log
                domainName = []
                altDomainNames = []
                with open(sslscanFile, "rt") as f:
                    for line in f:
                        if "Subject:" in line:
                            n = line.lstrip("Subject:").rstrip("\n")
                            na = n.lstrip()
                            domainName.append(na)
                        if "Altnames:" in line:
                            alnam = line.lstrip("Altnames:").rstrip("\n")
                            alname = alnam.lstrip()
                            alname1 = alname.lstrip("DNS:")
                            alname2 = alname1.replace("DNS:", "").replace(
                                ",", "").split()
                            for x in alname2:
                                altDomainNames.append(x)
                # merge cert names with the nmap-derived names
                tmpdns2 = domainName + altDomainNames + tmpdns
                allsortedhostnameslist = sorted(set(tmpdns2))
    dnsPort = np.dns_ports
    if len(dnsPort) == 0:
        # no DNS service: just register everything found so far
        if len(allsortedhostnameslist) != 0:
            for x in allsortedhostnameslist:
                self.redirect_hostname.append(x)
            print(
                f"{cmd_info} Adding {fg.li_cyan}{allsortedhostnameslist} {fg.rs}to /etc/hosts"
            )
            hosts = Hosts(path="/etc/hosts")
            new_entry = HostsEntry(entry_type="ipv4",
                                   address=self.target,
                                   names=allsortedhostnameslist)
            hosts.add([new_entry])
            hosts.write()
    else:
        if not os.path.exists(f"{self.target}-Report/dns"):
            os.makedirs(f"{self.target}-Report/dns")
        ######## Check For Zone Transfer: Running dig ###############
        if len(allsortedhostnameslist) != 0:
            alldns = " ".join(map(str, allsortedhostnameslist))
            dig_command = f"dig axfr @{self.target} {alldns} | tee {self.target}-Report/dns/dig-zonexfer-{self.target}.log"
            print(cmd_info, dig_command)
            call(dig_command, shell=True)
            # strip comments/blank lines, keep the first column, drop the
            # trailing dot, and de-duplicate into the domains log
            filterZoneTransferDomainsCMD = (
                f"grep -v ';' {self.target}-Report/dns/dig-zonexfer-{self.target}.log "
                + "| grep -v -e '^[[:space:]]*$' " + "| awk '{print $1}' " +
                f"| sed 's/.$//' | sort -u >{self.target}-Report/dns/zonexfer-domains.log"
            )
            call(filterZoneTransferDomainsCMD, shell=True)
            zxferFile = f"{self.target}-Report/dns/zonexfer-domains.log"
            if os.path.exists(zxferFile):
                zonexferDns = []
                with open(zxferFile, "r") as zf:
                    for line in zf:
                        zonexferDns.append(line.rstrip())
                for x in allsortedhostnameslist:
                    zonexferDns.append(x)
                sortedAllDomainsList = []
                for x in sorted(set(zonexferDns)):
                    sortedAllDomainsList.append(x)
                    self.redirect_hostname.append(x)
                if len(zonexferDns) != 0:
                    print(
                        f"{cmd_info} Adding {fg.li_cyan}{sortedAllDomainsList} {fg.rs}to /etc/hosts"
                    )
                    hosts = Hosts(path="/etc/hosts")
                    new_entry = HostsEntry(entry_type="ipv4",
                                           address=self.target,
                                           names=sortedAllDomainsList)
                    hosts.add([new_entry])
                    hosts.write()
def Scan(self):
    """Parse nmap's output from the top open ports scan and use regex to find
    valid hostnames that are 3-6 chars in length. These domains will be
    filtered to ignore most .com and file extensions since this tool is
    currently designed for CTF machines like Hack the Box which usually have
    .htb extensions. The list of ignored domains is in utils/helper_lists.py

    Side effects: shells out to sslscan/dig, creates report directories,
    appends to self.redirect_hostname / self.fqdn_hostname, and rewrites
    /etc/hosts. Exits the process if the nmap report file is missing.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssl_ports = np.ssl_ports
    dnsPort = np.dns_ports
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    c = config_parser.CommandParser(f"{os.getcwd()}/config/config.yaml",
                                    self.target)
    ig = helper_lists.ignoreDomains()
    ignore = ig.ignore
    dns = []
    try:
        with open(c.getPath("nmap", "nmap_top_ports_nmap"), "r") as nm:
            for line in nm:
                # normalise separators so hostnames embedded in cert
                # fields are exposed to the regex
                new = (line.replace("=", " ").replace("/", " ").replace(
                    "commonName=", "").replace("/organizationName=",
                                               " ").replace(",",
                                                            " ").replace(
                                                                "_", " "))
                matches = re.findall(
                    r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3,6}",
                    new)
                for x in matches:
                    if not any(s in x for s in ignore):
                        dns.append(x)
                if "|_http-title: Did not follow redirect to http:" in line:
                    split_line = line.split()
                    last_word = split_line[-1]
                    redirect_domain = (last_word.replace(
                        "http://", "").replace("/", "").replace("'", ""))
                    print(
                        f"""{self.target} is redirecting to: {redirect_domain}, adding {redirect_domain} to /etc/hosts file"""
                    )
                    dns.append(redirect_domain)
                    self.redirect_hostname.append(redirect_domain)
        sdns = sorted(set(dns))
        # drop anything that is actually a bare IPv4 address
        tmpdns = [
            x for x in sdns
            if not re.findall(r"[0-9]+(?:\.[0-9]+){3}", x)
        ]
    except FileNotFoundError as fnf_error:
        print(fnf_error)
        exit()
    ################# SSLSCAN #######################
    if len(ssl_ports) == 0:
        # no SSL ports: the nmap-derived names are all we have
        tmpdns2 = list(tmpdns)
        allsortedhostnameslist = sorted(set(tmpdns2))
    else:
        if not os.path.exists(c.getPath("webSSL", "webSSLDir")):
            os.makedirs(c.getPath("webSSL", "webSSLDir"))
        if not os.path.exists(c.getPath("web", "aquatoneDir")):
            os.makedirs(c.getPath("web", "aquatoneDir"))
        for sslport in ssl_ports:
            sslscanCMD = c.getCmd("webSSL", "sslscan", sslport=sslport)
            print(cmd_info, sslscanCMD)
            call(sslscanCMD, shell=True)
            if os.path.exists(
                    c.getPath("webSSL", "webSSLScanTarget",
                              sslport=sslport)):
                sslscanFile = c.getPath("webSSL",
                                        "webSSLScanTarget",
                                        sslport=sslport)
                # pull CN and subjectAltName hostnames out of the scan log
                domainName = []
                altDomainNames = []
                with open(sslscanFile, "rt") as f:
                    for line in f:
                        if "Subject:" in line:
                            n = line.lstrip("Subject:").rstrip("\n")
                            na = n.lstrip()
                            if na not in ignore:
                                domainName.append(na)
                        if "Altnames:" in line:
                            alnam = line.lstrip("Altnames:").rstrip("\n")
                            alname = alnam.lstrip()
                            alname1 = alname.lstrip("DNS:")
                            alname2 = (alname1.replace("DNS:", "").replace(
                                ",", "").split())
                            for x in alname2:
                                if x not in ignore:
                                    altDomainNames.append(x)
                both = domainName + altDomainNames
                tmpdns2 = []
                # discard candidates containing shell/markup junk characters
                ignore_chars_regex = re.compile(r"[@_!#$%^&*()<>?/\|}{~:]")
                for x in both:
                    if ignore_chars_regex.search(x) is None:
                        tmpdns2.append(x)
                for x in tmpdns:
                    if x not in ignore:
                        tmpdns2.append(x)
                allsortedhostnameslist = [
                    x for x in sorted(set(tmpdns2)) if x not in ignore
                ]
                # BUG FIX: the original removed items from
                # allsortedhostnameslist while iterating over it, which
                # skips the element following each removal; filter into a
                # new list instead.
                allsortedhostnameslist = [
                    x for x in allsortedhostnameslist
                    if not re.findall(r"[0-9]+(?:\.[0-9]+){3}", x)
                ]
    if len(dnsPort) == 0:
        # no DNS service: just register everything found so far
        if len(allsortedhostnameslist) != 0:
            for x in allsortedhostnameslist:
                if x not in ignore:
                    self.redirect_hostname.append(x)
            print(
                f"""{cmd_info} Adding {fg.li_cyan}{allsortedhostnameslist} {fg.rs}to /etc/hosts"""
            )
            hosts = Hosts(path="/etc/hosts")
            new_entry = HostsEntry(entry_type="ipv4",
                                   address=self.target,
                                   names=allsortedhostnameslist)
            hosts.add([new_entry])
            hosts.write()
    else:
        if not os.path.exists(c.getPath("dns", "dnsDir")):
            os.makedirs(c.getPath("dns", "dnsDir"))
        # reverse lookup against the target's own DNS server
        dig_cmd = c.getCmd("dns", "dnsDig")
        print(cmd_info, dig_cmd)
        dp = dig_parser.digParse(self.target, dig_cmd)
        dp.parseDig()
        dig_hosts = dp.hosts
        sub_hosts = dp.subdomains
        if len(dig_hosts) != 0:
            for x in dig_hosts:
                allsortedhostnameslist.append(x)
                self.fqdn_hostname.append(x)
        if len(sub_hosts) != 0:
            for x in sub_hosts:
                allsortedhostnameslist.append(x)
        ######## Check For Zone Transfer: Running dig ###############
        if len(allsortedhostnameslist) != 0:
            alldns = " ".join(map(str, allsortedhostnameslist))
            zonexferDns = []
            dig_command = c.getCmd("dns", "dnsDigAxfr", alldns=alldns)
            print(cmd_info, dig_command)
            dp2 = dig_parser.digParse(self.target, dig_command)
            dp2.parseDigAxfr()
            subdomains = dp2.subdomains
            for x in subdomains:
                zonexferDns.append(x)
            sortedAllDomainsList = []
            for x in sorted(set(zonexferDns)):
                sortedAllDomainsList.append(x)
                self.redirect_hostname.append(x)
            if len(zonexferDns) != 0:
                print(
                    f"""{cmd_info} Adding {fg.li_cyan}{sortedAllDomainsList} {fg.rs}to /etc/hosts"""
                )
                hosts = Hosts(path="/etc/hosts")
                new_entry = HostsEntry(
                    entry_type="ipv4",
                    address=self.target,
                    names=sortedAllDomainsList,
                )
                hosts.add([new_entry])
                hosts.write()
def Scan(self):
    """Harvest candidate hostnames from the nmap top-ports report (and from
    sslscan certificate output when SSL ports are open), add them to
    /etc/hosts, and attempt reverse lookups plus a zone transfer with dig
    when a DNS port is open.

    Side effects: shells out to sslscan/dig, creates report directories,
    appends to self.redirect_hostname / self.fqdn_hostname, and rewrites
    /etc/hosts. Exits the process if the nmap report file is missing.
    """
    np = nmapParser.NmapParserFunk(self.target)
    np.openPorts()
    ssl_ports = np.ssl_ports
    dnsPort = np.dns_ports
    cmd_info = "[" + fg.li_green + "+" + fg.rs + "]"
    cwd = os.getcwd()
    reportDir = f"{cwd}/{self.target}-Report"
    # substrings the hostname regex matches but which are not real
    # hostnames (file extensions, common TLD false-positives)
    ignore = [
        ".nse",
        ".php",
        ".html",
        ".png",
        ".js",
        ".org",
        ".versio",
        ".com",
        ".gif",
        ".asp",
        ".aspx",
        ".jpg",
        ".jpeg",
        ".txt",
        ".cgi",
        ".pl",
        ".co",
        ".eu",
        ".uk",
        ".localdomain",
        "localhost.localdomain",
        ".localhost",
        ".local",
    ]
    dns = []
    try:
        with open(
                f"{self.target}-Report/nmap/top-ports-{self.target}.nmap",
                "r") as nm:
            for line in nm:
                # normalise separators so hostnames embedded in cert
                # fields are exposed to the regex
                new = (line.replace("=", " ").replace("/", " ").replace(
                    "commonName=", "").replace("/organizationName=",
                                               " ").replace(",",
                                                            " ").replace(
                                                                "_", " "))
                matches = re.findall(
                    r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{3,6}",
                    new)
                for x in matches:
                    if not any(s in x for s in ignore):
                        dns.append(x)
                if "|_http-title: Did not follow redirect to http:" in line:
                    split_line = line.split()
                    last_word = split_line[-1]
                    redirect_domain = (last_word.replace(
                        "http://", "").replace("/", "").replace("'", ""))
                    # BUG FIX: the original f-string referenced the
                    # undefined name 'redirectDomain', raising NameError
                    # whenever a redirect was detected.
                    print(
                        f"{self.target} is redirecting to: {redirect_domain}, adding {redirect_domain} to /etc/hosts file"
                    )
                    dns.append(redirect_domain)
                    self.redirect_hostname.append(redirect_domain)
        sdns = sorted(set(dns))
        tmpdns = list(sdns)
    except FileNotFoundError as fnf_error:
        print(fnf_error)
        exit()
    ################# SSLSCAN #######################
    if len(ssl_ports) == 0:
        # no SSL ports: the nmap-derived names are all we have
        tmpdns2 = list(tmpdns)
        allsortedhostnameslist = sorted(set(tmpdns2))
    else:
        if not os.path.exists(f"{self.target}-Report/webSSL"):
            os.makedirs(f"{self.target}-Report/webSSL")
        if not os.path.exists(f"{self.target}-Report/aquatone"):
            os.makedirs(f"{self.target}-Report/aquatone")
        for sslport in ssl_ports:
            sslscanCMD = f"sslscan https://{self.target}:{sslport} | tee {self.target}-Report/webSSL/sslscan-color-{self.target}-{sslport}.log"
            print(cmd_info, sslscanCMD)
            call(sslscanCMD, shell=True)
            sslscanFile = (
                f"{self.target}-Report/webSSL/sslscan-color-{self.target}-{sslport}.log"
            )
            if os.path.exists(sslscanFile):
                # pull CN and subjectAltName hostnames out of the scan log
                domainName = []
                altDomainNames = []
                with open(sslscanFile, "rt") as f:
                    for line in f:
                        if "Subject:" in line:
                            n = line.lstrip("Subject:").rstrip("\n")
                            na = n.lstrip()
                            if na not in ignore:
                                domainName.append(na)
                        if "Altnames:" in line:
                            alnam = line.lstrip("Altnames:").rstrip("\n")
                            alname = alnam.lstrip()
                            alname1 = alname.lstrip("DNS:")
                            alname2 = alname1.replace("DNS:", "").replace(
                                ",", "").split()
                            for x in alname2:
                                if x not in ignore:
                                    altDomainNames.append(x)
                both = domainName + altDomainNames
                tmpdns2 = []
                # discard candidates containing shell/markup junk characters.
                # BUG FIX: pattern is now a raw string (the original "...\|..."
                # was an invalid escape) and the None test uses 'is None'
                # instead of '== None'.
                ignore_chars_regex = re.compile(r"[@_!#$%^&*()<>?/\|}{~:]")
                for x in both:
                    if ignore_chars_regex.search(x) is None:
                        tmpdns2.append(x)
                for x in tmpdns:
                    if x not in ignore:
                        tmpdns2.append(x)
                allsortedhostnameslist = [
                    x for x in sorted(set(tmpdns2)) if x not in ignore
                ]
    if len(dnsPort) == 0:
        # no DNS service: just register everything found so far
        if len(allsortedhostnameslist) != 0:
            for x in allsortedhostnameslist:
                if x not in ignore:
                    self.redirect_hostname.append(x)
            print(
                f"{cmd_info} Adding {fg.li_cyan}{allsortedhostnameslist} {fg.rs}to /etc/hosts"
            )
            hosts = Hosts(path="/etc/hosts")
            new_entry = HostsEntry(entry_type="ipv4",
                                   address=self.target,
                                   names=allsortedhostnameslist)
            hosts.add([new_entry])
            hosts.write()
    else:
        if not os.path.exists(f"{self.target}-Report/dns"):
            os.makedirs(f"{self.target}-Report/dns")
        # reverse lookup against the target's own DNS server
        dig_cmd = (
            f"dig -x {self.target} @{self.target} | tee {reportDir}/dns/dig-{self.target}.log"
        )
        print(cmd_info, dig_cmd)
        dp = dig_parser.digParse(self.target, dig_cmd)
        dp.parseDig()
        dig_hosts = dp.hosts
        sub_hosts = dp.subdomains
        if len(dig_hosts) != 0:
            for x in dig_hosts:
                allsortedhostnameslist.append(x)
                self.fqdn_hostname.append(x)
        if len(sub_hosts) != 0:
            for x in sub_hosts:
                allsortedhostnameslist.append(x)
        ######## Check For Zone Transfer: Running dig ###############
        if len(allsortedhostnameslist) != 0:
            alldns = " ".join(map(str, allsortedhostnameslist))
            zonexferDns = []
            dig_command = f"dig axfr @{self.target} {alldns} | tee {reportDir}/dns/dig-axfr-{self.target}.log"
            print(cmd_info, dig_command)
            dp2 = dig_parser.digParse(self.target, dig_command)
            dp2.parseDigAxfr()
            subdomains = dp2.subdomains
            for x in subdomains:
                zonexferDns.append(x)
            sortedAllDomainsList = []
            for x in sorted(set(zonexferDns)):
                sortedAllDomainsList.append(x)
                self.redirect_hostname.append(x)
            if len(zonexferDns) != 0:
                print(
                    f"{cmd_info} Adding {fg.li_cyan}{sortedAllDomainsList} {fg.rs}to /etc/hosts"
                )
                hosts = Hosts(path="/etc/hosts")
                new_entry = HostsEntry(entry_type="ipv4",
                                       address=self.target,
                                       names=sortedAllDomainsList)
                hosts.add([new_entry])
                hosts.write()