# Imports used by this excerpt (in the full file these sit at the top).
import random
import sys

import requests


def IPscan(domain, ns, A):
    url = 'http://' + domain
    if A is not None:
        try:
            print(que + 'Using DIG to get the real IP')
            print(" " + good + 'Possible IP: %s' % str(A))
            print(que + 'Retrieving target homepage at: %s' % url)
            try:
                org_response = requests.get(url, timeout=config['http_timeout_seconds'])
            except requests.exceptions.Timeout:
                sys.stderr.write(" " + bad + "%s timed out after %d seconds\n"
                                 % (url, config['http_timeout_seconds']))
                return None  # org_response was never assigned; bail out instead of raising NameError below
            except requests.exceptions.RequestException:
                sys.stderr.write(" " + bad + "Failed to retrieve %s\n" % url)
                return None  # same reason as above
            if org_response.status_code != 200:
                print(' ' + bad + 'Responded with an unexpected HTTP status code')
            if org_response.url != url:
                print(' ' + good + '%s redirects to %s' % (url, org_response.url))
            try:
                # Fetch the same page directly from the candidate IP and compare bodies.
                sec_response = requests.get('http://' + str(A), timeout=config['http_timeout_seconds'])
                if sec_response.status_code != 200:
                    print(' ' + bad + 'Responded with an unexpected HTTP status code')
                else:
                    print(que + 'Testing if source body is the same in both websites')
                    page_similarity = similarity(sec_response.text, org_response.text)
                    if page_similarity > config['response_similarity_threshold']:
                        print(' ' + good + ' HTML content is %d%% structurally similar to: %s'
                              % (round(100 * page_similarity, 2), org_response.url))
            except Exception:
                # Broad catch: any failure while contacting the IP is reported as a timeout.
                print(" " + bad + "Connection Timeout")
            netcat(domain, ns, count=1)
            return org_response
        except requests.exceptions.SSLError:
            print(" " + bad + 'Error handshaking with SSL')
        except requests.exceptions.ReadTimeout:
            print(" " + bad + "Connection Timeout")
        except requests.ConnectTimeout:
            print(" " + bad + "Connection Timeout")
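# IPscan reads its tunables from a `config` mapping defined elsewhere in the
# project. A minimal sketch of the assumed shape: the key names come from the
# code above, while the values are illustrative guesses, not the project's
# real defaults.
config = {
    'http_timeout_seconds': 10,            # timeout passed to every requests.get call
    'response_similarity_threshold': 0.9,  # minimum similarity() ratio worth reporting
}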
def IPscan(domain, ns, A, userAgent, randomAgent, header, args):
    url = 'http://' + domain
    # Build request headers from the comma-separated "Name: value" pairs, then
    # let a random or explicit User-agent override anything parsed from them.
    # split(':', 1) keeps header values that themselves contain a colon intact.
    headers = dict(x.replace(' ', '').split(':', 1) for x in header.split(',')) if header is not None else {}
    if randomAgent:
        headers.update({'User-agent': random.choice(
            open("data/txt/random_agents.txt").readlines()).rstrip("\n")})
    if userAgent is not None:
        headers.update({'User-agent': userAgent})
    if A is not None:
        try:
            print(que + 'Using DIG to get the real IP')
            print(tab + good + 'Possible IP: %s' % str(A))
            print(que + 'Retrieving target homepage at: %s' % url)
            org_response = requests.get(url, headers=headers, timeout=config['http_timeout_seconds'])
            if org_response.status_code != 200:
                print(tab + bad + 'Responded with an unexpected HTTP status code')
            if org_response.url != url:
                print(tab + good + '%s redirects to %s' % (url, org_response.url))
            try:
                # Fetch the same page directly from the candidate IP and compare bodies.
                sec_response = requests.get('http://' + str(A), headers=headers,
                                            timeout=config['http_timeout_seconds'])
                if sec_response.status_code != 200:
                    print(tab + bad + 'Responded with an unexpected HTTP status code')
                else:
                    print(que + 'Testing if source body is the same in both websites')
                    page_similarity = similarity(sec_response.text, org_response.text)
                    if page_similarity > config['response_similarity_threshold']:
                        print(tab + good + ' HTML content is %d%% structurally similar to: %s'
                              % (round(100 * page_similarity, 2), org_response.url))
            except Exception:
                print(tab + bad + "Connection Timeout")
            netcat(domain, ns, args.ignoreRedirects, userAgent, randomAgent, args.headers, count=1)
            return org_response
        except requests.exceptions.SSLError:
            print(tab + bad + 'Error handshaking with SSL')
        except requests.exceptions.ReadTimeout:
            print(tab + bad + "Connection Timeout")
        except requests.ConnectTimeout:
            print(tab + bad + "Connection Timeout")
        except requests.exceptions.Timeout:
            print(tab + bad + "%s timed out after %d seconds" % (url, config['http_timeout_seconds']))
        except requests.exceptions.RequestException:
            print(tab + bad + "Failed to retrieve: %s" % url)
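# Both IPscan variants compare response bodies through a similarity() helper
# that is not part of this excerpt. A minimal sketch, assuming a plain difflib
# ratio in [0, 1]; the project's real helper may normalise the HTML first.
import difflib

def similarity(text_a, text_b):
    # SequenceMatcher.ratio() returns 1.0 for identical strings and 0.0 for
    # completely different ones, which matches how the threshold check above
    # consumes the value.
    return difflib.SequenceMatcher(None, text_a, text_b).ratio()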
print(" " + bad +"Connection Timeout") if __name__=="__main__": try: args = parse_args() domain, file, brute = args.domain,args.file, args.bruter if args.subdomain == True and args.ns == None: ip_takes = make_list() if brute == True: nameservers = nameserver(domain) ip_takes.extend(nameservers) list_length = len(ip_takes) for i in range(0, list_length): ns = ip_takes[i] scan(domain, ns) netcat(domain, ns, count=0) A = DNSLookup(domain, ns) IPscan(domain) elif args.ns != None and args.subdomain == False: if brute == True: nameservers = nameserver(domain) nameservers.append(args.ns) list_length = len(nameservers) if brute is True else 1 for i in range(0, list_length): ns = nameservers[i] if brute is True else args.ns scan(domain, ns) netcat(domain, ns, count=0) A = DNSLookup(domain, ns) IPscan(domain) else: parse_error(errmsg='too few arguments, please use "help" argument')
if args.host is not None:
    nameservers = []
    if args.brute:
        nameservers = nameserver(args.domain)
    nameservers.append(args.host)
    if args.censys is not None:
        CensysIP = censys(args.domain, args.censys)
        subdomain.extend(CensysIP)
    if args.shodan is not None:
        ShodanIP = shodan(args.domain, args.shodan)
        subdomain.extend(ShodanIP)
    list_length = len(nameservers) if args.brute else 1
    for i in range(0, list_length):
        host = nameservers[i] if args.brute else args.host
        scan(args.domain, host, args.uagent, args.randomAgent, args.headers)
        netcat(args.domain, host, args.ignoreRedirects, args.uagent,
               args.randomAgent, args.headers, count=0)
        A = DNSLookup(args.domain, host)
        IPscan(args.domain, host, A, args.uagent, args.randomAgent, args.headers, args)
else:
    if args.brute:
        nameservers = nameserver(args.domain)
        subdomain.extend(nameservers)
    if args.censys is not None:
        CensysIP = censys(args.domain, args.censys)
        subdomain.extend(CensysIP)
    if args.shodan is not None:
        ShodanIP = shodan(args.domain, args.shodan)
        subdomain.extend(ShodanIP)
    list_length = len(subdomain)
    for i in range(0, list_length):
        host = subdomain[i]
        # The loop body is cut off in this excerpt; it is assumed to mirror
        # the args.host branch above.
        scan(args.domain, host, args.uagent, args.randomAgent, args.headers)
        netcat(args.domain, host, args.ignoreRedirects, args.uagent,
               args.randomAgent, args.headers, count=0)
        A = DNSLookup(args.domain, host)
        IPscan(args.domain, host, A, args.uagent, args.randomAgent, args.headers, args)
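# The __main__ blocks above rely on a parse_args() helper that is not shown
# (in the real file it would be defined before it is called). A hypothetical
# argparse sketch covering the attributes the newer block reads; every flag
# spelling here is an assumption, not the tool's real CLI.
import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--domain', required=True)           # target domain
    parser.add_argument('--host')                                  # single host to probe
    parser.add_argument('--brute', action='store_true')            # brute-force nameservers
    parser.add_argument('--censys')                                # Censys API credential
    parser.add_argument('--shodan')                                # Shodan API key
    parser.add_argument('--uagent')                                # explicit User-agent
    parser.add_argument('--randomAgent', action='store_true')      # pick a random User-agent
    parser.add_argument('--headers')                               # extra "Name: value" pairs
    parser.add_argument('--ignoreRedirects', action='store_true')  # forwarded to netcat()
    return parser.parse_args()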