def main():
    args = parse_args()
    domain = args.domain
    #threads = args.threads
    savefile = args.output
    ports = args.ports
    bruteforce_list = []
    subdomains = []
    if not savefile:
        now = datetime.datetime.now()
        timestr = now.strftime("-%Y-%m-%d-%H-%M")
        savefile = domain + timestr + ".txt"
    enable_bruteforce = args.bruteforce
    if enable_bruteforce or enable_bruteforce is None:
        enable_bruteforce = True

    # Validate domain
    if not is_domain(domain):
        print R + "[!] Error: Please enter a valid domain" + W
        sys.exit()

    # Print the banner
    banner()
    warning = ("[!] legal disclaimer: Usage of Teemo for attacking targets without prior mutual "
               "consent is illegal. It is the end user's responsibility to obey all applicable "
               "local laws. Developers assume no liability and are not responsible for any misuse "
               "or damage caused by this program")
    print warning
    print B + "[-] Enumerating subdomains now for %s" % domain + W
    '''
    subdomains.extend(callsites(domain, proxy))
    domains, emails = callengines(domain, 500, proxy)
    subdomains.extend(domains)
    #print subdomains
    '''
    Threadlist = []
    q_domains = Queue()  # to receive return values
    q_emails = Queue()
    useragent = random_useragent(allow_random_useragent)

    if args.proxy != None:
        proxy = args.proxy
        proxy = {args.proxy.split(":")[0]: proxy}
    elif default_proxies != None and (proxy_switch == 2 or proxy_switch == 1):  # config.py
        proxy = default_proxies
        try:
            sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sk.settimeout(2)
            ip = default_proxies['http'].split("/")[-2].split(":")[0]
            port = default_proxies['http'].split("/")[-2].split(":")[1]
            sk.connect((ip, int(port)))
            sk.close()
        except:
            print "\r\n[!!!] Proxy Test Failed, Please Check!\r\n"
            proxy = {}
    else:
        proxy = {}

    # do zone transfer checking
    zonetransfer(domain).check()

    for engine in [Alexa, Chaxunla, CrtSearch, DNSdumpster, Googlect, Ilink, Netcraft,
                   PassiveDNS, Pgpsearch, Sitedossier, ThreatCrowd, Threatminer]:
        #print callsites_thread(engine, domain, proxy)
        t = threading.Thread(target=callsites_thread,
                             args=(engine, domain, q_domains, q_emails, proxy))
        Threadlist.append(t)

    for engine in [search_ask, search_baidu, search_bing, search_bing_api, search_dogpile,
                   search_duckduckgo, search_exalead, search_fofa, search_google,
                   search_google_cse, search_shodan, search_so, search_yahoo, search_yandex]:
        if proxy_switch == 1 and engine in proxy_default_enabled:
            pass
        else:
            proxy = {}
        t = threading.Thread(target=callengines_thread,
                             args=(engine, domain, q_domains, q_emails, useragent, proxy, 500))
        #t.setDaemon(True)  # daemon thread, detached from the main process; probably not needed here
        Threadlist.append(t)

    #for t in Threadlist:
    #    print t
    for t in Threadlist:  # use start(), not run()
        t.start()
    for t in Threadlist:
        t.join()  # the main thread waits for each worker thread to finish

    while not q_domains.empty():
        subdomains.append(q_domains.get())
    emails = []
    while not q_emails.empty():
        emails.append(q_emails.get())

    if enable_bruteforce:
        print G + "[-] Starting bruteforce module now using subbrute.." + W
        record_type = False
        path_to_file = os.path.dirname(os.path.realpath(__file__))
        subs = os.path.join(path_to_file, 'subbrute', 'names.txt')
        resolvers = os.path.join(path_to_file, 'subbrute', 'resolvers.txt')
        process_count = 10
        output = False
        json_output = False
        bruteforce_list = subbrute.print_target(domain, record_type, subs, resolvers,
                                                process_count, output, json_output, subdomains)
        subdomains.extend(bruteforce_list)

    if subdomains is not None:
        subdomains = sorted(list(set(subdomains)))
        emails = sorted(list(set(emails)))
        subdomains.extend(emails)  # extend() returns None, so it cannot be used inline
        #print type(subdomains)
        #write_file(savefile, subdomains)
        if ports:
            print G + "[-] Start port scan now for the following ports: %s%s" % (Y, ports) + W
            ports = ports.split(',')  # list
            pscan = portscan(subdomains, ports)
            pscan.run()
        else:
            for subdomain in subdomains:
                print G + subdomain + W
        print "[+] {0} domains found in total".format(len(subdomains))
        print "[+] {0} emails found in total".format(len(emails))
        print "[+] Results saved to {0}".format(write_file(savefile, subdomains))
def main():
    try:
        banner()
        args = adjust_args()
        print "[-] Enumerating subdomains now for %s" % args.domain

        # do zone transfer checking
        zonetransfer(args.domain).check()

        Threadlist = []
        q_domains = Queue.Queue()  # to receive return values; queues keep this thread safe
        q_similar_domains = Queue.Queue()
        q_related_domains = Queue.Queue()
        q_emails = Queue.Queue()

        for engine in [Alexa, Chaxunla, CrtSearch, DNSdumpster, Googlect, Hackertarget, Ilink,
                       Netcraft, PassiveDNS, Pgpsearch, Sitedossier, ThreatCrowd, Threatminer,
                       Virustotal]:
            #print callsites_thread(engine, domain, proxy)
            #print engine.__name__
            if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
                proxy = args.proxy  # proxy taken from the config file or command-line argument
            else:
                proxy = {}  # no proxy
            t = threading.Thread(target=callsites_thread,
                                 args=(engine, args.domain, q_domains, q_similar_domains,
                                       q_related_domains, q_emails, proxy))
            Threadlist.append(t)

        for engine in [search_ask, search_baidu, search_bing, search_bing_api, search_dogpile,
                       search_duckduckgo, search_exalead, search_fofa, search_google,
                       search_google_cse, search_shodan, search_so, search_yahoo, search_yandex]:
            if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
                proxy = args.proxy
            else:
                proxy = {}
            t = threading.Thread(target=callengines_thread,
                                 args=(engine, args.domain, q_domains, q_emails, proxy, 500))
            t.setDaemon(True)  # daemon thread, detached from the main process; probably not needed here
            Threadlist.append(t)

        #for t in Threadlist:
        #    print t
        for t in Threadlist:  # use start(), not run()
            t.start()
        for t in Threadlist:  # two loops are needed: starting and joining in one loop would serialize the threads
            t.join()  # the main thread waits for each worker thread to finish

        subdomains = []
        while not q_domains.empty():
            subdomains.append(q_domains.get())
        emails = []
        while not q_emails.empty():
            emails.append(q_emails.get())
        related_domains = []
        while not q_related_domains.empty():
            related_domains.append(q_related_domains.get())

        if args.bruteforce:
            print G + "[-] Starting bruteforce using subDomainsBrute.." + W
            d = SubNameBrute(target=args.domain)
            d.run()
            brute_lines = d.result_lines
            brute_domains = d.result_domains
            brute_ips = d.result_ips
        else:
            brute_ips = []
            brute_lines = []
            brute_domains = []

        ########## print to console and write to file ##########
        if subdomains is not None:  # prepare output
            IP_list, lines = domains2ips(subdomains)  # resolve domains gathered from websites and search engines
            IP_list.extend(brute_ips)
            IPrange_list = iprange(IP_list)  # 1. IP ranges
            subdomains.extend(brute_domains)
            subdomains = tolower_list(subdomains)
            subdomains = sorted(list(set(subdomains)))  # 2. subdomains, including brute-force results
            subdomain_number = len(subdomains)  # number of subdomains
            lines.extend(brute_lines)
            lines = list(set(lines))  # 3. domain/IP pairs
            emails = sorted(list(set(emails)))  # 4. emails
            related_domains = sorted(list(set(related_domains)))  # 5. related domains
            subdomains.extend(emails)  # extend() returns None, so it cannot be used inline
            subdomains.extend(IPrange_list)  # subdomains + emails + subnets
            subdomains.extend(related_domains)  # subdomains + emails + subnets + related domains
            #print type(subdomains)
            for subdomain in subdomains:
                print G + subdomain + W
            subdomains.extend(lines)
            fp = open(args.output, "wb")
            #fp.writelines("\n".join(subdomains).decode("utf-8"))
            fp.writelines("\n".join(subdomains).encode("utf-8"))
            print "[+] {0} domains found in total".format(subdomain_number)
            print "[+] {0} related domains found in total".format(len(related_domains))
            print "[+] {0} emails found in total".format(len(emails))
            print "[+] Results saved to {0}".format(args.output)
    except KeyboardInterrupt as e:
        logger.info("Exit. Due To KeyboardInterrupt")
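
# domains2ips is expected to resolve every collected subdomain and hand back both the
# raw IP list and "domain,ip" line records. The sketch below is an assumed contract
# based on how its return value is used above (IP_list, lines); the real helper may
# use its own resolver, thread pool, and error handling.
import socket

def domains2ips_sketch(domains, timeout=3):
    """Assumed behaviour: return (ip_list, ["domain,ip", ...]) for resolvable names."""
    socket.setdefaulttimeout(timeout)
    ips, lines = [], []
    for domain in domains:
        try:
            _, _, addresses = socket.gethostbyname_ex(domain)
        except socket.error:
            continue  # unresolvable names are simply skipped
        for ip in addresses:
            ips.append(ip)
            lines.append("%s,%s" % (domain, ip))
    return ips, lines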
def main():
    args = adjust_args()
    print "[-] Enumerating subdomains now for %s" % args.domain

    # do zone transfer checking
    zonetransfer(args.domain).check()

    # all possible result containers
    Result_Sub_Domains = []
    Result_Similar_Domains = []
    Result_Related_Domains = []
    Result_Emails = []
    Result_Subnets = []
    Temp_IP_List = []
    Domain_IP_Records = []

    ########## use search engines and web APIs to query subdomains and related domains ##########
    Threadlist = []
    q_domains = Queue.Queue()  # to receive return values; queues keep this thread safe
    q_similar_domains = Queue.Queue()
    q_related_domains = Queue.Queue()
    q_emails = Queue.Queue()

    for engine in [Alexa, Chaxunla, CrtSearch, DNSdumpster, Googlect, Hackertarget, Ilink,
                   Netcraft, PassiveDNS, Pgpsearch, Sitedossier, ThreatCrowd, Threatminer,
                   Virustotal]:
        #print callsites_thread(engine, domain, proxy)
        #print engine.__name__
        if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
            proxy = args.proxy  # proxy taken from the config file or command-line argument
        else:
            proxy = {}  # no proxy
        t = threading.Thread(target=callsites_thread,
                             args=(engine, args.domain, q_domains, q_similar_domains,
                                   q_related_domains, q_emails, proxy))
        Threadlist.append(t)

    for engine in [search_ask, search_baidu, search_bing, search_bing_api, search_dogpile,
                   search_duckduckgo, search_exalead, search_fofa, search_google,
                   search_google_cse, search_shodan, search_so, search_yahoo, search_yandex]:
        if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
            proxy = args.proxy
        else:
            proxy = {}
        t = threading.Thread(target=callengines_thread,
                             args=(engine, args.domain, q_domains, q_emails, proxy, 500))
        t.setDaemon(True)  # daemon thread, detached from the main process; probably not needed here
        Threadlist.append(t)

    #for t in Threadlist:
    #    print t
    for t in Threadlist:  # use start(), not run()
        t.start()
    for t in Threadlist:  # two loops are needed: starting and joining in one loop would serialize the threads
        t.join()  # the main thread waits for each worker thread to finish

    while not q_domains.empty():
        Result_Sub_Domains.append(q_domains.get())
    while not q_emails.empty():
        Result_Emails.append(q_emails.get())
    while not q_related_domains.empty():
        Result_Related_Domains.append(q_related_domains.get())

    ########## use subDomainsBrute to get more subdomains ##########
    if args.bruteforce:
        print G + "[-] Starting bruteforce using subDomainsBrute.." + W
        d = SubNameBrute(target=args.domain)
        d.run()
        Domain_IP_Records.extend(d.result_lines)
        Result_Sub_Domains.extend(d.result_domains)
        Temp_IP_List.extend(d.result_ips)

    ########## post-processing ##########
    ips, lines = domains2ips(Result_Sub_Domains)
    Temp_IP_List.extend(ips)
    Domain_IP_Records.extend(lines)
    Result_Subnets.extend(iprange(Temp_IP_List))  # 1. IP ranges
    Result_Sub_Domains = sorted(list(set(tolower_list(Result_Sub_Domains))))  # 2. subdomains, including brute-force results
    Domain_IP_Records = list(set(Domain_IP_Records))  # 3. domain-to-IP resolution records
    Result_Emails = sorted(list(set(Result_Emails)))  # 4. emails
    Result_Related_Domains = sorted(list(set(Result_Related_Domains)))  # 5. related domains

    ToPrint = Result_Sub_Domains  # extend() returns None, so it cannot be used inline
    ToPrint.extend(Result_Emails)
    ToPrint.extend(Result_Subnets)
    ToPrint.extend(Result_Related_Domains)

    # outer braces are doubled so str.format() treats them as literals
    jsonString = "{{'Result_Sub_Domains':{0},'Result_Emails':{1},'Result_Subnets':{2},'Result_Related_Domains':{3}}}"\
        .format(Result_Sub_Domains, Result_Emails, Result_Subnets, Result_Related_Domains)
    print jsonString
    return jsonString
def main():
    try:
        banner()
        args = adjust_args()
        print "[-] Enumerating subdomains now for %s" % args.domain

        # do zone transfer checking
        issuccess = zonetransfer(args.domain).check()
        if issuccess:
            print "[+] Zone Transfer Results saved to output directory"
            exit()

        # all possible result containers
        Result_Sub_Domains = []
        Result_Similar_Domains = []
        Result_Related_Domains = []
        Result_Emails = []
        Result_Subnets = []
        Line_Records = []

        ########## use search engines and web APIs to query subdomains and related domains ##########
        Threadlist = []
        q_domains = Queue.Queue()  # to receive return values; queues keep this thread safe
        q_similar_domains = Queue.Queue()
        q_related_domains = Queue.Queue()
        q_emails = Queue.Queue()

        for engine in [Alexa, Chaxunla, CrtSearch, DNSdumpster, Googlect, Hackertarget, Ilink,
                       Netcraft, PassiveDNS, Pgpsearch, Sitedossier, ThreatCrowd, Threatminer,
                       Virustotal]:
            #print callsites_thread(engine, domain, proxy)
            #print engine.__name__
            if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
                proxy = args.proxy  # proxy taken from the config file or command-line argument
            else:
                proxy = {}  # no proxy
            t = threading.Thread(target=callsites_thread,
                                 args=(engine, args.domain, q_domains, q_similar_domains,
                                       q_related_domains, q_emails, proxy))
            Threadlist.append(t)

        for engine in [search_ask, search_baidu, search_bing, search_bing_api, search_dogpile,
                       search_duckduckgo, search_exalead, search_fofa, search_google,
                       search_google_cse, search_shodan, search_so, search_sogou, search_yahoo,
                       search_yandex]:
            if proxy_switch == 1 and engine.__name__ in proxy_default_enabled:
                proxy = args.proxy
            else:
                proxy = {}
            t = threading.Thread(target=callengines_thread,
                                 args=(engine, args.domain, q_domains, q_emails, proxy, 500))
            t.setDaemon(True)  # daemon thread, detached from the main process; probably not needed here
            Threadlist.append(t)

        #for t in Threadlist:
        #    print t
        for t in Threadlist:  # use start(), not run()
            t.start()
        for t in Threadlist:  # two loops are needed: if start() and join() ran in the same loop, each thread would be joined right after starting and nothing would run concurrently
            t.join()  # the main thread waits for each worker thread to finish

        while not q_domains.empty():
            Result_Sub_Domains.append(q_domains.get())
        while not q_emails.empty():
            Result_Emails.append(q_emails.get())
        while not q_related_domains.empty():
            Result_Related_Domains.append(q_related_domains.get())

        ########## use subDomainsBrute to get more subdomains ##########
        if args.bruteforce:
            print G + "[-] Starting bruteforce using subDomainsBrute.." + W
            d = SubNameBrute(target=args.domain)
            d.run()
            Result_Sub_Domains.extend(d.result_domains)

        ########## post-processing ##########
        print G + "[-] Starting DNS queries ..." + W
        Result_Sub_Domains = sorted(list(set(tolower_list(Result_Sub_Domains))))  # 2. subdomains, including brute-force results
        if args.title:  # also fetch page titles
            ips, lines = targets2lines(Result_Sub_Domains)
            iplist = set(iprange2iplist(iprange(ips))) - set(ips)
            ips1, lines1 = targets2lines(iplist)
            lines.extend(lines1)
        else:
            ips, lines = domains2ips(Result_Sub_Domains)
        Result_Subnets.extend(iprange(ips))  # 1. IP ranges
        #Result_Sub_Domains = sorted(list(set(tolower_list(Result_Sub_Domains))))  # 2. subdomains, including brute-force results
        Line_Records = list(set(lines))  # 3. domain-to-IP resolution records
        Result_Emails = sorted(list(set(Result_Emails)))  # 4. emails
        Result_Related_Domains = sorted(list(set(Result_Related_Domains)))  # 5. related domains

        fp = open("{0}-{1}".format(args.output.replace(".txt", ""), "lines.csv"), "wb")
        fp.writelines("\n".join(Line_Records))
        fp.close()

        ToPrint = Result_Sub_Domains  # extend() returns None, so it cannot be used inline
        ToPrint.extend(Result_Emails)
        ToPrint.extend(Result_Subnets)
        ToPrint.extend(Result_Related_Domains)
        for item in ToPrint:
            print G + item + W

        fp = open(args.output, "wb")
        fp.writelines("\n".join(ToPrint).encode("utf-8"))
        fp.close()

        print "[+] {0} sub domains found in total".format(len(Result_Sub_Domains))
        print "[+] {0} related domains found in total".format(len(Result_Related_Domains))
        print "[+] {0} emails found in total".format(len(Result_Emails))
        print "[+] Results saved to {0}".format(args.output)
    except KeyboardInterrupt as e:
        logger.info("Exit. Due To KeyboardInterrupt")
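
# iprange appears to collapse the resolved IPs into subnet summaries (the Result_Subnets
# output). The sketch below is one possible reading, assuming C-class (/24) grouping with
# hit counts; the real helper may aggregate differently.
from collections import Counter

def iprange_sketch(ip_list):
    """Assumed behaviour: summarise IPv4 addresses as /24 ranges with a count per range."""
    prefixes = Counter(".".join(ip.split(".")[:3]) for ip in ip_list)
    return ["%s.0/24    %d ips" % (prefix, count)
            for prefix, count in sorted(prefixes.items())]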
def main():
    try:
        banner()
        args = adjust_args()
        subdomains = []
        print("[-] Enumerating subdomains now for %s" % args.domain)

        # do zone transfer checking
        zonetransfer(args.domain).check()

        Threadlist = []
        q_domains = Queue.Queue()  # to receive return values; queues keep this thread safe
        q_emails = Queue.Queue()
        useragent = random_useragent(allow_random_useragent)

        for engine in [Alexa, Chaxunla, CrtSearch, DNSdumpster, Googlect, Ilink, Netcraft,
                       PassiveDNS, Pgpsearch, Sitedossier, ThreatCrowd, Threatminer]:
            #print callsites_thread(engine, domain, proxy)
            t = threading.Thread(target=callsites_thread,
                                 args=(engine, args.domain, q_domains, q_emails, args.proxy))
            Threadlist.append(t)

        for engine in [search_ask, search_baidu, search_bing, search_bing_api, search_dogpile,
                       search_duckduckgo, search_exalead, search_fofa, search_google,
                       search_google_cse, search_shodan, search_so, search_yahoo, search_yandex]:
            if proxy_switch == 1 and engine in proxy_default_enabled:
                pass
            else:
                proxy = {}
            t = threading.Thread(target=callengines_thread,
                                 args=(engine, args.domain, q_domains, q_emails, useragent,
                                       proxy, 500))
            t.setDaemon(True)  # daemon thread, detached from the main process; probably not needed here
            Threadlist.append(t)

        #for t in Threadlist:
        #    print t
        for t in Threadlist:  # use start(), not run()
            t.start()
        for t in Threadlist:  # two loops are needed: starting and joining in one loop would serialize the threads
            t.join()  # the main thread waits for each worker thread to finish

        while not q_domains.empty():
            subdomains.append(q_domains.get())
        emails = []
        while not q_emails.empty():
            emails.append(q_emails.get())

        if args.bruteforce:
            print("[-] Starting bruteforce using subDomainsBrute..")
            d = SubNameBrute(target=args.domain)
            d.run()
            brute_lines = d.result_lines
            brute_domains = d.result_domains
            brute_ips = d.result_ips
        else:
            brute_ips = []
            brute_lines = []
            brute_domains = []

        if subdomains is not None:  # prepare output
            ips, lines = domains2ips(subdomains)  # resolve domains gathered from websites and search engines
            #IP_list.extend(brute_ips)
            #IPrange_list = iprange(IP_list)
            subdomains.extend(brute_domains)
            subdomains = sorted(list(set(subdomains)))
            lines = list(set(lines))
            #emails = sorted(list(set(emails)))
            #subdomains.extend(emails)  # extend() returns None, so it cannot be used inline
            #subdomains.extend(IPrange_list)
            #print type(subdomains)
            for subdomain in subdomains:
                print(subdomain)
            subdomains.extend(lines)
            fp = open(args.output, "a+")
            #fp.writelines("\n".join(subdomains).decode("utf-8"))
            fp.writelines("\n".join(subdomains).encode("utf-8"))
            print("[+] {0} domains found in total".format(len(subdomains)))
            print("[+] {0} emails found in total".format(len(emails)))
            print("[+] Results saved to {0}".format(args.output))
    except KeyboardInterrupt as e:
        logger.info("exit. due to KeyboardInterrupt")
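
# Every revision starts with zonetransfer(domain).check(). The sketch below shows what
# such a check typically does when built on dnspython; the class name, the return
# convention, and the use of dnspython here are assumptions, not the project's actual
# zonetransfer implementation.
import dns.query
import dns.resolver
import dns.zone

def zone_transfer_check_sketch(domain):
    """Try AXFR against each authoritative NS; return the record names on success, else None."""
    try:
        ns_answers = dns.resolver.query(domain, "NS")
    except Exception:
        return None
    for ns in ns_answers:
        try:
            zone = dns.zone.from_xfr(dns.query.xfr(str(ns.target), domain, timeout=5))
            return sorted(zone.nodes.keys())  # transfer allowed: the server is misconfigured
        except Exception:
            continue  # most servers refuse AXFR, which is the expected outcome
    return None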