def analyse(self):
    """Query the node IPs registered under the configured top-level domain.

    Logs in, fetches (and caches) an authenticity token if needed, then
    POSTs a search for ``self.topDomain`` to ``self.interface_url`` and
    scrapes every ``<a target="_blank">`` link out of the response.
    Link texts that look like IPs are normalized to their C-range via
    ``getCrangeIP``; everything else is treated as a (sub)domain.

    Returns:
        dict with keys ``'ip'`` and ``'domain'`` (deduplicated lists),
        also stored in ``self.retlist``; or None on login, token,
        network or parse failure.
    """
    # Guard clauses: bail out early instead of nesting the happy path.
    if not self.bLogin():
        return None
    # Reuse a previously fetched token; fetching costs an extra request.
    if self.searchAuth_token is None:
        self.searchAuth_token = self.get_auth_token(self.interface_url)
    if self.searchAuth_token is None:
        return None
    try:
        para = {'authenticity_token': self.searchAuth_token,
                'all': 'true',
                'domain': self.topDomain}
        post_req = urllib2.Request(self.interface_url)
        post_data = urllib.urlencode(para)
        resp = urllib2.urlopen(post_req, post_data)
        result_soup = BeautifulSoup(resp.read())
        link_list = result_soup.findAll('a', attrs={'target': '_blank'})
        ip_list = []
        url_list = []
        for link in link_list:
            # Hoist get_text(): the original called it up to 3x per link.
            text = link.get_text()
            if is_vaild_ip(text):
                ip_list.append(getCrangeIP(text))
            else:
                url_list.append(text)
        self.retlist = {'ip': list(set(ip_list)),
                        'domain': list(set(url_list))}
    except Exception:
        # Best-effort scrape: network/parse errors are reported as
        # "no result" (None), matching the original contract.
        return None
    return self.retlist
def start(self, domain):
    """Brute-force subdomains of *domain* using the dictionary files.

    Runs ``DNSBrute`` over the large domain dictionary, collects every
    resolvable subdomain together with the C-range form of each of its
    IPs, stores the deduplicated result in ``self.result`` and returns it.
    """
    super(SubDomainFindByDit, self).start(domain)
    brute = DNSBrute(domain,
                     names_file=DOMAIN_DIC_PATH + '/domain_dic_large.txt',
                     next_sub_file=DOMAIN_DIC_PATH + '/next_sub.txt')
    brute.run()
    available = brute.getAvailDomain()
    found_urls = list(available.keys())
    # Flatten every resolved IP, normalized to its C-range.
    found_ips = [getCrangeIP(ip) for ips in available.values() for ip in ips]
    self.result = {'ip': list(set(found_ips)),
                   'domain': list(set(found_urls))}
    logger.info('subdomain by dic found domain count:%d' % len(found_urls))
    super(SubDomainFindByDit, self).complete()
    return self.result
def start(self, domain):
    """Record the C-range of *domain* (assumed to be an IP) in the result.

    Stores ``{'ip': [getCrangeIP(domain)]}`` into ``self.result`` and
    returns it after signalling completion.
    """
    # NOTE(review): an identical definition of this method appears again
    # further down; the later one shadows this one — confirm and remove one.
    super(IpCrange, self).start(domain)
    crange = getCrangeIP(domain)
    self.result.update({'ip': [crange]})
    super(IpCrange, self).complete()
    return self.result
def start(self, domain):
    """Store the C-range form of *domain* under the 'ip' key of the result.

    Delegates start/complete bookkeeping to the parent class and returns
    ``self.result``.
    """
    # NOTE(review): this duplicates the method definition just above;
    # Python keeps this later one — consider deleting the duplicate.
    super(IpCrange, self).start(domain)
    self.result.update({'ip': [getCrangeIP(domain)]})
    super(IpCrange, self).complete()
    return self.result