Example #1
def fastDir(newurl, target, module):
    '''
    FastDir scan without portscan
    '''
    output_file = report_filename(target, module)
    newurl = url_handle(newurl)
    ip, baidu_status, github_status = '', [], []
    print '[*] Scan new_url: ' + newurl
    if baidu_engine:
        print '[*] Check Baidu site: %s' % urlparse.urlparse(newurl).hostname
        baidu_status = baidu_check(newurl)
    if github_engine:
        print '[*] Check Github status: %s' % urlparse.urlparse(
            newurl).hostname
        github_status = github_check(newurl)
    try:
        newtitle, code, length, content = '', '', '', ''
        try:
            newtitle, code, length, content = getitle(url=newurl)
        except Exception:
            # print traceback.format_exc()
            pass
        if code in range(200, 405) and code != 401:  # treat 200-404 as alive, but skip 401
            try:
                print '[+] Get title: %s, status_code: %s, content length: %s' % (
                    newtitle, code, length)
            except:
                pass
            alllink, alllinks, emails, ips = [], [], [], []
            if title_filter not in newtitle and filter_list(
                    module=newtitle, filter_list=title_filter_list):
                try:
                    alllink, alllinks, emails, ips = getallink(newurl, content)
                except Exception:
                    # print traceback.format_exc()
                    pass
                dirpaths = []
                try:
                    dir_urls = scandir_again(newurl, alllink)
                    if len(dir_urls) < link_maxnum:  # skip weak-file checks when too many links
                        for dir_url in dir_urls:
                            dirpaths += weakfile(dir_url)
                except Exception:
                    # print traceback.format_exc()
                    pass
                if len(dirpaths) > dirpaths_maxnum:  # collapse an overlong result list
                    dirpaths = ["more_path"]
                baidu_dirs = ''
                if baidu_dir_engine:
                    try:
                        baidu_dirs = baidu_dir(
                            command='site:%s' %
                            urlparse.urlparse(newurl).hostname,
                            key_domain=urlparse.urlparse(newurl).hostname)
                    except Exception:
                        # print traceback.format_exc()
                        pass
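These snippets lean on helpers defined elsewhere in the project (url_handle, report_filename, getitle, write_file, and others). As a hedged sketch only, here is one plausible shape for two of them, inferred from how the examples call them; the real implementations may differ:

# Hypothetical sketches, inferred from call sites; not the project's actual code.
import os
import time

def url_handle(url):
    # Assumed contract: ensure a scheme and strip any trailing slash.
    if '://' not in url:
        url = 'http://' + url
    return url.rstrip('/')

def report_filename(target, module):
    # Assumed contract: one report file per target/module pair.
    stamp = time.strftime('%Y%m%d')
    return os.path.join('reports', '%s_%s_%s.html' % (target, module, stamp))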
Example #2
def getallink(url, content):
	'''
	Get all links, emails and IPs from a response body
	'''
	links, emails, ips, check = [], [], [], []
	tags = ['a', 'A', 'link', 'script', 'area', 'iframe', 'form']  # img tag not included
	tos = ['href', 'src', 'action']
	if url[-1:] == '/':
		url = url[:-1]
	try:
		print '[*] Now regex alllinks, emails, ips'
		emails_source = email_regex(str(content))
		ips = ip_regex(str(content))
		# print '[*] Now regex urls'
		# urls = url_regex(content)
		for tag in tags:
			for to in tos:
				link = re.findall(r'<%s.*?%s="(.*?)"' % (tag, to), str(content))
				for i in link:
					if i not in check and filter_list(module=i, filter_list=links_filter) and i != '':
						check.append(i)
						if '://' in i:
							i = i.replace(' ', '')
							if str(urlparse.urlparse(i).path) in ['/', ''] and str(urlparse.urlparse(i).query) in ['/', '']:
								link_flag = '<a href="' + i + '" target="_blank">' + urlparse.urlparse(str(i)).netloc + '</a>'
							else:
								link_flag = '<a href="' + i + '" target="_blank">' + quote(urlparse.urlparse(str(i)).path + urlparse.urlparse(str(i)).query)[:25] + '</a>'
						elif '//' in i:
							if str(urlparse.urlparse(i).path) in ['/', ''] and str(urlparse.urlparse(i).query) in ['/', '']:
								link_flag = '<a href="http:' + i + '" target="_blank">' + urlparse.urlparse(str(i)).netloc + '</a>'
							else:
								link_flag = '<a href="http:' + i + '" target="_blank">' + quote(urlparse.urlparse(str(i)).path + urlparse.urlparse(str(i)).query)[:25] + '</a>'
						else:
							link_flag = '<a href="' + url + '/' + i + '" target="_blank">' + quote(i)[:25] + '</a>'
							check.append(url + '/' + i)  # record the resolved relative link, matching the href above
						links.append(link_flag)
	except Exception:
		# print traceback.format_exc()
		print '[!] Get regex link error'
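The extraction above is a nested loop of one regular expression per tag/attribute pair. A small standalone demo of that exact pattern (the sample markup is invented for illustration):

# Demo of getallink's tag/attribute regex on made-up HTML.
import re

sample = '<a href="/admin/login.php">admin</a><script src="http://cdn.example.com/app.js"></script>'
for tag in ['a', 'script']:
	for to in ['href', 'src']:
		for match in re.findall(r'<%s.*?%s="(.*?)"' % (tag, to), sample):
			print '[demo] <%s %s> -> %s' % (tag, to, match)
# Note the lazy .*? can cross tag boundaries: the a/src pattern also reaches
# into the following <script> tag, a known looseness of regexing HTML.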
Example #3
def checkDir(url, target, module):
    '''
    Main requests function with Portscan && Dirscan
    '''
    output_file = report_filename(target, module)
    url = url_handle(url)
    try:
        if url not in filter_urls and filter_list(module=url,
                                                  filter_list=sub_filter_list):
            filter_urls.append(url)
            ip, open_ports, baidu_status, github_status = url, [], [], []
            print '[*] Now scanning: ' + url
            if module in ['autoscan', 'dirscan', 'single']:  # Handle c_ip scan
                if baidu_engine:
                    print '[*] Check Baidu site: %s' % urlparse.urlparse(
                        url).hostname
                    baidu_status = baidu_check(url)
                if github_engine:
                    print '[*] Check Github status: %s' % urlparse.urlparse(
                        url).hostname
                    github_status = github_check(url)
            try:
                ip = url2ip(url)
                if not is_internal_ip(ip) and ip not in filter_ips and ip != '':  # filter internal IPs
                    print '[+] Get url2ip: ' + ip
                    open_ports = portscan(ip)
                    filter_ips[ip] = open_ports
                    write_file(
                        str(ip) + ',' +
                        str(open_ports).replace('[', '').replace(']', ''),
                        handle_ext(output_file) + portscan_opens_file)
                    if len(open_ports) > openports_maxnum:
                        print '[!] Possible port WAF: too many open ports'
                        write_file(
                            ip,
                            handle_ext(output_file) + portscan_maxnum_file)
                        open_ports = []
                else:
                    open_ports = filter_ips[ip]
            except Exception:
                # print traceback.format_exc()
                write_file(url, handle_ext(output_file) + url2ip_error_file)
            print '[+] Get open ports: ' + str(open_ports)
            if open_ports == []:  # or 80 not in open_ports
                try:
                    newtitle, code, length, content = '', '', '', ''
                    try:
                        newtitle, code, length, content = getitle(url)
                    except Exception:
                        # print traceback.format_exc()
                        pass
                    if code in range(200, 405) and code != 401:  # treat 200-404 as alive, but skip 401
                        try:
                            print '[+] Get title: %s, status_code: %s, content length: %s' % (
                                newtitle, code, length)
                        except:
                            pass
                        write_file(
                            url,
                            handle_ext(output_file) +
                            '/%s_alive_urls.txt' % handle_ext_old(target))
                        if title_filter not in newtitle and filter_list(
                                module=newtitle,
                                filter_list=title_filter_list):
                            alllink, alllinks, emails, ips = [], [], [], []
                            try:
                                alllink, alllinks, emails, ips = getallink(
                                    url, content)
                            except Exception:
                                # print traceback.format_exc()
                                pass
                            dirpaths = []
                            try:
                                dir_urls = scandir_again(url, alllink)
                                if len(dir_urls) < link_maxnum:  # skip weak-file checks when too many links
                                    for dir_url in dir_urls:
                                        dirpaths += weakfile(dir_url)
                            except Exception:
                                # print traceback.format_exc()
                                pass
                            if len(dirpaths) > dirpaths_maxnum:  # collapse an overlong result list
                                dirpaths = ["more_path"]
                            weakuri = []
                            try:
                                weakuri = dirscan(url)
                            except Exception:
                                # print traceback.format_exc()
                                pass
                            baidu_dirs = ''
                            if baidu_dir_engine and module in [
                                    'autoscan', 'dirscan', 'single'
                            ]:
                                try:
                                    baidu_dirs = baidu_dir(
                                        command='site:%s' %
                                        urlparse.urlparse(url).hostname,
                                        key_domain=urlparse.urlparse(
                                            url).hostname)
                                except Exception:
                                    # print traceback.format_exc()
                                    pass
                            weakuri = baidu_status + github_status + weakuri
                            try:
                                write_file(
                                    '<tr><td><a href="%s" target="_blank">%s</a></td><td>%s</td><td><font color="blue">%s</font></td><td><font color="red">%s</font></td><td>%s&nbsp;b</td><td>%s</td><td><font color="blue">%s%s</font></td><td><ul><li>%s</li><li>%s</li></ul></td></tr>\n\n'
                                    % (url, url, ip, code, newtitle, length,
                                       list(set(dirpaths + weakuri)),
                                       alllinks, baidu_dirs, emails, ips),
                                    output_file)
                            except Exception:
                                # print traceback.format_exc()
                                print '[!] output_error'
                                write_file(
                                    url,
                                    handle_ext(output_file) +
                                    output_error_file)
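checkDir only proceeds to the title grab and dirscan when portscan(ip) comes back empty. portscan itself is not shown in these examples; a minimal TCP-connect sketch with the same contract (the port list and timeout here are assumptions, not the project's values):

# Hypothetical portscan with the contract checkDir expects:
# take an IP string, return a list of open ports ([] = nothing found).
import socket

def portscan(ip, ports=(21, 22, 80, 443, 3306, 6379, 8080), timeout=2):
    open_ports = []
    for port in ports:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        try:
            if s.connect_ex((ip, port)) == 0:  # 0 means the TCP connect succeeded
                open_ports.append(port)
        finally:
            s.close()
    return open_ports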
Example #4
     pass
 if code in range(200, 405) and code != 401:  # treat 200-404 as alive, but skip 401
     try:
         print '[+] Get title: %s, status_code: %s, content length: %s' % (
             newtitle, code, length)
     except:
         pass
     write_file(
         newurl,
         handle_ext(output_file) +
         '/%s_alive_urls.txt' % handle_ext_old(target))
     if title_filter not in newtitle and filter_list(
             module=newtitle,
             filter_list=title_filter_list):
         alllink, alllinks, emails, ips = [], [], [], []
         try:
             alllink, alllinks, emails, ips = getallink(
                 newurl, content)
         except Exception:
             # print traceback.format_exc()
             pass
         dirpaths = []
         try:
             dir_urls = scandir_again(newurl, alllink)
             if len(dir_urls) < link_maxnum:  # skip weak-file checks when too many links
                 for dir_url in dir_urls:
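This fragment feeds every discovered directory URL to weakfile, which is not defined in these examples. A hedged sketch of its likely contract (probe a directory for common sensitive files and return the hits; the wordlist below is an assumption):

# Hypothetical weakfile: probe one directory URL for common weak files.
import requests

def weakfile(dir_url, timeout=5):
    found = []
    candidates = ['.git/config', '.svn/entries', 'backup.zip', 'www.tar.gz']
    for name in candidates:
        probe = dir_url.rstrip('/') + '/' + name
        try:
            r = requests.get(probe, timeout=timeout, verify=False)
            if r.status_code == 200 and len(r.content) > 0:
                found.append(probe)
        except requests.RequestException:
            pass
    return found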
Example #5
								link_flag = '<a href="'+i+'" target=_blank />'+urlparse.urlparse(str(i)).netloc+'</a>'
							else:
								link_flag = '<a href="'+i+'" target=_blank />'+quote(urlparse.urlparse(str(i)).path+urlparse.urlparse(str(i)).query)[:25]+'</a>'
						elif '//' in i:
							if str(urlparse.urlparse(i).path) in ['/',''] and str(urlparse.urlparse(i).query) in ['/','']:
								link_flag = '<a href="http:'+i+'" target=_blank />'+urlparse.urlparse(str(i)).netloc+'</a>'
							else:
								link_flag = '<a href="http:'+i+'" target=_blank />'+quote(urlparse.urlparse(str(i)).path+urlparse.urlparse(str(i)).query)[:25]+'</a>'							
						else:
							link_flag = '<a href="'+url+'/'+i+'" target=_blank />'+quote(i)[:25]+'</a>'
							check.append(url + i)
						links.append(link_flag)
	except Exception,e:
		# print traceback.format_exc()
		print '[!] Get regex link error'
		pass
	emails_handle = [email[-30:] for email in set(emails_source)]  # trim overlong matches from the email regex
	for email_check in emails_handle:
		if filter_list(module=email_check, filter_list=emails_filter):  # drop false positives such as image filenames
			emails.append(email_check)
	ips = list(set(ips))
	if len(links) > 10:
		# Click more_links to get detail result
		mainDiv = ranStr()
		childDiv = ranStr()
		return check,u'''<div id="%s" style="color:red" onclick="document.all.%s.style.display=(document.all.%s.style.display =='none')?'':'none'">[more_links]</div><div id="%s" style="display:none">%s</div>'''%(mainDiv,childDiv,childDiv,childDiv,'<br />'.join(links)),'<br />'.join(emails),'<br />'.join(ips)
	else:
		return check,links,'<br />'.join(emails),'<br />'.join(ips)

if __name__ == '__main__':
	pass
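Almost every check in these examples goes through filter_list(module=..., filter_list=...), which is defined elsewhere in the project. Judging from the call sites, it returns True only when none of the filter terms occur in the given string; a hedged sketch of that inferred behavior:

# Hypothetical filter_list, inferred from call sites; the real one may differ.
def filter_list(module, filter_list):
	# module: the string under test; filter_list: substrings that disqualify it.
	for word in filter_list:
		if word in module:
			return False
	return True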
Example #6
def checkFast(url, target, module):
	'''
	Main requests function without Dirscan
	'''
	output_file = report_filename(target, module)
	url = url_handle(url)
	try:
		if url not in filter_urls and filter_list(module=url, filter_list=sub_filter_list):
			filter_urls.append(url)
			print '[*] Now scanning: ' + url
			ip, open_ports = url, []
			try:
				ip = url2ip(url)
				if not is_internal_ip(ip) and ip not in filter_ips and ip != '':  # filter internal IPs
					print '[+] Get url2ip: ' + ip
					open_ports = portscan(ip)
					filter_ips[ip] = open_ports
					write_file(str(ip) + ',' + str(open_ports).replace('[', '').replace(']', ''), handle_ext(output_file) + portscan_opens_file)
					if len(open_ports) > openports_maxnum:
						print '[!] Possible port WAF: too many open ports'
						write_file(ip, handle_ext(output_file) + portscan_maxnum_file)
						open_ports = []
				else:
					open_ports = filter_ips[ip]
			except Exception:
				# print traceback.format_exc()
				write_file(url, handle_ext(output_file) + url2ip_error_file)
			print '[+] Get open ports: ' + str(open_ports)
			if open_ports == []:  # or 80 not in open_ports
				try:
					newtitle, code, length, content = '', '', '', ''
					try:
						newtitle, code, length, content = getitle(url)
					except Exception:
						# print traceback.format_exc()
						pass
					if code in range(200, 405):
						try:
							print '[+] Get title: %s, status_code: %s, content length: %s' % (newtitle, code, length)
						except:
							pass
						write_file(url, handle_ext(output_file) + '/%s_alive_urls.txt' % handle_ext_old(target))  # save alive `host:port` for dirsearch
						alllink, alllinks, emails, ips = [], [], [], []
						if title_filter not in newtitle and filter_list(module=newtitle, filter_list=title_filter_list):
							try:
								alllink, alllinks, emails, ips = getallink(url, content)
							except Exception:
								# print traceback.format_exc()
								pass
							try:
								write_file('<tr><td><a href="%s" target="_blank">%s</a></td><td>%s</td><td><font color="blue">%s</font></td><td><font color="red">%s</font></td><td>%s&nbsp;b</td><td><font color="blue">%s</font></td><td><ul><li>%s</li><li>%s</li></ul></td></tr>\n\n' % (url, url, ip, code, newtitle, length, alllinks, emails, ips), output_file)
							except Exception:
								# print traceback.format_exc()
								print '[!] output_error'
								write_file(url, handle_ext(output_file) + output_error_file)
						else:
							print '[!] Filter title'
							write_file(url, handle_ext(output_file) + title_filter_file)
				except Exception:
					# print traceback.format_exc()
					pass
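checkFast and checkDir both gate the portscan on url2ip and is_internal_ip, neither of which appears in these examples. A hedged sketch of plausible implementations (the private-range handling below is an assumption):

# Hypothetical sketches of url2ip and is_internal_ip, inferred from their use above.
import socket
import urlparse

def url2ip(url):
	# Resolve the URL's hostname to an IPv4 address string.
	return socket.gethostbyname(urlparse.urlparse(url).hostname)

def is_internal_ip(ip):
	# RFC 1918 private ranges plus loopback, matched on string prefixes.
	if ip.startswith(('10.', '127.', '192.168.')):
		return True
	if ip.startswith('172.'):
		return 16 <= int(ip.split('.')[1]) <= 31
	return False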
Example #7
def github_site(subdomain, key_domain):
    headers = requests_headers()
    proxies = requests_proxies()
    if '://' in key_domain:
        key_domain = urlparse.urlparse(key_domain).hostname
    github_domains = []
    session = requests.Session()
    headers['Cookie'] = github_cookie
    try:
        # check_login = '******'
        # req_check = session.get(url=check_login,headers=headers,proxies=proxies,timeout=10,verify=False).content
        # if github_account in req_check:
        # 	print '[*] Github site:domain login check Success'
        headers['Host'] = 'github.com'
        headers['Referer'] = 'https://github.com/search?utf8=%E2%9C%93&q=*&type=Code'
        github_url = 'https://github.com/search?q={}&type=Code&utf8=%E2%9C%93'.format(
            subdomain)
        req = session.get(url=github_url,
                          headers=headers,
                          proxies=proxies,
                          timeout=10,
                          verify=False).content
        if 'blankslate' not in req:  #if 'code results' in req:
            for page in xrange(1, 100):
                newurl = 'https://github.com/search?p={}&q={}&type=Code&s=&utf8=%E2%9C%93'.format(
                    page, subdomain)
                req_new = session.get(url=newurl,
                                      headers=headers,
                                      proxies=proxies,
                                      timeout=10,
                                      verify=False).content
                req_new = req_new.replace('</em>', '').replace('<em>', '').replace('</span>', '')
                url_regexs = []
                url_regex_url, url_regex_host, url_regex_x, url_regex_a, url_regex_b, url_regex_c, url_regex_b_a, url_regex_c_a, url_regex_d = [], [], [], [], [], [], [], [], []
                try:
                    url_regex_url = re.findall(r'//([\s\S]*?)%s' % key_domain,
                                               req_new)
                except:
                    pass
                try:
                    url_regex_host = re.findall(
                        r'&quot;([\s\S]*?)%s' % key_domain, req_new)
                except:
                    pass
                try:
                    url_regex_x = re.findall(r'&#39;([\s\S]*?)%s' % key_domain,
                                             req_new)
                except:
                    pass
                try:
                    url_regex_a = re.findall(r'/([\s\S]*?)%s' % key_domain,
                                             req_new)
                except:
                    pass
                try:
                    url_regex_b = re.findall(
                        r'\[<span .*?>([\s\S]*?)%s' % key_domain, req_new)
                except:
                    pass
                try:
                    url_regex_b_a = re.findall(r'\[([\s\S]*?)%s' % key_domain,
                                               req_new)
                except:
                    pass
                try:
                    url_regex_c_a = re.findall(r'\(([\s\S]*?)%s' % key_domain,
                                               req_new)
                except:
                    pass
                try:
                    url_regex_c = re.findall(
                        r'\(<span .*?>([\s\S]*?)%s' % key_domain, req_new)
                except:
                    pass
                try:
                    url_regex_d = re.findall(
                        r'<span .*?>([\s\S]*?)%s' % key_domain, req_new)
                except:
                    pass
                url_regexs = url_regex_url + url_regex_host + url_regex_x + url_regex_a + url_regex_b + url_regex_c + url_regex_b_a + url_regex_c_a + url_regex_d
                for sub in url_regexs:
                    if sub not in github_domains and sub_filter not in sub and sub != '.' and filter_list(
                            module=sub, filter_list=github_sub_filter
                    ) and sub[-1:] != '-' and sub[-1:] != '_':
                        sub = sub.replace(' ', '')  # strings are immutable, so rebind the cleaned value
                        if sub[-1:] == '.':
                            subs = sub + key_domain
                        else:
                            subs = sub + '.' + key_domain
                        # github_domainss is presumably a module-level list deduplicating across calls
                        if is_domain(subs) and subs not in github_domainss:
                            print '[+] Get github site:domain > ' + subs
                            github_domainss.append(subs)
                            github_domains.append(subs)
                if 'next_page disabled' in req_new:
                    return github_domains
        else:
            print '[!] github site:domain no result'
        # else:
        # 	print '[!] Github login check Error'
        # 	print '[*] Please try again'
        # 	pass
    except Exception:
        # print traceback.format_exc()
        pass
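A hedged usage sketch: github_site expects a search keyword and the apex domain, and depends on module-level state (github_cookie, github_domainss, the requests helpers). Since the function can return None when the search fails or pagination never reaches the last page, the caller should guard for that:

# Hypothetical call; github_cookie must already hold a valid logged-in
# GitHub session, since code search requires authentication.
if __name__ == '__main__':
    found = github_site('example.com', 'example.com')
    for domain in (found or []):  # github_site may return None
        print '[result] ' + domain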