def start(**kwargs):
    """Probe Elasticsearch ports (9200) for anonymous /_cat access.

    kwargs:
        policy: 'full' rescans every 9200 port; any other value skips
                ports already marked as scanned by this plugin.

    Side effects: creates/updates a Risk row, sends a wechat alert for
    each vulnerable host and marks each port as scanned.
    """
    policy = kwargs['policy']
    if policy == 'full':
        # Use an int like the else-branch did; the original passed the
        # string '9200' here, inconsistent with the increase branch.
        ports = Port.objects.filter(port_num=9200)
    else:
        ports = Port.objects.exclude(scanned__icontains=plugin).filter(
            port_num=9200)
    if not ports:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    protocols = ['http', 'https']
    for port in ports:
        ip = port.asset.ip
        for protocol in protocols:
            try:
                url = protocol + "://" + ip + ":" + str(
                    port.port_num) + "/_cat"
                logger.debug("[%s] [%s] %s" % (plugin, port.id, url))
                # timeout added so a filtered/hung port cannot stall the
                # whole scan; verify=False matches the file's other probes.
                response = requests.get(url, timeout=10, verify=False)
                if "/_cat/master" in response.text:
                    logger.info('[$$$]success, 可以匿名访问')
                    Risk.objects.update_or_create(
                        target=url,
                        risk_type='elasticsearch匿名访问',
                        defaults={'desc': 'elasticsearch匿名访问, ' + url})
                    logger.info("[True], this host is vulnerable")
                    title = 'elasticsearch匿名访问'
                    content = url
                    wechat.send_msg(title, content)
            except Exception as e:
                logger.error(e)
        update_scan_status(port, plugin)
def get_urls(webapp):
    """Crawl a web application with crawlergo and persist discovered URLs.

    Runs the crawlergo crawler (headless Chrome) against webapp.subdomain,
    parses the JSON block it prints after its completion marker, and stores
    each crawled URL carrying a query-parameter name not seen before as a
    WebUrls row flagged scanned='not'. URLs containing 'wp-json' (WordPress
    REST noise) are skipped. Always marks the webapp as scanned by
    'crawlergo', even when the crawl or parsing fails.
    """
    logger.debug("[%s] [%s] %s" % (plugin, webapp.id, webapp.subdomain))
    cmd = "/opt/tools/crawlergo -c /opt/tools/chrome-linux/chrome -o json " + webapp.subdomain
    try:
        rsp = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        out, err = rsp.communicate()
        # crawlergo emits its JSON result after this marker line.
        result = simplejson.loads(
            out.decode().split("--[Mission Complete]--")[1])
        if not result:
            return
        req_list = result["req_list"]
        params = []  # query-parameter names already captured for this webapp
        for req in req_list:
            url = req['url']
            if 'wp-json' in url:
                continue
            parseResult = parse.urlparse(url)
            param_dict = parse.parse_qs(parseResult.query)
            # Parameter names in this URL that have not been recorded yet;
            # only such URLs are worth storing for later scanning.
            res = set(param_dict.keys()).difference(set(params))
            if res:
                logger.debug("[%s] [%s] %s" % (plugin, webapp.id, url))
                WebUrls.objects.update_or_create(url=url, webapp=webapp,
                                                 scanned='not')
                for p in res:
                    if p not in params:
                        params.append(p)
    except Exception as e:
        logger.critical(e)
    finally:
        update_scan_status(webapp, 'crawlergo')
def check(webapp):
    """Check a webapp's subdomain for subdomain-takeover via CNAME records.

    Resolves the hostname with the `host` command; when the record is an
    alias (CNAME) pointing at one of the takeover-prone services listed in
    the module-level `payloads`, records a Risk. AWS ELB aliases are
    whitelisted. Bare-IP targets are skipped entirely.
    """
    subdomain = webapp.subdomain
    logger.debug("[%s] [%s] %s" % (plugin, webapp.id, subdomain))
    netloc = urlparse(subdomain).netloc.split(':')[0]
    if not bool(re.search('[a-z]', netloc)):  # domain name vs raw IP check
        return
    try:
        cmd = 'host ' + netloc
        res = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        out, err = res.communicate()
        out = out.decode('utf-8')
        # Only CNAME records ('alias' in host output) can be taken over.
        if 'alias' not in out:
            return
        # AWS load balancer aliases are expected and not takeover targets.
        if 'elb.amazonaws.com' in out:
            return
        for payload in payloads:
            payload = payload.lower()
            if payload in out:
                Risk.objects.update_or_create(target=subdomain,
                                              risk_type='子域名劫持',
                                              defaults={'desc': payload})
                logger.info('[$$$] 发现漏洞:%s, %s' % (subdomain, payload))
        update_scan_status(webapp, 'subdomain_takeover')
    except Exception as e:
        logger.critical(e)
def start(**kwargs):
    """Run the xray scanner against parameterised WebUrls and whole WebApps.

    kwargs:
        webapps: queryset of WebApp rows to crawl-scan.
        policy:  'full' rescans everything (404 apps excluded); otherwise
                 rows already marked scanned by this plugin are skipped.

    Side effects: spawns xray subprocesses, writes HTML reports under
    <BASE_DIR>reports/xray/, and marks each scanned object.
    """
    webapps = kwargs['webapps']
    policy = kwargs['policy']
    # Reference only: the plugin set xray is expected to cover (unused here).
    xray_plugins = 'xss, cmd_injection, crlf_injection, jsonp, path_traversal, redirect, sqldet, ssrf, phantasm'
    if policy == 'full':
        weburls = WebUrls.objects.filter(url__contains='?')
        webapps = webapps.exclude(status_code=404)
    else:
        weburls = WebUrls.objects.filter(url__contains='?').exclude(scanned__icontains=plugin)
        webapps = WebApp.objects.exclude(scanned__icontains=plugin).exclude(status_code=404)
    if not weburls:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    logger.info('有效url数: %s' % len(weburls))
    # BUG FIX: the original iterated over a literal empty list ([]) here,
    # so the weburls counted and logged above were never actually scanned.
    for weburl in weburls:
        url = weburl.url
        logger.debug("[%s] [%s] %s" % (plugin, weburl.id, url))
        try:
            command = '/opt/tools/xray webscan --url %s --html-output %sreports/xray/%s.html' % (
                url, settings.BASE_DIR, url.split('//')[1])
            p = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            # Poll until xray exits: poll() returns None while running,
            # the return code (possibly 0) once finished.
            while True:
                status = p.poll()
                time.sleep(5)
                if status is not None:
                    break
            update_scan_status(weburl, plugin)
        except Exception as e:
            logger.critical(e)
    for webapp in webapps:
        url = webapp.subdomain
        logger.info('-' * 75)
        logger.debug("[%s] [%s] %s" % (plugin, webapp.id, url))
        try:
            command = '/opt/tools/xray webscan --basic-crawler %s --html-output %sreports/xray/%s.html' % (
                url, settings.BASE_DIR, url.split('//')[1])
            # Blocking call: xray's own crawler drives this scan.
            c = subprocess.call(command, shell=True)
            update_scan_status(webapp, plugin)
        except Exception as e:
            logger.critical(e)
def get_urls(webapp):
    """Brute-force directories on a 403-fronted site with dirsearch.

    Only runs the brute force when the site root answers 403 (any other
    status is recorded on the webapp and the scan is skipped). Each URL
    from dirsearch's simple report is re-fetched and stored as a WebUrls
    row unless the body contains 'not found'. Always marks the webapp as
    scanned, even on failure.

    BUG FIX: the two %s arguments of the dirsearch command were swapped —
    settings.BASE_DIR went into -u (the target) and the subdomain into -w
    (the wordlist path), so dirsearch never scanned the intended host.
    """
    logger.debug("[%s] [%s] %s" % (plugin, webapp.id, webapp.subdomain))
    cmd = "python3 /opt/tools/dirsearch/dirsearch.py -u %s --simple-report=/tmp/dirsearch.txt " \
          "-w %sbrute/Filenames_or_Directories_All.txt -i 200" % (webapp.subdomain, settings.BASE_DIR)
    logger.debug(cmd)
    try:
        try:
            r = requests.get(webapp.subdomain, headers=settings.HTTP_HEADERS,
                             timeout=10, verify=False,
                             allow_redirects=False)
            if r.status_code != 403:
                # Record the real status and skip brute forcing non-403 sites.
                webapp.status_code = r.status_code
                webapp.save()
                logger.info(webapp.status_code)
                return
        except Exception as e:
            logger.error(e)
            return
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        logger.debug(out.decode())
        # NOTE(review): /tmp/dirsearch.txt is a fixed shared path —
        # concurrent runs would clobber each other's report; confirm scans
        # are serialized before parallelizing this plugin.
        with open('/tmp/dirsearch.txt', 'r') as f:
            lines = f.readlines()
        for line in lines:
            url = line.strip()
            try:
                r = requests.get(url, headers=settings.HTTP_HEADERS,
                                 timeout=10, verify=False,
                                 allow_redirects=False)
                # A soft-404 body means the rest of the report is noise.
                if 'not found' in r.text:
                    break
            except Exception as e:
                logger.error(e)
                continue
            logger.info("[%s] %s" % (plugin, url))
            WebUrls.objects.update_or_create(url=url, webapp=webapp)
    except Exception as e:
        logger.critical(e)
    finally:
        update_scan_status(webapp, plugin)
def start(**kwargs):
    """Dictionary-attack every port whose service name matches this plugin.

    policy 'increase' restricts the run to ports not yet scanned by the
    plugin; any other policy re-tries them all. Usernames come from
    settings.SSH_USERS, passwords from the shared brute-force wordlist.
    """
    policy = kwargs['policy']
    if policy == 'increase':
        # Incremental run: skip ports this plugin has already handled.
        targets = Port.objects.filter(service_name__icontains=plugin).exclude(
            scanned__contains=plugin)
    else:
        targets = Port.objects.filter(service_name__icontains=plugin)
    if not targets:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    usernames = settings.SSH_USERS
    with open(settings.BASE_DIR + 'brute/passwords.txt', 'r') as handle:
        passwords = handle.readlines()
    for target in targets:
        ssh_brute(target, usernames, passwords)
        update_scan_status(target, plugin)
def check(obj):
    """Test one target (WebUrls row or WebApp) for CRLF header injection.

    Two probes per payload: (1) the payload appended to the URL path,
    (2) the payload injected through an 'x-request-id' request header.
    A response exposing the canary header 'dandh811' confirms injection;
    a Risk row is recorded and a wechat alert sent. Finally the object is
    marked scanned regardless of outcome.
    """
    try:
        url = obj.url  # WebUrls rows expose .url ...
    except:
        url = obj.subdomain + '/'  # ... WebApp rows only a subdomain
    if url_is_ip(url):
        return
    for payload in payloads:
        _url = url + payload
        logger.debug("[%s] [%s] %s" % (plugin, obj.id, _url))
        headers = settings.HTTP_HEADERS
        try:
            res = requests.get(_url, headers=headers, timeout=10,
                               verify=False, allow_redirects=False)
        except Exception as e:
            # logger.error(e)
            res = None
        if res:
            try:
                # Canary header reflected => path-based CRLF injection.
                if 'dandh811' in res.headers.keys():
                    logger.info('[$$$] %s , 该域名存在漏洞' % _url)
                    Risk.objects.update_or_create(
                        target=url, risk_type='CRLF注入',
                        defaults={"desc": _url + '\n' + payload})
                    title = '发现HTTP头注入漏洞'
                    content = _url
                    wechat.send_msg(title, content)
            except Exception as e:
                logger.critical(e)
        # Second probe: smuggle the payload via a request header value.
        # NOTE(review): assumed to run for every payload even when the
        # first request failed — confirm against the original layout.
        try:
            headers["x-request-id"] = 'test' + payload
            res = requests.get(url, headers=headers, timeout=10,
                               verify=False, allow_redirects=False)
        except Exception as e:
            continue
        try:
            if 'dandh811' in res.headers.keys():
                Risk.objects.update_or_create(
                    target=url, risk_type='CRLF注入',
                    defaults={"desc": _url + '\n' + payload})
                logger.info('success, 该域名存在漏洞')
                title = '发现HTTP头注入漏洞'
                content = _url
                wechat.send_msg(title, content)
        except Exception as e:
            logger.critical(e)
    update_scan_status(obj, plugin)
def check_bucket_subdomain_is_exist(webapp):
    """Check whether a webapp's subdomain is itself a listable S3 bucket.

    Returns a list with the hostname when `check_s3_ls` can list it,
    an empty list when it cannot, or None for bare-IP targets (bucket
    names contain letters, so IPs are skipped without being marked).
    """
    found = []
    target = webapp.subdomain
    host = urlparse(target).netloc.split(':')[0]
    # Bare IP address, not a domain name — nothing bucket-like to check.
    if re.search('[a-z]', host) is None:
        return
    logger.debug("[%s] [%s] %s" % (plugin, webapp.id, target))
    candidate = target.split('//')[1]
    if check_s3_ls(candidate):
        found.append(candidate)
    update_scan_status(webapp, plugin)
    return found
def start(**kwargs):
    """Check mongod ports for unauthenticated (empty-password) access.

    Connects with pymongo; when the server lists databases without
    credentials, re-runs `show dbs` through the mongo CLI and stores its
    output as the Risk description. Each port is marked scanned whether
    or not the check succeeds.

    BUG FIX: an empty database list used `return`, which aborted the
    whole scan instead of moving on to the next port — now `continue`s.
    """
    policy = kwargs['policy']
    if policy == 'full':
        ports = Port.objects.filter(service_name__icontains='mongod')
    else:
        ports = Port.objects.exclude(scanned__icontains=plugin).filter(
            service_name__icontains='mongod')
    if not ports:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    for port in ports:
        ip = port.asset.ip
        logger.info('-' * 75)
        logger.debug("[%s] [%s] %s" % (plugin, port.id, ip))
        try:
            conn = pymongo.MongoClient(host=ip, port=port.port_num,
                                       serverSelectionTimeoutMS=timeout)
            try:
                database_list = conn.list_database_names()
            finally:
                # Close on both the empty and the vulnerable path.
                conn.close()
            if not database_list:
                continue
            cmd = "mongo %s:%s" % (ip, port.port_num)
            logger.info(cmd)
            res = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
            res.stdin.write(b'show dbs; \n')
            out, err = res.communicate()
            out = out.decode('utf-8')
            desc = 'MongoDB空口令,连接上数据库后,执行了show dbs命令,获取到如下信息:\n' + str(out)
            Risk.objects.update_or_create(target=ip,
                                          risk_type='mongodb空口令',
                                          defaults={'desc': desc})
            logger.info('[$$$]success')
        except Exception as e:
            logger.error(e)
        finally:
            update_scan_status(port, plugin)
def check(webapp):
    """Fetch a webapp's front page and log pages that register JS listeners.

    Unreachable targets are marked as scanned and skipped; reachable ones
    are only logged when 'addEventListener' appears in the response body.
    """
    target = webapp.subdomain
    # logger.debug("[%s] [%s] %s" % (plugin, webapp.id, target))
    try:
        response = requests.get(target, timeout=10,
                                headers=settings.HTTP_HEADERS,
                                allow_redirects=False, verify=False)
        body = response.text
    except Exception:
        # logger.error(e)
        update_scan_status(webapp, plugin)
        return
    if 'addEventListener' in body:
        logger.debug("[%s] [%s] %s" % (plugin, 'listening', target))
def start(**kwargs):
    """Run HTTP request-smuggling checks against each webapp.

    kwargs:
        webapps: queryset of WebApp rows.
        policy:  'increase' skips rows already scanned by this plugin.

    Logs a running percentage, records a Risk and sends a wechat alert
    per vulnerable subdomain, and marks each webapp scanned as
    'smuggling'. IP-only subdomains are skipped.
    """
    webapps = kwargs['webapps']
    policy = kwargs['policy']
    if policy == 'increase':
        webapps = webapps.exclude(scanned__icontains=plugin).order_by('-id')
    if not webapps:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
    denominator = len(webapps)  # total for the progress percentage
    molecular = 0               # number of webapps processed so far
    for webapp in webapps:
        subdomain = webapp.subdomain
        molecular += 1
        # split(':')[1] is '//host…' of 'scheme://host'; no letters => IP.
        if not bool(re.search('[a-z]', subdomain.split(':')[1])):
            continue
        logger.info('-' * 75)
        logger.debug("[%s] [%s] %s" % (plugin, webapp.id, subdomain))
        # if not web_is_online(subdomain.replace('https://', '')):
        #     webapp.delete()  # delete webapps whose 443 is closed
        #     logger.info('[删除] ' + subdomain)
        #     continue
        try:
            a = HTTP_REQUEST_SMUGGLER(subdomain)
            res = a.run()
            if res:
                logger.info(res)
                Risk.objects.update_or_create(target=subdomain,
                                              risk_type='HTTP夹带攻击',
                                              defaults={'desc': res})
                title = '发现漏洞'
                content = "漏洞类型:" + plugin
                wechat.send_msg(title, content)
        except Exception as e:
            logger.critical(e)
        # Progress reporting: exact 100.0 on the last item, rounded otherwise.
        if molecular == denominator:
            percent = 100.0
            logger.warning('%s [%d/%d]'%(str(percent)+'%', molecular, denominator))
        else:
            percent = round(1.0 * molecular / denominator * 100, 2)
            logger.warning('%s [%d/%d]'%(str(percent)+'%', molecular, denominator))
        update_scan_status(webapp, 'smuggling')
def check_bucket_domain_is_exist(domain):
    """Probe a registered domain and keyword variants for listable S3 buckets.

    Tries the full domain name itself, then '<first-label>.<keyword>' for a
    handful of common bucket-name keywords. Returns the list of names that
    `check_s3_ls` could list and marks the domain scanned.
    """
    hits = []
    name = domain.domain
    logger.debug("[%s] [%s] %s" % (plugin, domain.id, name))
    if check_s3_ls(name):
        hits.append(name)
    label = name.split('.')[0]
    for suffix in ['marketing', 'attachments', 'users', 'files']:
        guess = label + '.' + suffix
        if check_s3_ls(guess):
            hits.append(guess)
    update_scan_status(domain, plugin)
    return hits
def start(**kwargs):
    """Probe parameterised URLs for SSRF via redirect-style parameters.

    For each WebUrls row whose query string starts with a known
    redirect/fetch parameter, rewrites the value to point at
    http://127.0.0.1:22; an SSH 'mismatch' banner in the response means
    the server fetched the internal address (SSRF confirmed).
    """
    policy = kwargs['policy']
    if policy == 'full':
        weburls = WebUrls.objects.filter(url__contains='?').order_by('-id')
    else:
        weburls = WebUrls.objects.filter(url__contains='?').exclude(
            scanned__icontains=plugin).order_by('-id')
    if not weburls:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    # Parameter names that commonly carry a server-side-fetched URL.
    keywords = [
        'share', 'wap', 'url', 'link', 'src', 'source', 'target', 'u', '3g',
        'display', 'sourceURl', 'imageURL', 'domain'
    ]
    for weburl in weburls:
        for keyword in keywords:
            _keyword = '?' + keyword + '='
            if _keyword in weburl.url:
                # Replace everything after the parameter with the local
                # SSH endpoint; its protocol banner is easy to fingerprint.
                url = weburl.url.split(
                    _keyword)[0] + _keyword + 'http://127.0.0.1:22'
                logger.debug("[%s] [%s] %s" % (plugin, weburl.id, url))
                try:
                    res = requests.get(
                        url, timeout=10, verify=False,
                        allow_redirects=False).content.decode('utf-8')
                    # SSH answers HTTP with 'Protocol mismatch'.
                    if 'mismatch' in res:
                        logger.info(res)
                        logger.info('[$$$] success, 发现%s漏洞' % url)
                        Risk.objects.update_or_create(target=url,
                                                      risk_type=plugin,
                                                      defaults={'desc': url})
                        title = '发现%s漏洞' % plugin
                        content = '-'
                        wechat.send_msg(title, content)
                    # NOTE(review): placement assumed at try level, so the
                    # row is marked scanned whether or not it was vulnerable
                    # — confirm against the original layout.
                    update_scan_status(weburl, plugin)
                except Exception as e:
                    logger.critical(e)
def start(**kwargs):
    """Run sqlmap against every parameterised WebUrls row.

    policy 'full' re-tests everything; otherwise rows already marked
    scanned as 'sqli' are skipped. Positive sqlmap output is recorded as
    a Risk and announced via wechat. A running percentage is logged.
    """
    policy = kwargs['policy']
    if policy == 'full':
        weburls = WebUrls.objects.filter(url__contains='?').order_by('-id')
    else:
        weburls = WebUrls.objects.filter(url__contains='?').exclude(scanned__icontains='sqli').order_by('-id')
    if not weburls:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
        return
    denominator = len(weburls)  # total for the progress percentage
    molecular = 0               # URLs processed so far
    for weburl in weburls:
        molecular += 1
        url = weburl.url
        logger.info('-' * 75)
        logger.debug("[%s] [%s] %s" % (plugin, weburl.id, url))
        data = sqlmap(url)  # truthy result means sqlmap found an injection
        try:
            if data:
                logger.info(data)
                logger.info('[$$$]success, 发现SQL注入漏洞')
                Risk.objects.update_or_create(target=url, risk_type='SQL注入',
                                              defaults={'desc': data})
                title = '发现SQL注入漏洞'
                content = '-'
                wechat.send_msg(title, content)
            else:
                logger.info('未发现漏洞')
            update_scan_status(weburl, 'sqli')
        except Exception as e:
            logger.info('* %s' % e)
        # Progress: exact 100.0 on the final item, rounded elsewhere.
        if molecular == denominator:
            percent = 100.0
            logger.info('进度: %s [%d/%d]' % (str(percent)+'%', molecular, denominator))
        else:
            percent = round(1.0 * molecular / denominator * 100, 2)
            logger.info('进度 : %s [%d/%d]' % (str(percent)+'%', molecular, denominator))
def start(**kwargs):
    """Test PHP-fronted webapps with phuip-fpizdam (CVE-2019-11043 probe).

    kwargs:
        webapps: queryset of WebApp rows; filtered to those whose
                 other_info advertises a PHP version ('php/').
        policy:  'increase' skips rows already scanned by this plugin.

    The tool's stdout containing 'success' marks the target vulnerable.
    """
    policy = kwargs['policy']
    webapps = kwargs['webapps']
    if policy == 'increase':
        webapps = webapps.filter(other_info__icontains='php/').exclude(
            scanned__icontains=plugin)
    else:
        webapps = webapps.filter(other_info__icontains='php/')
    if not webapps:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
    for webapp in webapps:
        url = webapp.subdomain
        logger.debug("[%s] [%s] %s" % (plugin, webapp.id, url))
        cmd = '/root/go/bin/phuip-fpizdam %s/index.php' % url
        logger.info(cmd)
        p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        out = out.decode('utf-8')
        logger.info(out)
        logger.error(err.decode('utf-8'))
        if 'success' in out:
            Risk.objects.update_or_create(target=url, risk_type='php漏洞',
                                          defaults={'desc': cmd})
            logger.info('[$$$]success')
            title = '发现漏洞'
            content = plugin
            wechat.send_msg(title, content)
        update_scan_status(webapp, plugin)
        logger.info('-' * 75)
def start(**kwargs):
    """Brute-force MySQL ports with settings.MYSQL_USERS × the wordlist.

    Ports that refuse a raw TCP connection are deleted from the database.
    Each username/password attempt runs in its own thread via mysql_brute.
    """
    policy = kwargs['policy']
    if policy == 'full':
        ports = Port.objects.filter(service_name__icontains=plugin)
    else:
        ports = Port.objects.exclude(scanned__icontains=plugin).filter(
            service_name__icontains=plugin)
    if not ports:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
    usernames = settings.MYSQL_USERS
    with open(settings.BASE_DIR + 'brute/passwords.txt', 'r') as f:
        passwords = f.readlines()
    for port in ports:
        ip = port.asset.ip
        logger.info('-' * 75)
        logger.debug("[%s] [%s] %s" % (plugin, port.id, ip))
        # Cheap reachability probe before spawning brute-force threads.
        sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sk.settimeout(3)
        try:
            sk.connect((ip, port.port_num))
        except:
            # Unreachable port: drop the record entirely.
            port.delete()
            logger.info('port deleted')
            continue
        for password in passwords:
            password = password.strip()
            for username in usernames:
                try:
                    # One thread per credential pair; mysql_brute records hits.
                    t = Thread(target=mysql_brute,
                               args=(ip, port, username, password))
                    t.start()
                except Exception as e:
                    logger.info(e)
                    continue
        # NOTE(review): marked scanned without joining the worker threads —
        # confirm mysql_brute tolerates the port row changing afterwards.
        update_scan_status(port, plugin)
def start(**kwargs):
    """Brute-force FTP ports: anonymous login first, then the wordlists.

    Unconnectable ports are deleted. A successful anonymous login records
    a Risk and skips the credential loop for that port.
    """
    policy = kwargs['policy']
    if policy == 'full':
        ports = Port.objects.filter(service_name__icontains=plugin)
    else:
        ports = Port.objects.exclude(scanned__icontains=plugin).filter(
            service_name__icontains=plugin)
    if not ports:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
    with open(settings.BASE_DIR + 'brute/usernames.txt', 'r') as f:
        usernames = f.readlines()
    with open(settings.BASE_DIR + 'brute/passwords.txt', 'r') as f:
        passwords = f.readlines()
    # One FTP client object, re-connected for each port.
    ftp = ftplib.FTP()
    for port in ports:
        ip = port.asset.ip
        logger.debug("[%s] [%s] %s" % (plugin, port.id, ip))
        try:
            ftp.connect(ip, port.port_num, timeout=5)
            logger.info('ftp端口可以连接')
        except Exception as e:
            logger.error(e)
            port.delete()
            logger.info('FTP端口连接失败,删除端口')
            continue
        try:
            # Empty credentials = anonymous login attempt.
            ftp.login('', '')
            logger.info('[$$$] FTP登录成功!')
            Risk.objects.update_or_create(target=ip,
                                          risk_type='ftp匿名登录',
                                          defaults={
                                              'target': ip,
                                              'desc': 'ftp匿名登录'
                                          })
            # NOTE(review): this continue skips update_scan_status, so
            # anonymously-open ports are re-scanned on every increase run
            # — confirm that is intended.
            continue
        except Exception as e:
            logger.error(e)
        for username in usernames:
            username = username.strip()
            for password in passwords:
                password = password.strip()
                try:
                    # logger.debug(username + ':' + password)
                    ftp.login(username, password)
                    logger.info('[$$$] FTP登录成功!')
                    logger.info('[$$$]success, %s:%s' % (username, password))
                    Risk.objects.update_or_create(
                        target=ip, risk_type='ftp弱口令',
                        defaults={'desc': '%s:%s' % (username, password)})
                    title = 'ftp弱口令'
                    content = '-'
                    wechat.send_msg(title, content)
                except Exception as e:
                    # logger.info(e)
                    pass
        update_scan_status(port, plugin)
        logger.info('-' * 75)
def start(**kwargs):
    """Hunt open-redirect vulnerabilities with a headless Chrome browser.

    Phase 1 mutates known redirect parameters in stored WebUrls with
    baidu.com payloads; phase 2 appends path payloads to each WebApp's
    subdomain. Landing on www.baidu.* after navigation confirms the
    redirect. Risks are recorded and wechat alerts sent.

    BUG FIX (phase 1): the payload URL `_url` was built but the browser
    navigated to the unmodified `url` (and logged it), so the payloads
    were never actually exercised; both now use `_url`.
    """
    webapps = kwargs['webapps']
    policy = kwargs['policy']
    # Parameter names that commonly carry a redirect target.
    keywords = [
        'next', 'url', 'return', 'redirect_url', 'callback_url', 'callback',
        'r', 'target', 'error', 'errurl', 'error_url', 'redirect',
        'redirect_to', 'jump', 'jump_to', 'to', 'link', 'linkto', 'domain',
        'u', 'continue', 'back_url'
    ]
    payloads = [
        r'\baidu.com', '/baidu.com', '//baidu.com', '///baidu.com',
        '////baidu.com', 'https://[email protected]', '#baidu.com',
        '?baidu.com', r'\\baidu.com', '.baidu', '.baidu.com',
        '///baidu.com//..', '////baidu.com//..', '/http://baidu.com'
    ]
    chrome_options = Options()
    chrome_options.add_argument('--no-sandbox')
    chrome_options.add_argument('--headless')
    chrome_options.add_argument('--disable-dev-shm-usage')
    if policy == 'full':
        weburls = WebUrls.objects.order_by('-id')
    else:
        weburls = WebUrls.objects.exclude(scanned__icontains=plugin)
        webapps = webapps.exclude(scanned__icontains=plugin)
    if not weburls:
        logger.debug("[%s] %s" % (plugin, '未匹配到扫描对象'))
    for weburl in weburls:
        url = parse.unquote(weburl.url, 'utf-8')
        if url_is_ip(url):
            continue
        logger.debug("[%s] [%s] %s" % (plugin, weburl.id, url))
        try:
            browser = webdriver.Chrome(
                executable_path='/opt/tools/chromedriver',
                chrome_options=chrome_options)
        except Exception as e:
            logger.critical(e)
            continue
        risk = False  # stop testing further payloads once one URL hits
        parseResult = parse.urlparse(url)
        param_dict = parse.parse_qs(parseResult.query)
        for param in param_dict.keys():
            if param not in keywords:
                continue
            if risk:
                continue
            for payload in payloads:
                _url = url.replace(param + '=' + param_dict[param][0],
                                   param + '=' + payload)
                logger.debug("[%s] [%s] %s" % (plugin, weburl.id, _url))
                try:
                    browser.get(_url)  # was browser.get(url): payload unused
                    cur_url = browser.current_url
                    browser.close()
                except Exception as e:
                    continue
                if 'www.baidu' in cur_url:
                    logger.info('[$$$] success, 发现漏洞')
                    Risk.objects.update_or_create(target=url,
                                                  risk_type='开放重定向',
                                                  defaults={"desc": _url})
                    title = '发现开放重定向漏洞'
                    content = '-'
                    wechat.send_msg(title, content)
                    risk = True
        try:
            # Reap stray Chrome processes the driver may leave behind.
            subprocess.call('pkill chrome', shell=True)
        except Exception as e:
            logger.critical(e)
        finally:
            browser.quit()
        update_scan_status(weburl, plugin)
    if not webapps:
        logger.debug("[%s] %s" % (plugin, 'There are no objects to scan'))
    # Path-style payloads only (no parameter rewriting) for whole webapps.
    payloads2 = [
        r'\baidu.com', '/baidu.com', '//baidu.com', '///baidu.com',
        '////baidu.com', '#baidu.com', '?baidu.com', r'\\baidu.com',
        '///baidu.com//..', '////baidu.com//..', '/http://baidu.com'
    ]
    for webapp in webapps:
        try:
            browser = webdriver.Chrome(
                executable_path='/opt/tools/chromedriver',
                chrome_options=chrome_options)
        except Exception as e:
            logger.critical(e)
            continue
        risk = False
        for payload in payloads2:
            if risk:
                continue
            url = webapp.subdomain + payload
            logger.debug("[%s] [%s] %s" % (plugin, webapp.id, url))
            try:
                browser.get(url)
                cur_url = browser.current_url
                browser.close()
            except Exception as e:
                cur_url = 'error'
            if 'www.baidu' in cur_url:
                logger.info('[$$$] success, 发现漏洞')
                Risk.objects.update_or_create(target=url,
                                              risk_type='开放重定向',
                                              defaults={"desc": url})
                title = '发现开放重定向漏洞'
                content = url
                wechat.send_msg(title, content)
                risk = True
        try:
            update_scan_status(webapp, plugin)
            subprocess.call('pkill chrome', shell=True)
        except Exception as e:
            logger.critical(e)
        finally:
            browser.quit()