# Shared module-level dependencies assumed throughout this section:
# re, json and base64 from the standard library, plus the redis client `r`
# and the helpers defined elsewhere in the repo.
import re
import json
import base64
import urlparse


def content_deal(headers, host, method, postdata, uri):
    u = urlparse.urlparse(uri)
    url = uri.split(u.netloc)[-1]
    # duplicate entries removed from the original list
    black_ext = 'css,css3,flv,mp4,swf,js,jpg,jpeg,png,gif,txt,ico,pdf,rar,zip,avi,wmi,exe,mpeg,ppt,pptx,doc,docx,xls,xlsx'
    black_domain = 'ditu.google.cn,doubleclick,cnzz.com,baidu.com,40017.cn,google-analytics.com,googlesyndication,gstatic.com,bing.com,google.com,digicert.com'
    with open('white_domain.conf') as white:
        white_domain = white.readline().strip('\n').strip('\r')
    if white_domain != "":
        # keep the request only if the host matches at least one whitelist
        # entry (the original tested the whole whitelist string and returned
        # on the first non-match, dropping everything once the whitelist had
        # more than one entry)
        if not any(re.search(domain, u.netloc.lower())
                   for domain in white_domain.split(',')):
            return
    for ext in black_ext.split(','):
        # match '.css' rather than any path that merely ends in 'css'
        if u.path.lower().endswith('.' + ext):
            return
    for domain in black_domain.split(','):
        if u.netloc.lower().split(':')[0].endswith(domain):
            return
    url_hash = get_hash(host, uri, postdata)
    if 'Gdscan' not in headers.keys():  # skip requests replayed by the scanner itself
        request = {
            'headers': headers,
            'host': host,
            'url': url,
            'method': method,
            'postdata': postdata,
            'hash': url_hash,
            'uri': uri
        }
        reqhash = request['hash']
        b64req = base64.encodestring(json.dumps(request))
        if r.hsetnx("request", reqhash, b64req):  # enqueue only unseen requests
            r.lpush("waiting", reqhash)
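# get_hash() is defined elsewhere in the repo. A minimal sketch, assuming it
# mirrors the proxy handler further below, which hashes uri + body with md5;
# treating host as interface-compatibility padding is an assumption here.
from hashlib import md5

def get_hash(host, uri, postdata):
    # identical uri/body pairs collapse to one field in the "request" hash
    return md5(uri + (postdata or '')).hexdigest()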
def insert_redis(self, b64req, hash, host):
    path = self.request.uri.split('?')[0]
    black_ext = 'css,css3,flv,mp4,swf,js,jpg,jpeg,png,gif,txt,ico,pdf,rar,zip,avi,wmi,exe,mpeg,ppt,pptx,doc,docx,xls,xlsx'
    black_domain = 'ditu.google.cn,doubleclick,cnzz.com,baidu.com,40017.cn,google-analytics.com,googlesyndication,gstatic.com,bing.com,google.com,digicert.com'
    with open('white_domain.conf') as white:
        white_domain = white.readline().strip('\n').strip('\r')
    if white_domain != "":
        # same whitelist fix as content_deal(): match each entry, not the raw string
        if not any(re.search(domain, host.lower())
                   for domain in white_domain.split(',')):
            return
    for ext in black_ext.split(','):
        if path.lower().endswith('.' + ext):
            return
    for domain in black_domain.split(','):
        if host.lower().split(':')[0].endswith(domain):
            return
    if r.hsetnx("request", hash, b64req):
        r.lpush("waiting", hash)
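# `r` is the module-level redis client shared by everything in this section;
# a minimal sketch (host, port and db are assumptions):
import redis

r = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)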
def capture(x):
    global out
    payload = x.lastlayer().original
    # keep only HTTP requests: the payload contains 'HTTP/' (from the request
    # line) but does not start with 'HTTP', which would mark a response
    if 'HTTP/' in payload and payload[0:4] != 'HTTP':
        http = Http(payload)
        if http.extract() and 'Gdscan' not in http.headers.keys():
            if out:
                print_request(http)
            request = {
                'headers': http.headers,
                'host': http.host,
                'url': http.url,
                'method': http.method,
                'postdata': http.headers['postdata'],
                'hash': http.hash
            }
            reqhash = request['hash']
            b64req = base64.encodestring(json.dumps(request))
            if r.hsetnx("request", reqhash, b64req):
                r.lpush("waiting", reqhash)
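# Hypothetical wiring for capture(): it is shaped like a scapy packet
# callback, so a sniffer entry point could look like this (the filter and
# the sniff() call are assumptions, not taken from the repo):
if __name__ == '__main__':
    from scapy.all import sniff
    out = True  # echo each captured request via print_request()
    sniff(filter='tcp dst port 80', prn=capture, store=0)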
def Start_Scan(nothing):
    '''
    Main loop of the scan worker, covering the xss, sqli, xpath, ldap, lfi
    and sqli_time scan rules. If USESQLMAPAPI is True, SQL injection checks
    are delegated to sqlmapapi: the full request is posted to the API and,
    if the target is vulnerable, the task id is written back to redis.
    '''
    while True:
        reqhash = r.rpoplpush("waiting", "running")
        if not reqhash:
            sleep(1)  # queue empty: avoid a tight busy loop
            continue
        reqed = r.hget("request", reqhash)
        if not reqed:
            continue
        request = json.loads(ds(reqed))  # ds(): base64 decode, counterpart of es()
        rules = ['sqli', 'xss', 'xpath', 'ldap', 'lfi']
        # rules = ['xss', 'sqli', 'xpath', 'ldap', 'lfi', 'sqli_time']
        for rule in rules:
            try:
                if rule == 'sqli' and USESQLMAPAPI:
                    newsql = isqlmap()  # FIXME: flagged as "here wrong!" in the original source
                    if request.get("uri"):
                        uri = request.get("uri")
                    else:
                        uri = "http://" + request['host'] + request['url']
                    taskid, api = newsql.extract_request(
                        uri, request['method'], request['headers'],
                        request['postdata'])
                    print taskid, api
                    while not check_status(taskid, api):  # poll until sqlmapapi finishes
                        sleep(7)
                    sqlilen, sqlidata = isvulun(taskid, api)
                    if sqlilen:
                        r.lpush("sqli", reqhash)
                        updaterequest(reqhash, taskid, api, sqlidata)
                else:
                    scan_obj = general(request['url'], request['host'],
                                       request['postdata'], request['headers'],
                                       request['method'], request.get('uri'))
                    if 'time' in rule:
                        scan_obj.timecheck = True
                    scan_obj.setname(rule)
                    scan_obj.loadrule()
                    scan_obj.run()
                    if scan_obj.bingo_payload != '':
                        r.lpush(rule, reqhash)
                        r.hset("bingo_payload", reqhash, scan_obj.bingo_payload)
            except:
                pass  # a failing rule must not kill the worker loop
        r.lpush("finish", reqhash)
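# check_status() and isvulun() are repo helpers. Minimal sketches against
# sqlmapapi's own REST endpoints (/scan/<taskid>/status and
# /scan/<taskid>/data); the helper signatures and the use of the requests
# library are assumptions.
import requests

def check_status(taskid, api):
    # sqlmapapi reports 'terminated' once a scan task has finished
    status = requests.get('%s/scan/%s/status' % (api, taskid)).json()
    return status.get('status') == 'terminated'

def isvulun(taskid, api):
    # 'data' is non-empty exactly when sqlmap found an injectable parameter
    data = requests.get('%s/scan/%s/data' % (api, taskid)).json().get('data', [])
    return len(data), data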
def get(self):
    logger.debug('Handle %s request to %s', self.request.method,
                 self.request.uri)

    def handle_response(response):
        if (response.error and
                not isinstance(response.error, tornado.httpclient.HTTPError)):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            # clear tornado's default headers before copying the upstream ones
            self._headers = tornado.httputil.HTTPHeaders()
            for header, v in response.headers.get_all():
                if header not in ('Content-Length', 'Transfer-Encoding',
                                  'Content-Encoding', 'Connection'):
                    # some headers appear multiple times, e.g. 'Set-Cookie'
                    self.add_header(header, v)
            if response.body:
                self.set_header('Content-Length', len(response.body))
                self.write(response.body)
        self.finish()

    body = self.request.body
    if not body:
        body = ""
    try:
        if 'Proxy-Connection' in self.request.headers:
            del self.request.headers['Proxy-Connection']
        fetch_request(
            self.request.uri, handle_response,
            method=self.request.method, body=body,
            headers=self.request.headers, follow_redirects=False,
            allow_nonstandard_methods=True)

        # record the proxied request for the scan workers
        request_dict = {}
        request_dict['uri'] = self.request.uri
        request_dict['method'] = self.request.method
        request_dict['headers'] = dict(self.request.headers)
        request_dict['body'] = body
        request_dict['postdata'] = request_dict['body']
        url = urlparse(request_dict['uri'])
        request_dict['host'] = url.netloc
        request_dict['url'] = request_dict['uri'].split(url.netloc)[-1]
        identity = request_dict['uri'] + request_dict['body']
        request_dict['hash'] = md5(identity).hexdigest()
        b64req = es(json.dumps(request_dict))  # es(): base64 encode helper
        if r.hsetnx("request", request_dict['hash'], b64req):
            r.lpush("waiting", request_dict['hash'])
    except tornado.httpclient.HTTPError as e:
        if hasattr(e, 'response') and e.response:
            handle_response(e.response)
        else:
            self.set_status(500)
            self.write('Internal server error:\n' + str(e))
            self.finish()
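# Hypothetical bootstrap: get() above lives on a tornado RequestHandler
# subclass (named ProxyHandler here purely for illustration). A typical way
# to serve it as a local proxy on port 8081:
import tornado.ioloop
import tornado.web

if __name__ == '__main__':
    application = tornado.web.Application([(r'.*', ProxyHandler)])
    application.listen(8081)
    tornado.ioloop.IOLoop.instance().start()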