def printRelaysFound(self):
    table = Texttable()
    table.set_cols_align(["l", "l", "l", "l", "l"])
    table.set_cols_valign(["m", "m", "m", "m", "m"])  # five columns, five alignment values
    table.set_cols_width([20, 20, 20, 20, 20])
    rows = [["NickName", "Host", "State", "Reason", "Open Ports"]]
    for torNode in self.torNodes:
        # Build a "reason:port" summary for every open port of the relay.
        openPorts = None
        for port in torNode.openPorts:
            if openPorts is None:
                openPorts = ''
            openPorts += str(port.reason) + ':' + str(port.port) + ' '
        if openPorts is None:
            rows.append([torNode.nickName, torNode.host, torNode.state, torNode.reason, 'No open ports found'])
        else:
            rows.append([torNode.nickName, torNode.host, torNode.state, torNode.reason, openPorts])
    table.add_rows(rows)
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['dirBruterOnRelay',
         'Try to discover web resources in the specified relay. If the dictionary is not specified, Tortazo will use FuzzDB.',
         "self.dirBruterOnRelay('89.34.51.116', dictFile='/home/user/dictFile.txt')"],
        ['dirBruterOnAllRelays',
         'Try to discover web resources in the relays stored in the database. If the dictionary is not specified, Tortazo will use FuzzDB.',
         "self.dirBruterOnAllRelays(port=8080, dictFile='/home/user/dictFile.txt')"],
        ['dirBruterOnHiddenService',
         'Try to discover web resources in the specified hidden service. If the dictionary is not specified, Tortazo will use FuzzDB.',
         'self.dirBruterOnHiddenService("http://awjrc4y7j9po3ke3.onion")']
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['setTarget',
         'Set the relay for the HeartBleed attack. Check the targets using the function "printRelaysFound". Default port: 443.',
         'self.setTarget("1.2.3.4")'],
        ['setTargetWithPort',
         'Set the relay and port for the HeartBleed attack. Check the targets using the function "printRelaysFound".',
         'self.setTargetWithPort("1.2.3.4", "8443")'],
        ['startAttack',
         'Starts the HeartBleed attack against the specified target.',
         'self.startAttack()'],
        ['startAttackAllRelays',
         'Starts the HeartBleed attack against all relays loaded in the plugin. Default port: 443.',
         'self.startAttackAllRelays()']
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['setApiKey', 'Sets the API Key string.', 'self.setApiKey("XXXXXXXXXXXX")'],
        ['setApiKeyFile', 'Sets the API Key file. Reads the first line of the file and then sets the API Key string.', 'self.setApiKeyFile("/home/apiKeyFile")'],
        ['basicSearchQuery', 'Performs a basic search with Shodan. By default prints the first 10 results.', 'self.basicSearchQuery("OpenSSL 1.0.1", 15)'],
        ['basicSearchAllRelays', 'Performs a basic search with Shodan against all TOR relays. Uses the "net" filter.', 'self.basicSearchAllRelays("OpenSSL 1.0.1")'],
        ['basicSearchByRelay', 'Performs a basic search with Shodan against the specified TOR relay.', 'self.basicSearchByRelay("OpenSSL 1.0.1", "80.80.80.80")'],
        ['basicSearchByNickname', 'Performs a basic search with Shodan against the specified TOR nickname.', 'self.basicSearchByNickname("OpenSSL 1.0.1", "TORNickName")']
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['torCH', 'Search the specified keywords in TorCH.', 'self.torCH("anonymous networks")'],
        ['ahmia', 'Search the specified keywords in Ahmia.', 'self.ahmia("anonymous networks")']
    ])
    print table.draw() + "\n"
def compareRelaysWithHiddenWebSite(self, hiddenWebSite):
    if hiddenWebSite == '' or hiddenWebSite is None:
        pluginException = PluginException(
            message="Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite),
            trace="compareRelaysWithHiddenWebSite with args hiddenWebSite=%s" % (hiddenWebSite),
            plugin="crawler",
            method="compareRelaysWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite)
            raise pluginException
    if not hiddenWebSite.startswith('http://'):
        hiddenWebSite = "http://" + hiddenWebSite
    if not is_valid_onion_address(hiddenWebSite):
        pluginException = PluginException(
            message="Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite),
            trace="compareRelaysWithHiddenWebSite with args hiddenWebSite=%s" % (hiddenWebSite),
            plugin="crawler",
            method="compareRelaysWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite)
            raise pluginException
    try:
        responseHidden = self.serviceConnector.performHTTPConnectionHiddenService(hiddenWebSite, method="GET")
    except Exception as exc:
        print "[-] Exception connecting to the hidden service. Is the hidden service up and running? " + str(exc.message)
        return
    ratios = {}
    for node in self.torNodes:
        if responseHidden.status_code == 200:
            try:
                responseRelay = self.serviceConnector.performHTTPConnection('http://' + node.host, method="GET")
                if responseRelay.status_code == 200:
                    print "[+] Executing the matcher tool against the responses."
                    # SequenceMatcher returns a similarity ratio between 0.0 and 1.0.
                    ratio = difflib.SequenceMatcher(None, responseHidden.content, responseRelay.content).ratio()
                    ratios[node.host] = str(ratio)
            except:
                continue
    print "[+] The percentage of equivalence between the contents of the web sites found in the relays and the hidden service is: \n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([15, 15, 15])
    elements = [["Hidden Service", "Relay", "Percentage"]]
    for key in ratios.keys():
        elements.append([hiddenWebSite, str(key), str(ratios[key])])
    table.add_rows(elements)
    print table.draw() + "\n"
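# A minimal, standalone sketch of the matching step used by
# compareRelaysWithHiddenWebSite above: difflib.SequenceMatcher compares two
# response bodies and returns a similarity ratio. The two strings below are
# made-up stand-ins for HTTP response contents, not real data.
import difflib

hidden_body = "<html><body><h1>Hidden service</h1><p>some content</p></body></html>"
relay_body = "<html><body><h1>Hidden service</h1><p>some content and more</p></body></html>"

# ratio() returns a value between 0.0 (completely different) and 1.0 (identical).
print "Similarity ratio: %s" % difflib.SequenceMatcher(None, hidden_body, relay_body).ratio()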
def basicSearchQuery(self, basicSearch, limit=10):
    if limit is None or limit < 0:
        pluginException = PluginException(
            message='The limit specified is invalid.',
            trace="basicSearchQuery with args basicSearch=%s , limit=%s " % (basicSearch, str(limit)),
            plugin="shodan",
            method="basicSearchQuery")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] The limit specified is invalid."
            raise pluginException
    if basicSearch is None or basicSearch == '':
        pluginException = PluginException(
            message='The query specified is invalid.',
            trace="basicSearchQuery with args basicSearch=%s , limit=%s " % (basicSearch, str(limit)),
            plugin="shodan",
            method="basicSearchQuery")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] The query specified is invalid."
            raise pluginException
    if hasattr(self, 'apiKey') and self.apiKey is not None:
        shodanApi = shodan.Shodan(self.apiKey)
        results = shodanApi.search(basicSearch)
        count = 0
        table = Texttable()
        table.set_cols_align(["l"])
        table.set_cols_valign(["m"])
        table.set_cols_width([55])
        rows = [["Data"]]
        # Print at most 'limit' results from the Shodan response.
        for service in results['matches']:
            if count == limit:
                break
            count += 1
            rows.append([service['ip_str'] + "\n" + service['data']])
        table.add_rows(rows)
        print table.draw() + "\n"
    else:
        print "[*] Shodan API key not set. This is mandatory to perform searches using Shodan"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['simpleStemmingAllRelays', 'Stemming with all the specified terms along the relays loaded in the plugin. Searches for web sites on common ports, like 80, 8080 and 443, or on a specific port.', 'self.simpleStemmingAllRelays("drugs kill killer hitman")'],
        ['stemmingHiddenService', 'Stemming with all the specified terms in the specified website.', 'self.stemmingHiddenService("http://torlinkbgs6aabns.onion/", "drugs kill killer")']
    ])
    print table.draw() + "\n"
def basicSearchByNickname(self, basicSearch, nickname):
    if basicSearch is None or basicSearch == '':
        pluginException = PluginException(
            message='The query specified is invalid.',
            trace="basicSearchByNickname with args basicSearch=%s " % (basicSearch),
            plugin="shodan",
            method="basicSearchByNickname")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] The query specified is invalid."
            raise pluginException
    if hasattr(self, 'apiKey') and self.apiKey is not None:
        shodanApi = shodan.Shodan(self.apiKey)
        for node in self.torNodes:
            if nickname is not None and node.nickName == nickname:
                # The space before "net:" keeps the query terms and the Shodan filter separated.
                results = shodanApi.search(basicSearch + " net:" + node.host)
                table = Texttable()
                table.set_cols_align(["l"])
                table.set_cols_valign(["m"])
                table.set_cols_width([55])
                rows = [["Data"]]
                if len(results['matches']) > 0:
                    print results
                    print "[*] Data for: %s " % (node.host)
                    for service in results['matches']:
                        rows.append([service['ip_str'] + "\n" + service['data']])
                    table.add_rows(rows)
                    print table.draw() + "\n"
                else:
                    print "[*] No results for: %s " % (node.host)
    else:
        print "[*] Shodan API key not set. This is mandatory to perform searches using Shodan"
def printOnionRepository(self, start=1, maxResults=30):
    # Check the database connection before using it.
    if hasattr(self, "db") == False:
        print "[-] No database connection configured. Check your configuration."
        return
    # start - 1: indexes in the database start from 0.
    start = start - 1
    onionAddresses = self.db.searchOnionRepository(start, maxResults)
    table = Texttable()
    table.set_cols_align(["l", "l", "l", "l"])
    table.set_cols_valign(["m", "m", "m", "m"])
    table.set_cols_width([5, 20, 20, 5])
    rows = [["#", "Onion Address", "Description", "Service Type"]]
    for rowid, address in enumerate(onionAddresses):
        (onionAddress, responseCode, responseHeaders, onionDescription, serviceType) = address
        rows.append([rowid, onionAddress, onionDescription, serviceType])
    table.add_rows(rows)
    print table.draw() + "\n"
    self.numberOnionSitesRegistered = self.db.countOnionRepositoryResponses()
    # If there are more records, ask the user whether to print the next page.
    if start + maxResults <= self.numberOnionSitesRegistered:
        sys.stdout.write('%s [y/n]\n' % ('Print more onion addresses?'))
        while True:
            try:
                input = raw_input  # Python 2: read the answer with raw_input.
                if strtobool(input().lower()):
                    break
                else:
                    return
            except NameError:
                pass
            except ValueError:
                sys.stdout.write('Please respond with \'y\' or \'n\'.\n')
        self.printOnionRepository(start + maxResults, maxResults)
def printRelaysFound(self):
    rows = [["Host", "State", "Reason", "NickName", "Open Ports"]]
    tableRelays = Texttable()
    tableRelays.set_cols_align(["l", "l", "l", "l", "l"])
    tableRelays.set_cols_valign(["m", "m", "m", "m", "m"])
    tableRelays.set_cols_width([15, 10, 10, 10, 15])
    for torNode in self.torNodes:
        # Build a "reason:port" summary for every open port of the relay.
        openPorts = None
        for port in torNode.openPorts:
            if openPorts is None:
                openPorts = ''
            openPorts += str(port.reason) + ':' + str(port.port) + ' '
        if openPorts is None:
            rows.append([torNode.host, torNode.state, torNode.reason, torNode.nickName, 'No open ports found'])
        else:
            rows.append([torNode.host, torNode.state, torNode.reason, torNode.nickName, openPorts])
    tableRelays.add_rows(rows)
    print tableRelays.draw()
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['startHTTPHiddenService',
         'Starts a hidden service with the specified settings. serviceDir: directory where the resources are located (HTML pages, JS scripts, CSS, images, etc.).',
         'self.startHTTPHiddenService(serviceDir="/opt/Tortazo/plugins/attack/utils/hiddenServiceTest")']
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ["help", "Help Banner", "self.help()"],
        ["printRelaysFound", "Table with the relays found.", "self.printRelaysFound()"],
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([40, 55, 55])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['executeAll', 'Execute Nikto against all TOR relays found (by default, against port 80).', 'self.executeAll("nikto_switches")'],
        ['executeAllOnPort', 'Execute Nikto against all TOR relays found, on the specified port.', 'self.executeAllOnPort(8080, "nikto_switches")'],
        ['executeByNickname', 'Execute Nikto against the relay specified by nickname (by default, against port 80).', "self.executeByNickname('TorNodeNickName', 'nikto_switches')"],
        ['executeByIP', 'Execute Nikto against the relay specified by IP address (by default, against port 80).', "self.executeByIP('80.80.80.80', 'nikto_switches')"],
        ['executeByIPOnPort', 'Execute Nikto against the relay specified by IP address, on the specified port.', "self.executeByIPOnPort('80.80.80.80', 8080, 'nikto_switches')"]
    ])
    print table.draw() + "\n"
def __validateResponse(self, response, queryTerms):
    from bs4 import BeautifulSoup
    if response.status_code == 200:
        soup = BeautifulSoup(response.text)
        from irlib.preprocessor import Preprocessor
        from irlib.matrix import Matrix
        from irlib.metrics import Metrics
        prep = Preprocessor()
        mx = Matrix()
        metric = Metrics()
        # Tokenize the visible text of the page and register it in the term matrix.
        terms = prep.ngram_tokenizer(text=soup.get_text())
        mx.add_doc(doc_id=response.url, doc_terms=terms, frequency=True, do_padding=True)
        # Count the frequency of every term found in the page.
        cnt = Counter()
        for word in terms:
            cnt[word] += 1
        table = Texttable()
        table.set_cols_align(["l", "l"])
        table.set_cols_valign(["m", "m"])
        table.set_cols_width([40, 55])
        rows = [["Term", "Frequency"]]
        # Report only the terms that appear in the query, most frequent first.
        for word in sorted(cnt, key=cnt.get, reverse=True):
            if word.lower() in queryTerms.lower().split():
                rows.append([word, cnt[word]])
        table.add_rows(rows)
        print table.draw() + "\n"
    else:
        print "[-] Response for %s is %s " % (response.url, response.status_code)
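# A minimal, standalone sketch of the term-frequency step used by
# __validateResponse above: count every token with collections.Counter and
# keep only the terms that also appear in the query. The text and query
# terms below are made up for illustration.
from collections import Counter

text = "drugs market drugs hitman forum market drugs"
queryTerms = "drugs hitman"

cnt = Counter(text.split())
# Most frequent terms first, restricted to the query terms.
for word in sorted(cnt, key=cnt.get, reverse=True):
    if word.lower() in queryTerms.lower().split():
        print word, cnt[word]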
def compareWebSiteWithHiddenWebSite(self, webSite, hiddenWebSite):
    if webSite == '' or webSite is None:
        pluginException = PluginException(
            message="The URL specified is invalid. %s " % (webSite),
            trace="compareWebSiteWithHiddenWebSite with args webSite=%s, hiddenWebSite=%s" % (webSite, hiddenWebSite),
            plugin="crawler",
            method="compareWebSiteWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] The URL specified is invalid. %s " % (webSite)
            raise pluginException
    if hiddenWebSite == '' or hiddenWebSite is None:
        pluginException = PluginException(
            message="Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite),
            trace="compareWebSiteWithHiddenWebSite with args webSite=%s, hiddenWebSite=%s" % (webSite, hiddenWebSite),
            plugin="crawler",
            method="compareWebSiteWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite)
            raise pluginException
    if not hiddenWebSite.startswith('http://'):
        hiddenWebSite = "http://" + hiddenWebSite
    if not webSite.startswith('http://'):
        webSite = "http://" + webSite
    if not is_valid_onion_address(hiddenWebSite):
        pluginException = PluginException(
            message="Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite),
            trace="compareWebSiteWithHiddenWebSite with args webSite=%s, hiddenWebSite=%s" % (webSite, hiddenWebSite),
            plugin="crawler",
            method="compareWebSiteWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] Invalid Onion Address %s must contain 16 characters. The TLD must be .onion" % (hiddenWebSite)
            raise pluginException
    if not is_valid_url(webSite):
        pluginException = PluginException(
            message="The URL specified is invalid. %s " % (webSite),
            trace="compareWebSiteWithHiddenWebSite with args webSite=%s, hiddenWebSite=%s" % (webSite, hiddenWebSite),
            plugin="crawler",
            method="compareWebSiteWithHiddenWebSite")
        if self.runFromInterpreter:
            showTrace(pluginException)
            return
        else:
            print "[-] The URL specified is invalid. %s " % (webSite)
            raise pluginException
    try:
        responseHidden = self.serviceConnector.performHTTPConnectionHiddenService(hiddenWebSite, method="GET")
    except Exception as exc:
        import sys
        print sys.exc_info()
        print "[-] Exception connecting to the hidden service. Is the hidden service up and running? " + str(exc.message)
        return
    ratio = 0
    if responseHidden.status_code == 200:
        try:
            responseRelay = self.serviceConnector.performHTTPConnection(webSite, method="GET")
            if responseRelay.status_code == 200:
                print "[+] Executing the matcher tool against the responses."
                # SequenceMatcher returns a similarity ratio between 0.0 and 1.0.
                ratio = difflib.SequenceMatcher(None, responseHidden.content, responseRelay.content).ratio()
                print "[+] Match ratio between the web sites: %s " % (str(ratio))
            else:
                print "[-] The website returned a non HTTP 200 code. %s " % (str(responseRelay.status_code))
        except Exception as exc:
            print "[-] Exception connecting to the web service. Is the web service up and running? " + str(exc.message)
            return
    else:
        print "[-] The hidden website returned a non HTTP 200 code. %s " % (str(responseHidden.status_code))
    print "[+] The percentage of equivalence between the contents of the web site and the hidden service is: \n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([15, 15, 15])
    elements = [["Hidden Service", "WebSite", "Percentage"]]
    elements.append([hiddenWebSite, webSite, str(ratio)])
    table.add_rows(elements)
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([15, 15, 15])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['setExtractorRulesAllow',
         'Sets the rules (regular expressions) that specify the allowed pages to visit and analyze. This value will be passed to the "allow" attribute of the class "scrapy.contrib.linkextractors.LinkExtractor".',
         "self.setExtractorRulesAllow('index\.php|index\.jsp')"],
        ['setExtractorRulesDeny',
         'Sets the rules (regular expressions) that specify the disallowed pages to visit and analyze. This value will be passed to the "deny" attribute of the class "scrapy.contrib.linkextractors.LinkExtractor".',
         "self.setExtractorRulesDeny('index\.php|index\.jsp')"],
        ['setCrawlRulesLinks',
         'Sets the XPath rules used to extract links from every webpage analyzed. The default value should be enough for almost every case; however, you can use this function to override it. Default: "//a/@href".',
         "self.setCrawlRulesLinks('//a[contains(@href, 'confidential')]/@href')"],
        ['setCrawlRulesImages',
         'Sets the XPath rules used to extract images from every webpage analyzed. The default value should be enough for almost every case; however, you can use this function to override it. Default: "//img/@src".',
         "self.setCrawlRulesImages('//a[contains(@href, 'image')]/@href')"],
        ['setDictForBruter',
         'Sets the dictionary file for HTTP bruteforce attacks on protected resources.',
         'self.setDictForBruter("/home/user/dictFile.txt")'],
        ['compareWebSiteWithHiddenWebSite',
         'Compares the contents of a website on the clear web with the contents of a web site on the TOR deep web. The return value is the percentage of correlation and similitude between both sites.',
         'self.compareWebSiteWithHiddenWebSite("http://exit-relay-found.com/", "http://gai12dase4sw3f5a.onion/")'],
        ['compareRelaysWithHiddenWebSite',
         'Performs an HTTP connection against every relay found and, if the response is an HTTP 200 status code, performs an HTTP connection against the specified hidden service and compares the contents of both responses. The return value is the percentage of correlation and similitude between both sites.',
         'self.compareRelaysWithHiddenWebSite("http://gai12dase4sw3f5a.onion/")'],
        ['crawlOnionWebSite',
         'Executes a crawler against the specified hidden service. The following parameters control the behaviour of the crawler: '
         'hiddenWebSite: the hidden site to crawl (mandatory parameter). '
         'hiddenWebSitePort: port of the hidden site to crawl. Default value: 80. '
         'socatTcpListenPort: port for the Socat proxy. Default value: 8765. '
         'crawlImages: search and download the images from every page. Default value: True. '
         'crawlLinks: search and visit the links found in every page. Default value: True. '
         'crawlContents: download and save in the local file system the contents of every page found. '
         'deepLinks: depth of links that the crawler will follow. '
         'bruterOnProtectedResource: if True, when the spider finds an HTTP-protected resource it tries to execute a bruteforce attack using the specified dict file or FuzzDB. '
         'crawlFormData: search the forms in every page and store that structure in the database. '
         'useRandomUserAgents: use a random list of User-Agents in every HTTP connection performed by the crawler. The FuzzDB project is used to get the list of User-Agents, reading the file fuzzdb/attack-payloads/http-protocol/user-agents.txt.',
         '- self.crawlOnionWebSite("http://gai12dase4sw3f5a.onion/")\n'
         '- self.crawlOnionWebSite("http://gai12dase4sw3f5a.onion/", hiddenWebSitePort=8080, crawlImages=False)\n'
         '- self.crawlOnionWebSite("http://gai12dase4sw3f5a.onion/", crawlFormData=False)']
    ])
    print table.draw() + "\n"
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([25, 20, 20])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()'],
        ['setDictSeparator', 'Sets a separator for dictionary files. Every line in the file must contain <user><separator><passwd>.', 'self.setDictSeparator(":")'],
        ['sshBruterOnRelay', 'Execute a bruteforce attack against an SSH server in the specified relay. Uses FuzzDB if the dictFile is not specified.', "self.sshBruterOnRelay('37.213.43.122', dictFile='/home/user/dict')"],
        ['sshBruterOnAllRelays', 'Execute a bruteforce attack against an SSH server in the relays found. Uses FuzzDB if the dictFile is not specified.', "self.sshBruterOnAllRelays(dictFile='/home/user/dict')"],
        ['sshBruterOnHiddenService', 'Execute a bruteforce attack against an SSH server in the specified onion address.', 'self.sshBruterOnHiddenService("5bsk3oj5jufsuii6.onion", dictFile="/home/user/dict")'],
        ['ftpBruterOnRelay', 'Execute a bruteforce attack against an FTP server in the specified relay.', 'self.ftpBruterOnRelay("37.213.43.122", dictFile="/home/user/dict")'],
        ['ftpBruterOnAllRelays', 'Execute a bruteforce attack against an FTP server in the relays found.', 'self.ftpBruterOnAllRelays(dictFile="/home/user/dict")'],
        ['ftpBruterOnHiddenService', 'Execute a bruteforce attack against an FTP server in the specified onion address.', 'self.ftpBruterOnHiddenService("5bsk3oj5jufsuii6.onion", dictFile="/home/user/dict")'],
        ['smbBruterOnRelay', 'Execute a bruteforce attack against an SMB server in the specified relay.', 'self.smbBruterOnRelay("37.213.43.122", dictFile="/home/user/dict")'],
        ['smbBruterOnAllRelays', 'Execute a bruteforce attack against an SMB server in the relays found.', 'self.smbBruterOnAllRelays(dictFile="/home/user/dict")'],
        ['smbBruterOnHiddenService', 'Execute a bruteforce attack against an SMB server in the specified onion address. This function uses socat to create a local Socks proxy to route the requests from the local machine to the hidden service.', 'self.smbBruterOnHiddenService("5bsk3oj5jufsuii6.onion", servicePort=139, localPort=139, dictFile="/home/user/dict")'],
        ['snmpBruterOnRelay', 'Execute a bruteforce attack against an SNMP server in the specified relay.', 'self.snmpBruterOnRelay("37.213.43.122", dictFile="/home/user/dict")'],
        ['snmpBruterOnAllRelays', 'Execute a bruteforce attack against an SNMP server in the relays found.', 'self.snmpBruterOnAllRelays(dictFile="/home/user/dict")'],
        ['httpBruterOnSite', 'Execute a bruteforce attack against a web site.', 'self.httpBruterOnSite("http://eviltorrelay.com/auth/", dictFile="/home/user/dict")'],
        ['httpBruterOnHiddenService', "Execute a bruteforce attack against an onion site (hidden service in TOR's deep web).", 'self.httpBruterOnHiddenService("http://5bsk3oj5jufsuii6.onion/auth/", dictFile="/home/user/dict")']
    ])
    print table.draw() + "\n"
def render_GET(self, request):
    # The client is expected to send a JSON document in the 'info' query parameter.
    data = json.loads(request.args['info'][0])
    if data is not None:
        table = Texttable()
        table.set_cols_align(["l", "l"])
        table.set_cols_valign(["m", "m"])
        table.set_cols_width([20, 25])
        rows = [["Browser Attribute", "Value"]]
        for key, value in data.iteritems():
            rows.append([key, value])
        table.add_rows(rows)
        print table.draw() + "\n"
    request.setHeader("content-type", "text/plain")
    return "Success"
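# A minimal, hypothetical client for the render_GET handler above, assuming
# the resource is served over plain HTTP on 127.0.0.1:8080 (the host, port
# and browser attributes are assumptions, not part of the plugin).
import json
import urllib
import urllib2

info = json.dumps({"userAgent": "Mozilla/5.0", "platform": "Linux x86_64"})
url = "http://127.0.0.1:8080/?" + urllib.urlencode({"info": info})

# The handler parses the 'info' query parameter, prints the attribute table
# on the server side and answers with the plain-text body "Success".
print urllib2.urlopen(url).read()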
def help(self):
    print "[*] Functions available in the Plugin...\n"
    table = Texttable()
    table.set_cols_align(["l", "l", "c"])
    table.set_cols_valign(["m", "m", "m"])
    table.set_cols_width([40, 55, 55])
    table.add_rows([
        ["Function", "Description", "Example"],
        ['help', 'Help Banner', 'self.help()'],
        ['printRelaysFound', 'Table with the relays found.', 'self.printRelaysFound()']
    ])
    print table.draw() + "\n"