def fallback_consensus_dl_speed(dirip, dirport, nickname, max_time):
    """Time a full consensus download from a single directory mirror.

    Downloads the consensus from (dirip, dirport), logs whether the download
    finished within max_time seconds, and returns the elapsed seconds.
    """
    fetcher = DescriptorDownloader()
    begin = datetime.datetime.utcnow()
    # there appears to be about 1 second of overhead when comparing stem's
    # internal trace time and the elapsed time calculated here
    fetcher.get_consensus(endpoints = [(dirip, dirport)]).run()
    elapsed = (datetime.datetime.utcnow() - begin).total_seconds()
    status = 'ok' if elapsed <= max_time else 'too slow'
    logging.debug(('Consensus download: %0.2fs %s from %s (%s:%d), '
                   + 'max download time %0.2fs.')
                  % (elapsed, status, nickname, dirip, dirport, max_time))
    return elapsed
def download_consensus():
    """Fetch the current network consensus document and save it to CONSENSUS_PATH."""
    fetched = DescriptorDownloader().get_consensus(
        document_handler=DocumentHandler.DOCUMENT).run()
    with open(CONSENSUS_PATH, 'w') as out:
        out.write(str(fetched[0]))
def main():
    """Compute the v2 hidden service descriptor IDs for an onion address and
    print the HSDirs responsible for each replica.

    NOTE(review): Python 2 only -- uses print statements and str.encode('hex').
    """
    REPLICAS = 2  # descriptor IDs are computed for two replicas
    parser = argparse.ArgumentParser()
    parser.add_argument('onion_address', help='The hidden service address - e.g. (idnxcnkne4qt76tg.onion)')
    parser.add_argument('--consensus', help='The optional consensus file', required=False)
    args = parser.parse_args()
    if args.consensus is None:
        # No consensus file supplied: download a fresh consensus and use the
        # current time for the descriptor-ID computation.
        downloader = DescriptorDownloader()
        consensus = downloader.get_consensus(document_handler = DocumentHandler.DOCUMENT).run()[0]
        t = time()
    else:
        # Parse the supplied consensus file and use its valid-after time.
        with open(args.consensus) as f:
            consensus = next(parse_file(f, 'network-status-consensus-3 1.0', document_handler = DocumentHandler.DOCUMENT))
        t = mktime(consensus.valid_after.timetuple())
    service_id, tld = args.onion_address.split(".")
    # A v2 onion address is 16 base32 characters followed by '.onion'.
    if tld == 'onion' and len(service_id) == 16 and service_id.isalnum():
        for replica in range(0, REPLICAS):
            descriptor_id = rend_compute_v2_desc_id(service_id, replica, t)
            print descriptor_id + '\t' + b32decode(descriptor_id, True).encode('hex')
            for router in find_responsible_HSDir(descriptor_id, consensus):
                print router['fingerprint'] + '\t' + router['nickname']
    else:
        print "[!] The onion address you provided is not valid"
def get_relay_long_lat(relay_type):
    """Map relay fingerprints to [longitude, latitude] for the current consensus.

    relay_type 'E' selects exit relays (exit policy allows exiting); any other
    value selects guard relays that are not exits. Each matching relay is also
    appended to 'Longitude_Latitude_Exit_nodes.txt' or
    'Longitude_Latitude_Middle_nodes.txt' respectively.

    Returns a dict mapping fingerprint -> [longitude, latitude].
    """
    relay_locations = {}
    downloader = DescriptorDownloader(use_mirrors = True, timeout = 10)
    for desc in downloader.get_consensus().run():
        if relay_type == 'E':
            if not desc.exit_policy.is_exiting_allowed():
                continue
            out_path = 'Longitude_Latitude_Exit_nodes.txt'
        else:
            # Use the public `flags` attribute rather than the private
            # `_entries` parse state the original poked at; no other consensus
            # flag contains the substring 'Guard', so the check is equivalent.
            if 'Guard' not in desc.flags or desc.exit_policy.is_exiting_allowed():
                continue
            out_path = 'Longitude_Latitude_Middle_nodes.txt'
        # Look the address up once (the original called the geo helpers twice
        # per relay: once for the dict and again for the file).
        longitude = get_long_ip(desc.address)
        latitude = get_lat_ip(desc.address)
        relay_locations[desc.fingerprint] = [longitude, latitude]
        with open(out_path, "a") as the_file:
            the_file.write(desc.fingerprint+' '+longitude+' '+latitude+'\n')
    return relay_locations
def get_top_100_relays():
    """Get Top 100 relays with the most bandwidth weights.

    Returns a list of [fingerprint, exit_policy, bandwidth] entries sorted by
    consensus bandwidth in descending order, truncated to the top 100.

    Fixes a defect in the original: it built the sorted list but fell off the
    end without returning it, so callers always received None. The dead
    `router_bandwidth_sorted` variable is dropped as well.
    """
    downloader = DescriptorDownloader(use_mirrors = True, timeout = 10)
    relays = [[desc.fingerprint, desc.exit_policy, desc.bandwidth]
              for desc in downloader.get_consensus().run()]
    # Sort by bandwidth (third field), heaviest first.
    relays.sort(key = lambda entry: entry[2], reverse = True)
    return relays[:100]
def get_relay_long_lat(relay_type):
    """Returns the nodes of the given relay_type (in the argument).

    relay_type 'E' -> exit relays; 'M' -> guard relays that are not exits;
    any other value yields an empty dict (no consensus download is performed
    in that case, matching the original).

    Returns a dict mapping a running index to [fingerprint, longitude, latitude].
    """
    relay_locations = {}
    downloader = DescriptorDownloader(use_mirrors = True, timeout = 10)
    query = downloader.get_consensus()
    if relay_type == 'E':
        selector = lambda desc: desc.exit_policy.is_exiting_allowed()
    elif relay_type == 'M':
        # Use the public `flags` attribute instead of the private `_entries`
        # parse state; no other consensus flag contains 'Guard', so the
        # membership test is equivalent to the original substring check.
        selector = lambda desc: ('Guard' in desc.flags
                                 and not desc.exit_policy.is_exiting_allowed())
    else:
        return relay_locations
    index = 0
    for desc in query.run():
        if selector(desc):
            longi, lat = get_long_lat_ip(desc.address)
            relay_locations[index] = [desc.fingerprint, longi, lat]
            index += 1
    return relay_locations
def tutorial_example_1():
    """Download the current consensus as one document and dump it to disk."""
    from stem.descriptor import DocumentHandler
    from stem.descriptor.remote import DescriptorDownloader

    query = DescriptorDownloader().get_consensus(
        document_handler = DocumentHandler.DOCUMENT)
    consensus = query.run()[0]
    with open('/tmp/descriptor_dump', 'w') as dump_file:
        dump_file.write(str(consensus))
def tutorial_example():
    """Print the nickname and fingerprint of every relay in the consensus."""
    from stem.descriptor.remote import DescriptorDownloader

    downloader = DescriptorDownloader()
    try:
        for relay in downloader.get_consensus().run():
            print('found relay %s (%s)' % (relay.nickname, relay.fingerprint))
    except Exception as exc:
        # Best-effort tutorial code: report the failure instead of raising.
        print('Unable to retrieve the consensus: %s' % exc)
def main():
    """Compare the current consensus against previously seen relay
    fingerprints and email a notification when fifty or more new
    fingerprints appear at once.

    Runs as a dry-run (no notification) when there is no prior fingerprint
    file, or when that file is more than three hours stale.
    """
    prior_fingerprints = load_fingerprints()
    downloader = DescriptorDownloader(timeout=60, validate=True)
    dry_run = False
    if not prior_fingerprints:
        log.debug(
            "We don't have any existing fingerprints so this will be a dry-run. No notifications will be sent."
        )
        dry_run = True
    else:
        last_modified = os.stat(
            FINGERPRINTS_FILE
        ).st_mtime  # unix timestamp for when it was last modified
        seconds_ago = int(time.time() - last_modified)
        log.debug("Our fingerprint was last modified at %s (%i seconds ago)." %
                  (time.ctime(last_modified), seconds_ago))
        if seconds_ago > (3 * 60 * 60):
            # A stale file means we haven't run recently; a large backlog of
            # "new" relays would be noise, so suppress notifications this run.
            log.debug(
                "Fingerprint file was last modified over three hours ago. No notifications will be sent for this run."
            )
            dry_run = True
    query = downloader.get_consensus()
    # run(True) suppresses exceptions; failures land on query.error instead.
    query.run(True)
    if query.error:
        log.warn("Unable to retrieve the consensus: %s" % query.error)
        return
    # mapping of fingerprints to their router status entry
    relays = dict((entry.fingerprint, entry) for entry in query)
    current_fingerprints = set(relays.keys())
    new_fingerprints = current_fingerprints.difference(prior_fingerprints)
    log.debug("%i new relays found" % len(new_fingerprints))
    if not dry_run and len(new_fingerprints) >= 50:
        log.debug("Sending a notification...")
        send_email([relays[fp] for fp in new_fingerprints])
    # Persist the union so each fingerprint only ever counts as "new" once.
    save_fingerprints(prior_fingerprints.union(current_fingerprints))
def get_top_relays(country):
    """Gets the top (the top criteria being the router's bandwidth) relays
    in a Country.

    Returns a list of [fingerprint, location] pairs for relays whose exit
    policy allows exiting and whose address geolocates to `country`
    (case-insensitive comparison), ordered by descending consensus bandwidth.

    Cleanup vs. the original: drops the never-used `router_bandwidth_sorted`
    variable and replaces the manual index/insert sorting dance with a single
    keyed sort.
    """
    downloader = DescriptorDownloader(use_mirrors = True, timeout = 10)
    # One [fingerprint, exit_policy, bandwidth, location] record per relay.
    relays = [[desc.fingerprint, desc.exit_policy, desc.bandwidth,
               get_location(desc.address)]
              for desc in downloader.get_consensus().run()]
    ###sort relays in descending order###
    relays.sort(key = lambda entry: entry[2], reverse = True)
    wanted = country.lower()
    ## get the fingerprint (plus location) if exiting allowed
    return [[fingerprint, location]
            for fingerprint, exit_policy, _bandwidth, location in relays
            if exit_policy.is_exiting_allowed() and location.lower() == wanted]
def get_bandwidth(fingerprint):
    """Return the consensus bandwidth of the relay with `fingerprint`.

    Downloads a fresh consensus on every call; returns None when the
    fingerprint is not present in it.

    TODO(optimization): cache fingerprint -> bandwidth (plus the fetch time)
    in a pickle file and only re-download the consensus when the cache is
    older than 24h or the fingerprint is missing; otherwise load from the
    pickle file.
    """
    downloader = DescriptorDownloader(use_mirrors = True, timeout = 10)
    for desc in downloader.get_consensus().run():
        if desc.fingerprint == fingerprint:
            # The original had an unreachable `break` after this return.
            return desc.bandwidth
    return None
def main():
    """Compute the v2 hidden service descriptor IDs for an onion address and
    print the HSDirs responsible for each replica.

    NOTE(review): Python 2 only -- uses print statements and str.encode('hex').
    """
    REPLICAS = 2  # descriptor IDs are computed for two replicas
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'onion_address',
        help='The hidden service address - e.g. (idnxcnkne4qt76tg.onion)')
    parser.add_argument('--consensus', help='The optional consensus file',
                        required=False)
    args = parser.parse_args()
    if args.consensus is None:
        # No consensus file supplied: download a fresh consensus and use the
        # current time for the descriptor-ID computation.
        downloader = DescriptorDownloader()
        consensus = downloader.get_consensus(
            document_handler=DocumentHandler.DOCUMENT).run()[0]
        t = time()
    else:
        # Parse the supplied consensus file and use its valid-after time.
        with open(args.consensus) as f:
            consensus = next(
                parse_file(f, 'network-status-consensus-3 1.0',
                           document_handler=DocumentHandler.DOCUMENT))
        t = mktime(consensus.valid_after.timetuple())
    service_id, tld = args.onion_address.split(".")
    # A v2 onion address is 16 base32 characters followed by '.onion'.
    if tld == 'onion' and len(service_id) == 16 and service_id.isalnum():
        for replica in range(0, REPLICAS):
            descriptor_id = rend_compute_v2_desc_id(service_id, replica, t)
            print descriptor_id + '\t' + b32decode(descriptor_id, True).encode('hex')
            for router in find_responsible_HSDir(descriptor_id, consensus):
                print router['fingerprint'] + '\t' + router['nickname']
    else:
        print "[!] The onion address you provided is not valid"
# Script to collect the IP addresses of Tor exit relays using the stem library.
from colorama import init, Fore, Back, Style
from stem.descriptor.remote import DescriptorDownloader
import re

init()
cont = 0
infoCompleta = ""
downloader = DescriptorDownloader()

for descriptor in downloader.get_consensus().run():
    if descriptor.exit_policy.is_exiting_allowed():
        # Pull dotted-quad addresses out of the raw descriptor text.
        ipFind = re.findall(r'[0-9]+(?:\.[0-9]+){3}', str(descriptor))
        # Drops the second matched address, keeping the first one as the
        # relay's IP -- presumably the second match is not the relay address;
        # TODO confirm against a raw consensus entry.
        ipFind.pop(1)
        parse = "".join(ipFind)
        infoCompleta = infoCompleta + "\n" + str(parse)
        cont = cont + 1
        #print(ipFind, parse, cont)

print("---------Todos los Repetidores TOR------------")
print(infoCompleta)
print("\nExisten: " + str(cont) + " nodos tor actualmente\n")

try:
    # `with` guarantees the file is closed. (The original called `f.close`
    # without parentheses, so the file handle was never actually closed.)
    with open("export.txt", "a") as f:
        f.write(infoCompleta)
    print("Se guardaron los datos en el archivo txt\n")
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C/SystemExit still propagate.
    print("no se pudo generar archivo txt\n")

search = input("Buscar IP? (y o n): ")
if search == "y":
    ip = input("Ip a buscar(EJ:10.0.0.0): ")
# Contribute at https://github.com/labriqueinternet/torfilter_ynh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from stem.descriptor.remote import DescriptorDownloader
import yaml
import sys

downloader = DescriptorDownloader()

try:
    desc = downloader.get_consensus().run()
except Exception as exc:
    print("Unable to retrieve the consensus: %s" % exc)
    # The original fell through here, then crashed with a NameError because
    # `desc` was never assigned. Exit cleanly instead.
    sys.exit(1)

# Write one relay address per line to the file named on the command line;
# `with` ensures the handle is closed even if a write fails.
with open(sys.argv[1], 'w+') as f:
    for d in desc:
        f.write("%s\n" % d.address)
from stem.descriptor import DocumentHandler
from stem.descriptor.remote import DescriptorDownloader

# Fetch the consensus as a single document and dump it verbatim to disk.
query = DescriptorDownloader().get_consensus(
    document_handler=DocumentHandler.DOCUMENT)
consensus = query.run()[0]

with open('/tmp/descriptor_dump', 'w') as dump_file:
    dump_file.write(str(consensus))
# Plot the geolocation of Tor relays by scraping geoiptool.com.
# NOTE(review): Python 2 only (print statements, urllib2).
from stem.descriptor.remote import DescriptorDownloader
import urllib2
import matplotlib.pyplot as plt

downloader = DescriptorDownloader()
try:
    # [:-1] skips the final consensus entry.
    for desc in downloader.get_consensus().run()[:-1]:
        #print "%i %s" % (int(desc.bandwidth), desc.address)
        #____Integrating parsing GeoIP
        IP = str(desc.address)
        # Fetch the geoiptool page for this relay's address and scrape the
        # embedded 'lat:'/'lng:' javascript values out of the HTML.
        url = 'http://www.geoiptool.com/en/?ip=' + IP
        website = urllib2.urlopen(url)
        content = website.read()
        #parsing Latitude
        start1 = content.find('lat:')+4 #+7 is the number of characters appearing before the desired string
        end1 = content.find(',',start1)
        #parsing Longtitude
        start2 = content.find('lng:')+4
        end2 = content.find(',',start2)
        #print '%s %1.8f %1.8f' %(IP, float(content[start1:end1]), float(content[start2:end2]))
        if len(content[start1:end1]) > 1: # to test if the GeoIP is available
            print '%s %1.8f %1.8f' %(IP, float(content[start1:end1]), float(content[start2:end2]))
            plt.plot([float(content[start1:end1])],[float(content[start2:end2])], 'ro')
            plt.axis([-180, 180, -180, 180])
            plt.show(block = False)
        else:
            # No coordinates found: report and plot the relay at the origin.
            print '%s 0 0' %(IP)
            plt.plot(0,0, 'ro')
            plt.axis([-180, 180, -180, 180])
            plt.show(block = False)
    # NOTE(review): the matching except/finally for the `try` above lies
    # beyond this excerpt.
from stem.descriptor.remote import DescriptorDownloader

# Print basic details for every relay in the current consensus.
downloader = DescriptorDownloader()
for entry in downloader.get_consensus().run():
    print('Nickname:', entry.nickname)
    print('Fingerprint:', entry.fingerprint)
    print('Address:', entry.address)
    print('Bandwidth:', entry.bandwidth)
from stem.descriptor import DocumentHandler
from stem.descriptor.remote import DescriptorDownloader

# Download the full consensus document and write it out verbatim.
downloader = DescriptorDownloader()
documents = downloader.get_consensus(document_handler = DocumentHandler.DOCUMENT).run()

with open('/tmp/descriptor_dump', 'w') as out_file:
    out_file.write(str(documents[0]))
import time
start_time = time.time()  # whole-script timing; never printed in this excerpt
# Look up each relay's geolocation/ASN via two web APIs.
# NOTE(review): Python 2 only (urllib2).
from stem.descriptor.remote import DescriptorDownloader
import urllib2

downloader = DescriptorDownloader()
try:
    for desc in downloader.get_consensus().run():
        #print "%i %s" % (int(desc.bandwidth), desc.address)
        #____Integrating parsing GeoIP
        IP = str(desc.address)
        # Query two services per relay: nekudo for coordinates, ipinfo for
        # additional data (only content1 is parsed in this excerpt).
        url1 = 'http://geoip.nekudo.com/api/' + IP
        website1 = urllib2.urlopen(url1)
        content1 = website1.read()
        url2 = 'http://ipinfo.io/' + IP
        website2 = urllib2.urlopen(url2)
        content2 = website2.read()
        #parsing Latitude
        start1 = content1.find('latitude":')+10 #+7 is the number of characters appearing before the desired string
        end1 = content1.find(',',start1)
        #parsing Longtitude
        start2 = content1.find('longitude":')+11
        end2 = content1.find(',',start2)
        #parsing ASN
        # NOTE(review): the excerpt ends here -- the ASN parsing and the
        # except clause matching the `try` above lie beyond this view.
def main():
    """Alarm when relays cycle their fingerprint too often.

    Tracks, per (address, or_port), every fingerprint seen in the consensus
    with the time it was published. If a relay shows ten or more distinct
    fingerprints within the retention window, an email notification is sent
    (unless recently notified; see is_notification_suppressed).

    NOTE(review): Python 2 only -- indexes alarm_for.values() directly.
    """
    last_notified_config = conf.get_config('last_notified')
    last_notified_path = util.get_path('data', 'fingerprint_change_last_notified.cfg')
    if os.path.exists(last_notified_path):
        last_notified_config.load(last_notified_path)
    else:
        # First run: point the config at where it should be saved later.
        last_notified_config._path = last_notified_path
    fingerprint_changes = load_fingerprint_changes()
    downloader = DescriptorDownloader(timeout=15)
    alarm_for = {}
    for relay in downloader.get_consensus():
        # fingerprint -> unix publication time for this address:port
        prior_fingerprints = fingerprint_changes.setdefault(
            (relay.address, relay.or_port), {})
        if relay.fingerprint not in prior_fingerprints:
            log.debug("Registering a new fingerprint for %s:%s (%s)" %
                      (relay.address, relay.or_port, relay.fingerprint))
            prior_fingerprints[relay.fingerprint] = datetime_to_unix(relay.published)
        # drop fingerprint changes that are over thirty days old
        # NOTE(review): the constant is named TEN_DAYS; either the comment or
        # the constant's value is off -- confirm which window is intended.
        old_fingerprints = [
            fp for fp in prior_fingerprints
            if (time.time() - prior_fingerprints[fp] > TEN_DAYS)
        ]
        for fp in old_fingerprints:
            log.debug(
                "Removing fingerprint for %s:%s (%s) which was published %i days ago"
                % (relay.address, relay.or_port, fp,
                   prior_fingerprints[fp] / 60 / 60 / 24))
            del prior_fingerprints[fp]
        # if we've changed more than ten times in the last ten days then alarm
        if len(prior_fingerprints) >= 10:
            alarm_for['%s:%s' % (relay.address, relay.or_port)] = (
                relay.address, relay.or_port, relay.fingerprint)
    if alarm_for and not is_notification_suppressed(alarm_for.values()):
        log.debug("Sending a notification for %i relays..." % len(alarm_for))
        body = EMAIL_BODY
        for address, or_port, fingerprint in alarm_for.values():
            try:
                desc = downloader.get_server_descriptors(fingerprint).run()[0]
            except:
                desc = None  # might not be available, just used for extra info
            fp_changes = fingerprint_changes[(address, or_port)]
            log.debug("* %s:%s has had %i fingerprints: %s" %
                      (address, or_port, len(fp_changes),
                       ', '.join(fp_changes.keys())))
            if desc:
                body += "* %s:%s (platform: %s, contact: %s)\n" % (
                    address, or_port,
                    desc.platform.decode('utf-8', 'replace'), desc.contact)
            else:
                body += "* %s:%s\n" % (address, or_port)
            count = 0
            # Newest fingerprint first.
            for fingerprint in sorted(fp_changes, reverse=True,
                                      key=lambda k: fp_changes[k]):
                body += " %s at %s\n" % (
                    fingerprint,
                    datetime.datetime.fromtimestamp(
                        fp_changes[fingerprint]).strftime('%Y-%m-%d %H:%M:%S'))
                count += 1
                # Relays frequently cycling their fringerprint can have thousands of
                # entries. Enumerating them all is unimportant, so if too long then
                # just give the count.
                if count > 8:
                    oldest_timestamp = sorted(fp_changes.values())[0]
                    body += " ... and %i more since %s\n" % (
                        len(fp_changes) - 8,
                        datetime.datetime.fromtimestamp(
                            oldest_timestamp).strftime('%Y-%m-%d %H:%M:%S'))
                    break
            body += "\n"
        subject = EMAIL_SUBJECT
        if len(alarm_for) == 1:
            subject += ' (%s:%s)' % alarm_for.values()[0][:2]
        util.send(subject, body=body, to=[
            '*****@*****.**', '*****@*****.**'
        ])
        # register that we've notified for these
        current_time = str(int(time.time()))
        for address, or_port, _ in alarm_for.values():
            last_notified_config.set('%s:%s' % (address, or_port), current_time)
        last_notified_config.save()
    save_fingerprint_changes(fingerprint_changes)