Example #1
def get_relay_long_lat(relay_type):
  """
  Returns the locations of relays of the given relay_type:
  'E' for exit relays, 'M' for guard relays that don't allow exiting.
  """
  relay_locations = {}

  downloader = DescriptorDownloader(
    use_mirrors = True,
    timeout = 10,
  )
  query = downloader.get_consensus()

  i = 0

  if relay_type == 'E':
    for desc in query.run():
      if desc.exit_policy.is_exiting_allowed():
        longi, lat = get_long_lat_ip(desc.address)
        relay_locations[i] = [desc.fingerprint, longi, lat]
        i += 1

  elif relay_type == 'M':
    for desc in query.run():
      # 'Guard' flag from the consensus 's' line
      if 'Guard' in desc.flags and not desc.exit_policy.is_exiting_allowed():
        longi, lat = get_long_lat_ip(desc.address)
        relay_locations[i] = [desc.fingerprint, longi, lat]
        i += 1

  return relay_locations
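# Note: get_long_lat_ip() is not defined in the snippet above. A minimal
# sketch of such a helper is shown below; it assumes the third-party geoip2
# package and a local GeoLite2-City.mmdb database, neither of which is part
# of the original example.
import geoip2.database

def get_long_lat_ip(address):
  # Resolve an IP address to (longitude, latitude) via a local GeoIP database.
  with geoip2.database.Reader('GeoLite2-City.mmdb') as reader:
    response = reader.city(address)
    return response.location.longitude, response.location.latitude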
Example #2
    def listAuthorityExitNodes(self):
        '''
        List the Exit Nodes using the filters specified by command-line.
        '''
        self.cli.logger.debug(term.format("[+] Trying to list the current Exit-Nodes of TOR.", term.Color.GREEN))
        if self.cli.exitNodeFingerprint is not None:
            self.cli.logger.debug(term.format("[+] Using the fingerprint: %s " % (self.cli.exitNodeFingerprint), term.Color.GREEN))
        self.cli.logger.debug(term.format("[+] Filter by platform: %s." % (self.cli.mode), term.Color.GREEN))
        self.cli.logger.debug(term.format("[+] Retrieving the first %d records in the Descriptors." % (self.cli.exitNodesToAttack),
                             term.Color.GREEN))

        if self.cli.useMirror:
            self.cli.logger.info(term.format("[+] Using the Directory Mirrors to get the descriptors", term.Color.YELLOW))
        downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
        if self.cli.exitNodeFingerprint is not None:
            descriptors = downloader.get_server_descriptors(fingerprints=[self.cli.exitNodeFingerprint])
        else:
            descriptors = downloader.get_server_descriptors()
        try:
            listDescriptors = descriptors.run()
        except zlib.error:
            self.cli.logger.error(term.format("[-] Error fetching the TOR descriptors. This is quite common... Try again in a few seconds.", term.Color.RED))
            return
        except urllib2.HTTPError:
            self.cli.logger.error(term.format("[-] Fingerprint not found... It's not registered in the last valid server descriptor.", term.Color.RED))
            return
        return self.filterNodes(listDescriptors)
Example #3
def get_exit_addresses_remote(exit_ip, exit_port=443):
    downloader = DescriptorDownloader(
        use_mirrors=True,
        timeout=10,
    )
    exit_ips = []
    exit_fingerprints = []
    query = downloader.get_server_descriptors()
    total_exits = 0
    total_allowed_exits = 0
    for desc in query.run():
        try:
            if desc.exit_policy.is_exiting_allowed():
                total_exits += 1
                if desc.exit_policy.can_exit_to(exit_ip, exit_port):
                    total_allowed_exits += 1
                    exit_ips.append(desc.address)
                    exit_fingerprints.append(desc.fingerprint)
        except Exception as exc:
            print('Unable to retrieve the server descriptors: %s' % exc)

    print("Found %d total exits, %d allowing %s on port %d" %
          (total_exits, total_allowed_exits, exit_ip, exit_port))
    return exit_ips, exit_fingerprints
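# A usage sketch for the function above; the target IP is only an illustrative
# placeholder.
if __name__ == '__main__':
    exit_ips, exit_fingerprints = get_exit_addresses_remote('93.184.216.34', 443)
    print('%d exit relays can reach the target' % len(exit_ips))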
Example #4
def get_top_100_relays():
  """Get the top 100 relays with the most bandwidth weight"""

  downloader = DescriptorDownloader(
    use_mirrors = True,
    timeout = 10,
  )

  query = downloader.get_consensus()
  router_bandwidth = {}

  i = 0

  for desc in query.run():
    router_bandwidth[i] = [desc.fingerprint, desc.exit_policy, desc.bandwidth]
    i = i + 1

  # sort the relays by bandwidth, in descending order
  sorted_relays = []
  i = 0
  for key, value in sorted(router_bandwidth.items(), key = lambda fun: fun[1][2], reverse = True):
    sorted_relays.insert(i, [value[0], value[1], value[2]])
    i = i + 1

  # return the 100 highest-bandwidth relays
  return sorted_relays[:100]
Example #5
def download_consensus():
    downloader = DescriptorDownloader()
    consensus = downloader.get_consensus(
        document_handler=DocumentHandler.DOCUMENT).run()[0]

    with open(CONSENSUS_PATH, 'w') as descriptor_file:
        descriptor_file.write(str(consensus))
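# The consensus dump written above can be read back later without touching the
# network. A companion sketch, assuming the same CONSENSUS_PATH constant:
from stem.descriptor import DocumentHandler, parse_file

def load_consensus():
    with open(CONSENSUS_PATH) as descriptor_file:
        return next(parse_file(descriptor_file,
                               'network-status-consensus-3 1.0',
                               document_handler=DocumentHandler.DOCUMENT))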
Example #6
    def getExitNodes(self):

        try:
            from stem.descriptor.remote import DescriptorDownloader
        except ImportError:
            raise TorConnectionException("Stem Package Missing")

        #self.authenticate()

        downloader = DescriptorDownloader()

        exitNodes = []

        for node in downloader.get_server_descriptors().run():

            if node.exit_policy.is_exiting_allowed():

                exitNodes.append({
                    'nickname': node.nickname,
                    'address': node.address,
                    'fingerprint': node.fingerprint,
                    'platform': node.platform,
                    'os': node.operating_system,
                    'burst': node.burst_bandwidth,
                    'estimated': node.observed_bandwidth,
                    'circuit_protocols': node.circuit_protocols,
                    'contact': node.contact,
                    'tor_version': node.tor_version
                })

        return tuple(exitNodes)
Example #7
def main():
  REPLICAS = 2
  
  parser = argparse.ArgumentParser()
  parser.add_argument('onion_address', help='The hidden service address - e.g. (idnxcnkne4qt76tg.onion)')
  parser.add_argument('--consensus', help='The optional consensus file', required=False)
  args = parser.parse_args()

  if args.consensus is None:
    downloader = DescriptorDownloader()
    consensus = downloader.get_consensus(document_handler = DocumentHandler.DOCUMENT).run()[0]
    t = time()
  else:
    with open(args.consensus) as f:
      consensus = next(parse_file(f, 'network-status-consensus-3 1.0', document_handler = DocumentHandler.DOCUMENT))
    t = mktime(consensus.valid_after.timetuple())

  service_id, tld = args.onion_address.split(".")
  if tld == 'onion' and len(service_id) == 16 and service_id.isalnum():   
      for replica in range(0, REPLICAS):
        descriptor_id = rend_compute_v2_desc_id(service_id, replica, t)
        print descriptor_id + '\t' + b32decode(descriptor_id, True).encode('hex')
        for router in find_responsible_HSDir(descriptor_id, consensus):
          print router['fingerprint'] + '\t' + router['nickname']

  else:
    print "[!] The onion address you provided is not valid"
Example #8
def get_relay_long_lat(relay_type):
  """
  Records the longitude and latitude of exit relays (relay_type 'E') or of
  guard relays that don't allow exiting (any other relay_type).
  """
  relay_locations = {}

  downloader = DescriptorDownloader(
    use_mirrors = True,
    timeout = 10,
  )
  query = downloader.get_consensus()

  for desc in query.run():
    if relay_type == 'E':
      if desc.exit_policy.is_exiting_allowed():
        relay_locations[desc.fingerprint] = [get_long_ip(desc.address), get_lat_ip(desc.address)]

        with open('Longitude_Latitude_Exit_nodes.txt', "a") as the_file:
          the_file.write(desc.fingerprint+' '+get_long_ip(desc.address)+' '+get_lat_ip(desc.address)+'\n')

    else:
      # 'Guard' flag from the consensus 's' line
      if 'Guard' in desc.flags and not desc.exit_policy.is_exiting_allowed():
        relay_locations[desc.fingerprint] = [get_long_ip(desc.address), get_lat_ip(desc.address)]

        with open('Longitude_Latitude_Middle_nodes.txt', "a") as the_file:
          the_file.write(desc.fingerprint+' '+get_long_ip(desc.address)+' '+get_lat_ip(desc.address)+'\n')

  return relay_locations
Example #9
def main():
	try:
		dump = open(PATHNAME,"wb")
		
		downloader = DescriptorDownloader()

		while True:
			query = downloader.get_server_descriptors(fingerprints=FINGERPRINT)

			for desc in query.run():
				dump.seek(0)
				dump.write("Nickname " + str(desc.nickname)+"\n")
				dump.write("Fingerprint " + "".join(str(desc.fingerprint).split())+"\n")
				dump.write("Published " + str(desc.published)+"\n")
				dump.write("Address " + str(desc.address)+"\n")
				dump.write("Version " + str(desc.tor_version)+"\n")
				dump.write("Uptime " + str(desc.uptime)+"\n")
				dump.write("Average_Bandwidth " + str(desc.average_bandwidth)+"\n")
				dump.write("Burst_Bandwidth " + str(desc.burst_bandwidth)+"\n")
				dump.write("Observed_Bandwidth " + str(desc.observed_bandwidth)+"\n")
				dump.write("Hibernating " + str(desc.hibernating)+"\n")

			time.sleep(DOWNLOAD_DELAY)

	except Exception as exc:
		print 'Unable to retrieve the server descriptors: %s' % exc
Example #10
    def tutorial_example_1():
      from stem.descriptor import DocumentHandler
      from stem.descriptor.remote import DescriptorDownloader

      downloader = DescriptorDownloader()
      consensus = downloader.get_consensus(document_handler = DocumentHandler.DOCUMENT).run()[0]

      with open('/tmp/descriptor_dump', 'w') as descriptor_file:
        descriptor_file.write(str(consensus))
Example #12
    def tutorial_example():
      from stem.descriptor.remote import DescriptorDownloader

      downloader = DescriptorDownloader()

      try:
        for desc in downloader.get_consensus().run():
          print('found relay %s (%s)' % (desc.nickname, desc.fingerprint))
      except Exception as exc:
        print('Unable to retrieve the consensus: %s' % exc)
Example #14
      def get_bw_to_relay():
        bw_to_relay = {}

        downloader = DescriptorDownloader()

        try:
          for desc in downloader.get_server_descriptors().run():
            if desc.exit_policy.is_exiting_allowed():
              bw_to_relay.setdefault(desc.observed_bandwidth, []).append(desc.nickname)
        except Exception as exc:
          print('Unable to retrieve the server descriptors: %s' % exc)

        return bw_to_relay
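      # A small follow-up sketch that prints the highest-bandwidth exit relays
      # from the mapping returned above; the cutoff of 5 is an arbitrary choice.
      def print_top_exits(bw_to_relay, count = 5):
        for bw_value in sorted(bw_to_relay, reverse = True)[:count]:
          for nickname in bw_to_relay[bw_value]:
            print('%s (%i KB/s observed)' % (nickname, bw_value / 1024))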
Example #15
 def __init__(self,
              initial_consensus=None,
              directory_cache_mode=None,
              max_concurrency=9):
     self.max_concurrency_lock = asyncio.BoundedSemaphore(max_concurrency)
     self.current_consensus = initial_consensus
     self.set_mode(directory_cache_mode
                   or DirectoryCacheMode.DIRECTORY_CACHE)
     self.downloader = DescriptorDownloader(
         timeout=5,
         retries=0,
     )
     self.descriptor_cache = None
Example #17
	def listExitNodes(self):
		'''
		List the Exit Nodes using the filters specified by command-line.
		'''
		nodesAlreadyScanned = []
		log.info("[+] Trying to list the current Exit-Nodes of TOR.")
		if self.cli.exitNodeFingerprint is not None:
			log.info("[+] Using the fingerprint: %s " % (self.cli.exitNodeFingerprint))
		log.info("[+] Filter by platform: %s." % (self.cli.mode))
		log.info("[+] Retrieving the first %d records in the Descriptors." % (self.cli.exitNodesToAttack))

		if self.cli.useMirror:
			log.info("[+] Using the Directory Mirrors to get the descriptors")
		downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
		nm = nmap.PortScanner()
		if self.cli.exitNodeFingerprint is not None:
			descriptors = downloader.get_server_descriptors(fingerprints=[self.cli.exitNodeFingerprint])
		else:
			descriptors = downloader.get_server_descriptors()
		try:
			listDescriptors = descriptors.run()
		except zlib.error:
			log.error("[-] Error fetching the TOR descriptors. This is quite common... Try again in a few seconds.")
			return
		log.info("[+] Number of Records found: %d " % (len(listDescriptors)))
		for descriptor in listDescriptors[1:self.cli.exitNodesToAttack]:
			# Scan a relay when its operating system matches the platform given on
			# the command-line (option mode), it allows exiting, and its address
			# has not been scanned already.
			if self.cli.mode.lower() in descriptor.operating_system.lower() and descriptor.exit_policy.is_exiting_allowed():
				if descriptor.address not in nodesAlreadyScanned:
					log.info("[+] %s System has been found... Nickname: %s - OS Version: %s" % (descriptor.operating_system, descriptor.nickname, descriptor.operating_system))
					log.info("[+] Starting the NMap Scan with the following options: ")
					log.info("[+][+] Scan Address: %s " % (descriptor.address))
					log.info("[+][+] Scan Arguments: %s " % (self.cli.scanArguments))
					log.info("[+][+] Scan Ports: %s " % (self.cli.scanPorts))
					if self.cli.scanArguments is not None:
						nm.scan(descriptor.address, self.cli.scanPorts, arguments=self.cli.scanArguments)
					else:
						nm.scan(descriptor.address, self.cli.scanPorts)
					self.recordNmapScan(nm)
					log.info('[+] Scan Ended for %s .' % (descriptor.nickname))
					nodesAlreadyScanned.append(descriptor.address)

		if len(self.exitNodes) == 0:
			log.info("[+] In the first %d records searching for the %s Operating System, there are no results (machines with detected open ports)" % (self.cli.exitNodesToAttack, self.cli.mode.lower()))
		return self.exitNodes
Example #18
 def get_or_from_network(self):
     self._or = {}
     self._relayBW = {}
     self._exit_or = set()
     downloader = DescriptorDownloader()
     try:
         for desc in downloader.get_server_descriptors().run():
             self._or[desc.fingerprint] = desc.nickname
             self._relayBW[desc.fingerprint] = desc.observed_bandwidth
             if desc.exit_policy.is_exiting_allowed() and self._num_hops > 1:
                 self._exit_or.add(desc.fingerprint)
             if desc.exit_policy.is_exiting_allowed() and self._num_hops == 1 and desc.allow_single_hop_exits:
                 self._exit_or.add(desc.fingerprint)
     except Exception as exc:
         print "Unable to retrieve the server descriptors: %s" % exc
Example #19
 def fallback_consensus_dl_speed(dirip, dirport, nickname, max_time):
   downloader = DescriptorDownloader()
   start = datetime.datetime.utcnow()
   # there appears to be about 1 second of overhead when comparing stem's
   # internal trace time and the elapsed time calculated here
   downloader.get_consensus(endpoints = [(dirip, dirport)]).run()
   elapsed = (datetime.datetime.utcnow() - start).total_seconds()
   if elapsed > max_time:
     status = 'too slow'
   else:
     status = 'ok'
   logging.debug(('Consensus download: %0.2fs %s from %s (%s:%d), '
                  + 'max download time %0.2fs.') % (elapsed, status,
                                                    nickname, dirip, dirport,
                                                    max_time))
   return elapsed
Example #21
def main():
    prior_fingerprints = load_fingerprints()
    downloader = DescriptorDownloader(timeout=60, validate=True)

    dry_run = False

    if not prior_fingerprints:
        log.debug(
            "We don't have any existing fingerprints so this will be a dry-run. No notifications will be sent."
        )
        dry_run = True
    else:
        last_modified = os.stat(
            FINGERPRINTS_FILE
        ).st_mtime  # unix timestamp for when it was last modified
        seconds_ago = int(time.time() - last_modified)

        log.debug("Our fingerprint was last modified at %s (%i seconds ago)." %
                  (time.ctime(last_modified), seconds_ago))

        if seconds_ago > (3 * 60 * 60):
            log.debug(
                "Fingerprint file was last modified over three hours ago. No notifications will be sent for this run."
            )
            dry_run = True

    query = downloader.get_consensus()
    query.run(True)

    if query.error:
        log.warn("Unable to retrieve the consensus: %s" % query.error)
        return

    # mapping of fingerprints to their router status entry
    relays = dict((entry.fingerprint, entry) for entry in query)

    current_fingerprints = set(relays.keys())
    new_fingerprints = current_fingerprints.difference(prior_fingerprints)
    log.debug("%i new relays found" % len(new_fingerprints))

    if not dry_run and len(new_fingerprints) >= 50:
        log.debug("Sending a notification...")
        send_email([relays[fp] for fp in new_fingerprints])

    save_fingerprints(prior_fingerprints.union(current_fingerprints))
Example #22
    def tutorial_example():
      from stem.descriptor.remote import DescriptorDownloader
      from stem.version import Version

      downloader = DescriptorDownloader()
      count, with_contact = 0, 0

      print('Checking for outdated relays...\n')

      for desc in downloader.get_server_descriptors():
        if desc.tor_version < Version('0.2.3.0'):
          count += 1

          if desc.contact:
            print('  %-15s %s' % (desc.tor_version, desc.contact.decode('utf-8', 'replace')))
            with_contact += 1

      print('\n%i outdated relays found, %i had contact information' % (count, with_contact))
Example #24
def fetch_descriptors():
    """
    Fetch and return relay descriptors.
    """

    downloader = DescriptorDownloader(use_mirrors=True, timeout=20)
    query = downloader.get_server_descriptors(validate=False)

    descs = {}
    try:
        for desc in query.run():
            descs[desc.fingerprint] = desc
        log.info("Query took %0.2f seconds." % query.runtime)
    except Exception as exc:
        log.critical("Unable to retrieve server descriptors: %s" % exc)

    log.info("Downloaded %d descs." % len(descs))

    return descs
Example #25
def check_and_update_bitcoin_fields(relay_details):
    """
    Load full descriptors and parse bitcoin address from X-bitcoin and contact fields then update
    the details.json file with the bitcoin address as a bitcoin_address field. The X-bitcoin field
    takes precedence over the contact field if both both contain bitcoin addresses.
    """
    data = json.loads(relay_details)

    downloader = DescriptorDownloader()
    extracted_addresses = {}
    try:
      # Parse X-bitcoin fields from the network consensus
      for relay_desc in downloader.get_server_descriptors().run():
        x_bitcoin_field = re.search("^X-bitcoin (.*)", str(relay_desc), re.MULTILINE)
        if x_bitcoin_field:
            if extract_bitcoin_address(x_bitcoin_field.group()):
                extracted_addresses[relay_desc.fingerprint] = extract_bitcoin_address(x_bitcoin_field.group())
    except Exception as exc:
        print "Unable to retrieve the network consensus: %s" % exc

    for relay in data['relays']:
        # Check if a bitcoin address was already extracted from X-bitcoin field
        if relay.get('fingerprint') in extracted_addresses:
            relay['bitcoin_address'] = extracted_addresses[relay.get('fingerprint')]

        # Parse bitcoin addresses from the contact field of details.json
        elif relay.get('contact') is not None:
            if extract_bitcoin_address(relay.get('contact')):
                relay['bitcoin_address'] = extract_bitcoin_address(relay.get('contact'))

    # Remove any relays without a bitcoin address, as they can't be used to
    # determine donation share
    data['relays'][:] = [relay for relay in data['relays'] if (relay.get('bitcoin_address'))]

    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file_name = temp_file.name
        json.dump(data, temp_file)

    # Atomically move the new json file to avoid errors where Oniontip
    # tries to parse a partially written json file.
    details_file_path = os.path.join(os.path.dirname(
        os.path.abspath(__file__)), 'details.json')
    shutil.move(temp_file_name, details_file_path)
Example #28
def fillQueue(dbConnection):
    print term.format("Starting Tor:\n")

    # Launch Tor
    tor_process = stem.process.launch_tor_with_config(
        config={
            'SocksPort':
            str(SOCKS_PORT),
            'ControlPort':
            '9051',
            'HashedControlPassword':
            '******',
        },
        init_msg_handler=print_bootstrap_lines,
        take_ownership=True,
    )
    now = datetime.datetime.now()
    downloadSuccessful = False
    errorCount = 0
    while not downloadSuccessful:
        try:
            controller = Controller.from_port(port=9051)
            # IMPORTANT: Change the password to the fitting "HashedControlPassword" set in the Torrc by stem, a few lines above here
            controller.authenticate("schnitzel")
            downloader = DescriptorDownloader()
            # Now fetch those servers and add them to the queue
            serverDescriptors = downloader.get_server_descriptors().run()
            i = 1
            for desc in serverDescriptors:
                if desc.exit_policy.is_exiting_allowed():
                    addItemToQueue(dbConnection, desc, now, i)
                    i = i + 1
            downloadSuccessful = True
        except:
            e = sys.exc_info()[0]
            print e
            errorCount = errorCount + 1
            if (errorCount >= 5):
                print "HUGE CLUSTERFUCK, stopping execution"
                tor_process.kill()  # stops tor
                raise
    tor_process.kill()  # stops tor
Example #30
def get_top_relays(country):
  """
  Gets the top (the top criteria being the router's bandwidth) relays in a Country
  """
  downloader = DescriptorDownloader(
    use_mirrors = True,
    timeout = 10,
  )

  query = downloader.get_consensus()
  router_bandwidth = {}
  i = 0

  for desc in query.run():
    router_bandwidth[i] = [desc.fingerprint, desc.exit_policy, desc.bandwidth, get_location(desc.address)]
    i = i + 1

  # sort the relays by bandwidth, in descending order
  sorted_relays = []
  i = 0
  for key, value in sorted(router_bandwidth.items(), key = lambda fun: fun[1][2], reverse = True):
    sorted_relays.insert(i, [value[0], value[1], value[2], value[3]])
    i = i + 1

  # keep the fingerprint (plus location) of relays that allow exiting and are
  # located in the given country
  sorted_exit_relays = []
  i = 0

  for item in sorted_relays:
    if item[1].is_exiting_allowed() and item[3].lower() == country.lower():
      sorted_exit_relays.insert(i, [item[0], item[3]])
      i = i + 1

  return sorted_exit_relays
Example #31
def get_bandwidth(fingerprint):
	# Optimization TODO: store all fingerprints and their bandwidths in a
	# pickle file, along with the time they were last fetched. If the data
	# is older than 24h or the fingerprint isn't found, reload the consensus;
	# otherwise just load from the pickle file.
	downloader = DescriptorDownloader(
		use_mirrors = True,
		timeout = 10,
	)

	query = downloader.get_consensus()
	for desc in query.run():
		if desc.fingerprint == fingerprint:
			return desc.bandwidth
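# A minimal, self-contained take on the caching TODO sketched above. The cache
# file name and the 24-hour refresh policy are assumptions, not part of the
# original snippet.
import os
import pickle
import time

from stem.descriptor.remote import DescriptorDownloader

CACHE_FILE = 'bandwidth_cache.pickle'
MAX_AGE = 24 * 60 * 60  # refresh the cached consensus data once a day

def get_bandwidth_cached(fingerprint):
    cache = {'fetched_at': 0, 'bandwidths': {}}
    if os.path.exists(CACHE_FILE):
        with open(CACHE_FILE, 'rb') as cache_file:
            cache = pickle.load(cache_file)

    if time.time() - cache['fetched_at'] >= MAX_AGE or fingerprint not in cache['bandwidths']:
        downloader = DescriptorDownloader(use_mirrors=True, timeout=10)
        cache['bandwidths'] = dict((desc.fingerprint, desc.bandwidth)
                                   for desc in downloader.get_consensus().run())
        cache['fetched_at'] = time.time()
        with open(CACHE_FILE, 'wb') as cache_file:
            pickle.dump(cache, cache_file)

    return cache['bandwidths'].get(fingerprint)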
Example #32
def check_and_update_bitcoin_fields(relay_details):
    """
    Load full descriptors and parse bitcoin address from X-bitcoin and contact fields then update
    the details.json file with the bitcoin address as a bitcoin_address field. The X-bitcoin field
    takes precedence over the contact field if both both contain bitcoin addresses.
    """
    data = json.loads(relay_details)

    downloader = DescriptorDownloader()
    extracted_addresses = {}
    try:
      # Parse X-bitcoin fields from the network consensus
      for relay_desc in downloader.get_server_descriptors().run():
        x_bitcoin_field = re.search("^X-bitcoin (.*)", str(relay_desc), re.MULTILINE)
        if x_bitcoin_field:
            if extract_bitcoin_address(x_bitcoin_field.group()):
                extracted_addresses[relay_desc.fingerprint] = extract_bitcoin_address(x_bitcoin_field.group())
    except Exception as exc:
        print "Unable to retrieve the network consensus: %s" % exc

    for relay in data['relays']:
        # Check if a bitcoin address was already extracted from X-bitcoin field
        if relay.get('fingerprint') in extracted_addresses:
            relay['bitcoin_address'] = extracted_addresses[relay.get('fingerprint')]

        # Parse bitcoin addresses from the contact field of details.json
        elif relay.get('contact') is not None:
            if extract_bitcoin_address(relay.get('contact')):
                relay['bitcoin_address'] = extract_bitcoin_address(relay.get('contact'))

    # Remove any relays without a bitcoin address, as they can't be used to
    # determine donation share
    data['relays'][:] = [relay for relay in data['relays'] if (relay.get('bitcoin_address'))]

    # Write parsed list to file
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'details.json'), 'w') as details_file:
        json.dump(data, details_file)
Example #36
class DirectoryDownloader:
    """
    The :py:class:`DirectoryDownloader` provides an
    :py:mod:`asyncio`-compatible wrapper around the stem
    :py:class:`~stem.descriptor.remote.DescriptorDownloader`, with two modes of
    operation:

    * Directory Cache ([dir-spec]_ §4)
    * Client ([dir-spec]_ §5)

    The DirectoryDownloader will not initiate downloads of its own accord;
    it must be driven to perform downloads through the methods provided.

    .. note:: As a valid consensus is required to implement parts of the
              functionality, the latest consensus is cached internally. This
              cached consensus should not be relied upon by external code. The
              cached consensus will never be served as a response to a request
              for a consensus.
    """
    def __init__(self,
                 initial_consensus=None,
                 directory_cache_mode=None,
                 max_concurrency=9):
        self.max_concurrency_lock = asyncio.BoundedSemaphore(max_concurrency)
        self.current_consensus = initial_consensus
        self.set_mode(directory_cache_mode
                      or DirectoryCacheMode.DIRECTORY_CACHE)
        self.downloader = DescriptorDownloader(
            timeout=5,
            retries=0,
        )
        self.descriptor_cache = None

    def set_mode(self, directory_cache_mode):
        if not isinstance(directory_cache_mode, DirectoryCacheMode):
            raise TypeError()
        if directory_cache_mode == DirectoryCacheMode.DIRECTORY_CACHE:
            self.endpoints = \
                self.extra_info_endpoints = self.directory_authorities()
        elif directory_cache_mode == DirectoryCacheMode.CLIENT:
            self.endpoints = self.directory_caches()
            self.extra_info_endpoints = self.directory_caches(extra_info=True)
        elif directory_cache_mode == DirectoryCacheMode.TESTING:
            self.endpoints = \
                self.extra_info_endpoints = [LOCAL_DIRECTORY_CACHE]

    def directory_authorities(self):
        """
        Returns a list containing either a :py:class:`~stem.DirPort` or
        an :py:class:`~stem.ORPort` for each of the directory authorities.
        """
        return [a.dir_port for a in DIRECTORY_AUTHORITIES]

    def directory_caches(self, extra_info=False):
        """
        Returns a list containing either a DirPort or an ORPort for
        each of the directory caches known from the latest consensus. If no
        consensus is known, this will return
        :py:meth:`~DirectoryDownloader.directory_authorities` instead.

        :param bool extra_info: Whether the list returned should contain only
                                directory caches that cache extra-info
                                descriptors.
        """
        if self.current_consensus is None or \
              not self.current_consensus.is_valid():
            LOG.warning(
                "Tried to use directory caches but we don't have a consensus")
            return self.directory_authorities()
        directory_caches = [a.dir_port for a in DIRECTORY_AUTHORITIES]
        for router in self.current_consensus.routers.values():
            if extra_info and self.descriptor_cache:
                server_descriptor = self.descriptor_cache(
                    SERVER_DESCRIPTOR, router.digest)
                if (not server_descriptor) or (
                        not server_descriptor.extra_info_cache):
                    continue
            if stem.Flag.V2DIR in router.flags and router.dir_port:  # pylint: disable=no-member
                directory_caches.append(
                    DirPort(router.address, router.dir_port))
        return directory_caches

    async def _consensus_attempt(self, flavor, endpoint):
        query = self.downloader.query(
            f"/tor/status-vote/current/consensus-{flavor}",
            document_handler=stem.descriptor.DocumentHandler.DOCUMENT,  # pylint: disable=no-member
            endpoints=[endpoint])
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(
            None, functools.partial(query.run, suppress=True))
        for consensus in result:
            self.current_consensus = consensus
            return consensus

    async def relay_consensus(self,
                              flavor="ns",
                              valid_after=None,
                              *,
                              endpoint=None):
        endpoints = self.endpoints.copy()
        random.shuffle(endpoints)
        for endpoint in endpoints:
            consensus = await self._consensus_attempt(flavor, endpoint)
            if consensus:
                return consensus

    async def vote(self,
                   valid_after=None,
                   v3ident=None,
                   digest="*",
                   endpoint=None):
        if digest == "*":
            url = f"/tor/status-vote/current/authority"
            #url = f"/tor/status-vote/{'next' if next_vote else 'current'}/authority"
        else:
            url = f"/tor/status-vote/current/d/{digest}"
        query = self.downloader.query(
            url,
            document_handler=stem.descriptor.DocumentHandler.DOCUMENT,  # pylint: disable=no-member
            endpoints=[endpoint] if endpoint else self.directory_authorities())
        LOG.debug("Started consensus download")
        while not query.is_done:
            await asyncio.sleep(1)
        LOG.debug("Vote download completed successfully")
        for vote in query:
            return vote

    async def _multiple_descriptors(self, query_path_function, digests,
                                    endpoints):
        loop = asyncio.get_running_loop()
        descriptors = []
        endpoints = endpoints.copy()
        random.shuffle(endpoints)
        while endpoints and digests:
            endpoint = endpoints.pop()
            async with self.max_concurrency_lock:
                query = self.downloader.query(query_path_function(digests),
                                              endpoints=[endpoint])
                result = await loop.run_in_executor(
                    None, functools.partial(query.run, suppress=True))
            for descriptor in result:
                try:
                    digests.remove(descriptor.digest())
                except ValueError:
                    LOG.error(
                        "I was given a descriptor I didn't ask for! This "
                        "likely indicated a bug in the server we used.")
                descriptors.append(descriptor)
        if digests:
            LOG.warning("Downloader failed to find descriptors: %s.",
                        query_path_function(digests))
        return descriptors

    async def relay_server_descriptors(self, digests, published_hint=None):
        """
        Retrieves multiple server descriptors from directory servers.

        :param list(str) digests: Hex-encoded digests for the descriptors.
        :param ~datetime.datetime published_hint: Provides a hint on the
            published time. Currently this is unused, but is accepted for
            compatibility with other directory sources. In the future this may
            be used to avoid attempts to download descriptors that it is likely
            are long gone.

        :returns: A :py:class:`list` of
                  :py:class:`stem.descriptor.server_descriptor.RelayDescriptor`.
        """
        batches = chunks(digests, MAX_FINGERPRINTS)
        return list(
            chain(*await asyncio.gather(*[
                self._multiple_descriptors(relay_server_descriptors_query_path,
                                           batch, self.endpoints)
                for batch in batches
            ])))

    async def relay_extra_info_descriptors(self, digests, published_hint=None):
        """
        Retrieves multiple extra-info descriptors from directory servers.

        :param list(str) digests: Hex-encoded digests for the descriptors.
        :param ~datetime.datetime published_hint: Provides a hint on the
            published time. Currently this is unused, but is accepted for
            compatibility with other directory sources. In the future this may
            be used to avoid attempts to download descriptors that it is likely
            are long gone.

        :returns: A :py:class:`list` of
                  :py:class:`stem.descriptor.extrainfo_descriptor.RelayExtraInfoDescriptor`.
        """
        batches = chunks(digests, MAX_FINGERPRINTS)
        return list(
            chain(*await asyncio.gather(*[
                self._multiple_descriptors(
                    relay_extra_info_descriptors_query_path, batch,
                    self.extra_info_endpoints) for batch in batches
            ])))

    async def relay_microdescriptors(self,
                                     microdescriptor_hashes,
                                     valid_after_hint=None):
        """
        Retrieves multiple microdescriptors from directory servers.

        :param list(str) microdescriptor_hashes: base64-encoded hashes for the microdescriptors.
        :param ~datetime.datetime valid_after_hint: Provides a hint on the
            valid_after time. Currently this is unused, but is accepted for
            compatibility with other directory sources. In the future this may
            be used to avoid attempts to download descriptors that it is likely
            are long gone.

        :returns: A :py:class:`list` of
                  :py:class:`stem.descriptor.microdescriptor.Microdescriptor`.
        """
        batches = chunks(microdescriptor_hashes, MAX_MICRODESCRIPTOR_HASHES)
        return list(
            chain(*await asyncio.gather(*[
                self._multiple_descriptors(relay_microdescriptors_query_path,
                                           batch, self.endpoints)
                for batch in batches
            ])))
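# A hypothetical driver for the class above; it assumes the module-level names
# the class depends on (DIRECTORY_AUTHORITIES, DirectoryCacheMode, and so on)
# are defined, and simply fetches one consensus.
import asyncio

async def fetch_consensus_once():
    downloader = DirectoryDownloader()
    consensus = await downloader.relay_consensus()
    if consensus is not None:
        print('Got a consensus valid after %s' % consensus.valid_after)

asyncio.run(fetch_consensus_once())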
Example #37
# Script to fetch the IP addresses of Tor relays using the stem library
from colorama import init, Fore, Back, Style
from stem.descriptor.remote import DescriptorDownloader
import re
init()
cont = 0
infoCompleta = ""
downloader = DescriptorDownloader()
for descriptor in downloader.get_consensus().run():
    if descriptor.exit_policy.is_exiting_allowed():
        ipFind = re.findall(r'[0-9]+(?:\.[0-9]+){3}', str(descriptor))
        ipFind.pop(1)
        parse = "".join(ipFind)
        infoCompleta = infoCompleta + "\n" + str(parse)
        cont = cont + 1

print("---------All Tor Relays------------")
print(infoCompleta)
print("\nThere are currently " + str(cont) + " Tor nodes\n")
try:
    f = open("export.txt", "a")
    f.write(infoCompleta)
    f.close()
    print("The data was saved to the txt file\n")
except:
    print("could not create the txt file\n")

search = input("Search for an IP? (y or n): ")
if search == "y":
    ip = input("IP to search (e.g. 10.0.0.0): ")
Example #38
def download_descriptors():
	from stem.descriptor.remote import DescriptorDownloader
	print( "Fetching server descriptors..." )
	return DescriptorDownloader().get_server_descriptors().run()
Example #39
from stem.descriptor import DocumentHandler
from stem.descriptor.remote import DescriptorDownloader

downloader = DescriptorDownloader()
consensus = downloader.get_consensus(
    document_handler=DocumentHandler.DOCUMENT).run()[0]

with open('/tmp/descriptor_dump', 'w') as descriptor_file:
    descriptor_file.write(str(consensus))
Example #40
# Contribute at https://github.com/labriqueinternet/torfilter_ynh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from stem.descriptor.remote import DescriptorDownloader
import yaml
import sys

downloader = DescriptorDownloader()

try:
  desc = downloader.get_consensus().run()
except Exception as exc:
  print("Unable to retrieve the consensus: %s" % exc)
  sys.exit(1)

f = open(sys.argv[1], 'w+')
for d in desc:
  f.write("%s\n" % d.address)
f.close()
Example #41
            else:
                if (lon < 0.6):
                    allNodes['EU11']['relays'].append(rel)
                    allNodes['EU11']['bndw'].append(rel.bandwidth)
                elif (lon < 11.8):
                    allNodes['EU12']['relays'].append(rel)
                    allNodes['EU12']['bndw'].append(rel.bandwidth)
                else:
                    allNodes['EU13']['relays'].append(rel)
                    allNodes['EU13']['bndw'].append(rel.bandwidth)


with Controller.from_port() as controller:
    controller.authenticate()
    #data_dir = controller.get_conf('DataDirectory')
    downloader_0 = DescriptorDownloader()
    # 2. Using descriptors to get the list of relays
    '''
	for rel in parse_file(os.path.join(data_dir, 'cached-microdesc-consensus')):
		# 2a. Get the ip location
		if (rel is not None):
			getIpLocation(rel) #2b. Append them into dictionary
	'''

    try:
        for desc in downloader_0.get_server_descriptors().run():
            print desc.average_bandwidth
            print desc.burst_bandwidth
            print desc.observed_bandwidth
    except Exception as exc:
        print "Error: ", exc
Example #43
from stem.descriptor.remote import DescriptorDownloader

downloader = DescriptorDownloader()

descriptors = downloader.get_server_descriptors().run()

for descriptor in descriptors:
    print('Descriptor', str(descriptor))
    print('Certificate', descriptor.certificate)
    print('Onion key', descriptor.onion_key)
    print('Signing key', descriptor.signing_key)
    print('Signature', descriptor.signature)

Example #44
CHECK_PORT = 80  # exit == allows port 80
TOP_PERCENT = 80 # limit analysis to 80% of total observed bw

from stem.descriptor.remote import DescriptorDownloader # to fetch descriptors
from stem.descriptor import parse_file # alternatively, for local parsing
import os
import collections
from difflib import SequenceMatcher

print "Fetching latest descriptors, can take a while..."

if os.path.exists('cached-consensus'):
    descriptors = parse_file('cached-consensus')
else:
    downloader = DescriptorDownloader(use_mirrors=True, timeout=10)
    query = downloader.get_server_descriptors()
    descriptors = query.run()

print ""

#exits_only = filter(lambda descriptor:descriptor.exit_policy.is_exiting_allowed(), descriptors)
exits_only = filter(lambda descriptor:descriptor.exit_policy.can_exit_to(port=CHECK_PORT), descriptors)
exits_sorted =  sorted(exits_only, key=lambda descriptor:descriptor.observed_bandwidth,reverse=True)

print "%s relays (%s exits)" % (len(descriptors), len(exits_sorted))

total_bw = 0
total_exit_bw = 0

for desc in descriptors:
Example #45
def main():
    last_notified_config = conf.get_config('last_notified')
    last_notified_path = util.get_path('data',
                                       'fingerprint_change_last_notified.cfg')

    if os.path.exists(last_notified_path):
        last_notified_config.load(last_notified_path)
    else:
        last_notified_config._path = last_notified_path

    fingerprint_changes = load_fingerprint_changes()
    downloader = DescriptorDownloader(timeout=15)
    alarm_for = {}

    for relay in downloader.get_consensus():
        prior_fingerprints = fingerprint_changes.setdefault(
            (relay.address, relay.or_port), {})

        if relay.fingerprint not in prior_fingerprints:
            log.debug("Registering a new fingerprint for %s:%s (%s)" %
                      (relay.address, relay.or_port, relay.fingerprint))
            prior_fingerprints[relay.fingerprint] = datetime_to_unix(
                relay.published)

            # drop fingerprint changes that are over ten days old

            old_fingerprints = [
                fp for fp in prior_fingerprints
                if (time.time() - prior_fingerprints[fp] > TEN_DAYS)
            ]

            for fp in old_fingerprints:
                log.debug(
                    "Removing fingerprint for %s:%s (%s) which was published %i days ago"
                    % (relay.address, relay.or_port, fp,
                       prior_fingerprints[fp] / 60 / 60 / 24))
                del prior_fingerprints[fp]

            # if we've changed more than ten times in the last ten days then alarm

            if len(prior_fingerprints) >= 10:
                alarm_for['%s:%s' %
                          (relay.address, relay.or_port)] = (relay.address,
                                                             relay.or_port,
                                                             relay.fingerprint)

    if alarm_for and not is_notification_suppressed(alarm_for.values()):
        log.debug("Sending a notification for %i relays..." % len(alarm_for))
        body = EMAIL_BODY

        for address, or_port, fingerprint in alarm_for.values():
            try:
                desc = downloader.get_server_descriptors(fingerprint).run()[0]
            except Exception:
                desc = None  # might not be available, just used for extra info

            fp_changes = fingerprint_changes[(address, or_port)]
            log.debug("* %s:%s has had %i fingerprints: %s" %
                      (address, or_port, len(fp_changes), ', '.join(
                          fp_changes.keys())))

            if desc:
                body += "* %s:%s (platform: %s, contact: %s)\n" % (
                    address, or_port, desc.platform.decode(
                        'utf-8', 'replace'), desc.contact)
            else:
                body += "* %s:%s\n" % (address, or_port)

            count = 0

            for fingerprint in sorted(fp_changes,
                                      reverse=True,
                                      key=lambda k: fp_changes[k]):
                body += "  %s at %s\n" % (
                    fingerprint,
                    datetime.datetime.fromtimestamp(
                        fp_changes[fingerprint]).strftime('%Y-%m-%d %H:%M:%S'))
                count += 1

                # Relays frequently cycling their fingerprint can have thousands of
                # entries. Enumerating them all is unimportant, so if too long then
                # just give the count.

                if count > 8:
                    oldest_timestamp = sorted(fp_changes.values())[0]
                    body += "  ... and %i more since %s\n" % (
                        len(fp_changes) - 8,
                        datetime.datetime.fromtimestamp(
                            oldest_timestamp).strftime('%Y-%m-%d %H:%M:%S'))
                    break

            body += "\n"

        subject = EMAIL_SUBJECT

        if len(alarm_for) == 1:
            subject += ' (%s:%s)' % list(alarm_for.values())[0][:2]

        util.send(subject,
                  body=body,
                  to=[
                      '*****@*****.**',
                      '*****@*****.**'
                  ])

        # register that we've notified for these

        current_time = str(int(time.time()))

        for address, or_port, _ in alarm_for.values():
            last_notified_config.set('%s:%s' % (address, or_port),
                                     current_time)

        last_notified_config.save()

    save_fingerprint_changes(fingerprint_changes)
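
load_fingerprint_changes() and save_fingerprint_changes() come from the surrounding module and aren't shown; a minimal sketch that would satisfy the calls above, assuming JSON persistence and a hypothetical path:

import json
import os

FINGERPRINT_CHANGES_PATH = '/tmp/fingerprint_changes.json'  # hypothetical location

def load_fingerprint_changes():
    # stored as {'address:or_port': {fingerprint: unix_timestamp}} since JSON
    # cannot represent tuple keys directly
    if not os.path.exists(FINGERPRINT_CHANGES_PATH):
        return {}

    with open(FINGERPRINT_CHANGES_PATH) as changes_file:
        raw = json.load(changes_file)

    changes = {}
    for key, fingerprints in raw.items():
        address, or_port = key.rsplit(':', 1)
        changes[(address, int(or_port))] = fingerprints
    return changes

def save_fingerprint_changes(fingerprint_changes):
    raw = dict(('%s:%s' % key, value) for key, value in fingerprint_changes.items())

    with open(FINGERPRINT_CHANGES_PATH, 'w') as changes_file:
        json.dump(raw, changes_file)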
Example #46
0
global url
url = args.url or "http://the.earth.li/~sgtatham/putty/latest/x86/putty.exe"

if url.startswith("https://"):
    print(
        term.format("Detected HTTPS connection, should be plaintext (HTTP)",
                    term.Color.RED))

print("URL: " + url)

SOCKS_PORT = 1330
TIMEOUT = 10

downloader = DescriptorDownloader(
    use_mirrors=False,
    timeout=10,
)

query = downloader.get_server_descriptors()

# collect every exit fingerprint into fp.txt; open the file once rather than
# re-opening (and truncating) it for each relay
with open("fp.txt", "w") as fp_file:
    for desc in query.run():
        if desc.exit_policy.is_exiting_allowed():
            fp_file.write('{}\n'.format(desc.fingerprint))

xlines = sum(1 for line in open('fp.txt'))
print("We will test " + str(xlines) + " nodes")
atline = 0

socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)
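
The loop that actually exercises each exit isn't shown; one way to finish it (a Python 3 sketch: the control port 9051, the authentication, and the SHA256 check are assumptions, not part of the original) is to pin ExitNodes through the control port and fetch the URL over the SOCKS proxy set up above:

import hashlib
import socket
import urllib.request

from stem import Signal
from stem.control import Controller

socket.socket = socks.socksocket  # route all new sockets through the SOCKS proxy

with Controller.from_port(port=9051) as controller:  # assumed control port
    controller.authenticate()

    for line in open('fp.txt'):
        fingerprint = line.strip()
        atline += 1
        print("Testing node %d/%d: %s" % (atline, xlines, fingerprint))

        try:
            controller.set_conf('ExitNodes', '$' + fingerprint)
            controller.signal(Signal.NEWNYM)  # ask for fresh circuits
            content = urllib.request.urlopen(url, timeout=TIMEOUT).read()
            print("  SHA256: " + hashlib.sha256(content).hexdigest())
        except Exception as exc:
            print("  Failed: %s" % exc)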
import time
start_time = time.time()
from stem.descriptor.remote import DescriptorDownloader
import urllib2
downloader = DescriptorDownloader()

try:
  for desc in downloader.get_consensus().run():
      #print "%i %s" % (int(desc.bandwidth), desc.address)
      #____Integrating parsing GeoIP
      IP = str(desc.address)

      
      url1 = 'http://geoip.nekudo.com/api/' + IP
      website1 = urllib2.urlopen(url1)
      content1 = website1.read()

      url2 = 'http://ipinfo.io/' + IP
      website2 = urllib2.urlopen(url2)
      content2 = website2.read()
        #parsing Latitude

      start1 = content1.find('latitude":')+10 # +10 skips past the 'latitude":' label
      end1 = content1.find(',',start1)

        #parsing Longitude

      start2 = content1.find('longitude":')+11 # +11 skips past the 'longitude":' label
      end2 = content1.find(',',start2)

        #parsing ASN
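      # (sketch, not part of the original: the example is truncated at the ASN
      # step. ipinfo.io's response in content2 carries the ASN in its "org"
      # field, e.g. '"org": "AS24940 Hetzner Online GmbH"', so the same
      # find()-based parsing could be:)
      start3 = content2.find('"org": "') + 8  # +8 skips past the '"org": "' label
      end3 = content2.find(' ', start3)

      print '%s %s %s %s' % (IP, content1[start1:end1], content1[start2:end2], content2[start3:end3])
except Exception as exc:
  print "Unable to retrieve the consensus: %s" % exc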
from stem.descriptor.remote import DescriptorDownloader
import urllib2
import matplotlib.pyplot as plt
downloader = DescriptorDownloader()

try:
  for desc in downloader.get_consensus().run()[:-1]:
      #print "%i %s" % (int(desc.bandwidth), desc.address)
      #____Integrating parsing GeoIP
      IP = str(desc.address)
      url = 'http://www.geoiptool.com/en/?ip=' + IP
      website = urllib2.urlopen(url)
      content = website.read()
        #parsing Latitude
      start1 = content.find('lat:')+4 # +4 skips past the 'lat:' label
      end1 = content.find(',',start1)
        #parsing Longitude
      start2 = content.find('lng:')+4
      end2 = content.find(',',start2)

      #print '%s %1.8f %1.8f' %(IP, float(content[start1:end1]), float(content[start2:end2]))
      if len(content[start1:end1]) > 1: # to test if the GeoIP is available
          print '%s %1.8f %1.8f' %(IP, float(content[start1:end1]), float(content[start2:end2]))
          plt.plot([float(content[start1:end1])],[float(content[start2:end2])], 'ro')
          plt.axis([-180, 180, -180, 180])
          plt.show(block = False)
      else:
          print '%s 0 0' %(IP)
          plt.plot(0,0, 'ro')
          plt.axis([-180, 180, -180, 180])
          plt.show(block = False)
except Exception as exc:
  print 'Unable to retrieve the consensus: %s' % exc
from stem.descriptor.remote import DescriptorDownloader

downloader = DescriptorDownloader()
server_descriptors = downloader.get_server_descriptors().run()

with open('/tmp/descriptor_dump', 'w') as descriptor_file:
  descriptor_file.write(''.join(map(str, server_descriptors)))
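
As with the consensus dump earlier, the file can be re-parsed by naming the descriptor type, since the dump lacks an @type annotation:

from stem.descriptor import parse_file

server_descriptors = parse_file('/tmp/descriptor_dump', descriptor_type = 'server-descriptor 1.0')

for relay in server_descriptors:
  print(relay.fingerprint)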
Example #50
0
from stem.descriptor.remote import DescriptorDownloader
from stem.version import Version

downloader = DescriptorDownloader()
count, with_contact = 0, 0

print("Checking for outdated relays...")
print("")

for desc in downloader.get_server_descriptors():
  if desc.tor_version < Version('0.2.3.0'):
    count += 1

    if desc.contact:
      print('  %-15s %s' % (desc.tor_version, desc.contact.decode("utf-8", "replace")))
      with_contact += 1

print("")
print("%i outdated relays found, %i had contact information" % (count, with_contact))
Example #51
0
import time
start_time = time.time()
import json
import requests
import stem

from stem.descriptor.remote import DescriptorDownloader
downloader = DescriptorDownloader(use_mirrors = False)

try:
    for desc in downloader.get_server_descriptors().run():
        IP = str(desc.address)
        print(IP)
        # retrieve geolocation data from ip-api.com
        data = requests.get('http://ip-api.com/json/'+IP+'?fields=query,countryCode,isp,org,as,reverse')
        # parse the response as JSON
        variables = data.json()
        # parse the AS number and organization info
        x = variables["as"]
        if ' ' in x:
            asn = x.split(' ', 1)[0].split('S', 1)[1]  # split 'ASxxx ...' twice to keep just the ASN digits
        else:
            asn = x.split('AS', 1)[-1]
        print(IP+', '+asn+', '+variables["countryCode"]+', '+variables["isp"]+', '+variables["org"]+', '+variables["reverse"])
        time.sleep(0.5)
except Exception as exc:
    print("Unable to retrieve the server descriptors: %s" % exc)

#End parsing ASN and Org
print("ip-api.com %1.6f seconds ---" %(time.time() - start_time))
Example #52
0
from stem.descriptor.remote import DescriptorDownloader
import os

downloader = DescriptorDownloader(
    use_mirrors=True,
    timeout=20,
)

query = downloader.get_server_descriptors()

searchString = "[tor-relay.co]"
trcCount = 0
combinedUptime = 0
combinedBandwidth = 0

nodes = list()

try:
    for desc in query.run():
        if searchString in str(desc.contact):
            trcCount += 1
            combinedUptime += desc.uptime or 0  # uptime may be absent (None) in a descriptor
            combinedBandwidth += desc.observed_bandwidth
            nodes.append({
                'name': desc.nickname,
                'bandwidth': desc.observed_bandwidth
            })
except Exception as exc:
    print(exc)

if len(nodes) > 0:
Example #53
0
print("SHA256 sum: " + file_hash)

global url
url = args.url or "http://the.earth.li/~sgtatham/putty/latest/x86/putty.exe"

if url.startswith("https://"):
    print(term.format("Detected HTTPS connection, should be plaintext (HTTP)", term.Color.RED))

print("URL: " + url)

SOCKS_PORT = 1330
TIMEOUT = 10

downloader = DescriptorDownloader(
  use_mirrors = False,
  timeout = 10,
)

query = downloader.get_server_descriptors()

# collect every exit fingerprint into fp.txt; open the file once rather than
# re-opening (and truncating) it for each relay
with open("fp.txt", "w") as fp_file:
    for desc in query.run():
        if desc.exit_policy.is_exiting_allowed():
            fp_file.write('{}\n'.format(desc.fingerprint))

xlines = sum(1 for line in open('fp.txt'))
print("We will test " + str(xlines) + " nodes")
atline = 0

socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)