예제 #1
0
파일: Discovery.py 프로젝트: repson/Tortazo
    def listAuthorityExitNodes(self):
        '''
        List the Exit Nodes using the filters specified by command-line.

        Downloads server descriptors (optionally via directory mirrors
        and/or restricted to a single fingerprint) and hands the result to
        self.filterNodes(). Returns None when the download fails.
        '''
        self.cli.logger.debug(term.format("[+] Try to listing the current Exit-Nodes of TOR.", term.Color.GREEN))
        if self.cli.exitNodeFingerprint is not None:
            self.cli.logger.debug(term.format("[+] Using the fingerprint: %s " % (self.cli.exitNodeFingerprint), term.Color.GREEN))
        self.cli.logger.debug(term.format("[+] Filter by platform: %s." % (self.cli.mode), term.Color.GREEN))
        self.cli.logger.debug(term.format("[+] Retrieving the first %d records in the Descriptors." % (self.cli.exitNodesToAttack),
                             term.Color.GREEN))

        if self.cli.useMirror:
            self.cli.logger.info(term.format("[+] Using the Directory Mirrors to get the descriptors", term.Color.YELLOW))
        downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
        # Restrict the query to a single relay when a fingerprint was supplied.
        if self.cli.exitNodeFingerprint is not None:
            descriptors = downloader.get_server_descriptors(fingerprints=[self.cli.exitNodeFingerprint])
        else:
            descriptors = downloader.get_server_descriptors()
        try:
            listDescriptors = descriptors.run()
        except zlib.error:
            # Transient decompression failures from the directory servers are common.
            self.cli.logger.error(term.format("[-] Error fetching the TOR descriptors. This is something quite common... Try again in a few seconds.",term.Color.RED))
            return
        except urllib2.HTTPError:
            # Message typo fixed: was "Figerprint".
            self.cli.logger.error(term.format("[-] Fingerprint not found... It's not registered in the last valid server descriptor.",term.Color.RED))
            return
        return self.filterNodes(listDescriptors)
예제 #2
0
	def listExitNodes(self):
		'''
			List the Exit Nodes using the filters specified by command-line.
		'''
		# Addresses already scanned in this run, to avoid duplicate nmap scans.
		nodesAlreadyScanned = []
		log.info("[+] Try to listing the current Exit-Nodes of TOR.")
		if self.cli.exitNodeFingerprint != None:
			log.info("[+] Using the fingerprint: %s " % (self.cli.exitNodeFingerprint))
		log.info("[+] Filter by platform: %s." % (self.cli.mode))
		log.info("[+] Retrieving the first %d records in the Descriptors." %(self.cli.exitNodesToAttack))
		
		if self.cli.useMirror == True:
			log.info("[+] Using the Directory Mirrors to get the descriptors")
		downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
		nm = nmap.PortScanner()
		# Restrict the download to one relay when a fingerprint was supplied.
		if self.cli.exitNodeFingerprint != None:
			descriptors = downloader.get_server_descriptors(fingerprints=[self.cli.exitNodeFingerprint])
		else:
			descriptors = downloader.get_server_descriptors()
		try:
			listDescriptors = descriptors.run()
		except zlib.error:
			# Transient decompression failures from the directory servers are common.
			log.error("[-] Error fetching the TOR descriptors. This is something quite common... Try again in a few seconds.")
			return				
		log.info("[+] Number of Records found: %d " %(len(listDescriptors)))		
		# NOTE(review): the slice [1:n] skips the first descriptor and yields at
		# most n-1 records, although the log above promises the "first n".
		# Possible off-by-one — confirm intent before changing.
		for descriptor in listDescriptors[1:self.cli.exitNodesToAttack]:
		#for descriptor in parse_file(open("/home/adastra/Escritorio/tor-browser_en-US-Firefox/Data/Tor/cached-consensus")):
			if self.cli.mode.lower() in descriptor.operating_system.lower() and descriptor.exit_policy.is_exiting_allowed():
				#SEARCH FILTERING BY FINGERPRINT
				#Conditions: Fingerprint specified in command-line AND
				#	 Relay Fingerprint equals to the Fingerprint specified in command-line. AND 
				#	 Relay's Operative System equals to the Operative System (option mode) specified in command-line AND
				#	 The Relay is a Exit Node. 	
				if descriptor.address not in nodesAlreadyScanned:
					log.info("[+] %s System has been found... Nickname: %s - OS Version: %s" % (descriptor.operating_system, descriptor.nickname, descriptor.operating_system))
					log.info("[+] Starting the NMap Scan with the following options: ")
					log.info("[+][+] Scan Address: %s " % (descriptor.address))
					log.info("[+][+] Scan Arguments: %s " % (self.cli.scanArguments))
					log.info("[+][+] Scan Ports: %s " % (self.cli.scanPorts))
					if self.cli.scanArguments != None:
						nm.scan(descriptor.address, self.cli.scanPorts, arguments=self.cli.scanArguments)
					else:
						nm.scan(descriptor.address, self.cli.scanPorts)	
					self.recordNmapScan(nm)
					log.info('[+] Scan Ended for %s .' % (descriptor.nickname))
					nodesAlreadyScanned.append(descriptor.address)

		# self.exitNodes is presumably populated by recordNmapScan() — verify.
		if len(self.exitNodes) == 0:
			log.info("[+] In the first %d records searching for the %s Operating System, there's no results (machines with detected open ports)" %(self.cli.exitNodesToAttack, self.cli.mode.lower()))	
		return self.exitNodes
def get_exit_addresses_remote(exit_ip, exit_port=443):
    """Fetch exit relays and report which ones may exit to exit_ip:exit_port.

    Returns a pair of parallel lists: (addresses, fingerprints) of the
    relays whose exit policy allows the given destination.
    """
    downloader = DescriptorDownloader(
        use_mirrors=True,
        timeout=10,
    )
    addresses = []
    fingerprints = []
    exit_count = 0
    allowed_count = 0
    for descriptor in downloader.get_server_descriptors().run():
        try:
            if not descriptor.exit_policy.is_exiting_allowed():
                continue
            exit_count += 1
            if descriptor.exit_policy.can_exit_to(exit_ip, exit_port):
                allowed_count += 1
                addresses.append(descriptor.address)
                fingerprints.append(descriptor.fingerprint)
        except Exception as exc:
            print('Unable to retrieve the server descriptors: %s' % exc)

    print("Found %d total exits, %d allowing %s on port %d" %
          (exit_count, allowed_count, exit_ip, exit_port))
    return addresses, fingerprints
예제 #4
0
파일: tor-desc.py 프로젝트: lzkill/tor-info
def main():
	try:
		dump = open(PATHNAME,"wb")
		
		downloader = DescriptorDownloader()

		while True:
			query = downloader.get_server_descriptors(fingerprints=FINGERPRINT)

			for desc in query.run():
				dump.seek(0)
				dump.write("Nickname " + str(desc.nickname)+"\n")
				dump.write("Fingerprint " + "".join(str(desc.fingerprint).split())+"\n")
				dump.write("Published " + str(desc.published)+"\n")
				dump.write("Address " + str(desc.address)+"\n")
				dump.write("Version " + str(desc.tor_version)+"\n")
				dump.write("Uptime " + str(desc.uptime)+"\n")
				dump.write("Average_Bandwidth " + str(desc.average_bandwidth)+"\n")
				dump.write("Burst_Bandwidth " + str(desc.burst_bandwidth)+"\n")
				dump.write("Observed_Bandwidth " + str(desc.observed_bandwidth)+"\n")
				dump.write("Hibernating " + str(desc.hibernating)+"\n")

			time.sleep(DOWNLOAD_DELAY)

	except Exception as exc:
		print 'Unable to retrieve the server descriptors: %s' % exc
예제 #5
0
    def getExitNodes(self):
        """
        Download the current server descriptors and return, as a tuple of
        dicts, every relay whose exit policy allows exiting.

        Raises TorConnectionException when the stem package is unavailable.
        """
        try:
            from stem.descriptor.remote import DescriptorDownloader
        except ImportError:
            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; only a failed import means stem is missing.
            raise TorConnectionException("Stem Package Missing")

        #self.authenticate()

        downloader = DescriptorDownloader()

        exitNodes = []

        for node in downloader.get_server_descriptors().run():
            # Skip relays that never allow exiting.
            if not node.exit_policy.is_exiting_allowed():
                continue

            exitNodes.append({
                'nickname': node.nickname,
                'address': node.address,
                'fingerprint': node.fingerprint,
                'platform': node.platform,
                'os': node.operating_system,
                'burst': node.burst_bandwidth,
                'estimated': node.observed_bandwidth,
                'circuit_protocols': node.circuit_protocols,
                'contact': node.contact,
                'tor_version': node.tor_version
            })

        return tuple(exitNodes)
예제 #6
0
파일: Discovery.py 프로젝트: intfrr/Tortazo
    def listAuthorityExitNodes(self):
        '''
			List the Exit Nodes using the filters specified by command-line.
		'''
        self.cli.logger.debug(
            term.format("[+] Try to listing the current Exit-Nodes of TOR.",
                        term.Color.GREEN))
        if self.cli.exitNodeFingerprint != None:
            self.cli.logger.debug(
                term.format(
                    "[+] Using the fingerprint: %s " %
                    (self.cli.exitNodeFingerprint), term.Color.GREEN))
        self.cli.logger.debug(
            term.format("[+] Filter by platform: %s." % (self.cli.mode),
                        term.Color.GREEN))
        self.cli.logger.debug(
            term.format(
                "[+] Retrieving the first %d records in the Descriptors." %
                (self.cli.exitNodesToAttack), term.Color.GREEN))

        if self.cli.useMirror == True:
            self.cli.logger.info(
                term.format(
                    "[+] Using the Directory Mirrors to get the descriptors",
                    term.Color.YELLOW))
        downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
        # Restrict the query to one relay when a fingerprint was supplied.
        if self.cli.exitNodeFingerprint != None:
            descriptors = downloader.get_server_descriptors(
                fingerprints=[self.cli.exitNodeFingerprint])
        else:
            descriptors = downloader.get_server_descriptors()
        try:
            listDescriptors = descriptors.run()
        except zlib.error:
            # Transient decompression failures from the directory servers are common.
            self.cli.logger.error(
                term.format(
                    "[-] Error fetching the TOR descriptors. This is something quite common... Try again in a few seconds.",
                    term.Color.RED))
            return
        except urllib2.HTTPError:
            # NOTE(review): "Figerprint" typo in the message below.
            self.cli.logger.error(
                term.format(
                    "[-] Figerprint not found... It's not registered in the last valid server descriptor.",
                    term.Color.RED))
            return
        return self.filterNodes(listDescriptors)
예제 #7
0
파일: tutorial.py 프로젝트: patrickod/stem
      def get_bw_to_relay():
        """Map observed bandwidth to the nicknames of exit relays at that rate."""
        relays_by_bw = {}

        downloader = DescriptorDownloader()

        try:
          for descriptor in downloader.get_server_descriptors().run():
            if not descriptor.exit_policy.is_exiting_allowed():
              continue
            relays_by_bw.setdefault(descriptor.observed_bandwidth, []).append(descriptor.nickname)
        except Exception as exc:
          print('Unable to retrieve the server descriptors: %s' % exc)

        return relays_by_bw
예제 #8
0
      def get_bw_to_relay():
        """Return a dict mapping observed bandwidth to exit-relay nicknames."""
        nicknames_by_bandwidth = {}

        remote = DescriptorDownloader()

        try:
          for server in remote.get_server_descriptors().run():
            if server.exit_policy.is_exiting_allowed():
              bucket = nicknames_by_bandwidth.setdefault(server.observed_bandwidth, [])
              bucket.append(server.nickname)
        except Exception as exc:
          print('Unable to retrieve the server descriptors: %s' % exc)

        return nicknames_by_bandwidth
예제 #9
0
 def get_or_from_network(self):
     # Build lookup tables of relays from freshly downloaded server descriptors:
     #   _or:      fingerprint -> nickname
     #   _relayBW: fingerprint -> observed bandwidth
     #   _exit_or: fingerprints usable as exit for the configured hop count
     self._or = {}
     self._relayBW = {}
     self._exit_or = set()
     downloader = DescriptorDownloader()
     try:
         for desc in downloader.get_server_descriptors().run():
             self._or[desc.fingerprint] = desc.nickname
             self._relayBW[desc.fingerprint] = desc.observed_bandwidth
             # Multi-hop circuits: any exit-allowing relay qualifies.
             if desc.exit_policy.is_exiting_allowed() and self._num_hops > 1:
                 self._exit_or.add(desc.fingerprint)
             # Single-hop circuits additionally require the relay to permit them.
             if desc.exit_policy.is_exiting_allowed() and self._num_hops == 1 and desc.allow_single_hop_exits:
                 self._exit_or.add(desc.fingerprint)
     except Exception as exc:
         # Python 2 print statement — this snippet targets Python 2.
         print "Unable to retrieve the server descriptors: %s" % exc
예제 #10
0
    def tutorial_example():
      from stem.descriptor.remote import DescriptorDownloader
      from stem.version import Version

      # Tally relays running a pre-0.2.3.0 tor, noting which publish contact info.
      threshold = Version('0.2.3.0')
      downloader = DescriptorDownloader()
      outdated_total = 0
      outdated_with_contact = 0

      print('Checking for outdated relays...\n')

      for relay in downloader.get_server_descriptors():
        if not (relay.tor_version < threshold):
          continue
        outdated_total += 1

        if relay.contact:
          print('  %-15s %s' % (relay.tor_version, relay.contact.decode('utf-8', 'replace')))
          outdated_with_contact += 1

      print('\n%i outdated relays found, %i had contact information' % (outdated_total, outdated_with_contact))
예제 #11
0
    def tutorial_example():
      from stem.descriptor.remote import DescriptorDownloader
      from stem.version import Version

      # Count relays on tor versions older than 0.2.3.0; print contacts when known.
      downloader = DescriptorDownloader()
      stats = {'count': 0, 'with_contact': 0}

      print("Checking for outdated relays...\n")

      for desc in downloader.get_server_descriptors():
        if desc.tor_version < Version('0.2.3.0'):
          stats['count'] += 1
          if desc.contact:
            print('  %-15s %s' % (desc.tor_version, desc.contact.decode("utf-8", "replace")))
            stats['with_contact'] += 1

      print("\n%i outdated relays found, %i had contact information" % (stats['count'], stats['with_contact']))
예제 #12
0
파일: trnnr.py 프로젝트: 5l1v3r1/trnnr
def fetch_descriptors():
    """
    Fetch and return relay descriptors, keyed by relay fingerprint.
    """

    # validate=False: tolerate descriptors that fail strict validation.
    query = DescriptorDownloader(use_mirrors=True, timeout=20).get_server_descriptors(validate=False)

    fingerprint_to_desc = {}
    try:
        for descriptor in query.run():
            fingerprint_to_desc[descriptor.fingerprint] = descriptor
        log.info("Query took %0.2f seconds." % query.runtime)
    except Exception as exc:
        log.critical("Unable to retrieve server descriptors: %s" % exc)

    log.info("Downloaded %d descs." % len(fingerprint_to_desc))

    return fingerprint_to_desc
예제 #13
0
def check_and_update_bitcoin_fields(relay_details):
    """
    Load full descriptors and parse bitcoin address from X-bitcoin and contact fields then update
    the details.json file with the bitcoin address as a bitcoin_address field. The X-bitcoin field
    takes precedence over the contact field if both both contain bitcoin addresses.
    """
    data = json.loads(relay_details)

    downloader = DescriptorDownloader()
    extracted_addresses = {}
    try:
      # Parse X-bitcoin fields from the network consensus
      for relay_desc in downloader.get_server_descriptors().run():
        x_bitcoin_field = re.search("^X-bitcoin (.*)", str(relay_desc), re.MULTILINE)
        if x_bitcoin_field:
            if extract_bitcoin_address(x_bitcoin_field.group()):
                extracted_addresses[relay_desc.fingerprint] = extract_bitcoin_address(x_bitcoin_field.group())
    except Exception as exc:
        print "Unable to retrieve the network consensus: %s" % exc

    for relay in data['relays']:
        # Check if a bitcoin address was already extracted from X-bitcoin field
        if relay.get('fingerprint') in extracted_addresses:
            relay['bitcoin_address'] = extracted_addresses[relay.get('fingerprint')]

        # Parse bitcoin addresses from the contact field of details.json
        elif relay.get('contact') is not None:
            if extract_bitcoin_address(relay.get('contact')):
                relay['bitcoin_address'] = extract_bitcoin_address(relay.get('contact'))

    # Remove any relays without a bitcoin address or with weight_fraction of -1.0 as they can't be used
    # to determine donation share
    data['relays'][:] = [relay for relay in data['relays'] if (relay.get('bitcoin_address'))]

    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file_name = temp_file.name
        json.dump(data, temp_file)

    # Atomically move the new json file to avoid errors where Oniontip
    # tries to parse a partially written json file.
    details_file_path = os.path.join(os.path.dirname(
        os.path.abspath(__file__)), 'details.json')
    shutil.move(temp_file_name, details_file_path)
예제 #14
0
파일: trnnr.py 프로젝트: nusenu/trnnr
def fetch_descriptors():
    """
    Fetch and return relay descriptors as a fingerprint-keyed dict.
    """

    remote = DescriptorDownloader(use_mirrors=True, timeout=20)
    pending = remote.get_server_descriptors(validate=False)

    by_fingerprint = {}
    try:
        for d in pending.run():
            by_fingerprint[d.fingerprint] = d
        log.info("Query took %0.2f seconds." % pending.runtime)
    except Exception as exc:
        log.critical("Unable to retrieve server descriptors: %s" % exc)

    log.info("Downloaded %d descs." % len(by_fingerprint))

    return by_fingerprint
예제 #15
0
파일: util.py 프로젝트: thefinn93/oniontip
def check_and_update_bitcoin_fields(relay_details):
    """
    Load full descriptors and parse bitcoin address from X-bitcoin and contact fields then update
    the details.json file with the bitcoin address as a bitcoin_address field. The X-bitcoin field
    takes precedence over the contact field if both both contain bitcoin addresses.
    """
    data = json.loads(relay_details)

    downloader = DescriptorDownloader()
    extracted_addresses = {}
    try:
      # Parse X-bitcoin fields from the network consensus
      for relay_desc in downloader.get_server_descriptors().run():
        x_bitcoin_field = re.search("^X-bitcoin (.*)", str(relay_desc), re.MULTILINE)
        if x_bitcoin_field:
            if extract_bitcoin_address(x_bitcoin_field.group()):
                extracted_addresses[relay_desc.fingerprint] = extract_bitcoin_address(x_bitcoin_field.group())
    except Exception as exc:
        print "Unable to retrieve the network consensus: %s" % exc

    for relay in data['relays']:
        # Check if a bitcoin address was already extracted from X-bitcoin field
        if relay.get('fingerprint') in extracted_addresses:
            relay['bitcoin_address'] = extracted_addresses[relay.get('fingerprint')]

        # Parse bitcoin addresses from the contact field of details.json
        elif relay.get('contact') is not None:
            if extract_bitcoin_address(relay.get('contact')):
                relay['bitcoin_address'] = extract_bitcoin_address(relay.get('contact'))

    # Remove any relays without a bitcoin address or with weight_fraction of -1.0 as they can't be used
    # to determine donation share
    data['relays'][:] = [relay for relay in data['relays'] if (relay.get('bitcoin_address'))]

    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file_name = temp_file.name
        json.dump(data, temp_file)

    # Atomically move the new json file to avoid errors where Oniontip
    # tries to parse a partially written json file.
    details_file_path = os.path.join(os.path.dirname(
        os.path.abspath(__file__)), 'details.json')
    shutil.move(temp_file_name, details_file_path)
예제 #16
0
def fillQueue(dbConnection):
    print term.format("Starting Tor:\n")

    # Launch Tor
    tor_process = stem.process.launch_tor_with_config(
        config={
            'SocksPort':
            str(SOCKS_PORT),
            'ControlPort':
            '9051',
            'HashedControlPassword':
            '******',
        },
        init_msg_handler=print_bootstrap_lines,
        take_ownership=True,
    )
    now = datetime.datetime.now()
    downloadSuccessful = False
    errorCount = 0
    while not downloadSuccessful:
        try:
            controller = Controller.from_port(port=9051)
            # IMPORTANT: Change the password to the fitting "HashedControlPassword" set in the Torrc by stem, a few lines above here
            controller.authenticate("schnitzel")
            downloader = DescriptorDownloader()
            # Now tefth those servers and add them to the queue
            serverDescriptors = downloader.get_server_descriptors().run()
            i = 1
            for desc in serverDescriptors:
                if desc.exit_policy.is_exiting_allowed():
                    addItemToQueue(dbConnection, desc, now, i)
                    i = i + 1
            downloadSuccessful = True
        except:
            e = sys.exc_info()[0]
            print e
            errorCount = errorCount + 1
            if (errorCount >= 5):
                print "HUGE CLUSTERFUCK, stopping execution"
                tor_process.kill()  # stops tor
                raise
    tor_process.kill()  # stops tor
예제 #17
0
def fillQueue(dbConnection):
    print term.format("Starting Tor:\n")

    # Launch Tor
    tor_process = stem.process.launch_tor_with_config(
      config = {
        'SocksPort': str(SOCKS_PORT),
        'ControlPort': '9051',
        'HashedControlPassword': '******',
      },
      init_msg_handler = print_bootstrap_lines,
      take_ownership = True,
    )
    now = datetime.datetime.now()
    downloadSuccessful = False
    errorCount = 0
    while not downloadSuccessful:
        try:
            controller = Controller.from_port(port = 9051)
            # IMPORTANT: Change the password to the fitting "HashedControlPassword" set in the Torrc by stem, a few lines above here 
            controller.authenticate("schnitzel")
            downloader = DescriptorDownloader()
            # Now tefth those servers and add them to the queue
            serverDescriptors =downloader.get_server_descriptors().run()
            i = 1
            for desc in serverDescriptors:
                if desc.exit_policy.is_exiting_allowed():
                    addItemToQueue(dbConnection, desc, now, i)
                    i = i+1
            downloadSuccessful = True
        except:
            e = sys.exc_info()[0]
            print e
            errorCount = errorCount + 1
            if (errorCount >= 5):
                print "HUGE CLUSTERFUCK, stopping execution"
                tor_process.kill()  # stops tor
                raise
    tor_process.kill()  # stops tor
예제 #18
0
파일: util.py 프로젝트: benhc123/oniontip
def check_and_update_bitcoin_fields(relay_details):
    """
    Load full descriptors and parse bitcoin address from X-bitcoin and contact fields then update
    the details.json file with the bitcoin address as a bitcoin_address field. The X-bitcoin field
    takes precedence over the contact field if both both contain bitcoin addresses.
    """
    data = json.loads(relay_details)

    downloader = DescriptorDownloader()
    extracted_addresses = {}
    try:
      # Parse X-bitcoin fields from the network consensus
      for relay_desc in downloader.get_server_descriptors().run():
        x_bitcoin_field = re.search("^X-bitcoin (.*)", str(relay_desc), re.MULTILINE)
        if x_bitcoin_field:
            if extract_bitcoin_address(x_bitcoin_field.group()):
                extracted_addresses[relay_desc.fingerprint] = extract_bitcoin_address(x_bitcoin_field.group())
    except Exception as exc:
        print "Unable to retrieve the network consensus: %s" % exc

    for relay in data['relays']:
        # Check if a bitcoin address was already extracted from X-bitcoin field
        if relay.get('fingerprint') in extracted_addresses:
            relay['bitcoin_address'] = extracted_addresses[relay.get('fingerprint')]

        # Parse bitcoin addresses from the contact field of details.json
        elif relay.get('contact') is not None:
            if extract_bitcoin_address(relay.get('contact')):
                relay['bitcoin_address'] = extract_bitcoin_address(relay.get('contact'))

    # Remove any relays without a bitcoin address or with weight_fraction of -1.0 as they can't be used
    # to determine donation share
    data['relays'][:] = [relay for relay in data['relays'] if (relay.get('bitcoin_address'))]

    # Write parsed list to file
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'details.json'), 'w') as details_file:
        json.dump(data, details_file)
예제 #19
0
    def getExitNodes(self):
        """
        Download the current server descriptors and return, as a tuple of
        dicts, every relay whose exit policy allows exiting.

        Raises TorConnectionException when the stem package is unavailable.
        """
        try:
            from stem.descriptor.remote import DescriptorDownloader
        except ImportError:
            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; only a failed import means stem is missing.
            raise TorConnectionException("Stem Package Missing")

        #self.authenticate()

        downloader = DescriptorDownloader()

        exitNodes = []

        for node in downloader.get_server_descriptors().run():
            # Skip relays that never allow exiting.
            if not node.exit_policy.is_exiting_allowed():
                continue

            exitNodes.append({
                'nickname': node.nickname,
                'address': node.address,
                'fingerprint': node.fingerprint,
                'platform': node.platform,
                'os': node.operating_system,
                'burst': node.burst_bandwidth,
                'estimated': node.observed_bandwidth,
                'circuit_protocols': node.circuit_protocols,
                'contact': node.contact,
                'tor_version': node.tor_version
            })

        return tuple(exitNodes)
예제 #20
0
    def listExitNodes(self):
        '''
			List the Exit Nodes using the filters specified by command-line.
		'''
        # Addresses already scanned in this run, to avoid duplicate nmap scans.
        nodesAlreadyScanned = []
        log.info("[+] Try to listing the current Exit-Nodes of TOR.")
        if self.cli.exitNodeFingerprint != None:
            log.info("[+] Using the fingerprint: %s " %
                     (self.cli.exitNodeFingerprint))
        log.info("[+] Filter by platform: %s." % (self.cli.mode))
        log.info("[+] Retrieving the first %d records in the Descriptors." %
                 (self.cli.exitNodesToAttack))

        if self.cli.useMirror == True:
            log.info("[+] Using the Directory Mirrors to get the descriptors")
        downloader = DescriptorDownloader(use_mirrors=self.cli.useMirror)
        nm = nmap.PortScanner()
        # Restrict the download to one relay when a fingerprint was supplied.
        if self.cli.exitNodeFingerprint != None:
            descriptors = downloader.get_server_descriptors(
                fingerprints=[self.cli.exitNodeFingerprint])
        else:
            descriptors = downloader.get_server_descriptors()
        try:
            listDescriptors = descriptors.run()
        except zlib.error:
            # Transient decompression failures from the directory servers are common.
            log.error(
                "[-] Error fetching the TOR descriptors. This is something quite common... Try again in a few seconds."
            )
            return
        log.info("[+] Number of Records found: %d " % (len(listDescriptors)))
        # NOTE(review): the slice [1:n] skips the first descriptor and yields at
        # most n-1 records, although the log above promises the "first n".
        # Possible off-by-one — confirm intent before changing.
        for descriptor in listDescriptors[1:self.cli.exitNodesToAttack]:
            #for descriptor in parse_file(open("/home/adastra/Escritorio/tor-browser_en-US-Firefox/Data/Tor/cached-consensus")):
            if self.cli.mode.lower() in descriptor.operating_system.lower(
            ) and descriptor.exit_policy.is_exiting_allowed():
                #SEARCH FILTERING BY FINGERPRINT
                #Conditions: Fingerprint specified in command-line AND
                #	 Relay Fingerprint equals to the Fingerprint specified in command-line. AND
                #	 Relay's Operative System equals to the Operative System (option mode) specified in command-line AND
                #	 The Relay is a Exit Node.
                if descriptor.address not in nodesAlreadyScanned:
                    log.info(
                        "[+] %s System has been found... Nickname: %s - OS Version: %s"
                        % (descriptor.operating_system, descriptor.nickname,
                           descriptor.operating_system))
                    log.info(
                        "[+] Starting the NMap Scan with the following options: "
                    )
                    log.info("[+][+] Scan Address: %s " % (descriptor.address))
                    log.info("[+][+] Scan Arguments: %s " %
                             (self.cli.scanArguments))
                    log.info("[+][+] Scan Ports: %s " % (self.cli.scanPorts))
                    if self.cli.scanArguments != None:
                        nm.scan(descriptor.address,
                                self.cli.scanPorts,
                                arguments=self.cli.scanArguments)
                    else:
                        nm.scan(descriptor.address, self.cli.scanPorts)
                    self.recordNmapScan(nm)
                    log.info('[+] Scan Ended for %s .' % (descriptor.nickname))
                    nodesAlreadyScanned.append(descriptor.address)

        # self.exitNodes is presumably populated by recordNmapScan() — verify.
        if len(self.exitNodes) == 0:
            log.info(
                "[+] In the first %d records searching for the %s Operating System, there's no results (machines with detected open ports)"
                % (self.cli.exitNodesToAttack, self.cli.mode.lower()))
        return self.exitNodes
예제 #21
0
CHECK_PORT = 80  # exit == allows port 80
TOP_PERCENT = 80 # limit analysis to 80% of total observed bw

from stem.descriptor.remote import DescriptorDownloader # to fetch descriptors
from stem.descriptor import parse_file # alternatively, for local parsing
import os
import collections
from difflib import SequenceMatcher

# Python 2 script: uses print statements throughout.
print "Fetching latest descriptors, can take a while..."

# Prefer a locally cached consensus when available; otherwise download.
if os.path.exists('cached-consensus'):
    descriptors = parse_file('cached-consensus')
else:
    downloader = DescriptorDownloader(use_mirrors=True, timeout=10)
    query = downloader.get_server_descriptors()
    descriptors = query.run()

print ""

#exits_only = filter(lambda descriptor:descriptor.exit_policy.is_exiting_allowed(), descriptors)
# Keep only relays whose exit policy allows CHECK_PORT, sorted by
# observed bandwidth, highest first.
exits_only = filter(lambda descriptor:descriptor.exit_policy.can_exit_to(port=CHECK_PORT), descriptors)
exits_sorted =  sorted(exits_only, key=lambda descriptor:descriptor.observed_bandwidth,reverse=True)

print "%s relays (%s exits)" % (len(descriptors), len(exits_sorted))

total_bw = 0
total_exit_bw = 0  # NOTE(review): never updated in this fragment — snippet looks truncated

for desc in descriptors:
    total_bw += desc.observed_bandwidth
예제 #22
0

with Controller.from_port() as controller:
    controller.authenticate()
    #data_dir = controller.get_conf('DataDirectory')
    downloader_0 = DescriptorDownloader()
    # 2. Using descriptors to get the list of relays
    '''
	for rel in parse_file(os.path.join(data_dir, 'cached-microdesc-consensus')):
		# 2a. Get the ip location
		if (rel is not None):
			getIpLocation(rel) #2b. Append them into dictionary
	'''

    # Print bandwidth figures for every relay (Python 2 print statements).
    try:
        for desc in downloader_0.get_server_descriptors().run():
            print desc.average_bandwidth
            print desc.burst_bandwidth
            print desc.observed_bandwidth
    except Exception as exc:
        print "Error: ", exc
    # NOTE(review): the triple-quoted string below is never closed in this
    # file — the snippet appears truncated by the scraper.
    '''
	for rel in downloader_0.get_server_descriptors().run():
		if (rel is not None):
			getIpLocation(rel)
	getavbndw()

	
	checklist = []
	for r in allNodes['EU4']['relays']:
		checklist.append(r.fingerprint)
from stem.descriptor.remote import DescriptorDownloader

downloader = DescriptorDownloader()

# Download the current server descriptors and dump the cryptographic
# fields of each one.
descriptors = downloader.get_server_descriptors().run()

for descriptor in descriptors:
    print('Descriptor', str(descriptor))
    print('Certificate', descriptor.certificate)
    # Label typo fixed: was 'ONion key'.
    print('Onion key', descriptor.onion_key)
    print('Signing key', descriptor.signing_key)
    print('Signature', descriptor.signature)

예제 #24
0
def main():
    """Scan the consensus for relays that churn their fingerprint and send
    an email notification for any address:or_port that has accumulated ten
    or more fingerprints within the retention window (TEN_DAYS).
    """
    # Persistent record of when we last emailed about each address:or_port;
    # consulted via is_notification_suppressed() to avoid repeat emails.
    last_notified_config = conf.get_config('last_notified')
    last_notified_path = util.get_path('data',
                                       'fingerprint_change_last_notified.cfg')

    if os.path.exists(last_notified_path):
        last_notified_config.load(last_notified_path)
    else:
        # First run: point the config object at its future save location.
        last_notified_config._path = last_notified_path

    # {(address, or_port): {fingerprint: published_unix_timestamp}}
    fingerprint_changes = load_fingerprint_changes()
    downloader = DescriptorDownloader(timeout=15)
    alarm_for = {}  # 'address:port' => (address, or_port, fingerprint)

    for relay in downloader.get_consensus():
        prior_fingerprints = fingerprint_changes.setdefault(
            (relay.address, relay.or_port), {})

        # NOTE(review): the pruning and alarm checks below only run on
        # iterations that register a previously unseen fingerprint —
        # confirm that is intended.
        if relay.fingerprint not in prior_fingerprints:
            log.debug("Registering a new fingerprint for %s:%s (%s)" %
                      (relay.address, relay.or_port, relay.fingerprint))
            prior_fingerprints[relay.fingerprint] = datetime_to_unix(
                relay.published)

            # drop fingerprint changes that are older than TEN_DAYS

            old_fingerprints = [
                fp for fp in prior_fingerprints
                if (time.time() - prior_fingerprints[fp] > TEN_DAYS)
            ]

            for fp in old_fingerprints:
                log.debug(
                    "Removing fingerprint for %s:%s (%s) which was published %i days ago"
                    % (relay.address, relay.or_port, fp,
                       prior_fingerprints[fp] / 60 / 60 / 24))
                del prior_fingerprints[fp]

            # if we've changed more than ten times in the last ten days then alarm

            if len(prior_fingerprints) >= 10:
                alarm_for['%s:%s' %
                          (relay.address, relay.or_port)] = (relay.address,
                                                             relay.or_port,
                                                             relay.fingerprint)

    if alarm_for and not is_notification_suppressed(alarm_for.values()):
        log.debug("Sending a notification for %i relays..." % len(alarm_for))
        body = EMAIL_BODY

        for address, or_port, fingerprint in alarm_for.values():
            try:
                # Full descriptor supplies platform/contact for the email.
                desc = downloader.get_server_descriptors(fingerprint).run()[0]
            except:
                desc = None  # might not be available, just used for extra info

            fp_changes = fingerprint_changes[(address, or_port)]
            log.debug("* %s:%s has had %i fingerprints: %s" %
                      (address, or_port, len(fp_changes), ', '.join(
                          fp_changes.keys())))

            if desc:
                body += "* %s:%s (platform: %s, contact: %s)\n" % (
                    address, or_port, desc.platform.decode(
                        'utf-8', 'replace'), desc.contact)
            else:
                body += "* %s:%s\n" % (address, or_port)

            count = 0

            # Enumerate fingerprints newest-first by publication timestamp.
            for fingerprint in sorted(fp_changes,
                                      reverse=True,
                                      key=lambda k: fp_changes[k]):
                body += "  %s at %s\n" % (
                    fingerprint,
                    datetime.datetime.fromtimestamp(
                        fp_changes[fingerprint]).strftime('%Y-%m-%d %H:%M:%S'))
                count += 1

                # Relays frequently cycling their fingerprint can have thousands of
                # entries. Enumerating them all is unimportant, so if too long then
                # just give the count.

                if count > 8:
                    oldest_timestamp = sorted(fp_changes.values())[0]
                    body += "  ... and %i more since %s\n" % (
                        len(fp_changes) - 8,
                        datetime.datetime.fromtimestamp(
                            oldest_timestamp).strftime('%Y-%m-%d %H:%M:%S'))
                    break

            body += "\n"

        subject = EMAIL_SUBJECT

        if len(alarm_for) == 1:
            # NOTE(review): indexing .values() implies Python 2 (a py3 view
            # is not subscriptable) — confirm the target interpreter.
            subject += ' (%s:%s)' % alarm_for.values()[0][:2]

        util.send(subject,
                  body=body,
                  to=[
                      '*****@*****.**',
                      '*****@*****.**'
                  ])

        # register that we've notified for these

        current_time = str(int(time.time()))

        for address, or_port, _ in alarm_for.values():
            last_notified_config.set('%s:%s' % (address, or_port),
                                     current_time)

        last_notified_config.save()

    save_fingerprint_changes(fingerprint_changes)
"""
Collect all RSA public keys: recent, archived, signing key, and onion key.
Create a dictionary that maps RSA public keys to metadata. Pickle that
dictionary.
"""
import pickle

from stem.descriptor.remote import DescriptorDownloader
from stem.descriptor.reader import DescriptorReader

# Maps RSA public keys to relay metadata; pickled at the end of the script.
rsa_key_data = {}

# Get RSA keys from recent server descriptors.
downloader = DescriptorDownloader()
for desc_r in downloader.get_server_descriptors():
    # Load metadata with relay information: nickname, fingerprint, published,
    # address, or_port, platform, contact, average_bandwidth, extra_info_digest
    # (only fields present on the descriptor are recorded).
    metadata = {}
    if desc_r.nickname is not None:
        metadata['nickname'] = desc_r.nickname
    if desc_r.fingerprint is not None:
        metadata['fingerprint'] = desc_r.fingerprint
    if desc_r.published is not None:
        metadata['date'] = desc_r.published
    if desc_r.address is not None:
        metadata['ip4_address'] = desc_r.address
    if desc_r.or_port is not None:
        metadata['port'] = desc_r.or_port
    if desc_r.platform is not None:
        metadata['platform'] = desc_r.platform
    if desc_r.contact is not None:
from stem.descriptor.remote import DescriptorDownloader

downloader = DescriptorDownloader()
# Blocking download of all current server descriptors.
server_descriptors = downloader.get_server_descriptors().run()

# Serialize every descriptor's raw text into a single dump file.
# NOTE(review): writing a joined str to a 'wb' handle implies Python 2 —
# on Python 3 this would raise TypeError; confirm the target interpreter.
with open('/tmp/descriptor_dump', 'wb') as descriptor_file:
  descriptor_file.write(''.join(map(str, server_descriptors)))
예제 #27
0
파일: check_bin.py 프로젝트: badonions/dtmb
# The exit-node check downloads the target over plain HTTP so the content
# can be compared for tampering; warn if the URL is HTTPS.
# (`url`, `term`, and later `socks`/`socket` are defined earlier in the file.)
if url.startswith("https://"):
    print(
        term.format("Detected HTTPS connection, should be plaintext (HTTP)",
                    term.Color.RED))

print("URL: " + url)

SOCKS_PORT = 1330  # local tor SOCKS port used for the fetches below
TIMEOUT = 10

downloader = DescriptorDownloader(
    use_mirrors=False,
    timeout=10,
)

# NOTE(review): `query` is never executed; the loop below issues its own
# get_server_descriptors() call — confirm whether this is intentional.
query = downloader.get_server_descriptors()

# Write the fingerprint of every exit-capable relay to fp.txt, one per line.
# The file is opened ONCE: the original reopened it with mode "w" on every
# match, truncating it each time so only the last fingerprint survived —
# which contradicts the node count read back from fp.txt right below.
with open("fp.txt", "w") as fp_file:
    for desc in downloader.get_server_descriptors():
        if desc.exit_policy.is_exiting_allowed():
            fp_file.write('{}\n'.format(desc.fingerprint))

# Count how many fingerprints were collected (one per line in fp.txt).
# NOTE(review): the open() handle is never closed; CPython's refcounting
# hides this, but a `with` block would be cleaner.
xlines = sum(1 for line in open('fp.txt'))
print("We will test " + str(xlines) + " nodes")
atline = 0  # progress counter used by the per-node loop further below

# Route all subsequent socket traffic through the local tor SOCKS proxy.
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)
socket.socket = socks.socksocket

예제 #28
0
import time
start_time = time.time()  # wall-clock timing for the summary line at the end
import json
import requests
import stem

from stem.descriptor.remote import DescriptorDownloader
downloader = DescriptorDownloader(use_mirrors = False)

# For every relay in the current server descriptors, look up its ASN,
# country, ISP, org, and reverse DNS via the ip-api.com JSON endpoint.
try:
    for desc in downloader.get_server_descriptors().run():
        IP = str(desc.address)
        print (IP)
    #Code used to retrieve data ip-api.com
        data = requests.get('http://ip-api.com/json/'+IP+'?fields=query,countryCode,isp,org,as,reverse')
        # Paste response to json
        variables = data.json();
        #Parsing AS number and organization info
        x = variables["as"]
        if (' ' in x) == True: #split to get only ASN
            asn = x.split(' ',1)[0].split('S',1)[1] # split "ASxxx ..." twice to extract just the ASN digits
        else:
            asn = x.split('AS',1)[-1]
        print (IP+', '+asn+', '+variables["countryCode"]+', '+variables["isp"]+', '+variables["org"]+', '+variables["reverse"])
        # Throttle to stay under ip-api.com's free-tier rate limit.
        time.sleep(0.5)
except Exception as exc:
    print ("Unable to retrieve the consensus: %s"% exc)

#End parsing ASN and Org
print("ip-api.com %1.6f seconds ---" %(time.time() - start_time))
예제 #29
0
from stem.descriptor.remote import DescriptorDownloader
import os

downloader = DescriptorDownloader(
    use_mirrors=True,
    timeout=20,
)

query = downloader.get_server_descriptors()

# Tally relays whose contact line advertises the "[tor-relay.co]" tag.
searchString = "[tor-relay.co]"
trcCount = 0
combinedUptime = 0       # seconds, summed across matching relays
combinedBandwidth = 0    # observed bandwidth (bytes/sec), summed

nodes = list()  # [{'name': nickname, 'bandwidth': observed_bandwidth}, ...]

try:
    for desc in query.run():
        # str() guards against a None contact field.
        if searchString in str(desc.contact):
            trcCount += 1
            combinedUptime += desc.uptime
            combinedBandwidth += desc.observed_bandwidth
            nodes.append({
                'name': desc.nickname,
                'bandwidth': desc.observed_bandwidth
            })
except Exception as exc:
    print(exc)

if len(nodes) > 0:
예제 #30
0
# Target to fetch through each exit node; default is a known plaintext
# binary so downloads can be compared for tampering.
url = args.url or "http://the.earth.li/~sgtatham/putty/latest/x86/putty.exe"

# The comparison requires plain HTTP; warn when an HTTPS URL is supplied.
if url.startswith("https://"):
    print(term.format("Detected HTTPS connection, should be plaintext (HTTP)", term.Color.RED))

print("URL: " + url)

SOCKS_PORT = 1330  # local tor SOCKS port used for the fetches below
TIMEOUT = 10

downloader = DescriptorDownloader(
  use_mirrors = False,
  timeout = 10,
)

# NOTE(review): `query` is never executed; the loop below issues its own
# get_server_descriptors() call — confirm whether this is intentional.
query = downloader.get_server_descriptors()

# Write the fingerprint of every exit-capable relay to fp.txt, one per line.
# The file is opened ONCE: the original reopened it with mode "w" on every
# match, truncating it each time so only the last fingerprint survived —
# which contradicts the node count read back from fp.txt right below.
# (Also avoids shadowing the builtin name `file`.)
with open("fp.txt", "w") as fp_file:
    for desc in downloader.get_server_descriptors():
        if desc.exit_policy.is_exiting_allowed():
            fp_file.write('{}\n'.format(desc.fingerprint))

# Count how many fingerprints were collected (one per line in fp.txt).
# NOTE(review): the open() handle is never closed; CPython's refcounting
# hides this, but a `with` block would be cleaner.
xlines = sum(1 for line in open('fp.txt'))
print("We will test " + str(xlines) + " nodes")
atline = 0  # progress counter used by the per-node loop further below

# Route all subsequent socket traffic through the local tor SOCKS proxy.
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)
socket.socket = socks.socksocket

def getaddrinfo(*args):