def __init__(self):
    """Initialise the downloader, preferring a local directory cache.

    Tries a single (no-retry) consensus fetch from LOCAL_DIRECTORY_CACHE and
    seeds the DirectoryDownloader with it.  If the cache is unreachable over
    the network, falls back to a DirectoryDownloader with no initial
    consensus and records that no local cache is available.
    """
    self.has_local_directory_cache = True
    try:
        # pylint: disable=no-member
        initial = get_consensus(
            endpoints=[LOCAL_DIRECTORY_CACHE],
            retries=0,
            document_handler=DocumentHandler.DOCUMENT,
        ).run()
        self.downloader = DirectoryDownloader(initial_consensus=initial[0])
    except urllib.error.URLError:
        # Local cache unreachable -- fetch everything remotely instead.
        self.has_local_directory_cache = False
        self.downloader = DirectoryDownloader()
def get_consensus_data(self):
    """Append (address, or_port) for every relay in the current Tor
    consensus to memory.tor_nodes.

    Best-effort: any failure while fetching or iterating the consensus is
    reported to stdout and otherwise ignored; nodes appended before the
    failure are kept.
    """
    try:
        relays = remote.get_consensus().run()
        memory.tor_nodes.extend((relay.address, relay.or_port)
                                for relay in relays)
    except Exception as exc:
        print("Unable to retrieve the consensus: %s" % exc)
indicators=[]) print("created feed") feed_id = new_feed["id"] print("using feed %s" % (feed_id)) # get_existing_indicators try: with open(OTX_INDICATOR_FILE, "rb") as r: indicators = pickle.load(r) except: indicators = {} print ("%d existing indicators retrieved" % (len(indicators))) # Load tor consensus tor_relays = get_consensus().run() print("%d active relays found" % (len(tor_relays))) for relay in tor_relays: title = "tor relay %s:%s" % (relay.address, relay.or_port) description = "%s:%s (%s, %s) last_published: %s" % (relay.address, relay.or_port, relay.nickname, relay.fingerprint, relay.published) indicators[(relay.address, relay.or_port)] = { "type": "IP", "indicator": relay.address, "role": "tor_relay", "description": description, "title": title,