def _check(self):
    """Periodic watchdog for tracker announces.

    Decides, based on current state, whether to start, retry, or skip an
    announce.  Exactly one action is taken per call; each branch returns.
    """
    # An announce is currently in flight.
    if self.current_started is not None:
        if self.current_started <= bttime() - 58:
            log.warning("Tracker announce still not complete "
                        "%d seconds after starting it" % int(bttime() - self.current_started))
            ## Announce has been hanging for too long, retry it.
            if int(bttime() - self.current_started) >= 180:
                self._announce(STARTED)
        return
    # First announce: build the base query string once.
    if self.basequery is None:
        self.basequery = self._makequery()
        self._announce(STARTED)
        return
    # Port changed: tell the tracker we stopped, then rebuild on next pass.
    if self.changed_port:
        self._announce(STOPPED)
        self.changed_port = False
        self.basequery = None
        return
    # Download just finished: one-shot COMPLETED event.
    if self.finish:
        self.finish = False
        self._announce(COMPLETED)
        return
    # Previous announce failed: retry only after the backoff interval.
    if self.fail_wait is not None:
        if self.last_time + self.fail_wait <= bttime():
            self._announce(STARTED)
        return
    # Rate-limit regular reannounces.
    if self.last_time > bttime() - self.config['rerequest_interval']:
        return
    # Reannounce early when neighbor connections have failed.
    getmore = bool(self.neighbors.failed_connections())
    #TODO: also reannounce when TCs have failed
    if getmore or bttime() - self.last_time > self.announce_interval:
        self._announce()
def _scrape(self, query):
    """
    Make an HTTP GET request to the tracker
    Note: This runs in its own thread.

    @param query: pre-encoded query string appended to /scrape
    """
    self.spath = "/scrape"
    if not self.https:
        # Plain-HTTP trackers are refused outright.
        log.warning("Warning: Will not connect to non HTTPS server")
        return
    try:
        if self.proxy_url:
            # Tunnel through an HTTPS proxy; the full URL is required so
            # ProxyHTTPSConnection can extract the real host/port.
            h = ProxyHTTPSConnection(self.proxy_url, \
                                     username=self.proxy_username, \
                                     password=self.proxy_password, \
                                     ssl_context=self.ssl_ctx)
            s = "https://%s:%d%s%s" % (self.url, self.remote_port, self.spath, query)
            h.putrequest('GET', s)
        else:
            #No proxy url, use normal connection
            h = HTTPSConnection(self.url, self.remote_port, ssl_context=self.ssl_ctx)
            h.putrequest('GET', self.spath+query)
        h.endheaders()
        resp = h.getresponse()
        data = resp.read()
        resp.close()
        h.close()
        h = None
    # urllib2 can raise various crap that doesn't have a common base
    # exception class especially when proxies are used, at least
    # ValueError and stuff from httplib
    except Exception, g:
        def f(r='Problem connecting to ' + self.url + ': ' + str(g)):
            self._postrequest(errormsg=r)
        # NOTE(review): f is defined but never invoked in this span —
        # presumably it is scheduled onto the main thread elsewhere.
        # The success path also never consumes `data` here; confirm this
        # method is not truncated in this view.
def close(self):
    """Close this connection locally, sending a Break first when the
    handshake had completed."""
    if self.closed:
        # Redundant close attempt; nothing to do.
        log.warning("Double close")
        return
    conn_name = self.uniq_id()
    log.info("Closing %s" % conn_name)
    if self.complete:
        self.send_break()
    self.shutdown()
def __init__(self, *args): apply(gtk.Window.__init__, (self,)+args) try: #TODO: Icon doesn't work on XP build, don't know why if (os.name != 'nt'): self.set_icon_from_file(os.path.join(image_root,'anomos.ico')) except Exception, e: log.warning(e)
def __init__(self, *args): apply(gtk.Window.__init__, (self, ) + args) try: #TODO: Icon doesn't work on XP build, don't know why if (os.name != 'nt'): self.set_icon_from_file(os.path.join(image_root, 'anomos.ico')) except Exception, e: log.warning(e)
def _load_fastresume(self, resumefile, typecode): if resumefile is not None: try: r = array(typecode) r.fromfile(resumefile, self.numpieces) return r except Exception, e: log.warning("Couldn't read fastresume data: " + str(e))
def ore_closed(self):
    """
    Closes the connection when a Break has been received by our
    other relay (ore). Called by this object's ore during shutdown
    """
    if self.closed:
        # Already torn down — ignore the duplicate notification.
        log.warning("Double close")
        return
    if self.sent_break:
        return
    self.send_break()
def close(self):
    """Close this relay endpoint locally (as opposed to closing in
    response to a received BREAK message)."""
    if self.closed:
        log.warning("%s: Double close" % self.uniq_id())
        return
    log.info("Closing %s" % self.uniq_id())
    needs_break = self.complete and not self.sent_break
    if needs_break:
        self.send_break()
    self.shutdown()
def close(self):
    """Locally initiated close (not triggered by a received BREAK).

    Sends a Break first when the connection completed its handshake and
    no Break has gone out yet, then shuts the connection down.
    """
    if self.closed:
        log.warning("%s: Double close" % self.uniq_id())
        return
    log.info("Closing %s"%self.uniq_id())
    if not self.complete:
        self.shutdown()
        return
    if not self.sent_break:
        self.send_break()
    self.shutdown()
def start_circuit(self, tc, infohash, aeskey):
    """Called from Rerequester to initialize new circuits we've
    just gotten TCs for from the Tracker

    @param tc: encrypted tracking code from the tracker
    @param infohash: torrent infohash the circuit belongs to
    @param aeskey: session key for the circuit
    """
    # Enforce the configured cap on simultaneous streams.
    if self.count_streams() >= self.config['max_initiate']:
        log.warning("Not starting circuit -- Stream count exceeds maximum")
        return
    tcreader = TCReader(self.certificate)
    try:
        tcdata = tcreader.parseTC(tc)
    except Anomos.Crypto.CryptoError, e:
        log.error("Decryption Error: %s" % str(e))
        return
    # NOTE(review): tcdata, infohash and aeskey are unused in the visible
    # span — circuit construction presumably continues below; confirm this
    # method is not truncated in this view.
def connection_completed(self):
    """
    Called when a CONFIRM message is received indicating that
    our peer has received our tracking code
    """
    if self.complete:
        log.warning("Double complete")
        return
    self.complete = True
    # Wire up the rate-control machinery now that the stream is usable.
    upload = self.torrent.make_upload(self)
    self.upload = upload
    self.choker = upload.choker
    self.choker.connection_made(self)
    self.download = self.torrent.make_download(self)
    self.torrent.add_active_stream(self)
def shutdown(self):
    """Mark this relay closed, decrement the manager's relay count once
    per relay pair, and propagate the close to our other relay (ore)."""
    if self.closed:
        log.warning("Double close")
        return
    self.closed = True
    ore = self.orelay
    # Only one endpoint of the pair may decrement the manager's count.
    already_counted = self.decremented_count or (ore and ore.decremented_count)
    if not already_counted:
        self.manager.dec_relay_count()
        self.decremented_count = True
    # Tell our orelay to close.
    if ore and not ore.closed:
        ore.ore_closed()
    self.ratelimiter.clean_closed()
def socket_cb(self, sock):
    # Connection callback for the NatCheck probe socket.
    if sock.connected:
        peercert = self.socket.get_peer_cert()
        # Peer IDs are the trailing 20 bytes of the cert's SHA-256
        # fingerprint — presumably matching how the tracker derives them;
        # TODO confirm against the tracker side.
        recvd_pid = peercert.get_fingerprint('sha256')[-20:]
        if self.peerid != recvd_pid:
            # The certificate we received doesn't match the one
            # given to the tracker.
            # XXX: Should probably disconnect the peer rather than
            # just saying the NatCheck failed.
            log.warning("Peer certificate mismatch")
            self.answer(False)
        # NOTE(review): per the XXX above, the initializer still runs
        # even after a certificate mismatch.
        AnomosNeighborInitializer(self, self.socket, self.id)
    else:
        # Connection never established — report the NatCheck as failed.
        self.answer(False)
def update_neighbor_list(self, list):
    """Reconcile our neighbor set with a fresh tracker listing.

    @param list: iterable of (ip, port, nid) tuples from the tracker.
    Drops neighbors absent from the listing, then opens connections to
    new ids, skipping nid collisions and previously failed peers.
    """
    freshids = dict([(i[2],(i[0],i[1])) for i in list]) #{nid : (ip, port)}
    # Remove neighbors not found in freshids.  Iterate a snapshot
    # (keys() returns a list) since rm_neighbor mutates the dict.
    # (`nid` instead of `id`, which shadows the builtin; has_key is
    # deprecated in favor of `in`.)
    for nid in self.neighbors.keys():
        if nid not in freshids:
            self.rm_neighbor(nid)
    # Start connections with the new neighbors
    for nid, loc in freshids.iteritems():
        if self.nid_collision(nid, loc):
            # Already had neighbor by the given id at a different location
            log.warning('NID collision - x%02x' % ord(nid))
            # To be safe, kill connection with the neighbor we already
            # had with the requested ID and add ID to the failed list
            self.rm_neighbor(nid)
        elif (not self.has_neighbor(nid)) and (nid not in self.failedPeers):
            self.start_connection(nid, loc)
def _postrequest(self, data=None, errormsg=None):
    """Process the outcome of an announce.

    @param data: raw bencoded tracker response, or None
    @param errormsg: connection-level error text, or None
    """
    # The in-flight announce is finished either way.
    self.current_started = None
    self.last_time = bttime()
    if errormsg is not None:
        log.warning(errormsg)
        self._fail()
        return
    try:
        # Here's where we receive/decrypt data from the tracker
        r = bdecode(data)
        check_peers(r)
    except BTFailure, e:
        # An empty response is a silent failure; anything else is logged.
        if data != '':
            log.error('bad data from tracker - ' + str(e))
        self._fail()
        return
    # NOTE(review): handling of the successfully decoded response `r` is
    # not visible in this span — confirm the method continues below.
def update_neighbor_list(self, list):
    """Reconcile our neighbor set with a fresh tracker listing.

    @param list: iterable of (ip, port, nid) tuples from the tracker.
    Drops neighbors absent from the listing, then opens connections to
    new ids, skipping nid collisions and previously failed peers.
    """
    freshids = dict([(i[2], (i[0], i[1])) for i in list]) #{nid : (ip, port)}
    # Remove neighbors not found in freshids.  Iterate a snapshot
    # (keys() returns a list) since rm_neighbor mutates the dict.
    # (`nid` instead of `id`, which shadows the builtin; has_key is
    # deprecated in favor of `in`.)
    for nid in self.neighbors.keys():
        if nid not in freshids:
            self.rm_neighbor(nid)
    # Start connections with the new neighbors
    for nid, loc in freshids.iteritems():
        if self.nid_collision(nid, loc):
            # Already had neighbor by the given id at a different location
            log.warning('NID collision - x%02x' % ord(nid))
            # To be safe, kill connection with the neighbor we already
            # had with the requested ID and add ID to the failed list
            self.rm_neighbor(nid)
        elif (not self.has_neighbor(nid)) and (nid not in self.failedPeers):
            self.start_connection(nid, loc)
def start_connection(self, id, loc):
    """ Start a new SSL connection to the peer at loc and assign them
    the NeighborID id
    @param loc: (IP, Port)
    @param id: The neighbor ID to assign to this connection
    @type loc: tuple
    @type id: int
    """
    # Optionally refuse a second connection to an IP we already talk to.
    if self.config['one_connection_per_ip'] and self.has_ip(loc[0]):
        log.warning('Got duplicate IP address in neighbor list. ' \
                    'Multiple connections to the same IP are disabled ' \
                    'in your config.')
        return
    # Track the pending (not yet completed) connection by neighbor id.
    self.incomplete[id] = loc
    conn = P2PConnection(addr=loc,
                         ssl_ctx=self.ssl_ctx,
                         connect_cb=self.socket_cb,
                         schedule=self.schedule)
    # NOTE(review): `conn` is unused in the visible span — completion is
    # presumably driven via connect_cb; confirm the method is not
    # truncated in this view.
def _load(self):
    """Attempts to load the certificate and key from self.certfile and
    self.keyfile, Generates the certificate and key if they don't exist"""
    if not self.secure:
        # Unprotected key: load without prompting for a passphrase.
        self.rsakey = RSA.load_key(self.keyfile, m2util.no_passphrase_callback)
    else:
        # Allow 3 attempts before quitting
        i = 0
        while i < 3:
            try:
                # Uses M2Crypto's default (interactive) passphrase callback.
                self.rsakey = RSA.load_key(self.keyfile)
                break
            except RSA.RSAError:
                i += 1
        else:
            # while/else: runs only when all 3 attempts failed (no break).
            log.warning("\nInvalid password entered, exiting.")
            sys.exit()
    self.cert = X509.load_cert(self.certfile)
def set_filesystem_encoding(encoding):
    """Set the module-global filesystem_encoding.

    Falls back to 'ascii' (the default set on entry) whenever the
    requested or autodetected encoding is unavailable or unsupported.

    @param encoding: configured encoding name; '' means autodetect.
    """
    global filesystem_encoding
    filesystem_encoding = 'ascii'  # conservative default on every early return
    if encoding == '':
        try:
            sys.getfilesystemencoding
        except AttributeError:
            log.warning("This seems to be an old Python version which does not support detecting the filesystem encoding. Assuming 'ascii'.")
            return
        encoding = sys.getfilesystemencoding()
        if encoding is None:
            log.warning("Python failed to autodetect filesystem encoding. Using 'ascii' instead.")
            return
    try:
        # Probe that the codec exists and can decode; narrowed from a bare
        # except, which also swallowed unrelated errors (e.g. KeyboardInterrupt).
        'a1'.decode(encoding)
    except (LookupError, ValueError):
        log.error("Filesystem encoding '"+encoding+"' is not supported. Using 'ascii' instead.")
        return
    filesystem_encoding = encoding
def _read_messages(self):
    """ Read messages off the line and relay or process them
    depending on connection type """
    # Generator-based protocol reader: each `yield n` asks the driving
    # code for n more bytes, which arrive in self._message.
    while True:
        yield 2 # Stream ID
        stream = toint(self._message)
        handler = self.get_stream_handler(stream)
        self._message = ''
        yield 4 # Message Length
        l = toint(self._message)
        if l > self.config['max_message_length']:
            log.warning("Received message longer than max length")
            # NOTE(review): this only warns — the oversized payload is
            # still read below. The commented-out return suggests aborting
            # was considered; confirm the intended behavior.
            # return
        self._message = ''
        yield l # Payload
        if handler == self:
            # Grab the stream ID to initialize the received stream
            self.incoming_stream_id = stream
        handler.got_message(self._message)
        self._message = ''
def _save_fastresume(self, on_finish=False): if not on_finish and (self.finflag.isSet() or not self.started): return if not self.config["data_dir"]: return if on_finish: # self._ratemeasure might not exist yet amount_done = self.file_size else: amount_done = self.file_size - self._ratemeasure.get_size_left() filename = os.path.join(self.config["data_dir"], "resume", self.infohash.encode("hex")) resumefile = None try: resumefile = file(filename, "wb") self._storage.write_fastresume(resumefile, amount_done) self._storagewrapper.write_fastresume(resumefile) resumefile.close() except Exception, e: log.warning("Could not write fastresume data: " + str(e)) if resumefile is not None: resumefile.close()
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
    """Parse the real host/port out of url (for the later CONNECT request
    to the proxy), then delegate to HTTPSConnection.putrequest.

    Raises ValueError when url has no scheme or an unknown scheme.
    """
    # putrequest is called before connect, so the url can be interpreted
    # here to find the host/port the proxy must tunnel to.
    log.warning("Using ProxyHTTPSConnection")
    proto, rest = urllib.splittype(url)
    if proto is None:
        raise ValueError("unknown URL type: %s" % url)
    host, rest = urllib.splithost(rest)
    host, port = urllib.splitport(host)
    if port is None:
        # No explicit port: fall back to the scheme's default.
        if proto not in self._ports:
            raise ValueError("unknown protocol for: %s" % url)
        port = self._ports[proto]
    self._real_host = host
    self._real_port = int(port)
    #This whole class exists for this line :/
    HTTPSConnection.putrequest(self, method, url, skip_host, skip_accept_encoding)
def init(data_dir):
    """Sets the directory in which to store crypto data/randfile
    @param data_dir: path to directory
    @type data_dir: string
    """
    threading.init()
    global get_rand
    global global_cryptodir, global_randfile, global_dd, global_certpath
    # Idempotent: a second call keeps the first root directory.
    if None not in (global_cryptodir, global_randfile):
        log.warning("Crypto already initialized with root directory: %s. Not using %s." % (global_dd, data_dir))
        return
    # Initialize directory structure
    global_dd = data_dir
    global_cryptodir = os.path.join(data_dir, 'crypto')
    if not os.path.exists(data_dir):
        os.mkdir(data_dir, 0700)
    if not os.path.exists(global_cryptodir):
        os.mkdir(global_cryptodir, 0700)
    # Copy the default certificates into the user's crypto dir
    global_certpath = os.path.join(global_cryptodir, 'default_certificates')
    if not os.path.exists(global_certpath):
        from Anomos import app_root
        shutil.copytree(os.path.join(app_root, 'default_certificates'), global_certpath)
    # Initialize randfile
    global_randfile = os.path.join(global_cryptodir, 'randpool.dat')
    if Rand.save_file(global_randfile) == 0:
        raise CryptoError('Rand file not writable')
    # Random-byte source, wrapped so the rand file is loaded/saved
    # around each use.
    @use_rand_file
    def randfunc(numBytes=32):
        rb = Rand.rand_bytes(numBytes);
        return rb
    get_rand = randfunc
    # Make Crypto objects accessible now that init has been called.
    global AESKey, Certificate, PeerCert
    import _AESKey, _Certificate, _PeerCert
    AESKey = _AESKey.AESKey
    Certificate = _Certificate.Certificate
    PeerCert = _PeerCert.PeerCert
def _save_fastresume(self, on_finish=False): if not on_finish and (self.finflag.isSet() or not self.started): return if not self.config['data_dir']: return if on_finish: # self._ratemeasure might not exist yet amount_done = self.file_size else: amount_done = self.file_size - self._ratemeasure.get_size_left() filename = os.path.join(self.config['data_dir'], 'resume', self.infohash.encode('hex')) resumefile = None try: resumefile = file(filename, 'wb') self._storage.write_fastresume(resumefile, amount_done) self._storagewrapper.write_fastresume(resumefile) resumefile.close() except Exception, e: log.warning('Could not write fastresume data: ' + str(e)) if resumefile is not None: resumefile.close()
def _rerequest(self, query):
    """
    Make an HTTP GET request to the tracker
    Note: This runs in its own thread.

    @param query: pre-encoded query string appended to the announce path
    """
    log.info("Making announce to " + self.url + ":" + str(self.remote_port))
    if not self.https:
        # Plain-HTTP trackers are refused outright.
        log.warning("Warning: Will not connect to non HTTPS server")
        return
    try:
        if self.proxy_url:
            # Tunnel through an HTTPS proxy; the full URL is required so
            # ProxyHTTPSConnection can extract the real host/port.
            h = ProxyHTTPSConnection(self.proxy_url, \
                                     username=self.proxy_username, \
                                     password=self.proxy_password, \
                                     ssl_context=self.ssl_ctx)
            s = "https://%s:%d%s%s" % (self.url, self.remote_port, self.path, query)
            h.putrequest('GET', s)
            # I suggest that for now, until there is a better solution in python,
            # that connections with socks proxies be done with:
            # socat TCP4-LISTEN:5555,fork SOCKS4A:s,socksport=9050
            # or use Privoxy:
            # 127.0.0.1:8118
        else:
            #No proxy url, use normal connection
            h = HTTPSConnection(self.url, self.remote_port, ssl_context=self.ssl_ctx)
            h.putrequest('GET', self.path+query)
        h.endheaders()
        resp = h.getresponse()
        data = resp.read()
        resp.close()
        h.close()
        h = None
    # urllib2 can raise various crap that doesn't have a common base
    # exception class especially when proxies are used, at least
    # ValueError and stuff from httplib
    except Exception, g:
        def f(r='Problem connecting to ' + self.url + ': ' + str(g)):
            self._postrequest(errormsg=r)
        # NOTE(review): f is defined but never invoked in this span —
        # presumably it is scheduled onto the main thread elsewhere.
        # The success path also never consumes `data` here; confirm this
        # method is not truncated in this view.
class P2PServer(asyncore.dispatcher): def __init__(self, addr, port, ssl_context): asyncore.dispatcher.__init__(self) self.create_socket(ssl_context) self.bound = False # The bound variable is to prevent handle_error self.bind((addr, port)) # from logging errors caused by the following self.bound = True # call to bind. Errors from bind are caught by # _find_port in Multitorrent. self.listen(socket.SOMAXCONN) # Neighbor Manager is set after the torrent is started self.neighbor_manager = None def set_neighbor_manager(self, mgr): self.neighbor_manager = mgr ## asyncore.dispatcher methods ## def create_socket(self, ssl_context): self.ssl_ctx = ssl_context conn = SSL.Connection(self.ssl_ctx) self.set_socket(conn) self.socket.setblocking(0) self.set_reuse_addr() self.add_channel() def writable(self): return False def handle_accept(self): try: sock, addr = self.socket.accept() except (SSL.SSLError, socket.error), err: log.warning("Problem accepting connection: " + str(err)) return if self.neighbor_manager is None: log.warning("Received connection attempt without any active" \ "torrents, this could be the port checker or another" \ "service trying to connect on this port.") else: conn = P2PConnection(socket=sock) AnomosNeighborInitializer(self.neighbor_manager, conn)
factory = gtk.IconFactory() # these don't seem to be documented anywhere: # ICON_SIZE_BUTTON = 20x20 # ICON_SIZE_LARGE_TOOLBAR = 24x24 for n in 'broken finished info pause paused play queued running remove running-unsafe'.split( ): try: fn = os.path.join(image_root, ("%s.png" % n)) pixbuf = gtk.gdk.pixbuf_new_from_file(fn) set = gtk.IconSet(pixbuf) factory.add('anon-%s' % n, set) except Exception, e: log.warning(e) factory.add_default() def get_logo(size=32): fn = os.path.join(image_root, 'logo', 'anomos_%d.png' % size) logo = gtk.Image() logo.set_from_file(fn) return logo def get_warning(): warn = gtk.Image() warn.set_from_stock(gtk.STOCK_DIALOG_WARNING, gtk.ICON_SIZE_DND) return warn
log.critical(data.getvalue()) self._activity = ("killed by internal exception: " + str(e), 0) try: self._close() except Exception, e: log.error("Additional error when closing down due to " "error: " + str(e)) if is_external: self.feedback.failed(self, True) return if self.config["data_dir"] and self._storage is not None: filename = os.path.join(self.config["data_dir"], "resume", self.infohash.encode("hex")) if os.path.exists(filename): try: os.remove(filename) except Exception, e: log.warning("Could not remove fastresume file " "after failure:" + str(e)) self.feedback.failed(self, False) def _finished(self): self.finflag.set() # Call self._storage.close() to flush buffers and change files to # read-only mode (when they're possibly reopened). Let exceptions # from self._storage.close() kill the torrent since files might not # be correct on disk if file.close() failed. self._storage.close() # If we haven't announced yet, normal first announce done later will # tell the tracker about seed status. self.is_seed = True if self._announced: self._rerequest.announce_finish() self._activity = ("seeding", 1)
def handle_accept(self):
    # Accept an incoming SSL connection; a failed handshake or socket
    # error is logged and the attempt dropped.
    try:
        sock, addr = self.socket.accept()
    except (SSL.SSLError, socket.error), err:
        log.warning("Problem accepting connection: " + str(err))
        return
    # NOTE(review): the accepted `sock`/`addr` are unused in the visible
    # span — handling presumably continues below; confirm this method is
    # not truncated in this view.
def handle_accept(self):
    # Accept an incoming HTTPS connection; a failed handshake or socket
    # error is logged and the attempt dropped.
    try:
        sock, addr = self.socket.accept()
    except (SSL.SSLError, socket.error), e:
        log.warning("Exception in HTTPSServer socket.accept: " + str(e))
        return
    # NOTE(review): the accepted `sock`/`addr` are unused in the visible
    # span — handling presumably continues below; confirm this method is
    # not truncated in this view.
try: self._close() except Exception, e: log.error('Additional error when closing down due to ' 'error: ' + str(e)) if is_external: self.feedback.failed(self, True) return if self.config['data_dir'] and self._storage is not None: filename = os.path.join(self.config['data_dir'], 'resume', self.infohash.encode('hex')) if os.path.exists(filename): try: os.remove(filename) except Exception, e: log.warning('Could not remove fastresume file ' 'after failure:' + str(e)) self.feedback.failed(self, False) def _finished(self): self.finflag.set() # Call self._storage.close() to flush buffers and change files to # read-only mode (when they're possibly reopened). Let exceptions # from self._storage.close() kill the torrent since files might not # be correct on disk if file.close() failed. self._storage.close() # If we haven't announced yet, normal first announce done later will # tell the tracker about seed status. self.is_seed = True if self._announced: self._rerequest.announce_finish() self._activity = ('seeding', 1)
def invalid_message(self, t):
    """Handle an unrecognized message type by dropping the neighbor's socket.

    @param t: the single-byte message type that failed to dispatch
    """
    msg_type = ord(t)
    log.warning("Invalid message of type %02x on %s. Closing neighbor." %
                (msg_type, self.uniq_id()))
    self.socket.close()
def invalid_message(self, t):
    """Handle an unrecognized message type by closing this stream.

    @param t: the single-byte message type that failed to dispatch
    """
    msg_type = ord(t)
    log.warning("Invalid message of type %02x on %s. Closing stream." %
                (msg_type, self.uniq_id()))
    self.close()
return align(obj,0.5,amt) factory = gtk.IconFactory() # these don't seem to be documented anywhere: # ICON_SIZE_BUTTON = 20x20 # ICON_SIZE_LARGE_TOOLBAR = 24x24 for n in 'broken finished info pause paused play queued running remove running-unsafe'.split(): try: fn = os.path.join(image_root, ("%s.png"%n)) pixbuf = gtk.gdk.pixbuf_new_from_file(fn) set = gtk.IconSet(pixbuf) factory.add('anon-%s'%n, set) except Exception, e: log.warning(e) factory.add_default() def get_logo(size=32): fn = os.path.join(image_root, 'logo', 'anomos_%d.png'%size) logo = gtk.Image() logo.set_from_file(fn) return logo def get_warning(): warn = gtk.Image() warn.set_from_stock(gtk.STOCK_DIALOG_WARNING, gtk.ICON_SIZE_DND) return warn class Size(long):
def collect_incoming_data(self, data):
    """Hand received bytes to the active collector; warn and drop them
    when no collector is attached."""
    if not self.collector:
        log.warning("Dropping %d bytes of data" % len(data))
        return
    self.collector.collect_incoming_data(data)