def __init__(self, timeout, ipv6_enable, readsize=100000, max_connects=1000):
    """Set up socket bookkeeping for the polling event loop.

    A non-positive max_connects is replaced by the default of 1000.
    """
    self.timeout = timeout
    self.ipv6_enable = ipv6_enable
    self.readsize = readsize
    self.poll = poll()
    self.single_sockets = {}
    self.dead_from_write = []
    # Guard against nonsensical connection limits.
    if max_connects <= 0:
        max_connects = 1000
    if DEBUG:
        log('SocketHandler::__init__: max_connects', max_connects)
    self.max_connects = max_connects
    self.servers = {}
    self.btengine_said_reachable = False
    self.interrupt_socket = None
    self.udp_sockets = {}
    # A private stream source only accepts its configured support nodes.
    is_private_stream = (globalConfig.get_mode() == 'stream'
                         and globalConfig.get_value('private_source'))
    if is_private_stream:
        self.white_list = globalConfig.get_value('support_nodes')
        if DEBUG:
            log('SocketHandler::__init__: white_list', self.white_list)
    else:
        self.white_list = None
def __init__(self, numpieces, rarest_first_cutoff=1, rarest_first_priority_cutoff=3, priority_step=20, helper=None, coordinator=None, rate_predictor=None, piecesize=0):
    """Streaming piece picker; extends PiecePicker with buffering state."""
    PiecePicker.__init__(self, numpieces, rarest_first_cutoff,
                         rarest_first_priority_cutoff, priority_step,
                         helper, coordinator, rate_predictor)
    self.maxhave = 0
    # Request-priority counters, bucketed by urgency.
    self.stats = {'high': 0, 'mid': 0, 'low': 0}
    self.transporter = None
    self.outstanding_requests = {}
    self.playing_delay = (5, 20, -0.5)
    # The buffering delay triple may be overridden from the global config.
    configured_delay = globalConfig.get_value('piece_picker_buffering_delay', None)
    self.buffering_delay = configured_delay if configured_delay is not None else (7.5, 30, 10)
    self.is_interesting = self.is_interesting_normal
    self.extra_videostatus = []
def __init__(self, i2iport, connhandler, timeout=300.0, port_file=None):
    """Daemon thread serving instance-to-instance commands on i2iport."""
    Thread.__init__(self)
    self.setDaemon(True)
    self.setName('Instance2Instance' + self.getName())
    self.i2iport = i2iport
    self.port_file = port_file
    self.connhandler = connhandler
    self.i2idoneflag = Event()
    self.rawserver = RawServer(self.i2idoneflag, timeout / 5.0, timeout,
                               ipv6_enable=False,
                               failfunc=self.rawserver_fatalerrorfunc,
                               errorfunc=self.rawserver_nonfatalerrorfunc)
    self.rawserver.add_task(self.rawserver_keepalive, 1)
    # Bind to all interfaces only when non-local clients are allowed.
    if globalConfig.get_value('allow-non-local-client-connection'):
        bound = self.rawserver.bind(self.i2iport, reuse=True)
    else:
        bound = self.rawserver.bind(self.i2iport, bind=['127.0.0.1'], reuse=True)
    if DEBUG:
        log('i2is::init: bound on interfaces', bound)
    # Port 0 means "pick any free port": record it and optionally persist it.
    if i2iport == 0 and len(bound):
        host, port = bound[0]
        self.i2iport = port
        if port_file is not None:
            fh = None
            try:
                fh = open(port_file, 'w')
                fh.write(str(port))
            except:
                if DEBUG:
                    log('i2is::init: cannot save port to file', port_file)
                raise Exception('Cannot save port')
            finally:
                if fh:
                    fh.close()
def get_default_state_dir():
    """Return the per-user application state directory path."""
    # Branding decides the hidden home-directory folder name.
    if globalConfig.get_value('apptype', '') == 'torrentstream':
        suffix = '.Torrent Stream'
    else:
        suffix = '.freestream'
    return os.path.join(get_appstate_dir(), suffix)
def connection_lost(self, s):
    """Drop the InstanceConnection bookkeeping for a closed socket."""
    ip = s.get_ip()
    if DEBUG:
        log('InstanceConnectionHandler:connection_lost: ip', ip)
    # Refused non-local sockets were never registered, so skip the delete.
    refuse_remote = not globalConfig.get_value('allow-non-local-client-connection')
    if refuse_remote and ip != '127.0.0.1':
        print >> sys.stderr, 'i2is: ich: connection_lost: Refusing non-local connection from', ip
        return
    del self.singsock2ic[s]
def external_connection_made(self, s):
    """Register a new client control connection, refusing non-local peers
    unless 'allow-non-local-client-connection' is enabled.
    """
    peername = s.get_ip()
    if DEBUG:
        log('InstanceConnectionHandler: external_connection_made: ip', peername)
    if not globalConfig.get_value('allow-non-local-client-connection') and peername != '127.0.0.1':
        print >> sys.stderr, 'i2is: ich: ext_conn_made: Refusing non-local connection from', peername
        s.close()
        # Bug fix: previously control fell through after close() and the
        # refused (already closed) socket was still wrapped and registered
        # in singsock2ic. Return so it is never tracked.
        return
    ic = InstanceConnection(s, self, self.readlinecallback)
    self.singsock2ic[s] = ic
def connection_lost(self, s):
    """Drop the InstanceConnection bookkeeping for a closed socket."""
    ip = s.get_ip()
    if DEBUG:
        log('InstanceConnectionHandler:connection_lost: ip', ip)
    # Refused non-local sockets were never registered, so skip the delete.
    refuse_remote = not globalConfig.get_value('allow-non-local-client-connection')
    if refuse_remote and ip != '127.0.0.1':
        print >> sys.stderr, 'i2is: ich: connection_lost: Refusing non-local connection from', ip
        return
    del self.singsock2ic[s]
def external_connection_made(self, s):
    """Register a new client control connection, refusing non-local peers
    unless 'allow-non-local-client-connection' is enabled.
    """
    peername = s.get_ip()
    if DEBUG:
        log('InstanceConnectionHandler: external_connection_made: ip', peername)
    if not globalConfig.get_value('allow-non-local-client-connection') and peername != '127.0.0.1':
        print >> sys.stderr, 'i2is: ich: ext_conn_made: Refusing non-local connection from', peername
        s.close()
        # Bug fix: previously control fell through after close() and the
        # refused (already closed) socket was still wrapped and registered
        # in singsock2ic. Return so it is never tracked.
        return
    ic = InstanceConnection(s, self, self.readlinecallback)
    self.singsock2ic[s] = ic
def __init__(self, connecter, raw_server, my_id, max_len, schedulefunc, keepalive_delay, download_id, measurefunc, config, limit_connections_queue):
    """Track all peer connections for one download and schedule keepalives.

    In 'node' mode a white list (only these hosts may be fetched from) or a
    black list (these hosts must not be fetched from) is derived from the
    source/support node configuration.
    """
    self.raw_server = raw_server
    self.connecter = connecter
    self.my_id = my_id
    self.max_len = max_len
    self.schedulefunc = schedulefunc
    self.keepalive_delay = keepalive_delay
    self.download_id = download_id
    self.measurefunc = measurefunc
    self.config = config
    self.connections = {}
    self.banned = {}
    self.to_connect = set()
    self.trackertime = None
    self.paused = False
    self.limit_connections_queue = limit_connections_queue
    # A configured limit of 0 means "unlimited"; model that as a huge cap.
    self.max_connections = self.config['max_connections'] or 1073741824
    self.rerequest = None
    self.toofast_banned = {}
    self.helper = None
    self.white_list = None
    self.black_list = None
    self.app_mode = globalConfig.get_mode()
    if self.app_mode == 'node':
        self.last_source_check_time = None
        src_node = globalConfig.get_value('source_node')
        sup_nodes = globalConfig.get_value('support_nodes')
        if not globalConfig.get_value('allow_peers_download'):
            # Only explicitly allowed hosts may be downloaded from.
            self.white_list = set()
            if src_node is not None and globalConfig.get_value('allow_source_download'):
                self.white_list.add(src_node[0])
            if len(sup_nodes) and globalConfig.get_value('allow_support_download'):
                self.white_list.update([addr[0] for addr in sup_nodes])
        else:
            # Everyone is allowed except explicitly denied hosts.
            self.black_list = set()
            if src_node is not None and not globalConfig.get_value('allow_source_download'):
                self.black_list.add(src_node[0])
            if len(sup_nodes) and not globalConfig.get_value('allow_support_download'):
                self.black_list.update([addr[0] for addr in sup_nodes])
            if len(self.black_list) == 0:
                self.black_list = None
    if DEBUG:
        log('Encoder::__init__: white_list', self.white_list, 'black_list', self.black_list)
    schedulefunc(self.send_keepalives, keepalive_delay)
    self.repexer = None
def __init__(self, timeout, ipv6_enable, readsize=100000, max_connects=1000):
    """Set up socket bookkeeping for the polling event loop.

    A non-positive max_connects is replaced by the default of 1000.
    """
    self.timeout = timeout
    self.ipv6_enable = ipv6_enable
    self.readsize = readsize
    self.poll = poll()
    self.single_sockets = {}
    self.dead_from_write = []
    # Guard against nonsensical connection limits.
    if max_connects <= 0:
        max_connects = 1000
    if DEBUG:
        log('SocketHandler::__init__: max_connects', max_connects)
    self.max_connects = max_connects
    self.servers = {}
    self.btengine_said_reachable = False
    self.interrupt_socket = None
    self.udp_sockets = {}
    # A private stream source only accepts its configured support nodes.
    is_private_stream = (globalConfig.get_mode() == 'stream'
                         and globalConfig.get_value('private_source'))
    if is_private_stream:
        self.white_list = globalConfig.get_value('support_nodes')
        if DEBUG:
            log('SocketHandler::__init__: white_list', self.white_list)
    else:
        self.white_list = None
def add_piece(self, index, piece):
    """Feed a locally created live piece into storage and announce it."""
    if DEBUG:
        log('VideoSource::add_piece: index', index)
    if globalConfig.get_value('live_source_show_pieces', False):
        log('stream: created piece', index, 'speed %.2f KiB/s' % (self.ratemeasure.get_rate_noupdate() / 1024))
    chunk_size = self.storagewrapper.request_size
    length = min(len(piece), self.storagewrapper._piecelen(index))
    # Hand the piece to storage chunk by chunk, as if it arrived from a peer.
    for offset in range(0, length, chunk_size):
        self.storagewrapper.new_request(index)
        self.storagewrapper.piece_came_in(index, offset, [], piece[offset:offset + chunk_size])
    self.picker.complete(index)
    self.connecter.got_piece(index)
def __init__(self, downloader, connection):
    """Per-peer download state; in 'node' mode derives white/black lists
    from the source/support node configuration.
    """
    SingleDownloadHelperInterface.__init__(self)
    self.downloader = downloader
    self.connection = connection
    self.choked = True
    self.interested = False
    self.active_requests = []
    self.measure = Measure(downloader.max_rate_period)
    self.peermeasure = Measure(downloader.max_rate_period)
    self.raw_have = Bitfield(downloader.numpieces)
    self.have = Bitfield(downloader.numpieces)
    self.last = -1000
    self.last2 = -1000
    self.example_interest = None
    self.backlog = 2
    self.ip = connection.get_ip()
    self.guard = BadDataGuard(self)
    # Fix: app_mode was previously fetched and assigned twice in a row;
    # query the config once.
    self.app_mode = globalConfig.get_mode()
    self.white_list = None
    self.black_list = None
    if self.app_mode == 'node':
        source_node = globalConfig.get_value('source_node')
        support_nodes = globalConfig.get_value('support_nodes')
        if not globalConfig.get_value('allow_peers_download'):
            # White list: only the hosts we are explicitly allowed to use.
            self.white_list = set()
            if source_node is not None and globalConfig.get_value('allow_source_download'):
                self.white_list.add(source_node[0])
            if len(support_nodes) and globalConfig.get_value('allow_support_download'):
                self.white_list.update([addr[0] for addr in support_nodes])
        else:
            # Black list: hosts we must not download from.
            self.black_list = set()
            if source_node is not None and not globalConfig.get_value('allow_source_download'):
                self.black_list.add(source_node[0])
            if len(support_nodes) and not globalConfig.get_value('allow_support_download'):
                self.black_list.update([addr[0] for addr in support_nodes])
            if len(self.black_list) == 0:
                self.black_list = None
    if DEBUG:
        log('download::__init__: white_list', self.white_list, 'black_list', self.black_list)
    self.helper = downloader.picker.helper
    self.proxy_have = Bitfield(downloader.numpieces)
    self.short_term_measure = Measure(5)
    self.bad_performance_counter = 0
def __init__(self, port):
    """Singleton HTTP server for serving video streams to local players."""
    if VideoHTTPServer.__single:
        raise RuntimeError('HTTPServer is Singleton')
    VideoHTTPServer.__single = self
    self.port = port
    # Listen on all interfaces only when remote clients are permitted.
    bind_address = '' if globalConfig.get_value('allow-non-local-client-connection') else '127.0.0.1'
    BaseHTTPServer.HTTPServer.__init__(self, (bind_address, self.port), SimpleServer)
    self.daemon_threads = True
    self.allow_reuse_address = True
    self.lock = RLock()
    self.urlpath2streaminfo = {}
    self.mappers = []
    self.errorcallback = None
    self.statuscallback = None
def __init__(self, numpieces, rarest_first_cutoff=1, rarest_first_priority_cutoff=3, priority_step=20, helper=None, coordinator=None, rate_predictor=None, piecesize=0):
    """Streaming piece picker; extends PiecePicker with buffering state."""
    PiecePicker.__init__(self, numpieces, rarest_first_cutoff,
                         rarest_first_priority_cutoff, priority_step,
                         helper, coordinator, rate_predictor)
    self.maxhave = 0
    # Request-priority counters, bucketed by urgency.
    self.stats = {'high': 0, 'mid': 0, 'low': 0}
    self.transporter = None
    self.outstanding_requests = {}
    self.playing_delay = (5, 20, -0.5)
    # The buffering delay triple may be overridden from the global config.
    configured_delay = globalConfig.get_value('piece_picker_buffering_delay', None)
    self.buffering_delay = configured_delay if configured_delay is not None else (7.5, 30, 10)
    self.is_interesting = self.is_interesting_normal
    self.extra_videostatus = []
def __init__(self, connecter, raw_server, my_id, max_len, schedulefunc, keepalive_delay, download_id, measurefunc, config, limit_connections_queue):
    """Track all peer connections for one download and schedule keepalives.

    In 'node' mode a white list (only these hosts may be fetched from) or a
    black list (these hosts must not be fetched from) is derived from the
    source/support node configuration.
    """
    self.raw_server = raw_server
    self.connecter = connecter
    self.my_id = my_id
    self.max_len = max_len
    self.schedulefunc = schedulefunc
    self.keepalive_delay = keepalive_delay
    self.download_id = download_id
    self.measurefunc = measurefunc
    self.config = config
    self.connections = {}
    self.banned = {}
    self.to_connect = set()
    self.trackertime = None
    self.paused = False
    self.limit_connections_queue = limit_connections_queue
    # A configured limit of 0 means "unlimited"; model that as a huge cap.
    self.max_connections = self.config['max_connections'] or 1073741824
    self.rerequest = None
    self.toofast_banned = {}
    self.helper = None
    self.white_list = None
    self.black_list = None
    self.app_mode = globalConfig.get_mode()
    if self.app_mode == 'node':
        self.last_source_check_time = None
        src_node = globalConfig.get_value('source_node')
        sup_nodes = globalConfig.get_value('support_nodes')
        if not globalConfig.get_value('allow_peers_download'):
            # Only explicitly allowed hosts may be downloaded from.
            self.white_list = set()
            if src_node is not None and globalConfig.get_value('allow_source_download'):
                self.white_list.add(src_node[0])
            if len(sup_nodes) and globalConfig.get_value('allow_support_download'):
                self.white_list.update([addr[0] for addr in sup_nodes])
        else:
            # Everyone is allowed except explicitly denied hosts.
            self.black_list = set()
            if src_node is not None and not globalConfig.get_value('allow_source_download'):
                self.black_list.add(src_node[0])
            if len(sup_nodes) and not globalConfig.get_value('allow_support_download'):
                self.black_list.update([addr[0] for addr in sup_nodes])
            if len(self.black_list) == 0:
                self.black_list = None
    if DEBUG:
        log('Encoder::__init__: white_list', self.white_list, 'black_list', self.black_list)
    schedulefunc(self.send_keepalives, keepalive_delay)
    self.repexer = None
def add_piece(self, index, piece):
    """Feed a locally created live piece into storage and announce it."""
    if DEBUG:
        log('VideoSource::add_piece: index', index)
    if globalConfig.get_value('live_source_show_pieces', False):
        log('stream: created piece', index, 'speed %.2f KiB/s' % (self.ratemeasure.get_rate_noupdate() / 1024))
    chunk_size = self.storagewrapper.request_size
    length = min(len(piece), self.storagewrapper._piecelen(index))
    # Hand the piece to storage chunk by chunk, as if it arrived from a peer.
    for offset in range(0, length, chunk_size):
        self.storagewrapper.new_request(index)
        self.storagewrapper.piece_came_in(index, offset, [], piece[offset:offset + chunk_size])
    self.picker.complete(index)
    self.connecter.got_piece(index)
def __init__(self, downloader, connection):
    """Per-peer download state; in 'node' mode derives white/black lists
    from the source/support node configuration.
    """
    SingleDownloadHelperInterface.__init__(self)
    self.downloader = downloader
    self.connection = connection
    self.choked = True
    self.interested = False
    self.active_requests = []
    self.measure = Measure(downloader.max_rate_period)
    self.peermeasure = Measure(downloader.max_rate_period)
    self.raw_have = Bitfield(downloader.numpieces)
    self.have = Bitfield(downloader.numpieces)
    self.last = -1000
    self.last2 = -1000
    self.example_interest = None
    self.backlog = 2
    self.ip = connection.get_ip()
    self.guard = BadDataGuard(self)
    # Fix: app_mode was previously fetched and assigned twice in a row;
    # query the config once.
    self.app_mode = globalConfig.get_mode()
    self.white_list = None
    self.black_list = None
    if self.app_mode == 'node':
        source_node = globalConfig.get_value('source_node')
        support_nodes = globalConfig.get_value('support_nodes')
        if not globalConfig.get_value('allow_peers_download'):
            # White list: only the hosts we are explicitly allowed to use.
            self.white_list = set()
            if source_node is not None and globalConfig.get_value('allow_source_download'):
                self.white_list.add(source_node[0])
            if len(support_nodes) and globalConfig.get_value('allow_support_download'):
                self.white_list.update([addr[0] for addr in support_nodes])
        else:
            # Black list: hosts we must not download from.
            self.black_list = set()
            if source_node is not None and not globalConfig.get_value('allow_source_download'):
                self.black_list.add(source_node[0])
            if len(support_nodes) and not globalConfig.get_value('allow_support_download'):
                self.black_list.update([addr[0] for addr in support_nodes])
            if len(self.black_list) == 0:
                self.black_list = None
    if DEBUG:
        log('download::__init__: white_list', self.white_list, 'black_list', self.black_list)
    self.helper = downloader.picker.helper
    self.proxy_have = Bitfield(downloader.numpieces)
    self.short_term_measure = Measure(5)
    self.bad_performance_counter = 0
def __init__(self, i2iport, connhandler, timeout=300.0, port_file=None):
    """Daemon thread serving instance-to-instance commands on i2iport."""
    Thread.__init__(self)
    self.setDaemon(True)
    self.setName('Instance2Instance' + self.getName())
    self.i2iport = i2iport
    self.port_file = port_file
    self.connhandler = connhandler
    self.i2idoneflag = Event()
    self.rawserver = RawServer(self.i2idoneflag, timeout / 5.0, timeout,
                               ipv6_enable=False,
                               failfunc=self.rawserver_fatalerrorfunc,
                               errorfunc=self.rawserver_nonfatalerrorfunc)
    self.rawserver.add_task(self.rawserver_keepalive, 1)
    # Bind to all interfaces only when non-local clients are allowed.
    if globalConfig.get_value('allow-non-local-client-connection'):
        bound = self.rawserver.bind(self.i2iport, reuse=True)
    else:
        bound = self.rawserver.bind(self.i2iport, bind=['127.0.0.1'], reuse=True)
    if DEBUG:
        log('i2is::init: bound on interfaces', bound)
    # Port 0 means "pick any free port": record it and optionally persist it.
    if i2iport == 0 and len(bound):
        host, port = bound[0]
        self.i2iport = port
        if port_file is not None:
            fh = None
            try:
                fh = open(port_file, 'w')
                fh.write(str(port))
            except:
                if DEBUG:
                    log('i2is::init: cannot save port to file', port_file)
                raise Exception('Cannot save port')
            finally:
                if fh:
                    fh.close()
def __init__(self, dltype, session, tdef=None, main_url=None):
    """Common state for a download, either torrent-based or direct-URL.

    Raises ValueError when the argument matching dltype is missing or the
    type itself is unknown.
    """
    self.dllock = RLock()
    self.dltype = dltype
    self.error = None
    self.progressbeforestop = 0.0
    self.session = session
    self.pstate_for_restart = None
    self.dlruntimeconfig = None
    self.starting = False
    self.sd = None
    self.dd = None
    if self.dltype == DLTYPE_TORRENT:
        if tdef is None:
            raise ValueError('Missing tdef')
        self.filepieceranges = []
        # Work on a read-only copy so callers cannot mutate our metadata.
        self.tdef = tdef.copy()
        self.tdef.readonly = True
        self.log_prefix = 'DownloadImpl::' + str(DLTYPE_TORRENT) + ':' + binascii.hexlify(self.tdef.get_infohash()) + ':'
        # Torrents that may not be saved in the clear are always encrypted.
        if tdef.can_save() != 1:
            self.encrypted_storage = True
        else:
            self.encrypted_storage = globalConfig.get_value('encrypted_storage')
    elif self.dltype == DLTYPE_DIRECT:
        if main_url is None:
            raise ValueError('Missing url')
        self.main_url = main_url
        self.urlhash = hashlib.sha1(main_url).digest()
        self.pstate_filename = None
        self.pstate_content_length = None
        self.log_prefix = 'DownloadImpl::' + str(DLTYPE_DIRECT) + ':' + binascii.hexlify(self.urlhash) + ':'
        self.encrypted_storage = False
    else:
        raise ValueError('Unknown download type ' + str(dltype))
    self.speed_stats = {'up_total': 0.0,
                        'up_count': 0,
                        'down_total': 0.0,
                        'down_count': 0}
def _start_connection_from_queue(self, sched=True):
    """Pop one pending peer address and try to connect, honoring connection
    caps, pause state and white/black lists; reschedules itself while work
    remains. In 'node' mode, periodically re-queues the source and support
    nodes if no live connection to them exists.
    """
    try:
        force_sched = False
        # Node mode: at most every 10 seconds, verify we are connected to the
        # configured source/support nodes and queue reconnects if not.
        if self.app_mode == 'node' and (self.last_source_check_time is None or time() - self.last_source_check_time > 10):
            try:
                self.last_source_check_time = time()
                if globalConfig.get_value('allow_source_download'):
                    source_node = globalConfig.get_value('source_node')
                    if source_node is not None:
                        connected_to_source = False
                        if len(self.connections) == 0:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: no connections, connect to the source:', source_node)
                        else:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: check connection to the source:', source_node)
                            # Scan live connections for one matching the source address.
                            for v in self.connections.values():
                                if v is None:
                                    continue
                                ip = v.get_ip(True)
                                port = v.get_port(False)
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: check connection to the source: test ip', ip, 'port', port)
                                if ip == source_node[0] and port == source_node[1]:
                                    connected_to_source = True
                                    if DEBUG:
                                        log('encoder::_start_connection_from_queue: got connection to the source:', source_node)
                                    break
                        if not connected_to_source:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: start connection to the source:', source_node)
                            force_sched = True
                            self.to_connect.add((tuple(source_node), 0))
                if globalConfig.get_value('allow_support_download'):
                    support_nodes = globalConfig.get_value('support_nodes')
                    if len(support_nodes):
                        # Map each support-node address to "currently connected?".
                        nodes = {}
                        for addr in support_nodes:
                            nodes[tuple(addr)] = False
                        if len(self.connections) == 0:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: no connections, connect to support nodes:', support_nodes)
                        else:
                            for v in self.connections.values():
                                if v is None:
                                    continue
                                ip = v.get_ip(True)
                                port = v.get_port(False)
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: check connection to support node: test ip', ip, 'port', port)
                                addr = (ip, port)
                                if addr in nodes:
                                    nodes[addr] = True
                                    if DEBUG:
                                        log('encoder::_start_connection_from_queue: got connection to support node:', addr)
                        # Queue a connect for every support node not currently connected.
                        for addr, connected in nodes.iteritems():
                            if not connected:
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: start connection to support node:', addr)
                                force_sched = True
                                self.to_connect.add((addr, 0))
            except:
                # Best-effort check; never let it break the connect scheduler.
                print_exc()
        if not self.to_connect:
            return
        # Allow 50% more outgoing attempts while no peer has connected to us yet.
        if self.connecter.external_connection_made:
            max_initiate = self.config['max_initiate']
        else:
            max_initiate = int(self.config['max_initiate'] * 1.5)
        cons = len(self.connections)
        if DEBUG:
            log('encoder::_start_connection_from_queue: conns', cons, 'max conns', self.max_connections, 'max init', max_initiate)
        if cons >= self.max_connections or cons >= max_initiate:
            # At capacity: back off for a minute.
            delay = 60.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: cons >= max: delay', delay)
        elif self.paused or incompletecounter.toomany():
            # Paused or too many half-open connections: retry shortly.
            delay = 1.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: paused or too many: delay', delay)
        else:
            delay = 0.0
            dns, id = self.to_connect.pop()
            if self.white_list is not None and dns[0] not in self.white_list:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: peer is not in the white list: dns', dns)
            elif self.black_list is not None and dns[0] in self.black_list:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: peer is in the black list: dns', dns)
            else:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: start now: dns', dns, 'id', id)
                self.start_connection(dns, id)
        # Reschedule while queued addresses remain (or a node check forced it).
        if force_sched or self.to_connect and sched:
            if force_sched:
                delay = 11.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: start_from_queue: force', force_sched, 'delay', delay)
            self.raw_server.add_task(self._start_connection_from_queue, delay)
    except:
        log_exc()
        raise
def _start_connection_from_queue(self, sched=True):
    """Pop one pending peer address and try to connect, honoring connection
    caps, pause state and white/black lists; reschedules itself while work
    remains. In 'node' mode, periodically re-queues the source and support
    nodes if no live connection to them exists.
    """
    try:
        force_sched = False
        # Node mode: at most every 10 seconds, verify we are connected to the
        # configured source/support nodes and queue reconnects if not.
        if self.app_mode == 'node' and (self.last_source_check_time is None or time() - self.last_source_check_time > 10):
            try:
                self.last_source_check_time = time()
                if globalConfig.get_value('allow_source_download'):
                    source_node = globalConfig.get_value('source_node')
                    if source_node is not None:
                        connected_to_source = False
                        if len(self.connections) == 0:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: no connections, connect to the source:', source_node)
                        else:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: check connection to the source:', source_node)
                            # Scan live connections for one matching the source address.
                            for v in self.connections.values():
                                if v is None:
                                    continue
                                ip = v.get_ip(True)
                                port = v.get_port(False)
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: check connection to the source: test ip', ip, 'port', port)
                                if ip == source_node[0] and port == source_node[1]:
                                    connected_to_source = True
                                    if DEBUG:
                                        log('encoder::_start_connection_from_queue: got connection to the source:', source_node)
                                    break
                        if not connected_to_source:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: start connection to the source:', source_node)
                            force_sched = True
                            self.to_connect.add((tuple(source_node), 0))
                if globalConfig.get_value('allow_support_download'):
                    support_nodes = globalConfig.get_value('support_nodes')
                    if len(support_nodes):
                        # Map each support-node address to "currently connected?".
                        nodes = {}
                        for addr in support_nodes:
                            nodes[tuple(addr)] = False
                        if len(self.connections) == 0:
                            if DEBUG:
                                log('encoder::_start_connection_from_queue: no connections, connect to support nodes:', support_nodes)
                        else:
                            for v in self.connections.values():
                                if v is None:
                                    continue
                                ip = v.get_ip(True)
                                port = v.get_port(False)
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: check connection to support node: test ip', ip, 'port', port)
                                addr = (ip, port)
                                if addr in nodes:
                                    nodes[addr] = True
                                    if DEBUG:
                                        log('encoder::_start_connection_from_queue: got connection to support node:', addr)
                        # Queue a connect for every support node not currently connected.
                        for addr, connected in nodes.iteritems():
                            if not connected:
                                if DEBUG:
                                    log('encoder::_start_connection_from_queue: start connection to support node:', addr)
                                force_sched = True
                                self.to_connect.add((addr, 0))
            except:
                # Best-effort check; never let it break the connect scheduler.
                print_exc()
        if not self.to_connect:
            return
        # Allow 50% more outgoing attempts while no peer has connected to us yet.
        if self.connecter.external_connection_made:
            max_initiate = self.config['max_initiate']
        else:
            max_initiate = int(self.config['max_initiate'] * 1.5)
        cons = len(self.connections)
        if DEBUG:
            log('encoder::_start_connection_from_queue: conns', cons, 'max conns', self.max_connections, 'max init', max_initiate)
        if cons >= self.max_connections or cons >= max_initiate:
            # At capacity: back off for a minute.
            delay = 60.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: cons >= max: delay', delay)
        elif self.paused or incompletecounter.toomany():
            # Paused or too many half-open connections: retry shortly.
            delay = 1.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: paused or too many: delay', delay)
        else:
            delay = 0.0
            dns, id = self.to_connect.pop()
            if self.white_list is not None and dns[0] not in self.white_list:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: peer is not in the white list: dns', dns)
            elif self.black_list is not None and dns[0] in self.black_list:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: peer is in the black list: dns', dns)
            else:
                if DEBUG:
                    log('encoder::_start_connection_from_queue: start now: dns', dns, 'id', id)
                self.start_connection(dns, id)
        # Reschedule while queued addresses remain (or a node check forced it).
        if force_sched or self.to_connect and sched:
            if force_sched:
                delay = 11.0
            if DEBUG:
                log('encoder::_start_connection_from_queue: start_from_queue: force', force_sched, 'delay', delay)
            self.raw_server.add_task(self._start_connection_from_queue, delay)
    except:
        log_exc()
        raise
def _rechoke(self):
    """Recompute which peers to unchoke: pick the fastest interested peers
    (rate-based, then G2G-scored), always unchoke configured support nodes
    in 'stream' mode, and keep one optimistic-unchoke slot.
    """
    helper = self.picker.helper
    # Proxy/helper case: once complete, only the coordinator stays unchoked.
    if helper is not None and helper.coordinator is None and helper.is_complete():
        for c in self.connections:
            if not c.connection.is_coordinator_con():
                u = c.get_upload()
                u.choke()
        return
    if self.paused:
        if DEBUG:
            log('choker::_rechoke: pause, choke all uploads')
        for c in self.connections:
            c.get_upload().choke()
        return
    # Optional rate bonus for peers at internal (same-network) addresses.
    if 'unchoke_bias_for_internal' in self.config:
        checkinternalbias = self.config['unchoke_bias_for_internal']
    else:
        checkinternalbias = 0
    if DEBUG:
        log('choker: _rechoke: checkinternalbias', checkinternalbias)
    preferred = []
    maxuploads = self.config['max_uploads']
    if DEBUG:
        log('choker::_rechoke: maxuploads', maxuploads)
    if maxuploads > 1:
        # Rank non-G2G peers by transfer rate (upload rate when seeding,
        # download rate otherwise; slow or snubbed downloaders are skipped).
        for c in self.connections:
            if c.use_g2g:
                continue
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                u = c.get_upload()
                if not u.is_interested():
                    if DEBUG:
                        log('choker::_rechoke: not interested:', c.get_ip(), c.get_port())
                    continue
                if self.done():
                    r = u.get_rate()
                else:
                    d = c.get_download()
                    r = d.get_rate()
                    if r < 1000 or d.is_snubbed():
                        if DEBUG:
                            log('choker::_rechoke: too slow or snubbed:', c.get_ip(), c.get_port())
                        continue
                if checkinternalbias and c.na_get_address_distance() == 0:
                    r += checkinternalbias
                    if DEBUG:
                        print >> sys.stderr, 'choker: _rechoke: BIASING', c.get_ip(), c.get_port()
                # Negative rate so that sort() puts the fastest first.
                preferred.append((-r, c))
        self.last_preferred = len(preferred)
        preferred.sort()
        # Keep maxuploads-1 slots; the last slot is the optimistic unchoke.
        del preferred[maxuploads - 1:]
        if DEBUG:
            x = [(p[0], p[1].get_ip(), p[1].get_port()) for p in preferred]
            log('choker::_rechoke: normal unchoke: preferred', x)
            del x
        preferred = [x[1] for x in preferred]
        # Same ranking for give-to-get peers, using their two-part G2G score.
        g2g_preferred = []
        for c in self.connections:
            if not c.use_g2g:
                continue
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                u = c.get_upload()
                if not u.is_interested():
                    continue
                r = c.g2g_score()
                if checkinternalbias and c.na_get_address_distance() == 0:
                    r[0] += checkinternalbias
                    r[1] += checkinternalbias
                    if DEBUG:
                        print >> sys.stderr, 'choker: _rechoke: G2G BIASING', c.get_ip(), c.get_port()
                g2g_preferred.append((-r[0], -r[1], c))
        g2g_preferred.sort()
        del g2g_preferred[maxuploads - 1:]
        if DEBUG:
            log('choker::_rechoke: G2G unchoke: g2g_preferred', g2g_preferred)
        g2g_preferred = [x[2] for x in g2g_preferred]
        preferred += g2g_preferred
    count = len(preferred)
    hit = False
    to_unchoke = []
    # Stream mode: configured support nodes are always unchoked.
    if self.app_mode == 'stream':
        for support_node_ip in globalConfig.get_value('support_nodes', []):
            for c in self.connections:
                if c.get_ip() == support_node_ip:
                    to_unchoke.append(c.get_upload())
                    if DEBUG:
                        log('choker::_rechoke: permanently unchoking support node:', support_node_ip)
    for c in self.connections:
        u = c.get_upload()
        if c in preferred:
            to_unchoke.append(u)
        elif count < maxuploads or not hit:
            # Fill remaining slots; first interested peer found becomes the
            # optimistic unchoke (hit flag).
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                if u.is_interested():
                    to_unchoke.append(u)
                    count += 1
                    if DEBUG and not hit:
                        log('choker::_rechoke: optimistic unchoke: hit', hit, 'ip', c.get_ip(), 'port', c.get_port())
                    hit = True
        elif not c.connection.is_coordinator_con() and not c.connection.is_helper_con():
            u.choke()
        elif u.is_choked():
            # Coordinator/helper connections are never left choked.
            to_unchoke.append(u)
    if DEBUG:
        x = [(u.connection.get_ip(), u.connection.get_port()) for u in to_unchoke]
        log('choker::_rechoke: to_unchoke', x)
        del x
    for u in to_unchoke:
        u.unchoke()
def _rechoke(self):
    """Recompute which peers to unchoke: pick the fastest interested peers
    (rate-based, then G2G-scored), always unchoke configured support nodes
    in 'stream' mode, and keep one optimistic-unchoke slot.
    """
    helper = self.picker.helper
    # Proxy/helper case: once complete, only the coordinator stays unchoked.
    if helper is not None and helper.coordinator is None and helper.is_complete():
        for c in self.connections:
            if not c.connection.is_coordinator_con():
                u = c.get_upload()
                u.choke()
        return
    if self.paused:
        if DEBUG:
            log('choker::_rechoke: pause, choke all uploads')
        for c in self.connections:
            c.get_upload().choke()
        return
    # Optional rate bonus for peers at internal (same-network) addresses.
    if 'unchoke_bias_for_internal' in self.config:
        checkinternalbias = self.config['unchoke_bias_for_internal']
    else:
        checkinternalbias = 0
    if DEBUG:
        log('choker: _rechoke: checkinternalbias', checkinternalbias)
    preferred = []
    maxuploads = self.config['max_uploads']
    if DEBUG:
        log('choker::_rechoke: maxuploads', maxuploads)
    if maxuploads > 1:
        # Rank non-G2G peers by transfer rate (upload rate when seeding,
        # download rate otherwise; slow or snubbed downloaders are skipped).
        for c in self.connections:
            if c.use_g2g:
                continue
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                u = c.get_upload()
                if not u.is_interested():
                    if DEBUG:
                        log('choker::_rechoke: not interested:', c.get_ip(), c.get_port())
                    continue
                if self.done():
                    r = u.get_rate()
                else:
                    d = c.get_download()
                    r = d.get_rate()
                    if r < 1000 or d.is_snubbed():
                        if DEBUG:
                            log('choker::_rechoke: too slow or snubbed:', c.get_ip(), c.get_port())
                        continue
                if checkinternalbias and c.na_get_address_distance() == 0:
                    r += checkinternalbias
                    if DEBUG:
                        print >> sys.stderr, 'choker: _rechoke: BIASING', c.get_ip(), c.get_port()
                # Negative rate so that sort() puts the fastest first.
                preferred.append((-r, c))
        self.last_preferred = len(preferred)
        preferred.sort()
        # Keep maxuploads-1 slots; the last slot is the optimistic unchoke.
        del preferred[maxuploads - 1:]
        if DEBUG:
            x = [(p[0], p[1].get_ip(), p[1].get_port()) for p in preferred]
            log('choker::_rechoke: normal unchoke: preferred', x)
            del x
        preferred = [x[1] for x in preferred]
        # Same ranking for give-to-get peers, using their two-part G2G score.
        g2g_preferred = []
        for c in self.connections:
            if not c.use_g2g:
                continue
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                u = c.get_upload()
                if not u.is_interested():
                    continue
                r = c.g2g_score()
                if checkinternalbias and c.na_get_address_distance() == 0:
                    r[0] += checkinternalbias
                    r[1] += checkinternalbias
                    if DEBUG:
                        print >> sys.stderr, 'choker: _rechoke: G2G BIASING', c.get_ip(), c.get_port()
                g2g_preferred.append((-r[0], -r[1], c))
        g2g_preferred.sort()
        del g2g_preferred[maxuploads - 1:]
        if DEBUG:
            log('choker::_rechoke: G2G unchoke: g2g_preferred', g2g_preferred)
        g2g_preferred = [x[2] for x in g2g_preferred]
        preferred += g2g_preferred
    count = len(preferred)
    hit = False
    to_unchoke = []
    # Stream mode: configured support nodes are always unchoked.
    if self.app_mode == 'stream':
        for support_node_ip in globalConfig.get_value('support_nodes', []):
            for c in self.connections:
                if c.get_ip() == support_node_ip:
                    to_unchoke.append(c.get_upload())
                    if DEBUG:
                        log('choker::_rechoke: permanently unchoking support node:', support_node_ip)
    for c in self.connections:
        u = c.get_upload()
        if c in preferred:
            to_unchoke.append(u)
        elif count < maxuploads or not hit:
            # Fill remaining slots; first interested peer found becomes the
            # optimistic unchoke (hit flag).
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                if u.is_interested():
                    to_unchoke.append(u)
                    count += 1
                    if DEBUG and not hit:
                        log('choker::_rechoke: optimistic unchoke: hit', hit, 'ip', c.get_ip(), 'port', c.get_port())
                    hit = True
        elif not c.connection.is_coordinator_con() and not c.connection.is_helper_con():
            u.choke()
        elif u.is_choked():
            # Coordinator/helper connections are never left choked.
            to_unchoke.append(u)
    if DEBUG:
        x = [(u.connection.get_ip(), u.connection.get_port()) for u in to_unchoke]
        log('choker::_rechoke: to_unchoke', x)
        del x
    for u in to_unchoke:
        u.unchoke()