def __init__(self, config=None, noisy=True):
    """config is a dict that contains option-value pairs.

    Sets up per-reactor bookkeeping for the raw server: socket sets,
    optional profiling, and the connection-rate-limiting reactor wrapper.
    """
    RawServerMixin.__init__(self, config, noisy)
    self.doneflag = None  # init is fine until the loop starts
    # thread that created this server; used to assert thread affinity later
    self.ident = thread.get_ident()
    self.associated = False
    self.single_sockets = set()   # TCP connections
    self.unix_sockets = set()     # unix-domain sockets
    self.udp_sockets = set()      # datagram sockets
    self.listened = False
    self.connections = 0
    ##############################################################
    if profile:
        # start each run with a fresh profile dump file
        try:
            os.unlink(prof_file_name)
        except:
            pass
        self.prof = Profiler()
    ##############################################################
    # cap on half-open outgoing connections; default 10
    self.connection_limit = self.config.get('max_incomplete', 10)
    connectionRateLimitReactor(reactor, self.connection_limit)
    # bleh -- expose the rate limiter's pending-connection hooks directly
    self.add_pending_connection = reactor.add_pending_connection
    self.remove_pending_connection = reactor.remove_pending_connection
    self.reactor = reactor
    # replace the default resolver with one that resolves in a thread
    self.reactor.resolver = SaneThreadedResolver(self.reactor)
def __init__(self, multitorrent, rawserver, test_new_version=None,
             test_current_version=None):
    """Butler that periodically polls the version site for client updates.

    test_new_version / test_current_version are version strings used only
    for testing; either one forces debug mode.
    """
    TorrentButler.__init__(self, multitorrent)
    self.runs = 0
    self.rawserver = rawserver
    self.estate = set()
    self.old_updates = set()
    self.log_root = "core.AutoUpdateButler"
    self.logger = logging.getLogger(self.log_root)
    self.installable_version = None
    self.available_version = None
    self.current_version = Version.from_str(version)
    self.debug_mode = DEBUG
    # poll once a day normally, every 10 seconds in debug mode
    self.delay = 60*60*24
    if self.debug_mode:
        self.delay = 10
    if test_new_version:
        test_new_version = Version.from_str(test_new_version)
        self.debug_mode = True
        self.debug('__init__() turning debug on')
        # stub out the network check so it always "finds" the test version
        def _hack_get_available(url):
            self.debug('_hack_get_available() run#%d: returning %s' %
                       (self.runs, str(test_new_version)))
            return test_new_version
        self._get_available = _hack_get_available
    if test_current_version:
        self.debug_mode = True
        self.current_version = Version.from_str(test_current_version)
    self.version_site = version_host
    # The version URL format is:
    # http:// VERSION_SITE / OS_NAME / (LEGACY /) BETA or STABLE
    # LEGACY means that the user is on a version of an OS that has
    # been deemed "legacy", and as such the latest client version
    # for their OS version may be different than the latest client
    # version for the OS in general.  For example, if we are going
    # to roll a version that requires WinXP/2K or greater, or a
    # version that requires OSX 10.5 or greater, we may maintain
    # an older version for Win98 or OSX 10.4 in OS_NAME/legacy/.
    if os.name == 'nt':
        self.version_site += 'win32/'
        if os_version not in ('XP', '2000', '2003'):
            self.version_site += 'legacy/'
    elif osx:
        self.version_site += 'osx/'
    elif self.debug_mode:
        # on other platforms there is no updater; in debug pretend win32
        self.version_site += 'win32/'
    self.installer_dir = self._calc_installer_dir()
    # kick it off
    self.rawserver.add_task(0, self.check_version)
def close_files(self, file_set): failures = set() self.free_handle_condition.acquire() done = False while not done: filenames = list(self.open_file_to_handles.iterkeys()) for filename in filenames: if filename not in file_set: continue handles = self.open_file_to_handles.poprow(filename) for handle in handles: try: handle.close() except Exception, e: failures.add(e) self.free_handle_condition.notify() done = True for filename in file_set.iterkeys(): if filename in self.active_file_to_handles: done = False break if not done: self.free_handle_condition.wait(0.5)
def SetValue(self, value, state=None, data=None, redraw=True):
    """Re-bucket piece-state data into contiguous SparseSet ranges, then
    delegate drawing to FancyDownloadGauge.SetValue.

    data, when given, is (length, update, piece_states) where piece_states
    maps state keys ('h' = have, 't' = transferring, ints = other buckets)
    to piece collections.
    """
    if data is not None:
        sorted_data = {}
        length, update, piece_states = data
        self.resolution = length
        keys = piece_states.keys()
        keys.sort(self.sort)  # py2 cmp-style sort using self.sort
        pos = 0
        h = piece_states.get('h', SparseSet())
        t = piece_states.get('t', set())
        t = list(t)
        t.sort()
        # pieces that are either complete or in transit
        have_trans_sparse_set = h + t
        for k in keys:
            p = piece_states[k]
            if k in ('h', 't'):
                count = len(p)
            else:
                count = 0
                # OW -- per-piece membership test against the combined set
                for i in p:
                    if i not in have_trans_sparse_set:
                        count += 1
            if not count:
                continue
            # assign this state a contiguous [pos, newpos) range
            newpos = pos+count
            s = SparseSet()
            s.add(pos, newpos)
            sorted_data[k] = s
            pos = newpos
        data = (length, update, sorted_data)
    FancyDownloadGauge.SetValue(self, value, state, data, redraw)
def SetValue(self, value, state=None, data=None, redraw=True):
    """Re-bucket piece-state data into contiguous SparseSet ranges, then
    delegate drawing to FancyDownloadGauge.SetValue.

    NOTE(review): this appears to duplicate an identical SetValue elsewhere
    in this source -- consider consolidating.
    """
    if data is not None:
        sorted_data = {}
        length, update, piece_states = data
        self.resolution = length
        keys = piece_states.keys()
        keys.sort(self.sort)  # py2 cmp-style sort using self.sort
        pos = 0
        h = piece_states.get('h', SparseSet())
        t = piece_states.get('t', set())
        t = list(t)
        t.sort()
        # pieces that are either complete or in transit
        have_trans_sparse_set = h + t
        for k in keys:
            p = piece_states[k]
            if k in ('h', 't'):
                count = len(p)
            else:
                count = 0
                # OW -- per-piece membership test against the combined set
                for i in p:
                    if i not in have_trans_sparse_set:
                        count += 1
            if not count:
                continue
            # assign this state a contiguous [pos, newpos) range
            newpos = pos + count
            s = SparseSet()
            s.add(pos, newpos)
            sorted_data[k] = s
            pos = newpos
        data = (length, update, sorted_data)
    FancyDownloadGauge.SetValue(self, value, state, data, redraw)
def started(self, torrent):
    """Only run the most recently added torrent"""
    if not self.butles(torrent):
        return
    # every managed torrent except the one that just started is evicted
    others = self.estate - set([torrent.infohash])
    for infohash in others:
        self.estate.discard(infohash)
        self.multitorrent.remove_torrent(infohash, del_files=True)
def __init__(self, config, storage, rm, urlage, picker, numpieces,
             finished, errorfunc, kickfunc, banfunc, get_downrate):
    """Download coordinator state.

    Collaborators: storage (piece store), rm (request manager), picker
    (piece picker).  finished/errorfunc/kickfunc/banfunc/get_downrate are
    callbacks supplied by the owner.
    """
    self.config = config
    self.storage = storage
    self.rm = rm
    self.urlage = urlage
    self.picker = picker
    self.errorfunc = errorfunc
    self.rerequester = None
    self.entered_endgame = False
    self.connection_manager = None
    self.chunksize = config['download_chunk_size']
    self.numpieces = numpieces
    self.finished = finished
    self.snub_time = config['snub_time']
    self.kickfunc = kickfunc
    self.banfunc = banfunc
    self.get_downrate = get_downrate
    self.downloads = []         # per-peer download objects
    self.perip = {}             # per-IP accounting
    self.bad_peers = {}
    self.discarded_bytes = 0
    self.useful_received_listeners = set()
    self.raw_received_listeners = set()
    # Piece-availability buckets: every piece starts in bucket 0
    # (availability zero).  Two representations, chosen at import time.
    if SPARSE_SET:
        self.piece_states = PieceSetBuckets()
        nothing = SparseSet()
        nothing.add(0, self.numpieces)
        self.piece_states.buckets.append(nothing)
        # I hate this
        nowhere = [(i, 0) for i in xrange(self.numpieces)]
        self.piece_states.place_in_buckets = dict(nowhere)
    else:
        typecode = resolve_typecode(self.numpieces)
        self.piece_states = SortedPieceBuckets(typecode)
        nothing = array.array(typecode, range(self.numpieces))
        self.piece_states.buckets.append(nothing)
        # I hate this -- here a placement is (bucket, index-within-bucket)
        nowhere = [(i, (0, i)) for i in xrange(self.numpieces)]
        self.piece_states.place_in_buckets = dict(nowhere)
    self.last_update = 0
    self.all_requests = set()
def _affect_rate(self, type, std, max_std, rate, set):
    """Adjust a bandwidth rate via the stddev heuristic, clamp it to a
    4096 B/s floor, and apply it through the 'set' callback.

    Returns True if the rate hit rock bottom.
    NOTE: 'type' and 'set' shadow builtins; 'set' is a setter callable.
    """
    rate = self._method_stddev(type, std, max_std, rate)
    rock_bottom = False
    if rate <= 4096:
        if debug:
            print "Rock bottom"
        rock_bottom = True
        rate = 4096  # never throttle below 4 KiB/s
    set(int(rate))
    if stats:
        # stats tracing: rate and stddev samples tagged with wall time.
        # NOTE(review): writes carry no trailing newline -- presumably the
        # consumer splits on whitespace; confirm before changing.
        print "BandwidthManager._affect_rate(%f)" % rate
        self.rfp.write("%d %d" % (bttime(), int(rate)))
        self.sdevfp.write("%d %f" % (bttime(), std))
    return rock_bottom
def check_enter_endgame(self):
    """Enter endgame mode once the request manager signals it.

    Collects every peer's outstanding requests into self.all_requests,
    then tells each download to switch to endgame requesting.  Idempotent:
    does nothing after the first transition.
    """
    if self.entered_endgame or not self.rm.endgame:
        return
    self.entered_endgame = True
    pending = set()
    for dl in self.downloads:
        pending.update(dl.active_requests)
    self.all_requests = pending
    for dl in self.downloads:
        dl.fix_download_endgame()
def _affect_rate(self, type, std, max_std, rate, set):
    """Adjust a bandwidth rate via the stddev heuristic, clamp it to a
    4096 B/s floor, and apply it through the 'set' callback.

    Returns True if the rate hit rock bottom.
    NOTE(review): duplicate of an identical _affect_rate elsewhere in
    this source -- consider consolidating.
    """
    rate = self._method_stddev(type, std, max_std, rate)
    rock_bottom = False
    if rate <= 4096:
        if debug:
            print "Rock bottom"
        rock_bottom = True
        rate = 4096  # never throttle below 4 KiB/s
    set(int(rate))
    if stats:
        # stats tracing: rate and stddev samples tagged with wall time
        print "BandwidthManager._affect_rate(%f)" % rate
        self.rfp.write( "%d %d" % (bttime(),int(rate)) )
        self.sdevfp.write( "%d %f" % (bttime(), std ) )
    return rock_bottom
def aggregate_piece_states(self):
    """Build a snapshot of piece states keyed by 'h' (have),
    't' (transferring) and availability-bucket index.

    Returns (numpieces, last_update, states).
    """
    states = {'h': self.storage.have_set,
              't': set(self.rm.active_requests.iterkeys())}
    for bucket_index, bucket in enumerate(self.piece_states.buckets):
        states[bucket_index] = bucket
    return (self.numpieces, self.last_update, states)
def canon_password(username, password, allow_weak):
    '''Canonicalize and validate a password against its username.

    N.B. allow_weak = True should be used for lookup but not storage
    as it allows unassigned codepoints (and skips strength checks).

    Raises ValueError when the password is too short or confusably
    similar to the username.  Returns the canonicalized password.
    '''
    username = to_utf8(username)
    password = to_utf8(password)
    password = demoronize(password)
    password = password.decode('utf-8')
    password = saslprep(password, allow_unassigned=allow_weak)
    if not allow_weak:
        if len(password) < 6:
            # BUG FIX: old message claimed a 6-20 character range, but only
            # a minimum length is enforced -- say what we actually check.
            raise ValueError(
                'Please enter a password of at least 6 characters')
        # best-effort IDNA canonicalization; fall back to the raw text
        try:
            cpassword = canon_username(password, allow_reserved=True).decode('idna')
        except:
            cpassword = password.decode('utf-8')
        try:
            username = canon_username(username, allow_reserved=True).decode('idna')
        except:
            try:
                username = username.decode('idna')
            except:
                username = username.decode('utf-8')
        # import here because this is a big, slow module
        from BTL.canonical.identifier import confuse
        # compare the confusable-skeleton alphabets of password and username
        password_letters = list(set([ch for ch in confuse(cpassword)]))
        password_letters.sort()
        username_letters = list(set([ch for ch in confuse(username)]))
        username_letters.sort()
        if cpassword in username or u''.join(password_letters) == u''.join(username_letters):
            raise ValueError('password is too similar to user name')
        # TODO: password re-use prevention (password history)
        # TODO: complexity checks (dictionary?)
        # TODO: lockout (temporary and permanent) after failed login attempts
    return password
def __init__(self, rawserver):
    """Install an internet watcher on rawserver.

    Wraps rawserver.connectionMade so the first connection triggers
    _first_connection(), and polls _internet_watch every 5 seconds.
    """
    self.rawserver = rawserver
    old_connectionMade = rawserver.connectionMade
    def connectionMade(s):
        # fire exactly when the very first connection arrives
        if rawserver.connections == 0:
            self._first_connection()
        old_connectionMade(s)
    # FIX: guard against double installation, consistent with the sibling
    # copy of this initializer -- installing twice would chain the wrapper
    # onto itself.
    assert not hasattr(rawserver, "internet_watcher"), \
        "rawserver already has internet watcher installed"
    rawserver.connectionMade = connectionMade
    rawserver.internet_watcher = self
    self.subscribers = set()
    self.internet_watcher = task.LoopingCall(self._internet_watch)
    self.internet_watcher.start(5)
def __init__(self, rawserver):
    """Install an internet watcher on rawserver.

    Wraps rawserver.connectionMade so the first connection triggers
    _first_connection(), and polls _internet_watch every 5 seconds.
    """
    self.rawserver = rawserver
    old_connectionMade = rawserver.connectionMade
    def connectionMade(s):
        # fire exactly when the very first connection arrives
        if rawserver.connections == 0:
            self._first_connection()
        old_connectionMade(s)
    # FIX: typo in the assertion message ("conncetion" -> "connection")
    assert not hasattr(rawserver, "internet_watcher"), \
        "rawserver already has connection rate limiter installed"
    rawserver.connectionMade = connectionMade
    rawserver.internet_watcher = self
    self.subscribers = set()
    self.internet_watcher = task.LoopingCall(self._internet_watch)
    self.internet_watcher.start(5)
def _get_deferred_host_ips3(host_ip): global _host_ips global _host_ips_cachetime assert reactor.ident != thread.get_ident() l = set() if host_ip is not "unknown": l.add(host_ip) try: hostname = socket.gethostname() hostname, aliaslist, ipaddrlist = socket.gethostbyname_ex(hostname) l.update(ipaddrlist) except socket.error, e: print "ARG", e
def _get_deferred_host_ips3(host_ip): global _host_ips global _host_ips_cachetime if hasattr(reactor, 'ident'): assert reactor.ident != thread.get_ident() l = set() if host_ip is not 'unknown': l.add(host_ip) try: hostname = socket.gethostname() hostname, aliaslist, ipaddrlist = socket.gethostbyname_ex(hostname) l.update(ipaddrlist) except socket.error, e: print "ARG", e
def _collect_nodes(self, local_ips): addrs = self.get_remote_endpoints() ips = set() for (ip, port) in addrs: if ip is not None and ip != "0.0.0.0" and ip not in local_ips: assert isinstance(ip, str) assert isinstance(port, int) ips.add(ip) self.rttmonitor.set_nodes_restart(ips) delay = 5 if len(ips) > 0: delay = 300 self.external_add_task(delay, self._collect_nodes, local_ips)
def update_status(self):
    """Update torrent information based on the results of making a status
    request.

    This is a deferred-yielding generator: each `yield df` suspends until
    the deferred fires, then df.getResult() retrieves the value.  The final
    yield produces (average_completion, all_completed, global_stats).
    """
    df = self.multitorrent.get_torrents()
    yield df
    torrents = df.getResult()
    infohashes = set()
    au_torrents = {}
    for torrent in torrents:
        torrent = self._thread_proxy(torrent)
        infohashes.add(torrent.metainfo.infohash)
        if torrent.metainfo.infohash not in self.torrents:
            if self.config.get('show_hidden_torrents') or not torrent.hidden:
                # create new torrent widget
                to = self.new_displayed_torrent(torrent)
        # track auto-update torrents so they can be matched up below
        if torrent.is_auto_update:
            au_torrents[torrent.metainfo.infohash] = torrent
    for infohash, torrent in copy(self.torrents).iteritems():
        # remove nonexistent torrents
        if infohash not in infohashes:
            self._do_remove_torrent(infohash)
    total_completion = 0
    total_bytes = 0
    for infohash, torrent in copy(self.torrents).iteritems():
        # update existing torrents
        df = self.multitorrent.torrent_status(infohash,
                                              torrent.wants_peers(),
                                              torrent.wants_files())
        yield df
        try:
            core_torrent, statistics = df.getResult()
        except UnknownInfohash:
            # looks like it's gone now
            if infohash in self.torrents:
                self._do_remove_torrent(infohash)
        else:
            # the infohash might have been removed from torrents
            # while we were yielding above, so we need to check
            if infohash in self.torrents:
                core_torrent = self._thread_proxy(core_torrent)
                torrent.update(core_torrent, statistics)
                self.update_torrent(torrent)
                if statistics['fractionDone'] is not None:
                    amount_done = statistics['fractionDone'] * torrent.metainfo.total_bytes
                    total_completion += amount_done
                    total_bytes += torrent.metainfo.total_bytes
    all_completed = False
    if total_bytes == 0:
        average_completion = 0
    else:
        average_completion = total_completion / total_bytes
        if total_completion == total_bytes:
            all_completed = True
    # auto-update handling: prompt or schedule the downloaded installer
    df = self.multitorrent.auto_update_status()
    yield df
    available_version, installable_version, delay = df.getResult()
    if available_version is not None:
        if installable_version is None:
            self.notify_of_new_version(available_version)
        else:
            if self.installer_to_launch_at_exit is None:
                atexit.register(self.launch_installer_at_exit)
            if installable_version not in au_torrents:
                df = self.multitorrent.get_torrent(installable_version)
                yield df
                torrent = df.getResult()
                torrent = ThreadProxy(torrent, self.gui_wrap)
            else:
                torrent = au_torrents[installable_version]
            self.installer_to_launch_at_exit = torrent.working_path
            # rate-limit the upgrade nag dialog
            if bttime() > self.next_autoupdate_nag:
                self.prompt_for_quit_for_new_version(available_version)
                self.next_autoupdate_nag = bttime() + delay

    def get_global_stats(mt):
        # runs with the raw multitorrent object via call_with_obj
        stats = {}
        u, d = mt.get_total_rates()
        stats['total_uprate'] = Rate(u)
        stats['total_downrate'] = Rate(d)
        u, d = mt.get_total_totals()
        stats['total_uptotal'] = Size(u)
        stats['total_downtotal'] = Size(d)
        torrents = mt.get_visible_torrents()
        running = mt.get_visible_running()
        stats['num_torrents'] = len(torrents)
        stats['num_running_torrents'] = len(running)
        stats['num_connections'] = 0
        for t in torrents:
            stats['num_connections'] += t.get_num_connections()
        try:
            stats['avg_connections'] = (stats['num_connections'] /
                                        stats['num_running_torrents'])
        except ZeroDivisionError:
            stats['avg_connections'] = 0
        stats['avg_connections'] = "%.02f" % stats['avg_connections']
        return stats

    df = self.multitorrent.call_with_obj(get_global_stats)
    yield df
    global_stats = df.getResult()
    yield average_completion, all_completed, global_stats
def push(self, key, value):
    """Add value to the set stored under key, creating the set on first
    use via the parent class's __setitem__ (bypassing any override here).
    """
    if key in self:
        self[key].add(value)
    else:
        self.parent.__setitem__(self, key, set([value]))
def _slow_init(confusables_file=None):
    """Build the global confusables tables and confusemap.

    Parses Unicode TR#36 confusables.txt (from a local share file or the
    unicode.org URL), expands each character's confusable class through
    case/NFKD/IDNA variants, consolidates the 'all' class to a fixed point
    (three passes), and finally derives confusemap: char -> canonical
    confusion skeleton.  Guarded by _init_in_progress against re-entry.
    """
    global confusables, confusemap, _init_in_progress
    if _init_in_progress:
        return
    _init_in_progress = True
    try:
        # locate the data file: alongside the script when run directly...
        if confusables_file is None and __name__ == '__main__':
            try:
                confusables_file = file(
                    os.path.join(
                        os.path.split(sys.argv[0])[0],
                        '../share/confusables.txt'), 'rb')
            except:
                pass
        # ...otherwise fetch it from unicode.org
        if confusables_file is None:
            confusables_file = urllib.urlopen(
                'http://www.unicode.org/reports/tr36/data/confusables.txt',
                'rb')
        # parse lines of the form "XXXX ; YYYY ZZZZ ; class # comment"
        for line in confusables_file:
            line = line.decode('utf-8').rstrip()
            try:
                cmd, comment = [field.strip() for field in line.split('#', 1)]
                a, b, cclass = [field.strip() for field in cmd.split(';')]
                a, b = [u''.join([unichr(int(ch, 16)) for ch in field.split()])
                        for field in (a, b)]
                confusables[cclass][a] = confusables[cclass].get(a, set()) | set((b, ))
            except:
                pass
        confusables_file.close()
        # seed 'all' with case/NFKD/IDNA variants of every codepoint
        for chno in xrange(0, 0x110000):
            source = unichr(chno)
            sources = set([source,
                           unicodedata.normalize('NFKD', source),
                           source.upper(),
                           unicodedata.normalize('NFKD', source.upper()),
                           source.title(),
                           unicodedata.normalize('NFKD', source.title()),
                           source.lower(),
                           unicodedata.normalize('NFKD', source.lower())])
            try:
                sources = sources | set([source.encode('idna').decode('idna')])
            except:
                pass
            # grow the variant set to a fixed point
            while True:
                sources2 = sources
                for source in sources:
                    sources2 = sources2 | confusables['all'].get(source, set([source]))
                if sources2 == sources:
                    break
                sources = sources2
            if len(sources) > 1:
                for source in sources:
                    if unilen(source) == 1:
                        confusables['all'][source] = sources
        # fold every specific class into 'all'
        for cclass in confusables:
            if cclass == 'all':
                continue
            for source in confusables[cclass]:
                dest = confusables[cclass][source]
                dest = dest | confusables['all'].get(source, set()) | set([source])
                dest2 = dest
                for d in dest2:
                    dest = dest | confusables['all'].get(d, set())
                for d in dest:
                    if unilen(d) == 1:
                        confusables['all'][d] = confusables['all'].get(d, set()) | dest
        # consolidation pass 1 of 3: rebuild 'all' with variants merged in
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([source,
                           unicodedata.normalize('NFKD', source),
                           source.upper(),
                           unicodedata.normalize('NFKD', source.upper()),
                           source.title(),
                           unicodedata.normalize('NFKD', source.title()),
                           source.lower(),
                           unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # consolidation pass 2 of 3
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([source,
                           unicodedata.normalize('NFKD', source),
                           source.upper(),
                           unicodedata.normalize('NFKD', source.upper()),
                           source.title(),
                           unicodedata.normalize('NFKD', source.title()),
                           source.lower(),
                           unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # consolidation pass 3 of 3
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([source,
                           unicodedata.normalize('NFKD', source),
                           source.upper(),
                           unicodedata.normalize('NFKD', source.upper()),
                           source.title(),
                           unicodedata.normalize('NFKD', source.title()),
                           source.lower(),
                           unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # derive the final skeleton map, keeping only "interesting" entries
        _confusemap = {}
        for chno in xrange(0, 0x110000):
            source = unichr(chno)
            dest = _confuse_internal(source, cclasses=['all'], asciify=True)
            dest2 = unicodedata.normalize('NFKC', source.lower())
            if dest != dest2 or (uniord(source) >= 0x10000 and source != dest):
                # sanity check: the skeleton must itself be canonical
                if dest != unicodedata.normalize('NFC', dest.lower()):
                    raise ValueError(repr(source) + ': ' + repr(dest))
                _confusemap[source] = dest
        confusemap = _confusemap
    finally:
        _init_in_progress = False
from BTL.reactor_magic import noSignals, reactor, is_iocpreactor # as far as I know, we work with twisted 1.3 and >= 2.0 #import twisted.copyright #if twisted.copyright.version.split('.') < 2: # raise ImportError(_("RawServer_twisted requires twisted 2.0.0 or greater")) from twisted.protocols.policies import TimeoutMixin from twisted.internet.protocol import DatagramProtocol, Protocol, ClientFactory from twisted.internet.threads import deferToThread from twisted.internet import error, interfaces from BTL.ConnectionRateLimitReactor import connectionRateLimitReactor letters = set(string.letters) main_thread = thread.get_ident() rawserver_logger = logging.getLogger('RawServer') NOLINGER = struct.pack('ii', 1, 0) # python sucks. SHUT_RD = getattr(socket, 'SHUT_RD', 0) SHUT_WR = getattr(socket, 'SHUT_WR', 1) # this is a base class for all the callbacks the server could use class Handler(object): # called when the connection is being attempted def connection_starting(self, addr): pass
0x0441: "sw",  # Swahili
0x0430: "sx",  # Sutu
0x0449: "ta",  # Tamil
0x041E: "th",  # Thai
0x0432: "tn",  # Setsuana
0x041F: "tr",  # Turkish
0x0431: "ts",  # Tsonga
0X0444: "tt",  # Tatar
0x0422: "uk",  # Ukrainian
0x0420: "ur",  # Urdu
0x0443: "uz_UZ",  # Uzbek - Latin
0x042A: "vi",  # Vietnamese
0x0434: "xh",  # Xhosa
0x043D: "yi",  # Yiddish
0x0804: "zh_CN",  # Chinese - China
0x0C04: "zh_HK",  # Chinese - Hong Kong S.A.R.
0x1404: "zh_MO",  # Chinese - Macau S.A.R
0x1004: "zh_SG",  # Chinese - Singapore
0x0404: "zh_TW",  # Chinese - Taiwan
0x0435: "zu",  # Zulu
}

if __name__ == '__main__':
    from BTL.obsoletepythonsupport import set
    # sanity check: every language code we ship must have a Windows LCID
    # mapping above; print any that are missing
    internal = set([x.lower() for x in languages])
    windows = set(locale_sucks.values())
    if not windows.issuperset(internal):
        diff = list(internal.difference(windows))
        diff.sort()
        print diff
def _slow_init(confusables_file = None):
    """Build the global confusables tables and confusemap.

    NOTE(review): this appears to be a duplicate of an identically-named
    function elsewhere in this source -- consider consolidating.

    Parses Unicode TR#36 confusables.txt (local share file or the
    unicode.org URL), expands each character's confusable class through
    case/NFKD/IDNA variants, consolidates the 'all' class (three passes),
    then derives confusemap: char -> canonical confusion skeleton.
    Guarded by _init_in_progress against re-entry.
    """
    global confusables, confusemap, _init_in_progress
    if _init_in_progress:
        return
    _init_in_progress = True
    try:
        # locate the data file: alongside the script when run directly...
        if confusables_file is None and __name__ == '__main__':
            try:
                confusables_file = file(os.path.join(os.path.split(sys.argv[0])[0], '../share/confusables.txt'), 'rb')
            except:
                pass
        # ...otherwise fetch it from unicode.org
        if confusables_file is None:
            confusables_file = urllib.urlopen('http://www.unicode.org/reports/tr36/data/confusables.txt', 'rb')
        # parse lines of the form "XXXX ; YYYY ZZZZ ; class # comment"
        for line in confusables_file:
            line = line.decode('utf-8').rstrip()
            try:
                cmd, comment = [ field.strip() for field in line.split('#', 1) ]
                a, b, cclass = [ field.strip() for field in cmd.split(';') ]
                a, b = [ u''.join([ unichr(int(ch, 16)) for ch in field.split() ])
                         for field in (a, b) ]
                confusables[cclass][a] = confusables[cclass].get(a, set()) | set((b,))
            except:
                pass
        confusables_file.close()
        # seed 'all' with case/NFKD/IDNA variants of every codepoint
        for chno in xrange(0, 0x110000):
            source = unichr(chno)
            sources = set([ source,
                            unicodedata.normalize('NFKD', source),
                            source.upper(),
                            unicodedata.normalize('NFKD', source.upper()),
                            source.title(),
                            unicodedata.normalize('NFKD', source.title()),
                            source.lower(),
                            unicodedata.normalize('NFKD', source.lower())])
            try:
                sources = sources | set([ source.encode('idna').decode('idna') ])
            except:
                pass
            # grow the variant set to a fixed point
            while True:
                sources2 = sources
                for source in sources:
                    sources2 = sources2 | confusables['all'].get(source, set([ source ]))
                if sources2 == sources:
                    break
                sources = sources2
            if len(sources) > 1:
                for source in sources:
                    if unilen(source) == 1:
                        confusables['all'][source] = sources
        # fold every specific class into 'all'
        for cclass in confusables:
            if cclass == 'all':
                continue
            for source in confusables[cclass]:
                dest = confusables[cclass][source]
                dest = dest | confusables['all'].get(source, set()) | set([ source ])
                dest2 = dest
                for d in dest2:
                    dest = dest | confusables['all'].get(d, set())
                for d in dest:
                    if unilen(d) == 1:
                        confusables['all'][d] = confusables['all'].get(d, set()) | dest
        # consolidation pass 1 of 3: rebuild 'all' with variants merged in
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([ source,
                            unicodedata.normalize('NFKD', source),
                            source.upper(),
                            unicodedata.normalize('NFKD', source.upper()),
                            source.title(),
                            unicodedata.normalize('NFKD', source.title()),
                            source.lower(),
                            unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # consolidation pass 2 of 3
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([ source,
                            unicodedata.normalize('NFKD', source),
                            source.upper(),
                            unicodedata.normalize('NFKD', source.upper()),
                            source.title(),
                            unicodedata.normalize('NFKD', source.title()),
                            source.lower(),
                            unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # consolidation pass 3 of 3
        confusables['all2'] = {}
        for source in confusables['all']:
            sources = set([ source,
                            unicodedata.normalize('NFKD', source),
                            source.upper(),
                            unicodedata.normalize('NFKD', source.upper()),
                            source.title(),
                            unicodedata.normalize('NFKD', source.title()),
                            source.lower(),
                            unicodedata.normalize('NFKD', source.lower())])
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            dest = set()
            for source2 in sources:
                dest = dest | confusables['all'].get(source2, set())
            sources = sources | dest
            for source2 in sources:
                if unilen(source2) == 1:
                    confusables['all2'][source2] = sources
        confusables['all'] = confusables['all2']
        del confusables['all2']
        # derive the final skeleton map, keeping only "interesting" entries
        _confusemap = {}
        for chno in xrange(0, 0x110000):
            source = unichr(chno)
            dest = _confuse_internal(source, cclasses = [ 'all' ], asciify = True)
            dest2 = unicodedata.normalize('NFKC', source.lower())
            if dest != dest2 or (uniord(source) >= 0x10000 and source != dest):
                # sanity check: the skeleton must itself be canonical
                if dest != unicodedata.normalize('NFC', dest.lower()):
                    raise ValueError(repr(source) + ': ' + repr(dest))
                _confusemap[source] = dest
        confusemap = _confusemap
    finally:
        _init_in_progress = False
def __init__(self, parent, *a, **k):
    """Appearance settings panel: progress-bar style radios with live
    sample gauges, toolbar options, and (debug builds) a theme picker.
    """
    SettingsPanel.__init__(self, parent, *a, **k)

    # widgets
    self.gauge_box = wx.StaticBox(self, label=_("Progress bar style:"))
    self.gauge_sizer = wx.StaticBoxSizer(self.gauge_box, wx.VERTICAL)

    # each radio's .value matches the config value it selects
    self.null_radio = wx.RadioButton(
        self, label=_("&None (just show percent complete)"),
        style=wx.RB_GROUP)
    self.null_radio.value = 0
    self.simple_radio = wx.RadioButton(self,
                                       label=_("&Ordinary progress bar"))
    self.simple_radio.value = 1
    self.simple_sample = self.new_sample(SimpleDownloadGauge, 1)
    self.moderate_radio = wx.RadioButton(self,
                                         label=_("&Detailed progress bar"))
    self.moderate_radio.value = 2
    msg = _("(shows the percentage of complete, transferring, available and missing pieces in the torrent)")
    if not text_wrappable:
        # no native wrapping: insert a newline at the space nearest the middle
        half = len(msg) // 2
        for i in xrange(half):
            if msg[half + i] == ' ':
                msg = msg[:half + i + 1] + '\n' + msg[half + i + 1:]
                break
            elif msg[half - i] == ' ':
                msg = msg[:half - i + 1] + '\n' + msg[half - i + 1:]
                break
    self.moderate_text = ElectroStaticText(self, wx.ID_ANY, msg)
    if text_wrappable:
        self.moderate_text.Wrap(250)
    self.moderate_sample = self.new_sample(ModerateDownloadGauge, 2)
    self.fancy_radio = wx.RadioButton(self, label=_("&Piece bar"))
    self.fancy_radio.value = 3
    self.fancy_text = ElectroStaticText(
        self, wx.ID_ANY,
        _("(shows the status of each piece in the torrent)"))
    if text_wrappable:
        self.fancy_text.Wrap(250)

    # generate random sample data
    # 200 pieces: 80 have, 20 transferring, 5 in each of 19 availability
    # buckets, remainder in bucket 0
    r = set(xrange(200))
    self.sample_data = {}
    for key, count in (('h', 80), ('t', 20)) + tuple([(i, 5) for i in range(19)]):
        self.sample_data[key] = SparseSet()
        for d in random.sample(r, count):
            self.sample_data[key].add(d)
            r.remove(d)
    for d in r:
        self.sample_data[0].add(d)
    self.fancy_sample = self.new_sample(FancyDownloadGauge, 3)

    # sizers
    gauge = wx.TOP | wx.LEFT | wx.RIGHT
    extra = wx.TOP | wx.LEFT | wx.RIGHT | wx.GROW
    self.gauge_sizer.Add(self.null_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.simple_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.simple_sample, flag=extra, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.moderate_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.moderate_sample, flag=extra, border=SPACING)
    self.gauge_sizer.Add(self.moderate_text, flag=extra, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.fancy_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.fancy_sample, flag=extra, border=SPACING)
    self.gauge_sizer.Add(self.fancy_text, flag=extra, border=SPACING)
    self.sizer.AddFirst(self.gauge_sizer, flag=wx.GROW)

    # setup: select the radio matching the saved config value
    self.pb_group = (self.null_radio, self.simple_radio,
                     self.moderate_radio, self.fancy_radio)
    for r in self.pb_group:
        r.Bind(wx.EVT_RADIOBUTTON, self.radio)
        if r.value == wx.the_app.config[self.pb_config_key]:
            r.SetValue(True)
        else:
            r.SetValue(False)

    # toolbar widgets
    self.toolbar_box = wx.StaticBox(self, label=_("Toolbar style:"))
    self.toolbar_text = CheckButton(
        self, _("Show text"), self.settings_window, 'toolbar_text',
        self.settings_window.config['toolbar_text'],
        wx.the_app.reset_toolbar_style)
    self.toolbar_size_text = ElectroStaticText(self, id=wx.ID_ANY,
                                               label=_("Icon size:"))
    self.toolbar_size_choice = wx.Choice(
        self, choices=(_("Small"), _("Normal"), _("Large")))
    self.toolbar_config_to_choice(wx.the_app.config['toolbar_size'])
    self.toolbar_size_choice.Bind(wx.EVT_CHOICE, self.toolbar_choice_to_config)

    # toolbar sizers
    self.toolbar_sizer = HSizer()
    self.toolbar_sizer.AddFirst(self.toolbar_text, flag=wx.ALIGN_CENTER_VERTICAL)
    line = wx.StaticLine(self, id=wx.ID_ANY, style=wx.VERTICAL)
    self.toolbar_sizer.Add(line, flag=wx.ALIGN_CENTER_VERTICAL | wx.GROW)
    self.toolbar_sizer.Add(self.toolbar_size_text, flag=wx.ALIGN_CENTER_VERTICAL)
    self.toolbar_sizer.Add(self.toolbar_size_choice,
                           flag=wx.GROW | wx.ALIGN_TOP, proportion=1)
    self.toolbar_box_sizer = wx.StaticBoxSizer(self.toolbar_box, wx.VERTICAL)
    self.toolbar_box_sizer.Add(self.toolbar_sizer, flag=wx.GROW)
    self.sizer.Add(self.toolbar_box_sizer, flag=wx.GROW)

    if wx.the_app.config['debug']:
        # the T-Word widgets
        self.themes = []
        self.theme_choice = wx.Choice(self, choices=[])
        self.theme_choice.Enable(False)
        self.theme_choice.Bind(wx.EVT_CHOICE, self.set_theme)
        self.restart_hint = ElectroStaticText(
            self, id=wx.ID_ANY,
            label=_("(Changing themes requires restart.)"))
        self.theme_static_box = wx.StaticBox(self, label=_("Theme:"))

        # the T-Word sizers
        self.theme_sizer = VSizer()
        self.theme_sizer.AddFirst(self.theme_choice, flag=wx.GROW | wx.ALIGN_RIGHT)
        self.theme_sizer.Add(self.restart_hint, flag=wx.GROW | wx.ALIGN_RIGHT)
        self.theme_static_box_sizer = wx.StaticBoxSizer(self.theme_static_box,
                                                        wx.VERTICAL)
        self.theme_static_box_sizer.Add(self.theme_sizer, flag=wx.GROW)
        self.sizer.Add(self.theme_static_box_sizer, flag=wx.GROW)

        self.get_themes()
def _rechoke(self): # step 1: # get sorted in order of preference lists of peers # one for downloading torrents, and one for seeding torrents down_pref = [] seed_pref = [] for i, c in enumerate(self.connections): u = c.upload if c.download.have.numfalse == 0 or not u.interested: continue # I cry. if c.download.multidownload.storage.have.numfalse != 0: ## heuristic for downloading torrents if not c.download.is_snubbed(): ## simple download rate based down_pref.append((-c.download.get_rate(), i)) ## ratio based #dr = c.download.get_rate() #ur = max(1, u.get_rate()) #ratio = dr / ur #down_pref.append((-ratio, i)) else: ## heuristic for seeding torrents ## Uoti special ## if c._decrypt is not None: ## seed_pref.append((self.count, u.get_rate(), i)) ## elif (u.unchoke_time > self.count - self.magic_number or ## u.buffer and c.connection.is_flushed()): ## seed_pref.append((u.unchoke_time, u.get_rate(), i)) ## else: ## seed_pref.append((1, u.get_rate(), i)) ## sliding, first pass (see below) r = u.get_rate() if c._decrypt is not None: seed_pref.append((2, r, i)) else: seed_pref.append((1, r, i)) down_pref.sort() seed_pref.sort() #pprint(down_pref) #pprint(seed_pref) down_pref = [ self.connections[i] for junk, i in down_pref ] seed_pref = [ self.connections[i] for junk, junk, i in seed_pref ] max_uploads = self._max_uploads() ## sliding, second pass ## # up-side-down sum for an idea of capacity ## uprate_sum = sum(rates[-max_uploads:]) ## if max_uploads == 0: ## avg_uprate = 0 ## else: ## avg_uprate = uprate_sum / max_uploads ## #print 'avg_uprate', avg_uprate, 'of', max_uploads ## self.extra_slots = max(self.extra_slots - 1, 0) ## if avg_uprate > self.arbitrary_min: ## for r in rates: ## if r < (avg_uprate * 0.80): # magic 80% ## self.extra_slots += 2 ## break ## self.extra_slots = min(len(seed_pref), self.extra_slots) ## max_uploads += self.extra_slots ## #print 'plus', self.extra_slots # step 2: # split the peer lists by a ratio to fill the available upload slots 
d_uploads = max(1, int(round(max_uploads * 0.70))) s_uploads = max(1, int(round(max_uploads * 0.30))) #print 'original', 'ds', d_uploads, 'us', s_uploads extra = max(0, d_uploads - len(down_pref)) if extra > 0: s_uploads += extra d_uploads -= extra extra = max(0, s_uploads - len(seed_pref)) if extra > 0: s_uploads -= extra d_uploads = min(d_uploads + extra, len(down_pref)) #print 'ds', d_uploads, 'us', s_uploads down_pref = down_pref[:d_uploads] seed_pref = seed_pref[:s_uploads] preferred = set(down_pref) preferred.update(seed_pref) # step 3: # enforce unchoke states count = 0 to_choke = [] for i, c in enumerate(self.connections): u = c.upload if c in preferred: u.unchoke(self.count) count += 1 else: to_choke.append(c) # step 4: # enforce choke states and handle optimistics optimistics = max(self.config['min_uploads'], max_uploads - len(preferred)) #print 'optimistics', optimistics for c in to_choke: u = c.upload if c.download.have.numfalse == 0: # keep seeds choked, out of superstition u.unchoke(self.count) elif count >= optimistics: u.choke() else: # this one's optimistic u.unchoke(self.count) if u.interested: count += 1
def __init__(self, multitorrent):
    """Butler that tracks which torrents it has started.

    multitorrent: the Multitorrent instance handed to the
        TorrentButler base class.
    """
    # Let the base class wire itself to the multitorrent first.
    TorrentButler.__init__(self, multitorrent)
    # Torrents this butler has already started; populated elsewhere.
    self.started_torrents = set()
def _rechoke(self):
    """Recompute the choke/unchoke state of every connection.

    Builds two preference lists (peers on downloading torrents, peers
    on seeding torrents), splits the available upload slots roughly
    70/30 between them (with spill-over), unchokes the preferred
    peers, hands out a few optimistic unchokes, and chokes the rest.
    """
    # step 1:
    # get sorted in order of preference lists of peers
    # one for downloading torrents, and one for seeding torrents
    down_pref = []
    seed_pref = []
    for i, c in enumerate(self.connections):
        u = c.upload
        # Seeds (nothing missing) and uninterested peers never get a slot.
        if c.download.have.numfalse == 0 or not u.interested:
            continue
        # I cry.
        if c.download.multidownload.storage.have.numfalse != 0:
            ## heuristic for downloading torrents
            if not c.download.is_snubbed():
                ## simple download rate based
                down_pref.append((-c.download.get_rate(), i))
                ## ratio based
                #dr = c.download.get_rate()
                #ur = max(1, u.get_rate())
                #ratio = dr / ur
                #down_pref.append((-ratio, i))
        else:
            ## heuristic for seeding torrents
            ## Uoti special
            ## if c._decrypt is not None:
            ##     seed_pref.append((self.count, u.get_rate(), i))
            ## elif (u.unchoke_time > self.count - self.magic_number or
            ##       u.buffer and c.connection.is_flushed()):
            ##     seed_pref.append((u.unchoke_time, u.get_rate(), i))
            ## else:
            ##     seed_pref.append((1, u.get_rate(), i))
            ## sliding, first pass (see below)
            # Encrypted connections sort into bucket 2, after bucket 1.
            r = u.get_rate()
            if c._decrypt is not None:
                seed_pref.append((2, r, i))
            else:
                seed_pref.append((1, r, i))
    down_pref.sort()
    seed_pref.sort()
    #pprint(down_pref)
    #pprint(seed_pref)
    down_pref = [self.connections[i] for junk, i in down_pref]
    seed_pref = [self.connections[i] for junk, junk, i in seed_pref]
    max_uploads = self._max_uploads()
    ## sliding, second pass (disabled)
    ## # up-side-down sum for an idea of capacity
    ## uprate_sum = sum(rates[-max_uploads:])
    ## if max_uploads == 0:
    ##     avg_uprate = 0
    ## else:
    ##     avg_uprate = uprate_sum / max_uploads
    ## #print 'avg_uprate', avg_uprate, 'of', max_uploads
    ## self.extra_slots = max(self.extra_slots - 1, 0)
    ## if avg_uprate > self.arbitrary_min:
    ##     for r in rates:
    ##         if r < (avg_uprate * 0.80): # magic 80%
    ##             self.extra_slots += 2
    ##             break
    ## self.extra_slots = min(len(seed_pref), self.extra_slots)
    ## max_uploads += self.extra_slots
    ## #print 'plus', self.extra_slots
    # step 2:
    # split the peer lists by a ratio to fill the available upload slots
    d_uploads = max(1, int(round(max_uploads * 0.70)))
    s_uploads = max(1, int(round(max_uploads * 0.30)))
    #print 'original', 'ds', d_uploads, 'us', s_uploads
    # Slots one side cannot fill spill over to the other side.
    extra = max(0, d_uploads - len(down_pref))
    if extra > 0:
        s_uploads += extra
        d_uploads -= extra
    extra = max(0, s_uploads - len(seed_pref))
    if extra > 0:
        s_uploads -= extra
        d_uploads = min(d_uploads + extra, len(down_pref))
    #print 'ds', d_uploads, 'us', s_uploads
    down_pref = down_pref[:d_uploads]
    seed_pref = seed_pref[:s_uploads]
    preferred = set(down_pref)
    preferred.update(seed_pref)
    # step 3:
    # enforce unchoke states
    count = 0
    to_choke = []
    for i, c in enumerate(self.connections):
        u = c.upload
        if c in preferred:
            u.unchoke(self.count)
            count += 1
        else:
            to_choke.append(c)
    # step 4:
    # enforce choke states and handle optimistics
    count = 0
    optimistics = max(self.config['min_uploads'],
                      max_uploads - len(preferred))
    #print 'optimistics', optimistics
    for c in to_choke:
        u = c.upload
        if c.download.have.numfalse == 0:
            # Peer is a seed: keep it choked, it can never reciprocate.
            u.choke()
        elif count >= optimistics:
            u.choke()
        else:
            # this one's optimistic
            u.unchoke(self.count)
            if u.interested:
                count += 1
def __init__(self, multitorrent, rawserver, test_new_version=None,
             test_current_version=None):
    """Butler that periodically checks for and stages client updates.

    multitorrent: the Multitorrent instance passed to TorrentButler.
    rawserver: used to schedule the periodic version checks (add_task).
    test_new_version: optional version string; forces debug mode and
        makes _get_available always report this version as available.
    test_current_version: optional version string; forces debug mode
        and pretends the client is currently running this version.
    """
    TorrentButler.__init__(self, multitorrent)
    self.runs = 0                    # number of update checks performed
    self.rawserver = rawserver
    self.estate = set()
    self.old_updates = set()
    self.log_root = "core.AutoUpdateButler"
    self.logger = logging.getLogger(self.log_root)
    self.installable_version = None  # update fetched and ready to install
    self.available_version = None    # update known to exist upstream
    self.current_version = Version.from_str(version)
    self.debug_mode = DEBUG
    # Check once a day normally; every 10 seconds in debug mode.
    self.delay = 60 * 60 * 24
    if self.debug_mode:
        self.delay = 10
    if test_new_version:
        test_new_version = Version.from_str(test_new_version)
        self.debug_mode = True
        self.debug('__init__() turning debug on')
        def _hack_get_available(url):
            # Test hook: always report the forced version as available.
            self.debug('_hack_get_available() run#%d: returning %s' %
                       (self.runs, str(test_new_version)))
            return test_new_version
        self._get_available = _hack_get_available
    if test_current_version:
        self.debug_mode = True
        self.current_version = Version.from_str(test_current_version)

    self.version_site = version_host
    # The version URL format is:
    # http:// VERSION_SITE / OS_NAME / (LEGACY /) BETA or STABLE
    # LEGACY means that the user is on a version of an OS that has
    # been deemed "legacy", and as such the latest client version
    # for their OS version may be different than the latest client
    # version for the OS in general.  For example, if we are going
    # to roll a version that requires WinXP/2K or greater, or a
    # version that requires OSX 10.5 or greater, we may maintain
    # an older version for Win98 or OSX 10.4 in OS_NAME/legacy/.
    if os.name == 'nt':
        self.version_site += 'win32/'
        if os_version not in ('XP', '2000', '2003'):
            self.version_site += 'legacy/'
    elif osx:
        self.version_site += 'osx/'
    elif self.debug_mode:
        # Other platforms get a URL only in debug mode; pretend win32
        # so the check can be exercised during testing.
        self.version_site += 'win32/'
    self.installer_dir = self._calc_installer_dir()
    # kick it off: schedule the first check to run immediately.
    self.rawserver.add_task(0, self.check_version)
def update_status(self):
    """Update torrent information based on the results of making a
    status request.

    Written as a deferred-generator: each `yield df` hands a deferred
    to the driving scheduler, which resumes this generator once the
    result is ready to be read with df.getResult().  The final yield
    hands (average_completion, all_completed, global_stats) back to
    the caller.
    """
    df = self.multitorrent.get_torrents()
    yield df
    torrents = df.getResult()

    infohashes = set()
    au_torrents = {}  # auto-update torrents, keyed by infohash
    for torrent in torrents:
        torrent = self._thread_proxy(torrent)
        infohashes.add(torrent.metainfo.infohash)
        if torrent.metainfo.infohash not in self.torrents:
            if self.config.get("show_hidden_torrents") or not torrent.hidden:
                # create new torrent widget
                to = self.new_displayed_torrent(torrent)
        if torrent.is_auto_update:
            au_torrents[torrent.metainfo.infohash] = torrent

    for infohash, torrent in copy(self.torrents).iteritems():
        # remove nonexistent torrents
        if infohash not in infohashes:
            self._do_remove_torrent(infohash)

    total_completion = 0
    total_bytes = 0
    for infohash, torrent in copy(self.torrents).iteritems():
        # update existing torrents
        df = self.multitorrent.torrent_status(infohash,
                                              torrent.wants_peers(),
                                              torrent.wants_files())
        yield df
        try:
            core_torrent, statistics = df.getResult()
        except UnknownInfohash:
            # looks like it's gone now
            if infohash in self.torrents:
                self._do_remove_torrent(infohash)
        else:
            # the infohash might have been removed from torrents
            # while we were yielding above, so we need to check
            if infohash in self.torrents:
                core_torrent = self._thread_proxy(core_torrent)
                torrent.update(core_torrent, statistics)
                self.update_torrent(torrent)
                if statistics["fractionDone"] is not None:
                    amount_done = statistics["fractionDone"] * torrent.metainfo.total_bytes
                    total_completion += amount_done
                    total_bytes += torrent.metainfo.total_bytes

    # all_completed stays False when there are no torrents at all.
    all_completed = False
    if total_bytes == 0:
        average_completion = 0
    else:
        average_completion = total_completion / total_bytes
        if total_completion == total_bytes:
            all_completed = True

    df = self.multitorrent.auto_update_status()
    yield df
    available_version, installable_version, delay = df.getResult()
    if available_version is not None:
        if installable_version is None:
            # Update known but not downloaded yet: just notify.
            self.notify_of_new_version(available_version)
        else:
            # Update downloaded: arrange to launch it when we exit.
            if self.installer_to_launch_at_exit is None:
                atexit.register(self.launch_installer_at_exit)
            if installable_version not in au_torrents:
                df = self.multitorrent.get_torrent(installable_version)
                yield df
                torrent = df.getResult()
                torrent = ThreadProxy(torrent, self.gui_wrap)
            else:
                torrent = au_torrents[installable_version]
            self.installer_to_launch_at_exit = torrent.working_path
            # Rate-limit the restart prompt to once per `delay`.
            if bttime() > self.next_autoupdate_nag:
                self.prompt_for_quit_for_new_version(available_version)
                self.next_autoupdate_nag = bttime() + delay

    def get_global_stats(mt):
        # Runs with direct access to the core multitorrent object
        # (via call_with_obj below); gathers aggregate statistics.
        stats = {}
        u, d = mt.get_total_rates()
        stats["total_uprate"] = Rate(u)
        stats["total_downrate"] = Rate(d)
        u, d = mt.get_total_totals()
        stats["total_uptotal"] = Size(u)
        stats["total_downtotal"] = Size(d)
        torrents = mt.get_visible_torrents()
        running = mt.get_visible_running()
        stats["num_torrents"] = len(torrents)
        stats["num_running_torrents"] = len(running)
        stats["num_connections"] = 0
        for t in torrents:
            stats["num_connections"] += t.get_num_connections()
        try:
            stats["avg_connections"] = stats["num_connections"] / stats["num_running_torrents"]
        except ZeroDivisionError:
            stats["avg_connections"] = 0
        stats["avg_connections"] = "%.02f" % stats["avg_connections"]
        return stats

    df = self.multitorrent.call_with_obj(get_global_stats)
    yield df
    global_stats = df.getResult()
    yield average_completion, all_completed, global_stats
from BTL.reactor_magic import noSignals, reactor, is_iocpreactor # as far as I know, we work with twisted 1.3 and >= 2.0 #import twisted.copyright #if twisted.copyright.version.split('.') < 2: # raise ImportError(_("RawServer_twisted requires twisted 2.0.0 or greater")) from twisted.protocols.policies import TimeoutMixin from twisted.internet.protocol import DatagramProtocol, Protocol, ClientFactory from twisted.internet.threads import deferToThread from twisted.internet import error, interfaces from BTL.ConnectionRateLimitReactor import connectionRateLimitReactor letters = set(string.letters) main_thread = thread.get_ident() rawserver_logger = logging.getLogger('RawServer') NOLINGER = struct.pack('ii', 1, 0) # python sucks. SHUT_RD = getattr(socket, 'SHUT_RD', 0) SHUT_WR = getattr(socket, 'SHUT_WR', 1) # this is a base class for all the callbacks the server could use class Handler(object): # called when the connection is being attempted def connection_starting(self, addr):
def __init__(self, parent, *a, **k):
    """Build the appearance settings panel: progress-bar style radio
    group (with live sample gauges), toolbar style controls, and --
    in debug builds only -- a theme chooser."""
    SettingsPanel.__init__(self, parent, *a, **k)

    # widgets
    self.gauge_box = wx.StaticBox(self, label=_("Progress bar style:"))
    self.gauge_sizer = wx.StaticBoxSizer(self.gauge_box, wx.VERTICAL)

    # Each radio button carries a .value matching the config integer
    # stored under self.pb_config_key.
    self.null_radio = wx.RadioButton(self,
                                     label=_("&None (just show percent complete)"),
                                     style=wx.RB_GROUP)
    self.null_radio.value = 0
    self.simple_radio = wx.RadioButton(self,
                                       label=_("&Ordinary progress bar"))
    self.simple_radio.value = 1
    self.simple_sample = self.new_sample(SimpleDownloadGauge, 1)
    self.moderate_radio = wx.RadioButton(self,
                                         label=_("&Detailed progress bar"))
    self.moderate_radio.value = 2
    msg = _("(shows the percentage of complete, transferring, available and missing pieces in the torrent)")
    if not text_wrappable:
        # No native wrap support: insert a newline near the middle of
        # the message, at the closest space to the midpoint.
        half = len(msg)//2
        for i in xrange(half):
            if msg[half+i] == ' ':
                msg = msg[:half+i+1] + '\n' + msg[half+i+1:]
                break
            elif msg[half-i] == ' ':
                msg = msg[:half-i+1] + '\n' + msg[half-i+1:]
                break
    self.moderate_text = ElectroStaticText(self, wx.ID_ANY, msg)
    if text_wrappable:
        self.moderate_text.Wrap(250)
    self.moderate_sample = self.new_sample(ModerateDownloadGauge, 2)
    self.fancy_radio = wx.RadioButton(self, label=_("&Piece bar"))
    self.fancy_radio.value = 3
    self.fancy_text = ElectroStaticText(self, wx.ID_ANY,
                                        _("(shows the status of each piece in the torrent)"))
    if text_wrappable:
        self.fancy_text.Wrap(250)

    # generate random sample data for the fancy (piece bar) sample:
    # 80 pieces "have", 20 transferring, and 19 availability buckets
    # of 5 pieces each; whatever is left over goes into bucket 0.
    r = set(xrange(200))
    self.sample_data = {}
    for key, count in (('h',80), ('t',20)) + tuple([(i,5) for i in range(19)]):
        self.sample_data[key] = SparseSet()
        for d in random.sample(r, count):
            self.sample_data[key].add(d)
            r.remove(d)
    for d in r:
        self.sample_data[0].add(d)
    self.fancy_sample = self.new_sample(FancyDownloadGauge, 3)

    # sizers
    gauge = wx.TOP|wx.LEFT|wx.RIGHT
    extra = wx.TOP|wx.LEFT|wx.RIGHT|wx.GROW
    self.gauge_sizer.Add(self.null_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.simple_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.simple_sample, flag=extra, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.moderate_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.moderate_sample, flag=extra, border=SPACING)
    self.gauge_sizer.Add(self.moderate_text, flag=extra, border=SPACING)
    self.gauge_sizer.AddSpacer((SPACING, SPACING))
    self.gauge_sizer.Add(self.fancy_radio, flag=gauge, border=SPACING)
    self.gauge_sizer.Add(self.fancy_sample, flag=extra, border=SPACING)
    self.gauge_sizer.Add(self.fancy_text, flag=extra, border=SPACING)
    self.sizer.AddFirst(self.gauge_sizer, flag=wx.GROW)

    # setup: select the radio button matching the saved config value.
    self.pb_group = (self.null_radio, self.simple_radio,
                     self.moderate_radio, self.fancy_radio)
    for r in self.pb_group:
        r.Bind(wx.EVT_RADIOBUTTON, self.radio)
        if r.value == wx.the_app.config[self.pb_config_key]:
            r.SetValue(True)
        else:
            r.SetValue(False)

    # toolbar widgets
    self.toolbar_box = wx.StaticBox(self, label=_("Toolbar style:"))
    self.toolbar_text = CheckButton(self, _("Show text"),
                                    self.settings_window,
                                    'toolbar_text',
                                    self.settings_window.config['toolbar_text'],
                                    wx.the_app.reset_toolbar_style)
    self.toolbar_size_text = ElectroStaticText(self, id=wx.ID_ANY,
                                               label=_("Icon size:"))
    self.toolbar_size_choice = wx.Choice(self,
                                         choices=(_("Small"), _("Normal"), _("Large")))
    self.toolbar_config_to_choice(wx.the_app.config['toolbar_size'])
    self.toolbar_size_choice.Bind(wx.EVT_CHOICE,
                                  self.toolbar_choice_to_config)

    # toolbar sizers
    self.toolbar_sizer = HSizer()
    self.toolbar_sizer.AddFirst(self.toolbar_text,
                                flag=wx.ALIGN_CENTER_VERTICAL)
    line = wx.StaticLine(self, id=wx.ID_ANY, style=wx.VERTICAL)
    self.toolbar_sizer.Add(line,
                           flag=wx.ALIGN_CENTER_VERTICAL|wx.GROW)
    self.toolbar_sizer.Add(self.toolbar_size_text,
                           flag=wx.ALIGN_CENTER_VERTICAL)
    self.toolbar_sizer.Add(self.toolbar_size_choice,
                           flag=wx.GROW|wx.ALIGN_TOP, proportion=1)
    self.toolbar_box_sizer = wx.StaticBoxSizer(self.toolbar_box,
                                               wx.VERTICAL)
    self.toolbar_box_sizer.Add(self.toolbar_sizer, flag=wx.GROW)
    self.sizer.Add(self.toolbar_box_sizer, flag=wx.GROW)

    # Theme selection is only exposed in debug builds.
    if wx.the_app.config['debug']:
        # the T-Word widgets
        self.themes = []
        self.theme_choice = wx.Choice(self, choices=[])
        self.theme_choice.Enable(False)
        self.theme_choice.Bind(wx.EVT_CHOICE, self.set_theme)
        self.restart_hint = ElectroStaticText(self, id=wx.ID_ANY,
                                              label=_("(Changing themes requires restart.)"))
        self.theme_static_box = wx.StaticBox(self, label=_("Theme:"))

        # the T-Word sizers
        self.theme_sizer = VSizer()
        self.theme_sizer.AddFirst(self.theme_choice,
                                  flag=wx.GROW|wx.ALIGN_RIGHT)
        self.theme_sizer.Add(self.restart_hint,
                             flag=wx.GROW|wx.ALIGN_RIGHT)
        self.theme_static_box_sizer = wx.StaticBoxSizer(self.theme_static_box,
                                                        wx.VERTICAL)
        self.theme_static_box_sizer.Add(self.theme_sizer, flag=wx.GROW)
        self.sizer.Add(self.theme_static_box_sizer, flag=wx.GROW)
        # Populate the theme list asynchronously.
        self.get_themes()
def __init__(self, config, storage, rm, urlage, picker, numpieces,
             finished, errorfunc, kickfunc, banfunc, get_downrate,
             micropayments=False):
    """Coordinate download state across all peer connections.

    config: option dict (reads 'download_chunk_size', 'snub_time',
        and, when micropayments is on, the 'micropayment_*' paths).
    storage/rm/urlage/picker: collaborating core objects, stored as-is.
    numpieces: number of pieces in the torrent.
    finished: callback invoked on completion (stored, not called here).
    errorfunc/kickfunc/banfunc/get_downrate: callbacks stored for
        later use by the download machinery.
    micropayments: when True, load/validate the PEM certificates and
        keys named in config.
    """
    self.config = config
    self.storage = storage
    self.rm = rm
    self.urlage = urlage
    self.picker = picker
    self.errorfunc = errorfunc
    self.rerequester = None
    self.entered_endgame = False
    self.connection_manager = None
    self.chunksize = config['download_chunk_size']
    self.numpieces = numpieces
    self.finished = finished
    self.snub_time = config['snub_time']
    self.kickfunc = kickfunc
    self.banfunc = banfunc
    self.get_downrate = get_downrate
    self.downloads = []
    self.perip = {}
    self.bad_peers = {}
    self.discarded_bytes = 0
    self.useful_received_listeners = set()
    self.raw_received_listeners = set()
    self.micropayments = micropayments  # boolean on/off
    self.key_rewards = {}  # key rewards we use to "pay" uploaders
    # first key is peerid, then [(idx1, offset1, len1), (idx2, offset2, len2)]
    self.waiting_for_reward = {}
    # payment key hashes received from the tracker
    self.payment_key_hash_cache = {}
    self.private_key = None
    self.public_key = None
    self.certificate = None
    self.pk_tools = None
    self.public_key_tracker = None

    if micropayments:
        # load keys and check certificates
        log("micropayments=" + str(micropayments))
        # check if certificates are OK
        cert = self.config["micropayment_certificate"]
        self.ca_dir = self.config["micropayment_trusted_ca_dir"]
        self.pk_tools = PKTools(self.ca_dir, config['save_incomplete_in'])
        if not self.pk_tools.validate_certificate(cert):
            log("invalid certificates")
            # NOTE(review): this early return leaves the object
            # half-constructed (piece_states, all_requests, etc. are
            # never set), so later attribute access will fail.
            # Preserved as-is; confirm intent before changing.
            return
        else:
            log("valid certificates")
        # BUGFIX: the original passed anonymous open() handles to the
        # PEM parsers and never closed them (file descriptor leak);
        # close each one deterministically (try/finally, py2.4-safe).
        f = open(self.config["micropayment_certificate"])
        try:
            self.certificate = parse_PEM_certificate(f)
        finally:
            f.close()
        self.public_key = self.certificate.publicKey
        f = open(self.config["micropayment_private_key"])
        try:
            self.private_key = parse_PEM_private_key(f)
        finally:
            f.close()
        log("cert tracker filename=%s:" %
            (self.config["micropayment_tracker_certificate"]))
        f = open(self.config["micropayment_tracker_certificate"])
        try:
            cert_tracker = parse_PEM_certificate(f)
        finally:
            f.close()
        self.public_key_tracker = cert_tracker.publicKey

    # piece_states tracks, per piece, which "bucket" it currently
    # occupies; everything starts in bucket 0.
    if SPARSE_SET:
        self.piece_states = PieceSetBuckets()
        nothing = SparseSet()
        nothing.add(0, self.numpieces)
        self.piece_states.buckets.append(nothing)
        # I hate this
        nowhere = [(i, 0) for i in xrange(self.numpieces)]
        self.piece_states.place_in_buckets = dict(nowhere)
    else:
        typecode = resolve_typecode(self.numpieces)
        self.piece_states = SortedPieceBuckets(typecode)
        nothing = array.array(typecode, range(self.numpieces))
        self.piece_states.buckets.append(nothing)
        # I hate this
        nowhere = [(i, (0, i)) for i in xrange(self.numpieces)]
        self.piece_states.place_in_buckets = dict(nowhere)
    self.last_update = 0
    self.all_requests = set()