    def __init__(self, multitorrent, rawserver,
                 test_new_version=None, test_current_version=None):
        TorrentButler.__init__(self, multitorrent)

        self.runs = 0

        self.rawserver = rawserver
        self.estate = set()
        self.old_updates = set()

        self.log_root = "core.AutoUpdateButler"
        self.logger = logging.getLogger(self.log_root)

        self.installable_version = None
        self.available_version   = None

        self.current_version = Version.from_str(version)

        self.debug_mode = DEBUG

        self.delay = 60*60*24
        if self.debug_mode:
            self.delay = 10

        if test_new_version:
            test_new_version = Version.from_str(test_new_version)
            self.debug_mode = True
            self.debug('__init__() turning debug on')
            def _hack_get_available(url):
                self.debug('_hack_get_available() run#%d: returning %s' % (self.runs, str(test_new_version)))
                return test_new_version
            self._get_available = _hack_get_available
        if test_current_version:
            self.debug_mode = True
            self.current_version = Version.from_str(test_current_version)

        self.version_site = version_host
        # The version URL format is:
        # http:// VERSION_SITE / OS_NAME / (LEGACY /) BETA or STABLE

        # LEGACY means that the user is on a version of an OS that has
        # been deemed "legacy", and as such the latest client version
        # for their OS version may be different than the latest client
        # version for the OS in general.  For example, if we are going
        # to roll a version that requires WinXP/2K or greater, or a
        # version that requires OSX 10.5 or greater, we may maintain
        # an older version for Win98 or OSX 10.4 in OS_NAME/legacy/.

        if os.name == 'nt':
            self.version_site += 'win32/'
            if os_version not in ('XP', '2000', '2003'):
                self.version_site += 'legacy/'
        elif osx:
            self.version_site += 'osx/'
        elif self.debug_mode:
            self.version_site += 'win32/'

        self.installer_dir = self._calc_installer_dir()
        # kick it off
        self.rawserver.add_task(0, self.check_version)
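
The comment above describes the layout http:// VERSION_SITE / OS_NAME / (LEGACY /) BETA-or-STABLE. A minimal sketch of composing such a URL, for illustration only; the helper name and the literal 'BETA'/'STABLE' leaf names are assumptions, not the actual check_version() logic:

def _example_version_url(version_host, os_name, legacy=False, beta=False):
    # Hypothetical helper mirroring the documented layout; not part of
    # AutoUpdateButler.  version_host is assumed to end in '/', like
    # version_site above.
    url = version_host + os_name + '/'
    if legacy:
        url += 'legacy/'
    if beta:
        url += 'BETA'
    else:
        url += 'STABLE'
    return url

# _example_version_url('http://version.example.com/', 'win32', legacy=True)
# gives 'http://version.example.com/win32/legacy/STABLE'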
    def _affect_rate(self, type, std, max_std, rate, set):
        rate = self._method_stddev(type, std, max_std, rate)

        rock_bottom = False
        if rate <= 4096:
            if debug:
                print "Rock bottom"
            rock_bottom = True
            rate = 4096
    
        set(int(rate))

        return rock_bottom
    def started(self, torrent):
        """Only run the most recently added torrent"""
        if self.butles(torrent):
            removable = self.estate - set([torrent.infohash])
            for i in removable:
                self.estate.discard(i)
                self.multitorrent.remove_torrent(i, del_files=True)
Example #4
def parsedir(directory, parsed, files, blocked, errfunc,
             include_metainfo=True):
    """Recurses breadth-first starting from the passed 'directory'
       looking for .torrrent files.  Stops recursing in any given
       branch at the first depth that .torrent files are encountered.

       The directory, parsed, files, and blocked arguments are passed
       from the previous iteration of parsedir.

       @param directory: root of the breadth-first search for .torrent files.
       @param parsed: dict mapping infohash to ConvertedMetainfo.
       @param files: dict mapping path -> [(modification time, size), infohash]
       @param blocked: dict used as set.  keys are list of paths of files
          that were not parsed on a prior call to parsedir for some reason.
          Valid reasons are that the .torrent file is unparseable or that a
          torrent with a matching infohash is already in the parsed set.
       @param errfunc: error-reporting callback.
       @param include_metainfo:
       @return: The tuple (new parsed, new files, new blocked, added, removed)
          where 'new parsed', 'new files', and 'new blocked' are updated
          versions of 'parsed', 'files', and 'blocked' respectively. 'added'
          and 'removed' contain the changes made to the first three members
          of the tuple.  'added' and 'removed' are dicts mapping from
          infohash on to the same torrent-specific info dict that is in
          or was in parsed.
       """
    
    if NOISY:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}          # maps path -> [(modification time, size),infohash]
    new_blocked = set()
    while dirs_to_check:    # first, recurse directories and gather torrents
        directory = dirs_to_check.pop()
        newtorrents = False
        try:
            dir_contents = os.listdir(directory)
        except (IOError, OSError), e:
            errfunc(_("Could not read directory ") + directory)
            continue
        for f in dir_contents:
            if f.endswith('.torrent'):
                newtorrents = True
                p = os.path.join(directory, f)
                try:
                    new_files[p] = [(os.path.getmtime(p), os.path.getsize(p)), 0]
                except (IOError, OSError), e:
                    errfunc(_("Could not stat ") + p + " : " + unicode(e.args[0]))
    def __init__(self, rawserver):

        self.rawserver = rawserver
        old_connectionMade = rawserver.connectionMade
        def connectionMade(s):
            if rawserver.connections == 0:
                self._first_connection()
            old_connectionMade(s)
        assert not hasattr(rawserver, "internet_watcher"), \
                "rawserver already has an internet watcher installed"
        rawserver.connectionMade = connectionMade
        rawserver.internet_watcher = self

        self.subscribers = set()
        self.internet_watcher = task.LoopingCall(self._internet_watch)
        self.internet_watcher.start(5)
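
The constructor above installs itself by wrapping rawserver.connectionMade and then polling every 5 seconds with a Twisted LoopingCall. The wrap-and-delegate idiom on its own, reduced to a self-contained sketch (the class and names below are stand-ins, not BitTorrent's RawServer):

class _ExampleServer(object):
    def __init__(self):
        self.connections = 0
    def connectionMade(self, s):
        self.connections += 1

def _watch_first_connection(server, on_first):
    # Remember the original handler, interpose our check, then delegate.
    old_connectionMade = server.connectionMade
    def connectionMade(s):
        if server.connections == 0:
            on_first()
        old_connectionMade(s)
    server.connectionMade = connectionMade

# _watch_first_connection(_ExampleServer(), lambda: None) installs the hook;
# the first connectionMade() call triggers on_first before delegating.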
    def _collect_nodes(self, local_ips):
        addrs = self.get_remote_endpoints()
        
        ips = set()
        for (ip, port) in addrs:
            if ip is not None and ip != "0.0.0.0" and ip not in local_ips:
                assert isinstance(ip, str)
                assert isinstance(port, int)
                ips.add(ip)
        
        self.rttmonitor.set_nodes_restart(ips)

        delay = 5
        if len(ips) > 0:
            delay = 300
    
        self.external_add_task(delay, self._collect_nodes, local_ips)
Example #7
    def _collect_nodes(self):
        addrs = self.get_remote_endpoints()
        print "_collect_nodes: addrs=", addrs

        local_ips = get_host_ips()

        ips = set()
        for (ip, port) in addrs:
            if ip is not None and ip != "0.0.0.0" and ip not in local_ips:
                ips.add(ip)

        self.rttmonitor.set_nodes_restart(ips)

        delay = 5
        if len(ips) > 0:
            delay = 300

        self.add_task(delay, self._collect_nodes)
Example #9
    def __init__(self, config, storage, urlage, picker, numpieces,
                 finished, total_downmeasure, downmeasure, measurefunc,
                 kickfunc, banfunc):
        self.config = config
        self.storage = storage
        self.urlage = urlage
        self.picker = picker
        self.rerequester = None
        self.connection_manager = None
        self.chunksize = config['download_slice_size']
        self.total_downmeasure = total_downmeasure
        self.downmeasure = downmeasure
        self.numpieces = numpieces
        self.finished = finished
        self.snub_time = config['snub_time']
        self.measurefunc = measurefunc
        self.kickfunc = kickfunc
        self.banfunc = banfunc
        self.downloads = []
        self.perip = {}
        self.bad_peers = {}
        self.discarded_bytes = 0
        self.burst_avg = 0

        self.piece_states = PieceSetBuckets()
        nothing = SparseSet(xrange(self.numpieces))
        self.piece_states.buckets.append(nothing)
        # I hate this
        nowhere = [(i, 0) for i in xrange(self.numpieces)]
        self.piece_states.place_in_buckets = dict(nowhere)
        
        self.last_update = 0
        indexes = self.storage.inactive_requests.keys()
        indexes.sort()
        self.active_requests = SparseSet(indexes)
        self.all_requests = set()
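
The last block above seeds the availability buckets: bucket 0 starts out holding every piece (seen on zero peers so far) and place_in_buckets records which bucket each piece currently sits in. A toy model of that bookkeeping with plain sets and dicts; it mirrors the idea only and is not the real PieceSetBuckets/SparseSet API:

def _example_seed_buckets(numpieces):
    buckets = [set(xrange(numpieces))]                 # bucket 0: availability 0
    place_in_buckets = dict((i, 0) for i in xrange(numpieces))
    return buckets, place_in_buckets

def _example_piece_seen(buckets, place_in_buckets, piece):
    # A peer announced it has 'piece': move it up one availability bucket.
    old = place_in_buckets[piece]
    buckets[old].discard(piece)
    if old + 1 == len(buckets):
        buckets.append(set())
    buckets[old + 1].add(piece)
    place_in_buckets[piece] = old + 1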
Example #10
    def update_status(self):
        """Update torrent information based on the results of making a
        status request."""
        df = self.multitorrent.get_torrents()
        yield df
        torrents = df.getResult()

        infohashes = set()
        au_torrents = {}

        for torrent in torrents:
            torrent = ThreadProxy(torrent, self.gui_wrap)
            infohashes.add(torrent.metainfo.infohash)
            if (not torrent.hidden and
                torrent.metainfo.infohash not in self.torrents):
                # create new torrent widget
                to = self.new_displayed_torrent(torrent)

            if torrent.is_auto_update:
                au_torrents[torrent.metainfo.infohash] = torrent

        for infohash, torrent in copy(self.torrents).iteritems():
            # remove nonexistent torrents
            if infohash not in infohashes:
                self.torrents.pop(infohash)
                self.torrent_removed(infohash)

        total_completion = 0
        total_bytes = 0

        for infohash, torrent in copy(self.torrents).iteritems():
            # update existing torrents
            df = self.multitorrent.torrent_status(infohash,
                                                  torrent.wants_peers(),
                                                  torrent.wants_files()
                                                  )
            yield df
            try:
                core_torrent, statistics = df.getResult()
            except UnknownInfohash:
                # looks like it's gone now
                if infohash in self.torrents:
                    self.torrents.pop(infohash)
                    self.torrent_removed(infohash)
            else:
                # the infohash might have been removed from torrents
                # while we were yielding above, so we need to check
                if infohash in self.torrents:
                    core_torrent = ThreadProxy(core_torrent, self.gui_wrap)
                    torrent.update(core_torrent, statistics)
                    self.update_torrent(torrent)
                    if statistics['fractionDone'] is not None:
                        amount_done = statistics['fractionDone'] * torrent.metainfo.total_bytes
                        total_completion += amount_done
                        total_bytes += torrent.metainfo.total_bytes
        all_completed = False
        if total_bytes == 0:
            average_completion = 0
        else:
            average_completion = total_completion / total_bytes
            if total_completion == total_bytes:
                all_completed = True

        df = self.multitorrent.auto_update_status()
        yield df
        available_version, installable_version, delay = df.getResult()
        if available_version is not None:
            if installable_version is None:
                self.notify_of_new_version(available_version)
            else:
                if self.installer_to_launch_at_exit is None:
                    atexit.register(self.launch_installer_at_exit)
                if installable_version not in au_torrents:
                    df = self.multitorrent.get_torrent(installable_version)
                    yield df
                    torrent = df.getResult()
                    torrent = ThreadProxy(torrent, self.gui_wrap)
                else:
                    torrent = au_torrents[installable_version]
                self.installer_to_launch_at_exit = torrent.working_path
                if bttime() > self.next_autoupdate_nag:
                    self.prompt_for_quit_for_new_version(available_version)
                    self.next_autoupdate_nag = bttime() + delay

        def get_global_stats(mt):
            stats = {}

            u, d = mt.get_total_rates()
            stats['total_uprate'] = Rate(u)
            stats['total_downrate'] = Rate(d)

            u, d = mt.get_total_totals()
            stats['total_uptotal'] = Size(u)
            stats['total_downtotal'] = Size(d)

            torrents = mt.get_visible_torrents()
            running = mt.get_visible_running()
            stats['num_torrents'] = len(torrents)
            stats['num_running_torrents'] = len(running)

            stats['num_connections'] = 0
            for t in torrents:
                stats['num_connections'] += t.get_num_connections()

            try:
                stats['avg_connections'] = (stats['num_connections'] /
                                            stats['num_running_torrents'])
            except ZeroDivisionError:
                stats['avg_connections'] = 0

            stats['avg_connections'] = "%.02f" % stats['avg_connections']

            return stats

        df = self.multitorrent.call_with_obj(get_global_stats)
        yield df
        global_stats = df.getResult()

        yield average_completion, all_completed, global_stats
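
update_status() above is written in the yield-a-deferred style: the generator yields each deferred to whatever drives it, calls getResult() once the value is ready, and finally yields its own result tuple. A toy driver for deferreds that have already fired, just to show the control flow; BitTorrent's real coroutine machinery is more involved than this:

class _FiredDeferred(object):
    # Stand-in for a deferred whose result is already available.
    def __init__(self, value):
        self._value = value
    def getResult(self):
        return self._value

def _example_drive(gen):
    # Step the generator to completion; the last value it yields is its result.
    last = None
    try:
        while True:
            last = gen.next()
    except StopIteration:
        pass
    return last

# def _example_coroutine():
#     df = _FiredDeferred(41)
#     yield df
#     yield df.getResult() + 1
# _example_drive(_example_coroutine()) returns 42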
Example #11
    def _rechoke(self):
        # step 1:
        # get sorted in order of preference lists of peers
        # one for downloading torrents, and one for seeding torrents
        down_pref = []
        seed_pref = []
        for i, c in enumerate(self.connections):
            u = c.upload
            if c.download.have.numfalse == 0 or not u.interested:
                continue
            # I cry.
            if c.download.multidownload.storage.have.numfalse != 0:
                ## heuristic for downloading torrents
                if not c.download.is_snubbed():

                    ## simple download rate based
                    down_pref.append((-c.download.get_rate(), i))

                    ## ratio based
                    #dr = c.download.get_rate()
                    #ur = max(1, u.get_rate())
                    #ratio = dr / ur
                    #down_pref.append((-ratio, i))
            else:
                ## heuristic for seeding torrents

                ## Uoti special               
##                if c._decrypt is not None:
##                    seed_pref.append((self.count, u.get_rate(), i))
##                elif (u.unchoke_time > self.count - self.magic_number or
##                      u.buffer and c.connection.is_flushed()):
##                    seed_pref.append((u.unchoke_time, u.get_rate(), i))
##                else:
##                    seed_pref.append((1, u.get_rate(), i))

                ## sliding, first pass (see below)
                r = u.get_rate()
                if c._decrypt is not None:
                    seed_pref.append((2, r, i))
                else:
                    seed_pref.append((1, r, i))

        down_pref.sort()
        seed_pref.sort()
        #pprint(down_pref)
        #pprint(seed_pref)
        down_pref = [ self.connections[i] for junk, i in down_pref ]
        seed_pref = [ self.connections[i] for junk, junk, i in seed_pref ]

        max_uploads = self._max_uploads()

        ## sliding, second pass
##        # up-side-down sum for an idea of capacity
##        uprate_sum = sum(rates[-max_uploads:])
##        if max_uploads == 0:
##            avg_uprate = 0
##        else:
##            avg_uprate = uprate_sum / max_uploads
##        #print 'avg_uprate', avg_uprate, 'of', max_uploads
##        self.extra_slots = max(self.extra_slots - 1, 0)
##        if avg_uprate > self.arbitrary_min:
##            for r in rates:
##                if r < (avg_uprate * 0.80): # magic 80%
##                    self.extra_slots += 2
##                    break
##        self.extra_slots = min(len(seed_pref), self.extra_slots)
##        max_uploads += self.extra_slots
##        #print 'plus', self.extra_slots

        # step 2:
        # split the peer lists by a ratio to fill the available upload slots
        d_uploads = max(1, int(round(max_uploads * 0.70)))
        s_uploads = max(1, int(round(max_uploads * 0.30)))
        #print 'original', 'ds', d_uploads, 'us', s_uploads
        extra = max(0, d_uploads - len(down_pref))
        if extra > 0:
            s_uploads += extra
            d_uploads -= extra
        extra = max(0, s_uploads - len(seed_pref))
        if extra > 0:
            s_uploads -= extra
            d_uploads = min(d_uploads + extra, len(down_pref))
        #print 'ds', d_uploads, 'us', s_uploads        
        down_pref = down_pref[:d_uploads]
        seed_pref = seed_pref[:s_uploads]
        preferred = set(down_pref)
        preferred.update(seed_pref)

        # step 3:
        # enforce unchoke states
        count = 0
        to_choke = []
        for i, c in enumerate(self.connections):
            u = c.upload
            if c in preferred:
                u.unchoke(self.count)
                count += 1
            else:
                to_choke.append(c)

        # step 4:
        # enforce choke states and handle optimistics
        optimistics = max(self.config['min_uploads'],
                          max_uploads - len(preferred))
        #print 'optimistics', optimistics
        for c in to_choke:
            u = c.upload
            if c.download.have.numfalse == 0:
                # keep seeds choked, out of superstition
                u.unchoke(self.count)
            elif count >= optimistics:
                u.choke()
            else:
                # this one's optimistic
                u.unchoke(self.count)
                if u.interested:
                    count += 1
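
Step 2 above splits max_uploads roughly 70/30 between downloading and seeding peers, forces at least one slot on each side, and then hands unused slots to the other side. The same arithmetic pulled out as a stand-alone function with a worked example (the function name is mine; the logic is copied from the code above):

def _example_split_slots(max_uploads, n_down, n_seed):
    d_uploads = max(1, int(round(max_uploads * 0.70)))
    s_uploads = max(1, int(round(max_uploads * 0.30)))
    extra = max(0, d_uploads - n_down)        # download slots we cannot fill
    if extra > 0:
        s_uploads += extra
        d_uploads -= extra
    extra = max(0, s_uploads - n_seed)        # seed slots we cannot fill
    if extra > 0:
        s_uploads -= extra
        d_uploads = min(d_uploads + extra, n_down)
    return d_uploads, s_uploads

# _example_split_slots(10, n_down=2, n_seed=8) returns (2, 8):
# the initial 7/3 split leaves 5 download slots idle, so they move to seeding.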
Example #12
    def __init__(self, multitorrent):
        TorrentButler.__init__(self, multitorrent)
        self.started_torrents = set()
Example #13
    0x0441: "sw",       # Swahili
    0x0430: "sx",       # Sutu
    0x0449: "ta",       # Tamil
    0x041E: "th",       # Thai
    0x0432: "tn",       # Setsuana
    0x041F: "tr",       # Turkish
    0x0431: "ts",       # Tsonga
    0X0444: "tt",       # Tatar
    0x0422: "uk",       # Ukrainian
    0x0420: "ur",       # Urdu
    0x0443: "uz_UZ",    # Uzbek - Latin
    0x042A: "vi",       # Vietnamese
    0x0434: "xh",       # Xhosa
    0x043D: "yi",       # Yiddish
    0x0804: "zh_CN",    # Chinese - China
    0x0C04: "zh_HK",    # Chinese - Hong Kong S.A.R.
    0x1404: "zh_MO",    # Chinese - Macau S.A.R
    0x1004: "zh_SG",    # Chinese - Singapore
    0x0404: "zh_TW",    # Chinese - Taiwan
    0x0435: "zu",       # Zulu
}

if __name__ == '__main__':
    from BitTorrent.obsoletepythonsupport import set
    internal = set([x.lower() for x in languages])
    windows = set(locale_sucks.values())
    if not windows.issuperset(internal):
        diff = list(internal.difference(windows))
        diff.sort()
        print diff