示例#1
0
 def _load_torrents(self):
     """
     Detect folder changes: scan the upload/download directories and
     register any .torrent file not seen on a previous scan.

     Files that fail to parse are logged and skipped; duplicates
     (same info-hash already registered) are logged and skipped.
     """
     for up_down in (UP, DOWN):
         for f in ls_ext(DIR[up_down], ".torrent"):
             if f in self.files:
                 # already processed on an earlier scan
                 continue
             # remember the filename even if loading fails below,
             # so a broken file is not retried on every scan
             self.files.add(f)
             try:
                 torrent = Torrent()
                 torrent.load(up_down, f)
             except TypeError:
                 log.error("bad torrent format [%s]" % f)
                 continue
             except Exception:
                 # narrowed from a bare "except:", which would also
                 # swallow SystemExit/KeyboardInterrupt
                 log.exception("exception in loading torrent [%s]" % f)
                 continue
             torrent_id = torrent.hash()
             if torrent_id in self.torrents:
                 log.warning("duplicate torrent [%s]" % f)
                 continue
             self.torrents[torrent_id] = torrent
             log.info("added [%20s] size:%s left:%s for %s" % \
                          (torrent.name,
                           psize(torrent.size),
                           psize(torrent.left),
                           WORD[up_down]))
示例#2
0
    def fuck_yourself(self):
        """
        Prepare the session before the main loop: resolve the optional
        IPv6 address, pick a port, derive the identity of the faked
        client, create working directories and print the startup banner.
        """
        # resolve and url-encode the ipv6 address when the user asked for it
        if self.use_ipv6:
            addr = ipv6_addr()
            if not addr:
                ptl_error("cannot get ipv6 address")
            self.str_ipv6 = "&ipv6=%s" % urlencode(addr)

        # fall back to a random port when none was supplied
        if not self.port:
            self.port = randint(MIN_PORT, MAX_PORT)

        # per-session key, plus the profile of the client we impersonate
        self.client_key = client_key()
        profile = BT_CLIENTS[self.client_id]

        # peer_id is derived from the chosen client's prefix
        self.quoted_peer_id = urlencode(peer_id(profile["prefix"]))

        # present the matching User-Agent header for that client
        self.headers.update({"User-Agent": profile["user-agent"]})

        # scraping is possible only if allowed and supported by the client
        self.scrapable = not self.no_scrape and profile["scrape"]

        # make sure both working directories exist
        for up_down in (UP, DOWN):
            mkdir(DIR[up_down])

        # NOTE(review): level is forced to DEBUG here even though
        # self.logging_level exists and is reported below -- confirm intended
        log.setLevel(DEBUG)
        log.debug("ptliar started, version: %s" % __version__)
        log.info("verbose            : %s" % (self.logging_level == DEBUG))
        log.info("ipv6               : %s" % self.use_ipv6)
        log.info("zero_rate          : %s" % self.use_zero_rate)
        log.info("timer              : %s" % ptime(self.timer))
        log.info("max up bandwidth   : %s/s" % psize(self.max_up_speed))
        log.info("max down bandwidth : %s/s" % psize(self.max_down_speed))
        log.info("max torrent speed  : %s/s" % psize(self.max_torrent_speed))
        log.info("fake bt client     : %s" % self.client_id)
示例#3
0
    def _fool_around(self):
        """
        One iteration of the main loop: commit every ready torrent in
        batches of parallel threads, report totals, then either shut
        down (done flag / timer / Ctrl+C) or sleep until the next
        commit is due and rescan the folders for new torrents.
        """
        def run_threads(box):
            # start every thread in the batch, then wait for all of them
            for th in box:
                th.start()
            for th in box:
                th.join()

        # commit torrents in batches of CONNECTION_PER_BOX parallel threads
        thread_box = []
        for torrent in self.torrents.values():
            if not torrent.is_ready:
                continue
            thread_box.append(Thread(target=torrent.commit))
            if len(thread_box) == CONNECTION_PER_BOX:
                run_threads(thread_box)
                thread_box = []
                if not ps.no_sleep:
                    sleep(SLEEP_THREAD)
        # flush the last (possibly partial) batch
        run_threads(thread_box)

        # calculate committed values
        all_torrents = self.torrents.values()
        uploaded = 0
        downloaded = 0
        for torrent in all_torrents:
            uploaded += torrent.uploaded
            downloaded += torrent.downloaded

        log.info("time: %s up_speed: %s/s down_speed: %s/s "\
                 "committed [up: %s down: %s]" % \
                    (ptime(time() - self.started),
                     psize(ts.up_speed),
                     psize(ts.down_speed),
                     psize(uploaded),
                     psize(downloaded)))

        if self.done:
            # say goodbye: print the session summary and exit the process
            elapsed = time() - self.started
            log.info("time elapsed: %s" % ptime(elapsed))
            log.info("avg up speed: %s/s" % psize(uploaded / elapsed))
            log.info("avg down speed: %s/s" % psize(downloaded / elapsed))
            log.debug("<= PTLiar ended")
            print "Bye~"
            ptl_exit(0)

        # torrents that errored out are excluded from scheduling
        active_torrents = filter(lambda t: t.status != "error", all_torrents)
        if len(active_torrents) < 1:
            ptl_error("no torrents available")

        # calculate how long should we sleep:
        # wake up for the earliest next_commit_time among active torrents
        next_commit = min(map(lambda t: t.next_commit_time, active_torrents))
        left = max(0, next_commit - time())

        # sleep one more second than needed, but never longer than SLEEP_SCAN
        zzz = min(SLEEP_SCAN, left + 1)
        log.info("next commit: %s from now, sleep for %s.." % \
                 (ptime(left), ptime(zzz)))
        print "press [Ctrl+C] to leave"
        try:
            # allow Ctrl+C only during the sleep window so commits above
            # are never interrupted mid-flight
            interrupt_on()
            sleep(zzz)
            interrupt_off()
        except (KeyboardInterrupt, SystemExit):
            # gracefully shutdown on the next pass
            interrupt_off()
            self.done = True
        if time() >= self.started + ps.timer:
            # timer expired -- NOTE(review): assumes ps.timer is a duration
            # in seconds from start; confirm against ps setup
            self.done = True
        if self.done:
            # final announce pass: tell trackers every torrent stopped
            log.info("stopping...")
            for torrent in active_torrents:
                torrent.status = "stopped"
            return
        # check whether we've got new torrent
        self._load_torrents()
示例#4
0
    def _commit(self):
        """
        Send one announce ("commit") for this torrent and update local
        state from the response: next commit time, peer counts and the
        fake upload/download speeds for the next interval.
        """
        # scrape first on the initial announce when the tracker supports it
        if self.status == "started" and self.scrapable:
            self.scrape()

        self._update_status()

        req = self._get_commit_string()
        is_success, meta_info = self._send_message(req, "commit")

        if not is_success:
            # transport-level failure: log it and back off for 30 minutes
            self._error(meta_info["err_msg"], False)
            self.last_commit_time = time()
            self.next_commit_time = self.last_commit_time + 30 * MIN
            return

        if self.status == "stopped":
            # stopped, that's it
            log.info("receive [%20s] up:%s down:%s" % \
                        (self.name, psize(self.uploaded), psize(self.downloaded)))
            return

        failure_reason = meta_info.get("failure reason", -1)
        if failure_reason != -1:
            # failure reason received
            # bug fix: compare the actual reason against the critical list;
            # the original tested the literal string "failure reason",
            # which is constant and made this branch dead (or always-on)
            if failure_reason in CRITICAL_RESPONSES:
                self._error("server rejected [%s]" % failure_reason)
                return
            # not really critical, try 30 mins later
            self._error("server rejected [%s]" % failure_reason, False)
            self.last_commit_time = time()
            self.next_commit_time = self.last_commit_time + 30 * MIN
            return

        interval = meta_info.get("interval", -1)
        if interval == -1:
            # weird, interval not given
            self._error("interval not given")
            return

        # interval received, set next_commit_time
        self.last_commit_time = time()
        self.next_commit_time = self.last_commit_time + interval

        # get 'up_peers' and 'down_peers'
        complete = meta_info.get("complete", -1)
        incomplete = meta_info.get("incomplete", -1)
        if complete != -1 and incomplete != -1:
            # got overall status from commit response
            self.up_peers = int(complete)
            self.down_peers = int(incomplete)
        elif self.scrapable:
            if self.status != "started":
                # scrape supported and not yet scraped
                self.scrape()
        elif "peers" in meta_info:
            # just assume [active peers] = [total peers] / [a certain rate]
            len_peers = len(meta_info["peers"])
            self.up_peers = len_peers / PEER_UPLOAD_RATE
            self.down_peers = len_peers / PEER_DOWNLOAD_RATE

        # get some uploading speed?
        if (self.up and self.down_peers) or \
               (self.down and self.tickets[DOWN] and self.down_peers > 1):
            self.tickets[UP] = ts.get_tickets(self, UP)
            self.speed[UP] = ts.get_up_speed(self)
        else:
            ts.return_tickets(self, UP)

        # get some downloading speed?
        if self.down and self.down_peers > 1 and self.up_peers > 1:
            # fake download only when
            # there is actually someone downloading and someone uploading
            self.tickets[DOWN] = ts.get_tickets(self, DOWN)
            self.speed[DOWN] = ts.get_down_speed(self)
            if self.speed[DOWN]:
                # if we'd "finish" before the interval ends, announce
                # completion shortly after the fake download completes
                left = self.left / self.speed[DOWN]
                if left < interval:
                    self.next_commit_time = self.last_commit_time + left + 10
        else:
            ts.return_tickets(self, DOWN)

        # clear the event
        if self.status in ("started", "completed"):
            self.status = "noevent"

        log.info("receive [%20s] int:%s " \
                 "(down_peer:%s up_speed:%s/s) " \
                 "(up_peer:%s down_speed:%s/s)" % \
                     (self.name, ptime(interval),
                      self.down_peers, psize(self.speed[UP]),
                      self.up_peers, psize(self.speed[DOWN])))
示例#5
0
    def _send_message(self, path, method):
        """
        Send the lie to tracker.

        path   -- request path, query string included
        method -- "commit" or "scrape" (used only for log output)

        if success: return True, bdecoded response dict
        if failure: return False, {"err_msg" : reason}

        Retries timeouts and 5xx responses up to TIMEOUT_RETRY times and
        follows up to REDIRECT_RETRY HTTP redirections.
        """
        cnt_redir = 0  # count of redirections
        scheme = self.scheme  # "http" or "https"
        domain = self.domain
        while cnt_redir < REDIRECT_RETRY:
            cnt_tried = 0  # count of retries
            while True:
                # if this is a retry, append a string in output to indicate it
                retry = " retry %s" % cnt_tried if cnt_tried else ""
                if method == "scrape":
                    log.info("%s [%20s] %s%s" %
                             (method, self.name, scheme, retry))
                elif method == "commit":
                    log.info("%s [%20s] up:%s down:%s left:%s event:%s %s%s" % \
                                   (method, self.name,
                                    psize(self.uploaded),
                                    psize(self.downloaded),
                                    psize(self.left),
                                    self.status, scheme, retry))

                # pick the connection class registered for this scheme
                conn_class = Torrent.conns.get(scheme)
                if not conn_class:
                    raise Exception("Weird scheme: %s" % scheme)
                try:
                    conn = None
                    conn = conn_class(domain, timeout=CONNECTION_TIMEOUT)
                    # build the request by hand so we control every header
                    conn.putrequest("GET", path, True, True)
                    conn.putheader("Host", domain)
                    conn.putheader("User-Agent", ps.headers["User-Agent"])
                    conn.putheader("Accept-Encoding",
                                   ps.headers["Accept-Encoding"])
                    conn.putheader("Connection", ps.headers["Connection"])
                    conn.endheaders()
                    response = conn.getresponse()
                    status = response.status
                    headers = response.getheaders()
                    data = response.read()
                    if status not in (500, 501, 502):
                        conn.close()
                        break
                    # retry when encounters 500, 502, count them as timeout
                    log.error("internal server error [%20s]" % self.name)
                except Exception as e:
                    log.error("%s:%s [%20s]" %
                              (type(e).__name__, e, self.name))
                if conn:
                    conn.close()
                cnt_tried += 1
                if cnt_tried >= TIMEOUT_RETRY:
                    # seems like the tracker ignored us
                    return False, {"err_msg": "timeout several times"}
                sleep(SLEEP_TIMEOUT)
            if status in (300, 301, 302, 303, 307):
                # handling redirection
                redir_url = None
                for (k, v) in headers:
                    if k.lower() == "location":
                        redir_url = v
                        break
                if redir_url is None:
                    # caught in a bad redirection
                    return False, {"err_msg": "bad redirection"}
                # get the new url to visit
                cnt_redir += 1
                scheme, domain, path = split_url(redir_url)
                log.debug("redirect %s [%20s] url:%s" %
                          (status, self.name, redir_url))
                continue
            elif status != 200:
                # unsupported HTTP status
                return False, {
                    "err_msg": "not supported HTTP status: %s" % status
                }

            # 200, succeeded in getting response
            bencoded_info = None
            for (k, v) in headers:
                if k.lower() == "content-encoding":
                    if v.lower() == "gzip":
                        # it's gzipped
                        bencoded_info = gzip_decode(data)
                    break
            if not bencoded_info:
                bencoded_info = data

            # B decoding
            try:
                meta_info = bdecode(bencoded_info)
            except TypeError:
                return False, {"err_msg": "bad response format"}
            return True, meta_info
        return False, {"err_msg": "too many redirections"}