# Wraps an input stream, tracks the input byte rate, and hard-exits the
# process when the stream reaches EOF (used by createlivestream).
class HaltOnEOFStream():
    def __init__(self, stream):
        self.stream = stream
        self.ratemeasure = Measure(30)

    def read(self, nbytes=None):
        ret = self.stream.read(nbytes)
        if len(ret) == 0:
            print >> sys.stderr, 'createlivestream: Exiting on EOF input stream'
            os._exit(1)
        self.ratemeasure.update_rate(len(ret))
        return ret

    def close(self):
        self.stream.close()
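
# `Measure(period)` is used throughout these snippets as a byte-rate tracker
# (update_rate / get_rate / get_rate_noupdate). Its implementation is not part
# of this excerpt; the class below is only an illustrative stand-in that
# averages bytes over a sliding window of roughly `max_rate_period` seconds.
import time

class SimpleRateMeasure:

    def __init__(self, max_rate_period):
        self.max_rate_period = max_rate_period
        self.start = time.time()
        self.total = 0

    def update_rate(self, amount):
        # Add the new bytes, then shrink the window back to max_rate_period
        # by scaling the byte count down proportionally.
        self.total += amount
        now = time.time()
        if now - self.start > self.max_rate_period:
            self.total = int(self.total * self.max_rate_period / (now - self.start))
            self.start = now - self.max_rate_period

    def get_rate(self):
        # Bytes per second over the current window.
        elapsed = time.time() - self.start
        if elapsed <= 0:
            elapsed = 0.0001
        return self.total / elapsed

    def get_rate_noupdate(self):
        return self.get_rate()
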
# Fragment: apparently Upload.__init__ from the BT1 uploader, setting up the
# per-connection choke state and sending the initial bitfield/have messages.
    def __init__(self, connection, ratelimiter, totalup, choker, storage, picker, config):
        self.connection = connection
        self.ratelimiter = ratelimiter
        self.totalup = totalup
        self.choker = choker
        self.storage = storage
        self.picker = picker
        self.config = config
        self.max_slice_length = config['max_slice_length']
        self.choked = True
        self.cleared = True
        self.interested = False
        self.super_seeding = False
        self.buffer = []
        self.measure = Measure(config['max_rate_period'], config['upload_rate_fudge'])
        self.was_ever_interested = False
        if storage.get_amount_left() == 0:
            if choker.super_seed:
                self.super_seeding = True
                self.seed_have_list = []
                self.skipped_count = 0
            elif config['breakup_seed_bitfield']:
                bitfield, msgs = storage.get_have_list_cloaked()
                connection.send_bitfield(bitfield)
                for have in msgs:
                    connection.send_have(have)

            else:
                connection.send_bitfield(storage.get_have_list())
        elif storage.do_I_have_anything():
            connection.send_bitfield(storage.get_have_list())
        self.piecedl = None
        self.piecebuf = None
        self.hashlist = []

# Fragment: apparently Downloader.__init__ from BT1 Downloader.py, wiring up
# the piece picker, storage and per-peer rate bookkeeping for all downloads.
 def __init__(self,
              infohash,
              storage,
              picker,
              backlog,
              max_rate_period,
              numpieces,
              chunksize,
              measurefunc,
              snub_time,
              kickbans_ok,
              kickfunc,
              banfunc,
              scheduler=None):
     self.infohash = infohash
     self.b64_infohash = b64encode(infohash)
     self.storage = storage
     self.picker = picker
     self.backlog = backlog
     self.max_rate_period = max_rate_period
     self.measurefunc = measurefunc
     self.totalmeasure = Measure(max_rate_period * storage.piece_length /
                                 storage.request_size)
     self.numpieces = numpieces
     self.chunksize = chunksize
     self.snub_time = snub_time
     self.kickfunc = kickfunc
     self.banfunc = banfunc
     self.disconnectedseeds = {}
     self.downloads = []
     self.perip = {}
     self.gotbaddata = {}
     self.kicked = {}
     self.banned = {}
     self.kickbans_ok = kickbans_ok
     self.kickbans_halted = False
     self.super_seeding = False
     self.endgamemode = False
     self.endgame_queued_pieces = []
     self.all_requests = []
     self.discarded = 0L
     self.download_rate = 0
     self.bytes_requested = 0
     self.last_time = clock()
     self.queued_out = {}
     self.requeueing = False
     self.paused = False
     self.scheduler = scheduler
     self.scheduler(self.dlr_periodic_check, 1)
     if self.picker is not None:
         if self.picker.helper is not None:
             self.picker.helper.set_downloader(self)
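
# The `scheduler` argument above is the rawserver's add_task-style callable:
# scheduler(func, delay) runs func once after `delay` seconds, so a periodic
# job like dlr_periodic_check presumably re-arms itself each time it fires.
# A minimal threading-based stand-in for that pattern (not the real rawserver):
import threading

def make_scheduler():
    def scheduler(func, delay):
        t = threading.Timer(delay, func)
        t.setDaemon(True)
        t.start()
    return scheduler

def run_periodically(scheduler, check, interval=1):
    # Mirrors self.scheduler(self.dlr_periodic_check, 1): the job reschedules
    # itself after each run.
    def tick():
        check()
        scheduler(tick, interval)
    scheduler(tick, interval)
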
class SingleDownload(SingleDownloadHelperInterface):
    def __init__(self, downloader, url):
        SingleDownloadHelperInterface.__init__(self)
        self.downloader = downloader
        self.baseurl = url
        try:
            self.scheme, self.netloc, path, pars, query, fragment = urlparse(
                url)
        except:
            self.downloader.errorfunc('cannot parse http seed address: ' + url)
            return

        if self.scheme != 'http':
            self.downloader.errorfunc('http seed url not http: ' + url)
            return
        self.proxyhost = find_proxy(url)
        try:
            if self.proxyhost is None:
                self.connection = HTTPConnection(self.netloc)
            else:
                self.connection = HTTPConnection(self.proxyhost)
        except:
            self.downloader.errorfunc('cannot connect to http seed: ' + url)
            return

        self.seedurl = path
        if pars:
            self.seedurl += ';' + pars
        self.seedurl += '?'
        if query:
            self.seedurl += query + '&'
        self.seedurl += 'info_hash=' + urllib.quote(self.downloader.infohash)
        self.measure = Measure(downloader.max_rate_period)
        self.index = None
        self.url = ''
        self.requests = []
        self.request_size = 0
        self.endflag = False
        self.error = None
        self.retry_period = 30
        self._retry_period = None
        self.errorcount = 0
        self.goodseed = False
        self.active = False
        self.cancelled = False
        self.resched(randint(2, 10))

    def resched(self, len=None):
        if len is None:
            len = self.retry_period
        if self.errorcount > 3:
            len = len * (self.errorcount - 2)
        self.downloader.rawserver.add_task(self.download, len)

    def _want(self, index):
        if self.endflag:
            return self.downloader.storage.do_I_have_requests(index)
        else:
            return self.downloader.storage.is_unstarted(index)

    def download(self):
        if DEBUG:
            print 'http-sdownload: download()'
        if self.is_frozen_by_helper():
            if DEBUG:
                print 'http-sdownload: blocked, rescheduling'
            self.resched(1)
            return
        self.cancelled = False
        if self.downloader.picker.am_I_complete():
            self.downloader.downloads.remove(self)
            return
        self.index = self.downloader.picker.next(haveall, self._want, self)
        if self.index is None and self.frozen_by_helper:
            self.resched(0.01)
            return
        if self.index is None and not self.endflag and not self.downloader.peerdownloader.has_downloaders(
        ):
            self.endflag = True
            self.index = self.downloader.picker.next(haveall, self._want, self)
        if self.index is None:
            self.endflag = True
            self.resched()
        else:
            self.url = self.seedurl + '&piece=' + str(self.index)
            self._get_requests()
            if self.request_size < self.downloader.storage._piecelen(
                    self.index):
                self.url += '&ranges=' + self._request_ranges()
            rq = Thread(target=self._request)
            rq.setName('HoffmanHTTPDownloader' + rq.getName())
            rq.setDaemon(True)
            rq.start()
            self.active = True

    def _request(self):
        import encodings.ascii
        import encodings.punycode
        import encodings.idna
        self.error = None
        self.received_data = None
        try:
            self.connection.request('GET', self.url, None,
                                    {'User-Agent': VERSION})
            r = self.connection.getresponse()
            self.connection_status = r.status
            self.received_data = r.read()
        except Exception as e:
            log_exc()
            self.error = 'error accessing http seed: ' + str(e)
            try:
                self.connection.close()
            except:
                pass

            try:
                self.connection = HTTPConnection(self.netloc)
            except:
                self.connection = None

        self.downloader.rawserver.add_task(self.request_finished)

    def request_finished(self):
        self.active = False
        if self.error is not None:
            if self.goodseed:
                self.downloader.errorfunc(self.error)
            self.errorcount += 1
        if self.received_data:
            self.errorcount = 0
            if not self._got_data():
                self.received_data = None
        if not self.received_data:
            self._release_requests()
            self.downloader.peerdownloader.piece_flunked(self.index)
        if self._retry_period:
            self.resched(self._retry_period)
            self._retry_period = None
            return
        self.resched()

    def _got_data(self):
        if self.connection_status == 503:
            try:
                self.retry_period = max(int(self.received_data), 5)
            except:
                pass

            return False
        if self.connection_status != 200:
            self.errorcount += 1
            return False
        self._retry_period = 1
        if len(self.received_data) != self.request_size:
            if self.goodseed:
                self.downloader.errorfunc(
                    'corrupt data from http seed - redownloading')
            return False
        self.measure.update_rate(len(self.received_data))
        self.downloader.measurefunc(len(self.received_data))
        if self.cancelled:
            return False
        if not self._fulfill_requests():
            return False
        if not self.goodseed:
            self.goodseed = True
            self.downloader.seedsfound += 1
        if self.downloader.storage.do_I_have(self.index):
            self.downloader.picker.complete(self.index)
            self.downloader.peerdownloader.check_complete(self.index)
            self.downloader.gotpiecefunc(self.index)
        return True

    def _get_requests(self):
        self.requests = []
        self.request_size = 0L
        while self.downloader.storage.do_I_have_requests(self.index):
            r = self.downloader.storage.new_request(self.index)
            self.requests.append(r)
            self.request_size += r[1]

        self.requests.sort()

    def _fulfill_requests(self):
        start = 0L
        success = True
        while self.requests:
            begin, length = self.requests.pop(0)
            if not self.downloader.storage.piece_came_in(
                    self.index, begin, [],
                    self.received_data[start:start + length], length):
                success = False
                break
            start += length

        return success

    def _release_requests(self):
        for begin, length in self.requests:
            self.downloader.storage.request_lost(self.index, begin, length)

        self.requests = []

    def _request_ranges(self):
        s = ''
        begin, length = self.requests[0]
        for begin1, length1 in self.requests[1:]:
            if begin + length == begin1:
                length += length1
                continue
            else:
                if s:
                    s += ','
                s += str(begin) + '-' + str(begin + length - 1)
                begin, length = begin1, length1

        if s:
            s += ','
        s += str(begin) + '-' + str(begin + length - 1)
        return s

    def helper_forces_unchoke(self):
        pass

    def helper_set_freezing(self, val):
        self.frozen_by_helper = val
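
# _request_ranges above collapses the queued (begin, length) chunks into the
# "ranges=" query value sent to the HTTP seed, merging adjacent chunks into
# inclusive byte spans. The same merge as a standalone helper, with an
# illustrative example (the chunk sizes are made up):

def merge_ranges(requests):
    spans = []
    begin, length = requests[0]
    for begin1, length1 in requests[1:]:
        if begin + length == begin1:
            # Contiguous with the current span: just extend it.
            length += length1
        else:
            # Gap: flush the current span and start a new one.
            spans.append('%d-%d' % (begin, begin + length - 1))
            begin, length = begin1, length1
    spans.append('%d-%d' % (begin, begin + length - 1))
    return ','.join(spans)

# merge_ranges([(0, 16384), (16384, 16384), (49152, 16384)])
# -> '0-32767,49152-65535'
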
class Downloader:
    def __init__(self, url, dlhash, rawserver, failed_func, max_errors=10):
        if DEBUG:
            log('dd-downloader::__init__: url', url, 'hash',
                binascii.hexlify(dlhash))
        self.url = url
        self.rawserver = rawserver
        self.failed_func = failed_func
        self.final_url = None
        self.storage = None
        self.lock = Lock()
        self.measure = Measure(10.0)
        self.errors = 0
        self.max_errors = max_errors
        self.seek = None
        self.shutdown_flag = False
        self.running = False
        self.log_prefix = 'dd-downloader::' + binascii.hexlify(dlhash) + ':'

    def predownload(self, callback, timeout=10):
        # A download/predownload thread already holds the lock; nothing to do.
        if self.lock.locked():
            return
        t = Thread(target=self._predownload, args=[callback, timeout])
        t.setName('dd-downloader-predownload-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _predownload(self, callback, timeout):
        self.lock.acquire()
        self.running = True
        try:
            if DEBUG:
                log(self.log_prefix + '_predownload: url', self.url, 'timeout',
                    timeout)
            stream = urlOpenTimeout(self.url, timeout=timeout)
            content_type = stream.info().getheader('Content-Type')
            content_length = stream.info().getheader('Content-Length')
            if DEBUG:
                log(
                    self.log_prefix +
                    '_predownload: request finished: content_type',
                    content_type, 'content_length', content_length)
            data = ''
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(
                            self.log_prefix +
                            '_predownload: got shutdown flag while reading: url',
                            self.url)
                    break
                buf = stream.read(524288)
                if not buf:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: eof: url',
                            self.url)
                    break
                data += buf
                if DEBUG:
                    log(self.log_prefix + '_predownload: read chunk: url',
                        self.url, 'read_len', len(data))

            stream.close()
            if not self.shutdown_flag:
                if DEBUG:
                    log(
                        self.log_prefix +
                        '_predownload: finished, run callback: url', self.url,
                        'content_type', content_type, 'content_length',
                        content_length, 'data_len', len(data))
                callback(content_type, data)
        except Exception as e:
            if DEBUG:
                print_exc()
            self.failed_func(e)
        finally:
            self.running = False
            self.lock.release()

    def init(self, callback=None, timeout=10):
        if callback is None:
            return self._init()
        t = Thread(target=self._init, args=[callback, timeout])
        t.setName('dd-downloader-init-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _init(self, callback=None, timeout=10):
        try:
            scheme, host, path = self.parse_url(self.url)
            redirects = 0
            connection = HTTPConnection(host)
            while True:
                connection.request('HEAD', path, None, {
                    'Host': host,
                    'User-Agent': USER_AGENT
                })
                r = connection.getresponse()
                if r.status == 200:
                    break
                elif r.status == 301 or r.status == 302:
                    redirect_url = r.getheader('Location', None)
                    if DEBUG:
                        log(self.log_prefix + 'init: got redirect: url',
                            self.url, 'redirect', redirect_url)
                    scheme, rhost, path = self.parse_url(redirect_url)
                    redirects += 1
                    if redirects > MAX_REDIRECTS:
                        raise Exception('Too many redirects')
                    if rhost != host:
                        connection.close()
                        connection = HTTPConnection(rhost)
                        host = rhost
                else:
                    raise Exception('Bad http status: ' + str(r.status))

            mime = r.getheader('Content-Type', None)
            length = r.getheader('Content-Length', None)
            connection.close()
            if length is None:
                raise Exception('No content-length in response')
            if mime is None:
                raise Exception('No content-type in response')
            length = int(length)
            self.final_url = scheme + '://' + host + path
            if DEBUG:
                log(self.log_prefix + 'init: got response: length', length,
                    'mime', mime, 'final_url', self.final_url)
            if callback is None:
                return (length, mime)
            callback(length, mime)
        except Exception as e:
            if DEBUG:
                print_exc()
            if callback is None:
                raise e
            else:
                self.failed_func(e)

    def set_storage(self, storage):
        self.storage = storage

    def start(self, pos=0):
        if self.storage is None:
            raise Exception('Storage is not set')
        if self.final_url is None:
            raise Exception('Final url is not set')
        if self.lock.locked():
            self.seek = pos
            return
        t = Thread(target=self._request, args=[pos])
        t.setName('dd-downloader-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _request(self, pos):
        self.lock.acquire()
        self.running = True
        try:
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(
                            self.log_prefix +
                            '_request: got shutdown flag before read: url',
                            self.url)
                    break
                pos = self.storage.get_unfinished_pos(pos)
                if pos is None:
                    if DEBUG:
                        log(
                            self.log_prefix +
                            '_request: no unfinished pos, break: url',
                            self.url)
                    break
                self._read(pos)
                if self.seek is not None:
                    pos = self.seek
                    self.seek = None
                    continue
                break

        except ReadErrorException:
            if DEBUG:
                log(
                    self.log_prefix +
                    '_request: read error, retry immediately: url', self.url,
                    'pos', pos)
            start_lambda = lambda: self.start(pos)
            self.rawserver.add_task(start_lambda, 0.1)
        except FatalErrorException as e:
            if DEBUG:
                log(self.log_prefix + '_request: fatal error, exit: url',
                    self.url, 'pos', pos)
            self.failed_func(e)
        except Exception as e:
            self.errors += 1
            if DEBUG:
                print_exc()
            if self.errors > self.max_errors:
                if DEBUG:
                    log(
                        self.log_prefix +
                        '_request: non-fatal error, max errors reached: errors',
                        self.errors, 'max', self.max_errors)
                self.failed_func(e)
            else:
                retry_in = 5 * (1 + self.errors / 10)
                if DEBUG:
                    log(self.log_prefix + '_request: non-fatal error: url',
                        self.url, 'pos', pos, 'errors', self.errors,
                        'retry_in', retry_in)
                start_lambda = lambda: self.start(pos)
                self.rawserver.add_task(start_lambda, retry_in)
        finally:
            self.running = False
            self.lock.release()

    def is_running(self):
        return self.running

    def _read(self, pos):
        scheme, host, path = self.parse_url(self.final_url)
        request_range = str(pos) + '-'
        connection = HTTPConnection(host)
        connection.request(
            'GET', path, None, {
                'Host': host,
                'User-Agent': USER_AGENT,
                'Range': 'bytes=%s' % request_range
            })
        r = connection.getresponse()
        if DEBUG:
            log(self.log_prefix + '_read: url', self.url, 'final',
                self.final_url, 'pos', pos, 'status', r.status)
        if r.status != 200 and r.status != 206:
            if DEBUG:
                log(self.log_prefix + '_read: bad http status: url', self.url,
                    'status', r.status)
            connection.close()
            if 400 <= r.status < 500:
                raise FatalErrorException, 'http status ' + str(r.status)
            else:
                raise NonFatalErrorException, 'http status ' + str(r.status)
        request_size = r.getheader('Content-Length', None)
        if request_size is None:
            if DEBUG:
                log(self.log_prefix + '_read: missing content length: url',
                    self.url)
            connection.close()
            return
        try:
            request_size = int(request_size)
        except:
            if DEBUG:
                print_exc()
            connection.close()
            return

        if DEBUG:
            log(self.log_prefix + '_read: url', self.url, 'request_range',
                request_range, 'request_size', request_size)
        total_read = 0
        read_size = 16384
        while True:
            chunk = r.read(read_size)
            if not chunk:
                if total_read != request_size:
                    if DEBUG:
                        log(
                            self.log_prefix +
                            '_read: no data, raise read error: url', self.url,
                            'pos', pos, 'total_read', total_read,
                            'request_size', request_size)
                    raise ReadErrorException()
                if DEBUG:
                    log(self.log_prefix + '_read: no data, exit: url',
                        self.url, 'pos', pos)
                break
            chunk_len = len(chunk)
            total_read += chunk_len
            if DEBUG:
                log('>>>> ' + self.log_prefix + '_read: got chunk: pos', pos,
                    'chunk_len', chunk_len, 'total_read', total_read)
            self.measure.update_rate(chunk_len)
            if chunk_len != read_size and total_read != request_size:
                if DEBUG:
                    log(
                        self.log_prefix +
                        '_read: bad data len, raise read error: url', self.url,
                        'pos', pos, 'total_read', total_read, 'request_size',
                        request_size, 'chunk_len', chunk_len, 'read_size',
                        read_size)
                raise ReadErrorException()
            if self.shutdown_flag:
                if DEBUG:
                    log(
                        self.log_prefix +
                        '_read: got shutdown flag on read: url', self.url)
                break
            try:
                t = time.time()
                updated_len = self.storage.write(pos, chunk)
                if DEBUG:
                    log(
                        '%%%%' + self.log_prefix +
                        '_read: write to storage: pos', pos, 'len', chunk_len,
                        'time',
                        time.time() - t)
                if updated_len == 0:
                    if DEBUG:
                        log(
                            self.log_prefix +
                            '_read: data exists in storage: url', self.url,
                            'pos', pos, 'len', chunk_len, 'seek_flag',
                            self.seek)
                    if self.seek is None:
                        self.seek = self.storage.get_unfinished_pos(pos)
                        if self.seek is None:
                            if DEBUG:
                                log(
                                    self.log_prefix +
                                    '_read: no unfinished data, exit: url',
                                    self.url, 'pos', pos)
                            break
            except:
                if DEBUG:
                    print_exc()
                    log(self.log_prefix + '_read: cannot write, exit: url',
                        self.url)
                raise FatalErrorException, 'cannot write to storage'

            if self.seek is not None:
                log(self.log_prefix + '_read: got seek: url', self.url, 'seek',
                    self.seek)
                break
            pos += chunk_len

        connection.close()

    def parse_url(self, url):
        scheme, host, path, pars, query, fragment = urlparse(url)
        if scheme != 'http':
            raise ValueError('Unsupported scheme ' + scheme)
        if len(host) == 0:
            raise ValueError('Empty host')
        if len(path) == 0:
            path = '/'
        if len(pars) > 0:
            path += ';' + pars
        if len(query) > 0:
            path += '?' + query
        if len(fragment) > 0:
            path += '#' + fragment
        return (scheme, host, path)

    def shutdown(self):
        if DEBUG:
            log(self.log_prefix + 'shutdown: ---')
        self.shutdown_flag = True
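
# _read resumes an interrupted transfer with an open-ended HTTP Range header
# ("bytes=<pos>-") and accepts both 200 and 206 responses. A minimal standalone
# sketch of that request; `host` and `path` are placeholders of the kind
# parse_url returns, and `user_agent` stands in for the module's USER_AGENT.
from httplib import HTTPConnection  # Python 2 stdlib, as assumed by the code above

def resume_read(host, path, pos, user_agent='demo-agent'):
    connection = HTTPConnection(host)
    connection.request('GET', path, None, {
        'Host': host,
        'User-Agent': user_agent,
        'Range': 'bytes=%d-' % pos
    })
    r = connection.getresponse()
    # 206 Partial Content when the server honours the range, 200 otherwise.
    return r.status, r.read()
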
class VideoSourceTransporter:
    def __init__(self, stream, bt1download, authconfig, restartstatefilename):
        self.stream = stream
        self.bt1download = bt1download
        self.restartstatefilename = restartstatefilename
        self.exiting = False
        self.ratemeasure = Measure(30)
        self.storagewrapper = bt1download.storagewrapper
        self.picker = bt1download.picker
        self.rawserver = bt1download.rawserver
        self.connecter = bt1download.connecter
        self.fileselector = bt1download.fileselector
        self.videostatus = bt1download.videostatus
        self.buffer = []
        self.buflen = 0
        self.bufferlock = RLock()
        self.handling_pieces = False
        self.readlastseqnum = False
        if authconfig.get_method() == LIVE_AUTHMETHOD_ECDSA:
            self.authenticator = ECDSAAuthenticator(
                self.videostatus.piecelen,
                self.bt1download.len_pieces,
                keypair=authconfig.get_keypair())
        elif authconfig.get_method() == LIVE_AUTHMETHOD_RSA:
            self.authenticator = RSAAuthenticator(
                self.videostatus.piecelen,
                self.bt1download.len_pieces,
                keypair=authconfig.get_keypair())
        else:
            self.authenticator = NullAuthenticator(self.videostatus.piecelen,
                                                   self.bt1download.len_pieces)

    def start(self):
        self.input_thread_handle = SimpleThread(self.input_thread)
        self.input_thread_handle.start()

    def _read(self, length):
        return self.stream.read(length)

    def input_thread(self):
        log('stream: started input thread')
        contentbs = self.authenticator.get_content_blocksize()
        try:
            if DEBUG_TRANSPORT:
                f = open('/tmp/stream.dat', 'wb')
            while not self.exiting:
                data = self._read(contentbs)
                if not data:
                    break
                if DEBUG:
                    log('VideoSource: read %d bytes' % len(data))
                if DEBUG_TRANSPORT:
                    log('VideoSource::input_thread: read chunk: want',
                        contentbs, 'len', len(data))
                    f.write(data)
                self.ratemeasure.update_rate(len(data))
                self.process_data(data)

            if DEBUG_TRANSPORT:
                f.close()
        except IOError:
            if DEBUG:
                print_exc()

        self.shutdown()

    def shutdown(self):
        if DEBUG:
            log('VideoSource::shutdown: ---')
        if self.exiting:
            return
        self.exiting = True
        try:
            self.stream.close()
        except IOError:
            pass

    def process_data(self, data):
        vs = self.videostatus
        self.bufferlock.acquire()
        try:
            self.buffer.append(data)
            self.buflen += len(data)
            if not self.handling_pieces:
                self.rawserver.add_task(self.create_pieces)
                self.handling_pieces = True
        finally:
            self.bufferlock.release()

    def create_pieces(self):
        def handle_one_piece():
            vs = self.videostatus
            contentbs = self.authenticator.get_content_blocksize()
            if self.buflen < contentbs:
                return False
            if len(self.buffer[0]) == contentbs:
                content = self.buffer[0]
                del self.buffer[0]
            else:
                if DEBUG:
                    print >> sys.stderr, 'VideoSource: JOIN ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^'
                buffer = ''.join(self.buffer)
                self.buffer = [buffer[contentbs:]]
                content = buffer[:contentbs]
            self.buflen -= contentbs
            datas = self.authenticator.sign(content)
            piece = ''.join(datas)
            self.add_piece(vs.playback_pos, piece)
            self.del_piece(vs.live_piece_to_invalidate())
            self.readlastseqnum = True
            if self.restartstatefilename is not None:
                try:
                    lastseqnum = self.authenticator.get_source_seqnum()
                    f = open(self.restartstatefilename, 'wb')
                    f.write(str(lastseqnum))
                    f.close()
                except:
                    print_exc()

            vs.inc_playback_pos()
            return True

        if not self.readlastseqnum and self.restartstatefilename is not None and os.path.isfile(
                self.restartstatefilename):
            self.readlastseqnum = True
            try:
                f = open(self.restartstatefilename, 'rb')
                data = f.read()
                f.close()
                lastseqnum = int(data)
                log('stream: restarting stream from piece', lastseqnum)
                lastpiecenum = lastseqnum % self.authenticator.get_npieces()
                self.authenticator.set_source_seqnum(lastseqnum)
                self.videostatus.set_live_startpos(lastpiecenum)
            except:
                print_exc()

        self.bufferlock.acquire()
        try:
            while handle_one_piece():
                pass

            self.handling_pieces = False
        finally:
            self.bufferlock.release()

    def add_piece(self, index, piece):
        if DEBUG:
            log('VideoSource::add_piece: index', index)
        if globalConfig.get_value('live_source_show_pieces', False):
            log(
                'stream: created piece', index, 'speed %.2f KiB/s' %
                (self.ratemeasure.get_rate_noupdate() / 1024))
        chunk_size = self.storagewrapper.request_size
        length = min(len(piece), self.storagewrapper._piecelen(index))
        x = 0
        while x < length:
            self.storagewrapper.new_request(index)
            self.storagewrapper.piece_came_in(index, x, [],
                                              piece[x:x + chunk_size])
            x += chunk_size

        self.picker.complete(index)
        self.connecter.got_piece(index)

    def del_piece(self, piece):
        if DEBUG:
            log('VideoSource::del_piece:', piece)
        self.picker.downloader.live_invalidate(piece)
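
# create_pieces drains the shared buffer one content block at a time: whenever
# at least get_content_blocksize() bytes are queued it peels off exactly one
# block, signs it into a piece and advances the live playback position. The
# chunking step on its own, separated from the BT1 plumbing (the block size and
# data below are illustrative):

def pop_blocks(chunks, blocksize):
    # Yield exact `blocksize` blocks from a list of arbitrarily sized string
    # chunks, leaving any incomplete tail in the list (mirrors handle_one_piece).
    buflen = sum(len(c) for c in chunks)
    while buflen >= blocksize:
        if len(chunks[0]) == blocksize:
            block = chunks.pop(0)          # fast path: chunk is exactly one block
        else:
            joined = ''.join(chunks)       # slow path: join, then split at blocksize
            chunks[:] = [joined[blocksize:]]
            block = joined[:blocksize]
        buflen -= blocksize
        yield block

# chunks = ['abc', 'defg', 'hi']      # 9 bytes buffered
# list(pop_blocks(chunks, 4))         # -> ['abcd', 'efgh']; chunks is now ['i']
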
class SingleDownload(SingleDownloadHelperInterface):
    def __init__(self, downloader, connection):
        SingleDownloadHelperInterface.__init__(self)
        self.downloader = downloader
        self.connection = connection
        self.choked = True
        self.interested = False
        self.active_requests = []
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        self.raw_have = Bitfield(downloader.numpieces)
        self.have = Bitfield(downloader.numpieces)
        self.last = -1000
        self.last2 = -1000
        self.example_interest = None
        self.backlog = 2
        self.ip = connection.get_ip()
        self.guard = BadDataGuard(self)
        self.app_mode = globalConfig.get_mode()
        self.white_list = None
        self.black_list = None
        if self.app_mode == 'node':
            source_node = globalConfig.get_value('source_node')
            support_nodes = globalConfig.get_value('support_nodes')
            if not globalConfig.get_value('allow_peers_download'):
                self.white_list = set()
                if source_node is not None and globalConfig.get_value(
                        'allow_source_download'):
                    self.white_list.add(source_node[0])
                if len(support_nodes) and globalConfig.get_value(
                        'allow_support_download'):
                    self.white_list.update([addr[0] for addr in support_nodes])
            else:
                self.black_list = set()
                if source_node is not None and not globalConfig.get_value(
                        'allow_source_download'):
                    self.black_list.add(source_node[0])
                if len(support_nodes) and not globalConfig.get_value(
                        'allow_support_download'):
                    self.black_list.update([addr[0] for addr in support_nodes])
                if len(self.black_list) == 0:
                    self.black_list = None
            if DEBUG:
                log('download::__init__: white_list', self.white_list,
                    'black_list', self.black_list)
        self.helper = downloader.picker.helper
        self.proxy_have = Bitfield(downloader.numpieces)
        self.short_term_measure = Measure(5)
        self.bad_performance_counter = 0

    def _backlog(self, just_unchoked):
        self.backlog = int(
            min(
                2 +
                int(4 * self.measure.get_rate() / self.downloader.chunksize),
                2 * just_unchoked + self.downloader.queue_limit()))
        if DEBUG:
            log('downloader::sd::_backlog: backlog', self.backlog, 'rate',
                self.measure.get_rate(), 'chunksize',
                self.downloader.chunksize, 'just_unchoked', just_unchoked,
                'queue_limit', self.downloader.queue_limit())
        if self.backlog > 50:
            self.backlog = int(max(50, self.backlog * 0.075))
            if DEBUG:
                log('downloader::sd::_backlog: fix backlog', self.backlog)
        return self.backlog

    def disconnected(self):
        self.downloader.lost_peer(self)
        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(
                self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            self.active_requests = []
            return
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1

        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        ds = [d for d in self.downloader.downloads if not d.choked]
        shuffle(ds)
        for d in ds:
            d._request_more()

        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(
                            l):
                        d.send_interested()
                        break

    def got_choke(self):
        if not self.choked:
            if DEBUG:
                log('downloader::got_choke: got choke: ip',
                    self.connection.get_ip())
            self.choked = True
            self._letgo()
        elif DEBUG:
            log('downloader::got_choke: already choked: ip',
                self.connection.get_ip())

    def got_unchoke(self):
        if self.choked:
            if DEBUG:
                log('downloader::got_unchoke: got unchoke: ip',
                    self.connection.get_ip(), 'interested', self.interested)
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke=True)
            self.last2 = clock()
        elif DEBUG:
            log('downloader::got_unchoke: already unchoked: ip',
                self.connection.get_ip())

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        if not self.interested:
            if DEBUG:
                log('downloader::send_interested: send interested: ip',
                    self.connection.get_ip())
            self.interested = True
            self.connection.send_interested()
        elif DEBUG:
            log('downloader::send_interested: already interested: ip',
                self.connection.get_ip())

    def send_not_interested(self):
        if self.interested:
            if DEBUG:
                log('downloader::send_not_interested: send not interested: ip',
                    self.connection.get_ip())
            self.interested = False
            self.connection.send_not_interested()
        elif DEBUG:
            log('downloader::send_not_interested: already not interested: ip',
                self.connection.get_ip())

    def got_piece(self, index, begin, hashlist, piece):
        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG:
                print >> sys.stderr, 'decreased bad_performance_counter to', self.bad_performance_counter
        length = len(piece)
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            self.downloader.discarded += length
            return False

        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG:
                print >> sys.stderr, 'Downloader: got_piece: removed one request from all_requests', len(
                    self.downloader.all_requests), 'remaining'
        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist,
                                                     piece, self.guard):
            self.downloader.piece_flunked(index)
            return False
        self.downloader.picker.got_piece(index, begin, length)
        if self.downloader.storage.do_I_have(index):
            self.downloader.picker.complete(index)
        if self.downloader.endgamemode:
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove(
                                    (index, begin, length))
                            except ValueError:
                                continue

                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()

        self._request_more()
        self.downloader.check_complete(index)
        self.connection.total_downloaded += length
        return self.downloader.storage.do_I_have(index)

    def helper_forces_unchoke(self):
        self.choked = False

    def _request_more(self, new_unchoke=False, slowpieces=[]):
        if self.helper is not None and self.is_frozen_by_helper():
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: blocked, returning'
            return
        if self.app_mode == 'node':
            ip = self.connection.get_ip()
            if DEBUG:
                log('download::_request_more: check ip', ip)
            if self.white_list is not None and ip not in self.white_list:
                if DEBUG:
                    log(
                        'download::_request_more: peer is not in the white list: ip',
                        ip)
                return
            if self.black_list is not None and ip in self.black_list:
                if DEBUG:
                    log(
                        'download::_request_more: peer is in the black list: ip',
                        ip)
                return
        if self.choked:
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: choked, returning'
            return
        if self.connection.connection.is_coordinator_con():
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: coordinator conn'
            return
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: endgame mode, returning'
            return
        if self.downloader.paused:
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: paused, returning'
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG:
                log('downloader::_request_more: more req than unchoke (active req: %d >= backlog: %d), download_rate=%d'
                    % (len(self.active_requests), self._backlog(new_unchoke),
                       self.downloader.download_rate))
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                if wait_period > 1.0:
                    if DEBUG:
                        print >> sys.stderr, 'Downloader: waiting for %f s to call _request_more again' % wait_period
                    self.downloader.scheduler(self._request_more, wait_period)
            if not (self.active_requests or self.backlog):
                if DEBUG:
                    print >> sys.stderr, 'Downloader::_request_more: queue out download'
                self.downloader.queued_out[self] = 1
            return
        lost_interests = []
        while len(self.active_requests) < self.backlog:
            interest = self.downloader.picker.next(
                self.have,
                self.downloader.storage.do_I_have_requests,
                self,
                self.downloader.too_many_partials(),
                self.connection.connection.is_helper_con(),
                slowpieces=slowpieces,
                connection=self.connection,
                proxyhave=self.proxy_have)
            diff = -1
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: next() returned', interest, 'took %.5f' % diff
            if interest is None:
                break
            if self.helper and self.downloader.storage.inactive_requests[
                    interest] is None:
                self.connection.send_have(interest)
                break
            if self.helper and self.downloader.storage.inactive_requests[
                    interest] == []:
                break
            self.example_interest = interest
            self.send_interested()
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                request = self.downloader.storage.new_request(interest)
                if request is None:
                    log(
                        'downloader::_request_more: new_request returned none: index',
                        interest)
                    lost_interests.append(interest)
                    break
                begin, length = request
                if DEBUG:
                    log('downloader::_request_more: new_request',
                        interest, begin, length, 'to',
                        self.connection.connection.get_ip(),
                        self.connection.connection.get_port())
                self.downloader.picker.requested(interest, begin, length)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)

        if not self.active_requests:
            self.send_not_interested()
        if lost_interests:
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(
                        d.example_interest):
                    continue
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue

                interest = self.downloader.picker.next(
                    d.have,
                    self.downloader.storage.do_I_have_requests,
                    self,
                    self.downloader.too_many_partials(),
                    self.connection.connection.is_helper_con(),
                    willrequest=False,
                    connection=self.connection,
                    proxyhave=self.proxy_have)
                diff = -1  # placeholder; timing is not measured here
                if DEBUG:
                    print >> sys.stderr, 'Downloader: _request_more: next()2 returned', interest, 'took %.5f' % diff
                if interest is not None:
                    if self.helper and self.downloader.storage.inactive_requests[
                            interest] is None:
                        self.connection.send_have(interest)
                        break
                    if self.helper and self.downloader.storage.inactive_requests[
                            interest] == []:
                        break
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest

        if (not self.downloader.endgamemode
                and self.downloader.storage.is_endgame()
                and not (self.downloader.picker.videostatus
                         and self.downloader.picker.videostatus.live_streaming)):
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke=False):
        if (self.downloader.paused
                or self.connection.connection.is_coordinator_con()):
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: paused', self.downloader.paused, 'or is_coordinator_con', self.connection.connection.is_coordinator_con()
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: returned'
            return
        want = [
            a for a in self.downloader.all_requests
            if self.have[a[0]] and a not in self.active_requests and
            (self.helper is None or self.connection.connection.is_helper_con()
             or not self.helper.is_ignored(a[0]))
        ]
        if not (self.active_requests or want):
            self.send_not_interested()
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: not interested'
            return
        if want:
            self.send_interested()
        if self.choked:
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: choked'
            return
        shuffle(want)
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
            if (self.helper is None
                    or self.connection.connection.is_helper_con()
                    or self.helper.reserve_piece(piece, self)):
                self.connection.send_request(piece, begin, length)
                self.downloader.chunk_requested(length)

    def got_invalidate(self, index):
        if DEBUG:
            log('downloader::got_invalidate: index', index)
        if not self.have[index]:
            return
        self.have[index] = False
        self.downloader.picker.lost_have(index)

    def got_have(self, index):
        if index == self.downloader.numpieces - 1:
            self.downloader.totalmeasure.update_rate(
                self.downloader.storage.total_length -
                (self.downloader.numpieces - 1) *
                self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length -
                                         (self.downloader.numpieces - 1) *
                                         self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(
                self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)
        self.raw_have[index] = True
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >> sys.stderr, 'Downloader::got_have: invalid piece: index', index, 'ip', self.connection.get_ip()
        if (self.downloader.picker.videostatus
                and self.downloader.picker.videostatus.live_streaming
                and not self.connection.supports_piece_invalidate()):
            i = self.downloader.picker.videostatus.live_piece_to_invalidate(
                index)
            if DEBUG:
                log('downloader::got_have: invalidate old piece: i', i, 'ip',
                    self.connection.get_ip())
            self.got_invalidate(i)
        if self.have[index]:
            return
        self.have[index] = True
        self.downloader.picker.got_have(index, self.connection)
        if DEBUG:
            print >> sys.stderr, '>>>debug: got have:', self.connection.get_ip(), 'piece', index, 'have', debug_format_have(self.have), 'choked', self.choked, 'interested', self.interested
        self.downloader.aggregate_and_send_haves()
        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                self.downloader.add_disconnected_seed(
                    self.connection.get_readable_id())
                self.connection.close()
                return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif (not self.downloader.paused
              and not self.downloader.picker.is_blocked(index)
              and self.downloader.storage.do_I_have_requests(index)):
            if not self.choked:
                if DEBUG:
                    log('downloader::got_have: not choked, request more')
                self._request_more()
            else:
                if DEBUG:
                    log('downloader::got_have: choked, send interested')
                self.send_interested()
        elif DEBUG:
            print >> sys.stderr, 'downloader::got_have: do not request more: paused', self.downloader.paused, 'is_blocked', self.downloader.picker.is_blocked(index), 'have_requests', self.downloader.storage.do_I_have_requests(index)

    def _check_interests(self):
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if self.have[i] and not self.downloader.picker.is_blocked(i) and (
                    self.downloader.endgamemode
                    or self.downloader.storage.do_I_have_requests(i)):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
        if self.downloader.picker.am_I_complete() and have.complete():
            if self.downloader.super_seeding:
                self.connection.send_bitfield(have.tostring())
            self.connection.try_send_pex()

            def auto_close():
                self.connection.close()
                self.downloader.add_disconnected_seed(
                    self.connection.get_readable_id())

            self.downloader.scheduler(auto_close, REPEX_LISTEN_TIME)
            return
        if DEBUGBF:
            st = time.time()
        self.raw_have = have
        if have.complete():
            self.downloader.picker.got_seed()
        else:
            activerangeiterators = []
            if (self.downloader.picker.videostatus
                    and self.downloader.picker.videostatus.live_streaming
                    and self.downloader.picker.videostatus.get_live_startpos() is None):
                activeranges = have.get_active_ranges()
                if len(activeranges) == 0:
                    activerangeiterators = [
                        self.downloader.picker.get_valid_range_iterator()
                    ]
                else:
                    for s, e in activeranges:
                        activerangeiterators.append(xrange(s, e + 1))

            else:
                activerangeiterators = [
                    self.downloader.picker.get_valid_range_iterator(
                        skip_filter=True)
                ]
            if DEBUGBF:
                print >> sys.stderr, 'Downloader: got_have_field: live: Filtering bitfield', activerangeiterators
                print >> sys.stderr, 'Downloader: got_have_field: live or normal filter'
            validhave = Bitfield(self.downloader.numpieces)
            for iterator in activerangeiterators:
                for i in iterator:
                    if have[i]:
                        validhave[i] = True
                        self.downloader.picker.got_have(i, self.connection)

            if DEBUG:
                print >> sys.stderr, '>>>debug: got bitfield:', self.connection.get_ip(), 'have', debug_format_have(have)
                print >> sys.stderr, '>>>debug: got bitfield:', self.connection.get_ip(), 'validhave', debug_format_have(validhave)
            self.downloader.aggregate_and_send_haves()
            have = validhave
        if DEBUGBF:
            et = time.time()
            diff = et - st
            print >> sys.stderr, 'Download: got_have_field: took', diff
        self.have = have
        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break

            return
        self._check_interests()

    def reset_have(self):
        if DEBUG:
            print >> sys.stderr, 'Downloader::reset_have: before self.have:', self.have.toboollist()
        validhave = Bitfield(self.downloader.numpieces)
        for i in self.downloader.picker.get_valid_range_iterator():
            if self.raw_have[i]:
                validhave[i] = True

        self.have = validhave
        if DEBUG:
            print >> sys.stderr, 'Downloader::reset_have: after self.have:', self.have.toboollist()

    def get_rate(self):
        return self.measure.get_rate()

    def get_short_term_rate(self):
        return self.short_term_measure.get_rate()

    def is_snubbed(self, just_check=False):
        if (not self.choked and not just_check and self.app_mode != 'node'
                and clock() - self.last2 > self.downloader.snub_time
                and not self.connection.connection.is_helper_con()
                and not self.connection.connection.is_coordinator_con()):
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)

            self.got_choke()
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()
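# --- Illustrative sketch (not part of the original module) ---
# Minimal, self-contained sketch of the snub-detection idea used by
# is_snubbed() above: a peer counts as "snubbed" when no piece data has
# arrived from it for longer than snub_time seconds. The names here
# (PeerActivity, got_data) are hypothetical and exist only for this sketch.
import time


class PeerActivity(object):

    def __init__(self, snub_time=30.0):
        self.snub_time = snub_time
        self.last = -1000.0              # same "never heard from" sentinel as above

    def got_data(self):
        self.last = time.time()          # corresponds to self.last = clock() in got_piece()

    def is_snubbed(self):
        return time.time() - self.last > self.snub_time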
class VideoSourceTransporter:

    def __init__(self, stream, bt1download, authconfig, restartstatefilename):
        self.stream = stream
        self.bt1download = bt1download
        self.restartstatefilename = restartstatefilename
        self.exiting = False
        self.ratemeasure = Measure(30)
        self.storagewrapper = bt1download.storagewrapper
        self.picker = bt1download.picker
        self.rawserver = bt1download.rawserver
        self.connecter = bt1download.connecter
        self.fileselector = bt1download.fileselector
        self.videostatus = bt1download.videostatus
        self.buffer = []
        self.buflen = 0
        self.bufferlock = RLock()
        self.handling_pieces = False
        self.readlastseqnum = False
        if authconfig.get_method() == LIVE_AUTHMETHOD_ECDSA:
            self.authenticator = ECDSAAuthenticator(self.videostatus.piecelen, self.bt1download.len_pieces, keypair=authconfig.get_keypair())
        elif authconfig.get_method() == LIVE_AUTHMETHOD_RSA:
            self.authenticator = RSAAuthenticator(self.videostatus.piecelen, self.bt1download.len_pieces, keypair=authconfig.get_keypair())
        else:
            self.authenticator = NullAuthenticator(self.videostatus.piecelen, self.bt1download.len_pieces)

    def start(self):
        self.input_thread_handle = SimpleThread(self.input_thread)
        self.input_thread_handle.start()

    def _read(self, length):
        return self.stream.read(length)

    def input_thread(self):
        log('stream: started input thread')
        contentbs = self.authenticator.get_content_blocksize()
        try:
            if DEBUG_TRANSPORT:
                f = open('/tmp/stream.dat', 'wb')
            while not self.exiting:
                data = self._read(contentbs)
                if not data:
                    break
                if DEBUG:
                    log('VideoSource: read %d bytes' % len(data))
                if DEBUG_TRANSPORT:
                    log('VideoSource::input_thread: read chunk: want', contentbs, 'len', len(data))
                    f.write(data)
                self.ratemeasure.update_rate(len(data))
                self.process_data(data)

            if DEBUG_TRANSPORT:
                f.close()
        except IOError:
            if DEBUG:
                print_exc()

        self.shutdown()

    def shutdown(self):
        if DEBUG:
            log('VideoSource::shutdown: ---')
        if self.exiting:
            return
        self.exiting = True
        try:
            self.stream.close()
        except IOError:
            pass

    def process_data(self, data):
        vs = self.videostatus
        self.bufferlock.acquire()
        try:
            self.buffer.append(data)
            self.buflen += len(data)
            if not self.handling_pieces:
                self.rawserver.add_task(self.create_pieces)
                self.handling_pieces = True
        finally:
            self.bufferlock.release()

    def create_pieces(self):

        def handle_one_piece():
            vs = self.videostatus
            contentbs = self.authenticator.get_content_blocksize()
            if self.buflen < contentbs:
                return False
            if len(self.buffer[0]) == contentbs:
                content = self.buffer[0]
                del self.buffer[0]
            else:
                if DEBUG:
                    print >> sys.stderr, 'VideoSource: JOIN ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^'
                buffer = ''.join(self.buffer)
                self.buffer = [buffer[contentbs:]]
                content = buffer[:contentbs]
            self.buflen -= contentbs
            datas = self.authenticator.sign(content)
            piece = ''.join(datas)
            self.add_piece(vs.playback_pos, piece)
            self.del_piece(vs.live_piece_to_invalidate())
            self.readlastseqnum = True
            if self.restartstatefilename is not None:
                try:
                    lastseqnum = self.authenticator.get_source_seqnum()
                    f = open(self.restartstatefilename, 'wb')
                    f.write(str(lastseqnum))
                    f.close()
                except:
                    print_exc()

            vs.inc_playback_pos()
            return True

        if not self.readlastseqnum and self.restartstatefilename is not None and os.path.isfile(self.restartstatefilename):
            self.readlastseqnum = True
            try:
                f = open(self.restartstatefilename, 'rb')
                data = f.read()
                f.close()
                lastseqnum = int(data)
                log('stream: restarting stream from piece', lastseqnum)
                lastpiecenum = lastseqnum % self.authenticator.get_npieces()
                self.authenticator.set_source_seqnum(lastseqnum)
                self.videostatus.set_live_startpos(lastpiecenum)
            except:
                print_exc()

        self.bufferlock.acquire()
        try:
            while handle_one_piece():
                pass

            self.handling_pieces = False
        finally:
            self.bufferlock.release()

    def add_piece(self, index, piece):
        if DEBUG:
            log('VideoSource::add_piece: index', index)
        if globalConfig.get_value('live_source_show_pieces', False):
            log('stream: created piece', index, 'speed %.2f KiB/s' % (self.ratemeasure.get_rate_noupdate() / 1024))
        chunk_size = self.storagewrapper.request_size
        length = min(len(piece), self.storagewrapper._piecelen(index))
        x = 0
        while x < length:
            self.storagewrapper.new_request(index)
            self.storagewrapper.piece_came_in(index, x, [], piece[x:x + chunk_size])
            x += chunk_size

        self.picker.complete(index)
        self.connecter.got_piece(index)

    def del_piece(self, piece):
        if DEBUG:
            log('VideoSource::del_piece:', piece)
        self.picker.downloader.live_invalidate(piece)
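# --- Illustrative sketch (not part of the original module) ---
# Rough sketch of the buffering strategy in create_pieces() above: incoming
# chunks are appended to a list and fixed-size content blocks are peeled off
# the front, joining the list only when the first chunk is not already exactly
# one block long. Function and variable names are hypothetical.
def iter_blocks(chunks, blocksize):
    buf = []                             # pending byte strings, oldest first
    buflen = 0
    for chunk in chunks:
        buf.append(chunk)
        buflen += len(chunk)
        while buflen >= blocksize:
            if len(buf[0]) == blocksize:
                block = buf.pop(0)       # fast path: no join/copy needed
            else:
                joined = ''.join(buf)
                block, buf = joined[:blocksize], [joined[blocksize:]]
            buflen -= blocksize
            yield block

# e.g. list(iter_blocks(['ab', 'cdef', 'g'], 3)) == ['abc', 'def']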
class SingleDownload(SingleDownloadHelperInterface):

    def __init__(self, downloader, url):
        SingleDownloadHelperInterface.__init__(self)
        self.downloader = downloader
        self.baseurl = url
        try:
            self.scheme, self.netloc, path, pars, query, fragment = urlparse(url)
        except:
            self.downloader.errorfunc('cannot parse http seed address: ' + url)
            return

        if self.scheme != 'http':
            self.downloader.errorfunc('http seed url not http: ' + url)
            return
        self.proxyhost = find_proxy(url)
        try:
            if self.proxyhost is None:
                self.connection = HTTPConnection(self.netloc)
            else:
                self.connection = HTTPConnection(self.proxyhost)
        except:
            self.downloader.errorfunc('cannot connect to http seed: ' + url)
            return

        self.seedurl = path
        if pars:
            self.seedurl += ';' + pars
        self.seedurl += '?'
        if query:
            self.seedurl += query + '&'
        self.seedurl += 'info_hash=' + urllib.quote(self.downloader.infohash)
        self.measure = Measure(downloader.max_rate_period)
        self.index = None
        self.url = ''
        self.requests = []
        self.request_size = 0
        self.endflag = False
        self.error = None
        self.retry_period = 30
        self._retry_period = None
        self.errorcount = 0
        self.goodseed = False
        self.active = False
        self.cancelled = False
        self.resched(randint(2, 10))

    def resched(self, len = None):
        if len is None:
            len = self.retry_period
        if self.errorcount > 3:
            len = len * (self.errorcount - 2)
        self.downloader.rawserver.add_task(self.download, len)

    def _want(self, index):
        if self.endflag:
            return self.downloader.storage.do_I_have_requests(index)
        else:
            return self.downloader.storage.is_unstarted(index)

    def download(self):
        if DEBUG:
            print 'http-sdownload: download()'
        if self.is_frozen_by_helper():
            if DEBUG:
                print 'http-sdownload: blocked, rescheduling'
            self.resched(1)
            return
        self.cancelled = False
        if self.downloader.picker.am_I_complete():
            self.downloader.downloads.remove(self)
            return
        self.index = self.downloader.picker.next(haveall, self._want, self)
        if self.index is None and self.frozen_by_helper:
            self.resched(0.01)
            return
        if self.index is None and not self.endflag and not self.downloader.peerdownloader.has_downloaders():
            self.endflag = True
            self.index = self.downloader.picker.next(haveall, self._want, self)
        if self.index is None:
            self.endflag = True
            self.resched()
        else:
            self.url = self.seedurl + '&piece=' + str(self.index)
            self._get_requests()
            if self.request_size < self.downloader.storage._piecelen(self.index):
                self.url += '&ranges=' + self._request_ranges()
            rq = Thread(target=self._request)
            rq.setName('HoffmanHTTPDownloader' + rq.getName())
            rq.setDaemon(True)
            rq.start()
            self.active = True

    def _request(self):
        # local imports: these appear to pre-load the codecs that httplib may
        # need for hostname handling (idna/punycode), which matters in frozen builds
        import encodings.ascii
        import encodings.punycode
        import encodings.idna
        self.error = None
        self.received_data = None
        try:
            self.connection.request('GET', self.url, None, {'User-Agent': VERSION})
            r = self.connection.getresponse()
            self.connection_status = r.status
            self.received_data = r.read()
        except Exception as e:
            log_exc()
            self.error = 'error accessing http seed: ' + str(e)
            try:
                self.connection.close()
            except:
                pass

            try:
                self.connection = HTTPConnection(self.netloc)
            except:
                self.connection = None

        self.downloader.rawserver.add_task(self.request_finished)

    def request_finished(self):
        self.active = False
        if self.error is not None:
            if self.goodseed:
                self.downloader.errorfunc(self.error)
            self.errorcount += 1
        if self.received_data:
            self.errorcount = 0
            if not self._got_data():
                self.received_data = None
        if not self.received_data:
            self._release_requests()
            self.downloader.peerdownloader.piece_flunked(self.index)
        if self._retry_period:
            self.resched(self._retry_period)
            self._retry_period = None
            return
        self.resched()

    def _got_data(self):
        if self.connection_status == 503:
            try:
                self.retry_period = max(int(self.received_data), 5)
            except:
                pass

            return False
        if self.connection_status != 200:
            self.errorcount += 1
            return False
        self._retry_period = 1
        if len(self.received_data) != self.request_size:
            if self.goodseed:
                self.downloader.errorfunc('corrupt data from http seed - redownloading')
            return False
        self.measure.update_rate(len(self.received_data))
        self.downloader.measurefunc(len(self.received_data))
        if self.cancelled:
            return False
        if not self._fulfill_requests():
            return False
        if not self.goodseed:
            self.goodseed = True
            self.downloader.seedsfound += 1
        if self.downloader.storage.do_I_have(self.index):
            self.downloader.picker.complete(self.index)
            self.downloader.peerdownloader.check_complete(self.index)
            self.downloader.gotpiecefunc(self.index)
        return True

    def _get_requests(self):
        self.requests = []
        self.request_size = 0L
        while self.downloader.storage.do_I_have_requests(self.index):
            r = self.downloader.storage.new_request(self.index)
            self.requests.append(r)
            self.request_size += r[1]

        self.requests.sort()

    def _fulfill_requests(self):
        start = 0L
        success = True
        while self.requests:
            begin, length = self.requests.pop(0)
            if not self.downloader.storage.piece_came_in(self.index, begin, [], self.received_data[start:start + length], length):
                success = False
                break
            start += length

        return success

    def _release_requests(self):
        for begin, length in self.requests:
            self.downloader.storage.request_lost(self.index, begin, length)

        self.requests = []

    def _request_ranges(self):
        s = ''
        begin, length = self.requests[0]
        for begin1, length1 in self.requests[1:]:
            if begin + length == begin1:
                length += length1
                continue
            else:
                if s:
                    s += ','
                s += str(begin) + '-' + str(begin + length - 1)
                begin, length = begin1, length1

        if s:
            s += ','
        s += str(begin) + '-' + str(begin + length - 1)
        return s

    def helper_forces_unchoke(self):
        pass

    def helper_set_freezing(self, val):
        self.frozen_by_helper = val
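# --- Illustrative sketch (not part of the original module) ---
# Standalone version of the range coalescing performed by _request_ranges()
# above: sorted (begin, length) requests that touch end-to-end are merged into
# "start-end" byte ranges before being appended to the HTTP seed URL. The
# function name is hypothetical.
def coalesce_ranges(requests):
    if not requests:
        return ''
    parts = []
    begin, length = requests[0]
    for begin1, length1 in requests[1:]:
        if begin + length == begin1:     # contiguous: extend the current range
            length += length1
        else:                            # gap: flush the current range, start a new one
            parts.append('%d-%d' % (begin, begin + length - 1))
            begin, length = begin1, length1
    parts.append('%d-%d' % (begin, begin + length - 1))
    return ','.join(parts)

# e.g. coalesce_ranges([(0, 16384), (16384, 16384), (49152, 16384)])
#   -> '0-32767,49152-65535'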
class Downloader:

    def __init__(self, url, dlhash, rawserver, failed_func, max_errors = 10):
        if DEBUG:
            log('dd-downloader::__init__: url', url, 'hash', binascii.hexlify(dlhash))
        self.url = url
        self.rawserver = rawserver
        self.failed_func = failed_func
        self.final_url = None
        self.storage = None
        self.lock = Lock()
        self.measure = Measure(10.0)
        self.errors = 0
        self.max_errors = max_errors
        self.seek = None
        self.shutdown_flag = False
        self.running = False
        self.log_prefix = 'dd-downloader::' + binascii.hexlify(dlhash) + ':'

    def predownload(self, callback, timeout = 10):
        if self.lock.locked():
            # another transfer already holds the lock; unlike start(), there is
            # no position to remember here, so just bail out
            return
        t = Thread(target=self._predownload, args=[callback, timeout])
        t.setName('dd-downloader-predownload-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _predownload(self, callback, timeout):
        self.lock.acquire()
        self.running = True
        try:
            if DEBUG:
                log(self.log_prefix + '_predownload: url', self.url, 'timeout', timeout)
            stream = urlOpenTimeout(self.url, timeout=timeout)
            content_type = stream.info().getheader('Content-Type')
            content_length = stream.info().getheader('Content-Length')
            if DEBUG:
                log(self.log_prefix + '_predownload: request finished: content_type', content_type, 'content_length', content_length)
            data = ''
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: got shutdown flag while reading: url', self.url)
                    break
                buf = stream.read(524288)
                if not buf:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: eof: url', self.url)
                    break
                data += buf
                if DEBUG:
                    log(self.log_prefix + '_predownload: read chunk: url', self.url, 'read_len', len(data))

            stream.close()
            if not self.shutdown_flag:
                if DEBUG:
                    log(self.log_prefix + '_predownload: finished, run callback: url', self.url, 'content_type', content_type, 'content_length', content_length, 'data_len', len(data))
                callback(content_type, data)
        except Exception as e:
            if DEBUG:
                print_exc()
            self.failed_func(e)
        finally:
            self.running = False
            self.lock.release()

    def init(self, callback = None, timeout = 10):
        if callback is None:
            return self._init()
        t = Thread(target=self._init, args=[callback, timeout])
        t.setName('dd-downloader-init-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _init(self, callback = None, timeout = 10):
        try:
            scheme, host, path = self.parse_url(self.url)
            redirects = 0
            connection = HTTPConnection(host)
            while True:
                connection.request('HEAD', path, None, {'Host': host,
                 'User-Agent': USER_AGENT})
                r = connection.getresponse()
                if r.status == 200:
                    break
                elif r.status == 301 or r.status == 302:
                    redirect_url = r.getheader('Location', None)
                    if DEBUG:
                        log(self.log_prefix + 'init: got redirect: url', self.url, 'redirect', redirect_url)
                    scheme, rhost, path = self.parse_url(redirect_url)
                    redirects += 1
                    if redirects > MAX_REDIRECTS:
                        raise Exception('Too many redirects')
                    if rhost != host:
                        connection.close()
                        connection = HTTPConnection(rhost)
                        host = rhost
                else:
                    raise Exception('Bad http status: ' + str(r.status))

            mime = r.getheader('Content-Type', None)
            length = r.getheader('Content-Length', None)
            connection.close()
            if length is None:
                raise Exception('No content-length in response')
            if mime is None:
                raise Exception('No content-type in response')
            length = int(length)
            self.final_url = scheme + '://' + host + path
            if DEBUG:
                log(self.log_prefix + 'init: got response: length', length, 'mime', mime, 'final_url', self.final_url)
            if callback is None:
                return (length, mime)
            callback(length, mime)
        except Exception as e:
            if DEBUG:
                print_exc()
            if callback is None:
                raise e
            else:
                self.failed_func(e)

    def set_storage(self, storage):
        self.storage = storage

    def start(self, pos = 0):
        if self.storage is None:
            raise Exception('Storage is not set')
        if self.final_url is None:
            raise Exception('Final url is not set')
        if self.lock.locked():
            self.seek = pos
            return
        t = Thread(target=self._request, args=[pos])
        t.setName('dd-downloader-' + t.getName())
        t.setDaemon(True)
        t.start()

    def _request(self, pos):
        self.lock.acquire()
        self.running = True
        try:
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(self.log_prefix + '_request: got shutdown flag before read: url', self.url)
                    break
                pos = self.storage.get_unfinished_pos(pos)
                if pos is None:
                    if DEBUG:
                        log(self.log_prefix + '_request: no unfinished pos, break: url', self.url)
                    break
                self._read(pos)
                if self.seek is not None:
                    pos = self.seek
                    self.seek = None
                    continue
                break

        except ReadErrorException:
            if DEBUG:
                log(self.log_prefix + '_request: read error, retry immediately: url', self.url, 'pos', pos)
            start_lambda = lambda : self.start(pos)
            self.rawserver.add_task(start_lambda, 0.1)
        except FatalErrorException as e:
            if DEBUG:
                log(self.log_prefix + '_request: fatal error, exit: url', self.url, 'pos', pos)
            self.failed_func(e)
        except Exception as e:
            self.errors += 1
            if DEBUG:
                print_exc()
            if self.errors > self.max_errors:
                if DEBUG:
                    log(self.log_prefix + '_request: non-fatal error, max errors reached: errors', self.errors, 'max', self.max_errors)
                self.failed_func(e)
            else:
                retry_in = 5 * (1 + self.errors / 10)
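                # with Python 2 integer division (implied by the print-statement
                # syntax used in this module) this schedules retries after 5 s
                # for errors 1-9 and 10 s for the 10th; one more error exceeds
                # the default max_errors of 10 and goes to failed_func instead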
                if DEBUG:
                    log(self.log_prefix + '_request: non-fatal error: url', self.url, 'pos', pos, 'errors', self.errors, 'retry_in', retry_in)
                start_lambda = lambda : self.start(pos)
                self.rawserver.add_task(start_lambda, retry_in)
        finally:
            self.running = False
            self.lock.release()

    def is_running(self):
        return self.running

    def _read(self, pos):
        scheme, host, path = self.parse_url(self.final_url)
        request_range = str(pos) + '-'
        connection = HTTPConnection(host)
        connection.request('GET', path, None, {'Host': host,
         'User-Agent': USER_AGENT,
         'Range': 'bytes=%s' % request_range})
        r = connection.getresponse()
        if DEBUG:
            log(self.log_prefix + '_read: url', self.url, 'final', self.final_url, 'pos', pos, 'status', r.status)
        if r.status != 200 and r.status != 206:
            if DEBUG:
                log(self.log_prefix + '_read: bad http status: url', self.url, 'status', r.status)
            connection.close()
            if 400 <= r.status < 500:
                raise FatalErrorException('http status ' + str(r.status))
            else:
                raise NonFatalErrorException('http status ' + str(r.status))
        request_size = r.getheader('Content-Length', None)
        if request_size is None:
            if DEBUG:
                log(self.log_prefix + '_read: missing content length: url', self.url)
            connection.close()
            return
        try:
            request_size = int(request_size)
        except:
            if DEBUG:
                print_exc()
            connection.close()
            return

        if DEBUG:
            log(self.log_prefix + '_read: url', self.url, 'request_range', request_range, 'request_size', request_size)
        total_read = 0
        read_size = 16384
        while True:
            chunk = r.read(read_size)
            if not chunk:
                if total_read != request_size:
                    if DEBUG:
                        log(self.log_prefix + '_read: no data, raise read error: url', self.url, 'pos', pos, 'total_read', total_read, 'request_size', request_size)
                    raise ReadErrorException()
                if DEBUG:
                    log(self.log_prefix + '_read: no data, exit: url', self.url, 'pos', pos)
                break
            chunk_len = len(chunk)
            total_read += chunk_len
            if DEBUG:
                log('>>>> ' + self.log_prefix + '_read: got chunk: pos', pos, 'chunk_len', chunk_len, 'total_read', total_read)
            self.measure.update_rate(chunk_len)
            if chunk_len != read_size and total_read != request_size:
                if DEBUG:
                    log(self.log_prefix + '_read: bad data len, raise read error: url', self.url, 'pos', pos, 'total_read', total_read, 'request_size', request_size, 'chunk_len', chunk_len, 'read_size', read_size)
                raise ReadErrorException()
            if self.shutdown_flag:
                if DEBUG:
                    log(self.log_prefix + '_read: got shutdown flag on read: url', self.url)
                break
            try:
                t = time.time()
                updated_len = self.storage.write(pos, chunk)
                if DEBUG:
                    log('%%%%' + self.log_prefix + '_read: write to storage: pos', pos, 'len', chunk_len, 'time', time.time() - t)
                if updated_len == 0:
                    if DEBUG:
                        log(self.log_prefix + '_read: data exists in storage: url', self.url, 'pos', pos, 'len', chunk_len, 'seek_flag', self.seek)
                    if self.seek is None:
                        self.seek = self.storage.get_unfinished_pos(pos)
                        if self.seek is None:
                            if DEBUG:
                                log(self.log_prefix + '_read: no unfinished data, exit: url', self.url, 'pos', pos)
                            break
            except:
                if DEBUG:
                    print_exc()
                    log(self.log_prefix + '_read: cannot write, exit: url', self.url)
                raise FatalErrorException('cannot write to storage')

            if self.seek is not None:
                log(self.log_prefix + '_read: got seek: url', self.url, 'seek', self.seek)
                break
            pos += chunk_len

        connection.close()

    def parse_url(self, url):
        scheme, host, path, pars, query, fragment = urlparse(url)
        if scheme != 'http':
            raise ValueError('Unsupported scheme ' + scheme)
        if len(host) == 0:
            raise ValueError('Empty host')
        if len(path) == 0:
            path = '/'
        if len(pars) > 0:
            path += ';' + pars
        if len(query) > 0:
            path += '?' + query
        if len(fragment) > 0:
            path += '#' + fragment
        return (scheme, host, path)

    def shutdown(self):
        if DEBUG:
            log(self.log_prefix + 'shutdown: ---')
        self.shutdown_flag = True
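# --- Illustrative sketch (not part of the original module) ---
# Minimal sketch of how _read() above resumes a partial download and classifies
# HTTP status codes: a 'Range: bytes=POS-' header asks the server to start at
# the first unfinished byte, 200/206 responses are accepted, 4xx is treated as
# fatal and everything else as retryable. Names are hypothetical.
def resume_headers(host, pos, user_agent='illustrative-agent'):
    return {'Host': host,
            'User-Agent': user_agent,
            'Range': 'bytes=%d-' % pos}

def classify_status(status):
    if status in (200, 206):
        return 'ok'
    if 400 <= status < 500:
        return 'fatal'                   # corresponds to FatalErrorException in _read()
    return 'retry'                       # corresponds to NonFatalErrorException

# e.g. resume_headers('example.com', 1048576)['Range'] == 'bytes=1048576-'
#      classify_status(503) == 'retry'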
class Upload:
    def __init__(self, connection, ratelimiter, totalup, choker, storage,
                 picker, config):
        self.connection = connection
        self.ratelimiter = ratelimiter
        self.totalup = totalup
        self.choker = choker
        self.storage = storage
        self.picker = picker
        self.config = config
        self.max_slice_length = config['max_slice_length']
        self.choked = True
        self.cleared = True
        self.interested = False
        self.super_seeding = False
        self.buffer = []
        self.measure = Measure(config['max_rate_period'],
                               config['upload_rate_fudge'])
        self.was_ever_interested = False
        if storage.get_amount_left() == 0:
            if choker.super_seed:
                self.super_seeding = True
                self.seed_have_list = []
                self.skipped_count = 0
            elif config['breakup_seed_bitfield']:
                bitfield, msgs = storage.get_have_list_cloaked()
                connection.send_bitfield(bitfield)
                for have in msgs:
                    connection.send_have(have)

            else:
                connection.send_bitfield(storage.get_have_list())
        elif storage.do_I_have_anything():
            connection.send_bitfield(storage.get_have_list())
        self.piecedl = None
        self.piecebuf = None
        self.hashlist = []

    def send_haves(self, connection):
        # appears to be a debugging stub: the aggregated have list is only
        # printed, nothing is sent to the peer
        have_list = self.storage.get_have_list()
        print >> sys.stderr, 'Have list:', have_list

    def send_bitfield(self, connection):
        if self.storage.get_amount_left() == 0:
            if not self.super_seeding:
                if self.config['breakup_seed_bitfield']:
                    bitfield, msgs = self.storage.get_have_list_cloaked()
                    connection.send_bitfield(bitfield)
                    for have in msgs:
                        connection.send_have(have)

                else:
                    connection.send_bitfield(self.storage.get_have_list())
        elif self.storage.do_I_have_anything():
            connection.send_bitfield(self.storage.get_have_list())

    def got_not_interested(self):
        if self.interested:
            self.interested = False
            del self.buffer[:]
            self.piecedl = None
            if self.piecebuf:
                self.piecebuf.release()
            self.piecebuf = None
            self.choker.not_interested(self.connection)

    def got_interested(self):
        if not self.interested:
            self.interested = True
            self.was_ever_interested = True
            self.choker.interested(self.connection)

    def get_upload_chunk(self):
        if self.choked or not self.buffer:
            return
        index, begin, length = self.buffer.pop(0)
        # note: the 'False and' guard hard-disables this buffered-read branch
        if False and self.config['buffer_reads']:
            if index != self.piecedl:
                if self.piecebuf:
                    self.piecebuf.release()
                self.piecedl = index
                self.piecebuf, self.hashlist = self.storage.get_piece(
                    index, 0, -1)
            try:
                piece = self.piecebuf[begin:begin + length]
            except:
                self.connection.close()
                return

            if begin == 0:
                hashlist = self.hashlist
            else:
                hashlist = []
        else:
            if self.piecebuf:
                self.piecebuf.release()
                self.piecedl = None
            piece, hashlist = self.storage.get_piece(index, begin, length)
            if piece is None:
                self.connection.close()
                return
        self.measure.update_rate(len(piece))
        self.totalup.update_rate(len(piece))
        self.connection.total_uploaded += length
        return (index, begin, hashlist, piece)

    def got_request(self, index, begin, length):
        if (self.super_seeding and index not in self.seed_have_list
                or not self.connection.connection.is_coordinator_con() and not self.interested
                or length > self.max_slice_length):
            self.connection.close()
            return
        if not self.cleared:
            self.buffer.append((index, begin, length))
        if not self.choked and self.connection.next_upload is None:
            self.ratelimiter.queue(self.connection)

    def got_cancel(self, index, begin, length):
        try:
            self.buffer.remove((index, begin, length))
        except ValueError:
            pass

    def choke(self):
        if not self.choked:
            if DEBUG:
                log('uploader::choke: ip', self.connection.get_ip(), 'port',
                    self.connection.get_port())
            self.choked = True
            self.connection.send_choke()
        self.piecedl = None
        if self.piecebuf:
            self.piecebuf.release()
            self.piecebuf = None

    def choke_sent(self):
        del self.buffer[:]
        self.cleared = True

    def unchoke(self):
        if self.choked:
            try:
                if DEBUG:
                    log('uploader::unchoke: ip', self.connection.get_ip(),
                        'port', self.connection.get_port())
                if self.connection.send_unchoke():
                    self.choked = False
                    self.cleared = False
            except:
                pass

    def disconnected(self):
        if self.piecebuf:
            self.piecebuf.release()
            self.piecebuf = None

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def has_queries(self):
        return not self.choked and self.buffer

    def get_rate(self):
        return self.measure.get_rate()
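# --- Illustrative sketch (not part of the original module) ---
# Stripped-down model of the per-peer request queue kept by the Upload class
# above: requests are queued FIFO, a cancel removes a matching entry, and a
# sent choke clears the whole queue. Hypothetical names, no networking.
class RequestQueue(object):

    def __init__(self):
        self.buffer = []

    def got_request(self, index, begin, length):
        self.buffer.append((index, begin, length))

    def got_cancel(self, index, begin, length):
        try:
            self.buffer.remove((index, begin, length))
        except ValueError:
            pass                         # cancel for a request we no longer hold

    def choke_sent(self):
        del self.buffer[:]               # peer was choked: drop all pending requests

    def next_request(self):
        return self.buffer.pop(0) if self.buffer else None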
class SingleDownload(SingleDownloadHelperInterface):

    def __init__(self, downloader, connection):
        SingleDownloadHelperInterface.__init__(self)
        self.downloader = downloader
        self.connection = connection
        self.choked = True
        self.interested = False
        self.active_requests = []
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        self.raw_have = Bitfield(downloader.numpieces)
        self.have = Bitfield(downloader.numpieces)
        self.last = -1000
        self.last2 = -1000
        self.example_interest = None
        self.backlog = 2
        self.ip = connection.get_ip()
        self.guard = BadDataGuard(self)
        self.app_mode = globalConfig.get_mode()
        self.white_list = None
        self.black_list = None
        if self.app_mode == 'node':
            source_node = globalConfig.get_value('source_node')
            support_nodes = globalConfig.get_value('support_nodes')
            if not globalConfig.get_value('allow_peers_download'):
                self.white_list = set()
                if source_node is not None and globalConfig.get_value('allow_source_download'):
                    self.white_list.add(source_node[0])
                if len(support_nodes) and globalConfig.get_value('allow_support_download'):
                    self.white_list.update([ addr[0] for addr in support_nodes ])
            else:
                self.black_list = set()
                if source_node is not None and not globalConfig.get_value('allow_source_download'):
                    self.black_list.add(source_node[0])
                if len(support_nodes) and not globalConfig.get_value('allow_support_download'):
                    self.black_list.update([ addr[0] for addr in support_nodes ])
                if len(self.black_list) == 0:
                    self.black_list = None
            if DEBUG:
                log('download::__init__: white_list', self.white_list, 'black_list', self.black_list)
        self.helper = downloader.picker.helper
        self.proxy_have = Bitfield(downloader.numpieces)
        self.short_term_measure = Measure(5)
        self.bad_performance_counter = 0

    def _backlog(self, just_unchoked):
        self.backlog = int(min(2 + int(4 * self.measure.get_rate() / self.downloader.chunksize), 2 * just_unchoked + self.downloader.queue_limit()))
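        # worked example (illustrative numbers): at 80 KiB/s (81920 B/s) with a
        # 16 KiB chunksize the first term is 2 + int(4 * 81920 / 16384) = 22,
        # so the backlog becomes min(22, 2 * just_unchoked + queue_limit())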
        if DEBUG:
            log('downloader::sd::_backlog: backlog', self.backlog, 'rate', self.measure.get_rate(), 'chunksize', self.downloader.chunksize, 'just_unchoked', just_unchoked, 'queue_limit', self.downloader.queue_limit())
        if self.backlog > 50:
            # max(50, backlog * 0.075) stays at 50 unless the raw backlog
            # exceeds roughly 667, so this effectively caps it at about 50
            self.backlog = int(max(50, self.backlog * 0.075))
            if DEBUG:
                log('downloader::sd::_backlog: fix backlog', self.backlog)
        return self.backlog

    def disconnected(self):
        self.downloader.lost_peer(self)
        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            self.active_requests = []
            return
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1

        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        ds = [ d for d in self.downloader.downloads if not d.choked ]
        shuffle(ds)
        for d in ds:
            d._request_more()

        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(l):
                        d.send_interested()
                        break

    def got_choke(self):
        if not self.choked:
            if DEBUG:
                log('downloader::got_choke: got choke: ip', self.connection.get_ip())
            self.choked = True
            self._letgo()
        elif DEBUG:
            log('downloader::got_choke: already choked: ip', self.connection.get_ip())

    def got_unchoke(self):
        if self.choked:
            if DEBUG:
                log('downloader::got_unchoke: got unchoke: ip', self.connection.get_ip(), 'interested', self.interested)
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke=True)
            self.last2 = clock()
        elif DEBUG:
            log('downloader::got_unchoke: already unchoked: ip', self.connection.get_ip())

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        if not self.interested:
            if DEBUG:
                log('downloader::send_interested: send interested: ip', self.connection.get_ip())
            self.interested = True
            self.connection.send_interested()
        elif DEBUG:
            log('downloader::send_interested: already interested: ip', self.connection.get_ip())

    def send_not_interested(self):
        if self.interested:
            if DEBUG:
                log('downloader::send_not_interested: send not interested: ip', self.connection.get_ip())
            self.interested = False
            self.connection.send_not_interested()
        elif DEBUG:
            log('downloader::send_not_interested: already not interested: ip', self.connection.get_ip())

    def got_piece(self, index, begin, hashlist, piece):
        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG:
                print >> sys.stderr, 'decreased bad_performance_counter to', self.bad_performance_counter
        length = len(piece)
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            self.downloader.discarded += length
            return False

        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG:
                print >> sys.stderr, 'Downloader: got_piece: removed one request from all_requests', len(self.downloader.all_requests), 'remaining'
        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
            self.downloader.piece_flunked(index)
            return False
        self.downloader.picker.got_piece(index, begin, length)
        if self.downloader.storage.do_I_have(index):
            self.downloader.picker.complete(index)
        if self.downloader.endgamemode:
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove((index, begin, length))
                            except ValueError:
                                continue

                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()

        self._request_more()
        self.downloader.check_complete(index)
        self.connection.total_downloaded += length
        return self.downloader.storage.do_I_have(index)

    def helper_forces_unchoke(self):
        self.choked = False

    def _request_more(self, new_unchoke = False, slowpieces = []):
        if self.helper is not None and self.is_frozen_by_helper():
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: blocked, returning'
            return
        if self.app_mode == 'node':
            ip = self.connection.get_ip()
            if DEBUG:
                log('download::_request_more: check ip', ip)
            if self.white_list is not None and ip not in self.white_list:
                if DEBUG:
                    log('download::_request_more: peer is not in the white list: ip', ip)
                return
            if self.black_list is not None and ip in self.black_list:
                if DEBUG:
                    log('download::_request_more: peer is in the black list: ip', ip)
                return
        if self.choked:
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: choked, returning'
            return
        if self.connection.connection.is_coordinator_con():
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: coordinator conn'
            return
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: endgame mode, returning'
            return
        if self.downloader.paused:
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: paused, returning'
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG:
                log('downloader::_request_more: more req than unchoke (active req: %d >= backlog: %d), download_rate=%d' % (len(self.active_requests), self._backlog(new_unchoke), self.downloader.download_rate))
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                if wait_period > 1.0:
                    if DEBUG:
                        print >> sys.stderr, 'Downloader: waiting for %f s to call _request_more again' % wait_period
                    self.downloader.scheduler(self._request_more, wait_period)
            if not (self.active_requests or self.backlog):
                if DEBUG:
                    print >> sys.stderr, 'Downloader::_request_more: queue out download'
                self.downloader.queued_out[self] = 1
            return
        lost_interests = []
        while len(self.active_requests) < self.backlog:
            interest = self.downloader.picker.next(self.have, self.downloader.storage.do_I_have_requests, self, self.downloader.too_many_partials(), self.connection.connection.is_helper_con(), slowpieces=slowpieces, connection=self.connection, proxyhave=self.proxy_have)
            diff = -1
            if DEBUG:
                print >> sys.stderr, 'Downloader: _request_more: next() returned', interest, 'took %.5f' % diff
            if interest is None:
                break
            if self.helper and self.downloader.storage.inactive_requests[interest] is None:
                self.connection.send_have(interest)
                break
            if self.helper and self.downloader.storage.inactive_requests[interest] == []:
                break
            self.example_interest = interest
            self.send_interested()
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                request = self.downloader.storage.new_request(interest)
                if request is None:
                    log('downloader::_request_more: new_request returned none: index', interest)
                    lost_interests.append(interest)
                    break
                begin, length = request
                if DEBUG:
                    log('downloader::_request_more: new_request', interest, begin, length, 'to', self.connection.connection.get_ip(), self.connection.connection.get_port())
                self.downloader.picker.requested(interest, begin, length)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)

        if not self.active_requests:
            self.send_not_interested()
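        # Some pieces ran out of requestable blocks above. Any other connection
        # that was only interested because of those pieces must either find a
        # new piece to be interested in or send NOT_INTERESTED.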
        if lost_interests:
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(d.example_interest):
                    continue
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue

                if DEBUG:
                    st = time.time()
                interest = self.downloader.picker.next(d.have, self.downloader.storage.do_I_have_requests, self, self.downloader.too_many_partials(), self.connection.connection.is_helper_con(), willrequest=False, connection=self.connection, proxyhave=self.proxy_have)
                if DEBUG:
                    diff = time.time() - st
                    print >> sys.stderr, 'Downloader: _request_more: next()2 returned', interest, 'took %.5f' % diff
                if interest is not None:
                    if self.helper and self.downloader.storage.inactive_requests[interest] is None:
                        self.connection.send_have(interest)
                        break
                    if self.helper and self.downloader.storage.inactive_requests[interest] == []:
                        break
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest

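        # Switch to endgame mode once the storage layer reports that only the
        # final outstanding requests remain; live streams never enter endgame.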
        if not self.downloader.endgamemode and self.downloader.storage.is_endgame() and not (self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming):
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke = False):
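        # Endgame counterpart of _request_more: (re)request any outstanding
        # block this peer has that we have not already requested from it,
        # shuffled and capped by the backlog.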
        if self.downloader.paused or self.connection.connection.is_coordinator_con():
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: paused', self.downloader.paused, 'or is_coordinator_con', self.connection.connection.is_coordinator_con()
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: returned'
            return
        want = [ a for a in self.downloader.all_requests
                 if self.have[a[0]] and a not in self.active_requests and
                 (self.helper is None or self.connection.connection.is_helper_con() or not self.helper.is_ignored(a[0])) ]
        if not (self.active_requests or want):
            self.send_not_interested()
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: not interested'
            return
        if want:
            self.send_interested()
        if self.choked:
            if DEBUG:
                print >> sys.stderr, 'Downloader: fix_download_endgame: choked'
            return
        shuffle(want)
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
            if self.helper is None or self.connection.connection.is_helper_con() or self.helper.reserve_piece(piece, self):
                self.connection.send_request(piece, begin, length)
                self.downloader.chunk_requested(length)

    def got_invalidate(self, index):
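        # The peer no longer has this piece (typically live streaming): forget
        # it in our have-map and tell the piece picker.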
        if DEBUG:
            log('downloader::got_invalidate: index', index)
        if not self.have[index]:
            return
        self.have[index] = False
        self.downloader.picker.lost_have(index)

    def got_have(self, index):
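        # A HAVE arrived. Update the peer/total rate measures first; the last
        # piece may be shorter than piece_length, so account for its exact size.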
        if index == self.downloader.numpieces - 1:
            self.downloader.totalmeasure.update_rate(self.downloader.storage.total_length - (self.downloader.numpieces - 1) * self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length - (self.downloader.numpieces - 1) * self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)
        self.raw_have[index] = True
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >> sys.stderr, 'Downloader::got_have: invalid piece: index', index, 'ip', self.connection.get_ip()
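        # Live streaming with a peer that cannot send explicit invalidates:
        # treat every new HAVE as implicitly invalidating an older piece and
        # drop the corresponding old piece here.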
        if self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming and not self.connection.supports_piece_invalidate():
            i = self.downloader.picker.videostatus.live_piece_to_invalidate(index)
            if DEBUG:
                log('downloader::got_have: invalidate old piece: i', i, 'ip', self.connection.get_ip())
            self.got_invalidate(i)
        if self.have[index]:
            return
        self.have[index] = True
        self.downloader.picker.got_have(index, self.connection)
        if DEBUG:
            print >> sys.stderr, '>>>debug: got have:', self.connection.get_ip(), 'piece', index, 'have', debug_format_have(self.have), 'choked', self.choked, 'interested', self.interested
        self.downloader.aggregate_and_send_haves()
        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
                self.connection.close()
                return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif not self.downloader.paused and not self.downloader.picker.is_blocked(index) and self.downloader.storage.do_I_have_requests(index):
            if not self.choked:
                if DEBUG:
                    log('downloader::got_have: not choked, request more')
                self._request_more()
            else:
                if DEBUG:
                    log('downloader::got_have: choked, send interested')
                self.send_interested()
        elif DEBUG:
            print >> sys.stderr, 'downloader::got_have: do not request more: paused', self.downloader.paused, 'is_blocked', self.downloader.picker.is_blocked(index), 'have_requests', self.downloader.storage.do_I_have_requests(index)

    def _check_interests(self):
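        # Declare interest if this peer has at least one unblocked piece we can
        # still request blocks for (in endgame mode, any unblocked piece will do).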
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if self.have[i] and not self.downloader.picker.is_blocked(i) and (self.downloader.endgamemode or self.downloader.storage.do_I_have_requests(i)):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
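        # Both sides are seeds: nothing to trade. Optionally advertise our
        # bitfield (super-seeding), try a PEX exchange, then close the
        # connection after REPEX_LISTEN_TIME and remember it as a disconnected seed.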
        if self.downloader.picker.am_I_complete() and have.complete():
            if self.downloader.super_seeding:
                self.connection.send_bitfield(have.tostring())
            self.connection.try_send_pex()

            def auto_close():
                self.connection.close()
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())

            self.downloader.scheduler(auto_close, REPEX_LISTEN_TIME)
            return
        if DEBUGBF:
            st = time.time()
        self.raw_have = have
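        # For an incomplete peer, filter the raw bitfield down to the pieces the
        # picker currently considers valid (for live streams without a start
        # position yet, the peer's own active ranges) and register only those
        # HAVEs with the piece picker.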
        if have.complete():
            self.downloader.picker.got_seed()
        else:
            activerangeiterators = []
            if self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming and self.downloader.picker.videostatus.get_live_startpos() is None:
                activeranges = have.get_active_ranges()
                if len(activeranges) == 0:
                    activerangeiterators = [self.downloader.picker.get_valid_range_iterator()]
                else:
                    for s, e in activeranges:
                        activerangeiterators.append(xrange(s, e + 1))

            else:
                activerangeiterators = [self.downloader.picker.get_valid_range_iterator(skip_filter=True)]
            if DEBUGBF:
                print >> sys.stderr, 'Downloader: got_have_field: filtering bitfield (live or normal filter)', activerangeiterators
            validhave = Bitfield(self.downloader.numpieces)
            for iterator in activerangeiterators:
                for i in iterator:
                    if have[i]:
                        validhave[i] = True
                        self.downloader.picker.got_have(i, self.connection)

            if DEBUG:
                print >> sys.stderr, '>>>debug: got bitfield:', self.connection.get_ip(), 'have', debug_format_have(have)
                print >> sys.stderr, '>>>debug: got bitfield:', self.connection.get_ip(), 'validhave', debug_format_have(validhave)
            self.downloader.aggregate_and_send_haves()
            have = validhave
        if DEBUGBF:
            et = time.time()
            diff = et - st
            print >> sys.stderr, 'Download: got_have_field: took', diff
        self.have = have
        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break

            return
        self._check_interests()

    def reset_have(self):
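        # Rebuild self.have from the unfiltered raw_have using the picker's
        # current valid range, presumably after that range has shifted (live).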
        if DEBUG:
            print >> sys.stderr, 'Downloader::reset_have: before self.have:', self.have.toboollist()
        validhave = Bitfield(self.downloader.numpieces)
        for i in self.downloader.picker.get_valid_range_iterator():
            if self.raw_have[i]:
                validhave[i] = True

        self.have = validhave
        if DEBUG:
            print >> sys.stderr, 'Downloader::reset_have: after self.have:', self.have.toboollist()

    def get_rate(self):
        return self.measure.get_rate()

    def get_short_term_rate(self):
        return self.short_term_measure.get_rate()

    def is_snubbed(self, just_check = False):
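        # A peer is snubbed when it has sent no data for snub_time seconds.
        # Unless just_check is set, a stalled unchoked peer (last2 timer expired)
        # also gets its outstanding requests cancelled and is treated as if it
        # had choked us.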
        if (not self.choked and not just_check and self.app_mode != 'node' and
                clock() - self.last2 > self.downloader.snub_time and
                not self.connection.connection.is_helper_con() and
                not self.connection.connection.is_coordinator_con()):
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)

            self.got_choke()
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()
class Upload:

    def __init__(self, connection, ratelimiter, totalup, choker, storage, picker, config):
        self.connection = connection
        self.ratelimiter = ratelimiter
        self.totalup = totalup
        self.choker = choker
        self.storage = storage
        self.picker = picker
        self.config = config
        self.max_slice_length = config['max_slice_length']
        self.choked = True
        self.cleared = True
        self.interested = False
        self.super_seeding = False
        self.buffer = []
        self.measure = Measure(config['max_rate_period'], config['upload_rate_fudge'])
        self.was_ever_interested = False
        if storage.get_amount_left() == 0:
            if choker.super_seed:
                self.super_seeding = True
                self.seed_have_list = []
                self.skipped_count = 0
            elif config['breakup_seed_bitfield']:
                bitfield, msgs = storage.get_have_list_cloaked()
                connection.send_bitfield(bitfield)
                for have in msgs:
                    connection.send_have(have)

            else:
                connection.send_bitfield(storage.get_have_list())
        elif storage.do_I_have_anything():
            connection.send_bitfield(storage.get_have_list())
        self.piecedl = None
        self.piecebuf = None
        self.hashlist = []

    def send_haves(self, connection):
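        # Debug helper: only logs the current have list, nothing is sent to the peer.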
        have_list = self.storage.get_have_list()
        print >> sys.stderr, 'Have list:', have_list

    def send_bitfield(self, connection):
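        # Advertise our pieces: when complete (and not super-seeding) send the
        # full bitfield, optionally broken up into a partial bitfield plus HAVEs;
        # when partially complete send a plain bitfield; otherwise send nothing.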
        if self.storage.get_amount_left() == 0:
            if not self.super_seeding:
                if self.config['breakup_seed_bitfield']:
                    bitfield, msgs = self.storage.get_have_list_cloaked()
                    connection.send_bitfield(bitfield)
                    for have in msgs:
                        connection.send_have(have)

                else:
                    connection.send_bitfield(self.storage.get_have_list())
        elif self.storage.do_I_have_anything():
            connection.send_bitfield(self.storage.get_have_list())

    def got_not_interested(self):
        if self.interested:
            self.interested = False
            del self.buffer[:]
            self.piecedl = None
            if self.piecebuf:
                self.piecebuf.release()
            self.piecebuf = None
            self.choker.not_interested(self.connection)

    def got_interested(self):
        if not self.interested:
            self.interested = True
            self.was_ever_interested = True
            self.choker.interested(self.connection)

    def get_upload_chunk(self):
        if self.choked or not self.buffer:
            return
        index, begin, length = self.buffer.pop(0)
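        # The buffered-read path below is disabled ('if False and ...'), so the
        # else branch that reads the block straight from storage is always taken.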
        if False and self.config['buffer_reads']:
            if index != self.piecedl:
                if self.piecebuf:
                    self.piecebuf.release()
                self.piecedl = index
                self.piecebuf, self.hashlist = self.storage.get_piece(index, 0, -1)
            try:
                piece = self.piecebuf[begin:begin + length]
            except:
                self.connection.close()
                return

            if begin == 0:
                hashlist = self.hashlist
            else:
                hashlist = []
        else:
            if self.piecebuf:
                self.piecebuf.release()
                self.piecedl = None
            piece, hashlist = self.storage.get_piece(index, begin, length)
            if piece is None:
                self.connection.close()
                return
        self.measure.update_rate(len(piece))
        self.totalup.update_rate(len(piece))
        self.connection.total_uploaded += length
        return (index,
         begin,
         hashlist,
         piece)

    def got_request(self, index, begin, length):
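        # Drop the connection on a protocol violation: a request for a piece we
        # have not announced while super-seeding, a request from a peer that is
        # not interested (coordinator connections are exempt), or an oversized slice.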
        if ((self.super_seeding and index not in self.seed_have_list) or
                (not self.connection.connection.is_coordinator_con() and not self.interested) or
                length > self.max_slice_length):
            self.connection.close()
            return
        if not self.cleared:
            self.buffer.append((index, begin, length))
        if not self.choked and self.connection.next_upload is None:
            self.ratelimiter.queue(self.connection)

    def got_cancel(self, index, begin, length):
        try:
            self.buffer.remove((index, begin, length))
        except ValueError:
            pass

    def choke(self):
        if not self.choked:
            if DEBUG:
                log('uploader::choke: ip', self.connection.get_ip(), 'port', self.connection.get_port())
            self.choked = True
            self.connection.send_choke()
        self.piecedl = None
        if self.piecebuf:
            self.piecebuf.release()
            self.piecebuf = None

    def choke_sent(self):
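        # Presumably invoked once the CHOKE message has actually gone out on the
        # wire; queued block requests are only discarded at that point.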
        del self.buffer[:]
        self.cleared = True

    def unchoke(self):
        if self.choked:
            try:
                if DEBUG:
                    log('uploader::unchoke: ip', self.connection.get_ip(), 'port', self.connection.get_port())
                if self.connection.send_unchoke():
                    self.choked = False
                    self.cleared = False
            except:
                pass

    def disconnected(self):
        if self.piecebuf:
            self.piecebuf.release()
            self.piecebuf = None

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def has_queries(self):
        return not self.choked and self.buffer

    def get_rate(self):
        return self.measure.get_rate()