class SingleDownload(SingleDownloadHelperInterface):  # _2fastbt
    """Per-peer download state machine.

    One instance exists per connected peer. It tracks choke/interest state,
    the peer's piece bitfield, outstanding block requests, and transfer-rate
    measurements, and it drives the piece picker / storage to decide what to
    request next. The "2fastbt" sections integrate with the proxy/helper
    download mechanism (coordinator vs. helper connections).
    """

    def __init__(self, downloader, connection):
        # 2fastbt_
        SingleDownloadHelperInterface.__init__(self)
        # _2fastbt
        self.downloader = downloader
        self.connection = connection
        # Peer starts out choking us and us not interested (BT protocol default).
        self.choked = True
        self.interested = False
        # Outstanding (index, begin, length) block requests sent to this peer.
        self.active_requests = []
        # Download-rate measurement for this peer, and the peer's own have-rate.
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        # Which pieces this peer claims to have.
        self.have = Bitfield(downloader.numpieces)
        # Timestamps of last received block (last) and last activity (last2);
        # -1000 means "long ago" so a fresh peer is not immediately snubbed.
        self.last = -1000
        self.last2 = -1000
        # A piece index this peer was last known to be interesting for.
        self.example_interest = None
        # Max number of outstanding requests; grows with measured rate.
        self.backlog = 2
        self.ip = connection.get_ip()
        # Guards against peers that send bad (hash-failing) data.
        self.guard = BadDataGuard(self)
        # 2fastbt_
        self.helper = downloader.picker.helper
        # _2fastbt

    def _backlog(self, just_unchoked):
        """Recompute and return the request backlog (pipeline depth).

        Scales with the measured download rate (in chunks), bounded by the
        downloader-wide queue limit; `just_unchoked` grants a small boost
        right after an unchoke.
        """
        self.backlog = int(
            min(
                2 + int(4 * self.measure.get_rate() / self.downloader.chunksize),
                (2 * just_unchoked) + self.downloader.queue_limit()))
        if self.backlog > 50:
            # Cap large backlogs; max(50, 0.075*backlog) keeps 50 until the
            # raw value exceeds ~667, after which it grows slowly.
            self.backlog = int(max(50, self.backlog * 0.075))
        return self.backlog

    def disconnected(self):
        """Tear down state when the peer connection is lost."""
        self.downloader.lost_peer(self)
        """ JD: obsoleted -- moved to picker.lost_peer
        if self.have.complete():
            self.downloader.picker.lost_seed()
        else:
            for i in xrange(len(self.have)):
                if self.have[i]:
                    self.downloader.picker.lost_have(i)
        """
        # Remember departed seeds during endgame so they can be re-added later.
        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(
                self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        """Release all outstanding requests and hand them to other peers."""
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            # In endgame every request is duplicated anyway; just drop ours.
            self.active_requests = []
            return
        # Tell storage the requested blocks are no longer in flight; collect
        # the distinct piece indices that were affected.
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1
        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        # Let unchoked peers (in random order) pick up the freed requests.
        ds = [d for d in self.downloader.downloads if not d.choked]
        shuffle(ds)
        for d in ds:
            d._request_more()
        # Wake up choked, uninterested peers that have one of the lost pieces.
        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(l):
                        d.send_interested()
                        break

    def got_choke(self):
        """Peer choked us: drop in-flight requests."""
        if not self.choked:
            self.choked = True
            self._letgo()

    def got_unchoke(self):
        """Peer unchoked us: start requesting if we are interested."""
        if self.choked:
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke=True)
            self.last2 = clock()

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        """Send INTERESTED once (idempotent)."""
        if not self.interested:
            self.interested = True
            self.connection.send_interested()

    def send_not_interested(self):
        """Send NOT_INTERESTED once (idempotent)."""
        if self.interested:
            self.interested = False
            self.connection.send_not_interested()

    def got_piece(self, index, begin, hashlist, piece):
        """ Returns True if the piece is complete. """
        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            # Block we never asked for (or already cancelled): count and drop.
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        self.downloader.measurefunc(length)
        # Hand the block to storage; on hash failure punish the peer.
        if not self.downloader.storage.piece_came_in(index, begin, hashlist,
                                                     piece, self.guard):
            self.downloader.piece_flunked(index)
            return False
        if self.downloader.storage.do_I_have(index):
            self.downloader.picker.complete(index)
        if self.downloader.endgamemode:
            # Cancel this block on every other peer that also requested it.
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove(
                                    (index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)
        # BarterCast counter
        self.connection.total_downloaded += length
        return self.downloader.storage.do_I_have(index)

    # 2fastbt_
    def helper_forces_unchoke(self):
        # Helper protocol can override the choke state locally.
        self.choked = False
    # _2fastbt

    def _request_more(self, new_unchoke=False, slowpieces=[]):
        """Fill the request pipeline up to the current backlog.

        Asks the picker for interesting pieces and sends block requests.
        NOTE(review): `slowpieces=[]` is a mutable default — it appears to be
        read-only here (only passed through to picker.next), but confirm no
        callee mutates it.
        """
        # 2fastbt_
        if DEBUG:
            print >> sys.stderr, "Downloader: _request_more()"
        if self.is_frozen_by_helper():
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: blocked, returning"
            return
        # _2fastbt
        if self.choked:
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: choked, returning"
            return
        # 2fastbt_
        # do not download from coordinator
        if self.connection.connection.is_coordinator_con():
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: coordinator conn"
            return
        # _2fastbt
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: endgame mode, returning"
            return
        if self.downloader.paused:
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: paused, returning"
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG:
                print >> sys.stderr, "Downloader: more req than unchoke (active req: %d >= backlog: %d)" % (
                    len(self.active_requests), self._backlog(new_unchoke))
            # Jelle: Schedule _request more to be called in some time. Otherwise requesting and receiving packages
            # may stop, if they arrive to quickly
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                if DEBUG:
                    print >> sys.stderr, "Downloader: waiting for %f s to call _request_more again" % wait_period
                self.downloader.scheduler(self._request_more, wait_period)
            if not (self.active_requests or self.backlog):
                self.downloader.queued_out[self] = 1
            return
        #if DEBUG:
        #    print >>sys.stderr,"Downloader: _request_more: len act",len(self.active_requests),"back",self.backlog
        # Pieces that ran out of open requests while we were requesting them;
        # other peers interested only in these must re-evaluate interest.
        lost_interests = []
        while len(self.active_requests) < self.backlog:
            #if DEBUG:
            #    print >>sys.stderr,"Downloader: Looking for interesting piece"
            #st = time.time()
            interest = self.downloader.picker.next(
                self.have,
                self.downloader.storage.do_I_have_requests,
                self,
                self.downloader.too_many_partials(),
                self.connection.connection.is_helper_con(),
                slowpieces=slowpieces,
                connection=self.connection)
            #et = time.time()
            #diff = et-st
            diff = -1
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: next() returned", interest, "took %.5f" % (
                    diff)
            if interest is None:
                break
            self.example_interest = interest
            self.send_interested()
            # Request as many blocks of this piece as the backlog allows.
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                begin, length = self.downloader.storage.new_request(interest)
                if DEBUG:
                    print >> sys.stderr, "Downloader: new_request", interest, begin, length, "to", self.connection.connection.get_ip(
                    ), self.connection.connection.get_port()
                self.downloader.picker.requested(interest)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)
        if not self.active_requests:
            self.send_not_interested()
        if lost_interests:
            # Re-check interest of idle peers whose example piece may now be
            # fully requested.
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(
                        d.example_interest):
                    continue
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue
                # 2fastbt_
                #st = time.time()
                interest = self.downloader.picker.next(
                    d.have,
                    self.downloader.storage.do_I_have_requests,
                    self,  # Arno, 2008-05-22; self -> d? Original Pawel code
                    self.downloader.too_many_partials(),
                    self.connection.connection.is_helper_con(),
                    willrequest=False,
                    connection=self.connection)
                #et = time.time()
                #diff = et-st
                diff = -1
                if DEBUG:
                    print >> sys.stderr, "Downloader: _request_more: next()2 returned", interest, "took %.5f" % (
                        diff)
                # _2fastbt
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest
        # Arno: LIVEWRAP: no endgame
        if self.downloader.storage.is_endgame(
        ) and not self.downloader.picker.live_streaming:
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke=False):
        """Endgame-mode requesting: duplicate outstanding requests here."""
        # 2fastbt_
        # do not download from coordinator
        if self.downloader.paused or self.connection.connection.is_coordinator_con(
        ):
            # _2fastbt
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            return
        # 2fastbt_
        # Candidate requests: globally outstanding blocks of pieces this peer
        # has, not already requested from it, and not ignored by the helper.
        want = [
            a for a in self.downloader.all_requests
            if self.have[a[0]] and a not in self.active_requests and
            (self.helper is None or self.connection.connection.is_helper_con()
             or not self.helper.is_ignored(a[0]))
        ]
        # _2fastbt
        if not (self.active_requests or want):
            self.send_not_interested()
            return
        if want:
            self.send_interested()
        if self.choked:
            return
        # Randomize and trim to the free backlog slots.
        shuffle(want)
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
            # 2fastbt_
            if self.helper is None or self.connection.connection.is_helper_con(
            ) or self.helper.reserve_piece(piece, self):
                self.connection.send_request(piece, begin, length)
                self.downloader.chunk_requested(length)
            # _2fastbt

    def got_have(self, index):
        """Process a HAVE message: update rates, bitfield, and interest."""
        if DEBUG:
            print >> sys.stderr, "Downloader: got_have", index
        # The last piece may be shorter than piece_length.
        if index == self.downloader.numpieces - 1:
            self.downloader.totalmeasure.update_rate(
                self.downloader.storage.total_length -
                (self.downloader.numpieces - 1) *
                self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length -
                                         (self.downloader.numpieces - 1) *
                                         self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(
                self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)
        # Arno: LIVEWRAP
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >> sys.stderr, "Downloader: got_have", index, "is invalid piece"
            return  # TODO: should we request_more()?
        if self.have[index]:
            return
        self.have[index] = True
        self.downloader.picker.got_have(index, self.connection)
        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                # Seed-to-seed: no point staying connected.
                self.downloader.add_disconnected_seed(
                    self.connection.get_readable_id())
                self.connection.close()
                return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif (not self.downloader.paused
              and not self.downloader.picker.is_blocked(index)
              and self.downloader.storage.do_I_have_requests(index)):
            if not self.choked:
                self._request_more()
            else:
                self.send_interested()

    def _check_interests(self):
        """Declare interest if the peer has any piece we still need."""
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if (self.have[i] and not self.downloader.picker.is_blocked(i)
                    and (self.downloader.endgamemode
                         or self.downloader.storage.do_I_have_requests(i))):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
        """Process the initial BITFIELD message from the peer."""
        if self.downloader.picker.am_I_complete() and have.complete():
            # Arno: If we're both seeds
            if self.downloader.super_seeding:
                self.connection.send_bitfield(
                    have.tostring())  # be nice, show you're a seed too
            self.connection.close()
            self.downloader.add_disconnected_seed(
                self.connection.get_readable_id())
            return
        #print >>sys.stderr,"Downloader: got_have_bitfield: valid",self.downloader.picker.get_valid_range_iterator(),"len",self.downloader.numpieces
        #print >>sys.stderr,"Downloader: got_have_bitfield: input",`have.toboollist()`
        if have.complete():
            # Arno: He is seed
            self.downloader.picker.got_seed()
        else:
            # Arno: LIVEWRAP: filter out valid pieces
            # TODO: may be slow with 32K pieces.
            validhave = Bitfield(self.downloader.numpieces)
            for i in self.downloader.picker.get_valid_range_iterator():
                if have[i]:
                    validhave[i] = True
                    self.downloader.picker.got_have(i, self.connection)
            have = validhave
        # Store filtered bitfield
        self.have = have
        #print >>sys.stderr,"Downloader: got_have_bitfield: valid",`have.toboollist()`
        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break
            return
        self._check_interests()

    def get_rate(self):
        return self.measure.get_rate()

    def is_snubbed(self):
        """Return True if this peer stopped sending us data for too long.

        Side effect: a silent-but-unchoked regular peer gets its requests
        cancelled and is treated as if it had choked us.
        """
        # 2fastbt_
        if not self.choked and clock() - self.last2 > self.downloader.snub_time and \
           not self.connection.connection.is_helper_con() and \
           not self.connection.connection.is_coordinator_con():
            # _2fastbt
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)
            self.got_choke()  # treat it just like a choke
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()
def _test_proxy(self, genresdict):
    """ Send messages to the helper instance and test it.
        Testing ASK_FOR_HELP, STOP_HELPING, REQUEST_PIECES, CANCEL_PIECE and METADATA

        `genresdict` maps a message id to a (generator, is_good) pair: the
        generator builds the wire message, is_good says whether the helper
        should accept it (False means we expect the connection to be closed).
    """
    # 1. Establish overlay connection to Tribler
    ol_connection = OLConnection(self.my_keypair,
                                 'localhost',
                                 self.hisport,
                                 mylistenport=self.mylistenport2)
    # 2. Send the ASK_FOR_HELP message
    (generate_data, sent_good_values) = genresdict[ASK_FOR_HELP]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response
        # 3. At this point, the helper does not have the .torrent file, so it requests it with a METADATA message
        self.assert_(resp[0] == GET_METADATA)
        self.check_get_metadata(resp[1:])
        print >> sys.stderr, "test: Got GET_METADATA for torrent, good"
    else:
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response: a bad message closes the connection.
        self.assert_(len(resp) == 0)
        ol_connection.close()
        return
    # 4. Send METADATA
    (generate_data, sent_good_values) = genresdict[METADATA]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        # 5. At this point the helper is confirming his availability to help
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response
        self.assert_(resp[0] == JOIN_HELPERS)
        self.check_ask_for_help(resp)
        print >> sys.stderr, "test: Got JOIN_HELPERS for torrent, good"
        # 6. At this point, the helper will contact the tracker and then wait for REQUEST_PIECES messages
        # So we send a request pieces message
        (generate_data, sent_good_values) = genresdict[REQUEST_PIECES]
        msg = generate_data()
        ol_connection.send(msg)
        # At this point the helper will contact the seeders in the swarm to download the requested piece
        # There is only one seeder in the swarm, the coordinator's twin
        # 8. Our tracker says there is another peer (also us) on port 4810
        # Now accept a connection on that port and pretend we're a seeder
        self.myss.settimeout(10.0)
        conn, addr = self.myss.accept()
        options = '\x00\x00\x00\x00\x00\x00\x00\x00'
        s2 = BTConnection('',
                          0,
                          conn,
                          user_option_pattern=options,
                          user_infohash=self.infohash,
                          myid=self.myid)
        s2.read_handshake_medium_rare()
        # Send a bitfield message to the helper (pretending we are a regular seeder)
        b = Bitfield(self.numpieces)
        for i in range(self.numpieces):
            b[i] = True
        self.assert_(b.complete())
        msg = BITFIELD + b.tostring()
        s2.send(msg)
        msg = UNCHOKE
        s2.send(msg)
        print >> sys.stderr, "test: Got BT connection to us, as fake seeder, good"
    else:
        # Bad METADATA: helper must drop the overlay connection.
        resp = ol_connection.recv()
        self.assert_(len(resp) == 0)
        ol_connection.close()
        return
    # 7. Accept the data connection the helper wants to establish with us, the coordinator.
    # The helper will send via this connection the pieces we request it to download.
    self.myss2.settimeout(10.0)
    conn, addr = self.myss2.accept()
    s3 = BTConnection('',
                      0,
                      conn,
                      user_infohash=self.infohash,
                      myid=self.myid2)
    s3.read_handshake_medium_rare()
    msg = UNCHOKE
    s3.send(msg)
    print >> sys.stderr, "test: Got data connection to us, as coordinator, good"
    # 9. At this point the helper should sent a PROXY_HAVE message on the overlay connection
    # resp = ol_connection.recv()
    # self.assert_(resp[0] == PROXY_HAVE)
    # print >>sys.stderr,"test: Got PROXY)HAVE, good"
    # 10. Await REQUEST on fake seeder
    try:
        while True:
            s2.s.settimeout(10.0)
            resp = s2.recv()
            self.assert_(len(resp) > 0)
            print "test: Fake seeder got message", getMessageName(resp[0])
            if resp[0] == REQUEST:
                self.check_request(resp[1:])
                print >> sys.stderr, "test: Fake seeder got REQUEST for reserved piece, good"
                break
    except socket.timeout:
        print >> sys.stderr, "test: Timeout, bad, fake seeder didn't reply with message"
        self.assert_(False)
    # 11. Sent the helper a STOP_HELPING message
    (generate_data, sent_good_values) = genresdict[STOP_HELPING]
    msg = generate_data()
    ol_connection.send(msg)
    # The other side should close the connection, whether the msg was good or bad
    resp = ol_connection.recv()
    self.assert_(len(resp) == 0)
    ol_connection.close()
def setUpPostSession(self):
    """ override TestAsServer

    Builds a Merkle torrent for a local file, starts downloading it in the
    session, precomputes the seeder bitfield, reconstructs the Merkle tree
    from a parallel non-Merkle torrent's piece hashes, and dumps piece data
    to disk for later use by the tests.
    """
    TestAsServer.setUpPostSession(self)
    # Let Tribler start downloading an non-functioning torrent, so
    # we can talk to a normal download engine.
    self.tdef = TorrentDef()
    self.sourcefn = os.path.join(os.getcwd(), "API", "file2.wmv")
    self.tdef.add_content(self.sourcefn)
    self.tdef.set_create_merkle_torrent(True)
    # Tracker is deliberately unreachable (port 12): the download engine
    # runs but never gets real peers, so the test fully controls the swarm.
    self.tdef.set_tracker("http://127.0.0.1:12/announce")
    self.tdef.finalize()
    self.torrentfn = os.path.join(self.session.get_state_dir(), "gen.torrent")
    self.tdef.save(self.torrentfn)
    dscfg = self.setUpDownloadConfig()
    self.session.start_download(self.tdef, dscfg)
    self.infohash = self.tdef.get_infohash()
    self.mylistenport = 4810
    # Ceiling division: number of pieces in the torrent.
    self.numpieces = (self.tdef.get_length() + self.tdef.get_piece_length() -
                      1) / self.tdef.get_piece_length()
    b = Bitfield(self.numpieces)
    for i in range(self.numpieces):
        b[i] = True
    self.assert_(b.complete())
    self.seederbitfieldstr = b.tostring()
    #piece_hashes = ['\x01\x02\x03\x04\x05\x06\x07\x08\x07\x06\x05\x04\x03\x02\x01\x00\x01\x02\x03\x04' ] * npieces
    # Construct Merkle tree
    # A second, non-Merkle torrent over the same content supplies the flat
    # SHA1 piece hashes ('pieces' string) needed to build the tree.
    tdef2 = TorrentDef()
    tdef2.add_content(self.sourcefn)
    tdef2.set_create_merkle_torrent(False)
    tdef2.set_tracker("http://127.0.0.1:12/announce")
    tdef2.set_piece_length(self.tdef.get_piece_length())
    tdef2.finalize()
    metainfo = tdef2.get_metainfo()
    piecesstr = metainfo['info']['pieces']
    print >> sys.stderr, "test: pieces has len", len(piecesstr)
    # 'pieces' is a concatenation of 20-byte SHA1 digests.
    piece_hashes = []
    for i in range(0, len(piecesstr), 20):
        hash = piecesstr[i:i + 20]
        print >> sys.stderr, "test: piece", i / 20, "hash", ` hash `
        piece_hashes.append(hash)
    print >> sys.stderr, "test: Putting", len(
        piece_hashes), "into MerkleTree, size", self.tdef.get_piece_length(
        ), tdef2.get_piece_length()
    self.tree = MerkleTree(self.tdef.get_piece_length(),
                           self.tdef.get_length(), None, piece_hashes)
    # Read the first two pieces (2**18 = 256 KiB each) from the source file.
    f = open(self.sourcefn, "rb")
    piece1 = f.read(2**18)
    piece2 = f.read(2**18)
    print >> sys.stderr, "read piece1", len(piece1)
    print >> sys.stderr, "read piece2", len(piece2)
    f.close()
    hash1 = sha(piece1).digest()
    hash2 = sha(piece2).digest()
    print >> sys.stderr, "hash piece1", ` hash1 `
    print >> sys.stderr, "hash piece2", ` hash2 `
    # NOTE(review): writes piece2 into a file named "piece1.bin" — looks like
    # a piece1/piece2 mix-up, but both copies of this fixture in the codebase
    # agree; confirm against the consuming test before changing.
    f2 = open("piece1.bin", "wb")
    f2.write(piece2)
    f2.close()
def _test_2fast(self, genresdict):
    """ test ASK_FOR_HELP, METADATA, PIECES_RESERVED and STOP_DOWNLOAD_HELP sequence

    `genresdict` maps a message id to a (generator, is_good) pair: the
    generator builds the wire message, is_good says whether the other side
    should accept it (False means we expect the connection to be closed).
    """
    # 1. Establish overlay connection to Tribler
    s = OLConnection(self.my_keypair, 'localhost', self.hisport,
                     mylistenport=self.mylistenport2)
    (func, good) = genresdict[ASK_FOR_HELP]
    msg = func()
    s.send(msg)
    if good:
        # Helper lacks the .torrent file and asks for it.
        resp = s.recv()
        self.assert_(resp[0] == GET_METADATA)
        self.check_get_metadata(resp[1:])
        print >> sys.stderr, "test: Got GET_METADATA for torrent, good"
    else:
        # Bad message: connection must be closed.
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return
    (func, good) = genresdict[METADATA]
    msg = func()
    s.send(msg)
    if good:
        # 2. Accept the data connection Tribler wants to establish with us, the coordinator
        self.myss2.settimeout(10.0)
        conn, addr = self.myss2.accept()
        s3 = BTConnection('', 0, conn, user_infohash=self.infohash,
                          myid=self.myid2)
        s3.read_handshake_medium_rare()
        msg = UNCHOKE
        s3.send(msg)
        print >> sys.stderr, "test: Got data connection to us, as coordinator, good"
    else:
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return
    # 3. Our tracker says there is another peer (also us) on port 4810
    # Now accept a connection on that port and pretend we're a seeder
    self.myss.settimeout(10.0)
    conn, addr = self.myss.accept()
    options = '\x00\x00\x00\x00\x00\x00\x00\x00'
    s2 = BTConnection('', 0, conn, user_option_pattern=options,
                      user_infohash=self.infohash, myid=self.myid)
    s2.read_handshake_medium_rare()
    numpieces = 10  # must correspond to the torrent in test/extend_hs_dir
    b = Bitfield(numpieces)
    for i in range(numpieces):
        b[i] = True
    self.assert_(b.complete())
    msg = BITFIELD + b.tostring()
    s2.send(msg)
    msg = UNCHOKE
    s2.send(msg)
    print >> sys.stderr, "test: Got BT connection to us, as fake seeder, good"
    # 4. Await a RESERVE_PIECES message on the overlay connection
    resp = s.recv()
    self.assert_(resp[0] == RESERVE_PIECES)
    pieces = self.check_reserve_pieces(resp[1:])
    print >> sys.stderr, "test: Got RESERVE_PIECES, good"
    (func, good) = genresdict[PIECES_RESERVED]
    # 5. Reply with PIECES_RESERVED
    msg = func(pieces)
    s.send(msg)
    if good:
        # 6. Await REQUEST on fake seeder
        try:
            while True:
                s2.s.settimeout(10.0)
                resp = s2.recv()
                self.assert_(len(resp) > 0)
                print "test: Fake seeder got message", getMessageName(resp[0])
                if resp[0] == REQUEST:
                    self.check_request(resp[1:], pieces)
                    print >> sys.stderr, "test: Fake seeder got REQUEST for reserved piece, good"
                    break
        except socket.timeout:
            print >> sys.stderr, "test: Timeout, bad, fake seeder didn't reply with message"
            self.assert_(False)
    else:
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return
    (func, good) = genresdict[STOP_DOWNLOAD_HELP]
    # 5. Reply with STOP_DOWNLOAD_HELP
    msg = func()
    s.send(msg)
    # the other side should close the connection, whether the msg was good or bad
    resp = s.recv()
    self.assert_(len(resp) == 0)
    s.close()
def _test_proxy(self, genresdict):
    """ Send messages to the helper instance and test it.
        Testing ASK_FOR_HELP, STOP_HELPING, REQUEST_PIECES, CANCEL_PIECE and METADATA

        `genresdict` maps a message id to a (generator, is_good) pair: the
        generator builds the wire message, is_good says whether the helper
        should accept it (False means we expect the connection to be closed).
    """
    # 1. Establish overlay connection to Tribler
    ol_connection = OLConnection(self.my_keypair,
                                 "localhost",
                                 self.hisport,
                                 mylistenport=self.mylistenport2)
    # 2. Send the ASK_FOR_HELP message
    (generate_data, sent_good_values) = genresdict[ASK_FOR_HELP]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response
        # 3. At this point, the helper does not have the .torrent file, so it requests it with a METADATA message
        self.assert_(resp[0] == GET_METADATA)
        self.check_get_metadata(resp[1:])
        print >> sys.stderr, "test: Got GET_METADATA for torrent, good"
    else:
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response: a bad message closes the connection.
        self.assert_(len(resp) == 0)
        ol_connection.close()
        return
    # 4. Send METADATA
    (generate_data, sent_good_values) = genresdict[METADATA]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        # 5. At this point the helper is confirming his availability to help
        # Read the helper's response
        resp = ol_connection.recv()
        # Check the helper's response
        self.assert_(resp[0] == JOIN_HELPERS)
        self.check_ask_for_help(resp)
        print >> sys.stderr, "test: Got JOIN_HELPERS for torrent, good"
        # 6. At this point, the helper will contact the tracker and then wait for REQUEST_PIECES messages
        # So we send a request pieces message
        (generate_data, sent_good_values) = genresdict[REQUEST_PIECES]
        msg = generate_data()
        ol_connection.send(msg)
        # At this point the helper will contact the seeders in the swarm to download the requested piece
        # There is only one seeder in the swarm, the coordinator's twin
        # 8. Our tracker says there is another peer (also us) on port 4810
        # Now accept a connection on that port and pretend we're a seeder
        self.myss.settimeout(10.0)
        conn, addr = self.myss.accept()
        options = "\x00\x00\x00\x00\x00\x00\x00\x00"
        s2 = BTConnection("",
                          0,
                          conn,
                          user_option_pattern=options,
                          user_infohash=self.infohash,
                          myid=self.myid)
        s2.read_handshake_medium_rare()
        # Send a bitfield message to the helper (pretending we are a regular seeder)
        b = Bitfield(self.numpieces)
        for i in range(self.numpieces):
            b[i] = True
        self.assert_(b.complete())
        msg = BITFIELD + b.tostring()
        s2.send(msg)
        msg = UNCHOKE
        s2.send(msg)
        print >> sys.stderr, "test: Got BT connection to us, as fake seeder, good"
    else:
        # Bad METADATA: helper must drop the overlay connection.
        resp = ol_connection.recv()
        self.assert_(len(resp) == 0)
        ol_connection.close()
        return
    # 7. Accept the data connection the helper wants to establish with us, the coordinator.
    # The helper will send via this connection the pieces we request it to download.
    self.myss2.settimeout(10.0)
    conn, addr = self.myss2.accept()
    s3 = BTConnection("",
                      0,
                      conn,
                      user_infohash=self.infohash,
                      myid=self.myid2)
    s3.read_handshake_medium_rare()
    msg = UNCHOKE
    s3.send(msg)
    print >> sys.stderr, "test: Got data connection to us, as coordinator, good"
    # 9. At this point the helper should sent a PROXY_HAVE message on the overlay connection
    # resp = ol_connection.recv()
    # self.assert_(resp[0] == PROXY_HAVE)
    # print >>sys.stderr,"test: Got PROXY)HAVE, good"
    # 10. Await REQUEST on fake seeder
    try:
        while True:
            s2.s.settimeout(10.0)
            resp = s2.recv()
            self.assert_(len(resp) > 0)
            print "test: Fake seeder got message", getMessageName(resp[0])
            if resp[0] == REQUEST:
                self.check_request(resp[1:])
                print >> sys.stderr, "test: Fake seeder got REQUEST for reserved piece, good"
                break
    except socket.timeout:
        print >> sys.stderr, "test: Timeout, bad, fake seeder didn't reply with message"
        self.assert_(False)
    # 11. Sent the helper a STOP_HELPING message
    (generate_data, sent_good_values) = genresdict[STOP_HELPING]
    msg = generate_data()
    ol_connection.send(msg)
    # The other side should close the connection, whether the msg was good or bad
    resp = ol_connection.recv()
    self.assert_(len(resp) == 0)
    ol_connection.close()
def setUpPostSession(self):
    """ override TestAsServer

    Builds a Merkle torrent for a local file, starts downloading it in the
    session, precomputes the seeder bitfield, reconstructs the Merkle tree
    from a parallel non-Merkle torrent's piece hashes, and dumps piece data
    to disk for later use by the tests.
    """
    TestAsServer.setUpPostSession(self)
    # Let Tribler start downloading an non-functioning torrent, so
    # we can talk to a normal download engine.
    self.tdef = TorrentDef()
    self.sourcefn = os.path.join(os.getcwd(), "API", "file2.wmv")
    self.tdef.add_content(self.sourcefn)
    self.tdef.set_create_merkle_torrent(True)
    # Tracker is deliberately unreachable (port 12): the download engine
    # runs but never gets real peers, so the test fully controls the swarm.
    self.tdef.set_tracker("http://127.0.0.1:12/announce")
    self.tdef.finalize()
    self.torrentfn = os.path.join(self.session.get_state_dir(), "gen.torrent")
    self.tdef.save(self.torrentfn)
    dscfg = self.setUpDownloadConfig()
    self.session.start_download(self.tdef, dscfg)
    self.infohash = self.tdef.get_infohash()
    self.mylistenport = 4810
    # Ceiling division: number of pieces in the torrent.
    self.numpieces = (self.tdef.get_length() + self.tdef.get_piece_length() - 1) / self.tdef.get_piece_length()
    b = Bitfield(self.numpieces)
    for i in range(self.numpieces):
        b[i] = True
    self.assert_(b.complete())
    self.seederbitfieldstr = b.tostring()
    #piece_hashes = ['\x01\x02\x03\x04\x05\x06\x07\x08\x07\x06\x05\x04\x03\x02\x01\x00\x01\x02\x03\x04' ] * npieces
    # Construct Merkle tree
    # A second, non-Merkle torrent over the same content supplies the flat
    # SHA1 piece hashes ('pieces' string) needed to build the tree.
    tdef2 = TorrentDef()
    tdef2.add_content(self.sourcefn)
    tdef2.set_create_merkle_torrent(False)
    tdef2.set_tracker("http://127.0.0.1:12/announce")
    tdef2.set_piece_length(self.tdef.get_piece_length())
    tdef2.finalize()
    metainfo = tdef2.get_metainfo()
    piecesstr = metainfo['info']['pieces']
    print >> sys.stderr, "test: pieces has len", len(piecesstr)
    # 'pieces' is a concatenation of 20-byte SHA1 digests.
    piece_hashes = []
    for i in range(0, len(piecesstr), 20):
        hash = piecesstr[i:i + 20]
        print >> sys.stderr, "test: piece", i / 20, "hash", `hash`
        piece_hashes.append(hash)
    print >> sys.stderr, "test: Putting", len(piece_hashes), "into MerkleTree, size", self.tdef.get_piece_length(), tdef2.get_piece_length()
    self.tree = MerkleTree(self.tdef.get_piece_length(), self.tdef.get_length(), None, piece_hashes)
    # Read the first two pieces (2**18 = 256 KiB each) from the source file.
    f = open(self.sourcefn, "rb")
    piece1 = f.read(2 ** 18)
    piece2 = f.read(2 ** 18)
    print >> sys.stderr, "read piece1", len(piece1)
    print >> sys.stderr, "read piece2", len(piece2)
    f.close()
    hash1 = sha(piece1).digest()
    hash2 = sha(piece2).digest()
    print >> sys.stderr, "hash piece1", `hash1`
    print >> sys.stderr, "hash piece2", `hash2`
    # NOTE(review): writes piece2 into a file named "piece1.bin" — looks like
    # a piece1/piece2 mix-up, but both copies of this fixture in the codebase
    # agree; confirm against the consuming test before changing.
    f2 = open("piece1.bin", "wb")
    f2.write(piece2)
    f2.close()
class SingleDownload():
    """ Per-peer download state machine: tracks choke/interest state, the
    outstanding chunk requests to one peer, its HAVE bitfield and transfer
    rates, and drives requesting of new chunks via the PiecePicker.
    This variant carries the VOD short-term measurements, LivingLab
    statistics and ProxyService hooks. """

    def __init__(self, downloader, connection):
        self.downloader = downloader
        self.connection = connection
        # BitTorrent peer state: remote peer starts out choking us, and we
        # start out not interested.
        self.choked = True
        self.interested = False
        # Chunks requested from this peer but not yet received:
        # list of (index, begin, length) tuples.
        self.active_requests = []
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        self.have = Bitfield(downloader.numpieces)
        # Timestamps of last received data; -1000 means "long ago" so a
        # fresh peer is immediately considered snubbed-eligible.
        self.last = -1000
        self.last2 = -1000
        self.example_interest = None
        # Max number of outstanding requests; recomputed in _backlog().
        self.backlog = 2
        self.ip = connection.get_ip()
        self.guard = BadDataGuard(self)

        # boudewijn: VOD needs a download measurement that is not
        # averaged over a 'long' period. downloader.max_rate_period is
        # (by default) 20 seconds because this matches the unchoke
        # policy.
        self.short_term_measure = Measure(5)

        # boudewijn: each download maintains a counter for the number
        # of high priority piece requests that did not get any
        # responce within x seconds.
        self.bad_performance_counter = 0

    def _backlog(self, just_unchoked):
        """ Recompute and return the request pipeline depth, based on the
        current download rate from this peer and the global queue limit. """
        self.backlog = int(min(
            2+int(4*self.measure.get_rate()/self.downloader.chunksize),
            (2*just_unchoked)+self.downloader.queue_limit() ))
        if self.backlog > 50:
            # NOTE(review): max(50, backlog*0.075) only exceeds 50 when the
            # computed backlog is > ~666, so this effectively caps at 50 for
            # most rates. Kept as-is; both Downloader variants in this file
            # agree on this formula.
            self.backlog = int(max(50, self.backlog * 0.075))
        return self.backlog

    def disconnected(self):
        """ Peer connection lost: tell the downloader, release outstanding
        requests, and detach the bad-data guard. """
        self.downloader.lost_peer(self)

        """ JD: obsoleted -- moved to picker.lost_peer

        if self.have.complete():
            self.downloader.picker.lost_seed()
        else:
            for i in xrange(len(self.have)):
                if self.have[i]:
                    self.downloader.picker.lost_have(i)
        """

        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        """ Release all outstanding requests and, outside endgame, give
        other unchoked peers a chance to pick up the lost pieces. """
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            # In endgame the requests are duplicated anyway; just drop them.
            self.active_requests = []
            return
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1
        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        # Let unchoked peers re-request; shuffle for fairness.
        ds = [d for d in self.downloader.downloads if not d.choked]
        shuffle(ds)
        for d in ds:
            d._request_more()
        # Choked, uninterested peers may now have something to offer.
        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(l):
                        d.send_interested()
                        break

    def got_choke(self):
        """ Remote peer choked us: drop our outstanding requests. """
        if not self.choked:
            self.choked = True
            self._letgo()

    def got_unchoke(self):
        """ Remote peer unchoked us: start requesting if we are interested. """
        if self.choked:
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke = True)
            self.last2 = clock()

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        if not self.interested:
            self.interested = True
            self.connection.send_interested()

    def send_not_interested(self):
        if self.interested:
            self.interested = False
            self.connection.send_not_interested()

    def got_piece(self, index, begin, hashlist, piece):
        """ Returns True if the piece is complete.
            Note that in this case a -piece- means a chunk! """

        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG: print >>sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter

        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            # We never asked for this chunk (or already cancelled it).
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG: print >>sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(self.downloader.all_requests), "remaining"

        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        # Update statistic gatherer
        status = get_status_holder("LivingLab")
        s_download = status.get_or_create_status_element("downloaded",0)
        s_download.inc(length)

        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
            # Chunk rejected by storage (e.g. failed hash check).
            self.downloader.piece_flunked(index)
            return False

        # boudewijn: we need more accurate (if possibly invalid)
        # measurements on current download speed
        self.downloader.picker.got_piece(index, begin, length)

        if self.downloader.storage.do_I_have(index):
            # The piece (actual piece, not chunk) is complete
            self.downloader.picker.complete(index)

            # ProxyService_
            #
            if self.downloader.proxydownloader:
                if DEBUG: print >>sys.stderr, "downloader: got_piece. Searching if piece", index, "was requested by a doe node."
                if index in self.downloader.proxydownloader.proxy.currently_downloading_pieces:
                    # get_piece(index, 0, -1) returns the complete piece data
                    [piece_data, hash_list] = self.downloader.storage.get_piece(index, 0, -1)
                    self.downloader.proxydownloader.proxy.retrieved_piece(index, piece_data)
            #
            # _ProxyService

        if self.downloader.endgamemode:
            # Cancel this chunk on all other peers that also requested it.
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove((index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)

        # BarterCast counter
        self.connection.total_downloaded += length

        return self.downloader.storage.do_I_have(index)

    def _request_more(self, new_unchoke = False, slowpieces = None):
        """ Fill this peer's request pipeline up to the backlog, asking the
        PiecePicker for interesting pieces. May switch on endgame mode.

        FIX(review): 'slowpieces' previously defaulted to the mutable [],
        a classic Python pitfall; a None sentinel preserves behavior. """
        if slowpieces is None:
            slowpieces = []
        if self.choked:
            if DEBUG: print >>sys.stderr,"Downloader: _request_more: choked, returning"
            return
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG: print >>sys.stderr,"Downloader: _request_more: endgame mode, returning"
            return
        if self.downloader.paused:
            if DEBUG: print >>sys.stderr,"Downloader: _request_more: paused, returning"
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG: print >>sys.stderr,"Downloader: more req than unchoke (active req: %d >= backlog: %d)" % (len(self.active_requests), self._backlog(new_unchoke))
            # Jelle: Schedule _request more to be called in some time. Otherwise requesting and receiving packages
            # may stop, if they arrive to quickly
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                # Boudewijn: when wait_period is 0.0 this will cause
                # the the _request_more method to be scheduled
                # multiple times (recursively), causing severe cpu
                # problems.
                #
                # Therefore, only schedule _request_more to be called
                # if the call will be made in the future. The minimal
                # wait_period should be tweaked.
                if wait_period > 1.0:
                    if DEBUG: print >>sys.stderr,"Downloader: waiting for %f s to call _request_more again" % wait_period
                    self.downloader.scheduler(self._request_more, wait_period)

            if not (self.active_requests or self.backlog):
                self.downloader.queued_out[self] = 1
            return

        #if DEBUG:
        #    print >>sys.stderr,"Downloader: _request_more: len act",len(self.active_requests),"back",self.backlog

        # Pieces that became fully-requested while we pulled chunks from them;
        # other peers may need to drop interest in them afterwards.
        lost_interests = []
        while len(self.active_requests) < self.backlog:
            #if DEBUG:
            #    print >>sys.stderr,"Downloader: Looking for interesting piece"
            #st = time.time()
            #print "DOWNLOADER self.have=", self.have.toboollist()

            # This is the PiecePicker call if the current client is a Doe
            # TODO: check if the above comment is true
            interest = self.downloader.picker.next(self.have,
                               self.downloader.storage.do_I_have_requests,
                               self,
                               self.downloader.too_many_partials(),
                               slowpieces = slowpieces, connection = self.connection)
            #et = time.time()
            #diff = et-st
            if DEBUG:
                diff=-1
                print >>sys.stderr,"Downloader: _request_more: next() returned",interest,"took %.5f" % (diff)

            if interest is None:
                break

            self.example_interest = interest
            self.send_interested()
            # Request as many chunks of this piece as the backlog allows.
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                begin, length = self.downloader.storage.new_request(interest)

                if DEBUG:
                    print >>sys.stderr,"Downloader: new_request",interest,begin,length,"to",self.connection.connection.get_ip(),self.connection.connection.get_port()

                self.downloader.picker.requested(interest, begin, length)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)
        if not self.active_requests:
            self.send_not_interested()
        if lost_interests:
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(d.example_interest):
                    continue
                # Only re-evaluate peers that have one of the lost pieces.
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue
                #st = time.time()
                interest = self.downloader.picker.next(d.have,
                                   self.downloader.storage.do_I_have_requests,
                                   self, # Arno, 2008-05-22; self -> d? Original Pawel code
                                   self.downloader.too_many_partials(),
                                   willrequest=False,connection=self.connection)
                #et = time.time()
                #diff = et-st
                if DEBUG:
                    diff=-1
                    print >>sys.stderr,"Downloader: _request_more: next()2 returned",interest,"took %.5f" % (diff)
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest

        # Arno: LIVEWRAP: no endgame
        if not self.downloader.endgamemode and \
           self.downloader.storage.is_endgame() and \
           not (self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming):
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke = False):
        """ Endgame-mode requesting: (re)send requests for any still-missing
        chunks this peer has, duplicating requests across peers. """
        if self.downloader.paused:
            if DEBUG: print >>sys.stderr, "Downloader: fix_download_endgame: paused", self.downloader.paused
            return

        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            if DEBUG: print >>sys.stderr, "Downloader: fix_download_endgame: returned"
            return

        want = [a for a in self.downloader.all_requests if self.have[a[0]] and a not in self.active_requests]
        if not (self.active_requests or want):
            self.send_not_interested()
            if DEBUG: print >>sys.stderr, "Downloader: fix_download_endgame: not interested"
            return
        if want:
            self.send_interested()
        if self.choked:
            if DEBUG: print >>sys.stderr, "Downloader: fix_download_endgame: choked"
            return
        # Request a random subset that fits in the remaining backlog.
        shuffle(want)
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
            self.connection.send_request(piece, begin, length)
            self.downloader.chunk_requested(length)

    def got_have(self, index):
        """ Remote peer announced it now has piece 'index'. """
        # print >>sys.stderr,"Downloader: got_have",index
        if DEBUG:
            print >>sys.stderr,"Downloader: got_have",index
        if index == self.downloader.numpieces-1:
            # Last piece may be shorter than piece_length.
            self.downloader.totalmeasure.update_rate(self.downloader.storage.total_length-(self.downloader.numpieces-1)*self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length-(self.downloader.numpieces-1)*self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)

        # Arno: LIVEWRAP
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >>sys.stderr,"Downloader: got_have",index,"is invalid piece"
            return # TODO: should we request_more()?

        if self.have[index]:
            return

        self.have[index] = True
        self.downloader.picker.got_have(index,self.connection)

        # ProxyService_
        #
        # Aggregate the haves bitfields and send them to the doe nodes
        # If I am a doe, i will exit shortly
        self.downloader.aggregate_and_send_haves()
        #
        # _ProxyService

        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                # Two seeds have nothing to exchange; drop the connection.
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
                self.connection.close()
                return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif ( not self.downloader.paused
               and not self.downloader.picker.is_blocked(index)
               and self.downloader.storage.do_I_have_requests(index) ):
            if not self.choked:
                self._request_more()
            else:
                self.send_interested()

    def _check_interests(self):
        """ Become interested if this peer has any piece we can still use. """
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if ( self.have[i] and not self.downloader.picker.is_blocked(i)
                 and ( self.downloader.endgamemode
                       or self.downloader.storage.do_I_have_requests(i) ) ):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
        """ Remote peer sent its full HAVE bitfield; filter it (LIVEWRAP/VOD)
        and update picker state and our interest. """
        if self.downloader.picker.am_I_complete() and have.complete():
            # Arno: If we're both seeds
            if self.downloader.super_seeding:
                self.connection.send_bitfield(have.tostring()) # be nice, show you're a seed too

            # Niels: We're both seeds, but try to get some additional peers from this seed
            self.connection.try_send_pex()
            def auto_close():
                self.connection.close()
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
            # Keep the connection open briefly for PEX, then close it.
            self.downloader.scheduler(auto_close, REPEX_LISTEN_TIME)
            return

        if DEBUGBF:
            st = time.time()

        if have.complete():
            # Arno: He is seed
            self.downloader.picker.got_seed()
        else:
            # Arno: pass on HAVE knowledge to PiecePicker and if LIVEWRAP:
            # filter out valid pieces

            # STBSPEED: if we haven't hooked in yet, don't iterate over whole range
            # just over the active ranges in the received Bitfield
            activerangeiterators = []
            if self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming and self.downloader.picker.videostatus.get_live_startpos() is None:
                # Not hooked in
                activeranges = have.get_active_ranges()

                if len(activeranges) == 0:
                    # Bug, fallback to whole range
                    activerangeiterators = [self.downloader.picker.get_valid_range_iterator()]
                else:
                    # Create iterators for the active ranges
                    for (s,e) in activeranges:
                        activerangeiterators.append(xrange(s,e+1))
            else:
                # Hooked in, use own valid range as active range

                # Arno, 2010-04-20: Not correct for VOD with seeking, then we
                # should store the HAVE info for things before playback too.

                activerangeiterators = [self.downloader.picker.get_valid_range_iterator()]

            if DEBUGBF:
                print >>sys.stderr,"Downloader: got_have_field: live: Filtering bitfield",activerangeiterators

            if not self.downloader.picker.videostatus or self.downloader.picker.videostatus.live_streaming:
                if DEBUGBF:
                    print >>sys.stderr,"Downloader: got_have_field: live or normal filter"
                # Transfer HAVE knowledge to PiecePicker and filter pieces if live
                validhave = Bitfield(self.downloader.numpieces)
                for iterator in activerangeiterators:
                    for i in iterator:
                        if have[i]:
                            validhave[i] = True
                            self.downloader.picker.got_have(i,self.connection)
            else: # VOD
                if DEBUGBF:
                    print >>sys.stderr,"Downloader: got_have_field: VOD filter"
                validhave = Bitfield(self.downloader.numpieces)
                (first,last) = self.downloader.picker.videostatus.download_range()
                for i in xrange(first,last):
                    if have[i]:
                        validhave[i] = True
                        self.downloader.picker.got_have(i,self.connection)

            # ProxyService_
            #
            # Aggregate the haves bitfields and send them to the doe nodes
            # ARNOPS: Shouldn't this be done after have = validhave?
            self.downloader.aggregate_and_send_haves()
            #
            # _ProxyService

            """
            # SANITY CHECK
            checkhave = Bitfield(self.downloader.numpieces)
            for i in self.downloader.picker.get_valid_range_iterator():
                if have[i]:
                    checkhave[i] = True

            assert validhave.tostring() == checkhave.tostring()
            """

            # Store filtered bitfield instead of received one
            have = validhave

        if DEBUGBF:
            et = time.time()
            diff = et - st
            print >>sys.stderr,"Download: got_have_field: took",diff

        self.have = have

        #print >>sys.stderr,"Downloader: got_have_bitfield: valid",`have.toboollist()`

        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break
            return
        self._check_interests()

    def get_rate(self):
        return self.measure.get_rate()

    def get_short_term_rate(self):
        return self.short_term_measure.get_rate()

    def is_snubbed(self):
        """ True if this peer has not sent us data for snub_time seconds.
        Side effect: cancels outstanding requests on a stale unchoked peer. """
        if not self.choked and clock() - self.last2 > self.downloader.snub_time:
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)
            self.got_choke()    # treat it just like a choke
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()
def _test_2fast(self,genresdict):
    """ test ASK_FOR_HELP, METADATA, PIECES_RESERVED and STOP_DOWNLOAD_HELP sequence

    genresdict maps each overlay message type to a tuple
    (message-generator callable, whether the generated message is valid).
    At every step, a valid message must elicit the expected reply while an
    invalid one must make Tribler close the overlay connection. """
    # 1. Establish overlay connection to Tribler
    ol_connection = OLConnection(self.my_keypair, 'localhost', self.hisport, mylistenport=self.mylistenport2)

    # Send ASK_FOR_HELP
    (generate_data, sent_good_values) = genresdict[ASK_FOR_HELP]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        resp = ol_connection.recv()
        self.assert_(resp[0] == GET_METADATA)
        self.check_get_metadata(resp[1:])
        print >>sys.stderr,"test: Got GET_METADATA for torrent, sent_good_values"
    else:
        # Invalid message: Tribler should close the overlay connection.
        resp = ol_connection.recv()
        self.assert_(len(resp)==0)
        ol_connection.close()
        return

    # Send METADATA
    (generate_data,sent_good_values) = genresdict[METADATA]
    msg = generate_data()
    ol_connection.send(msg)
    if sent_good_values:
        # 2. Accept the data connection Tribler wants to establish with us, the coordinator
        self.myss2.settimeout(10.0)
        conn, addr = self.myss2.accept()
        #(self,hostname,port,opensock=None,user_option_pattern=None,user_infohash=None,myid=None,mylistenport=None,myoversion=None):
        bt_connection_2 = BTConnection('', 0, conn, user_infohash=self.infohash, myid=self.myid2)
        bt_connection_2.read_handshake_medium_rare()
        msg = UNCHOKE
        bt_connection_2.send(msg)
        print >>sys.stderr,"test: Got data connection to us, as coordinator, sent_good_values"
    else:
        resp = ol_connection.recv()
        self.assert_(len(resp)==0)
        ol_connection.close()
        return

    # 3. Our tracker says there is another peer (also us) on port 4810
    # Now accept a connection on that port and pretend we're a seeder
    self.myss.settimeout(10.0)
    conn, addr = self.myss.accept()
    options = '\x00\x00\x00\x00\x00\x00\x00\x00'
    bt_connection = BTConnection('', 0, conn, user_option_pattern=options, user_infohash=self.infohash, myid=self.myid)
    bt_connection.read_handshake_medium_rare()

    # Get the number of pieces from the .torrent file
    # NOTE(review): assumes self.torrentfile was set in setUp -- confirm
    # against the enclosing test class.
    torrentfile_content = open(self.torrentfile, "rb")
    metadata_dict = bdecode(torrentfile_content.read())
    torrentfile_content.close()

    # Single-file torrents store 'length' directly; multi-file torrents
    # list per-file lengths that must be summed.
    if "length" in metadata_dict["info"]:
        length = metadata_dict["info"]["length"]
    else:
        length = 0
        for file in metadata_dict["info"]["files"]:
            length += file["length"]
    # NOTE(review): floor division -- a trailing partial piece would be
    # dropped; presumably the test torrent's length is an exact multiple
    # of the piece length. Verify.
    numpieces = length / metadata_dict["info"]["piece length"]

    # Advertise a full bitfield: we pose as a seeder.
    bitf = Bitfield(numpieces)
    for i in range(numpieces):
        bitf[i] = True
    self.assert_(bitf.complete())
    msg = BITFIELD+bitf.tostring()
    bt_connection.send(msg)
    msg = UNCHOKE
    bt_connection.send(msg)
    print >>sys.stderr,"test: Got BT connection to us, as fake seeder, sent_good_values"

    # 4. Await a RESERVE_PIECES message on the overlay connection
    resp = ol_connection.recv()
    self.assert_(resp[0] == RESERVE_PIECES)
    pieces = self.check_reserve_pieces(resp[1:])
    print >>sys.stderr,"test: Got RESERVE_PIECES, sent_good_values"

    # 5. Reply with PIECES_RESERVED
    (generate_data, sent_good_values) = genresdict[PIECES_RESERVED]
    msg = generate_data(pieces)
    ol_connection.send(msg)
    if sent_good_values:
        # 6. Await REQUEST on fake seeder
        # NOTE(review): no explicit socket timeout here -- if the REQUEST
        # never arrives this loop relies on recv() failing/returning empty.
        while True:
            resp = bt_connection.recv()
            self.assert_(len(resp) > 0)
            print "test: Fake seeder got message",getMessageName(resp[0])
            if resp[0] == REQUEST:
                self.check_request(resp[1:],pieces)
                print >>sys.stderr,"test: Fake seeder got REQUEST for reserved piece, sent_good_values"
                break
    else:
        resp = ol_connection.recv()
        self.assert_(len(resp)==0)
        ol_connection.close()
        return

    # 7. Reply with STOP_DOWNLOAD_HELP
    (generate_data, sent_good_values) = genresdict[STOP_DOWNLOAD_HELP]
    msg = generate_data()
    ol_connection.send(msg)
    # the other side should close the connection, whether the msg was sent_good_values or bad
    resp = ol_connection.recv()
    self.assert_(len(resp)==0)
    ol_connection.close()
class SingleDownload(SingleDownloadHelperInterface):
    """ Per-peer download state machine (2fastbt variant): tracks
    choke/interest state, outstanding chunk requests, the peer's HAVE
    bitfield and transfer rates, and drives requesting via the PiecePicker.
    Integrates with the 2fast helper/coordinator: never downloads from the
    coordinator connection and respects helper piece reservations. """
# _2fastbt

    def __init__(self, downloader, connection):
# 2fastbt_
        SingleDownloadHelperInterface.__init__(self)
# _2fastbt
        self.downloader = downloader
        self.connection = connection
        # BitTorrent peer state: remote peer starts out choking us, and we
        # start out not interested.
        self.choked = True
        self.interested = False
        # Chunks requested from this peer but not yet received:
        # list of (index, begin, length) tuples.
        self.active_requests = []
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        self.have = Bitfield(downloader.numpieces)
        # Timestamps of last received data; -1000 means "long ago".
        self.last = -1000
        self.last2 = -1000
        self.example_interest = None
        # Max number of outstanding requests; recomputed in _backlog().
        self.backlog = 2
        self.ip = connection.get_ip()
        self.guard = BadDataGuard(self)
# 2fastbt_
        self.helper = downloader.picker.helper
# _2fastbt

    def _backlog(self, just_unchoked):
        """ Recompute and return the request pipeline depth, based on the
        current download rate from this peer and the global queue limit. """
        self.backlog = int(min(
            2+int(4*self.measure.get_rate()/self.downloader.chunksize),
            (2*just_unchoked)+self.downloader.queue_limit() ))
        if self.backlog > 50:
            # NOTE(review): max(50, backlog*0.075) only exceeds 50 when the
            # computed backlog is > ~666, so this effectively caps at 50.
            # Both Downloader variants in this file agree on this formula.
            self.backlog = int(max(50, self.backlog * 0.075))
        return self.backlog

    def disconnected(self):
        """ Peer connection lost: tell the downloader, release outstanding
        requests, and detach the bad-data guard. """
        self.downloader.lost_peer(self)

        """ JD: obsoleted -- moved to picker.lost_peer

        if self.have.complete():
            self.downloader.picker.lost_seed()
        else:
            for i in xrange(len(self.have)):
                if self.have[i]:
                    self.downloader.picker.lost_have(i)
        """

        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        """ Release all outstanding requests and, outside endgame, give
        other peers a chance to pick up the lost pieces. """
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            # In endgame the requests are duplicated anyway; just drop them.
            self.active_requests = []
            return
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1
        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        # Let unchoked peers re-request; shuffle for fairness.
        ds = [d for d in self.downloader.downloads if not d.choked]
        shuffle(ds)
        for d in ds:
            d._request_more()
        # Choked, uninterested peers may now have something to offer.
        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(l):
                        d.send_interested()
                        break

    def got_choke(self):
        """ Remote peer choked us: drop our outstanding requests. """
        if not self.choked:
            self.choked = True
            self._letgo()

    def got_unchoke(self):
        """ Remote peer unchoked us: start requesting if we are interested. """
        if self.choked:
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke = True)
            self.last2 = clock()

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        if not self.interested:
            self.interested = True
            self.connection.send_interested()

    def send_not_interested(self):
        if self.interested:
            self.interested = False
            self.connection.send_not_interested()

    def got_piece(self, index, begin, hashlist, piece):
        """ Returns True if the piece is complete. """
        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            # We never asked for this chunk (or already cancelled it).
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
            # Chunk rejected by storage (e.g. failed hash check).
            self.downloader.piece_flunked(index)
            return False
        if self.downloader.storage.do_I_have(index):
            self.downloader.picker.complete(index)
        if self.downloader.endgamemode:
            # Cancel this chunk on all other peers that also requested it.
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove((index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)

        # BarterCast counter
        self.connection.total_downloaded += length

        return self.downloader.storage.do_I_have(index)

# 2fastbt_
    def helper_forces_unchoke(self):
        """ 2fast: the helper layer overrides the choke state. """
        self.choked = False
# _2fastbt

    def _request_more(self, new_unchoke = False, slowpieces = None):
        """ Fill this peer's request pipeline up to the backlog, asking the
        PiecePicker for interesting pieces. May switch on endgame mode.

        FIX(review): 'slowpieces' previously defaulted to the mutable [],
        a classic Python pitfall; a None sentinel preserves behavior. """
        if slowpieces is None:
            slowpieces = []
# 2fastbt_
        if DEBUG:
            print >>sys.stderr,"Downloader: _request_more()"
        if self.is_frozen_by_helper():
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: blocked, returning"
            return
# _2fastbt
        if self.choked:
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: choked, returning"
            return
# 2fastbt_
        # do not download from coordinator
        if self.connection.connection.is_coordinator_con():
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: coordinator conn"
            return
# _2fastbt
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: endgame mode, returning"
            return
        if self.downloader.paused:
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: paused, returning"
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG:
                print >>sys.stderr,"Downloader: more req than unchoke (active req: %d >= backlog: %d)" % (len(self.active_requests), self._backlog(new_unchoke))
            # Jelle: Schedule _request more to be called in some time. Otherwise requesting and receiving packages
            # may stop, if they arrive to quickly
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                # FIX(review): at high download rates wait_period approaches
                # 0.0, and unconditionally rescheduling then causes the
                # recursive-rescheduling CPU spin documented (and fixed the
                # same way) in the other SingleDownload variant in this file.
                # Only reschedule when the call lies meaningfully in the future.
                if wait_period > 1.0:
                    if DEBUG:
                        print >>sys.stderr,"Downloader: waiting for %f s to call _request_more again" % wait_period
                    self.downloader.scheduler(self._request_more, wait_period)
            if not (self.active_requests or self.backlog):
                self.downloader.queued_out[self] = 1
            return

        #if DEBUG:
        #    print >>sys.stderr,"Downloader: _request_more: len act",len(self.active_requests),"back",self.backlog

        # Pieces that became fully-requested while we pulled chunks from them;
        # other peers may need to drop interest in them afterwards.
        lost_interests = []
        while len(self.active_requests) < self.backlog:
            #if DEBUG:
            #    print >>sys.stderr,"Downloader: Looking for interesting piece"
            #st = time.time()
            interest = self.downloader.picker.next(self.have,
                               self.downloader.storage.do_I_have_requests,
                               self,
                               self.downloader.too_many_partials(),
                               self.connection.connection.is_helper_con(),
                               slowpieces = slowpieces, connection = self.connection)
            #et = time.time()
            #diff = et-st
            diff=-1
            if DEBUG:
                print >>sys.stderr,"Downloader: _request_more: next() returned",interest,"took %.5f" % (diff)
            if interest is None:
                break
            self.example_interest = interest
            self.send_interested()
            # Request as many chunks of this piece as the backlog allows.
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                begin, length = self.downloader.storage.new_request(interest)
                if DEBUG:
                    print >>sys.stderr,"Downloader: new_request",interest,begin,length,"to",self.connection.connection.get_ip(),self.connection.connection.get_port()
                self.downloader.picker.requested(interest)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)
        if not self.active_requests:
            self.send_not_interested()
        if lost_interests:
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(d.example_interest):
                    continue
                # Only re-evaluate peers that have one of the lost pieces.
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue
# 2fastbt_
                #st = time.time()
                interest = self.downloader.picker.next(d.have,
                                   self.downloader.storage.do_I_have_requests,
                                   self, # Arno, 2008-05-22; self -> d? Original Pawel code
                                   self.downloader.too_many_partials(),
                                   self.connection.connection.is_helper_con(),
                                   willrequest=False,connection=self.connection)
                #et = time.time()
                #diff = et-st
                diff=-1
                if DEBUG:
                    print >>sys.stderr,"Downloader: _request_more: next()2 returned",interest,"took %.5f" % (diff)
# _2fastbt
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest

        # Arno: LIVEWRAP: no endgame
        if self.downloader.storage.is_endgame() and not self.downloader.picker.live_streaming:
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke = False):
        """ Endgame-mode requesting: (re)send requests for any still-missing
        chunks this peer has, honoring helper piece reservations. """
# 2fastbt_
        # do not download from coordinator
        if self.downloader.paused or self.connection.connection.is_coordinator_con():
# _2fastbt
            return

        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            return
# 2fastbt_
        # Skip pieces the helper tells us to ignore (unless this is a
        # helper connection).
        want = [a for a in self.downloader.all_requests if self.have[a[0]] and a not in self.active_requests and (self.helper is None or self.connection.connection.is_helper_con() or not self.helper.is_ignored(a[0]))]
# _2fastbt
        if not (self.active_requests or want):
            self.send_not_interested()
            return
        if want:
            self.send_interested()
        if self.choked:
            return
        # Request a random subset that fits in the remaining backlog.
        shuffle(want)
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
# 2fastbt_
            if self.helper is None or self.connection.connection.is_helper_con() or self.helper.reserve_piece(piece,self):
                self.connection.send_request(piece, begin, length)
                self.downloader.chunk_requested(length)
# _2fastbt

    def got_have(self, index):
        """ Remote peer announced it now has piece 'index'. """
        if DEBUG:
            print >>sys.stderr,"Downloader: got_have",index
        if index == self.downloader.numpieces-1:
            # Last piece may be shorter than piece_length.
            self.downloader.totalmeasure.update_rate(self.downloader.storage.total_length-(self.downloader.numpieces-1)*self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length-(self.downloader.numpieces-1)*self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)

        # Arno: LIVEWRAP
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >>sys.stderr,"Downloader: got_have",index,"is invalid piece"
            return # TODO: should we request_more()?

        if self.have[index]:
            return

        self.have[index] = True
        self.downloader.picker.got_have(index,self.connection)
        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                # Two seeds have nothing to exchange; drop the connection.
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
                self.connection.close()
                return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif ( not self.downloader.paused
               and not self.downloader.picker.is_blocked(index)
               and self.downloader.storage.do_I_have_requests(index) ):
            if not self.choked:
                self._request_more()
            else:
                self.send_interested()

    def _check_interests(self):
        """ Become interested if this peer has any piece we can still use. """
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if ( self.have[i] and not self.downloader.picker.is_blocked(i)
                 and ( self.downloader.endgamemode
                       or self.downloader.storage.do_I_have_requests(i) ) ):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
        """ Remote peer sent its full HAVE bitfield; filter it (LIVEWRAP)
        and update picker state and our interest. """
        if self.downloader.picker.am_I_complete() and have.complete():
            # Arno: If we're both seeds
            if self.downloader.super_seeding:
                self.connection.send_bitfield(have.tostring()) # be nice, show you're a seed too
            self.connection.close()
            self.downloader.add_disconnected_seed(self.connection.get_readable_id())
            return

        #print >>sys.stderr,"Downloader: got_have_bitfield: input",`have.toboollist()`
        if have.complete():
            # Arno: He is seed
            self.downloader.picker.got_seed()
        else:
            # Arno: LIVEWRAP: filter out valid pieces
            # TODO: may be slow with 32K pieces.
            validhave = Bitfield(self.downloader.numpieces)
            for i in self.downloader.picker.get_valid_range_iterator():
                if have[i]:
                    validhave[i] = True
                    self.downloader.picker.got_have(i,self.connection)
            have = validhave
        # Store filtered bitfield
        self.have = have

        #print >>sys.stderr,"Downloader: got_have_bitfield: valid",`have.toboollist()`

        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break
            return
        self._check_interests()

    def get_rate(self):
        return self.measure.get_rate()

    def is_snubbed(self):
        """ True if this peer has not sent us data for snub_time seconds.
        Helper/coordinator connections are never treated as snubbing.
        Side effect: cancels outstanding requests on a stale unchoked peer. """
# 2fastbt_
        if not self.choked and clock() - self.last2 > self.downloader.snub_time and \
            not self.connection.connection.is_helper_con() and \
            not self.connection.connection.is_coordinator_con():
# _2fastbt
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)
            self.got_choke()    # treat it just like a choke
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()
def _test_2fast(self, genresdict):
    """ test ASK_FOR_HELP, METADATA, PIECES_RESERVED and STOP_DOWNLOAD_HELP sequence

    genresdict maps each overlay message id to a (func, good) pair:
    func() builds the message payload and good says whether the payload
    is well-formed, i.e. whether Tribler is expected to continue the
    protocol (good) or silently close the connection (bad).
    """
    # 1. Establish overlay connection to Tribler
    s = OLConnection(self.my_keypair, 'localhost', self.hisport, mylistenport=self.mylistenport2)

    # Send ASK_FOR_HELP over the overlay connection
    (func, good) = genresdict[ASK_FOR_HELP]
    msg = func()
    s.send(msg)
    if good:
        # A good ASK_FOR_HELP must be answered with GET_METADATA
        resp = s.recv()
        self.assert_(resp[0] == GET_METADATA)
        self.check_get_metadata(resp[1:])
        print >> sys.stderr, "test: Got GET_METADATA for torrent, good"
    else:
        # A bad message must make the other side close the connection
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return

    # Send METADATA over the overlay connection
    (func, good) = genresdict[METADATA]
    msg = func()
    s.send(msg)
    if good:
        # 2. Accept the data connection Tribler wants to establish with us, the coordinator
        self.myss2.settimeout(10.0)
        conn, addr = self.myss2.accept()
        s3 = BTConnection('', 0, conn, user_infohash=self.infohash, myid=self.myid2)
        s3.read_handshake_medium_rare()
        msg = UNCHOKE
        s3.send(msg)
        print >> sys.stderr, "test: Got data connection to us, as coordinator, good"
    else:
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return

    # 3. Our tracker says there is another peer (also us) on port 4810
    # Now accept a connection on that port and pretend we're a seeder
    self.myss.settimeout(10.0)
    conn, addr = self.myss.accept()
    options = '\x00\x00\x00\x00\x00\x00\x00\x00'
    s2 = BTConnection('', 0, conn, user_option_pattern=options, user_infohash=self.infohash, myid=self.myid)
    s2.read_handshake_medium_rare()
    numpieces = 10 # must correspond to the torrent in test/extend_hs_dir
    b = Bitfield(numpieces)
    # Claim we have every piece, i.e. present ourselves as a seed
    for i in range(numpieces):
        b[i] = True
    self.assert_(b.complete())
    msg = BITFIELD + b.tostring()
    s2.send(msg)
    msg = UNCHOKE
    s2.send(msg)
    print >> sys.stderr, "test: Got BT connection to us, as fake seeder, good"

    # 4. Await a RESERVE_PIECES message on the overlay connection
    resp = s.recv()
    self.assert_(resp[0] == RESERVE_PIECES)
    pieces = self.check_reserve_pieces(resp[1:])
    print >> sys.stderr, "test: Got RESERVE_PIECES, good"

    (func, good) = genresdict[PIECES_RESERVED]

    # 5. Reply with PIECES_RESERVED
    msg = func(pieces)
    s.send(msg)
    if good:
        # 6. Await REQUEST on fake seeder; timeout so a non-responding
        # Tribler fails the test instead of hanging it
        try:
            while True:
                s2.s.settimeout(10.0)
                resp = s2.recv()
                self.assert_(len(resp) > 0)
                print "test: Fake seeder got message", getMessageName(resp[0])
                if resp[0] == REQUEST:
                    self.check_request(resp[1:], pieces)
                    print >> sys.stderr, "test: Fake seeder got REQUEST for reserved piece, good"
                    break
        except socket.timeout:
            print >> sys.stderr, "test: Timeout, bad, fake seeder didn't reply with message"
            self.assert_(False)
    else:
        resp = s.recv()
        self.assert_(len(resp) == 0)
        s.close()
        return

    (func, good) = genresdict[STOP_DOWNLOAD_HELP]
    # 7. Reply with STOP_DOWNLOAD_HELP
    msg = func()
    s.send(msg)
    # the other side should close the connection, whether the msg was good or bad
    resp = s.recv()
    self.assert_(len(resp) == 0)
    s.close()
def _test_2fast(self, genresdict): """ test ASK_FOR_HELP, METADATA, PIECES_RESERVED and STOP_DOWNLOAD_HELP sequence """ # 1. Establish overlay connection to Tribler ol_connection = OLConnection(self.my_keypair, 'localhost', self.hisport, mylistenport=self.mylistenport2) # Send ASK_FOR_HELP (generate_data, sent_good_values) = genresdict[ASK_FOR_HELP] msg = generate_data() ol_connection.send(msg) if sent_good_values: resp = ol_connection.recv() self.assert_(resp[0] == GET_METADATA) self.check_get_metadata(resp[1:]) print >> sys.stderr, "test: Got GET_METADATA for torrent, sent_good_values" else: resp = ol_connection.recv() self.assert_(len(resp) == 0) ol_connection.close() return # Send METADATA (generate_data, sent_good_values) = genresdict[METADATA] msg = generate_data() ol_connection.send(msg) if sent_good_values: # 2. Accept the data connection Tribler wants to establish with us, the coordinator self.myss2.settimeout(10.0) conn, addr = self.myss2.accept() #(self,hostname,port,opensock=None,user_option_pattern=None,user_infohash=None,myid=None,mylistenport=None,myoversion=None): bt_connection_2 = BTConnection('', 0, conn, user_infohash=self.infohash, myid=self.myid2) bt_connection_2.read_handshake_medium_rare() msg = UNCHOKE bt_connection_2.send(msg) print >> sys.stderr, "test: Got data connection to us, as coordinator, sent_good_values" else: resp = ol_connection.recv() self.assert_(len(resp) == 0) ol_connection.close() return # 3. 
Our tracker says there is another peer (also us) on port 4810 # Now accept a connection on that port and pretend we're a seeder self.myss.settimeout(10.0) conn, addr = self.myss.accept() options = '\x00\x00\x00\x00\x00\x00\x00\x00' bt_connection = BTConnection('', 0, conn, user_option_pattern=options, user_infohash=self.infohash, myid=self.myid) bt_connection.read_handshake_medium_rare() # Get the number of pieces from the .torrent file torrentfile_content = open(self.torrentfile, "rb") metadata_dict = bdecode(torrentfile_content.read()) torrentfile_content.close() if "length" in metadata_dict["info"]: length = metadata_dict["info"]["length"] else: length = 0 for file in metadata_dict["info"]["files"]: length += file["length"] numpieces = length / metadata_dict["info"]["piece length"] bitf = Bitfield(numpieces) for i in range(numpieces): bitf[i] = True self.assert_(bitf.complete()) msg = BITFIELD + bitf.tostring() bt_connection.send(msg) msg = UNCHOKE bt_connection.send(msg) print >> sys.stderr, "test: Got BT connection to us, as fake seeder, sent_good_values" # 4. Await a RESERVE_PIECES message on the overlay connection resp = ol_connection.recv() self.assert_(resp[0] == RESERVE_PIECES) pieces = self.check_reserve_pieces(resp[1:]) print >> sys.stderr, "test: Got RESERVE_PIECES, sent_good_values" # 5. Reply with PIECES_RESERVED (generate_data, sent_good_values) = genresdict[PIECES_RESERVED] msg = generate_data(pieces) ol_connection.send(msg) if sent_good_values: # 6. Await REQUEST on fake seeder while True: resp = bt_connection.recv() self.assert_(len(resp) > 0) print "test: Fake seeder got message", getMessageName(resp[0]) if resp[0] == REQUEST: self.check_request(resp[1:], pieces) print >> sys.stderr, "test: Fake seeder got REQUEST for reserved piece, sent_good_values" break else: resp = ol_connection.recv() self.assert_(len(resp) == 0) ol_connection.close() return # 7. 
Reply with STOP_DOWNLOAD_HELP (generate_data, sent_good_values) = genresdict[STOP_DOWNLOAD_HELP] msg = generate_data() ol_connection.send(msg) # the other side should close the connection, whether the msg was sent_good_values or bad resp = ol_connection.recv() self.assert_(len(resp) == 0) ol_connection.close()
class SingleDownload():
    """Per-peer download state machine (ProxyService/VOD variant).

    Tracks choke/interest state, outstanding chunk requests, the peer's
    HAVE bitfield and download-rate measurements for a single BitTorrent
    connection, and feeds piece availability into the shared PiecePicker.
    """

    def __init__(self, downloader, connection):
        self.downloader = downloader
        self.connection = connection
        self.choked = True                 # remote peer is choking us until told otherwise
        self.interested = False            # whether we have declared interest to the peer
        self.active_requests = []          # outstanding (index, begin, length) chunk requests
        self.measure = Measure(downloader.max_rate_period)
        self.peermeasure = Measure(downloader.max_rate_period)
        self.have = Bitfield(downloader.numpieces)  # pieces the peer claims to have
        self.last = -1000                  # clock() of last received chunk (snub detection)
        self.last2 = -1000                 # clock() of last unchoke/chunk (snub detection)
        self.example_interest = None
        self.backlog = 2                   # max simultaneous outstanding requests, see _backlog()
        self.ip = connection.get_ip()
        self.guard = BadDataGuard(self)
        # boudewijn: VOD needs a download measurement that is not
        # averaged over a 'long' period. downloader.max_rate_period is
        # (by default) 20 seconds because this matches the unchoke
        # policy.
        self.short_term_measure = Measure(5)
        # boudewijn: each download maintains a counter for the number
        # of high priority piece requests that did not get any
        # responce within x seconds.
        self.bad_performance_counter = 0

    def _backlog(self, just_unchoked):
        """Recompute and return the request backlog limit from the current
        download rate (and a bonus right after an unchoke)."""
        self.backlog = int(min(
            2 + int(4 * self.measure.get_rate() / self.downloader.chunksize),
            (2 * just_unchoked) + self.downloader.queue_limit()))
        if self.backlog > 50:
            # NOTE(review): max(50, backlog * 0.075) only exceeds 50 when
            # backlog > ~666; looks like a dampening heuristic -- confirm intent.
            self.backlog = int(max(50, self.backlog * 0.075))
        return self.backlog

    def disconnected(self):
        """Clean up when the peer connection is lost."""
        self.downloader.lost_peer(self)

        """ JD: obsoleted -- moved to picker.lost_peer

        if self.have.complete():
            self.downloader.picker.lost_seed()
        else:
            for i in xrange(len(self.have)):
                if self.have[i]:
                    self.downloader.picker.lost_have(i)
        """

        # Remember seeds that left during endgame so we can reconnect later
        if self.have.complete() and self.downloader.storage.is_endgame():
            self.downloader.add_disconnected_seed(self.connection.get_readable_id())
        self._letgo()
        self.guard.download = None

    def _letgo(self):
        """Release all outstanding requests and let other unchoked peers
        pick up the pieces this peer was fetching."""
        if self.downloader.queued_out.has_key(self):
            del self.downloader.queued_out[self]
        if not self.active_requests:
            return
        if self.downloader.endgamemode:
            # In endgame every chunk is requested from several peers anyway
            self.active_requests = []
            return
        lost = {}
        for index, begin, length in self.active_requests:
            self.downloader.storage.request_lost(index, begin, length)
            lost[index] = 1
        lost = lost.keys()
        self.active_requests = []
        if self.downloader.paused:
            return
        # Give the released pieces to other peers, in random order
        ds = [d for d in self.downloader.downloads if not d.choked]
        shuffle(ds)
        for d in ds:
            d._request_more()
        # Choked+uninterested peers may become interesting again now
        for d in self.downloader.downloads:
            if d.choked and not d.interested:
                for l in lost:
                    if d.have[l] and self.downloader.storage.do_I_have_requests(l):
                        d.send_interested()
                        break

    def got_choke(self):
        """Peer choked us: drop our outstanding requests."""
        if not self.choked:
            self.choked = True
            self._letgo()

    def got_unchoke(self):
        """Peer unchoked us: start requesting if we are interested."""
        if self.choked:
            self.choked = False
            if self.interested:
                self._request_more(new_unchoke=True)
            self.last2 = clock()

    def is_choked(self):
        return self.choked

    def is_interested(self):
        return self.interested

    def send_interested(self):
        if not self.interested:
            self.interested = True
            self.connection.send_interested()

    def send_not_interested(self):
        if self.interested:
            self.interested = False
            self.connection.send_not_interested()

    def got_piece(self, index, begin, hashlist, piece):
        """
        Returns True if the piece is complete.
        Note that in this case a -piece- means a chunk!
        """
        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG:
                print >> sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter

        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            # Chunk we did not (or no longer) ask for: count as discarded
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG:
                print >> sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(self.downloader.all_requests), "remaining"
        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        # Update statistic gatherer
        status = get_status_holder("LivingLab")
        s_download = status.get_or_create_status_element("downloaded", 0)
        s_download.inc(length)
        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
            # Chunk rejected by storage (e.g. failed hash check)
            self.downloader.piece_flunked(index)
            return False

        # boudewijn: we need more accurate (if possibly invalid)
        # measurements on current download speed
        self.downloader.picker.got_piece(index, begin, length)

        if self.downloader.storage.do_I_have(index):
            # The piece (actual piece, not chunk) is complete
            self.downloader.picker.complete(index)

            # ProxyService_
            #
            if self.downloader.proxydownloader:
                if DEBUG:
                    print >> sys.stderr, "downloader: got_piece. Searching if piece", index, "was requested by a doe node."
                if index in self.downloader.proxydownloader.proxy.currently_downloading_pieces:
                    # get_piece(index, 0, -1) returns the complete piece data
                    [piece_data, hash_list] = self.downloader.storage.get_piece(index, 0, -1)
                    self.downloader.proxydownloader.proxy.retrieved_piece(index, piece_data)
            #
            # _ProxyService

        if self.downloader.endgamemode:
            # Cancel this chunk on every other peer that also requested it
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove((index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)

        # BarterCast counter
        self.connection.total_downloaded += length

        return self.downloader.storage.do_I_have(index)

    def _request_more(self, new_unchoke=False, slowpieces=[]):
        """Fill the request pipeline up to the backlog limit.

        NOTE(review): 'slowpieces=[]' is a mutable default argument; it
        appears to be passed through read-only to picker.next() -- confirm
        no caller mutates it.
        """
        if self.choked:
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: choked, returning"
            return
        if self.downloader.endgamemode:
            self.fix_download_endgame(new_unchoke)
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: endgame mode, returning"
            return
        if self.downloader.paused:
            if DEBUG:
                print >> sys.stderr, "Downloader: _request_more: paused, returning"
            return
        if len(self.active_requests) >= self._backlog(new_unchoke):
            if DEBUG:
                print >> sys.stderr, "Downloader: more req than unchoke (active req: %d >= backlog: %d)" % (len(self.active_requests), self._backlog(new_unchoke))
            # Jelle: Schedule _request more to be called in some time. Otherwise requesting and receiving packages
            # may stop, if they arrive to quickly
            if self.downloader.download_rate:
                wait_period = self.downloader.chunksize / self.downloader.download_rate / 2.0
                # Boudewijn: when wait_period is 0.0 this will cause
                # the the _request_more method to be scheduled
                # multiple times (recursively), causing severe cpu
                # problems.
                #
                # Therefore, only schedule _request_more to be called
                # if the call will be made in the future. The minimal
                # wait_period should be tweaked.
                if wait_period > 1.0:
                    if DEBUG:
                        print >> sys.stderr, "Downloader: waiting for %f s to call _request_more again" % wait_period
                    self.downloader.scheduler(self._request_more, wait_period)
            if not (self.active_requests or self.backlog):
                self.downloader.queued_out[self] = 1
            return

        #if DEBUG:
        #    print >>sys.stderr,"Downloader: _request_more: len act",len(self.active_requests),"back",self.backlog

        lost_interests = []
        while len(self.active_requests) < self.backlog:
            #if DEBUG:
            #    print >>sys.stderr,"Downloader: Looking for interesting piece"
            #st = time.time()
            #print "DOWNLOADER self.have=", self.have.toboollist()

            # This is the PiecePicker call if the current client is a Doe
            # TODO: check if the above comment is true
            interest = self.downloader.picker.next(self.have,
                                                   self.downloader.storage.do_I_have_requests,
                                                   self,
                                                   self.downloader.too_many_partials(),
                                                   slowpieces=slowpieces,
                                                   connection=self.connection)
            #et = time.time()
            #diff = et-st
            if DEBUG:
                diff = -1
                print >> sys.stderr, "Downloader: _request_more: next() returned", interest, "took %.5f" % (diff)

            if interest is None:
                break

            self.example_interest = interest
            self.send_interested()
            loop = True
            while len(self.active_requests) < self.backlog and loop:
                # Request chunks of the chosen piece until it is exhausted
                # or the pipeline is full
                begin, length = self.downloader.storage.new_request(interest)

                if DEBUG:
                    print >> sys.stderr, "Downloader: new_request", interest, begin, length, "to", self.connection.connection.get_ip(), self.connection.connection.get_port()

                self.downloader.picker.requested(interest, begin, length)
                self.active_requests.append((interest, begin, length))
                self.connection.send_request(interest, begin, length)
                self.downloader.chunk_requested(length)
                if not self.downloader.storage.do_I_have_requests(interest):
                    loop = False
                    lost_interests.append(interest)
        if not self.active_requests:
            self.send_not_interested()
        if lost_interests:
            # Pieces we exhausted may leave other idle peers without
            # anything interesting; re-evaluate their interest
            for d in self.downloader.downloads:
                if d.active_requests or not d.interested:
                    continue
                if d.example_interest is not None and self.downloader.storage.do_I_have_requests(d.example_interest):
                    continue
                for lost in lost_interests:
                    if d.have[lost]:
                        break
                else:
                    continue
                #st = time.time()
                interest = self.downloader.picker.next(d.have,
                                                       self.downloader.storage.do_I_have_requests,
                                                       self, # Arno, 2008-05-22; self -> d? Original Pawel code
                                                       self.downloader.too_many_partials(),
                                                       willrequest=False,
                                                       connection=self.connection)
                #et = time.time()
                #diff = et-st
                if DEBUG:
                    diff = -1
                    print >> sys.stderr, "Downloader: _request_more: next()2 returned", interest, "took %.5f" % (diff)
                if interest is None:
                    d.send_not_interested()
                else:
                    d.example_interest = interest

        # Arno: LIVEWRAP: no endgame
        if not self.downloader.endgamemode and \
           self.downloader.storage.is_endgame() and \
           not (self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming):
            self.downloader.start_endgame()

    def fix_download_endgame(self, new_unchoke=False):
        """Endgame-mode counterpart of _request_more(): request every
        still-missing chunk this peer has."""
        if self.downloader.paused:
            if DEBUG:
                print >> sys.stderr, "Downloader: fix_download_endgame: paused", self.downloader.paused
            return

        if len(self.active_requests) >= self._backlog(new_unchoke):
            if not (self.active_requests or self.backlog) and not self.choked:
                self.downloader.queued_out[self] = 1
            if DEBUG:
                print >> sys.stderr, "Downloader: fix_download_endgame: returned"
            return
        # Chunks still wanted globally that this peer has and we have not
        # already requested from it
        want = [a for a in self.downloader.all_requests if self.have[a[0]] and a not in self.active_requests]
        if not (self.active_requests or want):
            self.send_not_interested()
            if DEBUG:
                print >> sys.stderr, "Downloader: fix_download_endgame: not interested"
            return
        if want:
            self.send_interested()
        if self.choked:
            if DEBUG:
                print >> sys.stderr, "Downloader: fix_download_endgame: choked"
            return
        shuffle(want)
        # Trim to the free space in the request pipeline
        del want[self.backlog - len(self.active_requests):]
        self.active_requests.extend(want)
        for piece, begin, length in want:
            self.connection.send_request(piece, begin, length)
            self.downloader.chunk_requested(length)

    def got_have(self, index):
        """Process a HAVE message: record availability and possibly start
        requesting the piece."""
        # print >>sys.stderr,"Downloader: got_have",index
        if DEBUG:
            print >> sys.stderr, "Downloader: got_have", index
        if index == self.downloader.numpieces - 1:
            # Last piece may be shorter than piece_length
            self.downloader.totalmeasure.update_rate(self.downloader.storage.total_length - (self.downloader.numpieces - 1) * self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.total_length - (self.downloader.numpieces - 1) * self.downloader.storage.piece_length)
        else:
            self.downloader.totalmeasure.update_rate(self.downloader.storage.piece_length)
            self.peermeasure.update_rate(self.downloader.storage.piece_length)

        # Arno: LIVEWRAP
        if not self.downloader.picker.is_valid_piece(index):
            if DEBUG:
                print >> sys.stderr, "Downloader: got_have", index, "is invalid piece"
            return # TODO: should we request_more()?
        if self.have[index]:
            return
        self.have[index] = True
        self.downloader.picker.got_have(index, self.connection)

        # ProxyService_
        #
        # Aggregate the haves bitfields and send them to the doe nodes
        # If I am a doe, i will exit shortly
        self.downloader.aggregate_and_send_haves()
        #
        # _ProxyService

        if self.have.complete():
            self.downloader.picker.became_seed()
            if self.downloader.picker.am_I_complete():
                # Two seeds have nothing to exchange: drop the connection
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
                self.connection.close()
            return
        if self.downloader.endgamemode:
            self.fix_download_endgame()
        elif (not self.downloader.paused
              and not self.downloader.picker.is_blocked(index)
              and self.downloader.storage.do_I_have_requests(index)):
            if not self.choked:
                self._request_more()
            else:
                self.send_interested()

    def _check_interests(self):
        """Declare interest if the peer has any piece we can still request."""
        if self.interested or self.downloader.paused:
            return
        for i in xrange(len(self.have)):
            if (self.have[i] and not self.downloader.picker.is_blocked(i)
                    and (self.downloader.endgamemode
                         or self.downloader.storage.do_I_have_requests(i))):
                self.send_interested()
                return

    def got_have_bitfield(self, have):
        """Process the peer's initial BITFIELD message, filtering it
        through the valid range for live/VOD downloads."""
        if self.downloader.picker.am_I_complete() and have.complete():
            # Arno: If we're both seeds
            if self.downloader.super_seeding:
                self.connection.send_bitfield(have.tostring()) # be nice, show you're a seed too

            # Niels: We're both seeds, but try to get some additional peers from this seed
            self.connection.try_send_pex()
            def auto_close():
                self.connection.close()
                self.downloader.add_disconnected_seed(self.connection.get_readable_id())
            self.downloader.scheduler(auto_close, REPEX_LISTEN_TIME)
            return

        if DEBUGBF:
            st = time.time()

        if have.complete():
            # Arno: He is seed
            self.downloader.picker.got_seed()
        else:
            # Arno: pass on HAVE knowledge to PiecePicker and if LIVEWRAP:
            # filter out valid pieces

            # STBSPEED: if we haven't hooked in yet, don't iterate over whole range
            # just over the active ranges in the received Bitfield
            activerangeiterators = []
            if self.downloader.picker.videostatus and self.downloader.picker.videostatus.live_streaming and self.downloader.picker.videostatus.get_live_startpos() is None:
                # Not hooked in
                activeranges = have.get_active_ranges()
                if len(activeranges) == 0:
                    # Bug, fallback to whole range
                    activerangeiterators = [self.downloader.picker.get_valid_range_iterator()]
                else:
                    # Create iterators for the active ranges
                    for (s, e) in activeranges:
                        activerangeiterators.append(xrange(s, e + 1))
            else:
                # Hooked in, use own valid range as active range
                # Arno, 2010-04-20: Not correct for VOD with seeking, then we
                # should store the HAVE info for things before playback too.
                activerangeiterators = [self.downloader.picker.get_valid_range_iterator()]

            if DEBUGBF:
                print >> sys.stderr, "Downloader: got_have_field: live: Filtering bitfield", activerangeiterators

            if not self.downloader.picker.videostatus or self.downloader.picker.videostatus.live_streaming:
                if DEBUGBF:
                    print >> sys.stderr, "Downloader: got_have_field: live or normal filter"
                # Transfer HAVE knowledge to PiecePicker and filter pieces if live
                validhave = Bitfield(self.downloader.numpieces)
                for iterator in activerangeiterators:
                    for i in iterator:
                        if have[i]:
                            validhave[i] = True
                            self.downloader.picker.got_have(i, self.connection)
            else: # VOD
                if DEBUGBF:
                    print >> sys.stderr, "Downloader: got_have_field: VOD filter"
                validhave = Bitfield(self.downloader.numpieces)
                (first, last) = self.downloader.picker.videostatus.download_range()
                for i in xrange(first, last):
                    if have[i]:
                        validhave[i] = True
                        self.downloader.picker.got_have(i, self.connection)

            # ProxyService_
            #
            # Aggregate the haves bitfields and send them to the doe nodes
            # ARNOPS: Shouldn't this be done after have = validhave?
            self.downloader.aggregate_and_send_haves()
            #
            # _ProxyService

            """
            # SANITY CHECK
            checkhave = Bitfield(self.downloader.numpieces)
            for i in self.downloader.picker.get_valid_range_iterator():
                if have[i]:
                    checkhave[i] = True
            assert validhave.tostring() == checkhave.tostring()
            """

            # Store filtered bitfield instead of received one
            have = validhave

        if DEBUGBF:
            et = time.time()
            diff = et - st
            print >> sys.stderr, "Download: got_have_field: took", diff

        self.have = have

        #print >>sys.stderr,"Downloader: got_have_bitfield: valid",`have.toboollist()`

        if self.downloader.endgamemode and not self.downloader.paused:
            for piece, begin, length in self.downloader.all_requests:
                if self.have[piece]:
                    self.send_interested()
                    break
            return
        self._check_interests()

    def get_rate(self):
        return self.measure.get_rate()

    def get_short_term_rate(self):
        return self.short_term_measure.get_rate()

    def is_snubbed(self):
        """Return whether the peer has stopped sending us data; cancels
        outstanding requests and simulates a choke when it has."""
        if not self.choked and clock() - self.last2 > self.downloader.snub_time:
            for index, begin, length in self.active_requests:
                self.connection.send_cancel(index, begin, length)
            self.got_choke() # treat it just like a choke
        return clock() - self.last > self.downloader.snub_time

    def peer_is_complete(self):
        return self.have.complete()