def getTorrent(self, torrent, callback):
    """Locate the collected .torrent file for TORRENT, or request it from peers.

    TORRENT is a dictionary containing torrent information used to display
    the entry on the UI; it is NOT the torrent file itself.  CALLBACK is
    called when the torrent is downloaded; when no torrent can be downloaded
    the callback is ignored.

    Returns the filename when it is already known on disk, otherwise a
    (boolean, request_type) tuple describing whether the torrent was requested.
    """
    torrent_dir = self.guiUtility.utility.session.get_torrent_collecting_dir()

    # fill in the canonical collected filename when the hit does not carry one
    if 'torrent_file_name' not in torrent or not torrent['torrent_file_name']:
        torrent['torrent_file_name'] = get_collected_torrent_filename(torrent['infohash'])
    torrent_filename = os.path.join(torrent_dir, torrent['torrent_file_name'])

    #.torrent found, return complete filename
    if os.path.isfile(torrent_filename):
        return torrent_filename

    #.torrent not found, possibly a new torrent_collecting_dir: retry with
    # the infohash-derived filename instead of the stored one
    torrent['torrent_file_name'] = get_collected_torrent_filename(torrent['infohash'])
    torrent_filename = os.path.join(torrent_dir, torrent['torrent_file_name'])
    if os.path.isfile(torrent_filename):
        return torrent_filename

    #.torrent not found, try to download from peers
    if self.downloadTorrentfileFromPeers(torrent, callback):
        return (True, "from peers")
    return (False, "could not get torrent")
def readTorrent(self, torrent):
    """Best-effort load of the bdecoded metadata for TORRENT.

    Looks for torrent['torrent_path'] on disk, falling back to the current
    torrent collecting directory (same basename, then the canonical
    infohash-derived filename).  On success the decoded dictionary --
    minus the large 'info' part -- is stored under torrent['info'].

    Always returns TORRENT, modified or not; failures are swallowed on
    purpose (best effort).
    """
    try:
        torrent_path = torrent['torrent_path']
        if not path.isfile(torrent_path):
            # torrent not found, try same basename + current collection directory
            torrent_collection_dir = Session.get_instance().get_torrent_collecting_dir()
            _, torrent_filename = path.split(torrent_path)
            torrent_path = path.join(torrent_collection_dir, torrent_filename)
            if not path.isfile(torrent_path):
                # torrent still not found, derive canonical collected filename
                torrent_path = path.join(torrent_collection_dir, get_collected_torrent_filename(torrent['infohash']))

        if path.isfile(torrent_path):
            # 'with' guarantees the handle is closed even when read() raises
            # (the original open/read/close sequence leaked it in that case)
            with open(torrent_path, 'rb') as f:
                _data = f.read()

            data = bdecode(_data)

            assert 'info' in data
            del data['info']
            torrent['info'] = data

        return torrent
    except Exception:
        # deliberate best-effort: return the torrent unmodified on any error
        #print_exc()
        return torrent
def readTorrent(self, torrent):
    """Try to attach the bdecoded metadata (without 'info') to TORRENT.

    The torrent file is searched at torrent['torrent_path'] first, then in
    the current torrent collecting directory.  TORRENT is always returned,
    whether or not metadata could be read.
    """
    try:
        location = torrent['torrent_path']
        if not path.isfile(location):
            # not where the db says: retry inside the collecting directory
            collect_dir = Session.get_instance().get_torrent_collecting_dir()
            basename = path.split(location)[1]
            location = path.join(collect_dir, basename)
            if not path.isfile(location):
                # last resort: canonical filename derived from the infohash
                canonical = get_collected_torrent_filename(torrent['infohash'])
                location = path.join(collect_dir, canonical)

        if path.isfile(location):
            handle = open(location, 'rb')
            raw = handle.read()
            handle.close()

            decoded = bdecode(raw)
            assert 'info' in decoded
            del decoded['info']
            torrent['info'] = decoded

        return torrent
    except Exception:
        # best effort; failures simply leave TORRENT untouched
        #print_exc()
        return torrent
def save_torrent(self, infohash, metadata, source='BC', extra_info=None):
    """Store a collected torrent METADATA blob on disk and register it in the db.

    Returns the collected filename, or None when not initialized or when the
    disk is (close to) full.  EXTRA_INFO defaults to an empty dict; the
    default is created per call to avoid the shared-mutable-default pitfall
    of the original `extra_info={}` signature.
    """
    if extra_info is None:
        extra_info = {}

    # check if disk is full before save it to disk and database
    if not self.initialized:
        return None

    self.check_overflow()

    # re-measure free space only near the limit or every 10 collected torrents
    if self.min_free_space != 0 and (self.free_space - len(metadata) < self.min_free_space or self.num_collected_torrents % 10 == 0):
        self.free_space = self.get_free_space()
        if self.free_space - len(metadata) < self.min_free_space:
            self.warn_disk_full()
            return None

    file_name = get_collected_torrent_filename(infohash)
    if DEBUG:
        print >> sys.stderr,time.asctime(),'-', "metadata: Storing torrent", sha(infohash).hexdigest(),"in",file_name

    save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
    if save_path:
        self.num_collected_torrents += 1
        self.free_space -= len(metadata)
        self.addTorrentToDB(save_path, infohash, metadata, source=source, extra_info=extra_info)

    # check if space is enough and remove old torrents
    return file_name
def getCollectedFilename(self, torrent):
    """Return the on-disk path of the collected .torrent for TORRENT, else None."""
    collecting_dir = self.guiUtility.utility.session.get_torrent_collecting_dir()

    # make sure the hit carries a filename to look for
    if not torrent.get('torrent_file_name'):
        torrent['torrent_file_name'] = get_collected_torrent_filename(torrent['infohash'])

    candidate = os.path.join(collecting_dir, torrent['torrent_file_name'])
    if os.path.isfile(candidate):
        # .torrent found, return complete filename
        return candidate

    # not found -- the collecting dir may have changed; retry with the
    # canonical infohash-derived filename
    torrent['torrent_file_name'] = get_collected_torrent_filename(torrent['infohash'])
    candidate = os.path.join(collecting_dir, torrent['torrent_file_name'])
    if os.path.isfile(candidate):
        return candidate
    # implicit None: nothing collected for this torrent
def save_torrent(self, infohash, metadata, source='BC', extra_info=None):
    """Store a collected torrent METADATA blob on disk and register it in the db.

    Returns the collected filename, or None when not initialized or when the
    disk is (close to) full.  EXTRA_INFO defaults to an empty dict; the
    default is created per call instead of the shared mutable `{}` default
    of the original signature.
    """
    if extra_info is None:
        extra_info = {}

    # check if disk is full before save it to disk and database
    if not self.initialized:
        return None

    self.check_overflow()

    # re-measure free space only near the limit or every 10 collected torrents
    if self.min_free_space != 0 and (
            self.free_space - len(metadata) < self.min_free_space or
            self.num_collected_torrents % 10 == 0):
        self.free_space = self.get_free_space()
        if self.free_space - len(metadata) < self.min_free_space:
            self.warn_disk_full()
            return None

    file_name = get_collected_torrent_filename(infohash)
    if DEBUG:
        print >> sys.stderr, "metadata: Storing torrent", sha(infohash).hexdigest(), "in", file_name

    save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
    if save_path:
        self.num_collected_torrents += 1
        self.free_space -= len(metadata)
        self.addTorrentToDB(save_path, infohash, metadata, source=source, extra_info=extra_info)

    # check if space is enough and remove old torrents
    return file_name
def __torrentdef_retrieved(self, tdef):
    """Handle a torrent retrieved via magnet link: save it to the collected
    directory, compute its swift root-hash, register it in the db and
    notify listeners."""
    infohash = tdef.get_infohash()
    if DEBUG:
        print >>sys.stderr, "magnetrequester: received torrent", bin2str(infohash)

    # remove from requested list
    if infohash in self.requestedInfohashes:
        self.requestedInfohashes.remove(infohash)

    # save torrent under the db-known relative filename, otherwise under the
    # canonical infohash-derived one (absolute stored names are not reused)
    torrent = self.torrent_db.getTorrent(infohash, ["torrent_file_name"], include_mypref=False)
    if torrent and torrent.get("torrent_file_name", False) and not os.path.isabs(torrent["torrent_file_name"]):
        torrent_filename = os.path.join(self.metadatahandler.torrent_dir, torrent["torrent_file_name"])
    else:
        torrent_filename = os.path.join(
            self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash)
        )
    tdef.save(torrent_filename)

    # calculate root-hash
    sdef = SwiftDef()
    sdef.add_content(torrent_filename)
    sdef.finalize(self.session.get_swift_path())

    # add this new torrent to db
    self.torrent_db.addExternalTorrent(tdef, extra_info={"swift_torrent_hash": bin2str(sdef.get_roothash())})

    # notify all
    self.remoteTorrentHandler.metadatahandler_got_torrent(infohash, tdef, torrent_filename)

    self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def _save_torrent(self, tdef, callback=None):
    """Persist TDEF into the collected-torrent store and register it in the db.

    TDEF is first written to a unique tmp_* file in the collecting
    directory, handed to _write_to_collected, and the temp file is removed
    (with an at-exit retry as fallback).  The db work runs on a worker
    thread; CALLBACK, when given, is invoked after the db was updated.
    """
    # find a tmp_ filename that is not in use yet
    tmp_filename = os.path.join(
        self.session.get_torrent_collecting_dir(),
        "tmp_" + get_collected_torrent_filename(tdef.get_infohash()))
    filename_index = 0
    while os.path.exists(tmp_filename):
        filename_index += 1
        tmp_filename = os.path.join(
            self.session.get_torrent_collecting_dir(),
            ("tmp_%d_" % filename_index) + get_collected_torrent_filename(tdef.get_infohash()))

    tdef.save(tmp_filename)
    sdef, swiftpath = self._write_to_collected(tmp_filename)

    try:
        os.remove(tmp_filename)
    except OSError:
        # narrowed from a bare 'except:' -- only file-system errors are
        # expected here; retry the cleanup when the process exits
        atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

    def do_db(callback):
        # add this new torrent to db
        infohash = tdef.get_infohash()
        if self.torrent_db.hasTorrent(infohash):
            self.torrent_db.updateTorrent(infohash, swift_torrent_hash=sdef.get_roothash(), torrent_file_name=swiftpath)
        else:
            self.torrent_db.addExternalTorrent(tdef, extra_info={'filename': swiftpath, 'swift_torrent_hash': sdef.get_roothash(), 'status': 'good'})

        # notify all
        self.notify_possible_torrent_infohash(infohash, True)
        if callback:
            callback()

    startWorker(None, do_db, wargs=(callback,))
def __requestMagnet(self):
    """Pop the next interesting infohash from the priority queue and start a
    magnet retrieval for it, rescheduling itself while capacity remains."""
    try:
        if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
            #request new infohash from queue
            while True:
                if len(self.list) == 0:
                    return
                prio, infohash = self.list.pop(0)

                # skip infohashes that are already being requested
                if infohash in self.requestedInfohashes:
                    if DEBUG:
                        print >> sys.stderr, 'magnetrequester: magnet already requested', bin2str(infohash)
                    continue

                torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name'], include_mypref=False)
                torrent_alt_filename = os.path.join(self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash))
                if torrent and torrent.get('torrent_file_name', False):
                    torrent_filename = os.path.join(self.metadatahandler.torrent_dir, torrent['torrent_file_name'])
                else:
                    torrent_filename = torrent_alt_filename

                # skip infohashes whose .torrent is already collected on disk
                if os.path.isfile(torrent_filename) or os.path.isfile(torrent_alt_filename):
                    if DEBUG:
                        print >> sys.stderr, 'magnetrequester: magnet already on disk', bin2str(infohash)
                else:
                    break #do request
        else: #requesting max_concurrent
            return
    except:
        # NOTE(review): if the exception fired before prio/infohash were
        # assigned, the code below raises NameError -- confirm intent
        print_exc()

    #try magnet link
    magnetlink = "magnet:?xt=urn:btih:" + hexlify(infohash)
    if DEBUG:
        print >> sys.stderr, 'magnetrequester: requesting magnet', bin2str(infohash), prio, magnetlink

    self.requestedInfohashes.add(infohash)
    TorrentDef.retrieve_from_magnet(magnetlink, self.__torrentdef_retrieved, self.MAGNET_RETRIEVE_TIMEOUT)
    # schedule a failure notification in case the magnet never resolves
    self.overlay_bridge.add_task(lambda: self.__torrentdef_failed(infohash), self.MAGNET_RETRIEVE_TIMEOUT, infohash)

    if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
        self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def torrent_exists(self, infohash):
    """If the collected .torrent for INFOHASH is already on disk, register it
    in the db.

    Returns (file_name, metadata) on success and (None, None) otherwise.
    """
    # if the torrent is already on disk, put it in db
    file_name = get_collected_torrent_filename(infohash)
    torrent_path = os.path.join(self.torrent_dir, file_name)
    if not os.path.exists(torrent_path):
        return None, None

    metadata = self.read_torrent(torrent_path)
    if not self.valid_metadata(infohash, metadata):
        # fixed: this used to return a bare None while every other path
        # returns a 2-tuple, breaking callers that unpack the result
        return None, None

    self.addTorrentToDB(torrent_path, infohash, metadata, source="BC", extra_info={})
    return file_name, metadata
def prefetch_hits(self):
    """ Prefetching attempts to reduce the time required to get the
    user the data it wants.

    We assume the torrents at the beginning of self.hits are more likely
    to be selected by the user than the ones at the end.  This allows us
    to perform prefetching operations on a subselection of these items.

    The prefetch_hits function can be called multiple times.  It will
    only attempt to prefetch every PREFETCH_DELAY seconds.  This gives
    search results from multiple sources the chance to be received and
    sorted before prefetching a subset.
    """
    if DEBUG:
        begin_time = time()  # only used by the debug timing report below
    torrent_dir = Session.get_instance().get_torrent_collecting_dir()
    hit_counter = 0
    prefetch_counter = 0

    # prefetch .torrent files if they are from buddycast sources
    for hit in self.hits:

        def sesscb_prefetch_done(infohash, metadata, filename):
            # download-finished callback; only reports timing when DEBUG
            if DEBUG:
                # find the original hit
                for hit in self.hits:
                    if hit["infohash"] == infohash:
                        print >> sys.stderr, "Prefetch: in", "%.1fs" % (time() - begin_time), `hit["name"]`
                        return
                print >> sys.stderr, "Prefetch BUG. We got a hit from something we didn't ask for"

        # make sure the hit carries a collected filename to check for
        if 'torrent_file_name' not in hit or not hit['torrent_file_name']:
            hit['torrent_file_name'] = get_collected_torrent_filename(hit['infohash'])
        torrent_filename = os.path.join(torrent_dir, hit['torrent_file_name'])

        # only ask peers when the .torrent is not on disk yet
        if not os.path.isfile(torrent_filename):
            if self.downloadTorrentfileFromPeers(hit, sesscb_prefetch_done, duplicate=False, prio = 1):
                prefetch_counter += 1
                if DEBUG:
                    print >> sys.stderr, "Prefetch: attempting to download", `hit["name"]`

        hit_counter += 1
        if prefetch_counter >= 10 or hit_counter >= 25:
            # (1) prefetch a maximum of N hits
            # (2) prefetch only from the first M hits
            # (.) whichever of (1) or (2) is hit first
            break
def tqueue_save_collected_torrent(self, metatype, metadata):
    """ Run by TimedTaskQueueThread """
    # reconstruct the TorrentDef from either a URL or raw bencoded data
    if metatype == URL_MIME_TYPE:
        definition = TorrentDef.load_from_url(metadata)
    else:
        definition = TorrentDef.load_from_dict(bdecode(metadata))

    # store it under the canonical collected filename
    destination = os.path.join(
        self.session.get_torrent_collecting_dir(),
        get_collected_torrent_filename(definition.get_infohash()))

    print >> sys.stderr, "bg: search: saving remotehit", destination
    definition.save(destination)
    return destination
def torrent_exists(self, infohash):
    """Register the on-disk collected torrent for INFOHASH in the db, if any.

    Returns (file_name, metadata) when found and valid, (None, None) otherwise.
    """
    file_name = get_collected_torrent_filename(infohash)
    torrent_path = os.path.join(self.torrent_dir, file_name)

    if not os.path.exists(torrent_path):
        # nothing collected for this infohash yet
        return None, None

    metadata = self.read_torrent(torrent_path)
    if not self.valid_metadata(infohash, metadata):
        return None, None

    # torrent already on disk: make the db aware of it
    self.addTorrentToDB(torrent_path, infohash, metadata, source="BC", extra_info={})
    return file_name, metadata
def tqueue_save_collected_torrent(self, metatype, metadata):
    """ Run by TimedTaskQueueThread """
    # materialize the torrent definition from a URL or a bencoded payload
    if metatype == URL_MIME_TYPE:
        tdef = TorrentDef.load_from_url(metadata)
    else:
        tdef = TorrentDef.load_from_dict(bdecode(metadata))

    # save under the canonical collected filename
    colldir = self.session.get_torrent_collecting_dir()
    torrentpath = os.path.join(colldir, get_collected_torrent_filename(tdef.get_infohash()))

    print >>sys.stderr,"bg: search: saving remotehit",torrentpath
    tdef.save(torrentpath)
    return torrentpath
def __requestMagnet(self):
    """Pop the next interesting infohash from the priority queue and start a
    magnet retrieval for it, rescheduling itself while capacity remains."""
    try:
        if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
            # request new infohash from queue
            while True:
                if len(self.list) == 0:
                    return
                prio, infohash = self.list.pop(0)

                # skip infohashes that are already being requested
                if infohash in self.requestedInfohashes:
                    if DEBUG:
                        print >>sys.stderr, "magnetrequester: magnet already requested", bin2str(infohash)
                    continue

                torrent = self.torrent_db.getTorrent(infohash, ["torrent_file_name"], include_mypref=False)
                torrent_alt_filename = os.path.join(
                    self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash)
                )
                if torrent and torrent.get("torrent_file_name", False):
                    torrent_filename = os.path.join(self.metadatahandler.torrent_dir, torrent["torrent_file_name"])
                else:
                    torrent_filename = torrent_alt_filename

                # skip infohashes whose .torrent is already collected on disk
                if os.path.isfile(torrent_filename) or os.path.isfile(torrent_alt_filename):
                    if DEBUG:
                        print >>sys.stderr, "magnetrequester: magnet already on disk", bin2str(infohash)
                else:
                    break  # do request
        else:  # requesting max_concurrent
            return
    except:
        # NOTE(review): if the exception fired before prio/infohash were
        # assigned, the code below raises NameError -- confirm intent
        print_exc()

    # try magnet link
    magnetlink = "magnet:?xt=urn:btih:" + hexlify(infohash)
    if DEBUG:
        print >>sys.stderr, "magnetrequester: requesting magnet", bin2str(infohash), prio, magnetlink

    self.requestedInfohashes.add(infohash)
    TorrentDef.retrieve_from_magnet(magnetlink, self.__torrentdef_retrieved, self.MAGNET_RETRIEVE_TIMEOUT)
    # schedule a failure notification in case the magnet never resolves
    self.overlay_bridge.add_task(lambda: self.__torrentdef_failed(infohash), self.MAGNET_RETRIEVE_TIMEOUT, infohash)

    if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
        self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def __torrentdef_retrieved(self, tdef):
    """Handle a torrent retrieved via magnet link: save it to the collected
    directory, compute its swift root-hash, register it in the db and
    notify listeners."""
    infohash = tdef.get_infohash()
    if DEBUG:
        print >> sys.stderr, 'magnetrequester: received torrent', bin2str(infohash)

    #remove from requested list
    if infohash in self.requestedInfohashes:
        self.requestedInfohashes.remove(infohash)

    #save torrent under the db-known relative filename, otherwise under the
    # canonical infohash-derived one (absolute stored names are not reused)
    torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name'], include_mypref=False)
    if torrent and torrent.get('torrent_file_name', False) and not os.path.isabs(torrent['torrent_file_name']):
        torrent_filename = os.path.join(self.metadatahandler.torrent_dir, torrent['torrent_file_name'])
    else:
        torrent_filename = os.path.join(self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash))
    tdef.save(torrent_filename)

    #calculate root-hash
    sdef = SwiftDef()
    sdef.add_content(torrent_filename)
    sdef.finalize(self.session.get_swift_path())

    #add this new torrent to db
    self.torrent_db.addExternalTorrent(tdef, extra_info={'swift_torrent_hash': bin2str(sdef.get_roothash())})

    #notify all
    self.remoteTorrentHandler.metadatahandler_got_torrent(infohash, tdef, torrent_filename)

    self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def __torrentdef_retrieved(self, tdef):
    """A magnet retrieval finished: persist TDEF, add it to the torrent db
    and notify interested parties."""
    infohash = tdef.get_infohash()
    if DEBUG:
        print >> sys.stderr, 'magnetrequester: received torrent', bin2str(infohash)

    # no longer an outstanding request
    self.requestedInfohashes.discard(infohash)

    # write the torrent to the collected directory
    collected_path = os.path.join(self.metadatahandler.torrent_dir,
                                  get_collected_torrent_filename(infohash))
    tdef.save(collected_path)

    # register it in the db
    self.session.open_dbhandler('torrents').addExternalTorrent(tdef)

    # notify all
    self.remoteTorrentHandler.metadatahandler_got_torrent(infohash, tdef, collected_path)

    self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def __torrentdef_retrieved(self, tdef):
    """Persist a magnet-retrieved torrent, register it and notify listeners."""
    infohash = tdef.get_infohash()
    if DEBUG:
        print >> sys.stderr, 'magnetrequester: received torrent', bin2str(infohash)

    # drop from the outstanding-request set
    self.requestedInfohashes.discard(infohash)

    # prefer the (relative) filename already known to the db, otherwise fall
    # back to the canonical infohash-derived one
    record = self.torrent_db.getTorrent(infohash, ['torrent_file_name'], include_mypref = False)
    known_name = record and record.get('torrent_file_name', False)
    if known_name and not os.path.isabs(known_name):
        torrent_filename = os.path.join(self.metadatahandler.torrent_dir, known_name)
    else:
        torrent_filename = os.path.join(self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash))
    tdef.save(torrent_filename)

    #add this new torrent to db
    self.torrent_db.addExternalTorrent(tdef)

    #notify all
    self.remoteTorrentHandler.metadatahandler_got_torrent(infohash, tdef, torrent_filename)

    self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def read_and_send_metadata(self, permid, infohash, torrent_path, selversion):
    """Read the collected .torrent at TORRENT_PATH and send its metadata to
    PERMID, honouring the peer's overlay protocol version SELVERSION.
    Returns the result of do_send_metadata, or 0 when nothing was sent."""
    torrent_data = self.read_torrent(torrent_path)
    if torrent_data:
        # Arno: Don't send private torrents
        try:
            metainfo = bdecode(torrent_data)
            if 'info' in metainfo and 'private' in metainfo['info'] and metainfo['info']['private']:
                if DEBUG:
                    print >> sys.stderr,"metadata: Not sending torrent", `torrent_path`,"because it is private"
                return 0
        except:
            print_exc()
            return 0

        if DEBUG:
            print >> sys.stderr,"metadata: sending torrent", `torrent_path`, len(torrent_data)

        torrent = {}
        torrent['torrent_hash'] = infohash

        # P2PURLs: If URL compat then send URL
        tdef = TorrentDef.load_from_dict(metainfo)
        if selversion >= OLPROTO_VER_ELEVENTH and tdef.get_url_compat():
            torrent['metatype'] = URL_MIME_TYPE
            torrent['metadata'] = tdef.get_url()
        else:
            torrent['metatype'] = TSTREAM_MIME_TYPE
            torrent['metadata'] = torrent_data

        # newer peers also get swarm-health info from the db
        if selversion >= OLPROTO_VER_FOURTH:
            data = self.torrent_db.getTorrent(infohash)
            if data is None:
                # DB inconsistency
                return 0
            nleechers = data.get('leecher', -1)
            nseeders = data.get('seeder', -1)
            last_check_ago = int(time()) - data.get('last_check_time', 0)    # relative time
            if last_check_ago < 0:
                last_check_ago = 0
            status = data.get('status', 'unknown')

            torrent.update({'leecher':nleechers,
                            'seeder':nseeders,
                            'last_check_time':last_check_ago,
                            'status':status})
        return self.do_send_metadata(permid, torrent, selversion)
    else:
        # file not readable at the given path: retry once with the canonical
        # collected location before giving up
        file_name = get_collected_torrent_filename(infohash)
        torrent_path2 = os.path.join(self.torrent_dir, file_name)
        samefile = os.path.abspath(torrent_path) == os.path.abspath(torrent_path2)
        if os.path.exists(torrent_path2) and not samefile:
            return self.read_and_send_metadata(permid, infohash, torrent_path2, selversion)

        # deleted before sending it
        self.torrent_db.deleteTorrent(infohash, delete_file=True, commit=True)
        if DEBUG:
            print >> sys.stderr,"metadata: GET_METADATA: no torrent data to send"
        return 0
def read_and_send_metadata(self, permid, infohash, torrent_path, selversion):
    """Read the collected .torrent at TORRENT_PATH and send its metadata to
    PERMID, honouring the peer's overlay protocol version SELVERSION.
    Returns the result of do_send_metadata, or 0 when nothing was sent."""
    torrent_data = self.read_torrent(torrent_path)
    if torrent_data:
        # Arno: Don't send private torrents
        try:
            metainfo = bdecode(torrent_data)
            if 'info' in metainfo and 'private' in metainfo['info'] and metainfo['info']['private']:
                if DEBUG:
                    print >> sys.stderr, "metadata: Not sending torrent", `torrent_path`, "because it is private"
                return 0
        except:
            print_exc()
            return 0

        if DEBUG:
            print >> sys.stderr, "metadata: sending torrent", `torrent_path`, len(torrent_data)

        torrent = {}
        torrent['torrent_hash'] = infohash

        # P2PURLs: If URL compat then send URL
        tdef = TorrentDef.load_from_dict(metainfo)
        if selversion >= OLPROTO_VER_ELEVENTH and tdef.get_url_compat():
            torrent['metatype'] = URL_MIME_TYPE
            torrent['metadata'] = tdef.get_url()
        else:
            torrent['metatype'] = TSTREAM_MIME_TYPE
            torrent['metadata'] = torrent_data

        # newer peers also get swarm-health info from the db
        if selversion >= OLPROTO_VER_FOURTH:
            data = self.torrent_db.getTorrent(infohash)
            if data is None:
                # DB inconsistency
                return 0
            nleechers = data.get('leecher', -1)
            nseeders = data.get('seeder', -1)
            last_check_ago = int(time()) - data.get('last_check_time', 0)    # relative time
            if last_check_ago < 0:
                last_check_ago = 0
            status = data.get('status', 'unknown')

            torrent.update({
                'leecher': nleechers,
                'seeder': nseeders,
                'last_check_time': last_check_ago,
                'status': status
            })
        return self.do_send_metadata(permid, torrent, selversion)
    else:
        # file not readable at the given path: retry once with the canonical
        # collected location before giving up
        file_name = get_collected_torrent_filename(infohash)
        torrent_path2 = os.path.join(self.torrent_dir, file_name)
        samefile = os.path.abspath(torrent_path) == os.path.abspath(torrent_path2)
        if os.path.exists(torrent_path2) and not samefile:
            return self.read_and_send_metadata(permid, infohash, torrent_path2, selversion)

        # deleted before sending it
        self.torrent_db.deleteTorrent(infohash, delete_file=True, commit=True)
        if DEBUG:
            print >> sys.stderr, "metadata: GET_METADATA: no torrent data to send"
        return 0
def gettorrentfilename(self, tdef):
    """Return the full path where the collected .torrent for TDEF lives."""
    collecting_dir = self.session.get_torrent_collecting_dir()
    collected_name = get_collected_torrent_filename(tdef.get_infohash())
    return os.path.join(collecting_dir, collected_name)
def __requestMagnet(self):
    """Pick the next needed infohash (under listLock) and launch a magnet
    retrieval for it, rescheduling itself while below MAX_CONCURRENT."""
    self.listLock.acquire()
    try:
        if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
            #request new infohash from queue
            while True:
                if len(self.list) == 0:
                    return
                prio, infohash = self.list.pop(0)

                torrent_filename = os.path.join(self.metadatahandler.torrent_dir, get_collected_torrent_filename(infohash))
                if infohash in self.requestedInfohashes:
                    if DEBUG:
                        print >> sys.stderr, 'magnetrequester: magnet already requested', bin2str(infohash)
                elif os.path.isfile(torrent_filename):
                    if DEBUG:
                        print >> sys.stderr, 'magnetrequester: magnet already on disk', bin2str(infohash)
                else:
                    break
        else: #requesting max_concurrent
            return
    except:
        print_exc()
        # fixed: 'infohash'/'prio' may be unbound when the exception fired
        # during selection; falling through used to raise a NameError here
        return
    finally:
        self.listLock.release()

    #try magnet link
    magnetlink = "magnet:?xt=urn:btih:" + hexlify(infohash)
    if DEBUG:
        print >> sys.stderr, 'magnetrequester: requesting magnet', bin2str(infohash), prio, magnetlink

    self.requestedInfohashes.add(infohash)
    TorrentDef.retrieve_from_magnet(magnetlink, self.__torrentdef_retrieved, self.MAGNET_RETRIEVE_TIMEOUT)
    # schedule a failure notification in case the magnet never resolves
    self.overlay_bridge.add_task(lambda: self.__torrentdef_failed(infohash), self.MAGNET_RETRIEVE_TIMEOUT, infohash)

    if len(self.requestedInfohashes) < self.MAX_CONCURRENT:
        self.overlay_bridge.add_task(self.__requestMagnet, self.REQUEST_INTERVAL)
def _save_torrent(self, tdef, callback = None):
    """Persist TDEF into the collected-torrent store and register it in the db.

    TDEF is first written to a unique tmp_* file in the collecting
    directory, handed to _write_to_collected, and the temp file is removed
    (with an at-exit retry as fallback).  The db work runs on a worker
    thread; CALLBACK, when given, is invoked after the db was updated.
    """
    # find a tmp_ filename that is not in use yet
    tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), "tmp_"+get_collected_torrent_filename(tdef.get_infohash()))
    filename_index = 0
    while os.path.exists(tmp_filename):
        filename_index += 1
        tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), ("tmp_%d_"%filename_index)+get_collected_torrent_filename(tdef.get_infohash()))

    tdef.save(tmp_filename)
    sdef, swiftpath = self._write_to_collected(tmp_filename)

    try:
        os.remove(tmp_filename)
    except OSError:
        # narrowed from a bare 'except:' -- only file-system errors are
        # expected here; retry the cleanup when the process exits
        atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

    def do_db(callback):
        #add this new torrent to db
        infohash = tdef.get_infohash()
        if self.torrent_db.hasTorrent(infohash):
            self.torrent_db.updateTorrent(infohash, swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
        else:
            self.torrent_db.addExternalTorrent(tdef, extra_info = {'filename': swiftpath, 'swift_torrent_hash':sdef.get_roothash(), 'status':'good'})

        #notify all
        self.notify_possible_torrent_infohash(infohash, True)
        if callback:
            callback()

    startWorker(None, do_db, wargs = (callback, ))