def resume_download(self, filename, setupDelay=0):
    """Resume a download from a persisted checkpoint (pstate) file.

    First tries to rebuild the TorrentDef and DownloadStartupConfig from
    the pstate file. If the pstate is invalid or missing, falls back to
    the torrent data kept in ``self.torrent_store``, keyed by the
    infohash that is hex-encoded in the checkpoint filename.

    :param filename: path of the checkpoint (pstate) file.
    :param setupDelay: delay forwarded to ``self.add()``.
    """
    tdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)

        # SWIFTPROC
        metainfo = pstate.get('state', 'metainfo')
        if 'infohash' in metainfo:
            # Magnet-style checkpoint: no full metainfo available yet.
            tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
        else:
            tdef = TorrentDef.load_from_dict(metainfo)

        # Older pstates stored 'saveas' as a tuple; keep only the last entry.
        if pstate.has_option('download_defaults', 'saveas') and \
                isinstance(pstate.get('download_defaults', 'saveas'), tuple):
            pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])

        dscfg = DownloadStartupConfig(pstate)
    except Exception:  # was a bare except: narrowed so KeyboardInterrupt/SystemExit propagate
        # pstate is invalid or non-existing
        _, basename = os.path.split(filename)
        # Filename is '<hex infohash>.<ext>'; drop the 6-char suffix to get the hex digest.
        infohash = binascii.unhexlify(basename[:-6])
        torrent_data = self.torrent_store.get(infohash)
        if torrent_data:
            try:
                tdef = TorrentDef.load_from_memory(torrent_data)
                defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
                dscfg = defaultDLConfig.copy()

                if self.mypref_db is not None:
                    # Restore the previously chosen destination directory, if it still exists.
                    dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                    if dest_dir and os.path.isdir(dest_dir):
                        dscfg.set_dest_dir(dest_dir)
            except ValueError:
                self._logger.warning("tlm: torrent data invalid")

    if pstate is not None:
        has_resume_data = pstate.get('state', 'engineresumedata') is not None
        self._logger.debug("tlm: load_checkpoint: resumedata %s",
                           'len %s ' % len(pstate.get('state', 'engineresumedata')) if has_resume_data else 'None')

    if tdef and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if self.download_exists(tdef.get_infohash()):
                    self._logger.info("tlm: not resuming checkpoint because download has already been added")
                elif dscfg.get_credit_mining() and not self.session.config.get_credit_mining_enabled():
                    self._logger.info("tlm: not resuming checkpoint since token mining is disabled")
                else:
                    self.add(tdef, dscfg, pstate, setupDelay=setupDelay)
            except Exception:
                # Unused 'as e' binding removed; the exception is logged with traceback.
                self._logger.exception("tlm: load check_point: exception while adding download %s", tdef)
        else:
            # Empty dest dir marks a removed torrent: discard its stale checkpoint.
            self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir())
            os.remove(filename)
    else:
        self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)
def update_trackers(self, infohash, trackers):
    """ Update the trackers for a download.

    :param infohash: infohash of the torrent that needs to be updated
    :param trackers: A list of tracker urls.
    """
    download = self.get_download(infohash)
    current_def = download.get_def() if download else None
    if not current_def:
        return

    known = current_def.get_trackers_as_single_tuple()
    added = list(set(trackers) - set(known))
    combined = list(known) + added
    if not added:
        return

    # Register the previously unknown trackers with the running download.
    download.add_trackers(added)

    # Build a replacement TorrentDef carrying the merged tracker list.
    if isinstance(current_def, TorrentDefNoMetainfo):
        updated_def = TorrentDefNoMetainfo(current_def.get_infohash(), current_def.get_name(),
                                           download.get_magnet_link())
    else:
        metainfo = current_def.get_metainfo()
        if len(combined) > 1:
            metainfo["announce-list"] = [combined]
        else:
            metainfo["announce"] = combined[0]
        updated_def = TorrentDef.load_from_dict(metainfo)

    # Install the new definition and persist the download state.
    download.set_def(updated_def)
    download.checkpoint()
def on_metadata_received_alert(self, alert):
    """Handle libtorrent's metadata_received_alert.

    Rebuilds ``self.tdef`` from the freshly received metadata, records the
    original file paths, applies name/piece-range/file selection, and
    checkpoints the download.
    """
    torrent_info = get_info_from_handle(self.handle)
    if not torrent_info:
        return

    metadata = {'info': lt.bdecode(torrent_info.metadata())}

    # Fold the handle's current trackers into the metainfo dict.
    trackers = [tracker['url'] for tracker in self.handle.trackers()]
    if trackers:
        if len(trackers) > 1:
            metadata["announce-list"] = [trackers]
        else:
            metadata["announce"] = trackers[0]

    try:
        self.tdef = TorrentDef.load_from_dict(metadata)
    except ValueError as ve:
        # Metadata did not form a valid torrent dict; keep the old state.
        self._logger.exception(ve)
        return

    try:
        torrent_files = lt.torrent_info(metadata).files()
    except RuntimeError:
        # libtorrent raises RuntimeError when the metainfo holds no file list.
        self._logger.warning("Torrent contains no files!")
        torrent_files = []

    self.orig_files = [torrent_file.path.decode('utf-8') for torrent_file in torrent_files]
    self.set_corrected_infoname()
    self.set_filepieceranges()
    self.set_selected_files()
    self.checkpoint()
def _add_torrent_and_check(metainfo):
    # Parse the metainfo fetched from the DHT and sanity-check its infohash
    # against the locally generated one from the enclosing scope.
    fetched_tdef = TorrentDef.load_from_dict(metainfo)
    assert fetched_tdef.infohash == infohash, "DHT infohash does not match locally generated one"
    self._logger.info("Chant-managed torrent fetched from DHT. Adding it to local cache, %s", self.infohash)
    # Persist the torrent in the local database before asking for its health.
    self.session.lm.torrent_db.addExternalTorrent(fetched_tdef)
    self.session.lm.torrent_db._db.commit_now()
    make_torrent_health_request()
def resume_download(self, filename, setupDelay=0):
    """Resume a download from a persisted checkpoint (pstate) file.

    Falls back to the torrent store (keyed by the infohash encoded in the
    checkpoint filename) when the pstate cannot be loaded or parsed.

    :param filename: path of the checkpoint (pstate) file.
    :param setupDelay: delay forwarded to ``self.add()``.
    """
    tdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)
        # SWIFTPROC
        metainfo = pstate.get('state', 'metainfo')
        if 'infohash' in metainfo:
            # Magnet-style checkpoint without full metainfo.
            tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
        else:
            tdef = TorrentDef.load_from_dict(metainfo)

        # Older pstates stored 'saveas' as a tuple; keep only the last entry.
        if pstate.has_option('download_defaults', 'saveas') and \
                isinstance(pstate.get('download_defaults', 'saveas'), tuple):
            pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])

        dscfg = DownloadStartupConfig(pstate)
    except:
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)
        # Filename is '<hex infohash>.<ext>'; strip the 6-char suffix.
        infohash = binascii.unhexlify(file[:-6])
        torrent_data = self.torrent_store.get(infohash)
        if torrent_data:
            try:
                tdef = TorrentDef.load_from_memory(torrent_data)
                defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
                dscfg = defaultDLConfig.copy()
                if self.mypref_db is not None:
                    # Restore the previously chosen destination directory, if it still exists.
                    dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                    if dest_dir and os.path.isdir(dest_dir):
                        dscfg.set_dest_dir(dest_dir)
            except ValueError:
                self._logger.warning("tlm: torrent data invalid")

    if pstate is not None:
        has_resume_data = pstate.get('state', 'engineresumedata') is not None
        self._logger.debug("tlm: load_checkpoint: resumedata %s",
                           'len %s ' % len(pstate.get('state', 'engineresumedata')) if has_resume_data else 'None')

    if tdef and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists(tdef.get_infohash()):
                    self.add(tdef, dscfg, pstate, setupDelay=setupDelay)
                else:
                    self._logger.info("tlm: not resuming checkpoint because download has already been added")
            except Exception as e:
                self._logger.exception("tlm: load check_point: exception while adding download %s", tdef)
        else:
            # Empty dest dir marks a removed torrent: discard its stale checkpoint.
            self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir())
            os.remove(filename)
    else:
        self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)
def test_load_from_dict(self):
    """Loading a minimal metainfo dict should produce a valid torrent."""
    info = {
        "name": "my_torrent",
        "piece length": 12345,
        "pieces": "12345678901234567890",
        "files": [],
    }
    loaded = TorrentDef.load_from_dict({"info": info})
    self.assertTrue(isValidTorrentFile(loaded.get_metainfo()))
def read_and_send_metadata(self, permid, infohash, torrent_path, selversion):
    """Read a collected torrent from disk and send its metadata to a peer.

    Legacy Python 2 code (print >> / backtick repr).  Private torrents are
    never sent.  Returns the result of do_send_metadata, or 0 when nothing
    was sent.
    """
    torrent_data = self.read_torrent(torrent_path)
    if torrent_data:
        # Arno: Don't send private torrents
        try:
            metainfo = bdecode(torrent_data)
            if 'info' in metainfo and 'private' in metainfo['info'] and metainfo['info']['private']:
                if DEBUG:
                    print >> sys.stderr,time.asctime(),'-', "metadata: Not sending torrent", `torrent_path`,"because it is private"
                return 0
        except:
            # Undecodable torrent data: give up on this request.
            print_exc()
            return 0

        if DEBUG:
            print >> sys.stderr,time.asctime(),'-', "metadata: sending torrent", `torrent_path`, len(torrent_data)

        torrent = {}
        torrent['torrent_hash'] = infohash
        # P2PURLs: If URL compat then send URL
        tdef = TorrentDef.load_from_dict(metainfo)
        if selversion >= OLPROTO_VER_ELEVENTH and tdef.get_url_compat():
            torrent['metatype'] = URL_MIME_TYPE
            torrent['metadata'] = tdef.get_url()
        else:
            torrent['metatype'] = TSTREAM_MIME_TYPE
            torrent['metadata'] = torrent_data

        # Newer protocol versions also carry swarm-health statistics.
        if selversion >= OLPROTO_VER_FOURTH:
            data = self.torrent_db.getTorrent(infohash)
            if data is None:
                # DB inconsistency
                return 0
            nleechers = data.get('leecher', -1)
            nseeders = data.get('seeder', -1)
            last_check_ago = int(time()) - data.get('last_check_time', 0)  # relative time
            if last_check_ago < 0:
                last_check_ago = 0
            status = data.get('status', 'unknown')
            torrent.update({'leecher':nleechers, 'seeder':nseeders,
                            'last_check_time':last_check_ago, 'status':status})
        return self.do_send_metadata(permid, torrent, selversion)
    else:
        # deleted before sending it
        self.torrent_db.deleteTorrent(infohash, delete_file=True, updateFlag=True)
        if DEBUG:
            print >> sys.stderr,time.asctime(),'-', "metadata: GET_METADATA: no torrent data to send"
        return 0
def update_trackers(self, infohash, trackers):
    """ Update the trackers for a download.
    :param infohash: infohash of the torrent that needs to be updated
    :param trackers: A list of tracker urls.
    """
    dl = self.get_download(infohash)
    old_def = dl.get_def() if dl else None
    if old_def:
        old_trackers = old_def.get_trackers_as_single_tuple()
        # Only trackers we did not already know about.
        new_trackers = list(set(trackers) - set(old_trackers))
        all_trackers = list(old_trackers) + new_trackers
        if new_trackers:
            # Add new trackers to the download
            dl.add_trackers(new_trackers)
            # Create a new TorrentDef
            if isinstance(old_def, TorrentDefNoMetainfo):
                new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(),
                                               dl.get_magnet_link())
            else:
                metainfo = old_def.get_metainfo()
                if len(all_trackers) > 1:
                    metainfo["announce-list"] = [all_trackers]
                else:
                    metainfo["announce"] = all_trackers[0]
                new_def = TorrentDef.load_from_dict(metainfo)
            # Set TorrentDef + checkpoint
            dl.set_def(new_def)
            dl.checkpoint()

            if isinstance(old_def, TorrentDefNoMetainfo):
                # No metainfo yet: record the tracker mapping in the megacache
                # instead.  @forceDBThread presumably schedules this on the DB
                # thread -- TODO confirm decorator semantics.
                @forceDBThread
                def update_trackers_db(infohash, new_trackers):
                    torrent_id = self.torrent_db.getTorrentID(infohash)
                    if torrent_id is not None:
                        self.torrent_db.addTorrentTrackerMappingInBatch(
                            torrent_id, new_trackers)
                        self.session.notifier.notify(
                            NTFY_TORRENTS, NTFY_UPDATE, infohash)

                if self.session.config.get_megacache_enabled():
                    update_trackers_db(infohash, new_trackers)
            elif not isinstance(
                    old_def, TorrentDefNoMetainfo) and self.rtorrent_handler:
                # Update collected torrents
                self.rtorrent_handler.save_torrent(new_def)
def render_POST(self, request):
    """Commit the user's channel torrent; reply 404 when no channel exists."""
    with db_session:
        channel = self.session.lm.mds.ChannelMetadata.get_my_channel()

    if not channel:
        request.setResponseCode(http.NOT_FOUND)
        return json.dumps({"error": "your channel has not been created"})

    committed = channel.commit_channel_torrent()
    if committed:
        # Notify the gigachannel manager about the freshly committed torrent.
        new_tdef = TorrentDef.load_from_dict(committed)
        self.session.lm.gigachannel_manager.updated_my_channel(new_tdef)

    return json.dumps({"success": True})
def test_create_torrent_from_def(self):
    """
    Testing whether a correct Dispersy message is created when we add a torrent to our channel
    """
    file_entry = {'path': ['test.txt'], 'length': 1234}
    metainfo = {
        "info": {
            "name": "my_torrent",
            "piece length": 12345,
            "pieces": "12345678901234567890",
            "files": [file_entry],
        }
    }
    tdef = TorrentDef.load_from_dict(metainfo)
    self.channel_community.initialize()
    created = self.channel_community._disp_create_torrent_from_torrentdef(tdef, 12345)
    self.assertEqual(created.payload.name, "my_torrent")
    self.assertEqual(len(created.payload.files), 1)
def resume_download(self, filename, setupDelay=0):
    """Resume a download from a persisted checkpoint (pstate) file.

    Unlike the sibling variants, this version has no fallback path: a
    broken pstate file will raise out of this method.

    :param filename: path of the checkpoint (pstate) file.
    :param setupDelay: delay forwarded to ``self.add()``.
    """
    tdef = dscfg = pstate = None
    pstate = self.load_download_pstate(filename)

    metainfo = pstate.get('state', 'metainfo')
    if 'infohash' in metainfo:
        # Magnet-style checkpoint without full metainfo.
        tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
    else:
        tdef = TorrentDef.load_from_dict(metainfo)

    # Older pstates stored 'saveas' as a tuple; keep only the last entry.
    if pstate.has_option('download_defaults', 'saveas') and \
            isinstance(pstate.get('download_defaults', 'saveas'), tuple):
        pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1])

    dscfg = DownloadStartupConfig(pstate)

    if pstate is not None:
        has_resume_data = pstate.get('state', 'engineresumedata') is not None
        self._logger.debug(
            "tlm: load_checkpoint: resumedata %s",
            'len %s ' % len(pstate.get('state', 'engineresumedata')) if has_resume_data else 'None')

    if tdef and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if self.download_exists(tdef.get_infohash()):
                    self._logger.info(
                        "tlm: not resuming checkpoint because download has already been added"
                    )
                elif dscfg.get_credit_mining(
                ) and not self.session.config.get_credit_mining_enabled():
                    self._logger.info(
                        "tlm: not resuming checkpoint since token mining is disabled"
                    )
                else:
                    self.add(tdef, dscfg, pstate, setupDelay=setupDelay)
            except Exception as e:
                self._logger.exception(
                    "tlm: load check_point: exception while adding download %s", tdef)
        else:
            # Empty dest dir marks a removed torrent: discard its stale checkpoint.
            self._logger.info("tlm: removing checkpoint %s destdir is %s",
                              filename, dscfg.get_dest_dir())
            os.remove(filename)
    else:
        self._logger.info("tlm: could not resume checkpoint %s %s %s",
                          filename, tdef, dscfg)
def valid_metadata(self, infohash, metadata):
    """Return True when *metadata* bdecodes to a torrent whose infohash
    matches *infohash*; False on mismatch or any decode/parse error.

    Legacy Python 2 code (print >> / backtick repr).
    """
    try:
        metainfo = bdecode(metadata)
        tdef = TorrentDef.load_from_dict(metainfo)
        got_infohash = tdef.get_infohash()
        if infohash != got_infohash:
            print >>sys.stderr, "metadata: infohash doesn't match the torrent " + "hash. Required: " + ` infohash ` + ", but got: " + ` got_infohash `
            return False
        return True
    except:
        # Any failure while decoding/validating counts as invalid metadata.
        print_exc()
        # print >> sys.stderr, "problem metadata:", repr(metadata)
        return False
def read_and_send_metadata(self, permid, infohash, torrent_path, selversion):
    """Read a collected torrent from disk and send its metadata to a peer.

    Legacy Python 2 code (print >> / backtick repr).  Private torrents are
    never sent.  Returns the result of do_send_metadata, or 0 when nothing
    was sent.
    """
    torrent_data = self.read_torrent(torrent_path)
    if torrent_data:
        # Arno: Don't send private torrents
        try:
            metainfo = bdecode(torrent_data)
            if "info" in metainfo and "private" in metainfo["info"] and metainfo["info"]["private"]:
                if DEBUG:
                    print >>sys.stderr, "metadata: Not sending torrent", ` torrent_path `, "because it is private"
                return 0
        except:
            # Undecodable torrent data: give up on this request.
            print_exc()
            return 0

        if DEBUG:
            print >>sys.stderr, "metadata: sending torrent", ` torrent_path `, len(torrent_data)

        torrent = {}
        torrent["torrent_hash"] = infohash
        # P2PURLs: If URL compat then send URL
        tdef = TorrentDef.load_from_dict(metainfo)
        if selversion >= OLPROTO_VER_ELEVENTH and tdef.get_url_compat():
            torrent["metatype"] = URL_MIME_TYPE
            torrent["metadata"] = tdef.get_url()
        else:
            torrent["metatype"] = TSTREAM_MIME_TYPE
            torrent["metadata"] = torrent_data

        # Newer protocol versions also carry swarm-health statistics.
        if selversion >= OLPROTO_VER_FOURTH:
            data = self.torrent_db.getTorrent(infohash)
            if data is None:
                # DB inconsistency
                return 0
            nleechers = data.get("leecher", -1)
            nseeders = data.get("seeder", -1)
            last_check_ago = int(time()) - data.get("last_check_time", 0)  # relative time
            if last_check_ago < 0:
                last_check_ago = 0
            status = data.get("status", "unknown")
            torrent.update(
                {"leecher": nleechers, "seeder": nseeders, "last_check_time": last_check_ago, "status": status}
            )
        return self.do_send_metadata(permid, torrent, selversion)
    else:
        # deleted before sending it
        self.torrent_db.deleteTorrent(infohash, delete_file=True, commit=True)
        if DEBUG:
            print >>sys.stderr, "metadata: GET_METADATA: no torrent data to send"
        return 0
def render_POST(self, request):
    """Commit the channel torrent for the current user's channel.

    Returns a JSON error (HTTP 404) when the channel does not exist yet;
    otherwise commits pending changes and returns a JSON success body.
    """
    with db_session:
        my_channel = self.session.lm.mds.ChannelMetadata.get_my_channel()

    if my_channel is None or not my_channel:
        request.setResponseCode(http.NOT_FOUND)
        error_body = {"error": "your channel has not been created"}
        return json.dumps(error_body)

    torrent_dict = my_channel.commit_channel_torrent()
    if torrent_dict:
        tdef = TorrentDef.load_from_dict(torrent_dict)
        self.session.lm.gigachannel_manager.updated_my_channel(tdef)

    return json.dumps({"success": True})
def valid_metadata(self, infohash, metadata):
    """Return True when *metadata* bdecodes to a torrent whose infohash
    matches *infohash*; False on mismatch or any decode/parse error.

    Legacy Python 2 code (print >> / backtick repr).
    """
    try:
        metainfo = bdecode(metadata)
        tdef = TorrentDef.load_from_dict(metainfo)
        got_infohash = tdef.get_infohash()
        if infohash != got_infohash:
            print >> sys.stderr, "metadata: infohash doesn't match the torrent " + \
                "hash. Required: " + `infohash` + ", but got: " + `got_infohash`
            return False
        return True
    except:
        # Any failure while decoding/validating counts as invalid metadata.
        print_exc()
        #print >> sys.stderr, "problem metadata:", repr(metadata)
        return False
def __cb_body(body_bin, item_torrent_entry):
    """Parse a downloaded torrent body and store it in the session cache.

    Legacy Python 2 code (``except ValueError, err`` syntax).  On parse
    failure the item is skipped and the reason logged.
    """
    tdef = None
    metainfo = None
    # tdef.get_infohash returned binary string by length 20
    try:
        metainfo = lt.bdecode(body_bin)
        tdef = TorrentDef.load_from_dict(metainfo)
        self.session.save_collected_torrent(tdef.get_infohash(), body_bin)
    except ValueError, err:
        # Distinguish "bdecode failed" (metainfo is None) from "torrent
        # dict invalid" in the logged message.
        self._logger.error(
            "Could not parse/save torrent, skipping %s. Reason: %s",
            item_torrent_entry['link'],
            err.message + ", metainfo is " + ("not " if metainfo else "") + "None")
def process_alert(self, alert, alert_type):
    """Dispatch a libtorrent alert and refresh cached download status.

    Legacy Python 2 code (print >> / unicode).  Handles metadata arrival,
    file renames and hash-check completion, then updates the cached
    state/progress/speed fields from the handle's status.
    """
    if DEBUG or alert.category() in [lt.alert.category_t.error_notification, lt.alert.category_t.performance_warning]:
        print >> sys.stderr, "LibtorrentDownloadImpl: alert %s with message %s" % (alert_type, alert)
    if self.handle and self.handle.is_valid():
        status = self.handle.status()
        with self.dllock:
            if alert_type == 'metadata_received_alert':
                # Metadata just arrived: rebuild the TorrentDef and persist it.
                self.metadata = {'info': lt.bdecode(self.handle.get_torrent_info().metadata())}
                self.tdef = TorrentDef.load_from_dict(self.metadata)
                self.orig_files = [torrent_file.path for torrent_file in lt.torrent_info(self.metadata).files()]
                self.set_files()
                if self.session.lm.rtorrent_handler:
                    self.session.lm.rtorrent_handler.save_torrent(self.tdef)
                elif self.session.lm.torrent_db:
                    self.session.lm.torrent_db.addExternalTorrent(self.tdef, source = '', extra_info = {'status':'good'}, commit = True)

                # Checkpoint
                (infohash, pstate) = self.network_checkpoint()
                checkpoint = lambda : self.session.lm.save_download_pstate(infohash, pstate)
                self.session.lm.rawserver.add_task(checkpoint, 0)

            if alert_type == 'file_renamed_alert':
                # Drop the unwanted-files dir once it is empty and all files are wanted.
                if os.path.exists(self.unwanteddir_abs) and not os.listdir(self.unwanteddir_abs) and all(self.handle.file_priorities()):
                    os.rmdir(self.unwanteddir_abs)

            if alert_type == 'torrent_checked_alert' and self.pause_after_next_hashcheck:
                self.handle.pause()
                self.pause_after_next_hashcheck = False
                self.dlstate = DLSTATUS_STOPPED
            else:
                if alert_type == 'torrent_paused_alert':
                    self.dlstate = DLSTATUS_STOPPED_ON_ERROR if status.error else DLSTATUS_STOPPED
                else:
                    self.dlstate = self.dlstates[status.state] if not status.paused else DLSTATUS_STOPPED

            self.error = unicode(status.error) if status.error else None
            self.length = float(status.total_wanted)
            self.progress = status.progress
            # NOTE(review): the membership list repeats DLSTATUS_STOPPED twice;
            # one entry was probably meant to be DLSTATUS_STOPPED_ON_ERROR -- confirm.
            self.curspeeds[DOWNLOAD] = float(status.download_payload_rate) if self.dlstate not in [DLSTATUS_STOPPED, DLSTATUS_STOPPED] else 0.0
            self.curspeeds[UPLOAD] = float(status.upload_payload_rate) if self.dlstate not in [DLSTATUS_STOPPED, DLSTATUS_STOPPED] else 0.0
            self.all_time_upload = status.all_time_upload
            self.all_time_download = status.all_time_download
            self.finished_time = status.finished_time
def isValidRemoteVal(d,selversion):
    """Validate one remote query-reply record against the expected keys
    for the peer's overlay protocol version.

    Legacy Python 2 code (print >>).  Returns True when *d* is a dict
    containing the keys required by *selversion*; for version >= 12 any
    embedded metadata must also parse as a torrent/URL.
    """
    if not isinstance(d,dict):
        if DEBUG:
            print >>sys.stderr,"rqmh: reply: a: value not dict"
        return False
    if selversion >= OLPROTO_VER_TWELFTH:
        if not ('content_name' in d and 'length' in d and 'leecher' in d and 'seeder' in d and 'category' in d and 'torrent_size' in d and 'channel_permid' in d and 'channel_name' in d):
            if DEBUG:
                print >>sys.stderr,"rqmh: reply: torrentrec12: key missing, got",d.keys()
            return False
        if 'metatype' in d and 'metadata' in d:
            # Version 12 may piggyback the full metadata; verify it parses.
            try:
                metatype = d['metatype']
                metadata = d['metadata']
                if metatype == URL_MIME_TYPE:
                    tdef = TorrentDef.load_from_url(metadata)
                else:
                    metainfo = bdecode(metadata)
                    tdef = TorrentDef.load_from_dict(metainfo)
            except:
                if DEBUG:
                    print >>sys.stderr,"rqmh: reply: torrentrec12: metadata invalid"
                    print_exc()
                return False
    elif selversion >= OLPROTO_VER_ELEVENTH:
        if not ('content_name' in d and 'length' in d and 'leecher' in d and 'seeder' in d and 'category' in d and 'torrent_size' in d and 'channel_permid' in d and 'channel_name' in d):
            if DEBUG:
                print >>sys.stderr,"rqmh: reply: torrentrec11: key missing, got",d.keys()
            return False
    elif selversion >= OLPROTO_VER_NINETH:
        if not ('content_name' in d and 'length' in d and 'leecher' in d and 'seeder' in d and 'category' in d and 'torrent_size' in d):
            if DEBUG:
                print >>sys.stderr,"rqmh: reply: torrentrec9: key missing, got",d.keys()
            return False
    else:
        if not ('content_name' in d and 'length' in d and 'leecher' in d and 'seeder' in d and 'category' in d):
            if DEBUG:
                print >>sys.stderr,"rqmh: reply: torrentrec6: key missing, got",d.keys()
            return False
    # if not (isinstance(d['content_name'],str) and isinstance(d['length'],int) and isinstance(d['leecher'],int) and isinstance(d['seeder'],int)):
    #     return False
    # if len(d) > 4: # no other keys
    #     return False
    return True
def update_trackers(self, infohash, trackers):
    """ Update the trackers for a download.
    :param infohash: infohash of the torrent that needs to be updated
    :param trackers: A list of tracker urls.
    """
    dl = self.get_download(infohash)
    old_def = dl.get_def() if dl else None
    if old_def:
        old_trackers = old_def.get_trackers_as_single_tuple()
        # Only trackers we did not already know about.
        new_trackers = list(set(trackers) - set(old_trackers))
        all_trackers = list(old_trackers) + new_trackers
        if new_trackers:
            # Add new trackers to the download
            dl.add_trackers(new_trackers)
            # Create a new TorrentDef
            if isinstance(old_def, TorrentDefNoMetainfo):
                new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(), dl.get_magnet_link())
            else:
                metainfo = old_def.get_metainfo()
                if len(all_trackers) > 1:
                    metainfo["announce-list"] = [all_trackers]
                else:
                    metainfo["announce"] = all_trackers[0]
                new_def = TorrentDef.load_from_dict(metainfo)
            # Set TorrentDef + checkpoint
            dl.set_def(new_def)
            dl.checkpoint()

            if isinstance(old_def, TorrentDefNoMetainfo):
                # No metainfo yet: record the tracker mapping in the megacache
                # instead.  @forceDBThread presumably schedules this on the DB
                # thread -- TODO confirm decorator semantics.
                @forceDBThread
                def update_trackers_db(infohash, new_trackers):
                    torrent_id = self.torrent_db.getTorrentID(infohash)
                    if torrent_id is not None:
                        self.torrent_db.addTorrentTrackerMappingInBatch(torrent_id, new_trackers)
                        self.session.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)

                if self.session.config.get_megacache_enabled():
                    update_trackers_db(infohash, new_trackers)
            elif not isinstance(old_def, TorrentDefNoMetainfo) and self.rtorrent_handler:
                # Update collected torrents
                self.rtorrent_handler.save_torrent(new_def)
def _success_callback(self, meta_info):
    """The callback that will be called by LibtorrentMgr when a download
    was successful.

    Saves the fetched torrent, retires the corresponding running request
    and updates the success/bandwidth statistics.

    :param meta_info: bdecoded metainfo dict of the fetched torrent.
    """
    tdef = TorrentDef.load_from_dict(meta_info)
    # Compute the infohash once instead of calling get_infohash() twice.
    infohash = tdef.get_infohash()
    assert infohash in self._running_requests

    self._logger.debug(u"received torrent %s through magnet", hexlify(infohash))

    self._remote_torrent_handler.save_torrent(tdef)
    self._running_requests.remove(infohash)

    self._requests_succeeded += 1
    self._total_bandwidth += tdef.get_torrent_size()

    self._start_pending_requests()
def test_check_torrent_health_chant(self):
    """
    Test the endpoint to fetch the health of a chant-managed, infohash-only torrent
    """
    # Legacy Python 2 test (uses unicode() and str.encode('hex')).
    infohash = 'a' * 20
    tracker_url = 'udp://localhost:%s/announce' % self.udp_port
    meta_info = {"info": {"name": "my_torrent", "piece length": 42, "root hash": infohash, "files": [],
                          "url-list": tracker_url}}
    tdef = TorrentDef.load_from_dict(meta_info)
    with db_session:
        self.session.lm.mds.TorrentMetadata(infohash=tdef.infohash, title='ubuntu-torrent.iso', size=42,
                                            tracker_info=tracker_url)
    url = 'torrents/%s/health?timeout=10&refresh=1' % tdef.infohash.encode('hex')
    self.should_check_equality = False

    # Stub for ltmgr.get_metainfo: report fixed seeder/leecher counts.
    def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True):
        meta_info_extended = meta_info.copy()
        meta_info_extended['seeders'] = 12
        meta_info_extended['leechers'] = 11
        callback(meta_info_extended)

    # Initialize the torrent checker
    self.session.lm.torrent_checker = TorrentChecker(self.session)
    self.session.lm.torrent_checker.initialize()
    self.session.lm.ltmgr = MockObject()
    self.session.lm.ltmgr.get_metainfo = fake_get_metainfo

    def verify_response_no_trackers(response):
        json_response = json.loads(response)
        expected_dict = {u"health": {u"DHT": {u"leechers": 11, u"seeders": 12,
                                              u"infohash": unicode(tdef.infohash.encode('hex'))}}}
        self.assertDictEqual(json_response, expected_dict)

    # Left for compatibility with other tests in this object
    self.udp_tracker.start()
    self.http_tracker.start()

    # TODO: add test for DHT timeout
    yield self.do_request(url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers)
def _success_callback(self, meta_info):
    """Handle a successful magnet download reported by LibtorrentMgr."""
    fetched = TorrentDef.load_from_dict(meta_info)
    assert fetched.get_infohash() in self._running_requests

    ih = fetched.get_infohash()
    self._logger.debug(u"received torrent %s through magnet", hexlify(ih))

    # Hand the torrent off for storage, then retire the request.
    self._remote_torrent_handler.save_torrent(fetched)
    self._running_requests.remove(ih)

    # Bookkeeping for request statistics.
    self._requests_succeeded += 1
    self._total_bandwidth += fetched.get_torrent_size()

    self._start_pending_requests()
def on_metadata_received_alert(self, alert):
    """Handle libtorrent's metadata_received_alert (older variant).

    Rebuilds ``self.tdef`` from the handle's metadata plus trackers,
    persists the torrent, and checkpoints the download.
    """
    self.metadata = {'info': lt.bdecode(self.handle.get_torrent_info().metadata())}

    # Fold the handle's current trackers into the metainfo dict.
    trackers = [tracker['url'] for tracker in self.handle.trackers()]
    if trackers:
        if len(trackers) > 1:
            self.metadata["announce-list"] = [trackers]
        else:
            self.metadata["announce"] = trackers[0]

    self.tdef = TorrentDef.load_from_dict(self.metadata)
    self.orig_files = [torrent_file.path for torrent_file in lt.torrent_info(self.metadata).files()]
    self.set_files()

    # Prefer the remote-torrent handler for persistence; fall back to the DB.
    if self.session.lm.rtorrent_handler:
        self.session.lm.rtorrent_handler.save_torrent(self.tdef)
    elif self.session.lm.torrent_db:
        self.session.lm.torrent_db.addExternalTorrent(self.tdef, source = '', extra_info = {'status':'good'}, commit = True)

    self.checkpoint()
def test_create_torrent_from_def(self):
    """
    Testing whether a correct Dispersy message is created when we add a torrent to our channel
    """
    metainfo = {"info": {"name": "my_torrent",
                         "piece length": 12345,
                         "pieces": "12345678901234567890",
                         "files": [{'path': ['test.txt'], 'length': 1234}]}}
    torrent_def = TorrentDef.load_from_dict(metainfo)

    self.channel_community.initialize()
    msg = self.channel_community._disp_create_torrent_from_torrentdef(torrent_def, 12345)

    # The Dispersy payload must mirror the torrent's name and file list.
    self.assertEqual(msg.payload.name, "my_torrent")
    self.assertEqual(len(msg.payload.files), 1)
def test_check_torrent_health_chant(self):
    """
    Test the endpoint to fetch the health of a chant-managed, infohash-only torrent
    """
    # Legacy Python 2 test (uses unicode() and str.encode('hex')).
    infohash = 'a' * 20
    tracker_url = 'udp://localhost:%s/announce' % self.udp_port
    meta_info = {
        "info": {
            "name": "my_torrent",
            "piece length": 42,
            "root hash": infohash,
            "files": [],
            "url-list": tracker_url
        }
    }
    tdef = TorrentDef.load_from_dict(meta_info)
    with db_session:
        self.session.lm.mds.TorrentMetadata(infohash=tdef.infohash,
                                            title='ubuntu-torrent.iso',
                                            size=42,
                                            tracker_info=tracker_url)
    url = 'torrents/%s/health?timeout=10&refresh=1' % tdef.infohash.encode(
        'hex')
    self.should_check_equality = False

    # Stub for ltmgr.get_metainfo: report fixed seeder/leecher counts.
    def fake_get_metainfo(_, callback, timeout=10, timeout_callback=None, notify=True):
        meta_info_extended = meta_info.copy()
        meta_info_extended['seeders'] = 12
        meta_info_extended['leechers'] = 11
        callback(meta_info_extended)

    # Initialize the torrent checker
    self.session.lm.torrent_checker = TorrentChecker(self.session)
    self.session.lm.torrent_checker.initialize()
    self.session.lm.ltmgr = MockObject()
    self.session.lm.ltmgr.get_metainfo = fake_get_metainfo

    def verify_response_no_trackers(response):
        json_response = json.loads(response)
        expected_dict = {
            u"health": {
                u"DHT": {
                    u"leechers": 11,
                    u"seeders": 12,
                    u"infohash": unicode(tdef.infohash.encode('hex'))
                }
            }
        }
        self.assertDictEqual(json_response, expected_dict)

    # Left for compatibility with other tests in this object
    self.udp_tracker.start()
    self.http_tracker.start()

    # TODO: add test for DHT timeout
    yield self.do_request(
        url, expected_code=200, request_type='GET').addCallback(verify_response_no_trackers)
def read_and_send_metadata(self, permid, infohash, torrent_path, selversion):
    """Read a collected torrent from disk and send its metadata to a peer.

    Legacy Python 2 code (print >> / backtick repr).  This variant retries
    once with the canonical collected-torrent filename before deleting the
    DB entry.  Returns do_send_metadata's result, or 0 when nothing was sent.
    """
    torrent_data = self.read_torrent(torrent_path)
    if torrent_data:
        # Arno: Don't send private torrents
        try:
            metainfo = bdecode(torrent_data)
            if 'info' in metainfo and 'private' in metainfo[
                    'info'] and metainfo['info']['private']:
                if DEBUG:
                    print >> sys.stderr, "metadata: Not sending torrent", ` torrent_path `, "because it is private"
                return 0
        except:
            # Undecodable torrent data: give up on this request.
            print_exc()
            return 0

        if DEBUG:
            print >> sys.stderr, "metadata: sending torrent", ` torrent_path `, len(
                torrent_data)

        torrent = {}
        torrent['torrent_hash'] = infohash
        # P2PURLs: If URL compat then send URL
        tdef = TorrentDef.load_from_dict(metainfo)
        if selversion >= OLPROTO_VER_ELEVENTH and tdef.get_url_compat():
            torrent['metatype'] = URL_MIME_TYPE
            torrent['metadata'] = tdef.get_url()
        else:
            torrent['metatype'] = TSTREAM_MIME_TYPE
            torrent['metadata'] = torrent_data

        # Newer protocol versions also carry swarm-health statistics.
        if selversion >= OLPROTO_VER_FOURTH:
            data = self.torrent_db.getTorrent(infohash)
            if data is None:
                # DB inconsistency
                return 0
            nleechers = data.get('leecher', -1)
            nseeders = data.get('seeder', -1)
            last_check_ago = int(time()) - data.get('last_check_time', 0)  # relative time
            if last_check_ago < 0:
                last_check_ago = 0
            status = data.get('status', 'unknown')
            torrent.update({
                'leecher': nleechers,
                'seeder': nseeders,
                'last_check_time': last_check_ago,
                'status': status
            })
        return self.do_send_metadata(permid, torrent, selversion)
    else:
        # Retry once with the canonical collected-torrent path, unless it is
        # the same file we already failed to read.
        file_name = get_collected_torrent_filename(infohash)
        torrent_path2 = os.path.join(self.torrent_dir, file_name)
        samefile = os.path.abspath(torrent_path) == os.path.abspath(
            torrent_path2)
        if os.path.exists(torrent_path2) and not samefile:
            return self.read_and_send_metadata(permid, infohash,
                                               torrent_path2, selversion)

        # deleted before sending it
        self.torrent_db.deleteTorrent(infohash, delete_file=True, commit=True)
        if DEBUG:
            print >> sys.stderr, "metadata: GET_METADATA: no torrent data to send"
        return 0
def test_load_from_dict(self):
    """A minimal metainfo dict should round-trip into a valid torrent file."""
    info_dict = {
        "name": "my_torrent",
        "piece length": 12345,
        "pieces": "12345678901234567890",
        "files": [],
    }
    loaded = TorrentDef.load_from_dict({"info": info_dict})
    self.assertTrue(valid_torrent_file(loaded.get_metainfo()))
def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict={}, commit=True, setupDelay=0):
    """Resume a torrent or swift download from a persisted pstate file.

    Legacy Python 2 code (print >> / basestring).  Falls back to the
    collected-torrent directory when the pstate is invalid or missing.
    NOTE(review): the mutable default ``initialdlstatus_dict={}`` is shared
    across calls -- it appears to be read-only here, but confirm.
    """
    tdef = sdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)
        # SWIFTPROC
        if SwiftDef.is_swift_url(pstate['metainfo']):
            sdef = SwiftDef.load_from_url(pstate['metainfo'])
        elif 'infohash' in pstate['metainfo']:
            # Magnet-style checkpoint without full metainfo.
            tdef = TorrentDefNoMetainfo(pstate['metainfo']['infohash'], pstate['metainfo']['name'])
        else:
            tdef = TorrentDef.load_from_dict(pstate['metainfo'])

        dlconfig = pstate['dlconfig']
        # Older pstates stored 'saveas' as a tuple; keep only the last entry.
        if isinstance(dlconfig['saveas'], tuple):
            dlconfig['saveas'] = dlconfig['saveas'][-1]

        if sdef and 'name' in dlconfig and isinstance(
                dlconfig['name'], basestring):
            sdef.set_name(dlconfig['name'])
        if sdef and sdef.get_tracker().startswith("127.0.0.1:"):
            # Local swift tracker: rewrite the port if the configured DHT
            # listen port changed since the checkpoint was written.
            current_port = int(sdef.get_tracker().split(":")[1])
            if current_port != self.session.get_swift_dht_listen_port():
                print >> sys.stderr, "Modified SwiftDef to new tracker port"
                sdef.set_tracker("127.0.0.1:%d" % self.session.get_swift_dht_listen_port())

        dscfg = DownloadStartupConfig(dlconfig)
    except:
        print_exc()
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)
        # Filename is '<hex infohash>.<ext>'; strip the 7-char suffix.
        infohash = binascii.unhexlify(file[:-7])
        torrent = self.torrent_db.getTorrent(
            infohash,
            keys=['name', 'torrent_file_name', 'swift_torrent_hash'],
            include_mypref=False)
        torrentfile = None
        if torrent:
            torrent_dir = self.session.get_torrent_collecting_dir()

            if torrent['swift_torrent_hash']:
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                save_name = sdef.get_roothash_as_hex()
                torrentfile = os.path.join(torrent_dir, save_name)

            if torrentfile and os.path.isfile(torrentfile):
                # normal torrentfile is not present, see if readable torrent is there
                # NOTE(review): the condition checks that the file IS present,
                # which contradicts the comment above -- possibly inverted; confirm.
                save_name = get_readable_torrent_name(
                    infohash, torrent['name'])
                torrentfile = os.path.join(torrent_dir, save_name)

        if torrentfile and os.path.isfile(torrentfile):
            tdef = TorrentDef.load(torrentfile)

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()

            if self.mypref_db != None:
                preferences = self.mypref_db.getMyPrefStatsInfohash(
                    infohash)
                if preferences:
                    if os.path.isdir(
                            preferences[2]) or preferences[2] == '':
                        dscfg.set_dest_dir(preferences[2])

    if DEBUG:
        print >> sys.stderr, "tlm: load_checkpoint: pstate is", dlstatus_strings[
            pstate['dlstate']['status']], pstate['dlstate']['progress']
        if pstate['engineresumedata'] is None:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata None"
        else:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata len", len(
                pstate['engineresumedata'])

    if (tdef or sdef) and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists((tdef or sdef).get_id()):
                    if tdef:
                        initialdlstatus = initialdlstatus_dict.get(
                            tdef.get_id(), initialdlstatus)
                        self.add(tdef, dscfg, pstate, initialdlstatus, commit=commit, setupDelay=setupDelay)
                    else:
                        initialdlstatus = initialdlstatus_dict.get(
                            sdef.get_id(), initialdlstatus)
                        self.swift_add(sdef, dscfg, pstate, initialdlstatus)
                else:
                    print >> sys.stderr, "tlm: not resuming checkpoint because download has already been added"
            except Exception as e:
                self.rawserver_nonfatalerrorfunc(e)
        else:
            # Empty dest dir marks a removed torrent: discard its stale checkpoint.
            print >> sys.stderr, "tlm: removing checkpoint", filename, "destdir is", dscfg.get_dest_dir(
            )
            os.remove(filename)
    else:
        print >> sys.stderr, "tlm: could not resume checkpoint", filename, tdef, dscfg
def test_load_from_dict(self):
    """A TorrentDef can be constructed from the bdecoded bytes of a .torrent file."""
    torrent_path = os.path.join(TESTS_DATA_DIR, "bak_single.torrent")
    with open(torrent_path, mode='rb') as torrent_file:
        raw_metainfo = torrent_file.read()
    decoded_metainfo = bdecode(raw_metainfo)
    self.assertTrue(TorrentDef.load_from_dict(decoded_metainfo))
def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict=None, commit=True, setupDelay=0):
    """Resume a previously checkpointed download from its pstate file.

    Tries to rebuild the torrent/swift definition and the download startup
    config from the pickled pstate in ``filename``; if the pstate is
    unreadable it falls back to the collected torrent file looked up via the
    torrent database. Finally re-adds the download unless it already exists,
    or removes the checkpoint when its destination dir was cleared (which
    marks the torrent as removed).

    :param filename: path to the download's persistent-state (pstate) file.
    :param initialdlstatus: optional status to start the resumed download in
        (overridden per-download by initialdlstatus_dict).
    :param initialdlstatus_dict: per-download-id status overrides; defaults
        to an empty dict (fixed: was a shared mutable default argument).
    :param commit: forwarded to self.add() — presumably controls a DB commit; verify.
    :param setupDelay: forwarded to self.add() to stagger download setup.
    """
    # Fix: avoid the shared mutable default argument.
    if initialdlstatus_dict is None:
        initialdlstatus_dict = {}

    tdef = sdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)

        # SWIFTPROC
        # Decide what kind of definition the checkpoint describes: a swift
        # URL, a bare infohash (metainfo not fetched yet), or full metainfo.
        if SwiftDef.is_swift_url(pstate['metainfo']):
            sdef = SwiftDef.load_from_url(pstate['metainfo'])
        elif 'infohash' in pstate['metainfo']:
            tdef = TorrentDefNoMetainfo(pstate['metainfo']['infohash'], pstate['metainfo']['name'])
        else:
            tdef = TorrentDef.load_from_dict(pstate['metainfo'])

        dlconfig = pstate['dlconfig']
        # Older checkpoints stored 'saveas' as a tuple; keep the last element.
        if isinstance(dlconfig['saveas'], tuple):
            dlconfig['saveas'] = dlconfig['saveas'][-1]

        if sdef and 'name' in dlconfig and isinstance(dlconfig['name'], basestring):
            sdef.set_name(dlconfig['name'])
        # If the swift tracker points at localhost, rewrite it to the swift
        # DHT listen port configured for this session (ports can change
        # between runs).
        if sdef and sdef.get_tracker().startswith("127.0.0.1:"):
            current_port = int(sdef.get_tracker().split(":")[1])
            if current_port != self.session.get_swift_dht_listen_port():
                print >> sys.stderr, "Modified SwiftDef to new tracker port"
                sdef.set_tracker("127.0.0.1:%d" % self.session.get_swift_dht_listen_port())

        dscfg = DownloadStartupConfig(dlconfig)

    except:
        print_exc()
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)
        # Checkpoint filenames encode the hex infohash; the last 7 chars are
        # the extension (presumably '.pickle' — verify against the writer).
        infohash = binascii.unhexlify(file[:-7])
        torrent = self.torrent_db.getTorrent(infohash, keys=['name', 'torrent_file_name', 'swift_torrent_hash'], include_mypref=False)
        torrentfile = None
        if torrent:
            torrent_dir = self.session.get_torrent_collecting_dir()

            if torrent['swift_torrent_hash']:
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                save_name = sdef.get_roothash_as_hex()
                torrentfile = os.path.join(torrent_dir, save_name)

            if torrentfile and os.path.isfile(torrentfile):
                # normal torrentfile is not present, see if readable torrent is there
                save_name = get_readable_torrent_name(infohash, torrent['name'])
                torrentfile = os.path.join(torrent_dir, save_name)

        if torrentfile and os.path.isfile(torrentfile):
            tdef = TorrentDef.load(torrentfile)

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()

            # Fix: use identity comparison with None (was `!= None`).
            if self.mypref_db is not None:
                preferences = self.mypref_db.getMyPrefStatsInfohash(infohash)
                if preferences:
                    # preferences[2] is the stored destination directory; ''
                    # is accepted and later interpreted as "removed".
                    if os.path.isdir(preferences[2]) or preferences[2] == '':
                        dscfg.set_dest_dir(preferences[2])

    # Fix: pstate is None when both the load and the fallback failed; the
    # old code dereferenced it unconditionally under DEBUG and crashed.
    if DEBUG and pstate is not None:
        print >> sys.stderr, "tlm: load_checkpoint: pstate is", dlstatus_strings[pstate['dlstate']['status']], pstate['dlstate']['progress']
        if pstate['engineresumedata'] is None:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata None"
        else:
            print >> sys.stderr, "tlm: load_checkpoint: resumedata len", len(pstate['engineresumedata'])

    if (tdef or sdef) and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists((tdef or sdef).get_id()):
                    if tdef:
                        initialdlstatus = initialdlstatus_dict.get(tdef.get_id(), initialdlstatus)
                        self.add(tdef, dscfg, pstate, initialdlstatus, commit=commit, setupDelay=setupDelay)
                    else:
                        initialdlstatus = initialdlstatus_dict.get(sdef.get_id(), initialdlstatus)
                        self.swift_add(sdef, dscfg, pstate, initialdlstatus)
                else:
                    print >> sys.stderr, "tlm: not resuming checkpoint because download has already been added"
            except Exception as e:
                self.rawserver_nonfatalerrorfunc(e)
        else:
            # An empty destination dir marks the torrent as removed: drop
            # the checkpoint instead of resuming it.
            print >> sys.stderr, "tlm: removing checkpoint", filename, "destdir is", dscfg.get_dest_dir()
            os.remove(filename)
    else:
        print >> sys.stderr, "tlm: could not resume checkpoint", filename, tdef, dscfg
def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict=None, setupDelay=0):
    """Resume a checkpointed download from its pstate config file.

    Rebuilds the torrent definition and the download startup config from the
    pstate stored in ``filename``; if the pstate cannot be loaded, falls back
    to the torrent data kept in the torrent store. The download is then
    re-added unless it already exists, or the checkpoint file is deleted when
    its destination directory was cleared (torrent marked as removed).

    :param filename: path to the download's persistent-state (pstate) file.
    :param initialdlstatus: optional status to start the resumed download in
        (overridden per-infohash by initialdlstatus_dict).
    :param initialdlstatus_dict: per-infohash status overrides; defaults to
        an empty dict (fixed: was a shared mutable default argument).
    :param setupDelay: forwarded to self.add() to stagger download setup.
    """
    # Fix: avoid the shared mutable default argument.
    if initialdlstatus_dict is None:
        initialdlstatus_dict = {}

    tdef = dscfg = pstate = None
    try:
        pstate = self.load_download_pstate(filename)

        # SWIFTPROC
        metainfo = pstate.get('state', 'metainfo')
        if 'infohash' in metainfo:
            # Only the infohash is known (metainfo not fetched yet).
            tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
        else:
            tdef = TorrentDef.load_from_dict(metainfo)

        # Older checkpoints stored 'saveas' as a tuple; keep the last element.
        if pstate.has_option('downloadconfig', 'saveas') and \
                isinstance(pstate.get('downloadconfig', 'saveas'), tuple):
            pstate.set('downloadconfig', 'saveas', pstate.get('downloadconfig', 'saveas')[-1])

        dscfg = DownloadStartupConfig(pstate)

    except:
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)
        # Checkpoint filenames encode the hex infohash; the last 6 chars are
        # the extension (presumably '.state' — verify against the writer).
        infohash = binascii.unhexlify(file[:-6])

        torrent_data = self.torrent_store.get(infohash)
        if torrent_data:
            try:
                # Fix: invalid torrent data in the store used to raise out of
                # this except handler and abort the resume; handle explicitly.
                tdef = TorrentDef.load_from_memory(torrent_data)
                defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
                dscfg = defaultDLConfig.copy()

                if self.mypref_db is not None:
                    dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                    # Fix: the old `dest_dir == ''` branch was dead code
                    # (unreachable inside `if dest_dir:`); behavior unchanged.
                    if dest_dir and os.path.isdir(dest_dir):
                        dscfg.set_dest_dir(dest_dir)
            except ValueError:
                self._logger.warning("tlm: torrent data invalid")

    # Fix: pstate is None when loading failed; the old code called
    # pstate.get(...) unconditionally here and raised AttributeError.
    if pstate is not None:
        self._logger.debug("tlm: load_checkpoint: pstate is %s %s",
                           pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress'))
    if pstate is None or pstate.get('state', 'engineresumedata') is None:
        self._logger.debug("tlm: load_checkpoint: resumedata None")
    else:
        self._logger.debug("tlm: load_checkpoint: resumedata len %d", len(pstate.get('state', 'engineresumedata')))

    if tdef and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists(tdef.get_infohash()):
                    initialdlstatus = initialdlstatus_dict.get(tdef.get_infohash(), initialdlstatus)
                    self.add(tdef, dscfg, pstate, initialdlstatus, setupDelay=setupDelay)
                else:
                    self._logger.info("tlm: not resuming checkpoint because download has already been added")
            except Exception as e:
                self._logger.exception("tlm: load check_point: exception while adding download %s", tdef)
        else:
            # An empty destination dir marks the torrent as removed: drop
            # the checkpoint instead of resuming it.
            self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir())
            os.remove(filename)
    else:
        self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)
def _on_magnet_fetched(meta_info):
    """Turn the metainfo dict fetched for a magnet link into a TorrentDef."""
    torrent_def = TorrentDef.load_from_dict(meta_info)
    return torrent_def
def recv_query_reply(self,permid,message,selversion):
    """Validate an incoming overlay QUERY_REPLY message and dispatch it.

    :param permid: permanent identifier of the peer that sent the reply.
    :param message: raw message bytes; byte 0 is the message-type id, the
        remainder is a bencoded reply dict.
    :param selversion: negotiated overlay-protocol version of the peer.
    :return: True when the reply was accepted and processed, False when it
        is invalid (presumably causing the caller to close the connection —
        verify against the overlay dispatcher).
    """
    #print "****** recv query reply", len(message)

    # Query replies are only supported from overlay protocol version 6 on.
    if selversion < OLPROTO_VER_SIXTH:
        return False

    #if len(message) > MAX_QUERY_REPLY_LEN:
    #    return True    # don't close

    # Unpack
    try:
        d = bdecode(message[1:])
    except:
        if DEBUG:
            print >>sys.stderr,"rquery: Cannot bdecode QUERY_REPLY message", selversion
        return False

    if not isValidQueryReply(d,selversion):
        if DEBUG:
            print >>sys.stderr,"rquery: not valid QUERY_REPLY message", selversion
        return False

    # Check auth
    # Only accept replies that match a query this peer actually issued.
    queryrec = self.is_registered_query_id(d['id'])
    if not queryrec:
        if DEBUG:
            print >>sys.stderr,"rquery: QUERY_REPLY has unknown query ID", selversion
        return False

    if selversion >= OLPROTO_VER_TWELFTH:
        if queryrec['query'].startswith('SIMPLE+METADATA'):
            # SIMPLE+METADATA replies must carry torrent metadata inline;
            # reject the entire reply if any record is malformed.
            for infohash,torrentrec in d['a'].iteritems():
                if not 'metatype' in torrentrec:
                    if DEBUG:
                        print >>sys.stderr,"rquery: QUERY_REPLY has no metatype field", selversion
                    return False
                if not 'metadata' in torrentrec:
                    if DEBUG:
                        print >>sys.stderr,"rquery: QUERY_REPLY has no metadata field", selversion
                    return False
                if torrentrec['torrent_size'] != len(torrentrec['metadata']):
                    if DEBUG:
                        print >>sys.stderr,"rquery: QUERY_REPLY torrent_size != len metadata", selversion
                    return False

                try:
                    # Validity test
                    # Metadata may be a URL or inline bencoded metainfo;
                    # either way it must parse into a TorrentDef.
                    if torrentrec['metatype'] == URL_MIME_TYPE:
                        tdef = TorrentDef.load_from_url(torrentrec['metadata'])
                    else:
                        metainfo = bdecode(torrentrec['metadata'])
                        tdef = TorrentDef.load_from_dict(metainfo)
                except:
                    if DEBUG:
                        print_exc()
                    return False

    # Process
    self.process_query_reply(permid,queryrec['query'],queryrec['usercallback'],d)
    return True
def _on_magnet_fetched(meta_info):
    """Callback: build a TorrentDef from metainfo retrieved via a magnet link."""
    return TorrentDef.load_from_dict(meta_info)