def add_torrent(self):
    """Add self.magnet to the libtorrent session and wait until it starts.

    Side effects: sets ``self.handle`` to the new torrent handle, blocks
    the calling thread until the torrent reaches the downloading state,
    then sets every file priority to 0 so no payload data is fetched.
    """
    self.logger.debug("Adding torrent hash {0}".format(self.info_hash))
    self.logger.info("Adding hash {0} to session".format(self.info_hash))
    # The torrent is added via its magnet URI, which already carries the
    # info-hash (and usually trackers).  The previous code also built a
    # local torrent_info object and attached OpenBitTorrent trackers to
    # it, but that object was never handed to the session -- dead code,
    # removed.
    self.handle = lt.add_magnet_uri(self.libtorrent_session, str(self.magnet),
                                    {'save_path': '/tmp',
                                     'storage_mode': lt.storage_mode_t.storage_mode_sparse,
                                     'paused': True})
    # Busy-wait for the download to start.
    # NOTE(review): this blocks the caller indefinitely if the swarm
    # never responds -- consider adding a timeout.
    while not self.handle.status().state == self.handle.status().downloading:
        time.sleep(1)
    self.logger.debug("{0} changed state to downloading".format(self.info_hash))
    # Set all file priorities to 0: keep the torrent in the session
    # without downloading any actual payload.
    self.handle.prioritize_files([0 for i in self.handle.file_priorities()])
    self.logger.debug("Done setting priority 0 for hash {0}".format(self.info_hash))
def _handle_stop_torrent(self, params):
    """Stop a torrent: look up its handle by info-hash and remove it
    from the libtorrent session.

    params: mapping with key 'info_hash' identifying the torrent.
    Logs an error and returns when no matching handle exists.
    """
    info_hash_hex = str(params['info_hash'])
    # NOTE(review): big_number is fed the hex string directly here;
    # other call sites in this codebase pass the 20-byte binary digest
    # (via .decode('hex')) -- confirm which form this session expects.
    info_hash = lt.big_number(info_hash_hex)
    th = self.session._ses.find_torrent(info_hash)
    if not th:
        self.session._log.error('Missing torrent handle trying to stop %s' % info_hash)
        return
    # BUG FIX: a stray bare `return` previously made the two lines below
    # unreachable, so the torrent was never actually removed.  The
    # duplicated handle lookup and the debug prints that dumped every
    # torrent in the session have been dropped as well.
    self.session._log.info('Removing torrent from session %s' % info_hash)
    self.session._ses.remove_torrent(th)
def add_torrent(self, torrentdl, atp):
    """Add a torrent described by add_torrent_params *atp* on behalf of
    download *torrentdl*.

    Returns a Deferred: an already-fired one (``succeed``) when the
    torrent is already present in the libtorrent session, otherwise
    ``torrentdl.deferred_added``, which fires once libtorrent confirms
    the asynchronous add.

    Raises ValueError when *atp* carries neither a 'ti' (torrent_info)
    nor a 'url' (magnet link) key.
    """
    # If we are collecting the torrent for this infohash, abort this first.
    with self.metainfo_lock:
        # 'hops' selects the (anonymity) session; it must not reach libtorrent.
        ltsession = self.get_session(atp.pop('hops', 0))

        # Derive the hex infohash from whichever source atp provides.
        if 'ti' in atp:
            infohash = str(atp['ti'].info_hash())
        elif 'url' in atp:
            infohash = binascii.hexlify(parse_magnetlink(atp['url'])[1])
        else:
            raise ValueError('No ti or url key in add_torrent_params')

        # Check if we added this torrent before
        known = [str(h.info_hash()) for h in ltsession.get_torrents()]
        if infohash in known:
            self.torrents[infohash] = (torrentdl, ltsession)
            infohash_bin = binascii.unhexlify(infohash)
            return succeed(ltsession.find_torrent(lt.big_number(infohash_bin)))

        if infohash in self.torrents:
            # BUG FIX: `infohash` is already a hex string; the previous
            # code logged infohash.encode('hex'), double-hex-encoding it.
            self._logger.info("Torrent already exists in the downloads. Infohash:%s", infohash)

        # Otherwise, add it anew
        ltsession.async_add_torrent(encode_atp(atp))
        self.torrents[infohash] = (torrentdl, ltsession)
        self._logger.debug("Adding torrent %s", infohash)
        return torrentdl.deferred_added
def do_supply():
    """Feed each pending metainfo request a fake local peer; the regular
    DHT obviously cannot supply peers in this setup."""
    mgr = LibtorrentMgr.getInstance()
    session = mgr.ltsession
    for request_hash in mgr.metainfo_requests:
        binary_hash = request_hash.decode('hex')
        torrent = session.find_torrent(lt.big_number(binary_hash))
        torrent.connect_peer(("127.0.0.1", LISTEN_PORT), 0)
def scrape_trackers(self):
    """
    Manually scrape tracker by requesting to tracker manager.

    For every tracked infohash: read the live peer list from the
    libtorrent handle, derive seeder/leecher counts from it, fill in any
    counts we do not have yet, and queue a tracker health check.
    """
    # Iterate over a snapshot so self.torrents may be mutated elsewhere.
    for infohash in list(self.torrents):
        # torrent handle
        # NOTE(review): big_number receives `infohash` as-is; presumably
        # the keys of self.torrents are 20-byte binary digests -- confirm,
        # since other call sites hex-decode first.
        lt_torrent = self.session.lm.ltmgr.get_session().find_torrent(
            lt.big_number(infohash))
        peer_list = []
        for i in lt_torrent.get_peer_info():
            peer = LibtorrentDownloadImpl.create_peerlist_data(i)
            peer_list.append(peer)
        # calculate number of seeder and leecher by looking at the peers
        num_seed, num_leech = utilities.translate_peers_into_health(
            peer_list)
        # Peer-derived counts only fill gaps: existing non-zero values
        # are kept as-is.
        if self.torrents[infohash]['num_seeders'] == 0:
            self.torrents[infohash]['num_seeders'] = num_seed
        if self.torrents[infohash]['num_leechers'] == 0:
            self.torrents[infohash]['num_leechers'] = num_leech
        self._logger.debug(
            "Seeder/leecher data translated from peers : seeder %s, leecher %s",
            num_seed, num_leech)
        # check health(seeder/leecher)
        self.session.lm.torrent_checker.add_gui_request(infohash, True)
def add_torrent(self, torrentdl, atp):
    """Synchronously add a torrent for download *torrentdl* using the
    add_torrent_params mapping *atp*.

    Returns the libtorrent torrent handle.  Raises ValueError when *atp*
    has neither a 'ti' nor a 'url' key, and DuplicateDownloadException
    when the infohash is already registered in self.torrents.
    """
    # If we are collecting the torrent for this infohash, abort this first.
    with self.metainfo_lock:
        ltsession = self.get_session(atp.pop('hops', 0))

        if 'ti' in atp:
            infohash = str(atp['ti'].info_hash())
        elif 'url' in atp:
            infohash = binascii.hexlify(parse_magnetlink(atp['url'])[1])
        else:
            raise ValueError('No ti or url key in add_torrent_params')

        # Short-circuit when the session already knows this torrent.
        existing = set(str(h.info_hash()) for h in ltsession.get_torrents())
        if infohash in existing:
            self.torrents[infohash] = (torrentdl, ltsession)
            return ltsession.find_torrent(
                lt.big_number(binascii.unhexlify(infohash)))

        # Otherwise add it anew and re-read the infohash from the handle.
        handle = ltsession.add_torrent(encode_atp(atp))
        infohash = str(handle.info_hash())
        if infohash in self.torrents:
            raise DuplicateDownloadException(
                "This download already exists.")

        self.torrents[infohash] = (torrentdl, ltsession)
        self._logger.debug("added torrent %s", infohash)
        return handle
def ltrm(self, irc, msg, args, tids):
    """Removes the torrent from libtorrent.

    *tids* is a whitespace-separated string of numeric ids or id ranges
    ("3", "5-9").  For each id, looks up the info-hash in the torrents
    table and removes the matching handle from the libtorrent session.
    Reports errors through *irc.error*.
    """
    if not hasattr(self, 'ltses'):
        irc.error('no libtorrent session')
        return
    with closing(getdb()) as db, closing(db.cursor()) as cur:
        tids = tids.split()
        for tid in tids:
            # Accept a bare id or an inclusive "start-stop" range.
            if not re.match('^([0-9]+|[0-9]+-[0-9]+)$', tid):
                irc.error('invalid id %s' % tid)
                continue
            if '-' in tid:
                start, stop = map(int, tid.split('-'))
            else:
                start = stop = int(tid)
            for i in xrange(start, stop + 1):
                cur.execute('SELECT Name, info_hash FROM torrents WHERE ID = %s', (i,))
                result = cur.fetchone()
                if not result:
                    # BUG FIX: previously formatted the whole `tids` list
                    # into the message instead of the missing id `i`.
                    irc.error('%s doesnt exist' % i)
                    return
                name, infohash = result
                infohash = lt.big_number(str(infohash))
                handle = self.ltses.find_torrent(infohash)
                if handle.is_valid():
                    self.ltses.remove_torrent(handle)
                else:
                    irc.error('%s not found in lt' % i)
def pause_torrent(self, infohash):
    """Pause the torrent identified by *infohash* (hex string).

    Returns a Deferred whose callback argument is the
    libtorrent.torrent_paused_alert fired once the pause is confirmed.
    """
    deferred = self._make_torrent_alert_handler(infohash, libtorrent.torrent_paused_alert)
    binary_hash = infohash.decode('hex')
    handle = self._ses.find_torrent(libtorrent.big_number(binary_hash))
    handle.pause()
    return deferred
def get_metainfo(self, infohash_or_magnet, callback, timeout=30, timeout_callback=None, notify=True):
    """Fetch metainfo for an infohash or magnet link, delivering it to
    *callback* on a worker thread.

    Reschedules itself (5s later, with a shrinking timeout) while the
    DHT is not ready.  Serves from the metainfo cache when possible;
    otherwise adds a metadata-only torrent to the session and registers
    the request in self.metainfo_requests.  Concurrent requests for the
    same infohash share one entry and accumulate callbacks.
    """
    if not self.is_dht_ready() and timeout > 5:
        self._logger.info("DHT not ready, rescheduling get_metainfo")
        # Defaults in the lambda freeze the current arguments; timeout
        # shrinks by 5 each retry so the recursion terminates.
        self.trsession.lm.threadpool.add_task(lambda i=infohash_or_magnet, c=callback, t=timeout - 5,
                                              tcb=timeout_callback, n=notify: self.get_metainfo(i, c, t, tcb, n), 5)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith('magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: nothing to look up.
    if infohash in self.torrents:
        return

    with self.metainfo_lock:
        self._logger.debug('get_metainfo %s %s %s', infohash_or_magnet, callback, timeout)

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            self.trsession.lm.threadpool.call_in_thread(0, callback, deepcopy(cache_result))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {'save_path': self.metadata_tmpdir,
                   'duplicate_is_error': True,
                   'paused': False,
                   'auto_managed': False,
                   'upload_mode': True}
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            try:
                handle = self.get_session().add_torrent(encode_atp(atp))
            except TypeError as e:
                # Older/newer bindings disagree on the info_hash type;
                # retry with the raw bytes as a best effort.
                self._logger.warning("Failed to add torrent with infohash %s, using libtorrent version %s, "
                                     "attempting to use it as it is and hoping for the best",
                                     hexlify(infohash_bin), lt.version)
                self._logger.warning("Error was: %s", e)
                atp['info_hash'] = infohash_bin
                handle = self.get_session().add_torrent(encode_atp(atp))

            if notify:
                self.notifier.notify(NTFY_TORRENTS, NTFY_MAGNET_STARTED, infohash_bin)

            self.metainfo_requests[infohash] = {'handle': handle,
                                                'callbacks': [callback],
                                                'timeout_callbacks': [timeout_callback] if timeout_callback else [],
                                                'notify': notify}
            # Give up after `timeout` seconds; got_metainfo(timeout=True)
            # fires the timeout callbacks.
            self.trsession.lm.threadpool.add_task(lambda: self.got_metainfo(infohash, timeout=True), timeout)

        else:
            # A request for this infohash is already in flight: merge in.
            # `notify` only stays True if every requester wanted it.
            self.metainfo_requests[infohash]['notify'] = self.metainfo_requests[infohash]['notify'] and notify
            callbacks = self.metainfo_requests[infohash]['callbacks']
            if callback not in callbacks:
                callbacks.append(callback)
            else:
                self._logger.debug('get_metainfo duplicate detected, ignoring')
def get_metainfo(self, infohash_or_magnet, callback, timeout=30):
    """Fetch metainfo for an infohash or magnet link and hand it to
    *callback* via the user-callback handler.

    Reschedules itself (5s later, with a shrinking timeout) while the
    DHT is not ready.  Serves cached metainfo when available; otherwise
    adds a metadata-only torrent and tracks the pending request in
    self.metainfo_requests so concurrent callers share one lookup.
    """
    if not self.is_dht_ready() and timeout > 5:
        print >> sys.stderr, "LibtorrentDownloadImpl: DHT not ready, rescheduling get_metainfo"
        # Lambda defaults freeze the arguments; timeout shrinks by 5 per
        # retry so the rescheduling eventually stops.
        self.trsession.lm.rawserver.add_task(
            lambda i=infohash_or_magnet, c=callback, t=timeout - 5: self.
            get_metainfo(i, c, t), 5)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith(
        'magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(
        magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: no metainfo lookup needed.
    with self.torlock:
        if infohash in self.torrents:
            return

    with self.metainfo_lock:
        if DEBUG:
            print >> sys.stderr, 'LibtorrentMgr: get_metainfo', infohash_or_magnet, callback, timeout

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            self.trsession.uch.perform_usercallback(
                lambda cb=callback, mi=deepcopy(cache_result): cb(mi))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {
                'save_path': tempfile.gettempdir(),
                'duplicate_is_error': True,
                'paused': False,
                'auto_managed': False,
                'flags': 4
            }
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            handle = self.ltsession.add_torrent(atp)

            self.metainfo_requests[infohash] = (handle, [callback])
            # Give up after `timeout` seconds; got_metainfo(True) signals
            # the timeout path.
            self.trsession.lm.rawserver.add_task(
                lambda: self.got_metainfo(infohash, True), timeout)

        else:
            # Lookup already in flight: just register this extra callback.
            callbacks = self.metainfo_requests[infohash][1]
            if callback not in callbacks:
                callbacks.append(callback)
            elif DEBUG:
                print >> sys.stderr, 'LibtorrentMgr: get_metainfo duplicate detected, ignoring'
def RemoveTorrent(self, hash):
    """Remove the torrent with hex info-hash *hash* from the session.

    Returns True on success, False when the hash is malformed or no
    valid matching torrent exists in the session.
    """
    try:
        info_hash = libtorrent.big_number(hash.decode('hex'))
    except TypeError:
        # hash was not a valid hex string
        return False
    handle = self._session.find_torrent(info_hash)
    if handle.is_valid():
        self._session.remove_torrent(handle)
        return True
    return False
def stop_download(self, torrent):
    """Stop a torrent that is currently downloading.

    Pops the 'download' entry out of the *torrent* dict; when both that
    download object and a valid libtorrent handle exist, saves resume
    data and removes the download from the session (hidden).
    """
    info_hash = lt.big_number(torrent["metainfo"].get_infohash())
    self._logger.info("Stopping %s", str(info_hash))
    download = torrent.pop('download', False)
    handle = self.session.lm.ltmgr.get_session().find_torrent(info_hash)
    if not (download and handle.is_valid()):
        return
    self._logger.info("Writing resume data for %s", str(info_hash))
    download.save_resume_data()
    self.session.remove_download(download, hidden=True)
def _handle_bump_torrent(self, params):
    """"Bump" a torrent by pausing it briefly and then resuming it.

    params: mapping with key 'info_hash' identifying the torrent.
    Logs an error and bails out when no matching handle is found.
    """
    digest = lt.big_number(str(params['info_hash']))
    handle = self.session._ses.find_torrent(digest)
    if not handle:
        self.session._log.error('Missing torrent handle trying to bump %s' % digest)
        return
    self.session._log.info('Pausing torrent %s' % digest)
    handle.pause()
    # Brief pause so libtorrent registers the state change before resuming.
    time.sleep(0.5)
    self.session._log.info('Resuming torrent %s' % digest)
    handle.resume()
def network_create_engine_wrapper(self, lm_network_engine_wrapper_created_callback, pstate, lm_network_vod_event_callback, initialdlstatus = None):
    """Build the libtorrent add_torrent_params for this download, hand
    them to the LibtorrentMgr, and notify the creation callback.

    Side effects: sets self.handle, self.orig_files (when metainfo is
    available) and self.lm_network_vod_event_callback; clears
    self.cew_scheduled under self.dllock.
    """
    # Called by any thread, assume dllock already acquired
    if DEBUG:
        print >>sys.stderr,"LibtorrentDownloadImpl: create_engine_wrapper()"
    atp = {}
    atp["save_path"] = str(self.dlconfig['saveas'])
    atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
    atp["paused"] = True
    atp["auto_managed"] = False
    atp["duplicate_is_error"] = True
    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        # Full metainfo available: build a torrent_info object.
        metainfo = self.tdef.get_metainfo()
        torrentinfo = lt.torrent_info(metainfo)

        torrent_files = torrentinfo.files()
        is_multifile = len(self.tdef.get_files_as_unicode()) > 1
        commonprefix = os.path.commonprefix([file_entry.path for file_entry in torrent_files]) if is_multifile else ''
        swarmname = os.path.split(commonprefix)[0] or os.path.split(commonprefix)[1]

        # Multi-file torrents whose shared directory name differs from the
        # corrected info-name get every file re-rooted under the corrected name.
        if is_multifile and swarmname != self.correctedinfoname:
            for i, file_entry in enumerate(torrent_files):
                filename = file_entry.path[len(swarmname)+1:]
                torrentinfo.rename_file(i, str(os.path.join(self.correctedinfoname, filename)))

        # Remember the (possibly renamed) paths for later bookkeeping.
        self.orig_files = [torrent_file.path for torrent_file in torrentinfo.files()]

        atp["ti"] = torrentinfo
        if pstate and pstate.get('engineresumedata', None):
            atp["resume_data"] = lt.bencode(pstate['engineresumedata'])
        print >> sys.stderr, self.tdef.get_name_as_unicode(), pstate.get('engineresumedata', None) if pstate else None
    else:
        # Metainfo not known yet: add by bare info-hash and name only.
        atp["info_hash"] = lt.big_number(self.tdef.get_infohash())
        atp["name"] = str(self.tdef.get_name())

    self.handle = self.ltmgr.add_torrent(self, atp)
    self.lm_network_vod_event_callback = lm_network_vod_event_callback

    if self.handle:
        self.set_selected_files()
        if self.get_mode() == DLMODE_VOD:
            self.set_vod_mode()
        # The torrent was added paused; start it unless we were asked
        # to keep it stopped.
        if initialdlstatus != DLSTATUS_STOPPED:
            self.handle.resume()

    with self.dllock:
        self.cew_scheduled = False
        if lm_network_engine_wrapper_created_callback is not None:
            lm_network_engine_wrapper_created_callback(self,pstate)
def _create_session(self, port_min, port_max):
    """Create and configure a libtorrent session listening on a port in
    [port_min, port_max]; returns the session object."""
    # TODO: Move seedbank version to a version or init file
    session = lt.session(lt.fingerprint("TE", 1, 0, 0, 0), 0)
    session.set_settings(get_server_settings())
    if self._peer_id:
        session.set_peer_id(lt.big_number(self._peer_id))
    session.set_alert_mask(ALERT_MASK_DEFAULT)
    reuse_flags = (lt.listen_on_flags_t.listen_reuse_address
                   | lt.listen_on_flags_t.listen_no_system_port)
    # No specific interface: bind on all (interface argument is None).
    session.listen_on(port_min, port_max, None, reuse_flags)
    return session
def load_torrent(self, infohash):
    """Begin loading the torrent identified by *infohash* (hex string).

    Returns a Deferred that errbacks with ValueError("Invalid handle")
    when the libtorrent handle is unusable, or
    ValueError("Unknown infohash") when the hash is not tracked in
    self._metainfo_torrent.
    """
    d = self._make_torrent_alert_handler(infohash, (
        # libtorrent.torrent_loaded_alert is not available in this
        # binding; use a placeholder type no real alert will match.
        type('AnonymousType', (object, ), {}),
    ))
    handle = self._ses.find_torrent(libtorrent.big_number(infohash.decode('hex')))
    try:
        # BUG FIX: was handle.infohash(), which does not exist on
        # torrent_handle -- the resulting AttributeError escaped the
        # `except RuntimeError` clause.
        handle.info_hash()
    except RuntimeError:
        # Fire the failure once and stop; falling through could errback
        # the same Deferred a second time.
        d.errback(failure.Failure(ValueError("Invalid handle")))
        return d
    if infohash not in self._metainfo_torrent:
        d.errback(failure.Failure(ValueError("Unknown infohash")))
    return d
def get_metainfo(self, infohash_or_magnet, callback, timeout=30):
    """Fetch metainfo for an infohash or magnet link and hand it to
    *callback* via the user-callback handler.

    Reschedules itself (5s later, shrinking timeout) while the DHT is
    not ready.  Serves cached metainfo when available; otherwise adds a
    metadata-only torrent and tracks the request in
    self.metainfo_requests so concurrent callers share one lookup.
    """
    if not self.is_dht_ready() and timeout > 5:
        print >> sys.stderr, "LibtorrentDownloadImpl: DHT not ready, rescheduling get_metainfo"
        # Lambda defaults freeze the arguments; the timeout shrinks by 5
        # each retry so rescheduling eventually stops.
        self.trsession.lm.rawserver.add_task(lambda i=infohash_or_magnet, c=callback, t=timeout - 5: self.get_metainfo(i, c, t), 5)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith('magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: no metainfo lookup needed.
    with self.torlock:
        if infohash in self.torrents:
            return

    with self.metainfo_lock:
        if DEBUG:
            print >> sys.stderr, 'LibtorrentMgr: get_metainfo', infohash_or_magnet, callback, timeout

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            self.trsession.uch.perform_usercallback(lambda cb=callback, mi=deepcopy(cache_result): cb(mi))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {'save_path': tempfile.gettempdir(),
                   'duplicate_is_error': True,
                   'paused': False,
                   'auto_managed': False,
                   'flags': 4}
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            handle = self.ltsession.add_torrent(atp)

            self.metainfo_requests[infohash] = (handle, [callback])
            # Give up after `timeout` seconds; got_metainfo(True) signals
            # the timeout path.
            self.trsession.lm.rawserver.add_task(lambda: self.got_metainfo(infohash, True), timeout)

        else:
            # Lookup already in flight: just register this extra callback.
            callbacks = self.metainfo_requests[infohash][1]
            if callback not in callbacks:
                callbacks.append(callback)
            elif DEBUG:
                print >> sys.stderr, 'LibtorrentMgr: get_metainfo duplicate detected, ignoring'
def get_metainfo(self, infohash_or_magnet, callback, timeout=30, timeout_callback=None, notify=True):
    """Fetch metainfo for an infohash or magnet link, delivering it to
    *callback*.

    Reschedules itself via the Twisted reactor (5s later, shrinking
    timeout) while the DHT is not ready.  Serves from the metainfo cache
    when possible; otherwise adds a metadata-only torrent to the
    dedicated metainfo session and registers the request in
    self.metainfo_requests.  Concurrent requests for the same infohash
    share one entry and accumulate callbacks.
    """
    if not self.is_dht_ready() and timeout > 5:
        self._logger.info("DHT not ready, rescheduling get_metainfo")

        def schedule_call():
            # Random suffix keeps the task name unique so repeated
            # reschedules do not collide in the task register.
            random_id = ''.join(
                random.choice('0123456789abcdef') for _ in xrange(30))
            self.register_task(
                "schedule_metainfo_lookup_%s" % random_id,
                reactor.callLater(
                    5,
                    lambda i=infohash_or_magnet, c=callback, t=timeout - 5,
                    tcb=timeout_callback, n=notify: self.get_metainfo(
                        i, c, t, tcb, n)))

        # Scheduling must happen on the reactor thread.
        reactor.callFromThread(schedule_call)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith(
        'magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(
        magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: nothing to look up.
    if infohash in self.torrents:
        return

    with self.metainfo_lock:
        self._logger.debug('get_metainfo %s %s %s', infohash_or_magnet,
                           callback, timeout)

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            callback(deepcopy(cache_result))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {
                'save_path': self.metadata_tmpdir,
                'flags': (lt.add_torrent_params_flags_t.flag_upload_mode)
            }
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            try:
                handle = self.ltsession_metainfo.add_torrent(
                    encode_atp(atp))
            except TypeError as e:
                # Bindings disagree on the info_hash type; retry with the
                # raw bytes as a best effort.
                self._logger.warning(
                    "Failed to add torrent with infohash %s, "
                    "attempting to use it as it is and hoping for the best",
                    hexlify(infohash_bin))
                self._logger.warning("Error was: %s", e)
                atp['info_hash'] = infohash_bin
                handle = self.ltsession_metainfo.add_torrent(
                    encode_atp(atp))

            if notify:
                self.notifier.notify(NTFY_TORRENTS, NTFY_MAGNET_STARTED,
                                     infohash_bin)

            self.metainfo_requests[infohash] = {
                'handle': handle,
                'callbacks': [callback],
                'timeout_callbacks': [timeout_callback] if timeout_callback else [],
                'notify': notify
            }

            # if the handle is valid and already has metadata which is the case when torrent already exists in
            # session then metadata_received_alert is not fired so we call self.got_metainfo() directly here
            if handle.is_valid() and handle.has_metadata():
                self.got_metainfo(infohash, timeout=False)
                return

            def schedule_call():
                # Unique task name per lookup; fires the timeout path
                # after `timeout` seconds.
                random_id = ''.join(
                    random.choice('0123456789abcdef') for _ in xrange(30))
                self.register_task(
                    "schedule_got_metainfo_lookup_%s" % random_id,
                    reactor.callLater(
                        timeout,
                        lambda: self.got_metainfo(infohash, timeout=True)))

            reactor.callFromThread(schedule_call)
        else:
            # A request for this infohash is already in flight: merge in.
            # `notify` only stays True if every requester wanted it.
            self.metainfo_requests[infohash][
                'notify'] = self.metainfo_requests[infohash][
                    'notify'] and notify
            callbacks = self.metainfo_requests[infohash]['callbacks']
            if callback not in callbacks:
                callbacks.append(callback)
            else:
                self._logger.debug(
                    'get_metainfo duplicate detected, ignoring')
def hex_to_hash(hex_):
    """Convert *hex_* into whatever hash representation the installed
    libtorrent binding expects: the value unchanged when the binding
    works with bytes, otherwise wrapped in a big_number."""
    return hex_ if hash_is_bytes() else lt.big_number(hex_)
def get_metainfo(self, infohash_or_magnet, callback, timeout=30, timeout_callback=None, notify=True):
    """Fetch metainfo for an infohash or magnet link, delivering it to
    *callback* on a worker thread.

    Reschedules itself (5s later, shrinking timeout) while the DHT is
    not ready.  Serves from the metainfo cache when possible; otherwise
    adds a metadata-only torrent and registers the request in
    self.metainfo_requests.  Concurrent requests for the same infohash
    share one entry and accumulate callbacks.
    """
    if not self.is_dht_ready() and timeout > 5:
        self._logger.info("DHT not ready, rescheduling get_metainfo")
        # Lambda defaults freeze the arguments; the timeout shrinks by 5
        # per retry so rescheduling eventually stops.
        self.trsession.lm.threadpool.add_task(
            lambda i=infohash_or_magnet, c=callback, t=timeout - 5, tcb=
            timeout_callback, n=notify: self.get_metainfo(i, c, t, tcb, n),
            5)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith(
        'magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(
        magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: nothing to look up.
    if infohash in self.torrents:
        return

    with self.metainfo_lock:
        self._logger.debug('get_metainfo %s %s %s', infohash_or_magnet,
                           callback, timeout)

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            self.trsession.lm.threadpool.call_in_thread(
                0, callback, deepcopy(cache_result))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {
                'save_path': self.metadata_tmpdir,
                'duplicate_is_error': True,
                'paused': False,
                'auto_managed': False,
                'upload_mode': True
            }
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            try:
                handle = self.get_session().add_torrent(encode_atp(atp))
            except TypeError as e:
                # Bindings disagree on the info_hash type; retry with the
                # raw bytes as a best effort.
                self._logger.warning(
                    "Failed to add torrent with infohash %s, using libtorrent version %s, "
                    "attempting to use it as it is and hoping for the best",
                    hexlify(infohash_bin), lt.version)
                self._logger.warning("Error was: %s", e)
                atp['info_hash'] = infohash_bin
                handle = self.get_session().add_torrent(encode_atp(atp))

            if notify:
                self.notifier.notify(NTFY_TORRENTS, NTFY_MAGNET_STARTED,
                                     infohash_bin)

            self.metainfo_requests[infohash] = {
                'handle': handle,
                'callbacks': [callback],
                'timeout_callbacks': [timeout_callback] if timeout_callback else [],
                'notify': notify
            }
            # Give up after `timeout` seconds; got_metainfo(timeout=True)
            # fires the timeout callbacks.
            self.trsession.lm.threadpool.add_task(
                lambda: self.got_metainfo(infohash, timeout=True), timeout)

        else:
            # A request for this infohash is already in flight: merge in.
            # `notify` only stays True if every requester wanted it.
            self.metainfo_requests[infohash][
                'notify'] = self.metainfo_requests[infohash][
                    'notify'] and notify
            callbacks = self.metainfo_requests[infohash]['callbacks']
            if callback not in callbacks:
                callbacks.append(callback)
            else:
                self._logger.debug(
                    'get_metainfo duplicate detected, ignoring')
def get_metainfo(self, infohash_or_magnet, callback, timeout=30, timeout_callback=None, notify=True):
    """Fetch metainfo for an infohash or magnet link, delivering it to
    *callback*.

    Reschedules itself via the Twisted reactor (5s later, shrinking
    timeout) while the DHT is not ready.  Serves from the metainfo
    cache when possible; otherwise adds a metadata-only torrent to the
    dedicated metainfo session and registers the request in
    self.metainfo_requests.  Concurrent requests for the same infohash
    share one entry and accumulate callbacks.
    """
    if not self.is_dht_ready() and timeout > 5:
        self._logger.info("DHT not ready, rescheduling get_metainfo")

        def schedule_call():
            # Lambda defaults freeze the arguments; timeout shrinks by 5
            # per retry.  Scheduling must happen on the reactor thread.
            self.register_anonymous_task("schedule_metainfo_lookup",
                                         reactor.callLater(5,
                                                           lambda i=infohash_or_magnet, c=callback,
                                                           t=timeout-5, tcb=timeout_callback, n=notify:
                                                           self.get_metainfo(i, c, t, tcb, n)))

        reactor.callFromThread(schedule_call)
        return

    magnet = infohash_or_magnet if infohash_or_magnet.startswith('magnet') else None
    infohash_bin = infohash_or_magnet if not magnet else parse_magnetlink(magnet)[1]
    infohash = binascii.hexlify(infohash_bin)

    # Already downloading this torrent: nothing to look up.
    if infohash in self.torrents:
        return

    with self.metainfo_lock:
        self._logger.debug('get_metainfo %s %s %s', infohash_or_magnet, callback, timeout)

        cache_result = self._get_cached_metainfo(infohash)
        if cache_result:
            # Deliver a deep copy so the caller cannot mutate the cache.
            callback(deepcopy(cache_result))

        elif infohash not in self.metainfo_requests:
            # Flags = 4 (upload mode), should prevent libtorrent from creating files
            atp = {'save_path': self.metadata_tmpdir,
                   'flags': (lt.add_torrent_params_flags_t.flag_upload_mode)}
            if magnet:
                atp['url'] = magnet
            else:
                atp['info_hash'] = lt.big_number(infohash_bin)
            try:
                handle = self.ltsession_metainfo.add_torrent(encode_atp(atp))
            except TypeError as e:
                # Bindings disagree on the info_hash type; retry with the
                # raw bytes as a best effort.
                self._logger.warning("Failed to add torrent with infohash %s, "
                                     "attempting to use it as it is and hoping for the best",
                                     hexlify(infohash_bin))
                self._logger.warning("Error was: %s", e)
                atp['info_hash'] = infohash_bin
                handle = self.ltsession_metainfo.add_torrent(encode_atp(atp))

            if notify:
                self.notifier.notify(NTFY_TORRENTS, NTFY_MAGNET_STARTED, infohash_bin)

            self.metainfo_requests[infohash] = {'handle': handle,
                                                'callbacks': [callback],
                                                'timeout_callbacks': [timeout_callback] if timeout_callback else [],
                                                'notify': notify}

            # if the handle is valid and already has metadata which is the case when torrent already exists in
            # session then metadata_received_alert is not fired so we call self.got_metainfo() directly here
            if handle.is_valid() and handle.has_metadata():
                self.got_metainfo(infohash, timeout=False)
                return

            def schedule_call():
                # Fires the timeout path after `timeout` seconds.
                self.register_anonymous_task("schedule_got_metainfo_lookup",
                                             reactor.callLater(timeout,
                                                               lambda: self.got_metainfo(infohash, timeout=True)))

            reactor.callFromThread(schedule_call)
        else:
            # A request for this infohash is already in flight: merge in.
            # `notify` only stays True if every requester wanted it.
            self.metainfo_requests[infohash]['notify'] = self.metainfo_requests[infohash]['notify'] and notify
            callbacks = self.metainfo_requests[infohash]['callbacks']
            if callback not in callbacks:
                callbacks.append(callback)
            else:
                self._logger.debug('get_metainfo duplicate detected, ignoring')
def network_create_engine_wrapper(self, lm_network_engine_wrapper_created_callback, pstate, lm_network_vod_event_callback, initialdlstatus = None):
    """Build the libtorrent add_torrent_params for this download, hand
    them to the LibtorrentMgr, and notify the creation callback.

    Side effects: sets self.handle, self.orig_files (when metainfo is
    available), self.lm_network_vod_event_callback and possibly
    self.pause_after_next_hashcheck; clears self.cew_scheduled under
    self.dllock.
    """
    # Called by any thread, assume dllock already acquired
    if DEBUG:
        print >>sys.stderr,"LibtorrentDownloadImpl: create_engine_wrapper()"
    atp = {}
    atp["save_path"] = str(self.dlconfig['saveas'])
    # Using full allocation seems to fix issues with streaming certain files.
    atp["storage_mode"] = lt.storage_mode_t.storage_mode_allocate if self.get_mode() == DLMODE_VOD else lt.storage_mode_t.storage_mode_sparse
    atp["paused"] = True
    atp["auto_managed"] = False
    atp["duplicate_is_error"] = True

    resume_data = pstate.get('engineresumedata', None) if pstate else None

    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        # Full metainfo available: build a torrent_info object.
        metainfo = self.tdef.get_metainfo()
        torrentinfo = lt.torrent_info(metainfo)

        torrent_files = torrentinfo.files()
        is_multifile = len(self.tdef.get_files_as_unicode()) > 1
        commonprefix = os.path.commonprefix([file_entry.path for file_entry in torrent_files]) if is_multifile else ''
        swarmname = os.path.split(commonprefix)[0] or os.path.split(commonprefix)[1]

        # Multi-file torrents whose shared directory name differs from the
        # corrected info-name get every file re-rooted under the corrected name.
        if is_multifile and swarmname != self.correctedinfoname:
            for i, file_entry in enumerate(torrent_files):
                filename = file_entry.path[len(swarmname)+1:]
                torrentinfo.rename_file(i, str(os.path.join(self.correctedinfoname, filename)))

        # Remember the (possibly renamed) paths for later bookkeeping.
        self.orig_files = [torrent_file.path for torrent_file in torrentinfo.files()]

        atp["ti"] = torrentinfo
        if resume_data:
            atp["resume_data"] = lt.bencode(resume_data)
        print >> sys.stderr, self.tdef.get_name_as_unicode(), resume_data
    else:
        if self.tdef.get_url():
            # We prefer to use an url, since it may contain trackers
            atp["url"] = self.tdef.get_url()
        else:
            # Metainfo not known yet: add by bare info-hash.
            atp["info_hash"] = lt.big_number(self.tdef.get_infohash())
        atp["name"] = str(self.tdef.get_name())

    self.handle = self.ltmgr.add_torrent(self, atp)
    self.lm_network_vod_event_callback = lm_network_vod_event_callback

    if self.handle:
        self.set_selected_files()
        if self.get_mode() == DLMODE_VOD:
            self.set_vod_mode()
        # If we lost resume_data always resume download in order to force checking
        if initialdlstatus != DLSTATUS_STOPPED or not resume_data:
            self.handle.resume()
            # If we only needed to perform checking, pause download after it is complete
            self.pause_after_next_hashcheck = initialdlstatus == DLSTATUS_STOPPED
    else:
        print >> sys.stderr, "Could not add torrent to LibtorrentManager", self.tdef.get_name_as_unicode()

    with self.dllock:
        self.cew_scheduled = False
        if lm_network_engine_wrapper_created_callback is not None:
            lm_network_engine_wrapper_created_callback(self,pstate)
def do_supply():
    """Connect a fake local peer to every outstanding metainfo request,
    since the regular DHT obviously cannot supply peers here."""
    manager = LibtorrentMgr.getInstance()
    fake_endpoint = ("127.0.0.1", LISTEN_PORT)
    for hex_hash in manager.metainfo_requests:
        info_hash = lt.big_number(hex_hash.decode('hex'))
        manager.ltsession.find_torrent(info_hash).connect_peer(fake_endpoint, 0)