def _create_torrent(self, resource, fs, root='.', use_sudo=False): t = lt.create_torrent(fs) transports = resource.transports() torrent_transport = next( (x for x in transports if x['name'] == 'torrent')) trackers = torrent_transport['trackers'] for tracker in trackers: t.add_tracker(tracker) lt.set_piece_hashes(t, os.path.join(root, '..')) torrent = t.generate() torrent['priv'] = True # private torrent, no DHT, only trackers name = self._create_torrent_name() try: # not checking for path existence with open(name, 'wb') as f: f.write(lt.bencode(torrent)) except IOError as e: if e.errno != errno.ENOENT: raise os.makedirs(self._torrent_path) with open(name, 'wb') as f: f.write(lt.bencode(torrent)) log.debug("Created torrent file %s", name) magnet_uri = lt.make_magnet_uri(lt.torrent_info(name)) # self._torrents[root] = (name, magnet_uri) if not use_sudo: self._torrents.append((name, magnet_uri, root)) else: self._sudo_torrents.append((name, magnet_uri, root)) return name
def on_got_metainfo(metainfo): if not isinstance(metainfo, dict) or 'info' not in metainfo: self._logger.warning("Received metainfo is not a valid dictionary") request.setResponseCode(http.INTERNAL_SERVER_ERROR) request.write(json.dumps({"error": 'invalid response'})) self.finish_request(request) return infohash = hashlib.sha1(bencode(metainfo['info'])).digest() # Check if the torrent is already in the downloads metainfo['download_exists'] = infohash in self.session.lm.downloads # Update the torrent database with metainfo if it is an unnamed torrent if self.session.lm.torrent_db: self.session.lm.torrent_db.update_torrent_with_metainfo(infohash, metainfo) self.session.lm.torrent_db._db.commit_now() # Save the torrent to our store try: self.session.save_collected_torrent(infohash, bencode(metainfo)) except TypeError: # Note: in libtorrent 1.1.1, bencode throws a TypeError which is a known bug pass request.write(json.dumps({"metainfo": metainfo}, ensure_ascii=False)) self.finish_request(request)
def create_torrent(path, filename=""):
    fs = lt.file_storage()
    if filename == "":
        # Walk the folder and add every (directory, filename) pair it returns.
        listz = list_folder(path)
        for dirpath, fname in listz:
            pathz = os.path.join(dirpath, fname)
            size = os.path.getsize(pathz)
            fs.add_file(pathz, size)
    else:
        pathz = os.path.join(path, filename)
        print "pathz", pathz
        size = os.path.getsize(pathz)
        fs.add_file(filename, size)
    tor = lt.create_torrent(fs)
    lt.set_piece_hashes(tor, '.')
    tor.set_comment("COMEONES")
    tor.set_creator("PEONDUSUD")
    announce_url = "http://www.gks.gs/tracker"
    tor.add_tracker(announce_url)
    tor.set_priv(True)
    tor.add_url_seed("http://192.168.70.136:58888")
    raw = lt.bencode(tor.generate())
    with open(save_torrent_folder + filename + ".torrent", "wb") as f:
        f.write(raw)
    print "torrent raw :"
    print raw
def magnet2torrent(magnet, output_name=None):
    if output_name and \
            not pt.isdir(output_name) and \
            not pt.isdir(pt.dirname(pt.abspath(output_name))):
        print "Invalid output folder: " + pt.dirname(pt.abspath(output_name))
        print ""
        return

    tempdir = tempfile.mkdtemp()
    ses = lt.session()
    params = {
        'save_path': tempdir,
        'storage_mode': lt.storage_mode_t(2),
        'paused': False,
        'auto_managed': True,
        'duplicate_is_error': True
    }
    handle = lt.add_magnet_uri(ses, magnet, params)

    print "Downloading Metadata (this may take a while)"
    while not handle.has_metadata():
        try:
            time.sleep(1)
        except KeyboardInterrupt:
            print "Aborting..."
            ses.pause()
            print "Cleanup dir " + tempdir
            shutil.rmtree(tempdir)
            return
    print "done"

    torinfo = handle.get_torrent_info()
    output = pt.abspath(torinfo.name() + ".torrent")
    if output_name:
        if pt.isdir(output_name):
            output = pt.abspath(pt.join(output_name, torinfo.name() + ".torrent"))
        elif pt.isdir(pt.dirname(pt.abspath(output_name))):
            output = pt.abspath(output_name)

    print 'saving torrent file here : ' + output + " ..."
    fs = lt.file_storage()
    for file in torinfo.files():
        fs.add_file(file)
    torfile = lt.create_torrent(fs)
    torfile.set_comment(torinfo.comment())
    torfile.set_creator(torinfo.creator())
    torcontent = lt.bencode(torfile.generate())
    with open(output, "wb") as f:
        f.write(torcontent)
    print 'Saved! Cleaning up dir: ' + tempdir
    shutil.rmtree(tempdir)
    return output
def magnet2torrent(magnet, output_name=None):
    if output_name and \
            not pt.isdir(output_name) and \
            not pt.isdir(pt.dirname(pt.abspath(output_name))):
        print("Invalid output folder: " + pt.dirname(pt.abspath(output_name)))
        print("")
        sys.exit(0)

    tempdir = tempfile.mkdtemp()
    ses = lt.session()
    params = {
        'save_path': tempdir,
        'storage_mode': lt.storage_mode_t(2),
        'paused': False,
        'auto_managed': True,
        'duplicate_is_error': True
    }
    handle = lt.add_magnet_uri(ses, magnet, params)

    print("Downloading Metadata (this may take a while)")
    while not handle.has_metadata():
        try:
            sleep(1)
        except KeyboardInterrupt:
            print("Aborting...")
            ses.pause()
            print("Cleanup dir " + tempdir)
            shutil.rmtree(tempdir)
            sys.exit(0)
    ses.pause()
    print("Done")

    torinfo = handle.get_torrent_info()
    torfile = lt.create_torrent(torinfo)

    output = pt.abspath(torinfo.name() + ".torrent")
    if output_name:
        if pt.isdir(output_name):
            output = pt.abspath(pt.join(output_name, torinfo.name() + ".torrent"))
        elif pt.isdir(pt.dirname(pt.abspath(output_name))):
            output = pt.abspath(output_name)
    print("Saving torrent file here : " + output + " ...")

    torcontent = lt.bencode(torfile.generate())
    with open(output, "wb") as f:
        f.write(torcontent)
    print("Saved! Cleaning up dir: " + tempdir)
    ses.remove_torrent(handle)
    shutil.rmtree(tempdir)
    return output
def __teardown(self): self.l.log("TorrentAgent:stopping") self.session.pause() resume_ctr = 0 # Save session state for individual torrents for h, f in self.handle_by_name.values(): if h.is_paused(): continue if not h.is_valid(): continue if not h.has_metadata(): continue self.l.log("TorrentAgent:saving resume data for " + h.name(), L.DBG) h.save_resume_data() resume_ctr += 1 self.l.log("TorrentAgent:waiting for resume data...") while resume_ctr > 0: a = self.session.wait_for_alert(30 * 1000) # 30 secs if a == None: self.l.log( "TorrentAgent:aborting with outstanding torrents to save resume data", L.WARN) break dummy = self.session.pop_alert() if a.what() == "save resume data complete": filename = self.c["session_path"]+"/"+str(a.handle.info_hash())+".resume" try: f = open(filename,'wb') self.l.log("TorrentAgent:writing resume data to " + filename, L.DBG) self.l.log_ui("Writing resume data to " + filename) f.write(LT.bencode(a.resume_data)) f.close() except: self.l.log("TorrentAgent:error saving resume data " + filename, L.ERR) elif a.what() == "save resume data failed": self.l.log_ui("No resume data for " + a.handle.name()) self.l.log( "TorrentAgent:error getting resume data for " + a.handle.name() + " " + a.what(), L.WARN) else: self.l.log("TorrentAgent:unknown alert received " + a.what(), L.WARN) continue resume_ctr = resume_ctr - 1 # Save state for the entire session self.l.log("TorrentAgent:saving session state", L.DBG) try: f = open(self.c["session_file"],'wb') f.write(LT.bencode(self.session.save_state())) f.close() except: self.l.log("TorrentAgent:error save session file " + self.c["session_file"], L.ERR)
def magnet2torrent(link): sess = lt.session() sess.add_dht_router("router.bittorrent.com", 6881) sess.add_dht_router("router.utorrent.com", 6881) sess.add_dht_router("router.bitcomet.com", 6881) sess.add_dht_router("dht.transmissionbt.com", 6881) sess.start_dht() params = { "save_path": "/tmp/tor", # "storage_mode":lt.storage_mode_t.storage_mode_sparse, # "paused": True, # "auto_managed": True, "duplicate_is_error": True, } handle = lt.add_magnet_uri(sess, link, params) # waiting for metadata while True: if not handle.has_metadata(): time.sleep(6) else: break sess.pause() # create a torrent torinfo = handle.get_torrent_info() torfile = lt.create_torrent(torinfo) torcontent = lt.bencode(torfile.generate()) torname = torinfo.name() sess.remove_torrent(handle, 1) return {"status": 200, "name": torname, "data": torcontent}
def finalize(self, path, uid): #print 'finalize', path, uid try: fs = lt.file_storage() tmp_fn = os.path.join(self.tmp, uid) try: st_size = os.stat(tmp_fn).st_size except: traceback.print_exc() return #print tmp_fn, st_size lt.add_files(fs, tmp_fn, st_size) t = lt.create_torrent(fs) t.set_creator("DelugeFS"); lt.set_piece_hashes(t, self.tmp) tdata = t.generate() #print tdata with open(self.hgdb+path, 'wb') as f: f.write(lt.bencode(tdata)) #print 'wrote', self.hgdb+path dat_dir = os.path.join(self.dat, uid[:2]) if not os.path.isdir(dat_dir): try: os.mkdir(dat_dir) except: pass os.rename(tmp_fn, os.path.join(dat_dir, uid)) #if os.path.exists(self.shadow+path): os.remove(self.shadow+path) #os.symlink(os.path.join(dat_dir, uid), self.shadow+path) #print 'committing', self.hgdb+path self.repo.hg_commit('wrote %s' % path, files=[self.hgdb+path]) self.should_push = True self.__add_torrent(tdata, path) except Exception as e: traceback.print_exc() raise e
def get_torrent_info_hash(path): """get_torrent_info_hash(path) NOTE: Important. These OS functions can throw IOError or OSError. Make sure you catch these in the caller. """ if os.path.getsize(path) > MAX_TORRENT_SIZE: # file is too large, bailout. (see #12301) raise ValueError("%s is not a valid torrent" % path) f = open(path, "rb") try: import libtorrent data = f.read(MAX_TORRENT_SIZE) if not data or data[0] != "d": # File doesn't start with 'd', bailout (see #12301) raise ValueError("%s is not a valid torrent" % path) metainfo = libtorrent.bdecode(data) try: infohash = metainfo["info"] except StandardError: raise ValueError("%s is not a valid torrent" % path) infohash = sha(libtorrent.bencode(infohash)).digest() return infohash finally: f.close()
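# The helper above is Python 2 code (StandardError, str comparison against "d").
# A minimal Python 3 sketch of the same idea — the info-hash is the SHA-1 of the
# bencoded 'info' dict — assuming MAX_TORRENT_SIZE is defined as above:
def get_torrent_info_hash_py3(path):
    import hashlib
    import libtorrent

    if os.path.getsize(path) > MAX_TORRENT_SIZE:
        raise ValueError("%s is not a valid torrent" % path)
    with open(path, "rb") as f:
        data = f.read(MAX_TORRENT_SIZE)
    # A .torrent file is a bencoded dictionary, so it must start with 'd'
    if not data or data[:1] != b"d":
        raise ValueError("%s is not a valid torrent" % path)
    metainfo = libtorrent.bdecode(data)
    try:
        info = metainfo[b"info"]
    except (TypeError, KeyError):
        raise ValueError("%s is not a valid torrent" % path)
    return hashlib.sha1(libtorrent.bencode(info)).digest()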
def read_extend_metadata_reject(self, conn, piece): while True: response = conn.recv() assert len(response) > 0 if response[0] == EXTEND: break assert response[0] == EXTEND assert ord(response[1]) == 3 payload = bdecode(response[2:]) length = len(bencode(payload)) assert payload["msg_type"] in (1, 2), [payload, response[2:2 + length]] assert payload["piece"] == piece, [payload, response[2:2 + length]] # some clients return msg_type 1, unfortunately this is not a reject but a proper response. # instead libtorrent warns: max outstanding piece requests reached if payload["msg_type"] == 1: assert response[2 + length:] == self.metadata_list[piece] # some clients return msg_type 2, we must make sure no "data" is given (i.e. the request was # rejected) if payload["msg_type"] == 2: assert payload["piece"] == piece, [payload, response[2:2 + length]] assert not "data" in payload, [payload, response[2:2 + length]]
def test_got_metainfo(self): """ Testing whether the callback is correctly invoked when we received metainfo """ test_deferred = Deferred() self.ltmgr.initialize() def metainfo_cb(metainfo): self.assertDictEqual(metainfo, {'info': {'pieces': ['a']}, 'leechers': 0, 'nodes': [], 'seeders': 0, 'initial peers': []}) test_deferred.callback(None) fake_handle = MockObject() torrent_info = MockObject() torrent_info.metadata = lambda: bencode({'pieces': ['a']}) torrent_info.trackers = lambda: [] fake_handle.get_peer_info = lambda: [] fake_handle.torrent_file = lambda: torrent_info self.ltmgr.ltsession_metainfo.remove_torrent = lambda *_: None self.ltmgr.metainfo_requests['a' * 20] = { 'handle': fake_handle, 'timeout_callbacks': [], 'callbacks': [metainfo_cb], 'notify': False } self.ltmgr.got_metainfo("a" * 20) return test_deferred
def test_read_resume_data(self): resume_data = lt.bencode({'file-format': 'libtorrent resume file', 'info-hash': 'abababababababababab', 'name': 'test', 'save_path': '.', 'peers': '\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02', 'file_priority': [0, 1, 1]}) tp = lt.read_resume_data(resume_data) self.assertEqual(tp.name, 'test') self.assertEqual(tp.info_hash, lt.sha1_hash('abababababababababab')) self.assertEqual(tp.file_priorities, [0, 1, 1]) self.assertEqual(tp.peers, [('1.1.1.1', 1), ('2.2.2.2', 2)]) ses = lt.session({'alert_mask': lt.alert.category_t.all_categories}) h = ses.add_torrent(tp) h.connect_peer(('3.3.3.3', 3)) for i in range(0, 10): alerts = ses.pop_alerts() for a in alerts: print(a.message()) time.sleep(0.1)
def createTorrent(torrent_file_path, content_folder_path, progress_cb=None):
    print("createTorrent")
    fs = libtorrent.file_storage()
    # Raising bare strings is invalid; use a proper exception type
    if not os.path.isabs(torrent_file_path):
        raise ValueError("Torrent path not absolute")
    if not os.path.isabs(content_folder_path):
        raise ValueError("Content path not absolute")
    libtorrent.add_files(fs, content_folder_path)
    if fs.num_files() == 0:
        print("No files to add.")
        return

    t = libtorrent.create_torrent(fs, piece_size=0, pad_file_limit=(4 * 1024 * 1024))

    def progress(piece_num):
        if progress_cb:
            pc = int((100.0 * (1.0 + piece_num)) / fs.num_pieces())
            progress_cb(pc)

    parent = os.path.dirname(content_folder_path)
    libtorrent.set_piece_hashes(t, parent, progress)
    data = libtorrent.bencode(t.generate())
    with open(torrent_file_path, 'wb') as f:
        f.write(data)
def test_get_metainfo(self): """ Testing the metainfo fetching method """ test_deferred = Deferred() def metainfo_cb(metainfo): self.assertEqual(metainfo, {'info': {'pieces': ['a']}, 'leechers': 0, 'nodes': [], 'seeders': 0, 'initial peers': []}) test_deferred.callback(None) infohash = "a" * 20 self.ltmgr.initialize() torrent_info = MockObject() torrent_info.metadata = lambda: bencode({'pieces': ['a']}) torrent_info.trackers = lambda: [] fake_handle = MockObject() fake_handle.is_valid = lambda: True fake_handle.has_metadata = lambda: True fake_handle.get_peer_info = lambda: [] fake_handle.torrent_file = lambda: torrent_info self.ltmgr.ltsession_metainfo.add_torrent = lambda *_: fake_handle self.ltmgr.ltsession_metainfo.remove_torrent = lambda *_: None fake_alert = type('lt.metadata_received_alert', (object,), dict(handle=fake_handle)) self.ltmgr.ltsession_metainfo.pop_alerts = lambda: [fake_alert] self.ltmgr.get_metainfo(unhexlify(infohash), metainfo_cb) return test_deferred
def shutdown(self):
    self.shutdown_task_manager()

    # remove all upnp mappings
    for upnp_handle in self.upnp_mapping_dict.itervalues():
        self.get_session().delete_port_mapping(upnp_handle)
    self.upnp_mapping_dict = None

    self.get_session().stop_upnp()

    # Save libtorrent state (bencoded data is binary, so write in 'wb' mode)
    with open(os.path.join(self.tribler_session.config.get_state_dir(), LTSTATE_FILENAME), 'wb') as ltstate_file:
        ltstate_file.write(lt.bencode(self.get_session().save_state()))

    for ltsession in self.ltsessions.itervalues():
        del ltsession
    self.ltsessions = None
    self.ltsession_metainfo = None

    # remove metadata temporary directory
    rmtree(self.metadata_tmpdir)
    self.metadata_tmpdir = None

    self.tribler_session = None
def save_state(self): state = lt.session.save_state(self) with open(os.path.join(data_dir, "session"), 'wb') as f: f.write(lt.bencode(state)) self.log.debug("Session state saved.")
def make_torrent_file(input, userabortflag=None, userprogresscallback=lambda x: None): """ Create a torrent file from the supplied input. Returns a (infohash,metainfo) pair, or (None,None) on userabort. """ (info, piece_length) = makeinfo(input, userabortflag, userprogresscallback) if userabortflag is not None and userabortflag.isSet(): return None, None if info is None: return None, None metainfo = {'info': info, 'encoding': input['encoding'], 'creation date': long(time())} metainfo = create_valid_metainfo(metainfo) # http://www.bittorrent.org/beps/bep_0005.html says both announce and nodes # are not allowed, but some torrents (Azureus?) apparently violate this. if input['nodes'] is None and input['announce'] is None: raise ValueError('No tracker set') for key in ['announce', 'announce-list', 'nodes', 'comment', 'created by', 'httpseeds', 'url-list']: if input[key] is not None and len(input[key]) > 0: metainfo[key] = input[key] if key == 'comment': metainfo['comment.utf-8'] = uniconvert(input['comment'], 'utf-8') if 'private' in input: metainfo['info']['private'] = input['private'] if 'anonymous' in input: metainfo['info']['anonymous'] = input['anonymous'] # Two places where infohash calculated, here and in TorrentDef. # Elsewhere: must use TorrentDef.get_infohash() to allow P2PURLs. infohash = sha1(bencode(info)).digest() return infohash, metainfo
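# make_torrent_file() returns the metainfo as a plain dict; getting an actual
# .torrent on disk still requires bencoding it. A minimal sketch, using the same
# bencode as above and a hypothetical output path:
def write_metainfo_to_disk(metainfo, torrent_path="/tmp/example.torrent"):
    # bencode() serialises the nested metainfo dict into the .torrent wire format
    with open(torrent_path, "wb") as f:
        f.write(bencode(metainfo))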
def make_torrent(self, tracker_url, torrent_name, dir_name): mkdir_p('torrent_files') fs = lt.file_storage() lt.add_files(fs, dir_name) t = lt.create_torrent(fs) t.add_tracker(tracker_url) lt.set_piece_hashes(t, './torrent_data') f = open(torrent_name, "wb") f.write(lt.bencode(t.generate())) f.close() e = lt.bdecode(open(torrent_name, 'rb').read()) info = lt.torrent_info(e) params = { 'save_path': './torrent_data', 'ti': info, 'seed_mode': True } h = self.ses.add_torrent(params) # Wait a bit for the tracker sleep(5)
def shutdown(self, timeout=30):
    self.tribler_session.notify_shutdown_state("Shutting down Libtorrent Manager...")
    # If the libtorrent session has pending disk I/O, wait until the timeout (default: 30 seconds) to let it finish.
    # In between, ask for session stats to check whether the state is clean for shutdown.
    if not self.is_shutdown_ready() and timeout > 5:
        self.tribler_session.notify_shutdown_state("Waiting for Libtorrent to finish...")
        self.post_session_stats()
        later = Deferred().addCallbacks(lambda _: self.shutdown(timeout - 5), lambda _: None)
        self.register_anonymous_task("reschedule_shutdown", later, delay=5.0)
        return

    self.shutdown_task_manager()

    # remove all upnp mappings
    for upnp_handle in self.upnp_mapping_dict.values():
        self.get_session().delete_port_mapping(upnp_handle)
    self.upnp_mapping_dict = None

    self.get_session().stop_upnp()

    # Save libtorrent state (bencoded data is binary, so write in 'wb' mode)
    with open(os.path.join(self.tribler_session.config.get_state_dir(), LTSTATE_FILENAME), 'wb') as ltstate_file:
        ltstate_file.write(lt.bencode(self.get_session().save_state()))

    for ltsession in self.ltsessions.values():
        del ltsession
    self.ltsessions = None
    self.ltsession_metainfo = None

    # remove metadata temporary directory
    rmtree(self.metadata_tmpdir)
    self.metadata_tmpdir = None

    self.tribler_session = None
def magnetToTorrent(self, magnet):
    session = libtorrent.session()
    session.start_dht()
    session.add_dht_router("router.bittorrent.com", 6881)
    session.add_dht_router("router.utorrent.com", 6881)
    session.add_dht_router("router.bitcomet.com", 6881)
    session.listen_on(6881, 6891)
    session.set_alert_mask(libtorrent.alert.category_t.storage_notification)
    handle = libtorrent.add_magnet_uri(session, magnet, {'save_path': self.storageDirectory})

    iterator = 0
    progressBar = xbmcgui.DialogProgress()
    progressBar.create('Please wait', 'Converting the magnet link.')
    while not handle.has_metadata():
        time.sleep(0.1)
        progressBar.update(iterator)
        iterator += 1
        if iterator == 100:
            iterator = 0
        if progressBar.iscanceled():
            progressBar.update(0)
            progressBar.close()
            return
    progressBar.update(0)
    progressBar.close()

    torrent = libtorrent.create_torrent(handle.get_torrent_info())
    with open(self.torrentFile, "wb") as torrent_file:
        torrent_file.write(libtorrent.bencode(torrent.generate()))
    session.remove_torrent(handle)
def save_resume_data_alert(alert): self.log.info("save_resume_data_alert %s", alert.handle.get_torrent_info().name()) try: with open(alert.handle.save_path() + "/" + alert.handle.get_torrent_info().name() + ".fastresume", 'wb') as fd: fd.write(libtorrent.bencode(alert.resume_data)) except (IOError, EOFError) as e: self.log.error("Unable to save fastresume %s", e)
def _save_torrent_info(self, torr_handle):
    """
    Save torrent metadata and a .torrent file for resume.

    @param torr_handle: object - torrent handle
    @return:
    """
    if self._persistent:
        info_hash = str(torr_handle.info_hash())
        torr_filepath = os.path.join(self._resume_dir, info_hash + '.torrent')
        meta_filepath = os.path.join(self._resume_dir, info_hash + '.resume')
        torr_info = torr_handle.get_torrent_info()
        torr_file = libtorrent.create_torrent(torr_info)
        torr_content = torr_file.generate()
        torr_bencoded = libtorrent.bencode(torr_content)
        with open(torr_filepath, 'wb') as t_file:
            t_file.write(torr_bencoded)
        metadata = {'name': torr_handle.name(),
                    'info_hash': info_hash,
                    'save_path': torr_handle.save_path(),
                    'resume_data': None}
        with open(meta_filepath, mode='wb') as m_file:
            pickle.dump(metadata, m_file)
    else:
        raise TorrenterError('Trying to save torrent metadata for a non-persistent instance!')
def file_complete(self, torrent): info_hash=str(torrent.info_hash()) nt=lt.create_torrent(torrent) tname=self._tname(info_hash) f = open(tname, 'wb') f.write(lt.bencode(nt.generate())) f.close()
def fetch_magnet(magnet_uri):
    tempdir = tempfile.mkdtemp()
    logger.debug("Fetching magnet to '%s'" % tempdir)
    params = {
        "save_path": tempdir,
        "duplicate_is_error": True,
        "paused": False,
        "auto_managed": True,
        "url": magnet_uri,
        "storage_mode": lt.storage_mode_t(2)
    }
    handle = ses.add_torrent(params)

    def cleanup():
        ses.remove_torrent(handle)
        shutil.rmtree(tempdir)

    while not handle.has_metadata():
        try:
            time.sleep(1)
        except KeyboardInterrupt as e:
            logger.debug("Interrupted! %s" % str(e))
            cleanup()
            return

    logger.debug("Magnet %s fetched!" % magnet_uri)
    torrent_data = lt.create_torrent(handle.get_torrent_info()).generate()
    add_from_torrent_info(handle.get_torrent_info(), lt.bencode(torrent_data))
    cleanup()
def magnet2torrent(link, torrent_file): sess = lt.session() sess.add_dht_router('router.bittorrent.com', 6881) sess.add_dht_router('router.utorrent.com', 6881) sess.add_dht_router('router.bitcomet.com', 6881) sess.add_dht_router('dht.transmissionbt.com', 6881) sess.start_dht(); params = { "save_path": 'D:\\Desktop', #"storage_mode":lt.storage_mode_t.storage_mode_sparse, #"paused": True, #"auto_managed": True, "duplicate_is_error": True } handle = lt.add_magnet_uri(sess, link, params) # waiting for metadata while (not handle.has_metadata()): time.sleep(5) # create a torrent torinfo = handle.get_torrent_info() torfile = lt.create_torrent(torinfo) torcontent = lt.bencode(torfile.generate()) # save to file t = open(torrent_file, "wb") t.write(torcontent) t.close() return True
def generate_torrent_from_magnet(url):
    session = None
    handle = None
    try:
        session = libtorrent.session()
        tempdir = tempfile.mkdtemp()
        params = {
            'save_path': tempdir,
            'storage_mode': libtorrent.storage_mode_t(2),
            'paused': False,
            'auto_managed': True,
            'duplicate_is_error': True
        }
        handle = libtorrent.add_magnet_uri(session, url, params)
        while not handle.has_metadata():
            time.sleep(.1)
        session.pause()

        torinfo = handle.get_torrent_info()
        fs = libtorrent.file_storage()
        for file in torinfo.files():
            fs.add_file(file)
        torfile = libtorrent.create_torrent(fs)
        torfile.set_comment(torinfo.comment())
        torfile.set_creator(torinfo.creator())
        torrent_data = libtorrent.bencode(torfile.generate())
        session.remove_torrent(handle)
        return torrent_data
    except Exception:
        # handle and session are pre-initialised, so this cleanup cannot raise NameError
        torrent_data = None
        if handle and session:
            session.remove_torrent(handle)
        return torrent_data
def test_read_resume_data(self): resume_data = lt.bencode( { "file-format": "libtorrent resume file", "info-hash": "abababababababababab", "name": "test", "save_path": ".", "peers": "\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02", "file_priority": [0, 1, 1], } ) tp = lt.read_resume_data(resume_data) self.assertEqual(tp.name, "test") self.assertEqual(tp.info_hash, lt.sha1_hash("abababababababababab")) self.assertEqual(tp.file_priorities, [0, 1, 1]) self.assertEqual(tp.peers, [("1.1.1.1", 1), ("2.2.2.2", 2)]) ses = lt.session({"alert_mask": lt.alert.category_t.all_categories}) h = ses.add_torrent(tp) h.connect_peer(("3.3.3.3", 3)) for i in range(0, 10): alerts = ses.pop_alerts() for a in alerts: print(a.message()) time.sleep(0.1)
def render_GET(self, request): """ .. http:get:: /download/(string: infohash)/torrent A GET request to this endpoint returns the .torrent file associated with the specified download. **Example request**: .. sourcecode:: none curl -X GET http://localhost:8085/downloads/4344503b7e797ebf31582327a5baae35b11bda01/torrent **Example response**: The contents of the .torrent file. """ download = self.session.get_download(self.infohash) if not download: return DownloadSpecificEndpoint.return_404(request) if not download.handle or not download.handle.is_valid() or not download.handle.has_metadata(): return DownloadSpecificEndpoint.return_404(request) torrent_info = get_info_from_handle(download.handle) t = create_torrent(torrent_info) torrent = t.generate() bencoded_torrent = bencode(torrent) request.setHeader(b'content-type', 'application/x-bittorrent') request.setHeader(b'Content-Disposition', 'attachment; filename=%s.torrent' % hexlify(self.infohash)) return bencoded_torrent
def magnet2t(link, tfile): sess = lt.session() params = { "save_path": '/', "storage_mode": lt.storage_mode_t.storage_mode_sparse, "paused": True, "auto_managed": True, "duplicate_is_error": True } try: handle = lt.add_magnet_uri(sess, link, params) state_str = ['queued', 'checking', 'downloading metadata', 'downloading', 'finished', 'seeding', 'allocating'] while (not handle.has_metadata()): s = handle.status() print '%.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s' % ( s.progress * 100, s.download_rate / 1000, s.upload_rate / 1000, s. num_peers, state_str[s.state]) time.sleep(5) print handle.has_metadata() torinfo = handle.get_torrent_info() torfile = lt.create_torrent(torinfo) t = open(tfile, "wb") t.write(lt.bencode(torfile.generate())) t.close() print '%s generated!' % tfile except Exception, ex: print Exception, ":", ex return False
def crawl(magnet_link): print magnet_link h = lt.add_magnet_uri(ses, magnet_link, params) timeout_counter = 0 while (not h.has_metadata()): #print "Sleeping..." time.sleep(1) # Kill this torrent when the timeout reaches 3minutes timeout_counter += 1 if timeout_counter == 180: with open("failed_magnets.txt", "a") as fail: fail.write(magnet_link + '\n') return if h.has_metadata(): #print "Got the metadata" torinfo = h.get_torrent_info() torfile = lt.create_torrent( torinfo ) name = (magnet_link.split("magnet:?xt=urn:btih:")[1][:40]).upper() with open( name + ".torrent", "wb" ) as f: f.write(lt.bencode(torfile.generate())) return
def test_preformatted(self): encoded = lt.bencode((1, 2, 3, 4, 5)) self.assertEqual(encoded, b'\x01\x02\x03\x04\x05')
def test_float(self): # TODO: this should throw a TypeError in the future with self.assertWarns(DeprecationWarning): encoded = lt.bencode(1.337) self.assertEqual(encoded, b'0:')
def test_bytes(self): encoded = lt.bencode(b'foo') self.assertEqual(encoded, b'3:foo')
def magnet2torrent_libtorrent(magnet, output_name=None):
    if output_name and not os.path.isdir(output_name) and not os.path.isdir(os.path.dirname(os.path.abspath(output_name))):
        print("Invalid output folder: " + os.path.dirname(os.path.abspath(output_name)))
        print("")
        sys.exit(0)

    tempdir = tempfile.mkdtemp()
    ses = libtorrent.session()
    # one could want to set this
    ses.listen_on(6881, 6882)
    ses.add_dht_router("router.utorrent.com", 6881)
    ses.add_dht_router("router.bittorrent.com", 6881)
    ses.add_dht_router("router.bitcomet.com", 6881)
    ses.add_dht_router("dht.transmissionbt.com", 6881)
    ses.add_dht_router("dht.aelitis.com", 6881)
    ses.add_dht_router("67.215.246.10", 6881)
    ses.add_dht_router("82.221.103.244", 6881)
    ses.start_dht()
    ses.start_lsd()
    ses.start_upnp()
    ses.start_natpmp()

    # pass the magnet link as 'url' so add_torrent() can be used
    params = {
        'url': magnet,
        'save_path': tempdir,
        'storage_mode': libtorrent.storage_mode_t(2),
        'paused': False,
        'auto_managed': True,
        'duplicate_is_error': True
    }
    # add_magnet_uri is deprecated
    # http://www.rasterbar.com/products/libtorrent/manual.html#add-magnet-uri
    # handle = libtorrent.add_magnet_uri(ses, magnet, params)
    handle = ses.add_torrent(params)

    print("Downloading Metadata (this may take a while)")
    # used to control the "Maybe..." messages printed after sleep(1)
    x = 1
    limit = 120
    while not handle.has_metadata():
        try:
            sleep(1)
            if x > limit:
                print("Maybe your firewall is blocking, or the magnet link is not right...")
                limit += 30
            x += 1
        except KeyboardInterrupt:
            print("Aborting...")
            ses.pause()
            print("Cleanup dir " + tempdir)
            shutil.rmtree(tempdir)
            sys.exit(0)
    ses.pause()
    print('Got metadata, starting torrent download...')

    torinfo = handle.get_torrent_info()
    torfile = libtorrent.create_torrent(torinfo)

    output = os.path.abspath(torinfo.name() + ".torrent")
    if output_name:
        if os.path.isdir(output_name):
            output = os.path.abspath(os.path.join(output_name, torinfo.name() + ".torrent"))
        elif os.path.isdir(os.path.dirname(os.path.abspath(output_name))):
            output = os.path.abspath(output_name)
    print("Saving torrent file here: " + output + " ...")

    torcontent = libtorrent.bencode(torfile.generate())
    with open(output, "wb") as f:
        f.write(torcontent)
    print("Saved! Cleaning up dir: " + tempdir)
    ses.remove_torrent(handle)
    shutil.rmtree(tempdir)
    return output
]) and alert.info_hash not in seen: seen.add(alert.info_hash) torrent_params = get_params_for_info_hash(alert.info_hash) handles.add(session.add_torrent(torrent_params)) # TODO: https://www.libtorrent.org/reference-Core.html#post_torrent_updates() to_remove = set() for handle in handles: status = handle.status() if (status.has_metadata): metadata = status.torrent_file print('<ut_metadata> {} ({})'.format(metadata.name(), handle.info_hash()), flush=True) with open(metadata.name() + '.torrent', 'wb') as f: f.write(lt.bencode(lt.create_torrent(metadata).generate())) meta_info_count += 1 to_remove.add(handle) else: print('{} - {} peers ({} connected), prio {}, {} {:2}%'.format( status.info_hash, status.list_peers, status.num_peers, status.queue_position, state_str[status.state], status.progress * 100), flush=True) for handle in to_remove: session.remove_torrent(handle) handles -= to_remove time.sleep(10)
def play(url, xlistitem={}, is_view=None, subtitle=""): allocate = True try: import platform xbmc.log( "XXX KODI XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" ) xbmc.log("OS platform: %s %s" % (platform.system(), platform.release())) xbmc.log("xbmc/kodi version: %s" % xbmc.getInfoLabel("System.BuildVersion")) xbmc_version = int(xbmc.getInfoLabel("System.BuildVersion")[:2]) xbmc.log("xbmc/kodi version number: %s" % xbmc_version) xbmc.log( "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX KODI XXXX" ) _platform = get_platform() if str(_platform['system']) in [ "android_armv7", "linux_armv6", "linux_armv7" ]: allocate = False # -- log ------------------------------------------------ xbmc.log( "XXX platform XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" ) xbmc.log("_platform['system']: %s" % _platform['system']) xbmc.log("allocate: %s" % allocate) xbmc.log( "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX platform XXXX" ) # -- ---------------------------------------------------- except: pass DOWNLOAD_PATH = config.get_setting("downloadpath") # -- adfly: ------------------------------------ if url.startswith("http://adf.ly/"): try: data = scrapertools.downloadpage(url) url = decode_adfly(data) except: ddd = xbmcgui.Dialog() ddd.ok( "pelisalacarta-MCT: Sin soporte adf.ly", "El script no tiene soporte para el acortador de urls adf.ly.", "", "url: " + url) return # -- Necesario para algunas webs ---------------------------- if not url.endswith(".torrent") and not url.startswith("magnet"): t_file = scrapertools.get_header_from_response( url, header_to_get="location") if len(t_file) > 0: url = t_file t_file = scrapertools.get_header_from_response( url, header_to_get="location") if len(t_file) > 0: url = t_file # -- Crear dos carpetas en descargas para los archivos ------ save_path_videos = os.path.join(DOWNLOAD_PATH, "torrent-videos") save_path_torrents = os.path.join(DOWNLOAD_PATH, "torrent-torrents") if not os.path.exists(save_path_torrents): os.mkdir(save_path_torrents) # -- Usar - archivo torrent desde web, magnet o HD --------- if not os.path.isfile(url) and not url.startswith("magnet"): # -- http - crear archivo torrent ----------------------- data = url_get(url) # -- El nombre del torrent será el que contiene en los -- # -- datos. 
- re_name = urllib.unquote( scrapertools.get_match(data, ':name\d+:(.*?)\d+:')) torrent_file = filetools.join(save_path_torrents, filetools.encode(re_name + '.torrent')) f = open(torrent_file, 'wb') f.write(data) f.close() elif os.path.isfile(url): # -- file - para usar torrens desde el HD --------------- torrent_file = url else: # -- magnet --------------------------------------------- torrent_file = url # ----------------------------------------------------------- # -- MCT - MiniClienteTorrent ------------------------------- ses = lt.session() # -- log ---------------------------------------------------- xbmc.log("### Init session ########") xbmc.log(lt.version) xbmc.log("#########################") # -- -------------------------------------------------------- ses.add_dht_router("router.bittorrent.com", 6881) ses.add_dht_router("router.utorrent.com", 6881) ses.add_dht_router("dht.transmissionbt.com", 6881) trackers = [ "udp://tracker.openbittorrent.com:80/announce", "http://tracker.torrentbay.to:6969/announce", "http://tracker.pow7.com/announce", "udp://tracker.ccc.de:80/announce", "udp://open.demonii.com:1337", "http://9.rarbg.com:2710/announce", "http://bt.careland.com.cn:6969/announce", "http://explodie.org:6969/announce", "http://mgtracker.org:2710/announce", "http://tracker.best-torrents.net:6969/announce", "http://tracker.tfile.me/announce", "http://tracker1.wasabii.com.tw:6969/announce", "udp://9.rarbg.com:2710/announce", "udp://9.rarbg.me:2710/announce", "udp://coppersurfer.tk:6969/announce", "http://www.spanishtracker.com:2710/announce", "http://www.todotorrents.com:2710/announce", ] video_file = "" # -- magnet2torrent ----------------------------------------- if torrent_file.startswith("magnet"): try: import zlib btih = hex( zlib.crc32( scrapertools.get_match( torrent_file, 'magnet:\?xt=urn:(?:[A-z0-9:]+|)([A-z0-9]{32})')) & 0xffffffff) files = [ f for f in os.listdir(save_path_torrents) if os.path.isfile(os.path.join(save_path_torrents, f)) ] for file in files: if btih in os.path.basename(file): torrent_file = os.path.join(save_path_torrents, file) except: pass if torrent_file.startswith("magnet"): try: tempdir = tempfile.mkdtemp() except IOError: tempdir = os.path.join(save_path_torrents, "temp") if not os.path.exists(tempdir): os.mkdir(tempdir) params = { 'save_path': tempdir, 'trackers': trackers, 'storage_mode': lt.storage_mode_t.storage_mode_allocate, 'paused': False, 'auto_managed': True, 'duplicate_is_error': True } h = lt.add_magnet_uri(ses, torrent_file, params) dp = xbmcgui.DialogProgress() dp.create('pelisalacarta-MCT') while not h.has_metadata(): message, porcent, msg_file, s, download = getProgress( h, "Creando torrent desde magnet") dp.update(porcent, message, msg_file) if s.state == 1: download = 1 if dp.iscanceled(): dp.close() remove_files(download, torrent_file, video_file, ses, h) return h.force_dht_announce() xbmc.sleep(1000) dp.close() info = h.get_torrent_info() data = lt.bencode(lt.create_torrent(info).generate()) torrent_file = os.path.join( save_path_torrents, unicode(info.name() + "-" + btih, "'utf-8'", errors="replace") + ".torrent") f = open(torrent_file, 'wb') f.write(data) f.close() ses.remove_torrent(h) shutil.rmtree(tempdir) # ----------------------------------------------------------- # -- Archivos torrent --------------------------------------- e = lt.bdecode(open(torrent_file, 'rb').read()) info = lt.torrent_info(e) # -- El más gordo o uno de los más gordo se entiende que es - # -- el vídeo o es el vídeo que se usará como referencia - # -- 
para el tipo de archivo - xbmc.log("##### Archivos ## %s ##" % len(info.files())) _index_file, _video_file, _size_file = get_video_file(info) # -- Prioritarizar/Seleccionar archivo----------------------- _index, video_file, video_size, len_files = get_video_files_sizes(info) if len_files == 0: dp = xbmcgui.Dialog().ok( "No se puede reproducir", "El torrent no contiene ningún archivo de vídeo") if _index == -1: _index = _index_file video_file = _video_file video_size = _size_file _video_file_ext = os.path.splitext(_video_file)[1] xbmc.log("##### _video_file_ext ## %s ##" % _video_file_ext) if (_video_file_ext == ".avi" or _video_file_ext == ".mp4") and allocate: xbmc.log("##### storage_mode_t.storage_mode_allocate (" + _video_file_ext + ") #####") h = ses.add_torrent({ 'ti': info, 'save_path': save_path_videos, 'trackers': trackers, 'storage_mode': lt.storage_mode_t.storage_mode_allocate }) else: xbmc.log("##### storage_mode_t.storage_mode_sparse (" + _video_file_ext + ") #####") h = ses.add_torrent({ 'ti': info, 'save_path': save_path_videos, 'trackers': trackers, 'storage_mode': lt.storage_mode_t.storage_mode_sparse }) allocate = True # ----------------------------------------------------------- # -- Descarga secuencial - trozo 1, trozo 2, ... ------------ h.set_sequential_download(True) h.force_reannounce() h.force_dht_announce() # -- Inicio de variables para 'pause' automático cuando el - # -- el vídeo se acerca a una pieza sin completar - is_greater_num_pieces = False is_greater_num_pieces_plus = False is_greater_num_pieces_pause = False porcent4first_pieces = int(video_size * 0.000000005) if porcent4first_pieces < 10: porcent4first_pieces = 10 if porcent4first_pieces > 100: porcent4first_pieces = 100 porcent4last_pieces = int(porcent4first_pieces / 2) num_pieces_to_resume = int(video_size * 0.0000000025) if num_pieces_to_resume < 5: num_pieces_to_resume = 5 if num_pieces_to_resume > 25: num_pieces_to_resume = 25 xbmc.log("##### porcent4first_pieces ## %s ##" % porcent4first_pieces) xbmc.log("##### porcent4last_pieces ## %s ##" % porcent4last_pieces) xbmc.log("##### num_pieces_to_resume ## %s ##" % num_pieces_to_resume) # -- Prioritarizar o seleccionar las piezas del archivo que - # -- se desea reproducir con 'file_priorities' - piece_set = set_priority_pieces(h, _index, video_file, video_size, porcent4first_pieces, porcent4last_pieces, allocate) # -- Crear diálogo de progreso para el primer bucle --------- dp = xbmcgui.DialogProgress() dp.create('pelisalacarta-MCT') _pieces_info = {} # -- Doble bucle anidado ------------------------------------ # -- Descarga - Primer bucle - while not h.is_seed(): s = h.status() xbmc.sleep(100) # -- Recuperar los datos del progreso ------------------- message, porcent, msg_file, s, download = getProgress(h, video_file, _pf=_pieces_info) # -- Si hace 'checking' existe descarga ----------------- # -- 'download' Se usará para saber si hay datos - # -- descargados para el diálogo de 'remove_files' - if s.state == 1: download = 1 # -- Player - play -------------------------------------- # -- Comprobar si se han completado las piezas para el - # -- inicio del vídeo - first_pieces = True _c = 0 for i in range(piece_set[0], piece_set[porcent4first_pieces]): first_pieces &= h.have_piece(i) if h.have_piece(i): _c += 1 _pieces_info = { 'current': 0, 'continuous': "%s/%s" % (_c, porcent4first_pieces), 'continuous2': "", 'have': h.status().num_pieces, 'len': len(piece_set) } last_pieces = True if not allocate: _c = len(piece_set) - 1 _cc = 0 for i in range( 
len(piece_set) - porcent4last_pieces, len(piece_set)): last_pieces &= h.have_piece(i) if h.have_piece(i): _c -= 1 _cc += 1 _pieces_info['continuous2'] = "[%s/%s] " % (_cc, porcent4last_pieces) if is_view != "Ok" and first_pieces and last_pieces: _pieces_info['continuous2'] = "" xbmc.log("##### porcent [%.2f%%]" % (s.progress * 100)) is_view = "Ok" dp.close() # -- Player - Ver el vídeo -------------------------- playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO) playlist.clear() ren_video_file = os.path.join(save_path_videos, video_file) try: playlist.add(ren_video_file, xlistitem) except: playlist.add(ren_video_file) if xbmc_version < 17: player = play_video(xbmc.PLAYER_CORE_AUTO) else: player = play_video() player.play(playlist) # -- Contador de cancelaciones para la ventana de - # -- 'pause' automático - is_greater_num_pieces_canceled = 0 continuous_pieces = 0 porcent_time = 0.00 current_piece = 0 set_next_continuous_pieces = porcent4first_pieces # -- Impedir que kodi haga 'resume' a un archivo ---- # -- que se reprodujo con anterioridad y que se - # -- eliminó para impedir que intente la reprucción - # -- en una pieza que aún no se ha completado y se - # -- active 'pause' automático - not_resume = True # -- Bandera subTítulos _sub = False # -- Segundo bucle - Player - Control de eventos ---- while player.isPlaying(): xbmc.sleep(100) # -- Añadir subTítulos if subtitle != "" and not _sub: _sub = True player.setSubtitles(subtitle) # -- Impedir que kodi haga 'resume' al inicio --- # -- de la descarga de un archivo conocido - if not_resume: player.seekTime(0) not_resume = False # -- Control 'pause' automático - continuous_pieces = count_completed_continuous_pieces( h, piece_set) if xbmc.Player().isPlaying(): # -- Porcentage del progreso del vídeo ------ player_getTime = player.getTime() player_getTotalTime = player.getTotalTime() porcent_time = player_getTime / player_getTotalTime * 100 # -- Pieza que se está reproduciendo -------- current_piece = int(porcent_time / 100 * len(piece_set)) # -- Banderas de control -------------------- is_greater_num_pieces = ( current_piece > continuous_pieces - num_pieces_to_resume) is_greater_num_pieces_plus = ( current_piece + porcent4first_pieces > continuous_pieces) is_greater_num_pieces_finished = ( current_piece + porcent4first_pieces >= len(piece_set)) # -- Activa 'pause' automático -------------- if is_greater_num_pieces and not player.paused and not is_greater_num_pieces_finished: is_greater_num_pieces_pause = True player.pause() if continuous_pieces >= set_next_continuous_pieces: set_next_continuous_pieces = continuous_pieces + num_pieces_to_resume next_continuous_pieces = str( continuous_pieces - current_piece) + "/" + str(set_next_continuous_pieces - current_piece) _pieces_info = { 'current': current_piece, 'continuous': next_continuous_pieces, 'continuous2': _pieces_info['continuous2'], 'have': h.status().num_pieces, 'len': len(piece_set) } # -- Cerrar el diálogo de progreso -------------- if player.resumed: dp.close() # -- Mostrar el diálogo de progreso ------------- if player.paused: # -- Crear diálogo si no existe ------------- if not player.statusDialogoProgress: dp = xbmcgui.DialogProgress() dp.create('pelisalacarta-MCT') player.setDialogoProgress() # -- Diálogos de estado en el visionado ----- if not h.is_seed(): # -- Recuperar los datos del progreso --- message, porcent, msg_file, s, download = getProgress( h, video_file, _pf=_pieces_info) dp.update(porcent, message, msg_file) else: dp.update(100, "Descarga completa: " + video_file) # -- Se 
canceló el progreso en el visionado - # -- Continuar - if dp.iscanceled(): dp.close() player.pause() # -- Se canceló el progreso en el visionado - # -- en la ventana de 'pause' automático. - # -- Parar si el contador llega a 3 - if dp.iscanceled() and is_greater_num_pieces_pause: is_greater_num_pieces_canceled += 1 if is_greater_num_pieces_canceled == 3: player.stop() # -- Desactiva 'pause' automático y --------- # -- reinicia el contador de cancelaciones - if not dp.iscanceled( ) and not is_greater_num_pieces_plus and is_greater_num_pieces_pause: dp.close() player.pause() is_greater_num_pieces_pause = False is_greater_num_pieces_canceled = 0 # -- El usuario cancelo el visionado -------- # -- Terminar - if player.ended: # -- Diálogo eliminar archivos ---------- remove_files(download, torrent_file, video_file, ses, h) return # -- Kodi - Se cerró el visionado ----------------------- # -- Continuar | Terminar - if is_view == "Ok" and not xbmc.Player().isPlaying(): if info.num_files() == 1: # -- Diálogo continuar o terminar --------------- d = xbmcgui.Dialog() ok = d.yesno('pelisalacarta-MCT', 'XBMC-Kodi Cerró el vídeo.', '¿Continuar con la sesión?') else: ok = False # -- SI --------------------------------------------- if ok: # -- Continuar: --------------------------------- is_view = None else: # -- Terminar: ---------------------------------- # -- Comprobar si el vídeo pertenece a una ------ # -- lista de archivos - _index, video_file, video_size, len_files = get_video_files_sizes( info) if _index == -1 or len_files == 1: # -- Diálogo eliminar archivos -------------- remove_files(download, torrent_file, video_file, ses, h) return else: # -- Lista de archivos. Diálogo de opciones - piece_set = set_priority_pieces(h, _index, video_file, video_size, porcent4first_pieces, porcent4last_pieces, allocate) is_view = None dp = xbmcgui.DialogProgress() dp.create('pelisalacarta-MCT') # -- Mostar progeso antes del visionado ----------------- if is_view != "Ok": dp.update(porcent, message, msg_file) # -- Se canceló el progreso antes del visionado --------- # -- Terminar - if dp.iscanceled(): dp.close() # -- Comprobar si el vídeo pertenece a una lista de - # -- archivos - _index, video_file, video_size, len_files = get_video_files_sizes( info) if _index == -1 or len_files == 1: # -- Diálogo eliminar archivos ------------------ remove_files(download, torrent_file, video_file, ses, h) return else: # -- Lista de archivos. Diálogo de opciones ----- piece_set = set_priority_pieces(h, _index, video_file, video_size, porcent4first_pieces, porcent4last_pieces, allocate) is_view = None dp = xbmcgui.DialogProgress() dp.create('pelisalacarta-MCT') # -- Kodi - Error? - No debería llegar aquí ----------------- if is_view == "Ok" and not xbmc.Player().isPlaying(): dp.close() # -- Diálogo eliminar archivos -------------------------- remove_files(download, torrent_file, video_file, ses, h) return
    # skip directories starting with .
    if os.path.split(root)[1][0] == '.':
        continue

    for f in files:
        # skip files starting with .
        if f[0] == '.':
            continue
        # skip Thumbs.db on windows
        if f == 'Thumbs.db':
            continue

        fname = os.path.join(root[len(parent_input) + 1:], f)
        size = os.path.getsize(os.path.join(parent_input, fname))
        print '%10d kiB  %s' % (size / 1024, fname)
        fs.add_file(fname, size)

if fs.num_files() == 0:
    print 'no files added'
    sys.exit(1)

t = libtorrent.create_torrent(fs, 0, 4 * 1024 * 1024)
t.add_tracker(sys.argv[2])
t.set_creator('libtorrent %s' % libtorrent.version)

libtorrent.set_piece_hashes(t, parent_input, lambda x: sys.stderr.write('.'))
sys.stderr.write('\n')

# bencoded data is binary; write it directly rather than with print,
# which would append a newline after the torrent data
with open('out.torrent', 'wb') as f:
    f.write(libtorrent.bencode(t.generate()))
def test_metadata_received_invalid_torrent_with_value_error(self): """ Testing whether the right operations happen when we receive metadata but the torrent info is invalid and throws Value Error """ def mocked_checkpoint(): raise RuntimeError("This code should not be reached!") mocked_file = MockObject() mocked_file.path = 'test' # The line below should trigger Value Error self.libtorrent_download_impl.handle.trackers = lambda: [{'url': 'no-DHT'}] torrent_dict = {'name': 'test', 'piece length': 42, 'pieces': '', 'files': []} get_info_from_handle(self.libtorrent_download_impl.handle).metadata = lambda: lt.bencode(torrent_dict) get_info_from_handle(self.libtorrent_download_impl.handle).files = lambda: [mocked_file] self.libtorrent_download_impl.checkpoint = mocked_checkpoint self.libtorrent_download_impl.on_metadata_received_alert(None)
def network_create_engine_wrapper(self, pstate, checkpoint_disabled=False, share_mode=False): with self.dllock: self._logger.debug("LibtorrentDownloadImpl: network_create_engine_wrapper()") atp = {} atp["save_path"] = os.path.normpath(os.path.join(get_default_dest_dir(), self.get_dest_dir())) atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse atp["paused"] = True atp["auto_managed"] = False atp["duplicate_is_error"] = False atp["hops"] = self.get_hops() if share_mode: atp["flags"] = lt.add_torrent_params_flags_t.flag_share_mode self.set_checkpoint_disabled(checkpoint_disabled) resume_data = pstate.get('state', 'engineresumedata') if pstate else None if not isinstance(self.tdef, TorrentDefNoMetainfo): metainfo = self.tdef.get_metainfo() torrentinfo = lt.torrent_info(metainfo) self.orig_files = [file_entry.path.decode('utf-8') for file_entry in torrentinfo.files()] is_multifile = len(self.orig_files) > 1 commonprefix = os.path.commonprefix(self.orig_files) if is_multifile else '' swarmname = commonprefix.partition(os.path.sep)[0] if is_multifile and swarmname != self.correctedinfoname: for i, filename_old in enumerate(self.orig_files): filename_new = os.path.join(self.correctedinfoname, filename_old[len(swarmname) + 1:]) # Path should be unicode if Libtorrent is using std::wstring (on Windows), # else we use str (on Linux). try: torrentinfo.rename_file(i, filename_new) except TypeError: torrentinfo.rename_file(i, filename_new.encode("utf-8")) self.orig_files[i] = filename_new atp["ti"] = torrentinfo has_resume_data = resume_data and isinstance(resume_data, dict) if has_resume_data: atp["resume_data"] = lt.bencode(resume_data) else: atp["url"] = self.tdef.get_url() or "magnet:?xt=urn:btih:" + hexlify(self.tdef.get_infohash()) atp["name"] = self.tdef.get_name_as_unicode() def on_torrent_added(handle): self.handle = handle if self.handle.is_valid(): self.set_selected_files() user_stopped = pstate.get('download_defaults', 'user_stopped') if pstate else False # If we lost resume_data always resume download in order to force checking if not user_stopped or not resume_data: self.handle.resume() # If we only needed to perform checking, pause download after it is complete self.pause_after_next_hashcheck = user_stopped self.set_vod_mode(self.get_mode() == DLMODE_VOD) # Limit the amount of connections if we have specified that self.handle.set_max_connections(self.session.config.get_libtorrent_max_conn_download()) return self def on_torrent_failed(failure): self._logger.error("Could not add torrent to LibtorrentManager %s", self.tdef.get_name_as_unicode()) self.cew_scheduled = False return Failure((self, pstate)) return self.ltmgr.add_torrent(self, atp).addCallbacks(on_torrent_added, on_torrent_failed)
def info_hashes(info: Dict[bytes, Any]) -> lt.info_hash_t: return lt.info_hash_t(lt.sha1_hash( hashlib.sha1(lt.bencode(info)).digest()))
def torrent_file(torrent_dict: Dict[bytes, Any]) -> bytes: return lt.bencode(torrent_dict)
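# The two helpers above cover the BitTorrent v1 case: the info-hash is the SHA-1
# of the bencoded 'info' dict, and a .torrent file is just the bencoded metainfo
# dict. A small usage sketch with a hand-built single-file torrent, assuming the
# same hashlib/lt imports as above (all field values here are illustrative, not
# taken from a real torrent):
def example_usage() -> None:
    info = {
        b'name': b'example.bin',
        b'piece length': 16384,
        b'length': 4,
        b'pieces': hashlib.sha1(b'data').digest(),
    }
    metainfo = {b'announce': b'http://tracker.example/announce', b'info': info}
    print(info_hashes(info))            # info_hash_t wrapping sha1(bencode(info))
    print(len(torrent_file(metainfo)))  # size of the bencoded .torrent payload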
async def save_session_state(self): state = await asyncio.get_event_loop().run_in_executor( None, functools.partial(self.session.save_state)) f = Path(self.state_dir, 'session.state') f.write_bytes(lt.bencode(state))
def test_bencode(self): encoded = lt.bencode({'a': 1, 'b': [1, 2, 3], 'c': 'foo'}) self.assertEqual(encoded, b'd1:ai1e1:bli1ei2ei3ee1:c3:fooe')
def save_state(state_file, session):
    # bencode() returns bytes, so write the session state in binary mode and close the file
    with open(state_file, 'wb') as f:
        f.write(lt.bencode(session.save_state(0x4)))
def set_engineresumedata(self, engineresumedata): self.config['state']['engineresumedata'] = base64.b64encode( lt.bencode(engineresumedata)).decode('utf-8')
def test_httpsession_failure_reason_in_dict(self): session = HttpTrackerSession("localhost", ("localhost", 8475), "/announce", 5) session._infohash_list = [] session._process_scrape_response(bencode({'failure reason': 'test'})) self.assertTrue(session.is_failed)
def save_torrent(self): with self.save_lock: path = os.path.join(session.torrent_dir, self.id + '.torrent') data = dict(info=lt.bdecode(self.metadata)) with open(path, "wb") as f: f.write(lt.bencode(data))
def create_torrent_file(file_path_list, params, torrent_filepath=None): fs = libtorrent.file_storage() # filter all non-files file_path_list_filtered = [] for path in file_path_list: path = path_util.Path(path) if not path.exists(): raise IOError('Path does not exist: %s' % path) elif path.is_file(): file_path_list_filtered.append(path) # get the directory where these files are in. If there are multiple files, take the common directory they are in if len(file_path_list_filtered) == 1: base_path = path_util.split(file_path_list_filtered[0])[0] else: base_path = path_util.abspath(commonprefix(file_path_list_filtered)) # the base_dir directory is the parent directory of the base_path and is passed to the set_piece_hash method if len(file_path_list_filtered) == 1: filename = path_util.basename(file_path_list_filtered[0]) fs.add_file(filename, path_util.getsize(file_path_list_filtered[0])) else: for full_file_path in file_path_list_filtered: #FIXME: there should be a better, cleaner way to define this filename = path_util.join( *full_file_path.parts[len(base_path.parent.parts):]) fs.add_file(str(filename), path_util.getsize(full_file_path)) if params.get(b'piece length'): piece_size = params[b'piece length'] else: piece_size = 0 flags = libtorrent.create_torrent_flags_t.optimize # This flag doesn't exist anymore in libtorrent V1.1.0 if hasattr(libtorrent.create_torrent_flags_t, 'calculate_file_hashes'): flags |= libtorrent.create_torrent_flags_t.calculate_file_hashes params = { k: (v.decode('utf-8') if isinstance(v, bytes) else v) for k, v in params.items() } torrent = libtorrent.create_torrent(fs, piece_size=piece_size, flags=flags) # Python2 wants binary, python3 want unicode if params.get(b'comment'): torrent.set_comment(params[b'comment']) if params.get(b'created by'): torrent.set_creator(params[b'created by']) # main tracker if params.get(b'announce'): torrent.add_tracker(params[b'announce']) # tracker list if params.get(b'announce-list'): tier = 1 for tracker in params[b'announce-list']: torrent.add_tracker(tracker, tier=tier) tier += 1 # DHT nodes # http://www.bittorrent.org/beps/bep_0005.html if params.get(b'nodes'): for node in params[b'nodes']: torrent.add_node(*node) # HTTP seeding # http://www.bittorrent.org/beps/bep_0017.html if params.get(b'httpseeds'): torrent.add_http_seed(params[b'httpseeds']) # Web seeding # http://www.bittorrent.org/beps/bep_0019.html if len(file_path_list) == 1: if params.get(b'urllist', False): torrent.add_url_seed(params[b'urllist']) # read the files and calculate the hashes if len(file_path_list) == 1: libtorrent.set_piece_hashes(torrent, str(base_path)) else: libtorrent.set_piece_hashes(torrent, str(base_path.parent)) t1 = torrent.generate() torrent = libtorrent.bencode(t1) if torrent_filepath: with open(torrent_filepath, 'wb') as f: f.write(torrent) return { 'success': True, 'base_path': base_path, 'base_dir': base_path.parent, 'torrent_file_path': torrent_filepath, 'metainfo': torrent, 'infohash': sha1(bencode(t1[b'info'])).digest() }
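# A sketch of how create_torrent_file() above might be called for a single file;
# the paths and tracker URL are hypothetical, and the byte-string keys mirror
# the params the function actually reads:
def example_create(torrent_filepath='/tmp/example.torrent'):
    result = create_torrent_file(
        ['/tmp/example-data/file.bin'],
        {b'announce': 'http://tracker.example/announce',
         b'created by': 'create_torrent_file example',
         b'comment': 'illustrative only'},
        torrent_filepath=torrent_filepath)
    # 'metainfo' is the bencoded torrent, 'infohash' the SHA-1 over its info dict
    return result['infohash'], result['metainfo']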
def save_state(self): if self.session_state_file is not None: data = lt.bencode(self.session.save_state()) with open(self.session_state_file, 'wb') as f: f.write(data)
    dp = xbmcgui.DialogProgress()
    dp.create(msg_header)

    while not h.has_metadata():
        message, porcent, msg_file, s, download = getProgress(h, "Creating torrent from magnet")
        dp.update(porcent, message, msg_file)
        if s.state == 1:
            download = 1
        if dp.iscanceled():
            dp.close()
            remove_files(download, torrent_file, video_file, ses, h)
            return
        h.force_dht_announce()
        xbmc.sleep(1000)
    dp.close()

    info = h.get_torrent_info()
    data = lt.bencode(lt.create_torrent(info).generate())

    #torrent_file = os.path.join(save_path_torrents, unicode(info.name()+"-"+btih, "'utf-8'", errors="replace") + ".torrent")
    torrent_file = os.path.join(save_path_torrents, info.name() + "-" + btih + ".torrent")
    f = open(torrent_file, 'wb')
    f.write(data)
    f.close()
    ses.remove_torrent(h)
    shutil.rmtree(tempdir)
    # -----------------------------------------------------------

    # -- Torrent files -------------------------------------------
    e = lt.bdecode(open(torrent_file, 'rb').read())
    info = lt.torrent_info(e)

    # -- The largest file, or one of the largest, is assumed to be -
async def test_failed_unicode(self): session = HttpTrackerSession(u"localhost", ("localhost", 8475), "/announce", 5) self.assertRaises(ValueError, session._process_scrape_response, bencode({'failure reason': '\xe9'})) await session.cleanup()
def set_metainfo(self, metainfo): self.config['state']['metainfo'] = base64.b64encode( lt.bencode(metainfo)).decode('utf-8')
def file_complete(self, torrent): info_hash = str(torrent.info_hash()) nt = lt.create_torrent(torrent) tname = self._tname(info_hash) with open(tname, 'wb') as f: f.write(lt.bencode(nt.generate()))
def main():
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('-p', '--port', type='int', help='set listening port')
    parser.add_option(
        '-d', '--max-download-rate', type='float',
        help='the maximum download rate given in kB/s. 0 means infinite.')
    parser.add_option(
        '-u', '--max-upload-rate', type='float',
        help='the maximum upload rate given in kB/s. 0 means infinite.')
    parser.add_option(
        '-s', '--save-path', type='string',
        help='the path where the downloaded file/folder should be placed.')
    parser.add_option(
        '-r', '--proxy-host', type='string',
        help='sets HTTP proxy host and port (separated by \':\')')
    parser.set_defaults(port=6881, max_download_rate=0, max_upload_rate=0,
                        save_path='.', proxy_host='')

    (options, args) = parser.parse_args()

    if options.port < 0 or options.port > 65525:
        options.port = 6881
    options.max_upload_rate *= 1000
    options.max_download_rate *= 1000
    if options.max_upload_rate <= 0:
        options.max_upload_rate = -1
    if options.max_download_rate <= 0:
        options.max_download_rate = -1

    ses = lt.session({'user_agent': 'python_client/' + lt.__version__,
                      'listen_interfaces': '0.0.0.0:' + str(options.port),
                      'download_rate_limit': int(options.max_download_rate),
                      'upload_rate_limit': int(options.max_upload_rate)})

    if options.proxy_host != '':
        ps = lt.proxy_settings()
        ps.type = lt.proxy_type.http
        ps.hostname = options.proxy_host.split(':')[0]
        ps.port = int(options.proxy_host.split(':')[1])
        ses.set_proxy(ps)

    handles = []
    alerts = []

    for f in args:
        atp = {}
        atp["save_path"] = options.save_path
        atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
        atp["flags"] = lt.torrent_flags.duplicate_is_error \
            | lt.torrent_flags.auto_managed
        if f.startswith('magnet:') or f.startswith('http://') or f.startswith('https://'):
            atp["url"] = f
        else:
            info = lt.torrent_info(f)
            print('Adding \'%s\'...' % info.name())
            try:
                atp["resume_data"] = open(os.path.join(
                    options.save_path, info.name() + '.fastresume'), 'rb').read()
            except:
                pass
            atp["ti"] = info

        h = ses.add_torrent(atp)
        handles.append(h)
        h.set_max_connections(60)
        h.set_max_uploads(-1)

    if os.name == 'nt':
        console = WindowsConsole()
    else:
        console = UnixConsole()

    alive = True
    while alive:
        console.clear()
        out = ''

        for h in handles:
            s = h.status()
            if s.has_metadata:
                name = h.torrent_file().name()[:40]
            else:
                name = '-'
            out += 'name: %-40s\n' % name

            if s.state != lt.torrent_status.seeding:
                state_str = ['queued', 'checking', 'downloading metadata',
                             'downloading', 'finished', 'seeding',
                             'allocating', 'checking fastresume']
                out += state_str[s.state] + ' '

                out += '%5.4f%% ' % (s.progress * 100)
                out += progress_bar(s.progress, 49)
                out += '\n'

                out += 'total downloaded: %d Bytes\n' % s.total_done
                out += 'peers: %d seeds: %d distributed copies: %d\n' % \
                    (s.num_peers, s.num_seeds, s.distributed_copies)
                out += '\n'

            out += 'download: %s/s (%s) ' \
                % (add_suffix(s.download_rate), add_suffix(s.total_download))
            out += 'upload: %s/s (%s) ' \
                % (add_suffix(s.upload_rate), add_suffix(s.total_upload))

            if s.state != lt.torrent_status.seeding:
                out += 'info-hash: %s\n' % h.info_hash()
                out += 'next announce: %s\n' % s.next_announce
                out += 'tracker: %s\n' % s.current_tracker

            write_line(console, out)

            print_peer_info(console, h.get_peer_info())
            print_download_queue(console, h.get_download_queue())

            if s.state != lt.torrent_status.seeding:
                try:
                    out = '\n'
                    fp = h.file_progress()
                    ti = h.get_torrent_info()
                    for f, p in zip(ti.files(), fp):
                        out += progress_bar(p / float(f.size), 20)
                        out += ' ' + f.path + '\n'
                    write_line(console, out)
                except:
                    pass

        write_line(console, 76 * '-' + '\n')
        write_line(console, '(q)uit), (p)ause), (u)npause), (r)eannounce\n')
        write_line(console, 76 * '-' + '\n')

        # only print the last 20 alerts
        alerts = ses.pop_alerts()[-20:]
        for a in alerts:
            if type(a) == str:
                write_line(console, a + '\n')
            else:
                write_line(console, a.message() + '\n')

        c = console.sleep_and_input(0.5)
        if not c:
            continue

        if c == 'r':
            for h in handles:
                h.force_reannounce()
        elif c == 'q':
            alive = False
        elif c == 'p':
            for h in handles:
                h.pause()
        elif c == 'u':
            for h in handles:
                h.resume()

    ses.pause()
    for h in handles:
        if not h.is_valid() or not h.status().has_metadata:
            continue
        data = lt.bencode(h.write_resume_data())
        open(os.path.join(options.save_path,
                          h.get_torrent_info().name() + '.fastresume'), 'wb').write(data)
def magnet_to_torrent(self, magnet_uri, destination_folder, timeout, num_try, use_dht, http_proxy):
    import libtorrent as lt

    # parameters
    params = lt.parse_magnet_uri(magnet_uri)

    # prevent downloading
    # https://stackoverflow.com/q/45680113
    if isinstance(params, dict):
        params['flags'] |= lt.add_torrent_params_flags_t.flag_upload_mode
    else:
        params.flags |= lt.add_torrent_params_flags_t.flag_upload_mode

    lt_version = [int(v) for v in lt.version.split('.')]
    if [0, 16, 13, 0] < lt_version < [1, 1, 3, 0]:
        # for some reason the info_hash needs to be bytes but it's a struct called sha1_hash
        if isinstance(params, dict):
            params['info_hash'] = params['info_hash'].to_bytes()
        else:
            params.info_hash = params.info_hash.to_bytes()

    # add_trackers - currently always append
    try:
        if isinstance(params, dict):
            params['trackers'] += self.trackers
        else:
            params.trackers += self.trackers
    except Exception as e:
        logger.debug('Failed to add trackers: {}', str(e))

    # session from settings pack
    settings = {
        # basics
        # 'user_agent': 'libtorrent/' + lt.__version__,
        'listen_interfaces': '0.0.0.0:6881',
        # dht
        'enable_dht': use_dht,
        'use_dht_as_fallback': True,
        'dht_bootstrap_nodes': 'router.bittorrent.com:6881,dht.transmissionbt.com:6881,router.utorrent.com:6881,127.0.0.1:6881',
        'enable_lsd': False,
        'enable_upnp': True,
        'enable_natpmp': True,
        'announce_to_all_tiers': True,
        'announce_to_all_trackers': True,
        'aio_threads': 4 * 2,
        'checking_mem_usage': 1024 * 2,
    }
    if http_proxy:
        # TODO: TEST http_proxy
        proxy_url = urlparse(http_proxy)
        logger.debug(proxy_url)
        settings.update({
            'proxy_username': proxy_url.username,
            'proxy_password': proxy_url.password,
            'proxy_hostname': proxy_url.hostname,
            'proxy_port': proxy_url.port,
            'proxy_type': lt.proxy_type_t.http_pw
            if proxy_url.username and proxy_url.password
            else lt.proxy_type_t.http,
            'force_proxy': True,
            'anonymous_mode': True,
        })
    session = lt.session(settings)
    # session.add_extension('ut_metadata')
    # session.add_extension('ut_pex')
    # session.add_extension('metadata_transfer')

    # handle
    handle = session.add_torrent(params)
    if use_dht:
        handle.force_dht_announce()

    logger.debug('Acquiring torrent metadata for magnet {}', magnet_uri)
    max_try = max(num_try, 1)
    for tryid in range(max_try):
        timeout_value = timeout
        logger.debug('Trying to get metadata ... {}/{}'.format(tryid + 1, max_try))
        while not handle.has_metadata():
            time.sleep(0.1)
            timeout_value -= 0.1
            if timeout_value <= 0:
                break

        if handle.has_metadata():
            lt_info = handle.get_torrent_info()
            logger.debug('Metadata acquired after {}*{}+{:.1f} seconds',
                         tryid, timeout, timeout - timeout_value)
            break
        else:
            if tryid + 1 == max_try:
                session.remove_torrent(handle, True)
                raise plugin.PluginError(
                    'Timed out after {}*{} seconds'.format(max_try, timeout))

    # create torrent object
    torrent = lt.create_torrent(lt_info)
    torrent.set_creator('libtorrent v{}'.format(lt.version))  # signature
    torrent_dict = torrent.generate()

    torrent_info = self.convert_torrent_info(lt_info)
    torrent_info.update({
        'trackers': params['trackers'] if isinstance(params, dict) else params.trackers,
        'creation_date': datetime.fromtimestamp(torrent_dict[b'creation date']).isoformat(),
    })

    # start scraping
    timeout_value = timeout
    logger.debug('Trying to get peerinfo ... ')
    while handle.status(0).num_complete < 0:
        time.sleep(0.1)
        timeout_value -= 0.1
        if timeout_value <= 0:
            break

    if handle.status(0).num_complete >= 0:
        torrent_status = handle.status(0)
        logger.debug('Peerinfo acquired after {:.1f} seconds', timeout - timeout_value)
        torrent_info.update({
            'seeders': torrent_status.num_complete,
            'peers': torrent_status.num_incomplete,
        })
    else:
        raise plugin.PluginError('Timed out after {} seconds'.format(timeout))

    session.remove_torrent(handle, True)

    torrent_path = pathscrub(os.path.join(destination_folder, lt_info.name() + ".torrent"))
    with open(torrent_path, "wb") as f:
        f.write(lt.bencode(torrent_dict))
    logger.debug('Torrent file written to {}', torrent_path)

    return torrent_path, torrent_info
def main():
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('-p', '--port', type='int', help='set listening port')
    parser.add_option(
        '-r', '--ratio', type='float',
        help='set the preferred upload/download ratio. 0 means infinite. '
             'Values smaller than 1 are clamped to 1')
    parser.add_option(
        '-d', '--max-download-rate', type='float',
        help='the maximum download rate given in kB/s. 0 means infinite.')
    parser.add_option(
        '-u', '--max-upload-rate', type='float',
        help='the maximum upload rate given in kB/s. 0 means infinite.')
    parser.add_option(
        '-s', '--save-path', type='string',
        help='the path where the downloaded file/folder should be placed.')
    parser.add_option(
        '-a', '--allocation-mode', type='string',
        help='sets mode used for allocating the downloaded files on disk. '
             'Possible options are [full | compact]')
    parser.set_defaults(port=6881, ratio=0, max_download_rate=0,
                        max_upload_rate=0, save_path='./',
                        allocation_mode='compact')

    (options, args) = parser.parse_args()

    if options.port < 0 or options.port > 65525:
        options.port = 6881
    options.max_upload_rate *= 1000
    options.max_download_rate *= 1000
    if options.max_upload_rate <= 0:
        options.max_upload_rate = -1
    if options.max_download_rate <= 0:
        options.max_download_rate = -1

    compact_allocation = options.allocation_mode == 'compact'

    settings = lt.session_settings()
    settings.user_agent = 'python_client/' + lt.version

    ses = lt.session()
    ses.set_download_rate_limit(int(options.max_download_rate))
    ses.set_upload_rate_limit(int(options.max_upload_rate))
    ses.listen_on(options.port, options.port + 10)
    ses.set_settings(settings)
    # ses.set_severity_level(lt.alert.severity_levels.info)
    ses.set_alert_mask(0xfffffff)

    # ses.add_extension(lt.create_ut_pex_plugin)
    # ses.add_extension(lt.create_ut_metadata_plugin)
    # ses.add_extension(lt.create_metadata_plugin)

    handles = []
    alerts = []

    # Extensions
    # ses.add_extension(lambda x: PythonExtension(alerts))

    for f in args:
        e = lt.bdecode(open(f, 'rb').read())
        info = lt.torrent_info(e)
        print 'Adding \'%s\'...' % info.name()

        atp = {}
        try:
            atp["resume_data"] = open(
                os.path.join(options.save_path, info.name() + '.fastresume'), 'rb').read()
        except:
            pass

        atp["ti"] = info
        atp["save_path"] = options.save_path
        atp["storage_mode"] = lt.storage_mode_t.storage_mode_sparse
        atp["paused"] = False
        atp["auto_managed"] = True
        atp["duplicate_is_error"] = True

        h = ses.add_torrent(atp)
        handles.append(h)

        h.set_max_connections(60)
        h.set_max_uploads(-1)
        h.set_ratio(options.ratio)

    if os.name == 'nt':
        console = WindowsConsole()
    else:
        console = UnixConsole()

    alive = True
    while alive:
        console.clear()
        out = ''

        for h in handles:
            if h.has_metadata():
                name = h.get_torrent_info().name()[:40]
            else:
                name = '-'
            out += 'name: %-40s\n' % name

            s = h.status()

            if s.state != lt.torrent_status.seeding:
                state_str = ['queued', 'checking', 'downloading metadata',
                             'downloading', 'finished', 'seeding',
                             'allocating', 'checking fastresume']
                out += state_str[s.state] + ' '

                out += '%5.4f%% ' % (s.progress * 100)
                out += progress_bar(s.progress, 49)
                out += '\n'

                out += 'total downloaded: %d Bytes\n' % s.total_done
                out += 'peers: %d seeds: %d distributed copies: %d\n' % \
                    (s.num_peers, s.num_seeds, s.distributed_copies)
                out += '\n'

            out += 'download: %s/s (%s) ' \
                % (add_suffix(s.download_rate), add_suffix(s.total_download))
            out += 'upload: %s/s (%s) ' \
                % (add_suffix(s.upload_rate), add_suffix(s.total_upload))
            out += 'ratio: %s\n' % '0'

            if s.state != lt.torrent_status.seeding:
                out += 'info-hash: %s\n' % h.info_hash()
                out += 'next announce: %s\n' % s.next_announce
                out += 'tracker: %s\n' % s.current_tracker

            write_line(console, out)

            print_peer_info(console, h.get_peer_info())
            print_download_queue(console, h.get_download_queue())

            if True and s.state != lt.torrent_status.seeding:
                out = '\n'
                fp = h.file_progress()
                ti = h.get_torrent_info()
                for f, p in zip(ti.files(), fp):
                    out += progress_bar(p / f.size, 20)
                    out += ' ' + f.path + '\n'
                write_line(console, out)

        write_line(console, 76 * '-' + '\n')
        write_line(console, '(q)uit), (p)ause), (u)npause), (r)eannounce\n')
        write_line(console, 76 * '-' + '\n')

        while 1:
            a = ses.pop_alert()
            if not a:
                break
            alerts.append(a)

        if len(alerts) > 8:
            del alerts[:len(alerts) - 8]

        for a in alerts:
            if type(a) == str:
                write_line(console, a + '\n')
            else:
                write_line(console, a.message() + '\n')

        c = console.sleep_and_input(0.5)
        if not c:
            continue

        if c == 'r':
            for h in handles:
                h.force_reannounce()
        elif c == 'q':
            alive = False
        elif c == 'p':
            for h in handles:
                h.pause()
        elif c == 'u':
            for h in handles:
                h.resume()

    ses.pause()
    for h in handles:
        if not h.is_valid() or not h.has_metadata():
            continue
        data = lt.bencode(h.write_resume_data())
        open(
            os.path.join(options.save_path,
                         h.get_torrent_info().name() + '.fastresume'), 'wb').write(data)
def write_fastresume(handle, save_dir, filename):
    if handle.is_valid() and handle.has_metadata():
        data = lt.bencode(handle.write_resume_data())
        with open(os.path.join(save_dir, filename + ".fastresume"), 'wb+') as f:
            f.write(data)
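The matching read side would return the raw bencoded resume data so it can be passed as resume_data when the torrent is re-added, as the client examples in this collection do; a sketch under the same naming scheme, with read_fastresume as a hypothetical helper:

def read_fastresume(save_dir, filename):
    # Hypothetical counterpart to write_fastresume(): return the raw
    # bencoded resume data, or None if no .fastresume file exists yet.
    path = os.path.join(save_dir, filename + ".fastresume")
    try:
        with open(path, 'rb') as f:
            return f.read()
    except IOError:
        return None

# typical use when re-adding a torrent:
#   data = read_fastresume(save_dir, filename)
#   if data:
#       atp["resume_data"] = data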
def test_string(self):
    encoded = lt.bencode('foo\u00e5\u00e4\u00f6')
    self.assertEqual(encoded, b'9:foo\xc3\xa5\xc3\xa4\xc3\xb6')
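For context, a small round-trip sketch (not part of the test suite) showing how the encoded form relates to lt.bdecode; the exact str/bytes types of the decoded keys and values vary between libtorrent versions:

raw = lt.bencode({'spam': 'eggs'})   # b'd4:spam4:eggse'
decoded = lt.bdecode(raw)            # maps 'spam' back to 'eggs' (as raw bytes on recent versions)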
def test_on_metadata_received_alert(self):
    """
    Testing whether the right operations happen when we receive metadata
    """
    test_deferred = Deferred()

    def mocked_checkpoint():
        test_deferred.callback(None)

    mocked_file = MockObject()
    mocked_file.path = 'test'
    self.libtorrent_download_impl.handle.trackers = lambda: []
    self.libtorrent_download_impl.handle.save_resume_data = lambda: None
    torrent_dict = {'name': 'test', 'piece length': 42, 'pieces': '', 'files': []}
    get_info_from_handle(self.libtorrent_download_impl.handle).metadata = lambda: lt.bencode(torrent_dict)
    get_info_from_handle(self.libtorrent_download_impl.handle).files = lambda: [mocked_file]

    self.libtorrent_download_impl.checkpoint = mocked_checkpoint
    self.libtorrent_download_impl.session = MockObject()
    self.libtorrent_download_impl.session.lm = MockObject()
    self.libtorrent_download_impl.session.lm.rtorrent_handler = None
    self.libtorrent_download_impl.session.lm.torrent_db = None
    self.libtorrent_download_impl.handle.save_path = lambda: None
    self.libtorrent_download_impl.handle.prioritize_files = lambda _: None
    self.libtorrent_download_impl.get_save_path = lambda: ''
    self.libtorrent_download_impl.get_share_mode = lambda: False
    self.libtorrent_download_impl.on_metadata_received_alert(None)

    return test_deferred
def magnet_to_torrent(self, magnet_uri, destination_folder, timeout, num_try):
    import libtorrent

    # parameters
    params = libtorrent.parse_magnet_uri(magnet_uri)

    # prevent downloading
    # https://stackoverflow.com/q/45680113
    params.flags |= libtorrent.add_torrent_params_flags_t.flag_upload_mode

    lt_version = [int(v) for v in libtorrent.version.split('.')]
    if [0, 16, 13, 0] < lt_version < [1, 1, 3, 0]:
        # for some reason the info_hash needs to be bytes but it's a struct called sha1_hash
        params.info_hash = params.info_hash.to_bytes()

    # add_trackers
    if len(params.trackers) == 0:
        try:
            import random
            params.trackers = random.sample(self.trackers, 5)
        except Exception as e:
            logger.debug('Failed to add trackers: {}', str(e))

    # session
    session = libtorrent.session()
    session.listen_on(6881, 6891)
    session.add_extension('ut_metadata')
    session.add_extension('ut_pex')
    session.add_extension('metadata_transfer')
    session.add_dht_router('router.utorrent.com', 6881)
    session.add_dht_router('router.bittorrent.com', 6881)
    session.add_dht_router("dht.transmissionbt.com", 6881)
    session.add_dht_router('127.0.0.1', 6881)
    session.start_dht()

    # handle
    handle = session.add_torrent(params)
    handle.force_dht_announce()

    logger.debug('Acquiring torrent metadata for magnet {}', magnet_uri)
    for tryid in range(max(num_try, 1)):
        timeout_value = timeout
        while not handle.has_metadata():
            time.sleep(0.1)
            timeout_value -= 0.1
            if timeout_value <= 0:
                logger.debug('Failed to get metadata on trial: {}/{}'.format(tryid + 1, num_try))
                break

        if handle.has_metadata():
            logger.debug('Metadata acquired after {} seconds on trial {}'.format(
                timeout - timeout_value, tryid + 1))
            break
        else:
            if tryid + 1 == max(num_try, 1):
                session.remove_torrent(handle, True)
                raise plugin.PluginError(
                    'Timed out after {}x{} seconds trying to magnetize'.format(timeout, num_try))

    torrent_info = handle.get_torrent_info()
    torrent_file = libtorrent.create_torrent(torrent_info)

    torrent_path = pathscrub(os.path.join(destination_folder, torrent_info.name() + ".torrent"))
    with open(torrent_path, "wb") as f:
        f.write(libtorrent.bencode(torrent_file.generate()))
    logger.debug('Torrent file written to {}', torrent_path)

    return torrent_path