def test_read_resume_data(self):
    """Round-trip a minimal resume dict through lt.read_resume_data()."""
    encoded = lt.bencode({
        'file-format': 'libtorrent resume file',
        'info-hash': 'abababababababababab',
        'name': 'test',
        'save_path': '.',
        'peers': '\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02',
        'file_priority': [0, 1, 1],
    })
    params = lt.read_resume_data(encoded)

    self.assertEqual(params.name, 'test')
    self.assertEqual(params.info_hash, lt.sha1_hash('abababababababababab'))
    self.assertEqual(params.file_priorities, [0, 1, 1])
    self.assertEqual(params.peers, [('1.1.1.1', 1), ('2.2.2.2', 2)])

    session = lt.session({'alert_mask': lt.alert.category_t.all_categories})
    handle = session.add_torrent(params)
    handle.connect_peer(('3.3.3.3', 3))

    # Drain alerts for ~1 second so the session has a chance to act.
    for _ in range(10):
        for alert in session.pop_alerts():
            print(alert.message())
        time.sleep(0.1)
def test_read_resume_data(self):
    """Verify lt.read_resume_data() restores fields from a bencoded dict."""
    resume_dict = {
        "file-format": "libtorrent resume file",
        "info-hash": "abababababababababab",
        "name": "test",
        "save_path": ".",
        "peers": "\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02",
        "file_priority": [0, 1, 1],
    }
    atp = lt.read_resume_data(lt.bencode(resume_dict))

    self.assertEqual(atp.name, "test")
    self.assertEqual(atp.info_hash, lt.sha1_hash("abababababababababab"))
    self.assertEqual(atp.file_priorities, [0, 1, 1])
    self.assertEqual(atp.peers, [("1.1.1.1", 1), ("2.2.2.2", 2)])

    session = lt.session({"alert_mask": lt.alert.category_t.all_categories})
    handle = session.add_torrent(atp)
    handle.connect_peer(("3.3.3.3", 3))

    # Poll and print alerts ten times, 100 ms apart.
    remaining = 10
    while remaining:
        for alert in session.pop_alerts():
            print(alert.message())
        time.sleep(0.1)
        remaining -= 1
def test_read_resume_data(self):
    """Round-trip resume data and dump the resulting add_torrent_params."""
    encoded = lt.bencode({
        'file-format': 'libtorrent resume file',
        'info-hash': 'abababababababababab',
        'name': 'test',
        'save_path': '.',
        'peers': '\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02',
        'file_priority': [0, 1, 1],
    })
    atp = lt.read_resume_data(encoded)

    self.assertEqual(atp.name, 'test')
    self.assertEqual(atp.info_hashes.v1, lt.sha1_hash('abababababababababab'))
    self.assertEqual(atp.file_priorities, [0, 1, 1])
    self.assertEqual(atp.peers, [('1.1.1.1', 1), ('2.2.2.2', 2)])

    session = lt.session(settings)
    handle = session.add_torrent(atp)

    # Dump every attribute of the params object for debugging.
    for attr in dir(atp):
        print('%s: %s' % (attr, getattr(atp, attr)))

    handle.connect_peer(('3.3.3.3', 3))

    # Drain alerts for ~1 second so the session has a chance to act.
    for _ in range(10):
        for alert in session.pop_alerts():
            print(alert.message())
        time.sleep(0.1)
def test_read_resume_data(self):
    """Check lt.read_resume_data() parses name, hash, priorities and peers."""
    data = {
        'file-format': 'libtorrent resume file',
        'info-hash': 'abababababababababab',
        'name': 'test',
        'save_path': '.',
        'peers': '\x01\x01\x01\x01\x00\x01\x02\x02\x02\x02\x00\x02',
        'file_priority': [0, 1, 1],
    }
    tp = lt.read_resume_data(lt.bencode(data))

    self.assertEqual(tp.name, 'test')
    self.assertEqual(tp.info_hash, lt.sha1_hash('abababababababababab'))
    self.assertEqual(tp.file_priorities, [0, 1, 1])
    self.assertEqual(tp.peers, [('1.1.1.1', 1), ('2.2.2.2', 2)])

    ses = lt.session({'alert_mask': lt.alert.category_t.all_categories})
    ses.add_torrent(tp).connect_peer(('3.3.3.3', 3))

    # Print whatever alerts arrive over ~1 second.
    for _ in range(10):
        for a in ses.pop_alerts():
            print(a.message())
        time.sleep(0.1)
def iter_resume_data_from_db(conn: apsw.Connection) -> Iterator[lt.add_torrent_params]:
    """Yield an add_torrent_params for every torrent row stored in *conn*.

    Rows whose resume data (or auxiliary info dict) fails to bdecode are
    logged and skipped, so one corrupt row does not abort the iteration.
    """
    version = get_version(conn)
    if version == 0:
        # An empty/uninitialized database has nothing to yield.
        return
    dbver.semver_check_breaking(LATEST, version)
    cur = conn.cursor().execute(
        "SELECT info_sha1, info_sha256, resume_data, info FROM torrent"
    )
    for row in cur:
        info_sha1, info_sha256, resume_data, info = cast(
            tuple[Optional[bytes], Optional[bytes], bytes, Optional[bytes]], row
        )
        # NB: certain fields (creation date, creator, comment) live in the torrent_info
        # object at runtime, but are serialized with the resume data. If the b"info"
        # field is empty, the torrent_info won't be created, and these fields will be
        # dropped. We want to deserialize the resume data all at once, rather than
        # deserialize the torrent_info separately.
        info_dict: Optional[Any] = None
        if info is not None:
            try:
                with ltpy.translate_exceptions():
                    info_dict = lt.bdecode(info)
            except ltpy.Error:
                # Non-fatal: the resume data may still parse without the info dict.
                _LOG.exception(
                    "%s parsing info dict", _log_ih_bytes(info_sha1, info_sha256)
                )
        try:
            with ltpy.translate_exceptions():
                bdecoded = lt.bdecode(resume_data)
                if not isinstance(bdecoded, dict):
                    _LOG.error(
                        "%s resume data not a dict",
                        _log_ih_bytes(info_sha1, info_sha256),
                    )
                    continue
                # Graft the separately-stored info dict into the resume data so
                # everything is deserialized in a single read_resume_data() call.
                if bdecoded.get(b"info") is None and info_dict is not None:
                    bdecoded[b"info"] = info_dict
                yield lt.read_resume_data(lt.bencode(bdecoded))
        except ltpy.Error:
            # Skip this row; keep iterating the remaining torrents.
            _LOG.exception(
                "%s parsing resume data", _log_ih_bytes(info_sha1, info_sha256)
            )
def add_torrent(ses, filename, options):
    """Add a torrent (magnet URI or .torrent file) to the session.

    For .torrent files, a matching .fastresume file in the save path is
    loaded best-effort so the download resumes from its previous state.
    """
    atp = lt.add_torrent_params()
    if filename.startswith('magnet:'):
        atp = lt.parse_magnet_uri(filename)
    else:
        ti = lt.torrent_info(filename)
        resume_file = os.path.join(options.save_path, ti.name() + '.fastresume')
        try:
            # Use a context manager so the file handle is not leaked.
            with open(resume_file, 'rb') as f:
                atp = lt.read_resume_data(f.read())
        except Exception as e:
            # Best-effort: a missing/corrupt resume file means a fresh start.
            print('failed to open resume file "%s": %s' % (resume_file, e))
        atp.ti = ti
    atp.save_path = options.save_path
    atp.storage_mode = lt.storage_mode_t.storage_mode_sparse
    # NOTE: duplicate_is_error was OR'd in twice in the original; once suffices.
    atp.flags |= lt.torrent_flags.duplicate_is_error \
        | lt.torrent_flags.auto_managed
    ses.async_add_torrent(atp)
async def load_resume_data(self):
    """Load *.resume files from self.state_dir and add them to the session.

    Also collects any b'spritzle.*' keys stored alongside the resume data
    into self.torrent_data, keyed by hex info-hash.
    """
    log.info(f'Loading resume data from {self.state_dir}')
    for f in self.state_dir.iterdir():
        if f.suffix != '.resume':
            continue
        log.info(f'Found {f.name}, attempting add..')
        b = f.read_bytes()
        atp = lt.read_resume_data(b)
        try:
            # add_torrent() can block, so run it in the default executor.
            # run_in_executor forwards positional args itself, so the
            # original's no-arg functools.partial wrapper was redundant.
            await asyncio.get_event_loop().run_in_executor(
                None, self.session.add_torrent, atp)
        except RuntimeError as e:
            log.error(f'Error loading resume data {f}: {e}')
        d = lt.bdecode(b)
        info_hash = binascii.hexlify(d[b'info-hash']).decode()
        self.torrent_data[info_hash] = {}
        for key, value in d.items():
            if key.startswith(b'spritzle.'):
                self.torrent_data[info_hash][key.decode()] = value
def open_tor(path, u, w):
    """Open the torrent at *path*, preferring its saved .fastresume state.

    Returns True when a new torrent entry was created, False when the
    resulting handle already exists in `torrents`.
    """
    try:
        # Close the file deterministically instead of leaking the handle.
        with open(path + ".fastresume") as f:
            raw = f.read()
        # SECURITY: the original used eval() here, which executes arbitrary
        # code embedded in the resume file. ast.literal_eval() only parses
        # Python literals and raises on anything else (caught below).
        import ast
        resume_dict = ast.literal_eval(raw)
        td = lt.read_resume_data(lt.bencode(resume_dict))
    except Exception:
        # No usable resume file: fall back to the raw .torrent metadata.
        info = lt.torrent_info(path)
        pv = k.gtk_entry_buffer_get_text(sets.fold_bf)
        td = {'ti': info, 'save_path': pv}
    th = ses.add_torrent(td)
    # got no Name right after opening with libtor
    for x in torrents:
        if x.h == th:
            return False
    t = tor(th, u)
    torrents.insert(0, t)
    ratio.gain(w)
    log.addT(path)
    return True
def add_torrent(ses: lt.session, filename: str, options: optparse.Values) -> None:
    """Add a torrent (magnet URI or .torrent file) to *ses*.

    For .torrent files, a sibling .fastresume file in the save path is
    loaded best-effort so the download resumes where it left off.
    """
    atp = lt.add_torrent_params()
    if filename.startswith("magnet:"):
        atp = lt.parse_magnet_uri(filename)
    else:
        ti = lt.torrent_info(filename)
        resume_file = os.path.join(options.save_path, ti.name() + ".fastresume")
        try:
            # Use a context manager so the file handle is not leaked.
            with open(resume_file, "rb") as f:
                atp = lt.read_resume_data(f.read())
        except Exception as e:
            # Best-effort: a missing or corrupt resume file means a fresh start.
            print('failed to open resume file "%s": %s' % (resume_file, e))
        atp.ti = ti
    atp.save_path = options.save_path
    atp.storage_mode = lt.storage_mode_t.storage_mode_sparse
    # NOTE: duplicate_is_error appeared twice in the original OR chain;
    # OR-ing a flag twice is a no-op, so it is listed once here.
    atp.flags |= (lt.torrent_flags.duplicate_is_error
                  | lt.torrent_flags.auto_managed)
    ses.async_add_torrent(atp)
def copy(atp: lt.add_torrent_params) -> lt.add_torrent_params:
    """Return a copy of *atp* by round-tripping it through resume data."""
    # TODO: use copy constructor when available
    with ltpy.translate_exceptions():
        serialized = lt.write_resume_data_buf(atp)
        return lt.read_resume_data(serialized)