def __init__(self, metainfo=None, torrent_parameters=None, ignore_validation=False):
    """
    Create a new TorrentDef object, possibly based on existing data.

    :param metainfo: A dictionary with metainfo, i.e. from a .torrent file.
    :param torrent_parameters: User-defined parameters for the new TorrentDef.
        Ignored when metainfo is given (metainfo takes precedence).
    :param ignore_validation: Whether we ignore the libtorrent validation.
    :raises ValueError: if metainfo is supplied but rejected by libtorrent.
    """
    self._logger = logging.getLogger(self.__class__.__name__)
    self.torrent_parameters = {}
    self.metainfo = None
    self.files_list = []
    self.infohash = None

    if metainfo is not None:
        # First, make sure the passed metainfo is valid
        if not ignore_validation:
            try:
                lt.torrent_info(metainfo)
            except RuntimeError as exc:
                # FIX: chain the original libtorrent error (`from exc`) so the
                # underlying cause is preserved in tracebacks.
                raise ValueError(str(exc)) from exc
        self.metainfo = metainfo
        # The infohash is the SHA-1 of the bencoded b'info' dict (BEP 3).
        self.infohash = sha1(lt.bencode(self.metainfo[b'info'])).digest()
        self.copy_metainfo_to_torrent_parameters()
    elif torrent_parameters:
        self.torrent_parameters.update(torrent_parameters)
def setup(self, config=None, hidden=False, checkpoint_disabled=False):
    """
    Create a Download object. Used internally by Session.

    @param config DownloadConfig or None (in which case a new DownloadConfig() is created)
    @param hidden whether this download is hidden from the user interface
    @param checkpoint_disabled whether checkpointing is disabled for this download
    :returns the add_torrent_params dict (atp) to hand to libtorrent.
    """
    self.hidden = hidden
    # A dummy download never writes checkpoints, regardless of the caller's wish.
    self.checkpoint_disabled = checkpoint_disabled or self.dummy
    self.config = config or DownloadConfig(
        state_dir=self.session.config.get_state_dir())

    self._logger.debug("Setup: %s", hexlify(self.tdef.get_infohash()))

    # Persist initial state right away so the download survives a restart.
    self.checkpoint()

    # Base add_torrent_params: sparse allocation, start paused, error on
    # duplicate adds, and subscribe to libtorrent status updates.
    atp = {
        "save_path": path_util.normpath(get_default_dest_dir() / self.config.get_dest_dir()),
        "storage_mode": lt.storage_mode_t.storage_mode_sparse,
        "flags": lt.add_torrent_params_flags_t.flag_paused
                 | lt.add_torrent_params_flags_t.flag_duplicate_is_error
                 | lt.add_torrent_params_flags_t.flag_update_subscribe
    }

    if self.config.get_share_mode():
        atp["flags"] = atp["flags"] | lt.add_torrent_params_flags_t.flag_share_mode
    if self.config.get_upload_mode():
        atp["flags"] = atp["flags"] | lt.add_torrent_params_flags_t.flag_upload_mode

    resume_data = self.config.get_engineresumedata()
    if not isinstance(self.tdef, TorrentDefNoMetainfo):
        # Full metainfo is available: hand libtorrent the torrent_info directly.
        metainfo = self.tdef.get_metainfo()
        torrentinfo = lt.torrent_info(metainfo)

        atp["ti"] = torrentinfo
        if resume_data and isinstance(resume_data, dict):
            # Rewrite save_path as a global path, if it is given as a relative path
            if b"save_path" in resume_data and not path_util.isabs(
                    ensure_unicode(resume_data[b"save_path"], 'utf8')):
                resume_data[b"save_path"] = self.state_dir / ensure_unicode(
                    resume_data[b"save_path"], 'utf8')
            atp["resume_data"] = lt.bencode(resume_data)
    else:
        # No metainfo yet: give libtorrent a URL/magnet link to fetch it from.
        atp["url"] = self.tdef.get_url() or "magnet:?xt=urn:btih:" + hexlify(self.tdef.get_infohash())
        atp["name"] = self.tdef.get_name_as_unicode()

    return atp
def create_torrent_from_dir(directory, torrent_filename):
    """
    Build a (public) torrent from every file under *directory* and write it to disk.

    :param directory: Path of the directory whose contents become the torrent.
    :param torrent_filename: Path the bencoded .torrent file is written to.
    :return: tuple of (metainfo dict as generated by libtorrent, infohash bytes).
    """
    storage = lt.file_storage()
    lt.add_files(storage, str(directory))
    creator = lt.create_torrent(storage)
    # creator = create_torrent(storage, flags=17)  # piece alignment
    creator.set_priv(False)
    # Hash pieces relative to the parent so paths inside the torrent keep the
    # directory name as their top-level component.
    lt.set_piece_hashes(creator, str(directory.parent))

    metainfo = creator.generate()
    with open(torrent_filename, 'wb') as torrent_file:
        torrent_file.write(lt.bencode(metainfo))

    digest = lt.torrent_info(metainfo).info_hash().to_bytes()
    return metainfo, digest
async def shutdown(self, timeout=30):
    """
    Gracefully shut down all downloads and the libtorrent session(s).

    :param timeout: maximum number of seconds to wait for libtorrent to
        finish pending work before continuing with shutdown anyway.
    """
    if self.downloads:
        self.tribler_session.notify_shutdown_state("Checkpointing Downloads...")
        await gather(*[download.stop() for download in self.downloads.values()], return_exceptions=True)
        self.tribler_session.notify_shutdown_state("Shutting down Downloads...")
        await gather(*[download.shutdown() for download in self.downloads.values()], return_exceptions=True)

        self.tribler_session.notify_shutdown_state("Shutting down Libtorrent Manager...")
        # If libtorrent session has pending disk io, wait until timeout (default: 30 seconds) to let it finish.
        # In between ask for session stats to check if state is clean for shutdown.
        while not self.is_shutdown_ready() and timeout >= 1:
            self.tribler_session.notify_shutdown_state("Waiting for Libtorrent to finish...")
            self.post_session_stats()
            timeout -= 1
            await asyncio.sleep(1)

    await self.shutdown_task_manager()

    if self.dht_health_manager:
        await self.dht_health_manager.shutdown_task_manager()

    # Save libtorrent state
    if self.has_session():
        with open(self.tribler_session.config.get_state_dir() / LTSTATE_FILENAME, 'wb') as ltstate_file:
            ltstate_file.write(lt.bencode(self.get_session().save_state()))

    if self.has_session():
        self.get_session().stop_upnp()

    # NOTE(review): `del` here only unbinds the loop variable; the sessions are
    # actually released when self.ltsessions is set to None below.
    for ltsession in self.ltsessions.values():
        del ltsession
    self.ltsessions = None

    # Remove metadata temporary directory
    if self.metadata_tmpdir:
        rmtree(self.metadata_tmpdir)
        self.metadata_tmpdir = None

    self.tribler_session = None
async def get_torrent(self, request):
    """
    Return the bencoded .torrent data of a known download as an attachment.

    :param request: aiohttp request whose match_info carries the hex 'infohash'.
    :return: RESTResponse with the torrent bytes, or a 404 when the download
        is unknown or has no torrent data yet.
    """
    infohash = unhexlify(request.match_info['infohash'])
    download = self.session.dlmgr.get_download(infohash)
    if not download:
        return DownloadsEndpoint.return_404(request)

    torrent = download.get_torrent_data()
    if not torrent:
        return DownloadsEndpoint.return_404(request)

    # FIX: dropped the trailing `.encode('utf-8')`. hexlify() here yields a str
    # (project wrapper — confirm), so encoding produced bytes and "%s" rendered
    # the filename as "b'...'"; had hexlify returned bytes, .encode would have
    # raised AttributeError. Either way the old code was wrong.
    return RESTResponse(lt.bencode(torrent),
                        headers={'content-type': 'application/x-bittorrent',
                                 'Content-Disposition': 'attachment; filename=%s.torrent'
                                                        % hexlify(infohash)})
def _fix_state_config(config):
    """
    Migrate Python2-era 'state' config values (metainfo/engineresumedata) to
    their Python3 representation and re-store them base64(bencode(...))-encoded.

    :param config: config object supporting get(section, option, literal_eval)
        and set(section, option, value).
    :return: the fixed config, or None when a value could not be parsed
        (probably corrupted).
    """
    for section, option in [('state', 'metainfo'), ('state', 'engineresumedata')]:
        value = config.get(section, option, literal_eval=False)
        # Nothing to fix, or lib2to3 is unavailable in this environment.
        if not value or not refactoring_tool:
            continue

        try:
            # Run lib2to3 over the repr'd value so Python2 literals parse under Python3.
            value = str(refactoring_tool.refactor_string(value + '\n', option + '_2to3'))
            # Repair byte-strings that were garbled by the Python2->3 transition;
            # fall back to the plain literal when nothing needed ungarbling.
            ungarbled_dict = recursive_ungarble_metainfo(ast.literal_eval(value))
            value = ungarbled_dict or ast.literal_eval(value)
            config.set(section, option, base64.b64encode(lt.bencode(value)).decode('utf-8'))
        except (ValueError, SyntaxError, ParseError) as ex:
            logger.error("Config could not be fixed, probably corrupted. Exception: %s %s",
                         type(ex), str(ex))
            return None

    return config
def set_engineresumedata(self, engineresumedata):
    """Store libtorrent resume data in the config, base64-encoded bencode."""
    encoded = base64.b64encode(lt.bencode(engineresumedata))
    self.config['state']['engineresumedata'] = encoded.decode('utf-8')
def set_metainfo(self, metainfo):
    """Store the torrent metainfo dict in the config, base64-encoded bencode."""
    encoded = base64.b64encode(lt.bencode(metainfo))
    self.config['state']['metainfo'] = encoded.decode('utf-8')
async def get_torrent_info(self, request):
    """
    Resolve and return torrent metainfo for a given URI.

    Supported URI schemes: file: (local .torrent), http(s) (remote .torrent
    or a page serving a magnet link), and magnet (resolved via the DHT).
    The metainfo is returned hex-encoded as JSON; the torrent is also added
    to GigaChannel as a free-for-all entry.

    :param request: aiohttp request with query params 'uri' (required) and
        'hops' (optional int, anonymity hops for metainfo lookup).
    """
    args = request.query
    hops = None
    if 'hops' in args:
        try:
            hops = int(args['hops'])
        except ValueError:
            return RESTResponse({"error": f"wrong value of 'hops' parameter: {repr(args['hops'])}"},
                                status=HTTP_BAD_REQUEST)

    if 'uri' not in args or not args['uri']:
        return RESTResponse({"error": "uri parameter missing"}, status=HTTP_BAD_REQUEST)

    uri = args['uri']
    if uri.startswith('file:'):
        try:
            filename = url2pathname(uri[5:])
            tdef = TorrentDef.load(filename)
            metainfo = tdef.get_metainfo()
        except (TypeError, RuntimeError):
            return RESTResponse({"error": "error while decoding torrent file"},
                                status=HTTP_INTERNAL_SERVER_ERROR)
    elif uri.startswith('http'):
        try:
            async with ClientSession(raise_for_status=True) as session:
                response = await session.get(uri)
                response = await response.read()
        except (ServerConnectionError, ClientResponseError) as e:
            return RESTResponse({"error": str(e)}, status=HTTP_INTERNAL_SERVER_ERROR)

        # The HTTP endpoint may serve a magnet link instead of torrent data.
        if response.startswith(b'magnet'):
            _, infohash, _ = parse_magnetlink(response)
            # NOTE(review): when the magnet link has no infohash, 'metainfo'
            # stays unbound here and the check below would raise NameError —
            # confirm whether that case can occur.
            if infohash:
                metainfo = await self.session.dlmgr.get_metainfo(infohash, timeout=60,
                                                                 hops=hops, url=response)
        else:
            metainfo = bdecode_compat(response)
    elif uri.startswith('magnet'):
        infohash = parse_magnetlink(uri)[1]
        if infohash is None:
            return RESTResponse({"error": "missing infohash"}, status=HTTP_BAD_REQUEST)
        metainfo = await self.session.dlmgr.get_metainfo(infohash, timeout=60, hops=hops, url=uri)
    else:
        return RESTResponse({"error": "invalid uri"}, status=HTTP_BAD_REQUEST)

    if not metainfo:
        return RESTResponse({"error": "metainfo error"}, status=HTTP_INTERNAL_SERVER_ERROR)

    if not isinstance(metainfo, dict) or b'info' not in metainfo:
        self._logger.warning("Received metainfo is not a valid dictionary")
        return RESTResponse({"error": "invalid response"}, status=HTTP_INTERNAL_SERVER_ERROR)

    # Add the torrent to GigaChannel as a free-for-all entry, so others can search it
    self.session.mds.TorrentMetadata.add_ffa_from_dict(
        tdef_to_metadata_dict(TorrentDef.load_from_dict(metainfo)))

    # TODO(Martijn): store the stuff in a database!!!
    # TODO(Vadim): this means cache the downloaded torrent in a binary storage, like LevelDB
    infohash = hashlib.sha1(lt.bencode(metainfo[b'info'])).digest()
    download = self.session.dlmgr.downloads.get(infohash)
    # A download that exists only to fetch metainfo should not count as
    # "already downloading" towards the GUI.
    metainfo_request = self.session.dlmgr.metainfo_requests.get(infohash, [None])[0]
    download_is_metainfo_request = download == metainfo_request

    # Check if the torrent is already in the downloads
    encoded_metainfo = deepcopy(metainfo)

    # FIXME: json.dumps garbles binary data that is used by the 'pieces' field
    # However, this is fine as long as the GUI does not use this field.
    encoded_metainfo[b'info'][b'pieces'] = hexlify(encoded_metainfo[b'info'][b'pieces']).encode('utf-8')
    encoded_metainfo = hexlify(json.dumps(recursive_unicode(encoded_metainfo, ignore_errors=True),
                                          ensure_ascii=False).encode('utf-8'))
    return RESTResponse({"metainfo": encoded_metainfo,
                         "download_exists": download and not download_is_metainfo_request})
def create_torrent_file(file_path_list, params, torrent_filepath=None):
    """
    Create a torrent from a list of files.

    :param file_path_list: paths of the files to include; directories and
        other non-files are silently skipped.
    :param params: parameter dict with bytes keys (b'piece length', b'comment',
        b'created by', b'announce', b'announce-list', b'nodes', b'httpseeds',
        b'urllist').
    :param torrent_filepath: when given, the bencoded torrent is also written
        to this path.
    :raises OSError: when any entry in file_path_list does not exist.
    :return: dict with success flag, base paths, torrent file path, metainfo
        bytes and the SHA-1 infohash.
    """
    fs = lt.file_storage()

    # filter all non-files
    file_path_list_filtered = []
    for path in file_path_list:
        path = path_util.Path(path)
        if not path.exists():
            raise OSError(f'Path does not exist: {path}')
        elif path.is_file():
            file_path_list_filtered.append(path)

    # get the directory where these files are in. If there are multiple files,
    # take the common directory they are in
    if len(file_path_list_filtered) == 1:
        base_path = path_util.split(file_path_list_filtered[0])[0]
    else:
        base_path = path_util.abspath(commonprefix(file_path_list_filtered))

    # the base_dir directory is the parent directory of the base_path and is
    # passed to the set_piece_hash method
    if len(file_path_list_filtered) == 1:
        filename = path_util.basename(file_path_list_filtered[0])
        fs.add_file(filename, path_util.getsize(file_path_list_filtered[0]))
    else:
        for full_file_path in file_path_list_filtered:
            # FIXME: there should be a better, cleaner way to define this
            filename = path_util.join(*full_file_path.parts[len(base_path.parent.parts):])
            fs.add_file(str(filename), path_util.getsize(full_file_path))

    if params.get(b'piece length'):
        piece_size = params[b'piece length']
    else:
        # 0 lets libtorrent pick a piece size automatically.
        piece_size = 0

    flags = lt.create_torrent_flags_t.optimize

    # This flag doesn't exist anymore in libtorrent V1.1.0
    if hasattr(lt.create_torrent_flags_t, 'calculate_file_hashes'):
        flags |= lt.create_torrent_flags_t.calculate_file_hashes

    # Decode bytes values for the Python3 libtorrent bindings; keys stay bytes,
    # so the params.get(b'...') lookups below keep working.
    params = {k: (v.decode('utf-8') if isinstance(v, bytes) else v) for k, v in params.items()}

    torrent = lt.create_torrent(fs, piece_size=piece_size, flags=flags)
    # Python2 wants binary, python3 want unicode
    if params.get(b'comment'):
        torrent.set_comment(params[b'comment'])
    if params.get(b'created by'):
        torrent.set_creator(params[b'created by'])
    # main tracker
    if params.get(b'announce'):
        torrent.add_tracker(params[b'announce'])
    # tracker list
    if params.get(b'announce-list'):
        tier = 1
        # NOTE(review): BEP 12 defines announce-list as a list of tiers (each a
        # list of URLs); entries here go straight to add_tracker — confirm the
        # shape this function actually receives from its callers.
        for tracker in params[b'announce-list']:
            torrent.add_tracker(tracker, tier=tier)
            tier += 1
    # DHT nodes
    # http://www.bittorrent.org/beps/bep_0005.html
    if params.get(b'nodes'):
        for node in params[b'nodes']:
            torrent.add_node(*node)
    # HTTP seeding
    # http://www.bittorrent.org/beps/bep_0017.html
    if params.get(b'httpseeds'):
        # NOTE(review): the whole b'httpseeds' value is passed as a single seed
        # URL; if it can be a list, this should loop — verify against callers.
        torrent.add_http_seed(params[b'httpseeds'])

    # Web seeding
    # http://www.bittorrent.org/beps/bep_0019.html
    if len(file_path_list) == 1:
        if params.get(b'urllist', False):
            torrent.add_url_seed(params[b'urllist'])

    # read the files and calculate the hashes
    if len(file_path_list) == 1:
        lt.set_piece_hashes(torrent, str(base_path))
    else:
        lt.set_piece_hashes(torrent, str(base_path.parent))

    t1 = torrent.generate()
    torrent = lt.bencode(t1)

    if torrent_filepath:
        with open(torrent_filepath, 'wb') as f:
            f.write(torrent)

    return {
        'success': True,
        'base_path': base_path,
        'base_dir': base_path.parent,
        'torrent_file_path': torrent_filepath,
        'metainfo': torrent,
        'infohash': sha1(lt.bencode(t1[b'info'])).digest()
    }