def on_save_resume_data_alert(self, alert):
    """
    Handle the libtorrent alert that carries the resume data of this download
    and persist it to a checkpoint file on disk.
    """
    self._logger.debug(f'On save resume data alert: {alert}')
    if self.checkpoint_disabled:
        return

    resume_data = alert.resume_data
    # Make save_path relative if the torrent is saved in the Tribler state directory
    if self.state_dir and b'save_path' in resume_data:
        save_path = Path(resume_data[b'save_path'].decode('utf8'))
        if save_path.exists():
            resume_data[b'save_path'] = str(save_path.normalize_to(self.state_dir))

    if isinstance(self.tdef, TorrentDefNoMetainfo):
        # No metainfo available yet; store the minimal identifying information.
        metainfo = {
            'infohash': self.tdef.get_infohash(),
            'name': self.tdef.get_name_as_unicode(),
            'url': self.tdef.get_url(),
        }
    else:
        metainfo = self.tdef.get_metainfo()
    self.config.set_metainfo(metainfo)
    self.config.set_engineresumedata(resume_data)

    # Save it to file: <checkpoint_dir>/<hex infohash>.conf
    basename = hexlify(resume_data[b'info-hash']) + '.conf'
    filename = self.dlmgr.get_checkpoint_dir() / basename
    self.config.config['download_defaults']['name'] = self.tdef.get_name_as_unicode()  # store name (for debugging)
    self.config.write(str(filename))
    self._logger.debug('Saving download config to file %s', filename)
def _existing_files(path_list): for path in path_list: path = Path(path) if not path.exists(): raise OSError(f'Path does not exist: {path}') elif path.is_file(): yield path
def load_or_create(private_key_path: Path, public_key_path: Path = None) -> LibNaCLSK:
    """
    Load a LibNaCL secret key from disk, creating and persisting a fresh one
    when no key file exists yet.

    :param private_key_path: location of the serialized private key material.
    :param public_key_path: optional location to write the public key of a
        newly generated keypair; ignored when the private key already exists.
    :return: the loaded or freshly generated key.
    """
    if private_key_path.exists():
        # Reuse the previously stored key material.
        return LibNaCLSK(private_key_path.read_bytes())

    fresh_key = LibNaCLSK()
    # Persist secret key + seed together, as expected by the loading branch above.
    private_key_path.write_bytes(fresh_key.key.sk + fresh_key.key.seed)
    if public_key_path:
        public_key_path.write_bytes(fresh_key.key.pk)
    return fresh_key
async def update_download(self, request):
    """
    Update the state of a download: VOD mode, anonymity hops, selected files
    or lifecycle state (resume / stop / recheck / move_storage).

    :param request: aiohttp request; the infohash is taken from the URL match info
        and the modifications from the JSON body.
    :return: a RESTResponse describing the modification or the error.
    """
    infohash = unhexlify(request.match_info['infohash'])
    download = self.download_manager.get_download(infohash)
    if not download:
        return DownloadsEndpoint.return_404(request)

    parameters = await request.json()

    vod_mode = parameters.get("vod_mode")
    if vod_mode is not None:
        if not isinstance(vod_mode, bool):
            return RESTResponse({"error": "vod_mode must be bool flag"},
                                status=HTTP_BAD_REQUEST)
        return await self.vod_response(download, parameters, request, vod_mode)

    if 'anon_hops' in parameters:
        # Changing the hop count restarts the download, so it must not be
        # combined with any other modification in the same request.
        if len(parameters) > 1:
            return RESTResponse(
                {"error": "anon_hops must be the only parameter in this request"},
                status=HTTP_BAD_REQUEST)
        anon_hops = int(parameters['anon_hops'])
        try:
            await self.download_manager.update_hops(download, anon_hops)
        except Exception as e:
            self._logger.exception(e)
            return return_handled_exception(request, e)
        return RESTResponse({"modified": True,
                             "infohash": hexlify(download.get_def().get_infohash())})

    if 'selected_files' in parameters:
        selected_files_list = parameters['selected_files']
        num_files = len(download.tdef.get_files())
        # Reject any out-of-range file index before applying the selection.
        if not all(0 <= index < num_files for index in selected_files_list):
            return RESTResponse({"error": "index out of range"}, status=HTTP_BAD_REQUEST)
        download.set_selected_files(selected_files_list)

    if parameters.get('state'):
        state = parameters['state']
        if state == "resume":
            download.resume()
        elif state == "stop":
            await download.stop(user_stopped=True)
        elif state == "recheck":
            download.force_recheck()
        elif state == "move_storage":
            dest_dir = Path(parameters['dest_dir'])
            if not dest_dir.exists():
                # Fix: this error previously returned HTTP 200; report it as a
                # client error like every other error response in this handler.
                return RESTResponse(
                    {"error": f"Target directory ({dest_dir}) does not exist"},
                    status=HTTP_BAD_REQUEST)
            download.move_storage(dest_dir)
            download.checkpoint()
        else:
            return RESTResponse({"error": "unknown state parameter"},
                                status=HTTP_BAD_REQUEST)

    return RESTResponse({"modified": True,
                         "infohash": hexlify(download.get_def().get_infohash())})
async def update_download(self, request):
    """
    Update the state of a download: VOD streaming mode, anonymity hops,
    selected files or lifecycle state (resume / stop / recheck / move_storage).

    :param request: aiohttp request; the infohash is taken from the URL match info
        and the modifications from the JSON body.
    :return: a RESTResponse describing the modification or the error.
    """
    infohash = unhexlify(request.match_info['infohash'])
    download = self.session.dlmgr.get_download(infohash)
    if not download:
        return DownloadsEndpoint.return_404(request)

    parameters = await request.json()

    vod_mode = parameters.get("vod_mode")
    if vod_mode is not None:
        if not isinstance(vod_mode, bool):
            return RESTResponse({"error": "vod_mode must be bool flag"},
                                status=HTTP_BAD_REQUEST)
        file_index = 0
        modified = False
        if vod_mode:
            file_index = parameters.get("fileindex")
            if file_index is None:
                return RESTResponse(
                    {"error": "fileindex is necessary to enable vod_mode"},
                    status=HTTP_BAD_REQUEST)
            # (Re)enable streaming when it is off or targets a different file;
            # cap the enable operation at 10 seconds.
            if not download.stream.enabled or download.stream.fileindex != file_index:
                await wait_for(download.stream.enable(file_index, request.http_range.start or 0), 10)
                await download.stream.updateprios()
                modified = True
        elif not vod_mode and download.stream.enabled:
            download.stream.disable()
            modified = True
        return RESTResponse({
            "vod_prebuffering_progress": download.stream.prebuffprogress,
            "vod_prebuffering_progress_consec": download.stream.prebuffprogress_consec,
            "vod_header_progress": download.stream.headerprogress,
            "vod_footer_progress": download.stream.footerprogress,
            "vod_mode": download.stream.enabled,
            "infohash": hexlify(download.get_def().get_infohash()),
            "modified": modified,
        })

    if len(parameters) > 1 and 'anon_hops' in parameters:
        # Changing the hop count restarts the download, so it must not be
        # combined with any other modification in the same request.
        return RESTResponse(
            {"error": "anon_hops must be the only parameter in this request"},
            status=HTTP_BAD_REQUEST)
    elif 'anon_hops' in parameters:
        anon_hops = int(parameters['anon_hops'])
        try:
            await self.session.dlmgr.update_hops(download, anon_hops)
        except Exception as e:
            self._logger.exception(e)
            return return_handled_exception(request, e)
        return RESTResponse({"modified": True,
                             "infohash": hexlify(download.get_def().get_infohash())})

    if 'selected_files' in parameters:
        selected_files_list = parameters['selected_files']
        num_files = len(download.tdef.get_files())
        # Reject any out-of-range file index before applying the selection.
        if not all(0 <= index < num_files for index in selected_files_list):
            return RESTResponse({"error": "index out of range"}, status=HTTP_BAD_REQUEST)
        download.set_selected_files(selected_files_list)

    if parameters.get('state'):
        state = parameters['state']
        if state == "resume":
            download.resume()
        elif state == "stop":
            await download.stop(user_stopped=True)
        elif state == "recheck":
            download.force_recheck()
        elif state == "move_storage":
            dest_dir = Path(parameters['dest_dir'])
            if not dest_dir.exists():
                # Fix: this error previously returned HTTP 200; report it as a
                # client error like every other error response in this handler.
                return RESTResponse(
                    {"error": "Target directory (%s) does not exist" % dest_dir},
                    status=HTTP_BAD_REQUEST)
            download.move_storage(dest_dir)
            download.checkpoint()
        else:
            return RESTResponse({"error": "unknown state parameter"},
                                status=HTTP_BAD_REQUEST)

    return RESTResponse({"modified": True,
                         "infohash": hexlify(download.get_def().get_infohash())})
def update_channel_torrent(self, metadata_list): """ Channel torrents are append-only to support seeding the old versions from the same dir and avoid updating already downloaded blobs. :param metadata_list: The list of metadata entries to add to the torrent dir. ACHTUNG: TODELETE entries _MUST_ be sorted to the end of the list to prevent channel corruption! :return The newly create channel torrent infohash, final timestamp for the channel and torrent date """ # As a workaround for delete entries not having a timestamp in the DB, delete entries should # be placed after create/modify entries: # | create/modify entries | delete entries | <- final timestamp # Create dir for the metadata files channel_dir = Path(self._channels_dir / self.dirname).absolute() if not channel_dir.is_dir(): os.makedirs(Path.fix_win_long_file(channel_dir)) existing_contents = sorted(channel_dir.iterdir()) last_existing_blob_number = get_mdblob_sequence_number(existing_contents[-1]) if existing_contents else None index = 0 while index < len(metadata_list): # Squash several serialized and signed metadata entries into a single file data, index = entries_to_chunk(metadata_list, self._CHUNK_SIZE_LIMIT, start_index=index) # Blobs ending with TODELETE entries increase the final timestamp as a workaround for delete commands # possessing no timestamp. if metadata_list[index - 1].status == TODELETE: blob_timestamp = clock.tick() else: blob_timestamp = metadata_list[index - 1].timestamp # The final file in the sequence should get a timestamp that is higher than the timestamp of # the last channel contents entry. This final timestamp then should be returned to the calling function # to be assigned to the corresponding channel entry. # Otherwise, the local channel version will never become equal to its timestamp. 
if index >= len(metadata_list): blob_timestamp = clock.tick() # Check that the mdblob we're going to create has a greater timestamp than the existing ones assert last_existing_blob_number is None or (blob_timestamp > last_existing_blob_number) blob_filename = Path(channel_dir, str(blob_timestamp).zfill(12) + BLOB_EXTENSION + '.lz4') assert not blob_filename.exists() # Never ever write over existing files. blob_filename.write_bytes(data) last_existing_blob_number = blob_timestamp with db_session: thumb_exists = db.ChannelThumbnail.exists( lambda g: g.public_key == self.public_key and g.origin_id == self.id_ and g.status != TODELETE ) descr_exists = db.ChannelDescription.exists( lambda g: g.public_key == self.public_key and g.origin_id == self.id_ and g.status != TODELETE ) flags = CHANNEL_THUMBNAIL_FLAG * (int(thumb_exists)) + CHANNEL_DESCRIPTION_FLAG * (int(descr_exists)) # Note: the timestamp can end up messed in case of an error # Make torrent out of dir with metadata files torrent, infohash = create_torrent_from_dir(channel_dir, self._channels_dir / (self.dirname + ".torrent")) torrent_date = datetime.utcfromtimestamp(torrent[b'creation date']) return { "infohash": infohash, "timestamp": last_existing_blob_number, "torrent_date": torrent_date, "reserved_flags": flags, }, torrent
async def create_torrent(self, request):
    """
    Build a torrent from the file list in the request body and return the
    resulting metainfo base64-encoded; optionally export it to disk and/or
    start downloading it immediately (?download=1).

    :param request: aiohttp request with a JSON body describing the torrent.
    :return: a RESTResponse with the b64-encoded torrent, or an error response.
    """
    parameters = await request.json()
    params = {}

    if parameters.get('files'):
        file_path_list = [ensure_unicode(f, 'utf-8') for f in parameters['files']]
    else:
        return RESTResponse({"error": "files parameter missing"}, status=HTTP_BAD_REQUEST)

    if parameters.get('description'):
        params['comment'] = parameters['description']

    if parameters.get('trackers'):
        tracker_url_list = parameters['trackers']
        # First tracker becomes the primary announce URL; the full list is kept too.
        params['announce'] = tracker_url_list[0]
        params['announce-list'] = tracker_url_list

    name = 'unknown'
    if parameters.get('name'):
        name = parameters['name']
        params['name'] = name

    export_dir = None
    if parameters.get('export_dir'):
        export_dir = Path(parameters['export_dir'])

    from tribler_core.version import version_id
    params['created by'] = '%s version: %s' % ('Tribler', version_id)
    params['nodes'] = False
    params['httpseeds'] = False
    params['encoding'] = False
    params['piece length'] = 0  # auto

    try:
        result = await self.session.dlmgr.create_torrent_file(file_path_list, recursive_bytes(params))
    except (IOError, UnicodeDecodeError, RuntimeError) as e:
        self._logger.exception(e)
        return return_handled_exception(request, e)

    metainfo_dict = bdecode_compat(result['metainfo'])

    # Optionally write the .torrent file next to the requested export directory.
    if export_dir and export_dir.exists():
        save_path = export_dir / ("%s.torrent" % name)
        with open(save_path, "wb") as fd:
            fd.write(result['metainfo'])

    # Start downloading the freshly created torrent when ?download=1 was passed.
    if request.query.get('download') == "1":
        download_config = DownloadConfig()
        download_config.set_dest_dir(result['base_path'] if len(file_path_list) == 1 else result['base_dir'])
        try:
            self.session.dlmgr.start_download(tdef=TorrentDef(metainfo_dict), config=download_config)
        except DuplicateDownloadException:
            self._logger.warning("The created torrent is already being downloaded.")

    return RESTResponse(json.dumps({
        "torrent": base64.b64encode(result['metainfo']).decode('utf-8')
    }))