def update_loading_state(self, title, speed, buffered, connected_peers, available_peers):
    """Refresh the loading-screen detail labels with the latest transfer stats.

    On the first call the detail items are still hidden on the canvas; they
    are switched to the "normal" state once, then the value texts are updated
    on every call.
    """
    if not self.loading_details_visible:
        # Reveal all detail items in the same order the original setup used.
        detail_items = (
            self.loading_speed_label,
            self.loading_buffered_label,
            self.loading_speed_value,
            self.loading_buffered_value,
            self.loading_peers_connected_label,
            self.loading_peers_available_label,
            self.loading_peers_connected_value,
            self.loading_peers_available_value,
        )
        for item in detail_items:
            self.background_canvas.itemconfig(item, state="normal")
        self.loading_details_visible = True

    self.background_canvas.itemconfig(self.loading_speed_value, text=write_size(speed) + "ps")
    self.background_canvas.itemconfig(self.loading_buffered_value, text=write_size(buffered))
    self.background_canvas.itemconfig(self.loading_peers_available_value, text=str(available_peers))
    self.background_canvas.itemconfig(self.loading_peers_connected_value, text=str(connected_peers))
def stop_slowest_peer(self):
    """Evict the slowest long-connected peer to free a slot for a faster one.

    Always returns True (so it can keep running as a repeating task). Nothing
    is stopped when peer-stopping is disabled, when there is still plenty of
    connection room, when no peer has been connected long enough (30s), or
    when even the slowest peer is downloading faster than 100kbps.
    """
    if not self.should_stop_peers():
        return True
    if self.max_peers_connected - len(self.connected_peers) > 3:
        # Enough free slots remain to simply connect more peers instead.
        return True

    # Only peers connected for more than 30 seconds are eviction candidates.
    candidates = [
        peer for peer in self.connected_peers
        if current_time() - peer.connection_manager.connected_on > 30000
    ]
    if not candidates:
        return True

    slowest = min(candidates, key=lambda peer: peer.counter.value)
    if slowest.counter.value > 100000:
        # Even the slowest peer exceeds 100kbps; keep everyone.
        return True

    Logger().write(
        LogVerbosity.Info,
        str(slowest.id) + " stopping peer to find a potential faster one. Peer speed last 5 seconds was "
        + str(write_size(slowest.counter.value)) + ", total: " + str(write_size(slowest.counter.total)))
    slowest.stop_async("Slowest")
    return True
def check_size(self):
    """Log the deep size of every attribute of this object, largest first."""
    # Measure all attributes once, then sort descending by measured size.
    measured = [(name, asizeof.asizeof(attr)) for name, attr in self.__dict__.items()]
    measured.sort(key=lambda entry: entry[1], reverse=True)
    for name, size in measured:
        Logger().write(
            LogVerbosity.Important,
            " Size of " + str(name) + ": " + write_size(size))
def check_size(self):
    """Log the deep size of every attribute (largest first) and recurse into
    any attribute that exposes its own check_size() report."""
    measured = sorted(
        ((name, attr, asizeof.asizeof(attr)) for name, attr in self.__dict__.items()),
        key=lambda entry: entry[2],
        reverse=True)
    for name, attr, size in measured:
        Logger().write(LogVerbosity.Important, " Size of " + str(name) + ": " + write_size(size))
        # Child objects that implement check_size() report their own breakdown.
        if getattr(attr, "check_size", None) is not None:
            attr.check_size()
def __init__(self, path, title, size, season, episode, media_type, media_file, img, seen):
    """Plain data holder for a media entry.

    Note: `size` is stored already formatted for display via write_size()
    (a human-readable string), not as the raw byte count.
    """
    self.path = path
    self.title = title
    self.season = season
    self.episode = episode
    self.type = media_type
    self.media_file = media_file
    self.img = img
    self.seen = seen
    self.size = write_size(size)
def check_pieces_size(self):
    """Log deep-memory statistics for the piece table, grouped by piece
    state (done / not done) and by position relative to the stream index."""
    all_pieces = list(self._pieces.values())

    Logger().write(
        LogVerbosity.Important,
        " _pieces size: " + write_size(asizeof.asizeof(self._pieces)))

    pending = [p for p in all_pieces if not p.done]
    completed = [p for p in all_pieces if p.done]
    behind_stream = [p for p in all_pieces if p.index < self.torrent.stream_position]
    # Pieces more than ~50MB (in piece units) ahead of the stream position.
    far_ahead = [
        p for p in all_pieces
        if p.index > self.torrent.stream_position + (50000000 // self.piece_length)
    ]

    Logger().write(
        LogVerbosity.Important,
        " pieces not done: " + str(len(pending)) + " - " + write_size(asizeof.asizeof(pending)))
    Logger().write(
        LogVerbosity.Important,
        " pieces done: " + str(len(completed)) + " - " + write_size(asizeof.asizeof(completed)))
    Logger().write(
        LogVerbosity.Important,
        " pieces < stream index: " + str(len(behind_stream)) + " - " + write_size(asizeof.asizeof(behind_stream)))
    Logger().write(
        LogVerbosity.Important,
        " pieces > stream index + 50mb: " + str(len(far_ahead)) + " - " + write_size(asizeof.asizeof(far_ahead)))

    with_blocks = len([p for p in all_pieces if len(p._blocks) > 0])
    Logger().write(
        LogVerbosity.Important,
        " pieces with initialized blocks: " + str(with_blocks))

    buffer = self.torrent.stream_manager.buffer
    if buffer is not None:
        ready = [p for p in buffer.data_ready]
        Logger().write(
            LogVerbosity.Important,
            " pieces in data_ready: " + str(len(ready)) + " - " + write_size(asizeof.asizeof(ready)))
def update(self):
    """Toggle the download manager between Full and ImportantOnly modes.

    Switches to ImportantOnly when the buffer holds a lot of data that is
    not yet consecutively playable, or when a high-priority piece near the
    queue head has been starved for >10s despite healthy overall speed
    (>200kbps). Switches back to Full once the non-ready backlog drops
    below the stop threshold.
    """
    unready_backlog = self.torrent.bytes_total_in_buffer - self.torrent.bytes_ready_in_buffer
    buffer_large_but_fragmented = unready_backlog > Settings.get_int("important_only_start_threshold")

    queue_head = self.torrent.download_manager.queue[0:10]
    starved_priority_piece = any(
        piece.max_priority_set_time != 0 and current_time() - piece.max_priority_set_time > 10000
        for piece in queue_head
    ) and self.torrent.network_manager.average_download_counter.value > 200000

    current_mode = self.torrent.download_manager.download_mode
    if (buffer_large_but_fragmented or starved_priority_piece) and current_mode == DownloadMode.Full:
        Logger().write(LogVerbosity.Info, "Entering ImportantOnly download mode: " + write_size(self.torrent.bytes_total_in_buffer) + " in buffer total")
        self.torrent.download_manager.download_mode = DownloadMode.ImportantOnly
    elif self.torrent.bytes_total_in_buffer - self.torrent.bytes_ready_in_buffer < Settings.get_int("important_only_stop_threshold") and current_mode == DownloadMode.ImportantOnly:
        Logger().write(LogVerbosity.Info, "Leaving ImportantOnly download mode")
        self.torrent.download_manager.download_mode = DownloadMode.Full
def get_log_files():
    """Return the known log files as JSON: (name, path, human-readable size) triples."""
    entries = []
    for name, path, size in Logger.get_log_files():
        entries.append((name, path, write_size(size)))
    return to_JSON(entries)