class Timer:
    """Thin wrapper around a QTimer that can translate a words-per-minute
    rate into a per-word firing interval."""

    @staticmethod
    def __convert_2_local_time(wpm):
        """Convert a words-per-minute rate into milliseconds per word.

        Bug fix: returns an int now — ``60 * 1000 / wpm`` is a float under
        Python 3, and QTimer.start() requires an integer millisecond
        interval (a float raises TypeError in PySide/PyQt).
        """
        return round(60 * 1000 / wpm)

    def __init__(self):
        self.__timer = QTimer()

    def start(self, time, function, make_conversion=True):
        """(Re)create the timer and call *function* on every timeout.

        *time* is a words-per-minute rate when *make_conversion* is True,
        otherwise it is used directly as a millisecond interval.
        """
        # A fresh QTimer each start avoids stacking multiple connections
        # of `function` onto one timer across repeated start() calls.
        self.__timer = QTimer()
        if make_conversion:
            time = self.__convert_2_local_time(time)
        self.__timer.timeout.connect(function)
        self.__timer.start(time)

    def stop(self):
        self.__timer.stop()

    def delete(self):
        # Drop the QTimer reference; is_not_deleted() reports this state.
        self.__timer = None

    def is_active(self):
        """True if a timer exists and is currently running."""
        return self.__timer and self.__timer.isActive()

    def is_not_deleted(self):
        return self.__timer is not None
class CountdownModel(QObject):
    """CountdownModel is the model class for the GUI.

    It holds the counter property and handles event generated by the
    click on the button.
    """

    def __init__(self):
        QObject.__init__(self)
        # Value to count from
        self.total = 30
        self._remaining = 30
        # Timer ticking once per second while the countdown runs.
        self.timer = QTimer()
        self.timer.setInterval(1000)
        self.timer.timeout.connect(self.process_timer)

    def set_remaining(self, val):
        """Property setter: update the remaining seconds and notify."""
        if val != self._remaining:
            self._remaining = val
            self.remaining_changed.emit()
            # If the timer is inactive, update also a value to count from
            if not self.timer.isActive():
                self.total = self.remaining

    remaining_changed = Signal()
    # Property holding actual remaining number of seconds
    remaining = Property(int, lambda self: self._remaining, set_remaining,
                         notify=remaining_changed)

    # Emitted once when the countdown reaches zero.
    timeout = Signal()

    @Slot()
    def process_timer(self):
        """Handler for the timer event.

        Decrease the remaining value or stop the timer and emit timeout
        signal if the time is over."""
        if self.remaining == 1:
            self.timer.stop()
            self.remaining = self.total  # Reset the timer value
            self.timeout.emit()
            return
        self.remaining -= 1

    @Slot()
    def start(self):
        """Start the countdown"""
        print("Starting")
        print(self.total, self.remaining)
        self.timer.start()

    @Slot()
    def pause(self):
        """Pause the countdown"""
        print("Pausing")
        print(self.total, self.remaining)
        self.timer.stop()

    @Slot()
    def stop(self):
        """Stop (and reset) the countdown"""
        print("Stopping")
        print(self.total, self.remaining)
        self.timer.stop()
        self.remaining = self.total
class RandomTimer(QObject):
    """QML-friendly QTimer wrapper exposing ``interval`` and ``active``
    properties plus a helper for picking a random interval."""

    timeout = Signal()
    intervalChanged = Signal()
    activeChanged = Signal()

    def __init__(self):
        super(RandomTimer, self).__init__()
        self.timer = QTimer()
        # Re-emit the wrapped timer's timeout as this object's signal.
        self.timer.timeout.connect(self.timeout)

    def interval(self):
        """Current interval in milliseconds (property getter)."""
        return self.timer.interval()

    def setInterval(self, msec):
        """Set the interval; notifies only on an actual change."""
        if self.timer.interval() != msec:
            self.timer.setInterval(msec)
            self.intervalChanged.emit()
            print("interval = {}".format(self.timer.interval()))

    @Slot()
    def isActive(self):
        return self.timer.isActive()

    @Slot()
    def start(self):
        """Start the timer if it is not already running."""
        if not self.timer.isActive():
            self.timer.start()
            self.activeChanged.emit()

    @Slot()
    def stop(self):
        """Stop the timer if it is running."""
        if self.timer.isActive():
            self.timer.stop()
            self.activeChanged.emit()

    @Slot(int, int, result=int)
    def randomInterval(self, min, max):
        """Return a uniformly random interval in [min, max] milliseconds.

        The `min`/`max` parameter names shadow builtins but are kept for
        compatibility with existing (QML) callers.
        """
        span = max - min  # renamed from `range` to stop shadowing the builtin
        msec = min + random.randint(0, span)
        return msec

    interval = Property(int, interval, setInterval, notify=intervalChanged)
    active = Property(bool, isActive, notify=activeChanged)
class TestThread(QObject):
    """Emits a ramped sine-wave data set on a periodic internal timer."""

    data = Signal(dict)

    def __init__(self, timer, name, ptnum, amplIncr, rampPts, parent=None):
        QObject.__init__(self, parent)
        self.timer = timer              # tick period in ms for the internal timer
        self.name = name
        self.pointNum = ptnum           # samples per emitted curve
        self.amplIncr = amplIncr        # amplitude step per tick
        self.rampPoints = rampPts       # ramp turnaround point
        self.amplCounter = 1
        self.counterIncr = 1
        self.internalTimer = None

    @Slot()
    def startTimer(self):
        """Create and start the repeating internal QTimer."""
        ticker = QTimer(self)
        ticker.timeout.connect(self.calcData)
        ticker.setSingleShot(False)
        ticker.start(self.timer)
        self.internalTimer = ticker

    def calcData(self):
        """Build one sine period at the current ramp amplitude and emit it."""
        xs = np.arange(0, self.pointNum) * 2 * np.pi / self.pointNum
        ys = np.sin(xs) * (self.amplIncr * self.amplCounter)
        payload = {"name": self.name, "x": xs, "y": ys}
        # Advance the ramp counter; reverse direction at either extreme.
        self.amplCounter += self.counterIncr
        if self.amplCounter in (self.rampPoints, -1 * self.rampPoints):
            self.counterIncr = -self.counterIncr
            self.amplCounter += self.counterIncr
        self.data.emit(payload)

    @Slot()
    def stopTimer(self):
        """Stop and release the internal timer, waiting until inactive."""
        if self.internalTimer is not None:
            self.internalTimer.stop()
            while self.internalTimer.isActive():
                sleep(0.05)
            self.internalTimer = None
class ScatterDataMofifier(QObject):
    """Drives a 3D scatter view from simulation data, advancing the
    simulation on a timer.

    (The class name keeps the original 'Mofifier' spelling so existing
    references continue to work.)
    """

    def __init__(self, scatter, s_data):
        # Bug fix: the QObject base class was never initialized, which
        # breaks Qt's signal/slot and ownership machinery in PySide/PyQt.
        super().__init__()
        self.scatter = scatter
        self._s_data = s_data
        self.timer = QTimer()
        self.series = QtDataVisualization.QScatter3DSeries()
        # Initial placeholder points shown before the first draw.
        self.data = [
            QVector3D(0.5, 0.5, 0.5),
            QVector3D(-0.3, -0.5, -0.4),
            QVector3D(0.0, -0.3, 0.2)
        ]
        self.series.dataProxy().addItems(self.data)
        self.series.setItemSize(0.5)
        self.series.setMeshSmooth(True)
        self.scatter.addSeries(self.series)
        self.scatter.setAspectRatio(1.0)
        self.scatter.setHorizontalAspectRatio(1.0)
        # Setting of plot limits: a cubic box of the simulation's width.
        self.scatter.axisX().setRange(0, s_data.getBoxWidth())
        self.scatter.axisY().setRange(0, s_data.getBoxWidth())
        self.scatter.axisZ().setRange(0, s_data.getBoxWidth())
        # Presumably getTimeStep() is in seconds and this converts to
        # milliseconds for QTimer — TODO confirm.
        self._time_step = s_data.getTimeStep() * 1000
        self.timer.timeout.connect(self.makeStep)
        # Drawing of the data and starting of the timer.
        self.drawData()
        self.timer.start(self._time_step)

    def drawData(self):
        """Replace the proxy's points with the current simulation coordinates."""
        data = []
        for mol in self._s_data.getCurrentCoordinates():
            data.append(QVector3D(*mol[:3]))
        # NOTE(review): removes len(data) items, i.e. assumes the proxy
        # currently holds as many points as the new frame provides —
        # verify when the molecule count can change between frames.
        self.series.dataProxy().removeItems(0, len(data))
        self.series.dataProxy().addItems(data)

    def makeStep(self):
        """Advance the simulation one step and redraw."""
        self._s_data.makeStep()
        self.drawData()

    def toggleStopAnimate(self):
        """Pause the animation if it is running, otherwise resume it."""
        if (self.timer.isActive()):
            self.timer.stop()
        else:
            self.timer.start(self._time_step)
def open(self, url, timeout=60):
    """Load *url* and block until the page finishes or *timeout* seconds pass.

    Returns the page HTML on success; on timeout prints a message and
    returns None.
    """
    event_loop = QEventLoop()
    watchdog = QTimer()
    watchdog.setSingleShot(True)
    # Either the finished load or the watchdog ends the local event loop.
    watchdog.timeout.connect(event_loop.quit)
    self.loadFinished.connect(event_loop.quit)
    self.load(QUrl(url))
    watchdog.start(timeout * 1000)
    event_loop.exec_()  # block here until one of the two signals fires
    if not watchdog.isActive():
        # The watchdog fired first: the download did not finish in time.
        print('Request timed out:', url)
        return None
    # The load finished first; cancel the pending timeout and return HTML.
    watchdog.stop()
    return self.html()
def open(self, url: str, timeout: int = 10):
    """Load *url*, blocking until loadFinished fires or *timeout* seconds
    elapse; logs a message on timeout."""
    event_loop = QEventLoop()
    watchdog = QTimer()
    watchdog.setSingleShot(True)
    # Both the page load and the watchdog quit the local event loop.
    watchdog.timeout.connect(event_loop.quit)
    self.loadFinished.connect(event_loop.quit)
    self.load(QUrl(url))
    watchdog.start(timeout * 1000)
    event_loop.exec_()  # delay here until download finished or timed out
    if watchdog.isActive():
        # Downloaded successfully before the deadline; cancel the timeout.
        watchdog.stop()
    else:
        logger.info('Request timed out: %s' % url)
def wait_signal(signal, timeout=5000):
    """Generator helper: after the yielded code runs, spin a local event
    loop until *signal* fires or *timeout* (ms) elapses.

    A falsy *timeout* disables the watchdog entirely.
    """
    loop = QEventLoop()
    signal.connect(loop.quit)
    # Hand control back to the caller's body before blocking.
    yield
    watchdog = None
    if timeout:
        watchdog = QTimer()
        watchdog.setInterval(timeout)
        watchdog.setSingleShot(True)
        watchdog.timeout.connect(loop.quit)
        watchdog.start()
    loop.exec_()
    signal.disconnect(loop.quit)
    # If the signal arrived first, cancel the still-pending watchdog.
    if watchdog and watchdog.isActive():
        watchdog.stop()
class TestScatter(QObject):
    """Emits successive (x, sin(x)) samples on a periodic internal timer."""

    data = Signal(dict)

    def __init__(self, timer, name, xInc, parent=None):
        QObject.__init__(self, parent)
        self.timer = timer          # tick period in ms for the internal timer
        self.name = name
        self.xInc = xInc            # x advance per tick
        self.currX = 0
        self.internalTimer = None

    @Slot()
    def startTimer(self):
        """Create and start the repeating internal QTimer."""
        ticker = QTimer(self)
        ticker.timeout.connect(self.calcData)
        ticker.setSingleShot(False)
        ticker.start(self.timer)
        self.internalTimer = ticker

    def calcData(self):
        """Emit the next (x, sin(x)) sample and advance the x position."""
        x = self.currX
        y = np.sin(x)
        self.currX = self.currX + self.xInc
        self.data.emit({self.name: [x, y]})

    @Slot()
    def stopTimer(self):
        """Stop and release the internal timer, waiting until inactive."""
        if self.internalTimer is not None:
            self.internalTimer.stop()
            while self.internalTimer.isActive():
                sleep(0.05)
            self.internalTimer = None
class IdleDetection(QObject):
    """Watches a widget's input events and flags the user idle after 10 s
    without keyboard or mouse activity."""

    def __init__(self, parent):
        super(IdleDetection, self).__init__(parent)
        self._parent: QWidget = parent

        # Report user inactivity; precision is irrelevant here, so a
        # VeryCoarseTimer is sufficient.
        self._idle_timer = QTimer()
        self._idle_timer.setSingleShot(True)
        self._idle_timer.setTimerType(Qt.VeryCoarseTimer)
        self._idle_timer.setInterval(10000)
        # Detect inactivity for automatic session save
        self._idle_timer.timeout.connect(self.set_inactive)

        self.idle = False
        # Bug fix: was `self.parent.installEventFilter(self)` — but
        # `self.parent` is QObject's parent() *method*, so that raised
        # AttributeError. The filter must go on the watched widget.
        self._parent.installEventFilter(self)

    def is_active(self):
        # NOTE(review): despite the name, this returns the *idle* flag
        # (True when idle). Kept as-is for interface compatibility —
        # confirm against callers before renaming or inverting.
        return self.idle

    def set_active(self):
        """Mark the user active and cancel the pending idle timeout."""
        self.idle = False
        self._idle_timer.stop()

    def set_inactive(self):
        """Timer callback: mark the user idle."""
        self.idle = True

    def eventFilter(self, obj, eve):
        """Restart the idle countdown based on observed events.

        Activity events (key press, mouse move/press) mark the user
        active; any other event (re)starts the idle timer if it is not
        running. Always returns False so events propagate normally.
        """
        if eve is None or obj is None:
            return False
        if eve.type() == QEvent.KeyPress or \
                eve.type() == QEvent.MouseMove or \
                eve.type() == QEvent.MouseButtonPress:
            self.set_active()
            return False
        if not self._idle_timer.isActive():
            self._idle_timer.start()
        return False
class LcdNumber(QLCDNumber):
    """Frameless HH:MM:SS stopwatch display that ticks once per second."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.setWindowFlag(Qt.FramelessWindowHint, True)
        self.setAttribute(Qt.WA_StyledBackground)
        self.resize(130, 40)
        self.move(680, 150)
        self.setDigitCount(8)  # room for "HH:MM:SS"
        self.setMode(QLCDNumber.Dec)
        self.setSegmentStyle(QLCDNumber.Flat)
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.update_time)
        # Elapsed time is tracked as a datetime starting at midnight so
        # strftime can format it directly.
        self.time = datetime.strptime('00:00:00', '%H:%M:%S')
        self.one_seconds = timedelta(seconds=1)
        self.timer.start(1000)
        self.display(self.time.strftime('%H:%M:%S'))
        self.setStyleSheet("""
            QLCDNumber{
                border:none;
            }
        """)
        # (Removed a dead `self.timer.interval()` expression statement —
        # its return value was discarded and it has no side effect.)

    def re_init(self):
        """Restart the stopwatch from 00:00:00."""
        self.timer.start()
        self.time = datetime.strptime('00:00:00', '%H:%M:%S')
        self.display(self.time.strftime('%H:%M:%S'))
        # (Removed a dead `self.timer.interval()` call and a stray `pass`.)

    def update_time(self):
        """Timer callback: advance by one second and refresh the display."""
        self.time += self.one_seconds
        self.display(self.time.strftime('%H:%M:%S'))

    def stop(self):
        # NOTE(review): despite the name this *toggles* — it restarts the
        # timer when already stopped. Kept for interface compatibility.
        if self.timer.isActive():
            self.timer.stop()
        else:
            self.timer.start()
class SupportDialog(QObject):
    """Modal "contact support / feedback" dialog.

    Lets the user pick a subject, type a message, optionally attach
    archived logs, and sends everything through the parent's web API via
    a ProgressPipe of tasks (compress -> upload -> send). Also schedules
    a one-shot feedback reminder DAYS_TO_FEEDBACK days after first run.
    """

    # Retry interval (ms) for re-offering the feedback form while busy.
    SHORT_FEEDBACK_INTERVAL = 5 * 60 * 1000
    # Days after initial setup before the feedback form is first offered.
    DAYS_TO_FEEDBACK = 7
    DROPDOWN_BACKGROUND_COLOR = "#f78d1e"
    DROPDOWN_COLOR = "white"

    # Internal signal fired when any sending step fails.
    _sending_error = Signal()

    # Maps combo-box index to the subject code sent to the web API.
    SUBJECT = {
        1: "TECHNICAL",
        2: "OTHER",
        3: "FEEDBACK",
    }

    def __init__(self, parent, parent_window, config, dp=1,
                 selected_index=0):
        QObject.__init__(self, parent)
        self._parent = parent
        self._parent_window = parent_window
        self._config = config
        # Font scale factor — presumably a device-pixel ratio; TODO confirm.
        self._dp = dp
        self._selected_index = selected_index
        self._dialog = QDialog(parent_window)
        self._dialog.setWindowFlags(Qt.Dialog)
        self._dialog.setAttribute(Qt.WA_MacFrameworkScaled)
        self._is_opened = False
        self._pipe = None
        self._feedback_mode = False
        self._ui = Ui_Dialog()
        self._ui.setupUi(self._dialog)
        self._init_ui()
        # Saved so feedback mode can temporarily override closeEvent.
        self._old_close_event = self._dialog.closeEvent
        self._feedback_timer = QTimer(self)
        self._feedback_timer.setSingleShot(True)
        self._feedback_timer.timeout.connect(self._show_feedback_form)
        self._parent.service_started.connect(self._check_feedback_needed)
        self._parent.exit_request.connect(self._on_exit_request)

    def _init_ui(self):
        """Populate the subject combo box, style it and wire UI signals."""
        self._ui.pushButton.setEnabled(False)
        self._ui.comboBox.addItem(tr("---Select Subject---"))
        self._ui.comboBox.addItem(tr("Technical Question"))
        self._ui.comboBox.addItem(tr("Other Question"))
        self._ui.comboBox.addItem(tr("Feedback"))
        self._ui.comboBox.setCurrentIndex(self._selected_index)
        # Highlight colours for the closed combo box...
        palette = self._ui.comboBox.palette()
        palette.setColor(QPalette.HighlightedText,
                         QColor(self.DROPDOWN_COLOR))
        palette.setColor(QPalette.Highlight,
                         QColor(self.DROPDOWN_BACKGROUND_COLOR))
        self._ui.comboBox.setPalette(palette)
        # ...and for its popup list view.
        palette = self._ui.comboBox.view().palette()
        palette.setColor(QPalette.HighlightedText,
                         QColor(self.DROPDOWN_COLOR))
        palette.setColor(QPalette.Highlight,
                         QColor(self.DROPDOWN_BACKGROUND_COLOR))
        self._ui.comboBox.view().setPalette(palette)
        self._set_tooltip()
        self._ui.comboBox.currentIndexChanged.connect(self._on_index_changed)
        self._ui.plainTextEdit.textChanged.connect(self._set_tooltip)
        self._ui.pushButton.clicked.connect(self._on_send_clicked)
        self._ui.text_label.linkActivated.connect(self._on_link_activated)
        self._sending_error.connect(self._clear_pipe_state)
        self._set_fonts()

    def _set_fonts(self):
        """Scale the controls' fonts by the ``_dp`` factor."""
        ui = self._ui
        controls = [
            ui.plainTextEdit, ui.pushButton, ui.comboBox, ui.text_label,
            ui.checkBox
        ]
        for control in controls:
            font = control.font()
            font_size = control.font().pointSize() * self._dp
            if font_size > 0:
                control_font = QFont(font.family(), font_size)
                control_font.setBold(font.bold())
                control.setFont(control_font)

    def set_selected_index(self, selected_index):
        """Select a subject; applied immediately if the dialog is open."""
        self._selected_index = selected_index
        if self._is_opened:
            self._ui.comboBox.setCurrentIndex(self._selected_index)

    def show(self):
        """Show the dialog modally; blocks until it is closed, then
        resets the form state for the next opening."""
        if self._parent.dialogs_opened():
            return

        self._is_opened = True
        logger.debug("Support dialog opening...")
        self._pipe = None
        self.set_selected_index(self._selected_index)
        self._ui.checkBox.setChecked(False)
        # Subject is fixed while collecting feedback.
        self._ui.comboBox.setEnabled(not self._feedback_mode)
        self._dialog.exec_()
        logger.debug("Support dialog closed")
        if self._pipe:
            # Dialog closed while a send was still in flight: stop it.
            try:
                self._pipe.stop()
                self._clear_pipe_state()
            except Exception as e:
                logger.error("Unexpected error stopping pipe: (%s)", e)
        self._is_opened = False
        self._selected_index = 0
        self._ui.plainTextEdit.document().clear()
        self._ui.checkBox.setChecked(False)

    def dialog_opened(self):
        # True while the dialog is shown.
        return self._is_opened

    def close(self):
        self._dialog.close()

    def _set_tooltip(self):
        """Enable the send button only when subject and message are set,
        and explain why via the button tooltip."""
        if not self._selected_index:
            tooltip = tr("Please select subject")
            self._ui.pushButton.setEnabled(False)
        elif not self._ui.plainTextEdit.document().toPlainText():
            tooltip = tr("Message can't be empty")
            self._ui.pushButton.setEnabled(False)
        else:
            tooltip = tr("Click to send message")
            self._ui.pushButton.setEnabled(True)
        self._ui.pushButton.setToolTip(tooltip)

    def _on_index_changed(self, selected_index):
        self._selected_index = selected_index
        self._set_tooltip()

    def _on_send_clicked(self):
        """Build and start the pipe of sending tasks."""
        self._dialog.setEnabled(False)
        self._pipe = ProgressPipe(self, self._ui.pushButton,
                                  timeout=1000,
                                  final_text=tr("Sent"),
                                  final_timeout=500)
        self._pipe.pipe_finished.connect(self._on_pipe_finished)
        if self._ui.checkBox.isChecked():
            # Attach logs: compress, then upload, before the message.
            self._pipe.add_task(tr("Compressing"), self._archive_logs())
            self._pipe.add_task(tr("Uploading"), self._upload_file())
        self._pipe.add_task(tr("Sending"), self._send_message())
        self._pipe.start()

    def _on_pipe_finished(self):
        self._clear_feedback_flag()
        self.close()

    def _clear_pipe_state(self):
        """Re-enable the dialog after a failed or stopped send."""
        self._dialog.setEnabled(True)
        try:
            self._pipe.pipe_finished.disconnect(self._on_pipe_finished)
        except Exception as e:
            logger.warning("Can't disconnect signal: %s", e)
        self._ui.pushButton.setText(tr("SEND"))

    def _send_message(self):
        """Return a closure that posts the message via the support web API.

        The closure raises SendingError (after notifying the user) when
        the API call fails.
        """
        def send(log_file_name=""):
            logger.debug("Support compressed log_file_name %s",
                         log_file_name)
            if self._selected_index not in self.SUBJECT:
                logger.warning("Attempt to send message to support "
                               "with invalid subject")
                return

            subject = self.SUBJECT[self._selected_index]
            res = self._parent.web_api.send_support_message(
                subject,
                self._ui.plainTextEdit.document().toPlainText(),
                log_file_name)
            was_error = False
            msg = tr("Can't send message to support")
            if res and "result" in res:
                if res["result"] != "success":
                    was_error = True
                    msg = str(res.get("info", msg))
            else:
                was_error = True
            if was_error:
                self._parent.show_tray_notification(msg)
                self._sending_error.emit()
                raise SendingError(msg)

        return send

    def _archive_logs(self):
        """Return a closure that zips all log files into a new archive.

        The closure reports progress through its own function attributes
        (archive.size, archive.progress, archive.stop — set by the pipe)
        and returns the archive path, or None when stopped early.
        """
        def archive():
            # uses function attributes to track progress
            # archive.size, archive.progress, archive.stop
            logs_dir = get_bases_dir(self._config.sync_directory)
            log_files = glob("{}{}*.log".format(logs_dir, os.sep))
            log_sizes = list(map(os.path.getsize, log_files))
            # mark overall size
            archive.size = sum(log_sizes)

            # Drop archives left over from previous attempts.
            old_archives = glob("{}{}2*_logs.zip".format(logs_dir, os.sep))
            try:
                list(map(remove_file, old_archives))
            except Exception as e:
                logger.warning("Can't delete old archives. Reason: (%s)", e)

            if get_free_space(logs_dir) < archive.size // 5:
                # archive.size // 5 is approx future archive size
                msg = tr("Insufficient disk space to archive logs. "
                         "Please clean disk")
                self._parent.show_tray_notification(msg)
                self._sending_error.emit()
                raise SendingError(msg)

            archive_name = time.strftime('%Y%m%d_%H%M%S_logs.zip')
            archive_path = "{}{}{}".format(logs_dir, os.sep, archive_name)
            archive_dir = op.dirname(archive_path)
            f = zipfile.ZipFile(archive_path, "w",
                                compression=zipfile.ZIP_DEFLATED,
                                compresslevel=9)
            try:
                with cwd(archive_dir):
                    for i, log_file in enumerate(log_files):
                        if not op.isfile(log_file):
                            continue
                        f.write(op.basename(log_file))
                        # mark progress
                        archive.progress += log_sizes[i]
                        if archive.stop:
                            # Early exit; the finally block removes the
                            # partial archive.
                            return
            except Exception as e:
                msg = tr("Can't archive logs.")
                logger.warning(msg + " Reason: (%s)", e)
                self._parent.show_tray_notification(msg)
                self._sending_error.emit()
                raise SendingError(msg)
            finally:
                f.close()
                if archive.stop:
                    remove_file(archive_path)

            return archive_path

        return archive

    def _upload_file(self):
        """Return a closure that uploads the given archive via the web API.

        The closure reports progress through its function attributes
        (upload.size, upload.progress, upload.stop) and returns the
        server-side file name.
        """
        def upload(path):
            # uses function attributes to track progress
            # upload.size, upload.progress, upload.stop
            upload.size = op.getsize(path)
            res = self._parent.web_api.upload_file(
                path, "application/zip", callback)
            was_error = False
            msg = tr("Can't upload archive file")
            if res and "result" in res:
                if res["result"] == "success":
                    filename = res.get("file_name", "")
                else:
                    was_error = True
                    msg = str(res.get("info", msg))
            else:
                was_error = True
            if was_error and not upload.stop:
                self._parent.show_tray_notification(msg)
                self._sending_error.emit()
                raise SendingError(msg)
            # NOTE(review): if the upload failed while upload.stop is
            # set, `filename` is unbound here — verify the pipe never
            # reaches this point in that case.
            remove_file(path)
            return filename

        def callback(monitor):
            # Invoked by the uploader; aborts the transfer on stop.
            upload.progress = monitor.bytes_read
            if upload.stop:
                raise SendingError("Stopped")

        return upload

    def _on_link_activated(self):
        open_link(self._parent.get_help_uri())()
        self.close()

    def _check_feedback_needed(self):
        """Schedule (or immediately show) the feedback form
        DAYS_TO_FEEDBACK days after initial setup."""
        if self._feedback_timer.isActive():
            return

        start_date = get_init_done()
        now = datetime.datetime.now()
        logger.debug("Start date is %s", start_date)
        if start_date is None:
            return

        interval = (start_date - now) \
            + datetime.timedelta(days=self.DAYS_TO_FEEDBACK)
        if interval.total_seconds() <= 0:
            logger.debug("Feedback form date time is now")
            self._show_feedback_form()
        else:
            # NOTE(review): uses .seconds (not total_seconds()), so whole
            # days in the remaining interval are ignored — confirm this
            # is intended.
            self._feedback_timer.setInterval(interval.seconds * 1000)
            self._feedback_timer.start()
            logger.debug("Feedback form date time is %s", now + interval)

    def _show_feedback_form(self):
        """Open the dialog in feedback mode; retry later if it is busy."""
        if self._is_opened:
            # Dialog already in use: retry in SHORT_FEEDBACK_INTERVAL ms.
            self._feedback_timer.setInterval(self.SHORT_FEEDBACK_INTERVAL)
            self._feedback_timer.start()
            return

        self._feedback_mode = True
        self._selected_index = 3
        self._dialog.closeEvent = self._close_event
        # Temporarily swap title and label text for the feedback variant,
        # restoring both after the modal show() returns.
        window_title = self._dialog.windowTitle()
        label_text = self._ui.text_label.text()
        feedback_text = tr("Please leave your feedback for Pvtbox")
        self._ui.text_label.setText(
            "<html><head/><body><p>{}</p></body></html>".format(
                feedback_text))
        self._dialog.setWindowTitle(tr("Feedback"))
        self.show()
        self._dialog.setWindowTitle(window_title)
        self._ui.text_label.setText(label_text)

    def _close_event(self, event):
        # Feedback-mode close handler: a user-initiated close counts as
        # feedback handled.
        if event.spontaneous():
            self._clear_feedback_flag()
        self._old_close_event(event)

    def _clear_feedback_flag(self):
        """Leave feedback mode and restore the original closeEvent."""
        if self._feedback_mode:
            logger.debug("Feedback flag cleared")
            clear_init_done()
            self._feedback_mode = False
            self._dialog.closeEvent = self._old_close_event

    def _on_exit_request(self):
        if self._is_opened:
            self.close()
class DownloadTask(QObject): download_ready = Signal(QObject) download_not_ready = Signal(QObject) download_complete = Signal(QObject) download_failed = Signal(QObject) download_error = Signal(str) download_ok = Signal() download_finishing = Signal() copy_added = Signal(str) chunk_downloaded = Signal( str, # obj_id str, # str(offset) to fix offset >= 2**31 int) # length chunk_aborted = Signal() request_data = Signal( str, # node_id str, # obj_id str, # str(offset) to fix offset >= 2**31 int) # length abort_data = Signal( str, # node_id str, # obj_id str) # str(offset) to fix offset >= 2**31 possibly_sync_folder_is_removed = Signal() no_disk_space = Signal( QObject, # task str, # display_name bool) # is error wrong_hash = Signal(QObject) # task) signal_info_rx = Signal(tuple) default_part_size = DOWNLOAD_PART_SIZE receive_timeout = 20 # seconds retry_limit = 2 timeouts_limit = 2 max_node_chunk_requests = 128 end_race_timeout = 5. # seconds def __init__(self, tracker, connectivity_service, priority, obj_id, obj_size, file_path, display_name, file_hash=None, parent=None, files_info=None): QObject.__init__(self, parent=parent) self._tracker = tracker self._connectivity_service = connectivity_service self.priority = priority self.size = obj_size self.id = obj_id self.file_path = file_path self.file_hash = file_hash self.download_path = file_path + '.download' self._info_path = file_path + '.info' self.display_name = display_name self.received = 0 self.files_info = files_info self.hash_is_wrong = False self._ready = False self._started = False self._paused = False self._finished = False self._no_disk_space_error = False self._wanted_chunks = SortedDict() self._downloaded_chunks = SortedDict() self._nodes_available_chunks = dict() self._nodes_requested_chunks = dict() self._nodes_last_receive_time = dict() self._nodes_downloaded_chunks_count = dict() self._nodes_timeouts_count = dict() self._total_chunks_count = 0 self._file = None self._info_file = None 
self._started_time = time() self._took_from_turn = 0 self._received_via_turn = 0 self._received_via_p2p = 0 self._retry = 0 self._limiter = None self._init_wanted_chunks() self._on_downloaded_cb = None self._on_failed_cb = None self.download_complete.connect(self._on_downloaded) self.download_failed.connect(self._on_failed) self._timeout_timer = QTimer(self) self._timeout_timer.setInterval(15 * 1000) self._timeout_timer.setSingleShot(False) self._timeout_timer.timeout.connect(self._on_check_timeouts) self._leaky_timer = QTimer(self) self._leaky_timer.setInterval(1000) self._leaky_timer.setSingleShot(True) self._leaky_timer.timeout.connect(self._download_chunks) self._network_limited_error_set = False def __lt__(self, other): if not isinstance(other, DownloadTask): return object.__lt__(self, other) if self == other: return False if self.priority == other.priority: if self.size - self.received == other.size - other.received: return self.id < other.id return self.size - self.received < other.size - other.received return self.priority > other.priority def __le__(self, other): if not isinstance(other, DownloadTask): return object.__le__(self, other) if self == other: return True if self.priority == other.priority: if self.size - self.received == other.size - other.received: return self.id < other.id return self.size - self.received < other.size - other.received return self.priority >= other.priority def __gt__(self, other): if not isinstance(other, DownloadTask): return object.__gt__(self, other) if self == other: return False if self.priority == other.priority: if self.size - self.received == other.size - other.received: return self.id > other.id return self.size - self.received > other.size - other.received return self.priority <= other.priority def __ge__(self, other): if not isinstance(other, DownloadTask): return object.__ge__(self, other) if self == other: return True if self.priority == other.priority: if self.size - self.received == other.size - other.received: 
return self.id > other.id return self.size - self.received > other.size - other.received return self.priority <= other.priority def __eq__(self, other): if not isinstance(other, DownloadTask): return object.__eq__(self, other) return self.id == other.id def on_availability_info_received(self, node_id, obj_id, info): if obj_id != self.id or self._finished or not info: return logger.info( "availability info received, " "node_id: %s, obj_id: %s, info: %s", node_id, obj_id, info) new_chunks_stored = self._store_availability_info(node_id, info) if not self._ready and new_chunks_stored: if self._check_can_receive(node_id): self._ready = True self.download_ready.emit(self) else: self.download_error.emit('Turn limit reached') if self._started and not self._paused \ and not self._nodes_requested_chunks.get(node_id, None): logger.debug("Downloading next chunk") self._download_next_chunks(node_id) self._clean_nodes_last_receive_time() self._check_download_not_ready(self._nodes_requested_chunks) def on_availability_info_failure(self, node_id, obj_id, error): if obj_id != self.id or self._finished: return logger.info( "availability info failure, " "node_id: %s, obj_id: %s, error: %s", node_id, obj_id, error) try: if error["err_code"] == "FILE_CHANGED": self.download_failed.emit(self) except Exception as e: logger.warning("Can't parse error message. 
Reson: %s", e) def start(self, limiter): if exists(self.file_path): logger.info("download task file already downloaded %s", self.file_path) self.received = self.size self.download_finishing.emit() self.download_complete.emit(self) return self._limiter = limiter if self._started: # if we swapped task earlier self.resume() return self._no_disk_space_error = False if not self.check_disk_space(): return logger.info("starting download task, obj_id: %s", self.id) self._started = True self._paused = False self.hash_is_wrong = False self._started_time = time() self._send_start_statistic() if not self._open_file(): return self._read_info_file() for downloaded_chunk in self._downloaded_chunks.items(): self._remove_from_chunks(downloaded_chunk[0], downloaded_chunk[1], self._wanted_chunks) self.received = sum(self._downloaded_chunks.values()) if self._complete_download(): return self._download_chunks() if not self._timeout_timer.isActive(): self._timeout_timer.start() def check_disk_space(self): if self.size * 2 + get_signature_file_size(self.size) > \ get_free_space_by_filepath(self.file_path): self._emit_no_disk_space() return False return True def pause(self, disconnect_cb=True): self._paused = True if disconnect_cb: self.disconnect_callbacks() self.stop_download_chunks() def resume(self, start_download=True): self._started_time = time() self._paused = False self.hash_is_wrong = False if start_download: self._started = True self._download_chunks() if not self._timeout_timer.isActive(): self._timeout_timer.start() def cancel(self): self._close_file() self._close_info_file() self.stop_download_chunks() self._finished = True def clean(self): logger.debug("Cleaning download files %s", self.download_path) try: remove_file(self.download_path) except: pass try: remove_file(self._info_path) except: pass def connect_callbacks(self, on_downloaded, on_failed): self._on_downloaded_cb = on_downloaded self._on_failed_cb = on_failed def disconnect_callbacks(self): self._on_downloaded_cb = 
None self._on_failed_cb = None @property def ready(self): return self._ready @property def paused(self): return self._paused @property def no_disk_space_error(self): return self._no_disk_space_error def _init_wanted_chunks(self): self._total_chunks_count = math.ceil( float(self.size) / float(DOWNLOAD_CHUNK_SIZE)) self._wanted_chunks[0] = self.size def _on_downloaded(self, task): if callable(self._on_downloaded_cb): self._on_downloaded_cb(task) self._on_downloaded_cb = None def _on_failed(self, task): if callable(self._on_failed_cb): self._on_failed_cb(task) self._on_failed_cb = None def on_data_received(self, node_id, obj_id, offset, length, data): if obj_id != self.id or self._finished: return logger.debug( "on_data_received for objId: %s, offset: %s, from node_id: %s", self.id, offset, node_id) now = time() last_received_time = self._nodes_last_receive_time.get(node_id, 0.) if node_id in self._nodes_last_receive_time: self._nodes_last_receive_time[node_id] = now self._nodes_timeouts_count.pop(node_id, 0) downloaded_count = \ self._nodes_downloaded_chunks_count.get(node_id, 0) + 1 self._nodes_downloaded_chunks_count[node_id] = downloaded_count # to collect traffic info node_type = self._connectivity_service.get_self_node_type() is_share = node_type == "webshare" # tuple -> (obj_id, rx_wd, rx_wr, is_share) if self._connectivity_service.is_relayed(node_id): # relayed traffic info_rx = (obj_id, 0, length, is_share) else: # p2p traffic info_rx = (obj_id, length, 0, is_share) self.signal_info_rx.emit(info_rx) if not self._is_chunk_already_downloaded(offset): if not self._on_new_chunk_downloaded(node_id, offset, length, data): return else: logger.debug("chunk %s already downloaded", offset) requested_chunks = self._nodes_requested_chunks.get( node_id, SortedDict()) if not requested_chunks: return self._remove_from_chunks(offset, length, requested_chunks) if not requested_chunks: self._nodes_requested_chunks.pop(node_id, None) requested_count = 
sum(requested_chunks.values()) // DOWNLOAD_CHUNK_SIZE if downloaded_count * 4 >= requested_count \ and requested_count < self.max_node_chunk_requests: self._download_next_chunks(node_id, now - last_received_time) self._clean_nodes_last_receive_time() self._check_download_not_ready(self._nodes_requested_chunks) def _is_chunk_already_downloaded(self, offset): if self._downloaded_chunks: chunk_index = self._downloaded_chunks.bisect_right(offset) if chunk_index > 0: chunk_index -= 1 chunk = self._downloaded_chunks.peekitem(chunk_index) if offset < chunk[0] + chunk[1]: return True return False def _on_new_chunk_downloaded(self, node_id, offset, length, data): if not self._write_to_file(offset, data): return False self.received += length if self._connectivity_service.is_relayed(node_id): self._received_via_turn += length else: self._received_via_p2p += length new_offset = offset new_length = length left_index = self._downloaded_chunks.bisect_right(new_offset) if left_index > 0: left_chunk = self._downloaded_chunks.peekitem(left_index - 1) if left_chunk[0] + left_chunk[1] == new_offset: new_offset = left_chunk[0] new_length += left_chunk[1] self._downloaded_chunks.popitem(left_index - 1) right_index = self._downloaded_chunks.bisect_right(new_offset + new_length) if right_index > 0: right_chunk = self._downloaded_chunks.peekitem(right_index - 1) if right_chunk[0] == new_offset + new_length: new_length += right_chunk[1] self._downloaded_chunks.popitem(right_index - 1) self._downloaded_chunks[new_offset] = new_length assert self._remove_from_chunks(offset, length, self._wanted_chunks) logger.debug("new chunk downloaded from node: %s, wanted size: %s", node_id, sum(self._wanted_chunks.values())) part_offset = (offset / DOWNLOAD_PART_SIZE) * DOWNLOAD_PART_SIZE part_size = min([DOWNLOAD_PART_SIZE, self.size - part_offset]) if new_offset <= part_offset \ and new_offset + new_length >= part_offset + part_size: if self._file: self._file.flush() self._write_info_file() 
        # NOTE(review): tail of a chunk-receive handler whose head lies above
        # this part of the file.  Emits per-part progress, then finishes the
        # whole download if nothing more is wanted.
        self.chunk_downloaded.emit(self.id, str(part_offset), part_size)
        if self._complete_download():
            return False
        return True

    def _remove_from_chunks(self, offset, length, chunks):
        """Subtract the interval [offset, offset + length) from ``chunks``.

        ``chunks`` is a SortedDict mapping chunk offset -> chunk length.
        Fully covered chunks are dropped; the boundary chunks that stick
        out of the removed interval are re-inserted as remainders.
        Returns True if any overlap was found and removed.
        """
        if not chunks:
            return False
        # Locate the chunk at or immediately left of 'offset'.
        chunk_left_index = chunks.bisect_right(offset)
        if chunk_left_index > 0:
            left_chunk = chunks.peekitem(chunk_left_index - 1)
            if offset >= left_chunk[0] + left_chunk[1] \
                    and len(chunks) > chunk_left_index:
                # Left neighbour ends before 'offset' - consider next chunk.
                left_chunk = chunks.peekitem(chunk_left_index)
            else:
                chunk_left_index -= 1
        else:
            left_chunk = chunks.peekitem(chunk_left_index)
        if offset >= left_chunk[0] + left_chunk[1] or \
                offset + length <= left_chunk[0]:
            # No overlap with any stored chunk.
            return False
        chunk_right_index = chunks.bisect_right(offset + length)
        right_chunk = chunks.peekitem(chunk_right_index - 1)
        if chunk_right_index == chunk_left_index:
            to_del = [right_chunk[0]]
        else:
            to_del = list(chunks.islice(chunk_left_index, chunk_right_index))
        for chunk in to_del:
            chunks.pop(chunk)
        # Re-insert the pieces of the boundary chunks outside the
        # removed interval.
        if left_chunk[0] < offset:
            if left_chunk[0] + left_chunk[1] >= offset:
                chunks[left_chunk[0]] = offset - left_chunk[0]
        if right_chunk[0] + right_chunk[1] > offset + length:
            chunks[offset + length] = \
                right_chunk[0] + right_chunk[1] - offset - length
        return True

    def on_data_failed(self, node_id, obj_id, offset, error):
        """Handle a failed data request; treat the node as disconnected."""
        if obj_id != self.id or self._finished:
            return
        logger.info(
            "data request failure, "
            "node_id: %s, obj_id: %s, offset: %s, error: %s",
            node_id, obj_id, offset, error)
        self.on_node_disconnected(node_id)

    def get_downloaded_chunks(self):
        # Returns None (not an empty container) when nothing was downloaded.
        if not self._downloaded_chunks:
            return None
        return self._downloaded_chunks

    def on_node_disconnected(self, node_id, connection_alive=False,
                             timeout_limit_exceed=True):
        """Drop per-node bookkeeping and redistribute its chunks.

        :param connection_alive: the link still exists (timeout, not a
            real disconnect) - abort outstanding data / ask to reconnect
        :param timeout_limit_exceed: node exceeded its timeout budget -
            also forget what it had available
        """
        requested_chunks = self._nodes_requested_chunks.pop(node_id, None)
        logger.info("node disconnected %s, chunks removed from requested: %s",
                    node_id, requested_chunks)
        if timeout_limit_exceed:
            self._nodes_available_chunks.pop(node_id, None)
            self._nodes_timeouts_count.pop(node_id, None)
            if connection_alive:
                self._connectivity_service.reconnect(node_id)
        self._nodes_last_receive_time.pop(node_id, None)
        self._nodes_downloaded_chunks_count.pop(node_id, None)
        if connection_alive:
            self.abort_data.emit(node_id, self.id, None)
        if self._nodes_available_chunks:
            # Other nodes can still serve us - keep downloading.
            self._download_chunks(check_node_busy=True)
        else:
            chunks_to_test = self._nodes_requested_chunks \
                if self._started and not self._paused \
                else self._nodes_available_chunks
            self._check_download_not_ready(chunks_to_test)

    def complete(self):
        """Finish the task, forcing completion if it is still running."""
        if self._started and not self._finished:
            self._complete_download(force_complete=True)
        elif not self._finished:
            self._finished = True
            self.clean()
            self.download_complete.emit(self)

    def _download_chunks(self, check_node_busy=False):
        """Request more chunks from nodes, in random node order.

        :param check_node_busy: skip nodes that already have outstanding
            requests
        """
        if not self._started or self._paused or self._finished:
            return
        logger.debug("download_chunks for %s", self.id)
        node_ids = list(self._nodes_available_chunks.keys())
        random.shuffle(node_ids)
        for node_id in node_ids:
            node_free = not check_node_busy or \
                not self._nodes_requested_chunks.get(node_id, None)
            if node_free:
                self._download_next_chunks(node_id)
        self._clean_nodes_last_receive_time()
        self._check_download_not_ready(self._nodes_requested_chunks)

    def _check_can_receive(self, node_id):
        # Hook for subclasses; the base task accepts data from any node.
        return True

    def _write_to_file(self, offset, data):
        """Write ``data`` at ``offset``; emit failure signals on I/O error.

        Returns True on success.
        """
        self._file.seek(offset)
        try:
            self._file.write(data)
        except EnvironmentError as e:
            logger.error("Download task %s can't write to file. Reason: %s",
                         self.id, e)
            self._send_error_statistic()
            if e.errno == errno.ENOSPC:
                self._emit_no_disk_space(error=True)
            else:
                self.download_failed.emit(self)
                self.possibly_sync_folder_is_removed.emit()
            return False
        return True

    def _open_file(self, clean=False):
        """Open the partial download file ('wb' when clean, else 'r+b').

        Falls back to creating the file when it can't be opened for
        update.  Returns True on success.
        """
        if not self._file or self._file.closed:
            try:
                if clean:
                    self._file = open(self.download_path, 'wb')
                else:
                    self._file = open(self.download_path, 'r+b')
            except IOError:
                try:
                    self._file = open(self.download_path, 'wb')
                except IOError as e:
                    logger.error(
                        "Can't open file for download for task %s. "
                        "Reason: %s", self.id, e)
                    self.download_failed.emit(self)
                    return False
        return True

    def _close_file(self):
        """Close the download file; returns True on success."""
        if not self._file:
            return True
        try:
            self._file.close()
        except EnvironmentError as e:
            logger.error("Download task %s can't close file. Reason: %s",
                         self.id, e)
            self._send_error_statistic()
            if e.errno == errno.ENOSPC:
                self._emit_no_disk_space(error=True)
            else:
                self.download_failed.emit(self)
                self.possibly_sync_folder_is_removed.emit()
            self._file = None
            return False
        self._file = None
        return True

    def _write_info_file(self):
        # Persist the downloaded-chunk map so an interrupted download
        # can be resumed later.
        try:
            self._info_file.seek(0)
            self._info_file.truncate()
            pickle.dump(self._downloaded_chunks, self._info_file,
                        pickle.HIGHEST_PROTOCOL)
            self._info_file.flush()
        except EnvironmentError as e:
            logger.debug("Can't write to info file for task id %s. Reason: %s",
                         self.id, e)

    def _read_info_file(self):
        """Load previously downloaded chunk state, if any."""
        try:
            if not self._info_file or self._info_file.closed:
                self._info_file = open(self._info_path, 'a+b')
                self._info_file.seek(0)
            try:
                self._downloaded_chunks = pickle.load(self._info_file)
            except:
                # Empty or corrupt info file - start from scratch.
                pass
        except EnvironmentError as e:
            logger.debug("Can't open info file for task id %s. Reason: %s",
                         self.id, e)

    def _close_info_file(self, to_remove=False):
        """Close (and optionally delete) the resume-info file."""
        if not self._info_file:
            return
        try:
            self._info_file.close()
            if to_remove:
                remove_file(self._info_path)
        except Exception as e:
            logger.debug(
                "Can't close or remove info file "
                "for task id %s. Reason: %s", self.id, e)
        self._info_file = None

    def _complete_download(self, force_complete=False):
        """Finalize the download once nothing more is wanted.

        Verifies the file hash (unless forced), closes bookkeeping files
        and moves the temp file into place.  Returns True when the
        download is (now) complete, False on finalization failure.
        """
        if (not self._wanted_chunks or force_complete) and \
                not self._finished:
            logger.debug("download %s completed", self.id)
            self._nodes_requested_chunks.clear()
            for node_id in self._nodes_last_receive_time.keys():
                self.abort_data.emit(node_id, self.id, None)
            if not force_complete:
                self.download_finishing.emit()
            if not force_complete and self.file_hash:
                hash_check_result = self._check_file_hash()
                if hash_check_result is not None:
                    # Hash mismatch handling (retry / wrong-hash signal)
                    # decided the outcome already.
                    return hash_check_result
            self._started = False
            self._finished = True
            self.stop_download_chunks()
            self._close_info_file(to_remove=True)
            if not self._close_file():
                return False
            try:
                if force_complete:
                    remove_file(self.download_path)
                    self.download_complete.emit(self)
                else:
                    shutil.move(self.download_path, self.file_path)
                    self._send_end_statistic()
                    self.download_complete.emit(self)
                    if self.file_hash:
                        self.copy_added.emit(self.file_hash)
            except EnvironmentError as e:
                logger.error(
                    "Download task %s can't (re)move file. "
                    "Reason: %s", self.id, e)
                self._send_error_statistic()
                self.download_failed.emit(self)
                self.possibly_sync_folder_is_removed.emit()
                return False
            result = True
        else:
            result = not self._wanted_chunks
        return result

    def _check_file_hash(self):
        """Verify the downloaded file's hash.

        Returns None when the hash matches, True when a mismatch was
        handled (retry scheduled or wrong_hash emitted), False when the
        file could not be reopened for a retry.
        """
        self._file.flush()
        try:
            hash = Rsync.hash_from_block_checksum(
                Rsync.block_checksum(self.download_path))
        except IOError as e:
            logger.error("download %s error: %s", self.id, e)
            hash = None
        if hash != self.file_hash:
            logger.error(
                "download hash check failed objId: %s, "
                "expected hash: %s, actual hash: %s",
                self.id, self.file_hash, hash)
            if not self._close_file() or not self._open_file(clean=True):
                return False
            # Throw away all progress and start over.
            self._downloaded_chunks.clear()
            self._nodes_downloaded_chunks_count.clear()
            self._nodes_last_receive_time.clear()
            self._nodes_timeouts_count.clear()
            self._write_info_file()
            self._init_wanted_chunks()
            self.received = 0
            if self._retry < self.retry_limit:
                self._retry += 1
                self.resume()
            else:
                self._retry = 0
                self._nodes_available_chunks.clear()
                self.hash_is_wrong = True
                self.wrong_hash.emit(self)
            return True
        return None

    def _download_next_chunks(self, node_id, time_from_last_received_chunk=0.):
        """Pick a random part of a random available chunk and request it."""
        if (self._paused or not self._started or not self._ready
                or self._finished or not self._wanted_chunks
                or self._leaky_timer.isActive()):
            return
        total_requested = sum(
            map(lambda x: sum(x.values()),
                self._nodes_requested_chunks.values()))
        if total_requested + self.received >= self.size:
            # Everything is already requested - enter the "end race":
            # re-request slow chunks from other nodes.
            if self._nodes_requested_chunks.get(node_id, None) and \
                    time_from_last_received_chunk <= self.end_race_timeout:
                return
            available_chunks = \
                self._get_end_race_chunks_to_download_from_node(node_id)
        else:
            available_chunks = \
                self._get_available_chunks_to_download_from_node(node_id)
        if not available_chunks:
            logger.debug("no chunks available for download %s", self.id)
            logger.debug("downloading from: %s nodes, length: %s, wanted: %s",
                         len(self._nodes_requested_chunks), total_requested,
                         self.size - self.received)
            return
        available_offset = random.sample(available_chunks.keys(), 1)[0]
        available_length = available_chunks[available_offset]
        logger.debug("selected random offset: %s", available_offset)
        parts_count = math.ceil(
            float(available_length) / float(DOWNLOAD_PART_SIZE)) - 1
        logger.debug("parts count: %s", parts_count)
        part_to_download_number = random.randint(0, parts_count)
        offset = available_offset + \
            part_to_download_number * DOWNLOAD_PART_SIZE
        length = min(DOWNLOAD_PART_SIZE,
                     available_offset + available_length - offset)
        logger.debug("selected random part: %s, offset: %s, length: %s",
                     part_to_download_number, offset, length)
        self._request_data(node_id, offset, length)

    def _get_end_race_chunks_to_download_from_node(self, node_id):
        """Chunks requestable near download end.

        Excludes already-downloaded data; prefers chunks not yet
        requested from this node, but falls back to racing other nodes'
        outstanding requests.
        """
        available_chunks = self._nodes_available_chunks.get(node_id, None)
        if not available_chunks:
            return []
        available_chunks = available_chunks.copy()
        logger.debug("end race downloaded_chunks: %s",
                     self._downloaded_chunks)
        logger.debug("end race requested_chunks: %s",
                     self._nodes_requested_chunks)
        logger.debug("end race available_chunks before excludes: %s",
                     available_chunks)
        if self._downloaded_chunks:
            for downloaded_chunk in self._downloaded_chunks.items():
                self._remove_from_chunks(
                    downloaded_chunk[0], downloaded_chunk[1],
                    available_chunks)
        if not available_chunks:
            return []
        available_from_other_nodes = available_chunks.copy()
        for requested_offset, requested_length in \
                self._nodes_requested_chunks.get(node_id, dict()).items():
            self._remove_from_chunks(requested_offset, requested_length,
                                     available_from_other_nodes)
        result = available_from_other_nodes if available_from_other_nodes \
            else available_chunks
        if result:
            logger.debug("end race available_chunks after excludes: %s",
                         available_chunks)
        return result

    def _get_available_chunks_to_download_from_node(self, node_id):
        """Chunks this node offers minus everything requested/downloaded."""
        available_chunks = self._nodes_available_chunks.get(node_id, None)
        if not available_chunks:
            return []
        available_chunks = available_chunks.copy()
        logger.debug("downloaded_chunks: %s", self._downloaded_chunks)
        logger.debug("requested_chunks: %s", self._nodes_requested_chunks)
        logger.debug("available_chunks before excludes: %s",
                     available_chunks)
        for _, requested_chunks in self._nodes_requested_chunks.items():
            for requested_offset, requested_length in \
                    requested_chunks.items():
                self._remove_from_chunks(requested_offset, requested_length,
                                         available_chunks)
        if not available_chunks:
            return []
        for downloaded_chunk in self._downloaded_chunks.items():
            self._remove_from_chunks(downloaded_chunk[0], downloaded_chunk[1],
                                     available_chunks)
        logger.debug("available_chunks after excludes: %s", available_chunks)
        return available_chunks

    def _request_data(self, node_id, offset, length):
        """Emit a data request, honouring the bandwidth leaky bucket."""
        logger.debug("Requesting date from node %s, request_chunk (%s, %s)",
                     node_id, offset, length)
        if self._limiter:
            try:
                self._limiter.leak(length)
            except LeakyBucketException:
                # Over the bandwidth budget - back off until the
                # leaky-bucket timer fires.
                if node_id not in self._nodes_requested_chunks:
                    self._nodes_last_receive_time.pop(node_id, None)
                    if not self._network_limited_error_set:
                        self.download_error.emit('Network limited.')
                        self._network_limited_error_set = True
                if not self._leaky_timer.isActive():
                    self._leaky_timer.start()
                return
        if self._network_limited_error_set:
            self._network_limited_error_set = False
            self.download_ok.emit()
        requested_chunks = self._nodes_requested_chunks.get(node_id, None)
        if not requested_chunks:
            requested_chunks = SortedDict()
            self._nodes_requested_chunks[node_id] = requested_chunks
        requested_chunks[offset] = length
        logger.debug("Requested chunks %s", requested_chunks)
        self._nodes_last_receive_time[node_id] = time()
        self.request_data.emit(node_id, self.id, str(offset), length)

    def _clean_nodes_last_receive_time(self):
        # Forget receive times for nodes with no outstanding requests.
        for node_id in list(self._nodes_last_receive_time.keys()):
            if node_id not in self._nodes_requested_chunks:
                self._nodes_last_receive_time.pop(node_id, None)

    def _on_check_timeouts(self):
        """Periodic check: disconnect nodes that stopped sending data."""
        if self._paused or not self._started \
                or self._finished or self._leaky_timer.isActive():
            return
        timed_out_nodes = set()
        cur_time = time()
        logger.debug("Chunk requests check %s",
                     len(self._nodes_requested_chunks))
        if self._check_download_not_ready(self._nodes_requested_chunks):
            return
        for node_id in self._nodes_last_receive_time:
            last_receive_time = self._nodes_last_receive_time.get(node_id)
            if cur_time - last_receive_time > self.receive_timeout:
                timed_out_nodes.add(node_id)
        logger.debug("Timed out nodes %s, nodes last receive time %s",
                     timed_out_nodes, self._nodes_last_receive_time)
        for node_id in timed_out_nodes:
            timeout_count = self._nodes_timeouts_count.pop(node_id, 0)
            timeout_count += 1
            if timeout_count >= self.timeouts_limit:
                retry = False
            else:
                retry = True
                self._nodes_timeouts_count[node_id] = timeout_count
            logger.debug("Node if %s, timeout_count %s, retry %s",
                         node_id, timeout_count, retry)
            self.on_node_disconnected(
                node_id, connection_alive=True,
                timeout_limit_exceed=not retry)

    def _get_chunks_from_info(self, chunks, info):
        """Merge availability ``info`` parts into the ``chunks`` SortedDict.

        Coalesces each incoming part with adjacent/overlapping stored
        chunks.  Returns True if anything new was added.
        """
        new_added = False
        for part_info in info:
            logger.debug("get_chunks_from_info part_info %s", part_info)
            if part_info.length == 0:
                continue
            if not chunks:
                chunks[part_info.offset] = part_info.length
                new_added = True
                continue
            result_offset = part_info.offset
            result_length = part_info.length
            left_index = chunks.bisect_right(part_info.offset)
            if left_index > 0:
                left_chunk = chunks.peekitem(left_index - 1)
                if (left_chunk[0] <= part_info.offset
                        and left_chunk[0] + left_chunk[1]
                        >= part_info.offset + part_info.length):
                    # Entirely covered by an existing chunk - nothing new.
                    continue
                if part_info.offset <= left_chunk[0] + left_chunk[1]:
                    # Overlaps/touches the left chunk - extend from its start.
                    result_offset = left_chunk[0]
                    result_length = part_info.offset + \
                        part_info.length - result_offset
                    left_index -= 1
            right_index = chunks.bisect_right(
                part_info.offset + part_info.length)
            if right_index > 0:
                right_chunk = chunks.peekitem(right_index - 1)
                if part_info.offset + part_info.length <= \
                        right_chunk[0] + right_chunk[1]:
                    # Overlaps the right chunk - extend to its end.
                    result_length = right_chunk[0] + \
                        right_chunk[1] - result_offset
            to_delete = list(chunks.islice(left_index, right_index))
            for to_del in to_delete:
                chunks.pop(to_del)
            new_added = True
            chunks[result_offset] = result_length
        return new_added

    def _store_availability_info(self, node_id, info):
        """Record which chunks ``node_id`` can serve; True if new data."""
        known_chunks = self._nodes_available_chunks.get(node_id, None)
        if not known_chunks:
            known_chunks = SortedDict()
            self._nodes_available_chunks[node_id] = known_chunks
        return self._get_chunks_from_info(known_chunks, info)

    def _check_download_not_ready(self, checkable):
        """Demote the task to 'not ready' when no chunks can be obtained.

        Returns True if the task became not ready.
        """
        if not self._wanted_chunks and self._started:
            self._complete_download(force_complete=False)
            return False
        if self._leaky_timer.isActive():
            if not self._nodes_available_chunks:
                self._make_not_ready()
                return True
        elif not checkable:
            self._make_not_ready()
            return True
        return False

    def _make_not_ready(self):
        if not self._ready:
            return
        logger.info("download %s not ready now", self.id)
        self._ready = False
        self._started = False
        if self._timeout_timer.isActive():
            self._timeout_timer.stop()
        if self._leaky_timer.isActive():
            self._leaky_timer.stop()
        self.download_not_ready.emit(self)

    def _clear_globals(self):
        # Reset all per-download bookkeeping containers.
        self._wanted_chunks.clear()
        self._downloaded_chunks.clear()
        self._nodes_available_chunks.clear()
        self._nodes_requested_chunks.clear()
        self._nodes_last_receive_time.clear()
        self._nodes_downloaded_chunks_count.clear()
        self._nodes_timeouts_count.clear()
        self._total_chunks_count = 0

    def stop_download_chunks(self):
        """Stop timers and abort every outstanding chunk request."""
        if self._leaky_timer.isActive():
            self._leaky_timer.stop()
        if self._timeout_timer.isActive():
            self._timeout_timer.stop()
        for node_id in self._nodes_requested_chunks:
            self.abort_data.emit(node_id, self.id, None)
        self._nodes_requested_chunks.clear()
        self._nodes_last_receive_time.clear()

    def _emit_no_disk_space(self, error=False):
        """Abort bookkeeping and notify listeners about missing disk space."""
        self._no_disk_space_error = True
        self._nodes_available_chunks.clear()
        self._clear_globals()
        self._make_not_ready()
        file_name = self.display_name.split()[-1] \
            if self.display_name else ""
        self.no_disk_space.emit(self, file_name, error)

    def _send_start_statistic(self):
        if self._tracker:
            self._tracker.download_start(self.id, self.size)

    def _send_end_statistic(self):
        if self._tracker:
            time_diff = time() - self._started_time
            if time_diff < 1e-3:
                # Avoid zero/negative durations in rate computations.
                time_diff = 1e-3
            self._tracker.download_end(
                self.id, time_diff,
                websockets_bytes=0,
                webrtc_direct_bytes=self._received_via_p2p,
                webrtc_relay_bytes=self._received_via_turn,
                chunks=len(self._downloaded_chunks),
                chunks_reloaded=0,
                nodes=len(self._nodes_available_chunks))

    def _send_error_statistic(self):
        if self._tracker:
            time_diff = time() - self._started_time
            if time_diff < 1e-3:
                # Avoid zero/negative durations in rate computations.
                time_diff = 1e-3
            self._tracker.download_error(
                self.id, time_diff,
                websockets_bytes=0,
                webrtc_direct_bytes=self._received_via_p2p,
                webrtc_relay_bytes=self._received_via_turn,
                chunks=len(self._downloaded_chunks),
                chunks_reloaded=0,
                nodes=len(self._nodes_available_chunks))
class InfoOverlay(QtWidgets.QWidget):
    """Semi-transparent message overlay drawn on top of a parent widget.

    Messages are queued (up to ``queue_limit``) and shown one after
    another for a per-message duration; an entry may carry buttons.
    The overlay turns nearly transparent while the mouse hovers it so
    the content underneath stays readable.
    """

    # Positioning relative to the parent widget's size
    y_offset_factor = 0.20
    x_offset_factor = 0.15

    # Default opacity
    txt_opacity = 255
    bg_opacity = 200
    bg_color = (90, 90, 90, bg_opacity)
    queue_limit = 12

    def __init__(self, parent: QtWidgets.QWidget):
        """:param parent: widget the overlay is laid over"""
        super(InfoOverlay, self).__init__(parent)

        # --- These will be replaced from the ui file ---
        self.overlay_grp = QtWidgets.QWidget()
        self.top_space_widget = QtWidgets.QWidget()
        self.left_space_widget = QtWidgets.QWidget()
        self.btn_box = QtWidgets.QWidget()
        self.text_label = QtWidgets.QLabel()

        SetupWidget.from_ui_file(self, Resource.ui_paths['overlay'])

        # --- Init Attributes ---
        self.parent = parent
        self.queue = list()
        self.btn_list = list()
        self.message_active = False

        # --- Get header height (tree-view parents only) ---
        self.header_height = 0
        if hasattr(parent, 'header'):
            self.header_height = parent.header().height()

        # --- Setup Overlay Attributes ---
        # Stylesheet template: {0} = background opacity, {1} = text opacity
        self.style = 'background: rgba(' \
            + f'{self.bg_color[0]}, {self.bg_color[1]}, {self.bg_color[2]},' \
            + '{0}); color: rgba(233, 233, 233, {1});'
        self.bg_anim = BgrAnimation(
            self.overlay_grp, self.bg_color,
            additional_stylesheet=
            f'color: rgba(233, 233, 233, {self.txt_opacity});')
        self.restore_visibility()
        self.overlay_grp.installEventFilter(self)
        self.animation = QPropertyAnimation(self.overlay_grp, b"geometry")
        self.text_label.setOpenExternalLinks(True)

        # --- Init Timers ---
        # Advances the message queue when a message's duration elapses
        self.msg_timer = QTimer()
        self.msg_timer.setSingleShot(True)
        self.msg_timer.timeout.connect(self._next_entry)
        # Restores full opacity shortly after the mouse leaves
        self.mouse_leave_timer = QTimer()
        self.mouse_leave_timer.setSingleShot(True)
        self.mouse_leave_timer.setInterval(150)
        self.mouse_leave_timer.timeout.connect(self.restore_visibility)
        # Debounces click-to-dismiss
        self.click_timer = QTimer()
        self.click_timer.setSingleShot(True)

        # --- Install parent resize wrapper ---
        self._org_parent_resize_event = self.parent.resizeEvent
        self.parent.resizeEvent = self._parent_resize_wrapper

        # --- Install Show Event wrapper ---
        # On document tab change, the parent widget will not trigger a
        # resize event but may have been resized while hidden. This
        # wrapper makes sure we adapt size on tab change.
        self._org_parent_show_event = self.parent.showEvent
        self.parent.showEvent = self._parent_show_wrapper

        # Manually trigger an initial resize event
        QTimer.singleShot(1, self._adapt_size)
        self.hide_all()

    def eventFilter(self, obj, event):
        """ Make Widget transparent on Mouse Move and Enter Event """
        if obj in (self.overlay_grp, self.text_label, self.btn_box):
            # --- Detect Mouse Events ---
            if event.type() == QEnterEvent.Enter or event.type(
                    ) == QMouseEvent.MouseMove:
                self.mouse_leave_timer.stop()
                self.set_opacity(30)
                event.accept()
                return True

            if event.type() == QEnterEvent.Leave:
                self.mouse_leave_timer.start()
                event.accept()
                return True

            # Click dismisses button-less messages (debounced)
            if event.type(
                    ) == QMouseEvent.MouseButtonPress and not self.btn_list \
                    and not self.click_timer.isActive():
                self.display_exit()
                event.accept()
                return True

        return False

    def _parent_resize_wrapper(self, event):
        """Forward the parent's resize event, then re-fit the overlay."""
        self._org_parent_resize_event(event)
        self._adapt_size()
        event.accept()

    def _parent_show_wrapper(self, event):
        """Forward the parent's show event, then re-fit the overlay."""
        self._org_parent_show_event(event)
        self._adapt_size()
        event.accept()

    def _adapt_size(self):
        """Re-position and re-size the overlay to match the parent."""
        top_spacing = round(self.parent.frameGeometry().height()
                            * self.y_offset_factor) + self.header_height
        left_spacing = round(self.parent.frameGeometry().width()
                             * self.x_offset_factor)
        self.left_space_widget.setMinimumWidth(left_spacing)
        self.top_space_widget.setMinimumHeight(top_spacing)
        self.resize(self.parent.size())

        # Mask out invisible areas to -not- grab mouse events from that region
        reg = QRegion(self.parent.frameGeometry())
        reg -= self.frameGeometry()
        reg += self.overlay_grp.frameGeometry()
        self.setMask(reg)

    def set_opacity(self, opacity: int):
        """Apply one opacity value (clamped to 0..255) to bg and text."""
        opacity = min(255, max(0, opacity))
        self.overlay_grp.setStyleSheet(self.style.format(opacity, opacity))

    def restore_visibility(self):
        """Restore the default background and text opacity."""
        self.overlay_grp.setStyleSheet(
            self.style.format(self.bg_opacity, self.txt_opacity))

    def display(self, message: str = '', duration: int = 3000,
                immediate: bool = False, buttons: tuple = tuple()):
        """Queue a message for display.

        :param message: text to show (may contain links)
        :param duration: display time in milliseconds
        :param immediate: show now instead of waiting for the queue
        :param buttons: tuple of (text, callback) button definitions
        """
        if len(self.queue) > self.queue_limit:
            return

        self.queue.append((
            self._force_word_wrap(message),
            duration,
            buttons,
        ))

        if not self.msg_timer.isActive() or immediate:
            self._next_entry(False)

    def display_confirm(self, message: str = '',
                        buttons: Tuple[Tuple] = tuple(),
                        immediate: bool = False):
        """Display a message with confirmation buttons."""
        self.display(message, 1000, immediate, buttons)

    def display_exit(self):
        """ Immediately hide the current message """
        self.click_timer.start(100)
        self.msg_timer.stop()

        if self.btn_list:
            for btn in self.btn_list:
                btn.deleteLater()
            self.btn_list = list()

        if self.queue:
            self._next_entry(False)
        else:
            self._init_fade_anim(False)
            QTimer.singleShot(300, self.hide_all)

    def _next_entry(self, called_from_timer: bool = True):
        """ Display the next entry in the queue """
        if self.btn_list:
            # Buttons pending - wait for user interaction
            return

        if self.queue:
            message, duration, buttons = self.queue.pop(0)
            LOGGER.debug('Displaying: %s (%s)', message[:30], len(self.queue))
        else:
            self.display_exit()
            LOGGER.debug('Overlay stopping.')
            return

        if buttons:
            self.btn_list = [self.create_button(btn) for btn in buttons]
            self.btn_box.show()
        else:
            self.btn_box.hide()

        self.text_label.setText(message)
        self.show_all()
        self.restore_visibility()

        # Animate if not called from the queue timer
        if not called_from_timer and not self.message_active:
            self.overlay_grp.setUpdatesEnabled(False)
            self._init_fade_anim(True)
            QTimer.singleShot(150, self._enable_updates)

        QTimer.singleShot(1, self._adapt_size)
        self.message_active = True
        self.msg_timer.start(duration)

    def _enable_updates(self):
        self.overlay_grp.setUpdatesEnabled(True)

    def _init_fade_anim(self, fade_in: bool = True):
        """Start a background fade (in or out), stopping any running one."""
        if self.bg_anim.fade_anim.state() == QAbstractAnimation.Running:
            LOGGER.debug('Stopping running animation.')
            self.bg_anim.fade_anim.stop()

        if fade_in:
            self.bg_anim.fade(
                (self.bg_color[0], self.bg_color[1], self.bg_color[2], 0),
                self.bg_color, 500)
        else:
            self.bg_anim.fade(
                self.bg_color,
                (self.bg_color[0], self.bg_color[1], self.bg_color[2], 0),
                300)

    def create_button(self, button):
        """ Dynamic button creation on request """
        txt, callback = button
        new_button = QtWidgets.QPushButton(txt, self.btn_box)
        new_button.setStyleSheet(
            'background: rgba(80, 80, 80, 255); color: rgb(230, 230, 230);')
        self.btn_box.layout().addWidget(new_button, 0, Qt.AlignLeft)

        if callback is None:
            # Buttons without a callback just dismiss the message
            new_button.pressed.connect(self.display_exit)
        else:
            new_button.pressed.connect(callback)

        return new_button

    def hide_all(self):
        if not self.msg_timer.isActive() and not self.queue:
            self.hide()
            self.btn_box.hide()
            self.message_active = False

    def show_all(self):
        self.show()

    @staticmethod
    def _force_word_wrap(message: str) -> str:
        """ Force white space in too long words """
        word_chr_limit = 35
        new_message = ''

        # Find words as well as whitespace \W
        # FIX: raw string - the former plain literal produced invalid
        # escape sequences ('\w'), a SyntaxWarning on modern Python.
        for word in re.findall(r"[\w']+|[\W]+", message):
            if not len(word) > word_chr_limit:
                new_message += word
                continue

            # Add a hard line break in words longer than the limit
            for start in range(0, len(word), word_chr_limit):
                end = start + word_chr_limit
                new_message += word[start:end] + '\n'

        # Return without trailing space
        return new_message
class TransfersInfo(QObject):
    """Holds download/upload transfer state and feeds TransfersDialog.

    Updates are emitted to the dialog via queued signals; while the
    dialog is still processing (tracked by ``_transfers_dialog_calls``),
    incoming changes are accumulated and flushed on ``transfers_ready``.
    """

    downloads_info_changed = Signal(dict, bool)
    uploads_info_changed = Signal(dict)
    downloads_state_changed = Signal(dict)
    uploads_state_changed = Signal(dict)
    speed_size_changed = Signal(float, float, float, float)
    revert_downloads = Signal(list,  # reverted files
                              list,  # reverted patches
                              list)  # reverted shares
    pause_resume_clicked = Signal()
    transfers_ready = Signal()
    add_to_sync_folder = Signal(list)
    download_link_handler = Signal(str)

    def __init__(self, parent, parent_window, dp):
        """
        :param parent: QObject parent
        :param parent_window: window the transfers dialog is attached to
        :param dp: device-pixel (scaling) factor passed to the dialog
        """
        QObject.__init__(self, parent)
        self._downloads_info = dict()
        self._uploads_info = dict()
        self._init_speed_size()
        self._parent_window = parent_window
        self._dp = dp
        self._transfers_dialog = None
        self._time_delta_timer = QTimer(self)
        self._time_delta_timer.setInterval(1 * 60 * 1000)  # 1 minute
        self._speed_charts_timer = QTimer(self)
        self._speed_charts_timer.setInterval(1000)  # 1 second
        self._speed_charts_timer.timeout.connect(
            self._update_speed_charts, Qt.QueuedConnection)
        self._speed_charts_timer.start()
        self._all_disconnected_timer = QTimer(self)
        self._all_disconnected_timer.setInterval(5 * 1000)
        self._all_disconnected_timer.setSingleShot(True)
        self._all_disconnected_timer.timeout.connect(
            self._on_all_disconnected)
        self._paused = False
        self._resuming = False
        # Number of dialog updates in flight, decremented on ready signal
        self._transfers_dialog_calls = 0
        self._init_changed_statuses()
        self.transfers_ready.connect(
            self._on_transfers_ready, Qt.QueuedConnection)
        self._nodes_num = 0
        self._signalserver_address = ''

    def _init_speed_size(self):
        # Current speeds/sizes plus fixed-capacity speed histories
        # for the charts.
        self._download_speed = 0
        self._download_size = 0
        self._upload_speed = 0
        self._upload_size = 0
        self._download_speeds = deque(
            [0] * SPEED_CHART_CAPACITY, maxlen=SPEED_CHART_CAPACITY)
        self._upload_speeds = deque(
            [0] * SPEED_CHART_CAPACITY, maxlen=SPEED_CHART_CAPACITY)

    def _init_changed_statuses(self):
        # Accumulated pending-change flags used while the dialog is busy.
        self._downloads_changed = False
        self._uploads_changed = False
        self._reload_downloads = False
        self._reload_uploads = False
        self._changed_info = dict()

    def update_info(self, downloads_info, uploads_info):
        """Merge incremental downloads info and a full uploads snapshot.

        :param downloads_info: (added, changed, deleted) dicts
        :param uploads_info: complete uploads state dict
        """
        logger.verbose("Updating transfers info")
        added_info, changed_info, deleted_info = downloads_info
        downloads_changed = added_info or changed_info or deleted_info \
            or self._resuming
        reload_downloads = added_info or deleted_info or self._resuming
        self._resuming = False
        self._downloads_info.update(added_info)
        for obj_id, changed in list(changed_info.items()):
            saved_info = self._downloads_info.get(obj_id)
            if not saved_info:
                # Change for an unknown task - drop it.
                changed_info.pop(obj_id, None)
                continue
            old_state = saved_info["state"]
            was_current = old_state in TransfersDialog.CURRENT_TASK_STATES
            new_state = changed["state"]
            is_current = new_state in TransfersDialog.CURRENT_TASK_STATES
            # Full reload when a task enters/leaves the 'current' group;
            # an in-place state update is enough otherwise.
            reload_downloads = reload_downloads or not (
                old_state == new_state or was_current and is_current)
            saved_info.update(changed)
        for obj_id in deleted_info:
            self._downloads_info.pop(obj_id, None)
        reload_uploads = set(self._uploads_info) != set(uploads_info)
        uploads_changed = self._uploads_info != uploads_info
        self._uploads_info = uploads_info
        self._update_info(reload_downloads, reload_uploads, changed_info,
                          downloads_changed, uploads_changed)

    def update_download_speed(self, value):
        self._download_speed = value
        self._update_speed_size()

    def update_download_size(self, value):
        self._download_size = value
        self._update_speed_size()

    def update_upload_speed(self, value):
        self._upload_speed = value
        self._update_speed_size()

    def update_upload_size(self, value):
        self._upload_size = value
        self._update_speed_size()

    def _update_speed_size(self):
        # Only forward numbers when the dialog is open.
        if not self._transfers_dialog:
            return
        self.speed_size_changed.emit(
            self._download_speed, self._download_size,
            self._upload_speed, self._upload_size)

    def _update_info(self, reload_downloads=True, reload_uploads=True,
                     changed_info=(), downloads_changed=True,
                     uploads_changed=True, supress_paused=False):
        """Emit info/state updates to the dialog or buffer them."""
        if not self._transfers_dialog:
            return
        if self._transfers_dialog_calls:
            # Dialog still processing previous update - accumulate flags
            # and flush on the next transfers_ready.
            logger.verbose("Transfers dialog not ready")
            self._downloads_changed |= bool(downloads_changed)
            self._uploads_changed |= bool(uploads_changed)
            self._reload_downloads |= bool(reload_downloads)
            self._reload_uploads |= bool(reload_uploads)
            self._changed_info.update(changed_info)
            return
        if downloads_changed:
            if reload_downloads:
                self.downloads_info_changed.emit(
                    deepcopy(self._downloads_info), supress_paused)
            else:
                self.downloads_state_changed.emit(deepcopy(changed_info))
            self._transfers_dialog_calls += 1
        if uploads_changed:
            if reload_uploads:
                self.uploads_info_changed.emit(deepcopy(self._uploads_info))
            else:
                self.uploads_state_changed.emit(deepcopy(self._uploads_info))
            self._transfers_dialog_calls += 1
        self._init_changed_statuses()

    def show_dialog(self):
        """Open the transfers dialog, or raise it if already open."""
        if self._transfers_dialog:
            self._transfers_dialog.raise_dialog()
            return
        self._transfers_dialog = TransfersDialog(
            self._parent_window, self.revert_downloads.emit,
            self.pause_resume_clicked.emit, self.add_to_sync_folder.emit,
            self._handle_link, self.transfers_ready.emit,
            self._paused, self._dp, SPEED_CHART_CAPACITY,
            self._download_speeds, self._upload_speeds,
            self._signalserver_address)
        self._connect_slots()
        self._time_delta_timer.start()
        self._transfers_dialog.set_nodes_num(self._nodes_num)
        if not self._nodes_num:
            self._all_disconnected_timer.start()
        self._transfers_dialog.show(self.on_dialog_finished)
        self._update_speed_size()
        self._update_info(supress_paused=True)

    def on_dialog_finished(self):
        """Tear down when the dialog closes."""
        self._disconnect_slots()
        if self._time_delta_timer.isActive():
            self._time_delta_timer.stop()
        if self._all_disconnected_timer.isActive():
            self._all_disconnected_timer.stop()
        self._transfers_dialog = None
        self._transfers_dialog_calls = 0

    def clear(self):
        """Reset all stored transfers/speed state."""
        self._downloads_info.clear()
        self._uploads_info.clear()
        self._changed_info = dict()
        self._transfers_dialog_calls = 0
        self._update_info(supress_paused=True)
        self._init_speed_size()
        self._update_speed_size()

    def close(self):
        self.clear()
        if not self._transfers_dialog:
            return
        self._transfers_dialog.close()

    def set_paused_state(self, paused):
        """Record pause state; resuming triggers a full reload next update."""
        self._paused = paused
        if not self._transfers_dialog:
            return
        self._transfers_dialog.set_paused_state(paused)
        self._resuming = not paused

    def revert_failed(self, failed_uuids):
        if not self._transfers_dialog:
            return
        self._transfers_dialog.revert_failed(failed_uuids)
        self._update_info(supress_paused=True)

    def on_connected_nodes_changed(self, nodes_num):
        """Track node count; warn (debounced) when all nodes disconnect."""
        all_disconnected = self._nodes_num and not nodes_num
        self._nodes_num = nodes_num
        if not self._transfers_dialog:
            return
        if all_disconnected:
            if not self._all_disconnected_timer.isActive():
                self._all_disconnected_timer.start()
        elif nodes_num:
            if self._all_disconnected_timer.isActive():
                self._all_disconnected_timer.stop()
        self._transfers_dialog.set_nodes_num(nodes_num)

    def _on_all_disconnected(self):
        if self._transfers_dialog:
            self._transfers_dialog.show_all_disconnected_alert()

    def _connect_slots(self):
        # All connections queued: dialog updates happen on its own turn
        # of the event loop.
        self.downloads_info_changed.connect(
            self._transfers_dialog.on_downloads_info_changed,
            Qt.QueuedConnection)
        self.uploads_info_changed.connect(
            self._transfers_dialog.on_uploads_info_changed,
            Qt.QueuedConnection)
        self.downloads_state_changed.connect(
            self._transfers_dialog.on_downloads_state_changed,
            Qt.QueuedConnection)
        self.uploads_state_changed.connect(
            self._transfers_dialog.on_uploads_state_changed,
            Qt.QueuedConnection)
        self.speed_size_changed.connect(
            self._transfers_dialog.on_size_speed_changed,
            Qt.QueuedConnection)
        self._time_delta_timer.timeout.connect(
            self._transfers_dialog.refresh_time_deltas,
            Qt.QueuedConnection)

    def _disconnect_slots(self):
        try:
            self.downloads_info_changed.disconnect(
                self._transfers_dialog.on_downloads_info_changed)
            self.uploads_info_changed.disconnect(
                self._transfers_dialog.on_uploads_info_changed)
            self.downloads_state_changed.disconnect(
                self._transfers_dialog.on_downloads_state_changed)
            self.uploads_state_changed.disconnect(
                self._transfers_dialog.on_uploads_state_changed)
            self.speed_size_changed.disconnect(
                self._transfers_dialog.on_size_speed_changed)
            self._time_delta_timer.timeout.disconnect(
                self._transfers_dialog.refresh_time_deltas)
        except Exception as e:
            logger.warning("Can't disconnect transfers signal. Reason: %s", e)

    def _on_transfers_ready(self):
        """Dialog consumed one update; flush buffered changes when idle."""
        self._transfers_dialog_calls -= 1
        if self._transfers_dialog_calls < 0:
            logger.warning("More ready signals than transfers dialog calls")
            self._transfers_dialog_calls = 0
        if not self._transfers_dialog_calls:
            logger.verbose("Transfers dialog is ready")
            self._update_info(
                self._reload_downloads, self._reload_uploads,
                self._changed_info, self._downloads_changed,
                self._uploads_changed, supress_paused=True)

    def _update_speed_charts(self):
        # Append to the histories every second regardless of the dialog;
        # push to the dialog only when it is open.
        self._download_speeds.append(self._download_speed)
        self._upload_speeds.append(self._upload_speed)
        if not self._transfers_dialog:
            return
        self._transfers_dialog.update_speed_charts(
            self._download_speed, self._upload_speed)

    def dialog_opened(self):
        return bool(self._transfers_dialog)

    def _handle_link(self, link, is_shared):
        if is_shared:
            self.download_link_handler.emit(link)

    def set_signalserver_address(self, address):
        self._signalserver_address = address
        if self._transfers_dialog:
            self._transfers_dialog.set_signalserver_address(address)
class DatapoolDialog(QDialog):
    """Dialog that lets the user pick a Datapool project and import its
    image entries as a :class:`KnechtModel`.

    Emits :attr:`finished` with the assembled model and a suggested file
    name when accepted. The database connection runs in a background
    thread (``DatapoolController``) and is torn down on close.
    """

    # Emitted on accept with (model of checked images, suggested save path)
    finished = Signal(KnechtModel, Path)
    # Column carrying the check box in the image view
    check_column = Kg.NAME

    # Close connection after 10 minutes of user inactivity
    timeout = 600000

    def __init__(self, ui):
        """ Dialog to import Datapool items

        :param modules.gui.main_ui.KnechtWindow ui: Main Window
        """
        super(DatapoolDialog, self).__init__(ui)
        SetupWidget.from_ui_file(self, Resource.ui_paths['knecht_datapool'])
        self.setWindowTitle('Datapool Import')

        self._asked_for_close = False
        self._current_project_name = ''
        self.ui = ui

        # Avoid db/thread polling within timeout
        self.action_timeout = QTimer()
        self.action_timeout.setSingleShot(True)
        self.action_timeout.setInterval(300)

        # --- Translations n Style ---
        # (the bare annotations below only inform the IDE about widgets
        #  created by SetupWidget.from_ui_file)
        self.project_icon: QLabel
        self.project_icon.setPixmap(IconRsc.get_pixmap('storage'))
        self.project_title: QLabel
        self.project_title.setText(_('Datapool Projekte'))
        self.image_icon: QLabel
        self.image_icon.setPixmap(IconRsc.get_pixmap('img'))
        self.image_title: QLabel
        self.image_title.setText(_('Bildeinträge'))
        self.details_btn: QPushButton
        self.details_btn.setText(_('Detailspalten anzeigen'))
        self.details_btn.toggled.connect(self.toggle_view_columns)
        self.filter_box: QLineEdit
        self.filter_box.setPlaceholderText(
            _('Im Baum tippen um zu filtern...'))

        # -- Trigger filter update for all views ---
        self.update_filter_timer = QTimer()
        self.update_filter_timer.setInterval(5)
        self.update_filter_timer.setSingleShot(True)
        self.update_filter_timer.timeout.connect(self.update_filter_all_views)
        self.filter_box: QLineEdit
        self.filter_box.textChanged.connect(self.update_filter_timer.start)

        # --- Init Tree Views ---
        self.project_view = KnechtTreeViewCheckable(
            self, None, filter_widget=self.filter_box,
            replace=self.project_view)
        self.image_view = KnechtTreeViewCheckable(
            self, None, filter_widget=self.filter_box,
            replace=self.image_view)

        # --- Database Connector ---
        self.dp = DatapoolController(self)
        self.dp.add_projects.connect(self.update_project_view)
        self.dp.add_images.connect(self.update_image_view)
        self.dp.error.connect(self.error)

        # Connection timeout
        self.connection_timeout = QTimer()
        self.connection_timeout.setInterval(self.timeout)
        self.connection_timeout.setSingleShot(True)
        self.connection_timeout.timeout.connect(self.connection_timed_out)

        # Make sure to end thread on App close
        self.ui.is_about_to_quit.connect(self.close)

        # Intercept mouse press events from project view
        self.org_view_mouse_press_event = self.project_view.mousePressEvent
        self.project_view.mousePressEvent = self.view_mouse_press_event

        # Start thread
        QTimer.singleShot(100, self.start_datapool_connection)

    def view_mouse_press_event(self, event: QMouseEvent):
        """Replacement mousePressEvent for the project view.

        On left click selects the clicked project and requests its images,
        rate-limited by ``action_timeout``; always forwards the event to
        the original handler afterwards."""
        if event.buttons() == Qt.LeftButton \
                and not self.action_timeout.isActive():
            idx = self.project_view.indexAt(event.pos())
            name = idx.siblingAtColumn(Kg.NAME).data(Qt.DisplayRole)
            self._current_project_name = name
            _id = idx.siblingAtColumn(Kg.ID).data(Qt.DisplayRole)
            LOGGER.debug('Project %s Id %s selected', name, _id)
            self.action_timeout.start()

            if _id:
                self.request_project(_id)

        self.org_view_mouse_press_event(event)

    def update_filter_all_views(self):
        """Apply the shared filter box text to the image view only."""
        # Do not filter project view
        self.project_view.filter_timer.stop()

        # Update image view filter
        if not self.filter_box.text():
            self.image_view.clear_filter()
        else:
            self.image_view.filter_timer.start()

    def start_datapool_connection(self):
        """Kick off the database thread and arm the inactivity timeout."""
        self.show_progress(_('Verbinde mit Datenbank'))
        self.dp.start()
        self.connection_timeout.start()

    @Slot(dict)
    def update_project_view(self, projects: dict):
        """Populate the project tree.

        ``projects`` maps project id -> (Name, ModelYear, JobNo)."""
        if not projects:
            return

        root_item = KnechtItem(
            None,
            ('', _('Bezeichnung'), _('Modelljahr'), _('Job'), '', _('Id')))

        for num_idx, (_id, project_data) in enumerate(projects.items()):
            data = (f'{num_idx:03d}', *project_data, '', str(_id))
            p_item = KnechtItem(root_item, data)
            KnechtItemStyle.style_column(p_item, 'render_preset',
                                         column=Kg.NAME)
            root_item.append_item_child(p_item)

        update_model = UpdateModel(self.project_view)
        update_model.update(KnechtModel(root_item))
        self.toggle_view_columns(self.details_btn.isChecked())
        self.project_view.setHeaderHidden(False)

    def request_project(self, _id: str):
        """Ask the database thread for the images of project ``_id``."""
        self.image_view.clear_filter()
        self.image_view.progress_msg.msg(_('Daten werden angefordert'))
        self.image_view.progress_msg.show_progress()
        self.dp.request_project(_id)
        # Re-arm the inactivity timeout on every user-triggered request
        self.connection_timeout.start()

    @Slot(dict)
    def update_image_view(self, images: dict):
        """Populate the image tree with checkable entries, all checked.

        ``images`` maps image id -> (name, priority, created, pr_string,
        opt_id, produced_image_id)."""
        if not images:
            return

        root_item = KnechtItem(None, ('', _('Name'), _('Priorität'),
                                      _('Erstellt'), '',
                                      _('wagenbauteil Id')))

        for num_idx, (img_id, image_data) in enumerate(images.items()):
            """ (name, priority, created, pr_string, opt_id, produced_image_id) """
            name, priority, created, pr_string, opt_id, produced_image_id = image_data
            img_item = KnechtItem(
                root_item,
                (f'{num_idx:03d}', name, priority, created, '', str(opt_id)))
            KnechtItemStyle.style_column(img_item, 'preset', Kg.NAME)
            root_item.append_item_child(img_item)

        update_model = UpdateModel(self.image_view)
        update_model.update(
            KnechtModel(root_item, checkable_columns=[self.check_column]))
        self.toggle_view_columns(self.details_btn.isChecked())
        self.image_view.setHeaderHidden(False)
        self.image_view.check_items([], Kg.NAME, check_all=True)

    def create_presets(self):
        """Build a KnechtModel from the checked image entries and emit
        :attr:`finished` with a date/project based file name."""
        root_item = KnechtItem()

        for (src_index, item) in \
                self.image_view.editor.iterator.iterate_view():
            if item.data(self.check_column, Qt.CheckStateRole) == Qt.Unchecked:
                continue

            name = item.data(Kg.NAME)
            data = (f'{root_item.childCount():03d}', name, '', 'preset', '',
                    Kid.convert_id(f'{root_item.childCount()}'))
            root_item.insertChildren(root_item.childCount(), 1, data)

        date = datetime.datetime.now().strftime('%Y%m%d')
        project = self._current_project_name.replace(' ', '_')
        self.finished.emit(KnechtModel(root_item),
                           Path(f'{date}_{project}.xml'))

    def toggle_view_columns(self, checked: bool):
        """Show the detail columns when ``checked``, otherwise name only."""
        columns = {
            Kg.ORDER, Kg.NAME, Kg.VALUE, Kg.TYPE, Kg.REF, Kg.ID, Kg.DESC
        }
        if checked:
            show_columns = {Kg.NAME, Kg.VALUE, Kg.TYPE, Kg.ID}
        else:
            show_columns = {Kg.NAME}

        for col in columns:
            self.project_view.setColumnHidden(col, col not in show_columns)
            self.image_view.setColumnHidden(col, col not in show_columns)

        self._setup_view_headers()

    def _setup_view_headers(self):
        setup_header_layout(self.project_view)
        setup_header_layout(self.image_view)

    @Slot(str)
    def error(self, error_msg):
        """Database-thread error: hide progress overlays, show the message."""
        self.project_view.progress_msg.hide_progress()
        self.image_view.progress_msg.hide_progress()
        self.ui.msg(error_msg, 9000)

    @Slot(str)
    def show_progress(self, msg: str):
        """Show ``msg`` as progress on the project view and a generic
        'choose a project' prompt on the image view."""
        self.project_view.progress_msg.msg(msg)
        self.project_view.progress_msg.show_progress()
        self.image_view.progress_msg.msg(_('Projekt auswählen'))
        self.image_view.progress_msg.show_progress()
        self.image_view.progress_msg.progressBar.setValue(0)

    def connection_timed_out(self):
        """Inactivity timeout: inform the user and drop the db connection."""
        self.show_progress(_('Zeitüberschreitung'))
        self.ui.msg(
            _('Zeitüberschreitung bei Datenbankverbindung. Die Verbindung wurde automatisch getrennt.'
              ), 12000)
        self.dp.close()

    def reject(self):
        # Route cancel through closeEvent so the user is asked to confirm
        self.close()

    def accept(self):
        # Emit the assembled model, then close without the confirm prompt
        self.create_presets()
        self._asked_for_close = True
        self.close()

    def closeEvent(self, close_event):
        """Confirm the abort, then shut down the database thread."""
        LOGGER.debug('Datapool close event called. %s', close_event.type())
        if self._ask_abort_close():
            close_event.ignore()
            return False

        LOGGER.info(
            'Datapool window close event triggered. Aborting database connection'
        )

        # End thread
        if not self._finalize_dialog():
            close_event.ignore()
            return False

        close_event.accept()
        return True

    def _ask_abort_close(self):
        """Return True when the close should be cancelled (user said no)."""
        if self._asked_for_close:
            return False

        msg_box = AskToContinue(self)

        if not msg_box.ask(
                title=_('Importvorgang'),
                txt=_('Soll der Vorgang wirklich abgebrochen werden?'),
                ok_btn_txt=_('Ja'),
                abort_btn_txt=_('Nein'),
        ):
            # Cancel close
            return True

        # Close confirmed
        return False

    def _finalize_dialog(self, self_destruct: bool = True) -> bool:
        """Close the database thread; optionally schedule self-deletion.

        :return: False when the thread refused to close (dialog stays open).
        """
        LOGGER.debug('Datapool dialog is finishing tasks.')
        if not self.dp.close():
            return False

        if self_destruct:
            self.deleteLater()
        return True
class BaseThreadCapturer(QObject):
    """Base class that repeatedly grabs frames on a worker thread and
    converts them to :class:`QImage` on the Qt side.

    The capture loop: a zero-interval single-shot timer spawns one reader
    thread per tick; the reader emits ``readFinished``, which triggers the
    numpy->QImage conversion, which emits ``convertFinished`` to re-arm the
    timer. Subclasses override :meth:`read` (frame source) and
    :meth:`convert` (frame post-processing).
    """

    started = Signal()
    imageChanged = Signal(QImage)       # converted frame, ready for display
    readFinished = Signal(np.ndarray)   # raw frame from the reader thread
    convertFinished = Signal()          # schedules the next read

    def __init__(self, parent=None):
        super().__init__(parent)
        self._timer = QTimer(singleShot=True,
                             interval=0,
                             timeout=self._handle_timeout)
        self.readFinished.connect(self.convert_cv_to_qimage)
        self.convertFinished.connect(self._timer.start)
        self._is_stopped = threading.Event()
        self._parameters = None
        self._thread = None

    @property
    def parameters(self):
        """Last parameters passed to :meth:`start` (opaque to this base)."""
        return self._parameters

    @Slot("QVariant")
    def start(self, parameters):
        """Begin the capture loop with the given parameters."""
        self._is_stopped.clear()
        self._timer.start()
        self.started.emit()
        self._parameters = parameters

    @Slot()
    def stop(self):
        """Stop the loop and join the reader thread, if one is running."""
        if self._timer.isActive():
            self._timer.stop()
        self._is_stopped.set()
        if self._thread is not None and self._thread.is_alive():
            self._thread.join()
        self._thread = None

    def _handle_timeout(self):
        # One short-lived daemon thread per frame read
        self._thread = threading.Thread(target=self._read_from_capture,
                                        daemon=True)
        self._thread.start()

    def _read_from_capture(self):
        # is_set() replaces the deprecated isSet() alias
        if self._is_stopped.is_set():
            return
        ret, frame = self.read()
        # shiboken2.isValid guards against emitting on an already-deleted
        # QObject (stop/destruction can race with this worker thread)
        if ret and shiboken2.isValid(self):
            self.readFinished.emit(frame.copy())

    @Slot(np.ndarray)
    def convert_cv_to_qimage(self, frame):
        """Convert a raw frame to QImage and emit it; re-arm the loop."""
        result = self.convert(frame)
        qimage = qimage2ndarray.array2qimage(result)
        self.imageChanged.emit(qimage.copy())
        self.convertFinished.emit()

    def read(self):
        """Default frame source; subclasses override.

        Bug fix: the original returned ``np.ndarray()``, which raises
        ``TypeError`` because ``ndarray`` requires a shape argument.
        Return a valid empty array instead.
        """
        return True, np.empty(0)

    def convert(self, frame):
        """Default conversion; subclasses override."""
        return QImage()
class Chrono(QMainWindow):
    """Countdown timer window with tray icon, progress bar and optional
    popup / tray / sound notifications when the countdown ends.

    Bug fix: :meth:`notify` previously created ``QMediaPlayer`` as a local
    variable, which is garbage-collected as soon as the method returns, so
    the notification sound was cut off or never played. The player is now
    kept alive as ``self._player``.
    """

    def __init__(self, parent=None):
        super(Chrono, self).__init__(parent)
        self.createMenus()
        self.createSystemTrayIcon()
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.tick)
        self.isRunning = False
        self.refresh_rate = 100  # ms
        self.progressBar = QProgressBar()
        self.progressBar.setValue(0)
        # Unix timestamps delimiting the running countdown
        self.begin_time = self.end_time = 0
        self.label = QLabel(" ")
        self.button = QPushButton()
        self.button.setIcon(self.style().standardIcon(QStyle.SP_MediaPause))
        # Offsets saved while paused, used to rebuild begin/end on resume
        self.end_delay = self.begin_delay = 0
        bottomLayout = QHBoxLayout()
        bottomLayout.addWidget(self.progressBar)
        bottomLayout.addWidget(self.button)
        self.button.clicked.connect(self.pause)
        mainLayout = QVBoxLayout()
        mainLayout.addWidget(self.label)
        mainLayout.addLayout(bottomLayout)
        centralWidget = QWidget()
        centralWidget.setLayout(mainLayout)
        self.setCentralWidget(centralWidget)
        self.statusBar = QStatusBar(self)
        self.setStatusBar(self.statusBar)
        # Notification master switch and the three channels
        self.notification = self.notification_popup = \
            self.notification_tray = self.notification_sound = True
        self.notification_soundfile = os.path.dirname(
            sys.argv[0]) + '/notification.mp3'  # os.path.dirname(__file__) +
        # Keeps the notification QMediaPlayer alive (see class docstring)
        self._player = None
        self.setWindowTitle(TITLE)
        self.resize(400, self.sizeHint().height())
        self.setFixedHeight(self.sizeHint().height())

    def createMenus(self):
        """Build the menu bar (file / options / help, French labels)."""
        menus = QMenuBar()
        fileMenu = menus.addMenu("&Fichier")
        file_newMenu = fileMenu.addMenu(
            self.style().standardIcon(QStyle.SP_FileIcon), "Nouveau")
        file_newMenu.addAction("Date", self.createDateDialog, 'CTRL+D')
        file_newMenu.addAction("Durée", self.createDurationDialog, 'CTRL+N')
        fileMenu.addSeparator()
        fileMenu.addAction(self.style().standardIcon(QStyle.SP_BrowserStop),
                           "Quitter", sys.exit, 'CTRL+Q')
        optionMenu = menus.addMenu("&Options")
        optionMenu.addAction(
            self.style().standardIcon(QStyle.SP_MessageBoxInformation),
            "Évènements", self.createNotificationPopup, 'CTRL+O')
        optionMenu.addAction(
            QAction("Rester au premier plan",
                    optionMenu,
                    triggered=self.stayOnTop,
                    checkable=True))
        aideMenu = menus.addMenu("&Aide")
        aideMenu.addAction(
            self.style().standardIcon(QStyle.SP_DialogHelpButton), "À propos",
            lambda: QMessageBox.information(
                self, "À propos", TITLE + " " + str(VERSION)), 'CTRL+H')
        aideMenu.addSeparator()
        aideMenu.addAction(
            self.style().standardIcon(QStyle.SP_TitleBarMenuButton),
            "À propos de Qt", QApplication.aboutQt, 'CTRL+A')
        self.setMenuBar(menus)

    def createSystemTrayIcon(self):
        """Create the tray icon with pause/quit context menu."""
        self.tray = QSystemTrayIcon()
        self.tray.setIcon(QIcon(os.path.dirname(sys.argv[0]) +
                                '/icon.svg'))  # os.path.dirname(__file__) +
        self.tray.setToolTip(TITLE)
        self.tray.show()
        systemTrayMenu = QMenu()
        pauseAction = QAction(self.style().standardIcon(QStyle.SP_MediaPause),
                              "Pause / Reprendre", systemTrayMenu)
        pauseAction.triggered.connect(self.pause)
        systemTrayMenu.addAction(pauseAction)
        systemTrayMenu.addSeparator()
        systemTrayMenu.addAction(
            self.style().standardIcon(QStyle.SP_BrowserStop), "Quitter",
            sys.exit)
        self.tray.setContextMenu(systemTrayMenu)
        self.tray.activated.connect(self.show)

    def stayOnTop(self):
        """Toggle the always-on-top window flag (re-show is required)."""
        self.setWindowFlags(self.windowFlags() ^ Qt.WindowStaysOnTopHint)
        # self.windowFlags() | Qt.CustomizeWindowHint | Qt.Window | Qt.WindowStaysOnTopHint | Qt.X11BypassWindowManagerHint)
        # Qt.Dialog | Qt.WindowStaysOnTopHint | Qt.X11BypassWindowManagerHint)
        self.show()

    def createNotificationPopup(self):
        """Modal dialog to enable/disable the notification channels."""
        popup = QDialog(self)
        popup.setFixedSize(popup.sizeHint().height(), popup.sizeHint().width())
        popup.setWindowTitle("Évènements")
        innerLayout = QVBoxLayout()
        GroupBox = QGroupBox("Activer les notifications")
        GroupBox.setCheckable(True)
        GroupBox.setChecked(self.notification)
        checkBox_popup = QCheckBox("Afficher une popup")
        checkBox_notification = QCheckBox("Afficher une notification")
        checkBox_sound = QCheckBox("Jouer un son")
        if self.notification_popup:
            checkBox_popup.setCheckState(Qt.Checked)
        if self.notification_tray:
            checkBox_notification.setCheckState(Qt.Checked)
        if self.notification_sound:
            checkBox_sound.setCheckState(Qt.Checked)
        innerLayout.addWidget(checkBox_popup)
        innerLayout.addWidget(checkBox_notification)
        innerLayout.addWidget(checkBox_sound)
        innerLayout.addStretch(1)
        GroupBox.setLayout(innerLayout)
        button = QPushButton("Ok")
        button.clicked.connect(lambda: self.changeNotificationSettings(
            popup, GroupBox, checkBox_popup, checkBox_notification,
            checkBox_sound))
        outerLayout = QVBoxLayout()
        outerLayout.addWidget(GroupBox)
        outerLayout.addWidget(button)
        popup.setLayout(outerLayout)
        popup.exec_()

    def changeNotificationSettings(self, popup, GroupBox, checkBox_popup,
                                   checkBox_notification, checkBox_sound):
        """Persist the notification dialog state and close it."""
        self.notification, self.notification_popup, self.notification_tray, \
            self.notification_sound = GroupBox.isChecked(
            ), checkBox_popup.isChecked(), checkBox_notification.isChecked(
            ), checkBox_sound.isChecked()
        # All channels off means notifications are effectively disabled
        if not any([
                self.notification_popup, self.notification_tray,
                self.notification_sound
        ]):
            self.notification = False
        popup.close()

    def createDateDialog(self):
        """Dialog asking for a target time of day."""
        popup = QDialog(self)
        popup.setFixedSize(270, 60)
        popup.setWindowTitle("Nouvelle date")
        layout = QHBoxLayout()
        prefix = QLabel("Heure cible: ")
        layout.addWidget(prefix)
        qline = QTimeEdit()
        qline.setDisplayFormat("hh:mm:ss")
        qline.setTime(QTime.currentTime())
        layout.addWidget(qline)
        button = QPushButton("Ok")
        button.clicked.connect(lambda: self.createDate(
            popup,
            qline.time().hour(),
            qline.time().minute(),
            qline.time().second()))
        layout.addWidget(button)
        popup.setLayout(layout)
        popup.exec_()

    def createDurationDialog(self):
        """Dialog asking for a duration (hours/minutes/seconds)."""
        popup = QDialog(self)
        popup.setFixedSize(150, 150)
        popup.setWindowTitle("Nouvelle durée")
        layout = QVBoxLayout()
        hourLayout = QHBoxLayout()
        hourLabel = QLabel("Heures:")
        hourSpin = QSpinBox()
        hourLayout.addWidget(hourLabel)
        hourLayout.addWidget(hourSpin)
        minuteLayout = QHBoxLayout()
        minuteLabel = QLabel("Minutes:")
        minuteSpin = QSpinBox()
        minuteLayout.addWidget(minuteLabel)
        minuteLayout.addWidget(minuteSpin)
        secondLayout = QHBoxLayout()
        secondLabel = QLabel("Secondes:")
        secondSpin = QSpinBox()
        secondLayout.addWidget(secondLabel)
        secondLayout.addWidget(secondSpin)
        layout.addLayout(hourLayout)
        layout.addLayout(minuteLayout)
        layout.addLayout(secondLayout)
        button = QPushButton("Ok")
        button.clicked.connect(lambda: self.createDuration(
            popup, hourSpin.value(), minuteSpin.value(), secondSpin.value()))
        layout.addWidget(button)
        popup.setLayout(layout)
        popup.exec_()

    def createDuration(self, popup: QDialog, hours: int, minutes: int,
                       seconds: int):
        """Start a countdown lasting the given duration."""
        popup.close()
        self.begin_time = datetime.timestamp(datetime.now())
        self.end_time = self.begin_time + seconds + minutes * 60 + hours * 3600
        self.progressBar.setRange(0, 100)
        self.progressBar.setValue(0)
        self.isRunning = True
        self.timer.stop()
        self.timer.start(self.refresh_rate)

    def createDate(self, popup: QDialog, hours: int, minutes: int,
                   seconds: int):
        """Start a countdown ending at the given time of day.

        A target equal to the current time means 24h from now."""
        popup.close()
        self.begin_time = datetime.timestamp(datetime.now())
        now = datetime.now().time()
        target = time(hours, minutes, seconds)
        now_delta = timedelta(hours=now.hour,
                              minutes=now.minute,
                              seconds=now.second)
        target_delta = timedelta(hours=target.hour,
                                 minutes=target.minute,
                                 seconds=target.second)
        if target_delta == now_delta:
            self.end_time = self.begin_time + 60 * 60 * 24
        else:
            # timedelta.seconds is non-negative, so a target earlier today
            # wraps to the same time tomorrow
            d = target_delta - now_delta
            self.end_time = self.begin_time + d.seconds
        self.progressBar.setRange(0, 100)
        self.progressBar.setValue(0)
        self.isRunning = True
        self.timer.stop()
        self.timer.start(self.refresh_rate)

    def tick(self):
        """Refresh progress/labels; fire the notification when time is up."""
        self.progressBar.setValue(
            100 * (datetime.timestamp(datetime.now()) - self.begin_time) /
            (self.end_time - self.begin_time))
        seconds = int(
            ceil(self.end_time - datetime.timestamp(datetime.now())) % 60)
        minutes = int(
            ceil(self.end_time - datetime.timestamp(datetime.now())) / 60 %
            60)
        hours = int(
            ceil(self.end_time - datetime.timestamp(datetime.now())) / 3600)
        self.label.setText(f'{hours:02}:{minutes:02}:{seconds:02}')
        self.setWindowTitle(f'{TITLE} - {hours:02}:{minutes:02}:{seconds:02}')
        self.tray.setToolTip(f'{hours:02}:{minutes:02}:{seconds:02}')
        if datetime.timestamp(datetime.now()) >= self.end_time:
            self.isRunning = False
            self.timer.stop()
            self.progressBar.setRange(0, 0)
            self.show()
            self.notify()

    def notify(self):
        """Emit the enabled notifications (tray, sound, popup)."""
        if not self.notification:
            return
        if self.notification_tray:
            self.tray.showMessage(
                "Finished", "Le décompte est terminé",
                self.style().standardIcon(QStyle.SP_MessageBoxInformation))
        if self.notification_sound:
            # Keep the player referenced on self: a local variable would be
            # garbage-collected on return, silencing the playback.
            self._player = QMediaPlayer()
            self._player.setMedia(
                QUrl.fromLocalFile(self.notification_soundfile))
            self._player.play()
        if self.notification_popup:
            QMessageBox.information(self, "Finished",
                                    "Le décompte est terminé")

    def pause(self):
        """Toggle pause/resume, preserving elapsed and remaining time."""
        if not self.isRunning:
            return
        self.progressBar.setDisabled(self.timer.isActive())
        if self.timer.isActive():
            # Pausing: remember remaining and elapsed offsets
            self.end_delay = self.end_time - datetime.timestamp(
                datetime.now())
            self.begin_delay = datetime.timestamp(
                datetime.now()) - self.begin_time
            print(self.begin_time)
            print(self.end_time)
            print(self.end_delay)
            self.statusBar.showMessage("Pause")
            self.tray.setToolTip(self.tray.toolTip() + ' - Pause')
            self.timer.stop()
            self.button.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
        else:
            # Resuming: rebuild begin/end around the current timestamp
            self.begin_time = datetime.timestamp(
                datetime.now()) - self.begin_delay
            self.end_time = datetime.timestamp(datetime.now()) + self.end_delay
            print(self.begin_time)
            print(self.end_time)
            self.statusBar.clearMessage()
            self.timer.start()
            self.button.setIcon(self.style().standardIcon(
                QStyle.SP_MediaPause))

    # Override
    def closeEvent(self, event):
        # Hide to tray instead of quitting
        self.hide()
        event.ignore()
class RenderWindow(QWindow):
    """OpenGL window that renders a rotating colored triangle.

    Rendering is driven by a QTimer started while the window is exposed;
    GL resources are created lazily on first render."""

    def __init__(self, format):
        super(RenderWindow, self).__init__()
        self.setSurfaceType(QWindow.OpenGLSurface)
        self.setFormat(format)
        self.context = QOpenGLContext(self)
        self.context.setFormat(self.requestedFormat())
        if not self.context.create():
            raise Exception("Unable to create GL context")
        self.program = None   # lazily created in initGl() on first render
        self.timer = None     # created on first expose
        self.angle = 0        # current rotation angle in degrees

    def initGl(self):
        """Compile/link the shaders and upload vertex/color data.

        Must be called with the GL context current."""
        self.program = QOpenGLShaderProgram(self)
        self.vao = QOpenGLVertexArrayObject()
        self.vbo = QOpenGLBuffer()

        format = self.context.format()
        useNewStyleShader = format.profile() == QSurfaceFormat.CoreProfile
        # Try to handle 3.0 & 3.1 that do not have the core/compatibility profile
        # concept 3.2+ has. This may still fail since version 150 (3.2) is
        # specified in the sources but it's worth a try.
        if (format.renderableType() == QSurfaceFormat.OpenGL
                and format.majorVersion() == 3 and format.minorVersion() <= 1):
            useNewStyleShader = not format.testOption(
                QSurfaceFormat.DeprecatedFunctions)

        vertexShader = vertexShaderSource if useNewStyleShader else vertexShaderSource110
        fragmentShader = fragmentShaderSource if useNewStyleShader else fragmentShaderSource110

        if not self.program.addShaderFromSourceCode(QOpenGLShader.Vertex,
                                                    vertexShader):
            raise Exception("Vertex shader could not be added: {} ({})".format(
                self.program.log(), vertexShader))
        if not self.program.addShaderFromSourceCode(QOpenGLShader.Fragment,
                                                    fragmentShader):
            raise Exception(
                "Fragment shader could not be added: {} ({})".format(
                    self.program.log(), fragmentShader))
        if not self.program.link():
            raise Exception("Could not link shaders: {}".format(
                self.program.log()))

        self.posAttr = self.program.attributeLocation("posAttr")
        self.colAttr = self.program.attributeLocation("colAttr")
        self.matrixUniform = self.program.uniformLocation("matrix")

        self.vbo.create()
        self.vbo.bind()
        # Keep byte buffers alive on self so the GL upload reads valid memory
        self.verticesData = vertices.tobytes()
        self.colorsData = colors.tobytes()
        verticesSize = 4 * vertices.size   # float32 = 4 bytes per element
        colorsSize = 4 * colors.size
        # Positions first, colors appended after them in the same VBO
        self.vbo.allocate(VoidPtr(self.verticesData),
                          verticesSize + colorsSize)
        self.vbo.write(verticesSize, VoidPtr(self.colorsData), colorsSize)
        self.vbo.release()

        vaoBinder = QOpenGLVertexArrayObject.Binder(self.vao)
        if self.vao.isCreated():  # have VAO support, use it
            self.setupVertexAttribs()

    def setupVertexAttribs(self):
        """Bind the VBO and point the pos/color attributes at its halves."""
        self.vbo.bind()
        self.program.setAttributeBuffer(self.posAttr, GL.GL_FLOAT, 0, 2)
        self.program.setAttributeBuffer(self.colAttr, GL.GL_FLOAT,
                                        4 * vertices.size, 3)
        self.program.enableAttributeArray(self.posAttr)
        self.program.enableAttributeArray(self.colAttr)
        self.vbo.release()

    def exposeEvent(self, event):
        """Start rendering while exposed, stop the timer when hidden."""
        if self.isExposed():
            self.render()
            if self.timer is None:
                self.timer = QTimer(self)
                self.timer.timeout.connect(self.slotTimer)
            if not self.timer.isActive():
                self.timer.start(10)
        else:
            if self.timer and self.timer.isActive():
                self.timer.stop()

    def render(self):
        """Draw one frame: lazy GL init, then the rotated triangle."""
        if not self.context.makeCurrent(self):
            raise Exception("makeCurrent() failed")
        functions = self.context.functions()
        if self.program is None:
            # First frame: set global GL state and build shaders/buffers
            functions.glEnable(GL.GL_DEPTH_TEST)
            functions.glClearColor(0, 0, 0, 1)
            self.initGl()

        retinaScale = self.devicePixelRatio()
        functions.glViewport(0, 0,
                             self.width() * retinaScale,
                             self.height() * retinaScale)
        functions.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
        self.program.bind()
        matrix = QMatrix4x4()
        matrix.perspective(60, 4 / 3, 0.1, 100)
        matrix.translate(0, 0, -2)
        matrix.rotate(self.angle, 0, 1, 0)
        self.program.setUniformValue(self.matrixUniform, matrix)
        if self.vao.isCreated():
            self.vao.bind()
        else:  # no VAO support, set the vertex attribute arrays now
            self.setupVertexAttribs()
        functions.glDrawArrays(GL.GL_TRIANGLES, 0, 3)
        self.vao.release()
        self.program.release()
        # swapInterval is 1 by default which means that swapBuffers() will (hopefully) block
        # and wait for vsync.
        self.context.swapBuffers(self)
        self.context.doneCurrent()

    def slotTimer(self):
        """Timer slot: render a frame and advance the rotation."""
        self.render()
        self.angle += 1

    def glInfo(self):
        """Return a formatted description of the GL vendor/version/format."""
        if not self.context.makeCurrent(self):
            raise Exception("makeCurrent() failed")
        functions = self.context.functions()
        text = """Vendor: {}\nRenderer: {}\nVersion: {}\nShading language: {}
\nContext Format: {}\n\nSurface Format: {}""".format(
            functions.glGetString(GL.GL_VENDOR),
            functions.glGetString(GL.GL_RENDERER),
            functions.glGetString(GL.GL_VERSION),
            functions.glGetString(GL.GL_SHADING_LANGUAGE_VERSION),
            print_surface_format(self.context.format()),
            print_surface_format(self.format()))
        self.context.doneCurrent()
        return text
class View(QWidget):
    """Image/mask painting widget driven mainly by a tablet stylus.

    Strokes are queued in ``self.path`` (mode, from, to, pressure) and
    rasterized into ``self.mask`` during paintEvent; a single-shot timer
    coalesces repaints while drawing."""

    previous = Signal()  # request to move to the previous image
    next = Signal()      # request to move to the next image

    def __init__(self, window):
        super(View, self).__init__(window)
        self.setFocusPolicy(Qt.StrongFocus)
        self.shiftKey = False
        self.ctrlKey = False
        self.lastMousePos = QPoint()
        self.lastTabletPos = QPoint()
        self.mode = 'add'        # 'add' paints mask, 'del' erases it
        self.maskOnly = False    # when True, show only the mask overlay
        self.refresh = QTimer(self)
        self.refresh.setSingleShot(True)
        self.refresh.timeout.connect(self.repaint)
        self.addCursor = makeCursor('images/cursor-add.png',
                                    QColor.fromRgbF(0.5, 0.5, 1.0))
        self.delCursor = makeCursor('images/cursor-del.png',
                                    QColor.fromRgbF(1.0, 0.5, 0.5))
        self.setCursor(self.addCursor)
        self.imagefile = None
        self.maskfile = None
        self.image = QImage()
        self.mask = QImage(self.image.size(), QImage.Format_RGB32)
        self.mask.fill(Qt.black)
        self.changed = False
        self.update()
        self.path = list()   # queued strokes, drained by paintEvent
        self.load_threads = QThreadPool()
        self.load_threads.setMaxThreadCount(4)

    def load(self, filename):
        """Asynchronously load an image (and mask) via the thread pool."""
        self.load_threads.start(LoadTask(self, filename))

    def save(self):
        """Write the current mask to maskfile as a 1-bit PNG, if changed."""
        if self.maskfile and self.changed:
            # Let any in-flight loads finish before snapshotting the mask
            self.load_threads.waitForDone()
        # Re-check: a finished load may have replaced mask state
        if self.maskfile and self.changed:
            bitmap = self.mask.createMaskFromColor(
                QColor.fromRgbF(1.0, 0.0, 1.0).rgb())
            bitmap.save(str(self.maskfile), "PNG")
            self.changed = False

    def update(self):
        """Recompute the letterboxed target rect for the image and repaint."""
        widgetRatio = self.width() / self.height()
        # max(1, ...) guards against a null image with zero height
        aspectRatio = self.image.width() / max(1, self.image.height())
        if aspectRatio >= widgetRatio:
            width = self.width()
            height = self.width() / aspectRatio
        else:
            width = self.height() * aspectRatio
            height = self.height()
        self.rc = QRectF((self.width() - width) / 2.0,
                         (self.height() - height) / 2.0, width, height)
        self.repaint()

    def resizeEvent(self, event):
        self.update()

    def paintEvent(self, event):
        """Flush queued strokes into the mask, then composite to screen.

        'add' strokes paint magenta, 'del' strokes paint black; the pen
        width grows quadratically with stylus pressure."""
        p = QPainter(self.mask)
        for (mode, p1, p2, weight) in self.path:
            if mode == 'add':
                p.setPen(
                    QPen(QColor.fromRgbF(1.0, 0.0, 1.0), (weight * 10.0)**2,
                         Qt.SolidLine, Qt.RoundCap, Qt.RoundJoin))
            else:
                p.setPen(
                    QPen(QColor.fromRgbF(0.0, 0.0, 0.0), (weight * 10.0)**2,
                         Qt.SolidLine, Qt.RoundCap, Qt.RoundJoin))
            p.drawLine(realCoords(p1, self.mask.rect()),
                       realCoords(p2, self.mask.rect()))
            self.changed = True
        self.path = list()
        p.end()
        p = QPainter(self)
        p.setCompositionMode(QPainter.CompositionMode_SourceOver)
        if not self.maskOnly:
            p.drawImage(self.rc, self.image)
        # Additive blend lets the mask glow over the image
        p.setCompositionMode(QPainter.CompositionMode_Plus)
        p.drawImage(self.rc, self.mask)
        p.end()

    def closeEvent(self, event):
        self.refresh.stop()
        event.accept()

    def enterEvent(self, event):
        # Grab focus on hover so keyboard modifiers are tracked
        self.setFocus(Qt.OtherFocusReason)

    def keyPressEvent(self, event):
        k = event.key()
        if k == Qt.Key_Shift:
            self.shiftKey = True
        if k == Qt.Key_Control:
            self.ctrlKey = True
        if k == Qt.Key_Space:
            # Space toggles mask-only preview
            self.maskOnly = not self.maskOnly
            self.repaint()

    def keyReleaseEvent(self, event):
        k = event.key()
        mod = event.modifiers()
        if k == Qt.Key_Shift:
            self.shiftKey = False
        if k == Qt.Key_Control:
            self.ctrlKey = False

    def mousePressEvent(self, event):
        """Extra mouse buttons: toggle mode/preview, navigate images."""
        x = event.x()
        y = event.y()
        self.lastMousePos = event.pos()
        if event.button() == Qt.ExtraButton1:
            # Toggle add/erase mode and swap the cursor accordingly
            if self.mode == 'add':
                self.mode = 'del'
                self.setCursor(self.delCursor)
            else:
                self.mode = 'add'
                self.setCursor(self.addCursor)
        elif event.button() == Qt.ExtraButton2:
            self.maskOnly = not self.maskOnly
            self.repaint()
        elif event.button() == Qt.ExtraButton3:
            self.previous.emit()
        elif event.button() == Qt.ExtraButton4:
            self.next.emit()

    def mouseMoveEvent(self, event):
        x = event.x()
        y = event.y()
        dx = x - self.lastMousePos.x()
        dy = y - self.lastMousePos.y()
        self.lastMousePos = event.pos()
        # if event.buttons() & Qt.LeftButton:
        # elif event.buttons() & Qt.MiddleButton:
        # elif event.buttons() & Qt.RightButton:

    def wheelEvent(self, event):
        dx = event.angleDelta().x() / 8
        dy = event.angleDelta().y() / 8
        # self.cameraZoom.emit(dy / 15)

    def tabletEvent(self, event):
        """Dispatch stylus press/release/move; log anything unexpected."""
        if event.device() == QTabletEvent.Stylus and event.pointerType(
        ) == QTabletEvent.Pen:
            if event.type() == QEvent.TabletPress:
                self.tabletPressEvent(event)
            elif event.type() == QEvent.TabletRelease:
                self.tabletReleaseEvent(event)
            elif event.type() == QEvent.TabletMove:
                # Only draw while the stylus actually touches the surface
                if event.pressure() > 0.0:
                    self.tabletMoveEvent(event)
            else:
                print('tabletEvent', event.device(), event.type(),
                      event.pointerType())
        else:
            print('tabletEvent', event.device(), event.type(),
                  event.pointerType())

    def tabletPressEvent(self, event):
        """Stylus buttons: left starts a stroke, middle toggles mode,
        right advances to the next image."""
        if event.buttons() & Qt.LeftButton:
            self.lastTabletPos = normalizeCoords(event.posF(), self.rc)
        if event.buttons() & Qt.MiddleButton:
            if self.mode == 'add':
                self.mode = 'del'
                self.setCursor(self.delCursor)
            else:
                self.mode = 'add'
                self.setCursor(self.addCursor)
        if event.buttons() & Qt.RightButton:
            self.next.emit()

    def tabletReleaseEvent(self, event):
        self.lastTabletPos = normalizeCoords(event.posF(), self.rc)

    def tabletMoveEvent(self, event):
        """Queue a stroke segment and schedule a coalesced repaint."""
        self.path.append((self.mode, self.lastTabletPos,
                          normalizeCoords(event.posF(), self.rc),
                          event.pressure()))
        self.lastTabletPos = normalizeCoords(event.posF(), self.rc)
        if not self.refresh.isActive():
            self.refresh.start(50)
class FSWatcher(QObject):
    """Polls a directory at a fixed interval and signals its listing.

    While the path is reachable, each poll emits ``itemsPathUpdatedSignal``
    with the ``os.listdir`` result. On failure it emits
    ``pathUnreachableSignal`` and schedules an automatic restart after 1s.
    """

    itemsPathUpdatedSignal = Signal(list)  # directory entries on each poll
    isConnectedSignal = Signal(bool)       # reachability state transitions
    startedSignal = Signal()
    stoppedSignal = Signal()
    pathUnreachableSignal = Signal()

    def __init__(self, path: str, intervalMs: int, parent=None):
        """:param path: directory to watch
        :param intervalMs: polling interval in milliseconds"""
        super().__init__(parent)
        self.__path = path
        self.timer = QTimer(self)
        self.timeoutMs = intervalMs
        self.timer.setInterval(self.timeoutMs)
        self.__isConnected = False
        self.setupSignalsAndSlots()

    def setupSignalsAndSlots(self):
        self.timer.timeout.connect(self.process)

    @Slot()
    def startProcess(self):
        """Start polling; on failure mark disconnected and retry."""
        try:
            self.timer.start()
            self.startedSignal.emit()
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            self.setConnected(False)
            self.restartProcess()

    @Slot()
    def stopProcess(self):
        """Stop polling and mark the watcher disconnected."""
        self.timer.stop()
        self.stoppedSignal.emit()
        self.setConnected(False)

    @Slot()
    def restartProcess(self):
        """Stop the current poll loop and restart it after one second."""
        if self.timer.isActive():
            self.timer.stop()
        # Static call on the class; the original built a throwaway QTimer()
        # instance just to reach this static method.
        QTimer.singleShot(1000, self.startProcess)

    @Slot()
    def process(self):
        """Poll the watched path once; emit entries or flag unreachable."""
        try:
            res = os.listdir(self.__path)
            self.setConnected(True)
            self.itemsPathUpdatedSignal.emit(res)
        except OSError as err:
            Logger().error("FSWatcher error:" + str(err))
            self.pathUnreachableSignal.emit()
            self.setConnected(False)
            self.restartProcess()

    def isConnected(self):
        return self.__isConnected

    def setConnected(self, isConnected: bool):
        """Update connectivity and emit only on actual state changes."""
        if self.__isConnected != isConnected:
            self.__isConnected = isConnected
            self.isConnectedSignal.emit(self.__isConnected)
class Dimmer:
    """Dims a display after a period of inactivity by stepping the
    brightness down through a callback.

    ``reset()`` (re)arms the inactivity countdown, ``dim()`` dims
    immediately instead of waiting, ``stop()`` restores normal brightness
    and disables dimming until the next ``reset()``.
    """

    timeout = 0                # seconds of inactivity before dimming starts
    brightness = -1            # normal brightness level
    brightness_dimmed = -1     # level dimmed down to
    __stopped = False          # True after stop(); dim() becomes a no-op
    __dimmer_brightness = -1   # brightness last sent to the callback
    __timer = None             # single-shot inactivity timer
    __change_timer = None      # per-step timer while actively dimming

    def __init__(
        self,
        timeout: int,
        brightness: int,
        brightness_dimmed: int,
        brightness_callback: Callable[[int], None],
    ):
        """Constructs a new Dimmer instance

        :param int timeout: The time in seconds before the dimmer starts.
        :param int brightness: The normal brightness level.
        :param int brightness_dimmed: The brightness level to dim down to.
        :param Callable[[int], None] brightness_callback: Callback that
            receives the current brightness level.
        """
        self.timeout = timeout
        self.brightness = brightness
        self.brightness_dimmed = brightness_dimmed
        self.brightness_callback = brightness_callback

    def stop(self) -> None:
        """Stops the dimmer and sets the brightness back to normal.

        Call reset to start normal dimming operation."""
        if self.__timer:
            self.__timer.stop()
        if self.__change_timer:
            self.__change_timer.stop()
        self.__dimmer_brightness = self.brightness
        self.brightness_callback(self.brightness)
        self.__stopped = True

    def reset(self) -> bool:
        """Reset the dimmer and start counting down again.

        If it was busy dimming, it will immediately stop dimming.
        Callback fires to set brightness back to normal.

        :return: True when the display was (nearly) fully dimmed before
            this reset, False otherwise.
        """
        self.__stopped = False
        if self.__timer:
            self.__timer.stop()
        if self.__change_timer:
            self.__change_timer.stop()
        if self.timeout:
            self.__timer = QTimer()
            self.__timer.setSingleShot(True)
            # Direct bound-method reference; the original's partial()
            # wrapper added nothing.
            self.__timer.timeout.connect(self.change_brightness)
            self.__timer.start(self.timeout * 1000)
        if self.__dimmer_brightness != self.brightness:
            previous_dimmer_brightness = self.__dimmer_brightness
            self.brightness_callback(self.brightness)
            self.__dimmer_brightness = self.brightness
            if previous_dimmer_brightness < 10:
                return True
        return False

    def dim(self, toggle: bool = False):
        """Manually initiate a dim event.

        If the dimmer is stopped, this has no effect."""
        if self.__stopped:
            return
        if toggle and self.__dimmer_brightness == 0:
            # Fully dimmed and toggling: wake up instead
            self.reset()
        elif self.__timer and self.__timer.isActive():
            # No need for the timer anymore, stop it
            self.__timer.stop()
            # Verify that we're not already at the target brightness nor
            # busy with dimming already
            # NOTE(review): nesting reconstructed from collapsed source —
            # dim-now only short-circuits a pending countdown; confirm
            # against the original file.
            if self.__change_timer is None and self.__dimmer_brightness:
                self.change_brightness()

    def change_brightness(self):
        """Move the brightness level down by one and schedule another
        change_brightness event."""
        if self.__dimmer_brightness and \
                self.__dimmer_brightness >= self.brightness_dimmed:
            self.__dimmer_brightness = self.__dimmer_brightness - 1
            self.brightness_callback(self.__dimmer_brightness)
            self.__change_timer = QTimer()
            self.__change_timer.setSingleShot(True)
            self.__change_timer.timeout.connect(self.change_brightness)
            self.__change_timer.start(10)
        else:
            self.__change_timer = None
class ConnectivityService(QObject):
    """Manages WebRTC peer-to-peer connections with other nodes.

    The service is designed to run on its own Qt thread: public methods
    only *emit* signals, which are delivered to the private ``_on_*`` slots
    through queued connections, so all real work happens on the service's
    thread.  Connection signalling (SDP/ICE) is relayed through the signal
    server client (``ss_client``).
    """

    # Hard cap on incoming connections accepted per node.
    HARD_CONNECTIONS_LIMIT = 8
    # Time (ms) a new connection may take to open before it is dropped.
    CONNECT_TIMEOUT = 20 * 1000
    # Spacing (ms) between successive outgoing connection attempts.
    CONNECT_INTERVAL = 1000
    # Retry delay (ms) when no connection is ready to send on.
    RESEND_INTERVAL = 250
    # Retry delay (ms) when the leaky-bucket upload limiter rejects a send.
    LEAKY_INTERVAL = 15

    # Public notifications about connected-node set changes.
    connected_nodes_incoming_changed = Signal(set)
    connected_nodes_outgoing_changed = Signal(set)
    node_incoming_connected = Signal(str)
    node_outgoing_connected = Signal(str)
    node_incoming_disconnected = Signal(str)
    node_outgoing_disconnected = Signal(str)

    # workaround for PySide crash. see
    # https://stackoverflow.com/questions/23728401
    # /pyside-crashing-python-when-emitting-none-between-threads
    # tuple is (unicode, object)
    data_received = Signal(tuple,   # params
                           str)     # connection id
    quit = Signal()
    exited = Signal()
    init = Signal()
    disconnect_ss_slots = Signal()
    connect_ss_slots = Signal()

    # Internal signals: thread-safe bridges onto the service's own thread.
    _refresh_connections = Signal()
    _connect_to_node = Signal(str)
    _check_connected = Signal(bytes, bool)
    _disconnect_from_node = Signal(str, bool, bool)
    _disconnect_from_all_nodes = Signal()
    _send_messages = Signal(tuple)
    _add_ice_server = Signal(tuple)
    _reconnect = Signal(str)

    # Signals fed by the WebRTC listener callbacks.
    connected = Signal(bytes)
    disconnected = Signal(bytes)
    message = Signal(tuple)
    buffered_amount_changed = Signal(bytes, int)
    statistic = Signal(bytes, bytes)
    on_local_description = Signal(bytes, bytes, bytes)
    on_candidate = Signal(bytes, bytes, int, bytes)
    # sdp_message_from_peer = Signal(str, str, str)
    _connection_is_relayed = Signal(bytes)

    def __init__(self, ss_client, network_speed_calculator,
                 parent=None, webrtc_class=WebRtc):
        """
        :param ss_client: signal-server client used for node discovery and
            SDP/ICE signalling.
        :param network_speed_calculator: receives per-message byte counts
            for traffic accounting (may be None).
        :param parent: optional QObject parent.
        :param webrtc_class: WebRTC backend factory (injectable for tests).
        """
        QObject.__init__(self, parent=parent)
        self._webrtc_class = webrtc_class
        self._ss_client = ss_client
        self._ice_servers = dict()
        # connection_id -> Connection, and node_id -> set(connection_id),
        # tracked separately for incoming and outgoing directions.
        self._incoming_connections = dict()
        self._incoming_node_connections = defaultdict(set)
        self._outgoing_connections = dict()
        self._outgoing_node_connections = defaultdict(set)
        self._connected_incoming_nodes = set()
        self._connected_outgoing_nodes = set()
        # Nodes whose traffic goes through a TURN relay (per statistics).
        self._relayed_nodes = set()
        self._nodes_waiting_for_connect = set()
        self._upload_limiter = None
        self._network_speed_calculator = network_speed_calculator

        # Debounce timer: restarting it coalesces refresh requests.
        self._refresh_connections_timer = QTimer(self)
        self._refresh_connections_timer.setInterval(1000)
        self._refresh_connections_timer.setSingleShot(True)
        self._refresh_connections_timer.timeout.connect(
            self._refresh_connections.emit)

        # Route internal signals to slots on this object's thread.
        self._refresh_connections.connect(
            self._on_refresh_connections, Qt.QueuedConnection)
        self._disconnect_from_node.connect(
            self._on_disconnect_from_node, Qt.QueuedConnection)
        self._disconnect_from_all_nodes.connect(
            self._on_disconnect_from_all_nodes, Qt.QueuedConnection)
        self.init.connect(
            self._on_init, Qt.QueuedConnection)
        self.disconnect_ss_slots.connect(
            self._disconnect_ss_slots, Qt.QueuedConnection)
        self.connect_ss_slots.connect(
            self._connect_ss_slots, Qt.QueuedConnection)
        self._connect_to_node.connect(self._on_connect_to_node,
                                      Qt.QueuedConnection)
        self._check_connected.connect(self._on_check_connected,
                                      Qt.QueuedConnection)
        self._reconnect.connect(self._on_reconnect_to_node,
                                Qt.QueuedConnection)
        self.quit.connect(self._on_quit, Qt.QueuedConnection)

        # Timestamps for the is_alive() watchdog (see _mark_time).
        self._start_method_time = 0
        self._end_method_time = 0

    @contextmanager
    def _mark_time(self):
        """Record method entry/exit times so is_alive() can detect a slot
        that started but never finished (thread hang watchdog)."""
        self._start_method_time = time()
        try:
            yield
        finally:
            self._end_method_time = time()

    def add_ice_server(self, server_id, url, login, password):
        # Thread-safe: forwarded to _on_add_ice_server via queued signal.
        self._add_ice_server.emit((server_id, url, login, password))

    def send(self, node_id, message, by_incoming_connection):
        """Send a single message without upload limiting; the tuple layout
        matches _on_send_messages' unpacking."""
        self._send_messages.emit(
            (node_id, [message], None, None, None, False,
             by_incoming_connection))

    def send_messages(self, node_id, messages, request,
                      on_sent_callback=None, check_func=None):
        """Send several messages over an incoming connection with upload
        limiting; on_sent_callback(request) fires after the last message."""
        self._send_messages.emit(
            (node_id, messages, request, on_sent_callback, check_func,
             True, True))

    def is_relayed(self, node_id):
        # True when traffic to node_id goes through a TURN relay.
        return node_id in self._relayed_nodes

    def set_upload_limiter(self, upload_limiter):
        self._upload_limiter = upload_limiter

    def reconnect(self, node_id):
        # Thread-safe: forwarded to _on_reconnect_to_node.
        self._reconnect.emit(node_id)

    def _on_add_ice_server(self, server_data):
        with self._mark_time():
            server_id, url, login, password = server_data
            if server_id in self._ice_servers:
                # Already registered with the backend; nothing to do.
                return

            self._ice_servers[server_id] = (url, login, password)
            self._webrtc.add_ice_server(
                url.encode(), login.encode(), password.encode())

    def _on_send_messages(self, messages_tuple):
        """Drain the message list over existing connections; if a send
        cannot proceed, re-emit the same tuple after a delay to retry."""
        node_id, messages, request, on_sent_callback, \
            check_func, limit_upload, is_incoming = \
            messages_tuple
        logger.debug("Sending messages for node %s", node_id)
        existing_connections = self._get_existing_connections(
            node_id, request, on_sent_callback, check_func, is_incoming)
        if not existing_connections:
            return

        while messages:
            is_sent, interval = \
                self._send_message_through_existing_connection(
                    node_id, messages, existing_connections, limit_upload)
            if not is_sent:
                # Retry the remaining messages later; 'messages' has been
                # consumed in place, so the re-emitted tuple resumes.
                QTimer.singleShot(
                    interval,
                    lambda: self._send_messages.emit(messages_tuple))
                return

        if callable(on_sent_callback):
            on_sent_callback(request)

    def _get_existing_connections(self, node_id, request, on_sent_callback,
                                  check_func, is_incoming):
        """Return the open Connection objects toward node_id (in the given
        direction), or an empty list; the callback is invoked on failure so
        callers waiting on the request are not left hanging."""
        connections = list()
        if callable(check_func):
            if not check_func(request):
                logger.warning("check_func returned None (False) "
                               "for node_id %s, request %s",
                               node_id, request)
                if callable(on_sent_callback):
                    on_sent_callback(request)
                return connections

        if is_incoming:
            connection_ids = self._incoming_node_connections.get(node_id, [])
            connections = [self._incoming_connections[c_id]
                           for c_id in connection_ids]
        else:
            connection_ids = self._outgoing_node_connections.get(node_id, [])
            connections = [self._outgoing_connections[c_id]
                           for c_id in connection_ids]
        if not connections:
            logger.warning("No connections for node %s", node_id)
            if callable(on_sent_callback):
                on_sent_callback(request)
        return connections

    def _send_message_through_existing_connection(self, node_id, messages,
                                                  existing_connections,
                                                  limit_upload):
        """Try to push messages[0] through one ready connection.

        Returns (sent, retry_interval_ms); pops the message from 'messages'
        only after a successful hand-off to the WebRTC backend.
        """
        with self._mark_time():
            # Skip closed connections and those with overflowing buffers.
            ready_connections = [
                c for c in existing_connections
                if c.open and not c.is_buffer_overflow()]
            if not ready_connections:
                return False, self.RESEND_INTERVAL

            message = messages[0]
            message_len = len(message)
            if self._upload_limiter and limit_upload:
                try:
                    self._upload_limiter.leak(message_len)
                except LeakyBucketException:
                    # Over the upload budget: retry after a short pause.
                    return False, self.LEAKY_INTERVAL

            # Spread load randomly across the ready connections.
            connection = random.choice(ready_connections)
            connection.used = True
            messages.pop(0)
            logger.verbose("Sending message through connection %s",
                           connection.id)
            self._webrtc.send(connection.id, message, message_len, True)
            if self._network_speed_calculator:
                self._network_speed_calculator.on_data_uploaded(
                    message_len,
                    NETWORK_WEBRTC_RELAY if self.is_relayed(node_id)
                    else NETWORK_WEBRTC_DIRECT)
            return True, 0

    def _on_init(self):
        """One-time initialization on the service thread: connect the
        WebRTC-facing slots, create the backend, and start refreshing."""
        logger.debug("Connectivity thread started")
        self._start_method_time = self._end_method_time = time()
        with self._mark_time():
            self._send_messages.connect(
                self._on_send_messages, Qt.QueuedConnection)
            self._add_ice_server.connect(
                self._on_add_ice_server, Qt.QueuedConnection)
            self.connected.connect(self._on_connected, Qt.QueuedConnection)
            self.disconnected.connect(self._on_disconnected,
                                      Qt.QueuedConnection)
            self.message.connect(self._on_message, Qt.QueuedConnection)
            self.buffered_amount_changed.connect(
                self._on_buffered_amount_changed, Qt.QueuedConnection)
            self.on_local_description.connect(
                self._on_local_description, Qt.QueuedConnection)
            self.on_candidate.connect(self._on_candidate,
                                      Qt.QueuedConnection)
            self.statistic.connect(self._on_statistic, Qt.QueuedConnection)
            self._connection_is_relayed.connect(
                self._on_connection_is_relayed, Qt.QueuedConnection)

            self._webrtc_listener = WebRtcListener(self)
            self._webrtc = self._webrtc_class()
            self._webrtc.set_listener(self._webrtc_listener)
            # self.sdp_message_from_peer.connect(self._sdp_message_from_peer)
            self._connect_ss_slots()
            self._refresh_connections.emit()

    def _connect_ss_slots(self):
        # Subscribe to signal-server events (node list, joins, leaves, SDP).
        self._ss_client.node_list_obtained.connect(
            self.on_node_list_obtained_cb, Qt.QueuedConnection)
        self._ss_client.node_connect.connect(
            self.on_node_connect_cb, Qt.QueuedConnection)
        self._ss_client.node_disconnect.connect(
            self.on_node_disconnect_cb, Qt.QueuedConnection)
        self._ss_client.server_disconnect.connect(
            self.on_server_disconnect_cb, Qt.QueuedConnection)
        self._ss_client.sdp_message.connect(
            self._sdp_message_from_peer, Qt.QueuedConnection)

    def _disconnect_ss_slots(self):
        # Best-effort unsubscribe; disconnect raises if not connected.
        try:
            self._ss_client.node_list_obtained.disconnect(
                self.on_node_list_obtained_cb)
            self._ss_client.node_connect.disconnect(
                self.on_node_connect_cb)
            self._ss_client.node_disconnect.disconnect(
                self.on_node_disconnect_cb)
            self._ss_client.server_disconnect.disconnect(
                self.on_server_disconnect_cb)
            self._ss_client.sdp_message.disconnect(
                self._sdp_message_from_peer)
        except Exception as e:
            logger.warning("Can't disconnect ss slots. Reason: (%s)", e)

    def on_node_list_obtained_cb(self, _):
        self._refresh_connections.emit()

    def on_node_connect_cb(self, node_info):
        # A node (re)appeared: drop any stale connections, then reconnect.
        self._disconnect_from_node.emit(node_info.get('id', ''), True, True)
        self._refresh_connections.emit()

    def on_node_disconnect_cb(self, node_id):
        self._disconnect_from_node.emit(node_id, True, True)

    def on_server_disconnect_cb(self):
        self._disconnect_from_all_nodes.emit()

    def get_connected_incoming_nodes(self):
        return self._connected_incoming_nodes

    def get_connected_outgoing_nodes(self):
        return self._connected_outgoing_nodes

    def _on_refresh_connections(self):
        """Schedule one connection attempt per online node, staggered by
        CONNECT_INTERVAL to avoid a thundering herd."""
        logger.info("Refreshing connections")
        if not self._ss_client.is_connected():
            return

        online_node_ids = set(
            self._ss_client.get_nodes(
                allowed_types=('node',), online_only=True))
        logger.debug("Online node ids: %s", online_node_ids)
        count = 0
        for node_id in online_node_ids:
            count += 1
            self._schedule_node_connect(self.CONNECT_INTERVAL * count,
                                        node_id)

    def _schedule_node_connect(self, interval, node_id):
        def connect():
            self._nodes_waiting_for_connect.discard(node_id)
            self._connect_to_node.emit(node_id)

        # Guard set prevents stacking multiple pending attempts per node.
        if node_id not in self._nodes_waiting_for_connect:
            self._nodes_waiting_for_connect.add(node_id)
            QTimer.singleShot(interval, connect)

    def _on_connect_to_node(self, node_id):
        if not self._ss_client.is_connected():
            return

        logger.debug("on_connect_to_node: %s", node_id)
        online_node_ids = set(
            self._ss_client.get_nodes(
                allowed_types=('node',), online_only=True))
        if node_id not in online_node_ids:
            return

        # Per-node soft limit shrinks as more nodes come online
        # (5 connections shared across the fleet, minimum 1 per node).
        if self._is_connections_limit_reached(
                self._outgoing_node_connections.get(node_id),
                5 // len(online_node_ids) + 1):
            return

        self._connect_to_node_via_webrtc(node_id)

    def _on_reconnect_to_node(self, node_id):
        # Drop outgoing connections only, then debounce a refresh.
        self._on_disconnect_from_node(node_id, False, True)
        if self._refresh_connections_timer.isActive():
            self._refresh_connections_timer.stop()
        self._refresh_connections_timer.start()

    def _is_connections_limit_reached(self, connections, limit):
        # Empty/None connection sets never count as "limit reached".
        return len(connections) >= limit if connections else False

    def _connect_to_node_via_webrtc(self, node_id):
        with self._mark_time():
            connection = Connection(node_id)
            connection_id = connection.id
            logger.debug("Connecting to node %s via webrtc... "
                         "Connection id %s", node_id, connection_id)
            self._outgoing_connections[connection_id] = connection
            self._outgoing_node_connections[node_id].add(connection_id)
            self._webrtc.create_connection(connection_id)
            self._webrtc.initiate_connection(connection_id)
            # Enforce CONNECT_TIMEOUT: drop the connection if still closed.
            QTimer.singleShot(self.CONNECT_TIMEOUT,
                              lambda: self._check_connected.emit(
                                  connection_id, False))

    def _on_check_connected(self, connection_id, is_incoming):
        """Timeout check: tear down a connection that never opened."""
        with self._mark_time():
            logger.debug("Check connected %s", connection_id)
            if is_incoming:
                connection = self._incoming_connections.get(
                    connection_id, None)
            else:
                connection = self._outgoing_connections.get(
                    connection_id, None)
            if not connection:
                logger.debug("No connection %s", connection_id)
                return

            if not connection.open:
                logger.debug("Connection %s is not opened", connection_id)
                self._webrtc.disconnect(connection_id)
                self._on_disconnected(connection_id)

    def _on_connected(self, connection_id):
        """A data channel opened: mark the node connected and notify."""
        with self._mark_time():
            logger.info("_on_connected %s", connection_id)
            is_incoming = False
            connection = self._outgoing_connections.get(
                connection_id, None)
            if not connection:
                connection = self._incoming_connections.get(
                    connection_id, None)
                is_incoming = True
            if not connection:
                logger.debug("No node id for connection id %s",
                             connection_id)
                self._webrtc.disconnect(connection_id)
                return

            connection.open = True
            # First connection to this node: ask for statistics to learn
            # whether the path is relayed (see _on_statistic).
            if connection.node_id not in self._connected_incoming_nodes and \
                    connection.node_id not in self._connected_outgoing_nodes:
                self._webrtc.request_statistic(connection_id)

            if not is_incoming and connection.node_id not in \
                    self._connected_outgoing_nodes:
                self._connected_outgoing_nodes.add(connection.node_id)
                self.connected_nodes_outgoing_changed.emit(
                    self._connected_outgoing_nodes)
                self.node_outgoing_connected.emit(connection.node_id)
            elif is_incoming and connection.node_id not in \
                    self._connected_incoming_nodes:
                self._connected_incoming_nodes.add(connection.node_id)
                self.connected_nodes_incoming_changed.emit(
                    self._connected_incoming_nodes)
                self.node_incoming_connected.emit(connection.node_id)
            self._refresh_connections.emit()

    def _on_disconnected(self, connection_id):
        """A connection closed: drop bookkeeping; if it was the node's last
        connection in that direction, disconnect the node."""
        logger.info("_on_disconnected %s", connection_id)
        is_incoming = False
        connection = self._outgoing_connections.pop(connection_id, None)
        if not connection:
            connection = self._incoming_connections.pop(connection_id, None)
            is_incoming = True
        if connection:
            connection.open = False
            if is_incoming:
                connections = self._incoming_node_connections[
                    connection.node_id]
                connections.discard(connection_id)
                if not connections:
                    self._on_disconnect_from_node(
                        connection.node_id, True, False)
            else:
                # NOTE(review): outgoing side disconnects the node even if
                # other outgoing connections remain — confirm intentional.
                connections = self._outgoing_node_connections[
                    connection.node_id]
                connections.discard(connection_id)
                self._on_disconnect_from_node(
                    connection.node_id, False, True)

            # Debounced refresh to re-establish connectivity.
            if self._refresh_connections_timer.isActive():
                self._refresh_connections_timer.stop()
            self._refresh_connections_timer.start()

    def _on_buffered_amount_changed(self, connection_id, amount):
        # Track backend send-buffer size for is_buffer_overflow() checks.
        connection = self._incoming_connections.get(connection_id, None)
        if not connection:
            connection = self._outgoing_connections.get(connection_id, None)
        if connection:
            connection.buffered_amount = amount

    def _on_message(self, message_tuple):
        """Incoming data: account traffic and forward to data_received."""
        connection_id, message = message_tuple
        logger.debug("_on_message. Connection id %s", connection_id)
        connection = self._incoming_connections.get(connection_id, None)
        if not connection:
            connection = self._outgoing_connections.get(connection_id, None)
        node_id = connection.node_id if connection else None
        if self._network_speed_calculator:
            self._network_speed_calculator.on_data_downloaded(
                len(message),
                NETWORK_WEBRTC_RELAY if self.is_relayed(node_id)
                else NETWORK_WEBRTC_DIRECT)

        if node_id:
            self.data_received.emit((node_id, message),
                                    connection_id.decode())
        else:
            logger.warning("_on_message from unknown connection: %s",
                           connection_id)

    @qt_run
    def _on_statistic(self, connection_id, statistics):
        """Runs off-thread (qt_run): parse backend statistics and flag the
        connection as relayed when a TURN path is detected."""
        logger.debug("_on_statistic")
        statistic = StatisticParser.parse_statistic(statistics.decode())
        if statistic is None:
            logger.warning("Failed to parse connection statistic")
            return

        if StatisticParser.determine_if_connection_relayed(statistic):
            self._connection_is_relayed.emit(connection_id)

    def _on_connection_is_relayed(self, connection_id):
        logger.debug("_on_connection_is_relayed")
        connection = self._outgoing_connections.get(connection_id, None)
        if not connection:
            connection = self._incoming_connections.get(connection_id, None)
        if connection:
            self._relayed_nodes.add(connection.node_id)

    def _on_disconnect_from_node(self, node_id, disconnect_incoming,
                                 disconnect_outgoing):
        """Tear down all of node_id's connections in the requested
        direction(s) and emit the corresponding node_*_disconnected."""
        with self._mark_time():
            logger.info("Node %s disconnected from signal server, "
                        "or no reliable connections for node."
                        "disconnect from it", node_id)
            if disconnect_incoming:
                connections = self._incoming_node_connections[node_id]
                for connection_id in connections:
                    logger.debug("Disconnect connection %s", connection_id)
                    self._webrtc.disconnect(connection_id)
                    self._incoming_connections.pop(connection_id, None)
                del self._incoming_node_connections[node_id]

                if node_id in self._connected_incoming_nodes:
                    logger.debug("Disconnect node %s", node_id)
                    self._connected_incoming_nodes.discard(node_id)
                    self.node_incoming_disconnected.emit(node_id)
                    self.connected_nodes_incoming_changed.emit(
                        self._connected_incoming_nodes)

            if disconnect_outgoing:
                connections = self._outgoing_node_connections[node_id]
                for connection_id in connections:
                    logger.debug("Disconnect connection %s", connection_id)
                    self._webrtc.disconnect(connection_id)
                    self._outgoing_connections.pop(connection_id, None)
                del self._outgoing_node_connections[node_id]
                self._relayed_nodes.discard(node_id)

                if node_id in self._connected_outgoing_nodes:
                    logger.debug("Disconnect node %s", node_id)
                    self._connected_outgoing_nodes.discard(node_id)
                    self.node_outgoing_disconnected.emit(node_id)
                    self.connected_nodes_outgoing_changed.emit(
                        self._connected_outgoing_nodes)

    def _on_disconnect_from_all_nodes(self):
        """Signal-server connection lost: drop every connection and emit
        per-node disconnect notifications."""
        with self._mark_time():
            logger.info("Connection with signal server lost, "
                        "disconnect from all nodes")
            for connection_id in self._incoming_connections.keys():
                self._webrtc.disconnect(connection_id)
            self._incoming_connections.clear()
            self._incoming_node_connections.clear()
            for connection_id in self._outgoing_connections.keys():
                self._webrtc.disconnect(connection_id)
            self._outgoing_connections.clear()
            self._outgoing_node_connections.clear()
            self._relayed_nodes.clear()

            for node_id in self._connected_incoming_nodes:
                self.node_incoming_disconnected.emit(node_id)
            self._connected_incoming_nodes.clear()
            self.connected_nodes_incoming_changed.emit(
                self._connected_incoming_nodes)
            for node_id in self._connected_outgoing_nodes:
                self.node_outgoing_disconnected.emit(node_id)
            self._connected_outgoing_nodes.clear()
            self.connected_nodes_outgoing_changed.emit(
                self._connected_outgoing_nodes)

    def _on_quit(self):
        """Final teardown: disconnect everything, release the WebRTC
        backend, and signal 'exited' so the owning thread can stop."""
        self._on_disconnect_from_all_nodes()
        if self._refresh_connections_timer.isActive():
            self._refresh_connections_timer.stop()
        self._refresh_connections_timer = None
        self.disconnect(self)
        self._webrtc.close()
        self._webrtc = None
        self.exited.emit()

    def _on_local_description(self, connection_id, type, sdp):
        """Forward our local SDP (offer/answer) to the peer via the
        signal server."""
        logger.debug("on_local_description")
        with self._mark_time():
            connection = self._incoming_connections.get(
                connection_id, None)
            if not connection:
                connection = self._outgoing_connections.get(
                    connection_id, None)
            if not connection:
                logger.warning("on_local_description: connection %s not found",
                               connection_id)
                self._webrtc.disconnect(connection_id)
                return

            self._ss_client.send_sdp_message(
                connection.node_id, connection_id.decode(), json.dumps(dict(
                    type=type.decode(),
                    sdp=sdp.decode(),
                )))

    def _on_candidate(self, connection_id, sdp_mid, sdp_m_line_index,
                      candidate):
        """Forward a local ICE candidate to the peer via the signal
        server."""
        with self._mark_time():
            connection = self._incoming_connections.get(
                connection_id, None)
            if not connection:
                connection = self._outgoing_connections.get(
                    connection_id, None)
            if not connection:
                self._webrtc.disconnect(connection_id)
                return

            self._ss_client.send_sdp_message(
                connection.node_id, connection_id.decode(), json.dumps(dict(
                    sdpMid=sdp_mid.decode(),
                    sdpMLineIndex=sdp_m_line_index,
                    candidate=candidate.decode(),
                )))

    def _sdp_message_from_peer(self, node_id, connection_id, message):
        """Handle an SDP/ICE JSON message from a peer: either a remote
        description (has "type") or an ICE candidate."""
        logger.verbose("sdp_message_from_peer: %s", message)
        connection_id = connection_id.encode()
        try:
            sdp_message = json.loads(message)
        except JSONDecodeError as e:
            logger.warning("Failed to decode json: %s", e)
            return

        with self._mark_time():
            if not self._check_add_connection(node_id, connection_id):
                return

            if "type" not in sdp_message:
                # No "type" field: this should be an ICE candidate message.
                sdp_mid_found = "sdpMid" in sdp_message
                sdp_m_line_index_found = "sdpMLineIndex" in sdp_message
                candidate_found = "candidate" in sdp_message
                if sdp_mid_found and sdp_m_line_index_found and \
                        candidate_found:
                    sdp_mid = sdp_message.get("sdpMid", "")
                    sdp_m_line_index = int(
                        sdp_message.get("sdpMLineIndex", 0))
                    candidate = sdp_message.get("candidate", "")
                    self._webrtc.set_candidate(
                        connection_id, sdp_mid.encode(), sdp_m_line_index,
                        candidate.encode())
                else:
                    logger.warning("Invalid spd message, spd_mid_found: %s, "
                                   "sdp_m_line_index_found: %s, "
                                   "candidate_found: %s",
                                   sdp_mid_found, sdp_m_line_index_found,
                                   candidate_found)
                return

            type = sdp_message.get("type", "")
            sdp = sdp_message.get("sdp", "")
            logger.debug("On sdp message: type: %s, sdp: %s", type, sdp)
            self._webrtc.set_remote_description(
                connection_id, type.encode(), sdp.encode())
            logger.debug("On sdp message: done")

    def _check_add_connection(self, node_id, connection_id):
        """Register an unknown connection_id as a new incoming connection.

        Returns False when the per-node incoming limit is reached; True
        when the connection exists (or was just created).
        """
        incoming_connection_ids = self._incoming_node_connections[node_id]
        outgoing_connection_ids = self._outgoing_node_connections[node_id]
        if connection_id not in incoming_connection_ids and \
                connection_id not in outgoing_connection_ids:
            if self._is_connections_limit_reached(
                    incoming_connection_ids, self.HARD_CONNECTIONS_LIMIT):
                logger.debug("Incomming connections limit reached for node %s",
                             node_id)
                return False

            connection = Connection(node_id, connection_id)
            self._incoming_connections[connection_id] = connection
            incoming_connection_ids.add(connection_id)
            self._webrtc.create_connection(connection_id)
            logger.debug("On sdp message: connection added %s",
                         connection_id)
            # Same open-timeout watchdog as for outgoing connections.
            QTimer.singleShot(self.CONNECT_TIMEOUT,
                              lambda: self._check_connected.emit(
                                  connection_id, True))
        return True

    def get_node_type(self, node_id):
        """Return the node's type string from the signal server, or None
        on any failure."""
        node_type = None
        try:
            node_info = self._ss_client.get_node_info(node_id)
            node_type = node_info.get("type", None)
            # NOTE(review): "(str, str)" looks like a py2->py3 leftover of
            # "(str, unicode)" — confirm and simplify to isinstance().
            if not node_type or type(node_type) not in (str, str):
                raise NameError("Invalid node type")
        except Exception as e:
            logger.warning("Can't get node type for node %s. Reason: %s",
                           node_id, e)
        return node_type

    def get_self_node_type(self):
        return self._ss_client.get_self_client_type()

    def get_sharing_info(self):
        return self._ss_client.get_sharing_info()

    def is_alive(self):
        """Watchdog check: False only when a slot entered (_mark_time) but
        has not exited for longer than CONNECTIVITY_ALIVE_TIMEOUT."""
        return not (
            self._start_method_time > self._end_method_time and
            time() - self._start_method_time > CONNECTIVITY_ALIVE_TIMEOUT)
class Mainwindow(QWidget):
    """Main window of the virtual glasses try-on app.

    Shows a live camera feed, lets the user freeze a snapshot, detects the
    face shape from it, and offers matching glasses (grouped by type in a
    tab widget) that can be composited onto the snapshot.
    """

    def __init__(self):
        super().__init__()
        self.setupUI()
        self.setupCamera()
        self.window.show()

    def setupUI(self):
        """Load "mainwindow.ui" and wire up all widgets.

        Widgets are stored on self so the other methods can reach them.
        """
        path = os.path.join(os.path.dirname(__file__), "mainwindow.ui")
        uiFile = QFile(path)
        uiFile.open(QFile.ReadOnly)
        loader = QUiLoader()
        self.window = loader.load(uiFile)
        uiFile.close()

        # Label that displays the camera stream / frozen snapshot.
        self.imageLabel = self.window.findChild(QLabel, 'imageLabel')
        # Freeze the current frame (take a picture).
        self.btn_takePict = self.window.findChild(QPushButton, 'btn_takePict')
        self.btn_takePict.clicked.connect(self.takePicture)
        # Unfreeze and resume the live stream.
        self.btn_cancelTakePict = self.window.findChild(
            QPushButton, 'btn_cancelTakePict')
        self.btn_cancelTakePict.clicked.connect(self.cancelTakePicture)
        # Remove the glasses from the face (show the bare snapshot again).
        self.btn_revert = self.window.findChild(QPushButton, 'btn_revert')
        self.btn_revert.clicked.connect(self.revert)
        # Prompts the user to take a picture; later shows the detected
        # face shape.
        self.faceshapeOfUser = self.window.findChild(QTextBrowser,
                                                     "faceshapeOfUser")
        self.faceshapeOfUser.setText("按下拍照來確認自己臉型")
        # After a picture is taken, suitable glasses are listed here,
        # grouped by glasses type (one tab per type).
        self.tabWidget = self.window.findChild(QTabWidget, "tabWidget")
        self.numOfTab = 0

    def setupCamera(self):
        """Open the default camera and start a 30 ms refresh timer."""
        self.capture = cv2.VideoCapture(0)
        self.capture.set(cv2.CAP_PROP_FRAME_WIDTH, 600)
        self.capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 500)
        self.timer = QTimer()
        self.timer.timeout.connect(self.displayVideoStream)
        self.timer.start(30)

    def _grabFrame(self):
        """Read one camera frame and return it as a mirrored RGB QImage.

        Returns None when the camera yields no frame, so callers can skip
        the update instead of crashing on a failed read.
        """
        ok, frame = self.capture.read()
        if not ok or frame is None:
            return None
        # OpenCV delivers BGR; swap channels for QImage.Format_RGB888
        # (RGB2BGR and BGR2RGB are the same channel swap).
        frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
        frame = cv2.flip(frame, 1)  # mirror for a natural selfie view
        return QImage(frame, frame.shape[1], frame.shape[0],
                      frame.strides[0], QImage.Format_RGB888)

    def displayVideoStream(self):
        """Timer slot: show the latest camera frame in imageLabel."""
        image = self._grabFrame()
        if image is None:
            return  # camera hiccup — keep showing the previous frame
        self.image = image
        self.imageLabel.setPixmap(QPixmap.fromImage(self.image))

    # ------------------------------------------------------------------
    # Glasses buttons: the tab widget holds one tab per glasses type;
    # each tab holds a scroll area with one push button per glasses
    # picture. Pressing a button puts those glasses on the snapshot.
    # ------------------------------------------------------------------

    def setTabWidget(self, faceShape):
        """Fill the tab widget with glasses types recommended for faceShape.

        The recommendation file lists perfect-match types first, then
        merely-compatible types; tab captions are colored accordingly
        (green = perfect match, orange = compatible).
        """
        path = os.path.join(os.path.dirname(__file__), "datas",
                            "recommendType", faceShape + ".txt")
        # 'with' guarantees the file is closed (the original leaked it).
        with open(path, 'r') as fin:
            # Pass 0 reads the perfect matches, pass 1 the compatible ones.
            for i in range(2):
                num = int(fin.readline())
                for j in range(num):
                    glassesShape = fin.readline().strip("\n")
                    tab = self.createScrollArea(glassesShape)
                    self.tabWidget.addTab(tab, glassesShape)
                    if i == 0:
                        # Perfect match: green caption.
                        self.tabWidget.tabBar().setTabTextColor(
                            j, QColor(0, 100, 0))
                    else:
                        # Compatible: orange caption. Offset by the tabs
                        # already added in the first pass.
                        self.tabWidget.tabBar().setTabTextColor(
                            self.numOfTab + j, QColor(255, 165, 0))
                self.numOfTab += num

    def createScrollArea(self, glassesType):
        """Return a scroll area holding the buttons for one glasses type."""
        scrollArea = QScrollArea()
        scrollArea.setWidget(self.setButtons(glassesType))
        return scrollArea

    def setButtons(self, glassesType):
        """Build a widget with one button per picture found under
        images/glasses/<glassesType>."""
        buttons = QWidget()
        layout = QVBoxLayout()
        glassesFileAddr = "images/glasses/" + str(glassesType)
        dirPath = os.path.join(os.path.dirname(__file__), glassesFileAddr)
        # Pictures are named 1.png, 2.png, ... inside the type's folder.
        for i in range(self.numOfGlasses(dirPath)):
            glassesImgAddr = glassesFileAddr + "/" + str(i + 1) + ".png"
            iconPath = os.path.join(os.path.dirname(__file__),
                                    glassesImgAddr)
            btn = QPushButton()
            btn.setIcon(QIcon(iconPath))
            btn.setIconSize(QtCore.QSize(128, 128))
            # Default argument binds the address now (avoids the
            # late-binding-closure pitfall inside the loop).
            btn.pressed.connect(
                lambda val=glassesImgAddr: self.putOnGlasses(val))
            layout.addWidget(btn)
        buttons.setLayout(layout)
        return buttons

    def numOfGlasses(self, filePath):
        """Return the number of files directly inside filePath.

        os.walk yields (dirpath, dirnames, filenames); index 2 is the
        list of file names in the top directory.
        """
        fileInfo = next(os.walk(filePath))
        return len(fileInfo[2])

    @QtCore.Slot()
    def takePicture(self):
        """Freeze the stream, save the frame and run face-shape detection."""
        if not self.timer.isActive():
            return  # already frozen
        self.timer.stop()
        image = self._grabFrame()
        if image is not None:
            self.image = image
        path = os.path.join(os.path.dirname(__file__), "images", "saved.jpg")
        self.image.save(path, "JPG")
        faceShape = faceShapeRecognizer.run()
        if faceShape in ("error1", "error2", "error3"):
            self.faceshapeOfUser.setText("請取消重拍 並再試一次")
        else:
            self.faceshapeOfUser.setText("你是" + faceShape + "臉")
            self.setTabWidget(faceShape)

    @QtCore.Slot()
    def cancelTakePicture(self):
        """Resume the live stream and clear the recommendation tabs."""
        self.timer.start(30)
        # Removing tab 0 repeatedly drops every tab we added.
        for _ in range(self.numOfTab):
            self.tabWidget.removeTab(0)
        self.numOfTab = 0
        self.faceshapeOfUser.setText("按下拍照來確認自己臉型")

    @QtCore.Slot()
    def revert(self):
        """Show the saved (glasses-free) snapshot again."""
        if self.timer.isActive():
            return  # nothing to revert while streaming live
        path = os.path.join(os.path.dirname(__file__), "images", "saved.jpg")
        # BUG FIX: QPixmap.fromImage expects a QImage, not a file path
        # string; load the pixmap directly from the file instead.
        self.imageLabel.setPixmap(QPixmap(path))

    @QtCore.Slot(str)
    def putOnGlasses(self, glassesImgAddr):
        """Composite the chosen glasses onto the snapshot and display it."""
        pictureCompound.run(glassesImgAddr)
        path = os.path.join(os.path.dirname(__file__), "images",
                            "result.jpg")
        self.image = QImage(path)
        self.imageLabel.setPixmap(QPixmap.fromImage(self.image))
class DownloadManager(QObject): idle = Signal() working = Signal() progress = Signal(str, int, int) downloads_status = Signal( str, int, int, list, # downloads info - # list(added_info, changed_info, deleted_info) dict) # uploads_info error = Signal(str) clear_error = Signal() no_disk_space = Signal( QObject, # task str, # display_name bool) # is error # workaround for PySide crash. see # https://stackoverflow.com/questions/23728401 # /pyside-crashing-python-when-emitting-none-between-threads # tuple is (int, unicode, long, unicode, unicode, object, object) _file_download = Signal(tuple) # tuple is (int, unicode, long, unicode, unicode, object, object, # object, list) _patch_download = Signal(tuple) _many_file_downloads = Signal(list) # downloads list _pause_all = Signal(bool) _resume_all = Signal() _cancel_all = Signal() _cancel_one = Signal(str) _accept_one = Signal(str) _prepare_cleanup = Signal() # tuple is (object, ) _set_download_limiter = Signal(tuple) _task_priority = Signal( str, # obj_id int) # new priority _copy_added = Signal(str) on_patch_availability_info_request = Signal(Message, str) on_patch_availability_info_response = Signal(Message, str) on_patch_availability_info_abort = Signal(Message, str) on_patch_availability_info_failure = Signal(Message, str) on_patch_data_request = Signal(Message, str) on_patch_data_response = Signal(Message, str) on_patch_data_abort = Signal(Message, str) on_patch_data_failure = Signal(Message, str) on_file_availability_info_request = Signal(Message, str) on_file_availability_info_response = Signal(Message, str) on_file_availability_info_abort = Signal(Message, str) on_file_availability_info_failure = Signal(Message, str) on_file_data_request = Signal(Message, str) on_file_data_response = Signal(Message, str) on_file_data_abort = Signal(Message, str) on_file_data_failure = Signal(Message, str) on_file_availability_info_requests = Signal(Messages, str) on_file_availability_info_responses = Signal(Messages, str) 
on_patch_availability_info_requests = Signal(Messages, str) on_patch_availability_info_responses = Signal(Messages, str) signal_info_tx = Signal(tuple) signal_info_rx = Signal(tuple) quit = Signal() possibly_sync_folder_is_removed = \ DownloadTask.possibly_sync_folder_is_removed supplying_finished = Signal() ready_timeout = 10 * 1000 download_error_timeout = 10 * 1000 rerequest_info_timeout = 10 * 1000 cleanup_timeout = 5 * 60 * 1000 progress_timeout = 1 * 1000 def __init__(self, connectivity_service, ss_client, events_db=None, copies_storage=None, patches_storage=None, upload_enabled=True, tracker=None, parent=None, get_download_backups_mode=lambda: None, get_file_path=lambda p, sq=False: ""): QObject.__init__(self, parent=parent) self._paused = False self._connectivity_service = connectivity_service self._ss_client = ss_client self._tracker = tracker self._events_db = events_db self._copies_storage = copies_storage self._patches_storage = patches_storage self._upload_enabled = upload_enabled self._downloads = dict() self._ready_downloads_queue = [] self._current_task = None self._node_incoming_list = self._connectivity_service. 
\ get_connected_incoming_nodes().copy() self._node_outgoing_list = self._connectivity_service.\ get_connected_outgoing_nodes().copy() self._important_downloads_info = dict() self._last_uploads_info = dict() self._limiter = None self._info_priority = DOWNLOAD_PRIORITY_WANTED_DIRECT_PATCH self._empty_progress = ("", 0, 0) self._last_progress_sent = self._empty_progress self._error_set = False self._cleanup_directories = [] self._init_suppliers(get_download_backups_mode, get_file_path) self._init_consumers() self._connect_slots() self._init_timers() def _init_suppliers(self, get_download_backups_mode, get_file_path): if not self._upload_enabled: self._file_availability_info_supplier = None self._patch_availability_info_supplier = None self._file_data_supplier = None self._patch_data_supplier = None return self._file_availability_info_supplier = \ FileAvailabilityInfoSupplier( self, self, self._connectivity_service, self._node_incoming_list, self._events_db, self._copies_storage, get_download_backups_mode, get_file_path) self._patch_availability_info_supplier = \ PatchAvailabilityInfoSupplier( self, self, self._connectivity_service, self._node_incoming_list, self._patches_storage) self._file_data_supplier = FileDataSupplier(self, self._connectivity_service, self._events_db, self._copies_storage, get_file_path) self._patch_data_supplier = PatchDataSupplier( self, self._connectivity_service, self._patches_storage, self._events_db) self._connect_suppliers_slots() def _connect_suppliers_slots(self): self.on_file_availability_info_request.connect( self._file_availability_info_supplier._availability_info_request, Qt.QueuedConnection) self.on_file_availability_info_abort.connect( self._file_availability_info_supplier._availability_info_abort) self.on_patch_availability_info_request.connect( self._patch_availability_info_supplier._availability_info_request, Qt.QueuedConnection) self.on_patch_availability_info_abort.connect( 
self._patch_availability_info_supplier._availability_info_abort) self.on_file_availability_info_requests.connect( self._file_availability_info_supplier._availability_info_requests, Qt.QueuedConnection) self.on_patch_availability_info_requests.connect( self._patch_availability_info_supplier._availability_info_requests, Qt.QueuedConnection) self.on_file_data_request.connect( self._file_data_supplier._data_request, Qt.QueuedConnection) self.on_file_data_abort.connect(self._file_data_supplier._data_abort) self.on_patch_data_request.connect( self._patch_data_supplier._data_request, Qt.QueuedConnection) self.on_patch_data_abort.connect(self._patch_data_supplier._data_abort) self._connectivity_service.node_incoming_disconnected.connect( self._file_availability_info_supplier.on_node_disconnected, Qt.QueuedConnection) self._connectivity_service.node_incoming_disconnected.connect( self._patch_availability_info_supplier.on_node_disconnected, Qt.QueuedConnection) self._connectivity_service.connected_nodes_incoming_changed.connect( self._file_availability_info_supplier.on_connected_nodes_changed, Qt.QueuedConnection) self._connectivity_service.connected_nodes_incoming_changed.connect( self._patch_availability_info_supplier.on_connected_nodes_changed, Qt.QueuedConnection) # connect traffic info signal 'signal_info_tx' self._file_data_supplier.signal_info_tx.connect( self._on_info_tx, Qt.QueuedConnection) self._patch_data_supplier.signal_info_tx.connect( self._on_info_tx, Qt.QueuedConnection) self._file_data_supplier.supplying_finished.connect( self.supplying_finished.emit, Qt.QueuedConnection) def _init_consumers(self): self._file_availability_info_consumer = FileAvailabilityInfoConsumer( self, self._connectivity_service, self._node_outgoing_list) self._patch_availability_info_consumer = PatchAvailabilityInfoConsumer( self, self._connectivity_service, self._node_outgoing_list) self._file_data_consumer = FileDataConsumer(self, self._connectivity_service) self._patch_data_consumer 
= PatchDataConsumer( self, self._connectivity_service) self._connect_consumers_slots() def _connect_consumers_slots(self): self.on_file_availability_info_response.connect( self._file_availability_info_consumer._availability_info_response, Qt.QueuedConnection) self.on_file_availability_info_failure.connect( self._file_availability_info_consumer._availability_info_failure, Qt.QueuedConnection) self.on_patch_availability_info_response.connect( self._patch_availability_info_consumer._availability_info_response, Qt.QueuedConnection) self.on_patch_availability_info_failure.connect( self._patch_availability_info_consumer._availability_info_failure, Qt.QueuedConnection) self.on_file_availability_info_responses.connect( self._file_availability_info_consumer._availability_info_responses, Qt.QueuedConnection) self.on_patch_availability_info_responses.connect( self._patch_availability_info_consumer. _availability_info_responses, Qt.QueuedConnection) self._file_availability_info_consumer.availability_info_received\ .connect(self._on_availability_info_received, Qt.QueuedConnection) self._file_availability_info_consumer.availability_info_failure\ .connect(self._on_availability_info_failure, Qt.QueuedConnection) self._file_data_consumer.data_received.connect( self._on_task_data_received, Qt.QueuedConnection) self._file_data_consumer.error_received.connect( self._on_task_data_failed, Qt.QueuedConnection) self._patch_availability_info_consumer.availability_info_received\ .connect(self._on_availability_info_received, Qt.QueuedConnection) self._patch_availability_info_consumer.availability_info_failure\ .connect(self._on_availability_info_failure, Qt.QueuedConnection) self._patch_data_consumer.data_received.connect( self._on_task_data_received, Qt.QueuedConnection) self._patch_data_consumer.error_received.connect( self._on_task_data_failed, Qt.QueuedConnection) self.on_file_data_response.connect( self._file_data_consumer._data_response, Qt.QueuedConnection) 
self.on_file_data_failure.connect( self._file_data_consumer._data_failure, Qt.QueuedConnection) self.on_patch_data_response.connect( self._patch_data_consumer._data_response, Qt.QueuedConnection) self.on_patch_data_failure.connect( self._patch_data_consumer._data_failure, Qt.QueuedConnection) self._connectivity_service.connected_nodes_outgoing_changed.connect( self._file_availability_info_consumer.on_connected_nodes_changed, Qt.QueuedConnection) self._connectivity_service.connected_nodes_outgoing_changed.connect( self._patch_availability_info_consumer.on_connected_nodes_changed, Qt.QueuedConnection) self._connectivity_service.node_outgoing_connected.connect( self._file_availability_info_consumer.on_node_connected, Qt.QueuedConnection) self._connectivity_service.node_outgoing_connected.connect( self._patch_availability_info_consumer.on_node_connected, Qt.QueuedConnection) def _connect_slots(self): self._connectivity_service.data_received.connect( self._on_data_received, Qt.QueuedConnection) self._connectivity_service.connected_nodes_outgoing_changed.connect( self._connected_nodes_outgoing_changed, Qt.QueuedConnection) self._connectivity_service.connected_nodes_incoming_changed.connect( self._connected_nodes_incoming_changed, Qt.QueuedConnection) self._file_download.connect(self._add_file_download, Qt.QueuedConnection) self._patch_download.connect(self._add_patch_download, Qt.QueuedConnection) self._many_file_downloads.connect(self._add_many_file_downloads, Qt.QueuedConnection) self._pause_all.connect(self._on_pause_all_downloads, Qt.QueuedConnection) self._resume_all.connect(self._on_resume_all_downloads, Qt.QueuedConnection) self._cancel_all.connect(self._on_cancel_all_downloads, Qt.QueuedConnection) self._cancel_one.connect(self._on_cancel_download, Qt.QueuedConnection) self._accept_one.connect(self._on_accept_download, Qt.QueuedConnection) self._set_download_limiter.connect(self._on_set_download_limiter, Qt.QueuedConnection) 
self._task_priority.connect(self._on_set_task_priority, Qt.QueuedConnection) self._copy_added.connect(self._on_copy_added, Qt.QueuedConnection) self._prepare_cleanup.connect(self._on_prepare_cleanup, Qt.QueuedConnection) self.quit.connect(self._on_quit, Qt.QueuedConnection) def _init_timers(self): self._timers = [] self._ready_timer = QTimer(self) self._ready_timer.setInterval(self.ready_timeout) self._ready_timer.setSingleShot(True) self._ready_timer.timeout.connect(self._check_downloads) self._timers.append(self._ready_timer) self._downloads_error_timer = QTimer(self) self._downloads_error_timer.setInterval(self.download_error_timeout) self._downloads_error_timer.setSingleShot(True) self._downloads_error_timer.timeout.connect( self._check_send_download_error) self._timers.append(self._downloads_error_timer) self._rerequest_info_timer = QTimer(self) self._rerequest_info_timer.setInterval(self.rerequest_info_timeout) self._rerequest_info_timer.setSingleShot(True) self._rerequest_info_timer.timeout.connect( self._rerequest_info_for_not_ready_downloads) self._timers.append(self._rerequest_info_timer) self._cleanup_timer = QTimer(self) self._cleanup_timer.setInterval(self.cleanup_timeout) self._cleanup_timer.setSingleShot(True) self._cleanup_timer.timeout.connect(self._on_cleanup) self._timers.append(self._cleanup_timer) self._progress_timer = QTimer(self) self._progress_timer.setInterval(self.progress_timeout) self._progress_timer.timeout.connect(self._on_download_progress) self._timers.append(self._progress_timer) self._progress_timer.start() def get_downloads_count(self): return len(self._downloads) def is_download_ready(self, obj_id): for task in self._ready_downloads_queue: if task.id == obj_id: return True return False def set_info_priority(self, new_priority): self._info_priority = new_priority def prepare_cleanup(self, cleanup_directories): self._cleanup_directories = cleanup_directories self._prepare_cleanup.emit() def set_download_limiter(self, limiter): 
self._set_download_limiter.emit((limiter, )) def pause_all_downloads(self, disconnect_callbacks=True): self._pause_all.emit(disconnect_callbacks) def resume_all_downloads(self): self._resume_all.emit() def cancel_all_downloads(self): self._cancel_all.emit() def cancel_download(self, obj_id): self._cancel_one.emit(obj_id) def accept_download(self, obj_id): self._accept_one.emit(obj_id) def add_file_download(self, priority, obj_id, obj_size, file_hash, path, display_name, on_downloaded=None, on_failed=None, files_info=None): logger.info("adding file download, id: %s, size: %s, priority: %s", obj_id, obj_size, priority) self._file_download.emit( (priority, obj_id, obj_size, file_hash, path, display_name, on_downloaded, on_failed, files_info)) def add_many_file_downloads(self, downloads_list): logger.info("adding many file downloads: %s", len(downloads_list)) self._many_file_downloads.emit(downloads_list) def add_patch_download(self, priority, obj_id, obj_size, path, display_name, on_downloaded=None, on_failed=None, files_info=None): logger.info("adding patch download, id: %s, size: %s, priority: %s", obj_id, obj_size, priority) self._patch_download.emit( (priority, obj_id, obj_size, path, display_name, on_downloaded, on_failed, files_info)) def get_downloaded_chunks(self, obj_id): task = self._downloads.get(obj_id, None) return task.get_downloaded_chunks() if task else set() def on_file_changed(self, event_uuid_before, event_uuid_after): if self._file_availability_info_supplier: self._file_availability_info_supplier.on_file_changed( event_uuid_before, event_uuid_after) def set_task_priority(self, obj_id, new_priority): logger.debug("Setting priority %s for task %s", new_priority, obj_id) self._task_priority.emit(obj_id, new_priority) def copy_added(self, file_hash): logger.debug("Copy added. 
File hash: %s", file_hash) self._copy_added.emit(file_hash) def _on_prepare_cleanup(self): logger.debug("Download manager prepare cleanup") self._cleanup_timer.start() def _on_set_download_limiter(self, limiter_tuple): self._limiter, = limiter_tuple if self._current_task: self._current_task.start(self._limiter) def _on_cleanup(self): if self._downloads: return logger.debug("Cleaning all download files in %s", self._cleanup_directories) for paths in chain( (iglob(join(d, '*.download')) for d in self._cleanup_directories), (iglob(join(d, '*.info')) for d in self._cleanup_directories)): for path in paths: try: remove_file(path) except: pass def _on_pause_all_downloads(self, disconnect_callbacks=True): self._paused = True for download in self._downloads.values(): download.pause(disconnect_callbacks) if self._ready_timer.isActive(): self._ready_timer.stop() self._on_download_progress() self.idle.emit() def _on_resume_all_downloads(self): self._paused = False for download in self._downloads.values(): if download is not self._current_task: download.resume(start_download=False) if not self._current_task: self._start_next_task() else: self._current_task.resume() if self._get_important_downloads_count(): self.working.emit() self._on_download_progress(force_sending=True) def _on_cancel_all_downloads(self): logger.debug("_on_cancel_all_downloads") for download in self._downloads.values(): download.cancel() self._finish_task(download) self._current_task = None self._downloads.clear() self._ready_downloads_queue = list() if self._ready_timer.isActive(): self._ready_timer.stop() self._on_download_progress() self.idle.emit() def _on_cancel_download(self, obj_id): task = self._find_task_by_id(obj_id) if task: task.download_failed.emit(task) self._clear_network_error() def _on_accept_download(self, obj_id): task = self._find_task_by_id(obj_id) if task: task.download_complete.emit(task) self._clear_network_error() def _on_data_received(self, param_tuple, connection_id): node_id, data 
= param_tuple try: messages = Messages().decode(data, node_id) msg = messages.msg[0] event_name = get_event_name(msg.obj_type, msg.mtype, repeating=True) logger.debug( "Repeating event detected: event %s, obj_id %s, " "node_id %s, connection_id %s", event_name, msg.obj_id, node_id, connection_id) signal = getattr(self, event_name, None) if isinstance(signal, Signal): signal.emit(messages, node_id) except Exception as e: try: msg = Message().decode(data, node_id) event_name = get_event_name(msg.obj_type, msg.mtype) logger.debug( "Event detected: event %s, obj_id %s, " "node_id %s, connection_id %s", event_name, msg.obj_id, node_id, connection_id) signal = getattr(self, event_name, None) if isinstance(signal, Signal): signal.emit(msg, node_id) except Exception: logger.error("Unhandled exception while processing") logger.error("data: '%s'", data) def _add_file_download(self, param_tuple, to_subscribe=True): priority, obj_id, obj_size, file_hash, file_path, display_name, \ on_downloaded, on_failed, files_info = param_tuple task = self._find_task_by_id(obj_id) if task: task.connect_callbacks(on_downloaded, on_failed) return task = FileDownloadTask(self._tracker, self._connectivity_service, priority, obj_id, obj_size, file_path, file_hash, display_name, parent=self, files_info=files_info) self._connect_task_signals(task, self._file_availability_info_supplier, self._file_data_consumer, on_downloaded, on_failed) self._downloads[obj_id] = task if not task.check_disk_space(): self._on_download_not_ready(task) return if to_subscribe: self._file_availability_info_consumer.subscribe(obj_id, priority=priority) self._emit_add_download_signals( emit_working=priority > IMPORTANT_DOWNLOAD_PRIORITY) if self._cleanup_timer.isActive(): self._cleanup_timer.stop() def _add_many_file_downloads(self, downloads_list): list( map( lambda d: self._add_file_download( (*d, None), to_subscribe=False), downloads_list)) # d[1] - obj_id, d[0] - priority subscription_list = [(d[1], d[0]) for d in 
downloads_list] self._file_availability_info_consumer.subscribe_many(subscription_list) def _add_patch_download(self, param_tuple): priority, obj_id, obj_size, file_path, display_name, on_downloaded, \ on_failed, files_info = param_tuple task = self._find_task_by_id(obj_id) if task: task.connect_callbacks(on_downloaded, on_failed) return task = PatchDownloadTask(self._tracker, self._connectivity_service, priority, obj_id, obj_size, file_path, display_name, parent=self, files_info=files_info) self._connect_task_signals(task, self._patch_availability_info_supplier, self._patch_data_consumer, on_downloaded, on_failed) self._downloads[obj_id] = task if not task.check_disk_space(): return self._patch_availability_info_consumer.subscribe(obj_id, priority=priority) self._emit_add_download_signals( emit_working=priority > IMPORTANT_DOWNLOAD_PRIORITY) if self._cleanup_timer.isActive(): self._cleanup_timer.stop() def _on_set_task_priority(self, obj_id, new_priority): task = self._find_task_by_id(obj_id) if not task: return task.priority = new_priority if task is self._current_task: self._swap_current_task() if not self._get_important_downloads_count(): self.idle.emit() logger.debug("Priority %s for task.id %s is set", new_priority, obj_id) def _swap_current_task(self): if not self._ready_downloads_queue: return first_task = heappop(self._ready_downloads_queue) heappush(self._ready_downloads_queue, first_task) if first_task < self._current_task: self._current_task.pause(disconnect_cb=False) heappush(self._ready_downloads_queue, self._current_task) logger.debug( "Task %s with priority %s " "swapped by task %s with priority %s", self._current_task.id, self._current_task.priority, first_task.id, first_task.priority) self._start_next_task() def _connect_task_signals(self, task, info_supplier, data_consumer, on_downloaded, on_failed): self._connectivity_service.node_outgoing_disconnected.connect( task.on_node_disconnected, Qt.QueuedConnection) 
task.download_ready.connect(self._on_download_ready, Qt.QueuedConnection) task.download_not_ready.connect(self._on_download_not_ready, Qt.QueuedConnection) task.download_complete.connect(self._on_download_complete, Qt.QueuedConnection) task.download_failed.connect(self._on_download_failure, Qt.QueuedConnection) task.download_error.connect(self._on_download_error, Qt.QueuedConnection) task.download_ok.connect(self._clear_network_error, Qt.QueuedConnection) task.download_finishing.connect(self._on_download_progress, Qt.DirectConnection) task.request_data.connect(data_consumer.request_data) task.abort_data.connect(data_consumer.abort_data_request, Qt.QueuedConnection) if info_supplier: task.chunk_downloaded.connect( info_supplier.on_new_availability_info, Qt.QueuedConnection) # connect traffic info signal 'signal_info_rx' task.signal_info_rx.connect(self._on_info_rx, Qt.QueuedConnection) task.no_disk_space.connect(self.no_disk_space.emit) task.copy_added.connect(self._on_copy_added, Qt.QueuedConnection) task.wrong_hash.connect(self._on_wrong_hash, Qt.QueuedConnection) task.connect_callbacks(on_downloaded, on_failed) def _emit_add_download_signals(self, emit_working=True): if not self._ready_timer.isActive(): self._ready_timer.start() if not self._rerequest_info_timer.isActive(): self._rerequest_info_timer.start() if emit_working: self.working.emit() def _on_download_ready(self, task): if not self._find_task_by_id(task.id): return logger.debug("download ready: %s", task.id) self._clear_network_error() if not self._paused and self._current_task is None: self._current_task = task task.start(self._limiter) else: self._add_to_queue(task) if self._current_task: self._swap_current_task() def _on_download_not_ready(self, task): logger.debug("download not ready: %s", task.id) try: self._ready_downloads_queue.remove(task) except ValueError: pass if task == self._current_task: if self._paused: self._current_task = None else: self._start_next_task() if isinstance(task, 
FileDownloadTask): info_consumer = self._file_availability_info_consumer else: info_consumer = self._patch_availability_info_consumer info_consumer.subscribe(task.id, force=True, priority=task.priority) if not self._paused and not self._ready_downloads_queue: self._emit_add_download_signals(emit_working=False) def _on_download_complete(self, task): logger.debug("on_download_complete: %s", task.id) self._downloads.pop(task.id, None) self._finish_task(task) if not self._current_task or task == self._current_task: self._start_next_task() else: try: self._ready_downloads_queue.remove(task) except ValueError: pass logger.debug( "on_download_complete, tasks left: %s, " "important downloads: %s," "ready_downloads_queue size: %s, current task: %s", self.get_downloads_count(), self._get_important_downloads_count(), len(self._ready_downloads_queue), self._current_task.id if self._current_task else "None") if not self._ready_downloads_queue and not self._current_task: self._process_empty_ready_downloads() def _on_download_failure(self, task): try: self._ready_downloads_queue.remove(task) except ValueError: pass self._downloads.pop(task.id, None) if task == self._current_task or not self._current_task: self._start_next_task() task.cancel() # task.clean() self._finish_task(task) def _on_download_progress(self, force_sending=False): downloads_info = self._get_important_downloads_info() uploads_info = self._get_important_uploads_info() task = self._current_task if not task or not isinstance(task, FileDownloadTask) or \ task.priority <= IMPORTANT_DOWNLOAD_PRIORITY or \ task.received == task.size: to_send = self._empty_progress else: progress = int(float(task.received) / float(task.size) * 100) objects_num = len([ t for t in self._downloads.values() if (isinstance(t, FileDownloadTask) and t.priority > IMPORTANT_DOWNLOAD_PRIORITY) ]) to_send = (task.display_name, min(progress, 100), objects_num) return self._send_progress(to_send, downloads_info, uploads_info, 
force_sending=force_sending) def _send_progress(self, to_send, downloads_info, uploads_info, force_sending=False): if to_send == self._last_progress_sent and \ not any(downloads_info) and \ uploads_info == self._last_uploads_info and not force_sending: return logger.verbose("Sending downloads status %s, %s, %s", to_send, downloads_info, uploads_info) self._last_uploads_info = deepcopy(uploads_info) self._last_progress_sent = to_send self.downloads_status.emit(*to_send, list(downloads_info), uploads_info) def _check_send_download_error(self): has_important_downloads = self._get_important_downloads_count() logger.debug( "_check_send_download_error, current_task: %s, " "ready downloads: %s, downloads: %s, " "has_important_downloads: %s", self._current_task, len(self._ready_downloads_queue), len(self._downloads), has_important_downloads) if (self._downloads and not self._current_task and not self._ready_downloads_queue): if not has_important_downloads: return self._on_download_error() def _on_download_error(self, error=""): if not error: error = "Waiting for nodes." 
self.error.emit(error) self._error_set = True def _start_next_task(self): self._current_task = None if self._paused: return if self._ready_downloads_queue: task = heappop(self._ready_downloads_queue) self._current_task = task task.start(self._limiter) if self._get_important_downloads_count(): self.working.emit() self._clear_network_error() else: self._process_empty_ready_downloads() def _process_empty_ready_downloads(self): self._current_task = None logger.debug("_process_empty_ready_downloads, downloads: %s", self._downloads) if not self._get_important_downloads_count(): self.idle.emit() self._clear_network_error() if not self._downloads and not self._cleanup_timer.isActive(): self._cleanup_timer.start() if self._downloads: if not self._ready_timer.isActive(): self._ready_timer.start() def _add_to_queue(self, task): heappush(self._ready_downloads_queue, task) def _connected_nodes_incoming_changed(self, nodes): self._node_incoming_list = nodes.copy() def _connected_nodes_outgoing_changed(self, nodes): self._node_outgoing_list = nodes.copy() def _check_downloads(self): has_important_downloads = self._get_important_downloads_count() logger.debug( "_check_downloads, current_task: %s, " "ready downloads: %s, downloads: %s, " "has_important_downloads: %s", self._current_task, len(self._ready_downloads_queue), len(self._downloads), has_important_downloads) if (self._downloads and not self._current_task and not self._ready_downloads_queue): if not has_important_downloads: return if not self._downloads_error_timer.isActive(): self._downloads_error_timer.start() if not self._rerequest_info_timer.isActive(): self._rerequest_info_timer.start() else: self._ready_timer.stop() def _get_important_downloads_count(self): return len( list( filter(lambda t: t.priority > IMPORTANT_DOWNLOAD_PRIORITY, self._downloads.values()))) def _on_wrong_hash(self, task): self._on_download_progress() def _rerequest_info_for_not_ready_downloads(self): if self._downloads and not self._current_task \ 
and not self._ready_downloads_queue: for task in self._downloads.values(): if task.priority >= IMPORTANT_DOWNLOAD_PRIORITY: if isinstance(task, FileDownloadTask): info_consumer = self._file_availability_info_consumer else: info_consumer = self._patch_availability_info_consumer info_consumer.subscribe(task.id, force=True, priority=task.priority) def _find_task_by_id(self, obj_id): return self._downloads.get(obj_id) def _on_availability_info_received(self, node_id, obj_id, info): task = self._find_task_by_id(obj_id) if task: task.on_availability_info_received(node_id, obj_id, info) def _on_availability_info_failure(self, node_id, obj_id, error): task = self._find_task_by_id(obj_id) if task: task.on_availability_info_failure(node_id, obj_id, error) def _on_task_data_received(self, param_tuple): node_id, obj_id, offset, length, data = param_tuple task = self._find_task_by_id(obj_id) if task: task.on_data_received(node_id, obj_id, offset, length, data) else: logger.warning( "No task to receive data. 
node_id %s, " "obj_id %s, offset %s, length %s", node_id, obj_id, offset, length) def _on_task_data_failed(self, node_id, obj_id, offset_str, error): task = self._find_task_by_id(obj_id) if task: task.on_data_failed(node_id, obj_id, int(offset_str), error) def _on_copy_added(self, file_hash): for task in self._downloads.values(): if task.file_hash == file_hash: task.complete() def _finish_task(self, task): if isinstance(task, FileDownloadTask): info_consumer = self._file_availability_info_consumer info_supplier = self._file_availability_info_supplier data_consumer = self._file_data_consumer else: info_consumer = self._patch_availability_info_consumer info_supplier = self._patch_availability_info_supplier data_consumer = self._patch_data_consumer info_consumer.unsubscribe(task.id, silently=True) if info_supplier: info_supplier.remove_subscriptions_on_download(task.id, task.size) try: task.chunk_downloaded.disconnect( info_supplier.on_new_availability_info) except Exception: pass try: task.disconnect(data_consumer) except Exception: pass try: self._connectivity_service.node_outgoing_disconnected.disconnect( task.on_node_disconnected) except Exception: pass try: task.disconnect(self) except Exception: pass try: task.signal_info_rx.disconnect(self._on_info_rx) except Exception: pass try: task.disconnect(task) except Exception: pass task.deleteLater() def _clear_network_error(self): if self._downloads_error_timer.isActive(): self._downloads_error_timer.stop() if self._error_set: self.clear_error.emit() self._error_set = False def _get_important_downloads_info(self): added_info = dict() changed_info = dict() for task in self._downloads.values(): if task.priority <= self._info_priority: continue obj_id = task.id downloaded = task.received state = \ DOWNLOAD_NO_DISK_ERROR if task.no_disk_space_error else \ DOWNLOAD_NOT_READY if not self._current_task else \ DOWNLOAD_READY if task != self._current_task else \ DOWNLOAD_STARTING if downloaded == 0 and \ not task.hash_is_wrong 
else \ DOWNLOAD_FAILED if downloaded == 0 else \ DOWNLOAD_LOADING if downloaded < task.size else \ DOWNLOAD_FINISHING short_info = { "state": state, "downloaded": downloaded, "priority": task.priority, "is_file": isinstance(task, FileDownloadTask) } if obj_id not in self._important_downloads_info: added_info[obj_id] = \ {"files_info": task.files_info, "size": task.size,} added_info[obj_id].update(short_info) elif self._important_downloads_info[obj_id] != short_info: changed_info[obj_id] = short_info self._important_downloads_info[obj_id] = short_info deleted_info = list( set(self._important_downloads_info) - set(self._downloads)) for obj_id in deleted_info: self._important_downloads_info.pop(obj_id, None) return added_info, changed_info, deleted_info def _get_important_uploads_info(self): if self._upload_enabled: uploads_info = deepcopy( self._file_data_supplier.get_uploads_info()) uploads_info.update( deepcopy(self._patch_data_supplier.get_uploads_info())) else: uploads_info = dict() return uploads_info def _kill_timers(self): for timer in self._timers: if timer.isActive(): timer.stop() def _on_quit(self): self._connectivity_service.connected_nodes_outgoing_changed.disconnect( self._file_availability_info_consumer.on_connected_nodes_changed) self._connectivity_service.connected_nodes_outgoing_changed.disconnect( self._patch_availability_info_consumer.on_connected_nodes_changed) self._connectivity_service.node_outgoing_connected.disconnect( self._file_availability_info_consumer.on_node_connected) self._connectivity_service.node_outgoing_connected.disconnect( self._patch_availability_info_consumer.on_node_connected) self._connectivity_service.data_received.disconnect( self._on_data_received) self.disconnect(self) self._on_cancel_all_downloads() self._kill_timers() if self._file_availability_info_supplier: self._connectivity_service.node_incoming_disconnected.disconnect( self._file_availability_info_supplier.on_node_disconnected) 
self._file_availability_info_supplier.disconnect( self._file_availability_info_supplier) self._connectivity_service.connected_nodes_incoming_changed\ .disconnect(self._file_availability_info_supplier .on_connected_nodes_changed) if self._patch_availability_info_supplier: self._connectivity_service.node_incoming_disconnected.disconnect( self._patch_availability_info_supplier.on_node_disconnected) self._patch_availability_info_supplier.disconnect( self._patch_availability_info_supplier) self._connectivity_service.connected_nodes_incoming_changed\ .disconnect(self._patch_availability_info_supplier .on_connected_nodes_changed) self._file_availability_info_consumer.disconnect( self._file_availability_info_consumer) self._file_availability_info_consumer.stop() self._file_data_consumer.disconnect(self._file_data_consumer) self._patch_availability_info_consumer.disconnect( self._patch_availability_info_consumer) self._patch_availability_info_consumer.stop() self._patch_data_consumer.disconnect(self._patch_data_consumer) if self._file_data_supplier: self._file_data_supplier.signal_info_tx.disconnect( self._on_info_tx) if self._patch_data_supplier: self._patch_data_supplier.signal_info_tx.disconnect( self._on_info_tx) def _on_info_tx(self, info_tx): self.signal_info_tx.emit(info_tx) def _on_info_rx(self, info_rx): self.signal_info_rx.emit(info_rx)
class UpdaterWorker(QObject):
    """Worker that drives periodic application-update checks and the
    install/cancel flow around an `updater` object, honouring the
    `autoupdate` and `next_update_check` config settings."""

    exited = Signal()
    exit = Signal()
    settings_changed = Signal(dict)
    check_for_update = Signal()
    install_update = Signal()
    show_tray_notification = Signal(str, str)

    # callback
    _update_install = Signal()
    _update_cancel = Signal()

    def __init__(self, updater, cfg):
        QObject.__init__(self, parent=None)
        self._cfg = cfg
        self._updater = updater
        self._updater.setParent(self)
        # True while an update request is in flight (set externally via
        # change_update_request_pending)
        self._update_request_pending = False

    def start(self):
        """Create the (single-shot) check timer, wire signals and kick off
        periodic checks when autoupdate is enabled."""
        self._updater_timer = QTimer(self)
        self._updater_timer.setSingleShot(True)
        self._updater_timer.timeout.connect(self._periodic_update_check)
        self._connect_signals()
        if self._cfg.autoupdate:
            self._on_start_periodic_update_check()

    def _connect_signals(self):
        """Wire config changes and the worker's command signals."""
        self._cfg.settings_changed.connect(
            lambda settings: self.settings_changed.emit(settings))
        self.settings_changed.connect(self._on_settings_changed)
        self.exit.connect(self._on_exit)
        self._update_install.connect(self._on_update_install)
        self._update_cancel.connect(self._on_update_cancel)
        self.install_update.connect(self._updater.install_update)
        self.check_for_update.connect(self._updater.check_for_update)
        self.check_for_update.connect(self._on_restart_periodic_update_check)

    def _on_exit(self):
        """Stop timers and the updater, drop all connections and report
        the worker as exited."""
        if self._updater_timer and self._updater_timer.isActive():
            self._updater_timer.stop()
        self._updater.stop()
        try:
            self._updater.updater_status_changed.disconnect()
            self._updater.update_ready.disconnect()
            self._updater.downloading_update.disconnect()
        except RuntimeError as e:
            # disconnect raises RuntimeError when nothing was connected
            logger.debug("Can't disconnect signal: %s", e)
        self._updater = None
        self.disconnect(self)
        self.exited.emit()

    def _setup_updater(self):
        """(Re)connect update_ready exactly once — disconnect first so a
        repeated setup cannot double-connect."""
        try:
            self._updater.update_ready.disconnect(self._on_update_ready)
        except RuntimeError as e:
            logger.debug("Can't disconnect signal update_ready: %s", e)
        self._updater.update_ready.connect(self._on_update_ready)

    def _on_update_ready(self, ready):
        """Notify the user and schedule installation in one minute."""
        if not ready:
            return
        logger.debug("on_update_ready")
        self.show_tray_notification.emit(
            tr("Application will restart automatically in one minute"),
            tr("Pvtbox is going to update"))
        QTimer.singleShot(60 * 1000, self._update_install.emit)

    def _on_update_install(self):
        """Install the update; on failure retry in one hour."""
        self._update_request_pending = False
        if self._updater_timer.isActive():
            self._updater_timer.stop()
        if not self._updater.install_update():
            self._updater_timer.start(60 * 60 * 1000)

    def _on_update_cancel(self):
        """Postpone the next update check by one day."""
        self._update_request_pending = False
        one_day = 1 * 24 * 60 * 60
        self._cfg.set_settings(dict(next_update_check=time.time() + one_day))
        if self._updater_timer.isActive():
            self._updater_timer.stop()
        self._updater_timer.start(one_day * 1000)

    def change_update_request_pending(self, is_pending):
        """Externally mark whether an update request is in flight."""
        self._update_request_pending = is_pending

    def _periodic_update_check(self):
        """Run an update check when due, otherwise re-arm the timer for
        the remaining wait."""
        if self._cfg.autoupdate:
            if time.time() > self._cfg.next_update_check:
                if not self._update_request_pending:
                    self._updater.check_for_update()
                # recheck every hour if no updates found or until cancelled
                self._updater_timer.start(60 * 60 * 1000)
            else:
                # QTimer.start() takes int milliseconds; the arithmetic
                # yields a float, which Qt bindings reject — cast and clamp
                self._updater_timer.start(int(max(
                    0, (self._cfg.next_update_check - time.time()) * 1000)))

    def _apply_autoupdate(self):
        """React to an autoupdate setting flip: start checking soon, or
        detach the update_ready handler."""
        if self._cfg.autoupdate:
            self._setup_updater()
            self._cfg.set_settings(dict(next_update_check=0))
            self._updater_timer.start(100)
        else:
            try:
                self._updater.update_ready.disconnect(self._on_update_ready)
            except RuntimeError as e:
                logger.debug("Can't disconnect signal update_ready: %s", e)

    def _on_settings_changed(self, changed_params):
        """Apply settings relevant to this worker."""
        if 'autoupdate' in changed_params:
            self._apply_autoupdate()

    def _on_start_periodic_update_check(self):
        """Arm the timer: 10 minutes when a check is already due, else
        wait until next_update_check."""
        self._setup_updater()
        # int cast: QTimer.start() requires integer milliseconds
        self._updater_timer.start(int(max(
            0,
            10 * 60 * 1000 if self._cfg.next_update_check < time.time()
            else (self._cfg.next_update_check - time.time()) * 1000)))

    def _on_restart_periodic_update_check(self):
        """Reset the schedule and restart periodic checks immediately."""
        if self._updater_timer.isActive():
            self._updater_timer.stop()
        if self._cfg.autoupdate:
            self._cfg.set_settings(dict(next_update_check=0))
            self._on_start_periodic_update_check()
class ModList(QTableView):
    """Table view listing installed mods.

    Provides sorting with persisted sort state, regex filtering,
    drag-and-drop / URL installation, Nexus Mods detail lookup and a
    context menu for enabling, disabling and uninstalling mods.
    """

    def __init__(self, parent: QWidget, model: Model) -> None:
        super().__init__(parent)
        self.hoverIndexRow = -1
        self.modmodel = model
        self.modCountLastUpdate = len(self.modmodel)
        # Serializes concurrent install requests (drop + paste etc.).
        self.installLock = asyncio.Lock()

        self.setMouseTracking(True)
        self.setSelectionMode(QAbstractItemView.ExtendedSelection)
        self.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.setWordWrap(False)
        self.setSortingEnabled(True)
        self.setFocusPolicy(Qt.StrongFocus)
        self.setAcceptDrops(True)
        self.setEditTriggers(QTableView.EditKeyPressed | QTableView.DoubleClicked)
        self.setShowGrid(False)
        self.setStyleSheet('''
            QTableView {
                gridline-color: rgba(255,255,255,1);
            }
            QTableView::item:!selected:hover {
                background-color: rgb(217, 235, 249);
            }
            ''')
        self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel)
        self.setHorizontalScrollMode(QAbstractItemView.ScrollPerPixel)
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.showContextMenu)
        self.verticalHeader().hide()
        self.verticalHeader().setVisible(False)
        self.verticalHeader().setDefaultSectionSize(25)
        self.setCornerButtonEnabled(False)
        self.horizontalHeader().setHighlightSections(False)
        self.horizontalHeader().setStretchLastSection(True)
        self.horizontalHeader().setSectionsMovable(True)

        self.listmodel = ModListModel(self, model)
        self.filtermodel = ModListFilterModel(self, self.listmodel)
        self.setModel(self.filtermodel)
        self.setItemDelegate(ModListItemDelegate(self))
        self.setSelectionModel(ModListSelectionModel(self, self.filtermodel))
        self.resizeColumnsToContents()

        # Restore persisted header layout and sort order.
        settings = QSettings()
        if settings.value('modlistHorizontalHeaderState'):
            self.horizontalHeader().restoreState(settings.value('modlistHorizontalHeaderState'))  # type: ignore
        self.horizontalHeader().sectionMoved.connect(lambda: self.headerChangedEvent())
        self.horizontalHeader().sectionResized.connect(lambda: self.headerChangedEvent())
        self.setFocus()

        # Default stable sort: priority (3), then category (2), then name (1).
        self.sortByColumn(3, Qt.AscendingOrder, False)
        self.sortByColumn(2, Qt.AscendingOrder, False)
        self.sortByColumn(1, Qt.AscendingOrder, False)
        if settings.value('modlistSortColumn') is not None and \
           settings.value('modlistSortOrder') is not None:
            try:
                self.sortByColumn(
                    cast(int, settings.value('modlistSortColumn', 1, int)),
                    Qt.DescendingOrder if cast(int, settings.value('modlistSortOrder', 1, int))
                    else Qt.AscendingOrder,
                    False
                )
            except Exception as e:
                logger.exception(f'could not restore sort order: {e}')
        self.horizontalHeader().sortIndicatorChanged.connect(self.sortByColumn)

        self.doubleClicked.connect(self.doubleClickEvent)
        model.updateCallbacks.append(self.modelUpdateEvent)

        # setup viewport caching to counter slow resizing with many table elements
        self.resizeTimer = QTimer(self)
        self.resizeTimer.setSingleShot(True)
        self.resizeTimer.setInterval(250)
        self.resizeTimer.timeout.connect(lambda: [
            self.resizeTimer.stop(),
            self.viewport().repaint(),
        ])
        self.viewportCache = QPixmap()
        self.viewportCacheSize = QSize(0, 0)

        # TODO: enhancement: notify of inconsistencies like enabled-but-unconfigured-mods
        # TODO: enhancement: offer option to read readme and other additional text files

    @debounce(200)
    async def headerChangedEvent(self) -> None:
        """Persist the header layout, debounced to avoid write storms."""
        settings = QSettings()
        state = self.horizontalHeader().saveState()
        # call later to work around pyqt5 StopIteration exception
        asyncio.get_running_loop().call_later(
            25 / 1000.0,
            lambda: settings.setValue('modlistHorizontalHeaderState', state)
        )

    def modelUpdateEvent(self, model: Model) -> None:
        """Model changed: auto-size columns when the list was empty before."""
        if not self.modCountLastUpdate:
            # if list was empty before, auto resize columns
            self.resizeColumnsToContents()
        self.modCountLastUpdate = len(self.modmodel)

    def mouseMoveEvent(self, event: QMouseEvent) -> None:
        """Track the hovered row for the item delegate."""
        self.hoverIndexRow = self.indexAt(event.pos()).row()
        return super().mouseMoveEvent(event)

    def doubleClickEvent(self, index: QModelIndex) -> None:
        """Double-click on the first column toggles the mod's enabled state."""
        if self.filtermodel.mapToSource(index).column() == 0:
            mod = self.modmodel[self.filtermodel.mapToSource(index).row()]
            if mod.enabled:
                asyncio.create_task(self.modmodel.disable(mod))
            else:
                asyncio.create_task(self.modmodel.enable(mod))

    def resizeEvent(self, event: QResizeEvent) -> None:
        """Grab a viewport snapshot so paintEvent can blit it while resizing."""
        super().resizeEvent(event)
        if not self.resizeTimer.isActive() and event.size() != self.viewportCacheSize:
            self.viewportCacheSize = event.size()
            self.viewportCache = self.viewport().grab()
            self.resizeTimer.start()

    def paintEvent(self, event: QPaintEvent) -> None:
        """While a resize is in flight, paint the cached pixmap instead of
        doing a full (slow) item-view repaint."""
        if self.resizeTimer.isActive():
            painter = QPainter(self.viewport())
            painter.drawPixmap(0, 0, self.viewportCache)
        else:
            super().paintEvent(event)

    def selectionChanged(self, selected: QItemSelection, deselected: QItemSelection) -> None:
        # Extension point; currently delegates to the default implementation.
        super().selectionChanged(selected, deselected)

    def eventFilter(self, obj: QObject, event: QEvent) -> bool:
        # Extension point; currently delegates to the default implementation.
        return super().eventFilter(obj, event)

    def sortByColumn(self, col: int, order: Qt.SortOrder, save: bool = True) -> None:  # type: ignore
        """Sort the view; optionally persist column/order to QSettings."""
        if save and col is not None and order is not None:
            settings = QSettings()
            settings.setValue('modlistSortColumn', col)
            settings.setValue('modlistSortOrder', 0 if order == Qt.AscendingOrder else 1)
        super().sortByColumn(col, order)

    def showContextMenu(self, pos: QPoint) -> None:
        """Build and show the per-selection context menu."""
        mods = self.getSelectedMods()
        if not mods:
            return
        menu = QMenu(self)
        actionOpen = menu.addAction(
            QIcon(str(getRuntimePath('resources/icons/open-folder.ico'))),
            '&Open Directory')
        actionOpen.triggered.connect(lambda: [
            util.openDirectory(self.modmodel.getModPath(mod))  # type: ignore
            for mod in mods
        ])
        menu.addSeparator()
        actionEnable = menu.addAction('&Enable')
        actionEnable.triggered.connect(lambda: [
            asyncio.create_task(self.enableSelectedMods(True))
        ])
        actionEnable.setEnabled(not all(mod.enabled for mod in mods))
        actionDisable = menu.addAction('&Disable')
        actionDisable.triggered.connect(lambda: [
            asyncio.create_task(self.enableSelectedMods(False))
        ])
        actionDisable.setEnabled(not all(not mod.enabled for mod in mods))
        menu.addSeparator()
        actionUninstall = menu.addAction('&Uninstall')
        actionUninstall.triggered.connect(lambda: [
            asyncio.create_task(self.deleteSelectedMods())
        ])
        menu.addSeparator()
        actionOpenNexus = menu.addAction(
            QIcon(str(getRuntimePath('resources/icons/browse.ico'))),
            'Open &Nexus Mods page')
        actionOpenNexus.triggered.connect(lambda: [
            QDesktopServices.openUrl(QUrl(f'https://www.nexusmods.com/witcher3/mods/{modid}'))
            for modid in {mod.modid for mod in mods if mod.modid > 0}
        ])
        actionOpenNexus.setEnabled(not all(mod.modid <= 0 for mod in mods))
        menu.popup(self.viewport().mapToGlobal(pos))

    def selectRowChecked(self, row: int) -> None:
        """Select ``row`` if it exists, otherwise fall back to the last row."""
        nums: int = self.filtermodel.rowCount()
        if row < nums and row >= 0:
            self.selectRow(row)
        elif nums > 0:
            self.selectRow(nums - 1)

    def getSelectedMods(self) -> List[Mod]:
        """Return the Mod objects behind the currently selected rows."""
        return [
            self.modmodel[self.filtermodel.mapToSource(index).row()]
            for index in self.selectionModel().selectedRows()
        ]

    async def enableSelectedMods(self, enable: bool = True) -> None:
        """Enable or disable every selected mod, one at a time."""
        if not self.selectionModel().hasSelection():
            return
        mods = self.getSelectedMods()
        self.setDisabled(True)
        for mod in mods:
            try:
                if enable:
                    await self.modmodel.enable(mod)
                else:
                    await self.modmodel.disable(mod)
            except Exception as e:
                logger.bind(name=mod.filename).exception(f'Could not enable/disable mod: {e}')
        self.setDisabled(False)
        self.setFocus()

    async def deleteSelectedMods(self) -> None:
        """Remove every selected mod, then restore a sensible selection."""
        if not self.selectionModel().hasSelection():
            return
        self.setDisabled(True)
        mods = self.getSelectedMods()
        # TODO: incomplete: ask if selected mods should really be removed
        inds = self.selectedIndexes()
        self.selectionModel().clear()
        for mod in mods:
            try:
                await self.modmodel.remove(mod)
            except Exception as e:
                logger.bind(name=mod.filename).exception(f'Could not delete mod: {e}')
        asyncio.get_running_loop().call_later(
            100 / 1000.0, partial(self.selectRowChecked, inds[0].row()))
        self.setDisabled(False)
        self.setFocus()

    async def updateModDetails(self, mod: Mod) -> bool:
        """Fetch Nexus Mods details for ``mod`` by MD5 hash and store them.

        @return True on success, False when the mod has no hash, the
            request failed, the response was malformed, or saving failed.
        """
        logger.bind(name=mod.filename, dots=True).debug('Requesting details for mod')
        if not mod.md5hash:
            logger.bind(name=mod.filename).warning(
                'Could not get details for mod not installed from archive')
            return False
        try:
            details = await getModInformation(mod.md5hash)
        except Exception as e:
            logger.bind(name=mod.filename).warning(f'{e}')
            return False
        try:
            package = str(details[0]['mod']['name'])
            summary = str(details[0]['mod']['summary'])
            modid = int(details[0]['mod']['mod_id'])
            category = int(details[0]['mod']['category_id'])
            version = str(details[0]['file_details']['version'])
            fileid = int(details[0]['file_details']['file_id'])
            uploadname = str(details[0]['file_details']['name'])
            uploadtime = str(details[0]['file_details']['uploaded_time'])
            mod.package = package
            mod.summary = summary
            mod.modid = modid
            mod.category = getCategoryName(category)
            mod.version = version
            mod.fileid = fileid
            mod.uploadname = uploadname
            uploaddate = dateparser.parse(uploadtime)
            if uploaddate:
                mod.uploaddate = uploaddate.astimezone(tz=timezone.utc)
            else:
                logger.bind(name=mod.filename).debug(
                    f'Could not parse date {uploadtime} in mod information response')
        except KeyError as e:
            logger.bind(name=mod.filename).exception(
                f'Could not find key "{str(e)}" in mod information response')
            return False
        try:
            await self.modmodel.update(mod)
        except Exception as e:
            logger.bind(name=mod.filename).exception(f'Could not update mod: {e}')
            return False
        return True

    async def updateSelectedModsDetails(self) -> None:
        """Fetch details for all selected mods concurrently and report a
        success/error summary."""
        if not self.selectionModel().hasSelection():
            return
        self.setDisabled(True)
        updatetime = datetime.now(tz=timezone.utc)
        mods = self.getSelectedMods()
        logger.bind(newline=True, output=False).debug(f'Requesting details for {len(mods)} mods')
        # NOTE: the `loop` argument to asyncio.gather was removed in
        # Python 3.10 and is redundant inside a running loop.
        results = await asyncio.gather(
            *[self.updateModDetails(mod) for mod in mods],
            return_exceptions=True
        )
        # With return_exceptions=True the list may contain exception
        # instances; count only explicit successes (sum(results) would
        # raise TypeError on an exception instance).
        successes = sum(r is True for r in results)
        errors = len(results) - successes
        message = 'Updated details for {0} mods{1}'.format(
            successes, f' ({errors} errors)' if errors else '')
        if errors:
            logger.warning(message)
        else:
            logger.success(message)
        self.modmodel.setLastUpdateTime(updatetime)
        self.setDisabled(False)
        self.setFocus()

    def keyPressEvent(self, event: QKeyEvent) -> None:
        """Delete key removes the selected mods."""
        if event.matches(QKeySequence.Delete):
            asyncio.create_task(self.deleteSelectedMods())
        super().keyPressEvent(event)

    def setFilter(self, search: str) -> None:
        """Apply a case-insensitive regex filter to the list."""
        self.filtermodel.setFilterRegularExpression(
            QRegularExpression(search, QRegularExpression.CaseInsensitiveOption)
        )

    async def checkInstallFromURLs(self, paths: List[Union[str, QUrl]],
                                   local: bool = True, web: bool = True) -> None:
        """Install mods from a list of local paths and/or download URLs.

        Serialized through installLock; `async with` guarantees release
        even if an unexpected exception escapes.
        """
        async with self.installLock:
            installed = 0
            errors = 0
            installtime = datetime.now(tz=timezone.utc)
            # remove duplicate paths
            paths = list(set(paths))
            logger.bind(newline=True, output=False).debug('Starting install from URLs')
            try:
                # `loop` kwarg removed in Python 3.10; gather uses the
                # running loop implicitly.
                results = await asyncio.gather(
                    *[self.installFromURL(path, local, web, installtime) for path in paths]
                )
                for result in results:
                    installed += result[0]
                    errors += result[1]
            except Exception as e:
                # we should never land here, but don't lock up the UI if it happens
                logger.exception(str(e))
                errors += 1
            if installed > 0 or errors > 0:
                log = logger.bind(modlist=bool(installed))
                message = 'Installed {0} mods{1}'.format(
                    installed, f' ({errors} errors)' if errors else '')
                if installed > 0 and errors > 0:
                    log.warning(message)
                elif installed > 0:
                    log.success(message)
                else:
                    log.error(message)
            self.setDisabled(False)
            self.setFocus()

    async def installFromURL(
        self, path: Union[str, QUrl], local: bool = True, web: bool = True,
        installtime: Optional[datetime] = None
    ) -> Tuple[int, int]:
        """Dispatch a single install source to the download or file path.

        @return (installed, errors) counts.
        """
        installed = 0
        errors = 0
        if not installtime:
            installtime = datetime.now(tz=timezone.utc)
        if isinstance(path, QUrl):
            path = path.toString()
        if web and isValidModDownloadUrl(path):
            self.setDisabled(True)
            logger.bind(dots=True, path=path).info(f'Installing mods from')
            i, e = await self.installFromFileDownload(path, installtime)
            installed += i
            errors += e
        elif local and isValidFileUrl(path):
            self.setDisabled(True)
            path = QUrl(path)
            logger.bind(dots=True, path=Path(path.toLocalFile())).info(f'Installing mods from')
            i, e = await self.installFromFile(Path(path.toLocalFile()), installtime)
            installed += i
            errors += e
        else:
            logger.bind(path=path).error('Could not install mods from')
        return installed, errors

    async def installFromFileDownload(self, url: str,
                                      installtime: Optional[datetime] = None) -> Tuple[int, int]:
        """Download an archive from ``url`` to a temp file and install it.

        @return (installed, errors) counts.
        """
        installed = 0
        errors = 0
        if not installtime:
            installtime = datetime.now(tz=timezone.utc)
        try:
            target = Path(urlparse(url).path)
            target = Path(tempfile.gettempdir()).joinpath(
                'w3modmanager/download').joinpath(f'{unquote(target.name)}')
        except ValueError:
            logger.bind(name=url).exception('Wrong request URL')
            return 0, 1
        try:
            target.parent.mkdir(parents=True, exist_ok=True)
            logger.bind(name=url).info('Starting to download file')
            await downloadFile(url, target)
            installed, errors = await self.installFromFile(target, installtime)
        # Keep the download-specific handler narrow so the generic
        # handler below is reachable (original tuple included Exception,
        # which shadowed it).
        except (RequestError, ResponseError) as e:
            logger.bind(name=url).exception(f'Failed to download file: {e}')
            return 0, 1
        except Exception as e:
            logger.exception(str(e))
            return 0, 1
        finally:
            if target.is_file():
                target.unlink()
        return installed, errors

    async def installFromFile(self, path: Path,
                              installtime: Optional[datetime] = None) -> Tuple[int, int]:
        """Install mod(s) from an archive file or a directory.

        Unpacks archives to a temp dir, validates the contents, adds each
        detected mod to the model, and optionally enriches them with Nexus
        Mods details (requested concurrently by MD5 hash).

        @return (installed, errors) counts.
        """
        originalpath = path
        installed = 0
        errors = 0
        archive = path.is_file()
        source = None
        md5hash = ''
        details = None
        detailsrequest: Optional[asyncio.Task] = None
        if not installtime:
            installtime = datetime.now(tz=timezone.utc)
        try:
            if archive:
                # unpack archive, set source and request details
                md5hash = getMD5Hash(path)
                source = path
                settings = QSettings()
                if settings.value('nexusGetInfo', 'False') == 'True':
                    logger.bind(path=str(path), dots=True).debug('Requesting details for archive')
                    detailsrequest = asyncio.create_task(getModInformation(md5hash))
                logger.bind(path=str(path), dots=True).debug('Unpacking archive')
                path = await extractMod(source)

            # validate and read mod
            valid, exhausted = containsValidMod(path, searchlimit=8)
            if not valid:
                if not exhausted and self.showContinueSearchDialog(searchlimit=8):
                    if not containsValidMod(path):
                        raise InvalidPathError(path, 'Invalid mod')
                elif not exhausted:
                    raise InvalidPathError(path, 'Stopped searching for mod')
                else:
                    raise InvalidPathError(path, 'Invalid mod')
            mods = Mod.fromDirectory(path, searchCommonRoot=not archive)

            installedMods = []
            # update mod details and add mods to the model
            for mod in mods:
                mod.md5hash = md5hash
                try:
                    # TODO: incomplete: check if mod is installed, ask if replace
                    await self.modmodel.add(mod)
                    installedMods.append(mod)
                    installed += 1
                except ModExistsError:
                    logger.bind(path=source if source else mod.source,
                                name=mod.filename).error(f'Mod exists')
                    errors += 1
                    continue

            # wait for details response if requested
            if detailsrequest:
                try:
                    details = await detailsrequest
                except Exception as e:
                    # best-effort: details are optional
                    logger.warning(
                        f'Could not get information for {source.name if source else path.name}: {e}')

            # update mod with additional information
            if source or details:
                for mod in installedMods:
                    if source:
                        # set source if it differs from the scan directory, e.g. an archive
                        mod.source = source
                    if details:
                        # set additional details if requested and available
                        try:
                            package = str(details[0]['mod']['name'])
                            summary = str(details[0]['mod']['summary'])
                            modid = int(details[0]['mod']['mod_id'])
                            category = int(details[0]['mod']['category_id'])
                            version = str(details[0]['file_details']['version'])
                            fileid = int(details[0]['file_details']['file_id'])
                            uploadname = str(details[0]['file_details']['name'])
                            uploadtime = str(details[0]['file_details']['uploaded_time'])
                            mod.package = package
                            mod.summary = summary
                            mod.modid = modid
                            mod.category = getCategoryName(category)
                            mod.version = version
                            mod.fileid = fileid
                            mod.uploadname = uploadname
                            uploaddate = dateparser.parse(uploadtime)
                            if uploaddate:
                                mod.uploaddate = uploaddate.astimezone(tz=timezone.utc)
                            else:
                                logger.bind(name=mod.filename).debug(
                                    f'Could not parse date {uploadtime} in mod information response')
                        except KeyError as e:
                            logger.bind(name=mod.filename).exception(
                                f'Could not find key "{str(e)}" in mod information response')
                    try:
                        await self.modmodel.update(mod)
                    except Exception:
                        logger.bind(name=mod.filename).warning('Could not update mod details')

        except ModelError as e:
            logger.bind(path=e.path).error(e.message)
            errors += 1
        except InvalidPathError as e:
            # TODO: enhancement: better install error message
            logger.bind(path=e.path).error(e.message)
            errors += 1
        except FileNotFoundError as e:
            logger.bind(path=e.filename).error(e.strerror if e.strerror else str(e))
            errors += 1
        except OSError as e:
            logger.bind(path=e.filename).error(e.strerror if e.strerror else str(e))
            errors += 1
        except Exception as e:
            logger.exception(str(e))
            errors += 1
        finally:
            if detailsrequest and not detailsrequest.done():
                detailsrequest.cancel()
            if archive and not path == originalpath:
                try:
                    util.removeDirectory(path)
                except Exception:
                    logger.bind(path=path).warning('Could not remove temporary directory')
            self.modmodel.setLastUpdateTime(installtime)
            self.repaint()
        return installed, errors

    def showContinueSearchDialog(self, searchlimit: int) -> bool:
        """Ask whether to keep scanning past ``searchlimit`` directories."""
        messagebox = QMessageBox(self)
        messagebox.setWindowTitle('Unusual search depth')
        messagebox.setText(f'''
            <p>No mod detected after searching through {searchlimit} directories.</p>
            <p>Are you sure this is a valid mod?</p>
            ''')
        messagebox.setTextFormat(Qt.RichText)
        messagebox.setStandardButtons(QMessageBox.Cancel)
        yes: QPushButton = QPushButton(' Yes, continue searching ', messagebox)
        yes.setAutoDefault(True)
        yes.setDefault(True)
        messagebox.addButton(yes, QMessageBox.YesRole)
        messagebox.exec_()
        return messagebox.clickedButton() == yes

    def dropEvent(self, event: QDropEvent) -> None:
        """Install everything dropped onto the list."""
        event.accept()
        self.setDisabled(True)
        self.repaint()
        asyncio.create_task(self.checkInstallFromURLs(event.mimeData().urls()))

    def dragEnterEvent(self, event: QDragEnterEvent) -> None:
        """Accept the drag only for local archives or valid mod directories."""
        self.setDisabled(True)
        self.repaint()
        urls = event.mimeData().urls()
        if not urls:
            self.setDisabled(False)
            self.setFocus()
            event.ignore()
            return
        for url in urls:
            try:
                parse = urlparse(url.toString())
                if parse.scheme not in ['file']:
                    self.setDisabled(False)
                    event.ignore()
                    return
                filepath = Path(url.toLocalFile())
                if isArchive(filepath) or containsValidMod(filepath, searchlimit=8)[0]:
                    self.setDisabled(False)
                    event.accept()
                    return
            except Exception as e:
                logger.debug(str(e))
        self.setDisabled(False)
        self.setFocus()
        event.ignore()

    def dragMoveEvent(self, event: QDragMoveEvent) -> None:
        event.accept()

    def dragLeaveEvent(self, event: QDragLeaveEvent) -> None:
        event.accept()
class UploadTaskHandler(QObject): """ Class incapsulating upload task related routines """ progress = Signal(str, int, int) download_status = Signal( str, int, int, list, # download info dict) # empty dict idle = Signal() working = Signal() upload_cancelled = Signal(str) # upload file name upload_folder_deleted = Signal(str) # upload file name upload_folder_excluded = Signal(str) # upload file name upload_folder_not_synced = Signal(str) # upload file name _on_server_connect_signal = Signal() _upload_task_completed_signal = Signal( str, # str(upload_id) float, # elapsed str) # str(total) _upload_task_error_signal = Signal( str, # str(upload_id) str) # error message _upload_task_progress_signal = Signal( str, # str(upload_id) str, # str(loaded) str, # str(total) float) # elapsed _on_upload_added_signal = Signal(dict) _on_upload_cancel_signal = Signal(int) def __init__(self, cfg, web_api, filename, ss_client, tracker=None, parent=None, network_speed_calculator=None, db=None): """ Constructor @param web_api Client_API class instance [Client_API] @param ss_client Instance of signalling.SignalServerClient @param tracker Instance of stat_tracking.Tracker """ QObject.__init__(self, parent=parent) self._cfg = cfg # Client_API class instance self._web_api = web_api # Signalling server client instance self._ss_client = ss_client self._tracker = tracker self._network_speed_calculator = network_speed_calculator self._db = db self._filename = ensure_unicode(filename) self.task_to_report = {} self._downloader = HttpDownloader( network_speed_calculator=self._network_speed_calculator) self._set_callbacks() # Download tasks info as task_id: info self.download_tasks_info = {} self._last_progress_report_time = 0 self._last_length = 0.0 self._was_stopped = False self._empty_progress = (None, 0, 0) self._open_uploads_file() self._uploads_deleted = \ self._uploads_excluded = \ self._uploads_not_synced = set() self._on_server_connect_signal.connect(self._on_server_connect) 
self._upload_task_completed_signal.connect( self._on_upload_task_completed) self._upload_task_error_signal.connect(self._on_upload_task_error) self._upload_task_progress_signal.connect( self._on_upload_task_progress) self._on_upload_added_signal.connect(self._on_upload_added) self._on_upload_cancel_signal.connect(self._on_upload_cancel) self._check_upload_path_timer = QTimer(self) self._check_upload_path_timer.setInterval(10 * 1000) self._check_upload_path_timer.timeout.connect(self._check_upload_paths) def set_download_limiter(self, download_speed_limiter): self._downloader.set_download_limiter(download_speed_limiter) def _set_callbacks(self): self._downloader.set_callbacks( on_download_auth_data_cb=self.on_download_auth_data_cb, on_download_completed=self.upload_task_completed_cb, on_download_error=self.upload_task_error_cb, on_download_progress=self.upload_task_progress_cb, on_get_upload_state_cb=self.get_upload_state_cb, ) def _open_uploads_file(self): logger.info("Loading upload task data from '%s'...", self._filename) # Load previously stored download task data try: if not op.exists(self._filename): with open(self._filename, 'wb') as f: pickle.dump(self.task_to_report, f) # Not reported complete download tasks info as task_id: info with open(self._filename, 'rb') as f: self.task_to_report = pickle.load(f) except Exception as e: logger.error("Failed to load upload task data (%s)", e) self.task_to_report = None try: remove_file(self._filename) except Exception: pass def _sync__uploads_file(self): try: with open(self._filename, 'wb') as f: pickle.dump(self.task_to_report, f) except Exception as e: logger.error("Failed to save upload task data (%s)", e) try: remove_file(self._filename) except Exception: pass def _cleanup(self, upload_id): """ Cleans data related with given upload task @param upload_id ID of upload task [string] """ if upload_id not in self.download_tasks_info: return logger.debug("Doing cleanup for upload task ID '%s'...", upload_id) # Remove 
temporary file tmp_fn = self.download_tasks_info[upload_id].get('tmp_fn', None) if tmp_fn is not None and op.exists(tmp_fn): try: os.remove(tmp_fn) except Exception as e: logger.warning("Failed to delete temporary file '%s' (%s)", tmp_fn, e) # Clear upload info del self.download_tasks_info[upload_id] if not self.download_tasks_info: self.idle.emit() def _on_upload_failed(self, upload_id): """ Routines to be executed on upload task fail @param upload_id ID of upload task [string] """ # Notify signalling server on upload fail self._ss_client.send_upload_failed(upload_id) task_info = self.download_tasks_info.get(upload_id, None) if task_info: self.download_status.emit(*self._empty_progress, [{}, {}, [str(upload_id)]], {}) # Cleanup upload data self._cleanup(upload_id) def _store_complete_upload_id(self, upload_id): """ Saves upload task ID in the case in could not be reported immediately @param upload_id ID of upload task [string] """ if self.task_to_report is None: self._open_uploads_file() if self.task_to_report is None: return self.task_to_report[str(upload_id)] = \ self.download_tasks_info[upload_id] self._sync__uploads_file() def _clean_complete_upload_id(self, upload_id): """ Remove previously stored upload task ID @param upload_id ID of upload task [string] """ if self.task_to_report is None: self._open_uploads_file() if self.task_to_report is None: return upload_id = str(upload_id) # For pickle if upload_id in self.task_to_report: del self.task_to_report[upload_id] self._sync__uploads_file() def _report_stored_uploads(self): """ Checks stored complete upload task IDs and report them to signalling server """ if self.task_to_report is None: self._open_uploads_file() if self.task_to_report is None: return logger.debug("Checking upload tasks haven't been reported...") count = 0 for upload_id in list(self.task_to_report.keys()): if self._ss_client.send_upload_complete(int(upload_id)): count += 1 self._clean_complete_upload_id(upload_id) if count > 0: logger.info( 
"Reported %s upload tasks completion to the signalling server") def _on_upload_complete(self, upload_id): """ Routines to be executed on upload task successful completion @param upload_id ID of upload task [string] """ # Notify signalling server on upload completetion if not self._ss_client.send_upload_complete(upload_id): self._store_complete_upload_id(upload_id) # Cleanup upload data self._cleanup(upload_id) def on_download_auth_data_cb(self, upload_id): """ Callback to be called on HTTP download start to obtain data to be sent to the server to confirm node auth @param upload_id ID of upload task @return Download auth data [string] """ # TODO: rework callback with signals/slots (issue # 335) auth_data = self._web_api.get_request_data('download', {'upload_id': upload_id}, force_update=True) logger.debug("Auth data for upload task ID '%s' is: '%s'", upload_id, auth_data) return auth_data def on_upload_added_cb(self, upload_info): """ Callback to be called on new upload notification @param upload_info Value of 'upload_add' protocol message 'data' field """ self._on_upload_added_signal.emit(upload_info) def _on_upload_added(self, upload_info): """ Slot to be called on new upload notification @param upload_info Value of 'upload_add' protocol message 'data' field """ upload_id = upload_info['upload_id'] # Check whether this download is already being processed if upload_id in self.download_tasks_info: logger.warning("Upload ID '%s' is being downloaded already", upload_id) return # Save upload data upload_info['loaded'] = 0 upload_info['size'] = 0 upload_info['state'] = 'running' upload_info['elapsed'] = 0.0 self.download_tasks_info[upload_id] = upload_info # Check whether upload path is not ready or is excluded from sync # or is deleted path = self._check_upload_path(upload_id) if path is None: return else: if not self._check_upload_path_timer.isActive(): self._check_upload_path_timer.start() added_info, changed_info = self._get_download_info( upload_id, 
is_first_report=True) self.download_status.emit(*self._empty_progress, [added_info, changed_info, []], {}) self.working.emit() # Generate filename to save file into tmp_fn = self.download_tasks_info[upload_id]['tmp_fn'] = op.join( get_patches_dir(self._cfg.sync_directory), '.upload_' + str(upload_id)) self._download(upload_id, tmp_fn) def _download(self, upload_id, path, proceed=None): """ Start new download or proceed resumed download @param upload_id - id if upload @param path - temporary path to save uploaded file @param proceed None if new download or tuple (offset, size) if resuming paused download """ self._downloader.download(id=upload_id, url=API_EVENTS_URI.format(self._cfg.host), path=path, do_post_request=True, timeout=self._cfg.http_downloader_timeout, proceed=proceed, host=self._cfg.host) self._last_progress_report_time = 0 self._last_length = 0.0 def on_upload_cancel_cb(self, upload_id): """ Callback to be called on upload_cancel notification @param upload_id Value of 'upload_cancel' protocol message 'data' field """ self._on_upload_cancel_signal.emit(upload_id) def _on_upload_cancel(self, upload_id): """ Slot to be called on upload_cancel notification @param upload_id Value of 'upload_cancel' protocol message 'data' field """ if upload_id not in self.download_tasks_info: return self.download_tasks_info[upload_id]["state"] = 'cancelled' def get_upload_state_cb(self, upload_id): """ Callback to be called on upload status request statuses are 'running' 'paused' 'cancelled' @param upload_id id of upload """ task_info = self.download_tasks_info.get(upload_id, None) if not task_info: return "" return task_info.get("state", "") def upload_task_progress_cb(self, upload_id, loaded, total, elapsed): """ Callback function to obtain upload task download progress @param upload_id ID of upload task @param loaded Amount of data downloaded already (in bytes) [long] @param total Size of file being downloaded (in bytes) [long] @param elapsed Time elapsed from download 
starting (in seconds) [float] """ task_info = self.download_tasks_info.get(upload_id, None) if not task_info: return self._upload_task_progress_signal.emit(str(upload_id), str(loaded), str(total), elapsed) def _on_upload_task_progress(self, upload_id_str, loaded_str, total_str, elapsed): """ Slot to obtain upload task download progress @param upload_id_str ID of upload task [str] @param loaded_str Amount of data downloaded already (in bytes) [str] @param total_str Size of file being downloaded (in bytes) [str] @param elapsed Time elapsed from download starting (in seconds) [float] """ upload_id, loaded, total = int(upload_id_str), int(loaded_str), \ int(total_str) self.download_tasks_info[upload_id]['loaded'] = loaded self.download_tasks_info[upload_id]['size'] = total if not total: return cur_time = time.time() # Report once in 0.5 sec and if downloaded more than 1% if cur_time - self._last_progress_report_time < 0.5 or \ (float(loaded) - self._last_length) / total < 0.01: return self._last_progress_report_time = cur_time self._last_length = float(loaded) logger.info( "Upload task ID '%s' progress: downloaded %s of %s bytes; " "%s seconds elapsed", upload_id, loaded, total, elapsed) percent = int(float(loaded) / total * 100) if self.download_tasks_info[upload_id]['state'] == 'running' and \ percent <= 98: progress = ('Downloading file {}'.format( self.download_tasks_info[upload_id]['upload_name']), percent, len(self.download_tasks_info)) else: progress = self._empty_progress added_info, changed_info = self._get_download_info(upload_id) self.download_status.emit(*progress, [added_info, changed_info, []], {}) def upload_task_error_cb(self, upload_id, message): """ Callback function to be called on upload task download error @param upload_id ID of upload task @param message Error description [string] """ self._upload_task_error_signal.emit(str(upload_id), message) def _on_upload_task_error(self, upload_id_str, message): """ Slot to be called on upload task download 
error @param upload_id_str ID of upload task [string] @param message Error description [string] """ upload_id = int(upload_id_str) logger.error("Upload task ID '%s' failed (%s)", upload_id, message) if self._tracker: self._tracker.http_error(upload_id) self._on_upload_failed(upload_id) def upload_task_completed_cb(self, upload_id, elapsed, total): """ Callback function to be called on upload task download completion @param upload_id ID of upload task @param elapsed Time elapsed from download starting (in seconds) [float] @param total Size of file being downloaded (in bytes) [long] """ self._upload_task_completed_signal.emit(str(upload_id), elapsed, str(total)) def _on_upload_task_completed(self, upload_id_str, elapsed, total_str): """ Slot to be called on upload task download completion @param upload_id_str ID of upload task [string] @param elapsed Time elapsed from download starting (in seconds) [float] @param total_str Size of file being downloaded (in bytes) [string] """ upload_id = int(upload_id_str) state = self.download_tasks_info[upload_id]['state'] upload_name = self.download_tasks_info[upload_id]['upload_name'] if state == 'cancelled': logger.debug("Upload task %s cancelled", upload_id) self._on_upload_failed(upload_id) # Tray notification self.upload_cancelled.emit(upload_name) return elif state == 'paused': self.download_tasks_info[upload_id]['elapsed'] += elapsed return elapsed += self.download_tasks_info[upload_id]['elapsed'] total = int(total_str) bps_avg = int(total / elapsed) if elapsed > 0 else 0 bps_avg = "{:,}".format(bps_avg) logger.info( "Upload task ID '%s' complete (downloaded %s bytes in %s seconds" "(%s Bps))", upload_id_str, total_str, elapsed, bps_avg) # Calculate checksum tmp_fn = self.download_tasks_info[upload_id]['tmp_fn'] checksum = self.download_tasks_info[upload_id]['upload_md5'] try: logger.debug("Calculating checksum for upload task ID '%s'...", upload_id) checksum_calculated = hashfile(tmp_fn) except Exception as e: 
logger.error("Failed to calculate checksum of '%s' (%s)", tmp_fn, e) self._on_upload_failed(upload_id) return if self._tracker: self._tracker.http_download(upload_id, total, elapsed, checksum_calculated == checksum) # Validate checksum if checksum_calculated != checksum: logger.error("MD5 checkfum of '%s' is '%s' instead of '%s'", tmp_fn, checksum_calculated, checksum) self._on_upload_failed(upload_id) return # Move file to its location path = self._check_upload_path(upload_id) if path is None: return path = FilePath(op.join(path, upload_name)) fullpath = ensure_unicode(op.join(self._cfg.sync_directory, path)) fullpath = FilePath(fullpath).longpath dirname = op.dirname(fullpath) if not op.isdir(dirname): logger.warning( "Destination directory %s" "does not exist for upload %s", dirname, fullpath) self._on_upload_failed(upload_id) return try: try: logger.info("Moving downloaded file '%s' to '%s'...", tmp_fn, fullpath) # Create necessary directories make_dirs(fullpath) # Move file shutil.move(src=tmp_fn, dst=fullpath) except OSError as e: if e.errno != errno.EACCES: raise e logger.warning( "Can't move downloaded file '%s' into '%s' (%s)", tmp_fn, dirname, e) fullpath = get_next_name(fullpath) shutil.move(src=tmp_fn, dst=fullpath) except Exception as e: logger.error("Failed to move downloaded file '%s' into '%s' (%s)", tmp_fn, dirname, e) self._on_upload_failed(upload_id) return self.download_status.emit(*self._empty_progress, [{}, {}, [upload_id_str]], {}) self._on_upload_complete(upload_id) def on_signal_server_connect_cb(self): """ Callback handling connection to signal server """ self._on_server_connect_signal.emit() def _on_server_connect(self): """ Slot handling connection to signal server """ self._report_stored_uploads() def _check_upload_path(self, upload_id): logger.debug("_check_upload_path") upload_info = self.download_tasks_info[upload_id] uuid = upload_info['folder_uuid'] if not uuid: path = '' deleted = excluded = False else: try: with 
self._db.soft_lock(): path, deleted, excluded = self._db\ .get_folder_path_deleted_excluded_by_uuid( upload_info['folder_uuid']) except EventsDbBusy: logger.debug("Events db busy") path = deleted = excluded = None upload_name = upload_info['upload_name'] if path is None or deleted or excluded: reason_str = 'not synced' if path is None \ else 'deleted' if deleted else 'excluded' logger.warning("Can't upload file '%s' because dir %s is %s", upload_name, upload_info['folder_uuid'], reason_str) self._on_upload_failed(upload_id) if path is None: if upload_name not in self._uploads_not_synced: self.upload_folder_not_synced.emit(upload_name) self._uploads_not_synced.add(upload_name) elif deleted: if upload_name not in self._uploads_deleted: self.upload_folder_deleted.emit(upload_name) self._uploads_deleted.add(upload_name) else: if upload_name not in self._uploads_excluded: self.upload_folder_excluded.emit(upload_name) self._uploads_excluded.add(upload_name) else: self._uploads_deleted.discard(upload_name) self._uploads_excluded.discard(upload_name) self._uploads_not_synced.discard(upload_name) if deleted or excluded: path = None return path def _check_upload_paths(self): running_count = 0 for upload_id in self.download_tasks_info: path = self._check_upload_path(upload_id) if path is None: self.download_tasks_info[upload_id]['state'] = 'cancelled' else: running_count += 1 if running_count: self._check_upload_path_timer.start() def _get_download_info(self, upload_id, is_first_report=False): added_info = dict() changed_info = dict() obj_id = str(upload_id) task_info = self.download_tasks_info[upload_id] downloaded = task_info['loaded'] state = \ DOWNLOAD_STARTING if downloaded == 0 else \ DOWNLOAD_LOADING if downloaded < task_info['upload_size'] else \ DOWNLOAD_FINISHING short_info = { "state": state, "downloaded": downloaded, "priority": 0, "is_file": True } if is_first_report: files_info = [{ "target_file_path": task_info['upload_path'], "mtime": -1, # mtime < 0 => http 
download "is_created": None, "is_deleted": None }] added_info[obj_id] = \ {"files_info": files_info, "size": task_info['upload_size'],} added_info[obj_id].update(short_info) else: changed_info[obj_id] = short_info return added_info, changed_info def stop(self, cancel_downloads=False): self._was_stopped = True for upload_id in self.download_tasks_info: self.download_tasks_info[upload_id]['state'] = 'cancelled' \ if cancel_downloads or \ not self.download_tasks_info[upload_id]['size'] \ else 'paused' if self._check_upload_path_timer.isActive(): self._check_upload_path_timer.stop() # deleted_list = [ # str(u) for u in self.download_tasks_info # if self.download_tasks_info[u]['state'] == 'cancelled'] # if deleted_list: # self.download_status.emit( # *self._empty_progress, [{}, {}, deleted_list], {}) self.idle.emit() def start(self): if not self._was_stopped: return self._was_stopped = False for upload_id in self.download_tasks_info: info = self.download_tasks_info[upload_id] info['state'] = 'running' self._download(upload_id, info['tmp_fn'], (info['loaded'], info['size'] - 1)) if self.download_tasks_info: self.working.emit() self._check_upload_path_timer.start() self._uploads_deleted = \ self._uploads_excluded = \ self._uploads_not_synced = set() def exit(self): self.stop(True) self._downloader.close(immediately=False)
class MainWindow(QWidget):
    """Main control window for the PWR reactor remote control.

    Reads/writes the module-level globals ``host``, ``port``, ``url``,
    ``inputs``, ``outputs`` and ``disturbances``; polls the simulation
    server over HTTP on a QTimer and pushes operator set-points back.
    """

    def initUI(self):
        """Build the widget tree: connection bar, three process group
        boxes (core, pressurizer, steam generator) and chart checkboxes.
        UI strings are in Portuguese."""
        self.setWindowTitle("Controle Remoto do Reator PWR")
        self.setGeometry(200, 300, 500, 500)
        vBox = QVBoxLayout(self)
        # Connection bar: host / port inputs, status label, connect button
        hBox1 = QHBoxLayout()
        self.hostTxt = QLineEdit(host)
        self.portTxt = QLineEdit(str(port))
        self.btnConnect = QPushButton("Conectar")
        self.btnConnect.clicked.connect(self.on_btnConnect_clicked)
        hBox1.addWidget(QLabel("host:"))
        hBox1.addWidget(self.hostTxt)
        hBox1.addWidget(QLabel("Porta:"))
        hBox1.addWidget(self.portTxt)
        hBox1.addWidget(QLabel("Status:"))
        self.lbConnected = QLabel("Desconectado")
        hBox1.addWidget(self.lbConnected)
        hBox1.addWidget(self.btnConnect)
        #hBox1.setAlignment(Qt.AlignTop)
        vBox.addLayout(hBox1)
        hBox2 = QHBoxLayout()
        vBox.addLayout(hBox2)
        #hBox2.setAlignment(Qt.AlignTop)
        # Reactor core group
        grpCore = QGroupBox("Núcleo (R)")
        #grpCore.setSizePolicy(QSizePolicy.Ignored,QSizePolicy.Preferred)
        grpCoreLayout = QVBoxLayout()
        grpCore.setLayout(grpCoreLayout)
        inBarras = InputWidget("Barras (v_R)", "[cm]", 0.0)
        inBarras.c.inputValueChanged.connect(self.setRods)
        grpCoreLayout.addWidget(inBarras)
        self.outPotNucleo = OutputWidget("Pot Núcleo (W_R)", "[MW]", 1365.5)
        grpCoreLayout.addWidget(self.outPotNucleo)
        hBox2.addWidget(grpCore)
        # Pressurizer group
        grpPressurizer = QGroupBox("Pressurizador(PR)")
        grpPressurizerLayout = QVBoxLayout()
        grpPressurizer.setLayout(grpPressurizerLayout)
        inAquecPR = InputWidget("Pot Ent PR ", "(W_heat_PR)[kW]", 168.0)
        inAquecPR.c.inputValueChanged.connect(self.setHeater)
        grpPressurizerLayout.addWidget(inAquecPR)
        self.outPresPR = OutputWidget("Pres (p_PR)", "[bar]", 118.145)
        grpPressurizerLayout.addWidget(self.outPresPR)
        hBox2.addWidget(grpPressurizer)
        # Steam generator group
        grpSteam = QGroupBox("Gerador de Vapor (SG)")
        grpSteamLayout = QVBoxLayout()
        grpSteam.setLayout(grpSteamLayout)
        inVazSG = InputWidget("Vaz Liq SG (m_SG)", "[kg/s]", 119.3)
        inVazSG.c.inputValueChanged.connect(self.setSGFlow)
        grpSteamLayout.addWidget(inVazSG)
        self.outPotSG = OutputWidget("Pot Req (W_SG)", "[MW]", 1349)
        grpSteamLayout.addWidget(self.outPotSG)
        hBox2.addWidget(grpSteam)
        # Chart selection checkboxes
        lbGraf = QLabel("Gráficos")
        lbGraf.setAlignment(Qt.AlignHCenter)
        vBox.addWidget(lbGraf)
        vBox.addStretch(1)
        self.cbW_SG = QCheckBox("Potência no Gerador de Vapor (W_SG)")
        self.cbW_SG.setChecked(False)
        self.cbW_SG.stateChanged.connect(self.chart_W_SG)
        self.cbp_PR = QCheckBox("Pressão no Pressurizador(p_PR)")
        self.cbp_PR.stateChanged.connect(self.chart_p_PR)
        self.cbp_PR.setChecked(False)
        self.cbm_SG = QCheckBox("Vazão Líquida no Gerador de vapor(m_SG)")
        self.cbm_SG.stateChanged.connect(self.chart_m_SG)
        self.cbm_SG.setChecked(False)
        vBox.addWidget(self.cbW_SG)
        vBox.addWidget(self.cbp_PR)
        vBox.addWidget(self.cbm_SG)
        vBox.addStretch(1)

    def chart_p_PR(self, state):
        """Plot pressurizer pressure (row 1 of ``outputs``, Pa -> bar)
        against time when the checkbox is checked (blocking plt.show)."""
        if (state == Qt.Checked):
            global outputs
            # NOTE(review): rows -3/-2 presumably hold sample index and
            # time step, multiplied to get time -- confirm against the
            # decodeInputData layout
            t = outputs[-3, :] * outputs[-2, :]
            y = outputs[1, :] * 1e-5
            plt.plot(t, y)
            plt.show()
            #self.cbp_PR.setChecked(False)

    def chart_m_SG(self, state):
        """Plot steam generator liquid flow (row 1 of ``disturbances``)
        against time when the checkbox is checked (blocking plt.show)."""
        if (state == Qt.Checked):
            global disturbances
            t = disturbances[-3, :] * disturbances[-2, :]
            y = disturbances[1, :]
            plt.plot(t, y)
            plt.show()
            #self.cbm_SG.setChecked(False)

    def chart_W_SG(self, state):
        """Plot steam generator power (row 4 of ``outputs``, W -> MW)
        against time when the checkbox is checked (blocking plt.show)."""
        if (state == Qt.Checked):
            global outputs
            t = outputs[-3, :] * outputs[-2, :]
            y = outputs[4, :] * 1e-6
            plt.plot(t, y)
            plt.show()
            #self.cbW_SG.setChecked(False)

    def onTimer(self):
        """Periodic poll: fetch inputs/outputs/disturbances from the
        server, append them as new columns to the global matrices and
        refresh the output displays; on any connection failure, reset
        the UI to the disconnected state."""
        try:
            global url
            #print(url)
            ri = requests.get(url + 'inputs', timeout=reqTimeout)
            ro = requests.get(url + 'outputs', timeout=reqTimeout)
            rd = requests.get(url + 'disturbances', timeout=reqTimeout)
            #print(ri.text)
            di = decodeInputData(ri.text)
            global inputs, outputs, disturbances
            inputs = np.append(
                inputs, di,
                axis=1)  # appends one column to the inputs matrix
            do = decodeInputData(ro.text)
            outputs = np.append(outputs, do, axis=1)
            dd = decodeInputData(rd.text)
            disturbances = np.append(disturbances, dd, axis=1)
            self.outPotNucleo.out.display(do[3, 0] / 1e6)
            self.outPresPR.out.display(do[1, 0] / 1e5)
            # NOTE(review): siblings divide by 1e6/1e5 but this one
            # multiplies by 6e-6 -- confirm the intended scale factor
            self.outPotSG.out.display(do[4, 0] * 6e-6)
            print(inputs.shape)
        except (ConnectionError, requests.exceptions.RequestException):
            print("Não é possível conectar")
            self.lbConnected.setText("Falha de Conexão")
            self.btnConnect.setText("Conectar")
            self.connected = False
            return
        if ri.ok:
            self.lbConnected.setText("Conectado")
            self.btnConnect.setText("Desconectar")
            self.connected = True

    def testSignal(self, val):
        """Debug helper: print the received signal value."""
        print(val)

    # Row index maps for the matrices (index = position in each vector):
    #in #m_in=0, v_R, W_heat_PR, //ones,
    #dist # m_out=0, m_SG, M_SG, T_PC_I, T_SG_SW,
    #out #l_PR=0, p_PR, p_SG, W_R, W_SG,
    #sta #N=0, m_PR, M_PC, T_PC, T_PR, T_SG,
    def setRods(self, val):
        """POST the control rod position set-point (v_R) to the server.
        Only sends while connected."""
        if not self.connected:
            return
        print("set rods")
        # Payload shape: { "idx" : "v_R", "val":"12.3"}
        try:
            global url
            print(url)
            r = requests.post(url + 'input',
                              data={
                                  'idx': 'v_R',
                                  'val': str(val)
                              })
        except (ConnectionError, requests.exceptions.RequestException):
            print("erro ao enviar v_R")

    def setHeater(self, val):
        """POST the pressurizer heater power set-point (W_heat_PR).
        NOTE(review): unlike setRods, this does not check self.connected
        -- confirm whether that is intentional."""
        try:
            global url
            r = requests.post(url + 'input',
                              data={
                                  'idx': 'W_heat_PR',
                                  'val': str(val)
                              })
        except (ConnectionError, requests.exceptions.RequestException):
            print("erro ao enviar W_heat_PR")

    def setSGFlow(self, val):
        """POST the steam generator liquid flow disturbance (m_SG).
        NOTE(review): does not check self.connected either."""
        try:
            global url
            r = requests.post(url + 'disturbance',
                              data={
                                  'idx': 'm_SG',
                                  'val': str(val)
                              })
        except (ConnectionError, requests.exceptions.RequestException):
            print("erro ao enviar m_SG")

    def __init__(self):
        """Create the poll timer, build the UI and start disconnected."""
        super().__init__()
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.onTimer)
        self.initUI()
        self.connected = False

    def on_btnConnect_clicked(self):
        """Toggle the connection: stop polling when connected; otherwise
        rebuild the global ``url`` from the host/port fields and start
        the poll timer."""
        if (self.connected):
            self.timer.stop()
            self.btnConnect.setText("Conectar")
            self.connected = False
            return
        host = self.hostTxt.text()
        port = self.portTxt.text()
        global url
        url = 'http://' + str(host) + ':' + str(port) + '/'
        if not self.timer.isActive():
            self.timer.start(timerInterval)  # 500 milliseconds
class VoiceRecognitionService(QObject):
    """
    Voice Recognition service which takes a microphone input and converts
    it to text: Porcupine detects the wake keyword, then the configured
    backend (e.g. the Google Cloud Speech-to-Text API) recognises the
    spoken command.

    Configuration dictionary must contain the following keys:
        porcupine dynamic library path: \
            Porcupine/lib/<operating_system>/<processor_type>/<library_file>
        porcupine model file path: \
            Porcupine/lib/common/porcupine_params.pv
        porcupine keyword file: \
            Porcupine/resources/keyword_files/<operating_system>/<keyword>
    optional keys:
        google credentials file: json file with google cloud api credentials
        recogniser: api to use, options are sphinx, google, google_cloud, \
            bing, houdify, ibm, wit
        sphinx keywords: a list of keywords and sensitivities for sphinx
        timeout for command: default None
    """

    # Qt signals emitted during listening / recognition
    start_listen = Signal()
    stop_timer = Signal()
    google_api_not_understand = Signal()
    google_api_request_failure = Signal(str)
    voice_command = Signal(str)
    start_processing_request = Signal()

    def __init__(self, config: dict):
        """
        Construct the service.

        @param config configuration dictionary (see class docstring)
        @raises KeyError if a mandatory porcupine key is missing
        @raises ValueError if the google credentials file is not valid json
        """
        LOGGER.info("Creating Voice Recognition Service")
        # Need this for SignalInstance
        super().__init__()
        self.timeout_for_command = config.get("timeout for command", None)

        library_path = config.get("porcupine dynamic library path", None)
        if library_path is None:
            # Fix: the original passed two arguments to KeyError, which
            # makes the exception carry a tuple instead of one message.
            raise KeyError(
                "Config must contain porcupine dynamic library path")
        model_file_path = config.get("porcupine model file path", None)
        if model_file_path is None:
            raise KeyError("Config must contain porcupine model file path")
        keyword_file_paths = config.get("porcupine keyword file", None)
        if keyword_file_paths is None:
            raise KeyError("Config must contain porcupine keyword file")

        self.recogniser = config.get("recogniser", "sphinx")
        self.sphinx_keywords = config.get("sphinx keywords", None)
        self.recognizer = sr.Recognizer()
        sensitivities = config.get("sensitivities", [1.0])
        # Timer period in milliseconds for the keyword polling loop
        self.interval = config.get("interval", 10)
        self.handle = pvporcupine.create(library_path=library_path,
                                         model_path=model_file_path,
                                         keyword_paths=keyword_file_paths,
                                         sensitivities=sensitivities)
        audio = pyaudio.PyAudio()
        self.audio_stream = \
            audio.open(rate=self.handle.sample_rate,
                       channels=1,
                       format=pyaudio.paInt16,
                       input=True,
                       frames_per_buffer=self.handle.frame_length)

        # This is to add the credentials for the google cloud api:
        # set the environment variable GOOGLE_APPLICATION_CREDENTIALS
        # to the path of your json file with credentials.
        key_file_path = config.get('google credentials file', None)
        self.credentials = None
        if key_file_path is not None:
            with open(key_file_path, 'r') as file:
                self.credentials = file.read()
                # Raises a ValueError if the credential file isn't valid json
                json.loads(self.credentials)

        # Creating timer later, in the context of the running thread.
        self.timer = None
        LOGGER.info("Created Voice Recognition Service")

    def run(self):
        """
        Entry point for the QThread which starts the timer to listen
        in the background.
        """
        LOGGER.info("run method executed")
        # Creating the timer in the context of the running thread.
        self.timer = QTimer()
        self.timer.setInterval(self.interval)
        self.timer.timeout.connect(self.listen_for_keyword)
        self.stop_timer.connect(self.__stop)
        # start the timer to start the background listening
        self.timer.start()

    def request_stop(self):
        """
        Called by an external client to stop the timer; blocks until the
        timer is no longer active.
        """
        LOGGER.info("Requesting VoiceRecognitionService to stop timer.")
        self.stop_timer.emit()
        # Poll until the slot running in the service thread has stopped
        # the timer.
        QThread.msleep(self.interval * 3)
        while self.timer.isActive():
            QThread.msleep(self.interval * 3)
        LOGGER.info("Requested VoiceRecognitionService to stop timer.")

    @Slot()
    def __stop(self):
        """Stop the polling timer (runs in the service thread)."""
        LOGGER.info("Stopping VoiceRecognitionService timer.")
        self.timer.stop()
        QThread.msleep(self.interval * 3)
        LOGGER.info("Stopped VoiceRecognitionService timer.")

    def listen_for_keyword(self):
        """
        Timer slot called every ``self.interval`` milliseconds; reads one
        audio frame and feeds it to Porcupine to detect the wake keyword.
        """
        pcm = self.audio_stream.read(self.handle.frame_length)
        pcm = struct.unpack_from("h" * self.handle.frame_length, pcm)
        result = self.handle.process(pcm)
        if result >= 0:
            # when the keyword gets detected, the user can input a command
            LOGGER.info('[%s] detected keyword', str(datetime.now()))
            self.start_listen.emit()
            self.listen_to_command()

    def listen_to_command(self):
        """
        Called after the wake keyword is detected. Listens for a single
        spoken command and converts it to Qt signals.
        """
        # listen to a single command
        with sr.Microphone() as source:
            audio = self.recognizer \
                .listen(source, phrase_time_limit=self.timeout_for_command)
        try:
            # convert command to string,
            # this string should later be used to fire a certain GUI command
            self.start_processing_request.emit()
            words = self._recognise(audio)
            self.voice_command.emit(words)
        except sr.UnknownValueError:
            self.google_api_not_understand.emit()
        except sr.RequestError as exception:
            self.google_api_request_failure.emit(str(exception))

    def _recognise(self, audio):
        """
        Dispatch the captured audio to the configured backend.

        @param audio audio data captured by speech_recognition
        @return recognised text [str]
        @raises NotImplementedError for backends without credentials set up
        @raises ValueError for an unknown recogniser name
        """
        words = ""
        if self.recogniser == "sphinx":
            words = self.recognizer.recognize_sphinx(
                audio, keyword_entries=self.sphinx_keywords)
        elif self.recogniser == "google_cloud":
            words = self.recognizer.recognize_google_cloud(
                audio, credentials_json=self.credentials)
        elif self.recogniser == "google":
            words = self.recognizer.recognize_google(audio)
        elif self.recogniser == "bing":
            raise NotImplementedError("Key credentials for bing not set up")
            #something like this, but might need to change credentials
            #words = self.recognizer.recognize_bing(audio, key=self.credentials)
        elif self.recogniser == "houndify":
            raise NotImplementedError(
                "Key credentials for houndify not set up")
            #something like this, but might need to change credentials
            #words = self.recognizer.recognize_houndify(
            #    audio, client_id=self.credentials,
            #    client_key = self.credentials)
        elif self.recogniser == "ibm":
            raise NotImplementedError("Key credentials for ibm not set up")
            #something like this, but might need to change credentials
            #words = recognizer.recognize_ibm(
            #    audio, username=notset, password=notset)
        elif self.recogniser == "wit":
            raise NotImplementedError("Key credentials for wit not set up")
            #something like this, but might need to change credentials
            #words = self.recognizer.recognize_wit(audio, key=self.credentials)
        else:
            raise ValueError("Unrecognised recogniser", self.recogniser)
        return words