def REPEATINGWorkOnQueue(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._queue_repeating_job.Cancel()
            return
        queue_paused = self._queue_paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
        queue_good = not queue_paused
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = queue_good and page_shown and network_engine_good
    # keep working while conditions allow, re-checking the stop/pause state under the lock after every pass
    while ok_to_work:
        try:
            did_work = self._WorkOnQueue(page_key)
            if did_work:
                time.sleep(ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
            else:
                return
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._queue_repeating_job.Cancel()
                return
            queue_paused = self._queue_paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
            queue_good = not queue_paused
            page_shown = not HG.client_controller.PageClosedButNotDestroyed(page_key)
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = queue_good and page_shown and network_engine_good
def MergeFile(source, dest):
    if not os.path.isdir(source):
        MakeFileWritable(source)
        if PathsHaveSameSizeAndDate(source, dest):
            DeletePath(source)
        else:
            try:
                # this overwrites on conflict without hassle
                shutil.move(source, dest)
            except Exception as e:
                HydrusData.ShowText('Trying to move ' + source + ' to ' + dest + ' caused the following problem:')
                HydrusData.ShowException(e)
                return False
    return True
def GetMediaBackgroundPixmap(self):
    pixmap_path = self._controller.new_options.GetNoneableString('media_background_bmp_path')
    if pixmap_path != self._media_background_pixmap_path:
        self._media_background_pixmap_path = pixmap_path
        try:
            self._media_background_pixmap = QG.QPixmap(self._media_background_pixmap_path)
        except Exception as e:
            self._media_background_pixmap = None
            HydrusData.ShowText('Loading a bmp caused an error!')
            HydrusData.ShowException(e)
            return None
    return self._media_background_pixmap
def MergeFile(source, dest):
    # this can merge a file, but if it is given a dir it will just straight up overwrite not merge
    if not os.path.isdir(source):
        MakeFileWriteable(source)
        if PathsHaveSameSizeAndDate(source, dest):
            DeletePath(source)
            return True
    try:
        # this overwrites on conflict without hassle
        shutil.move(source, dest, copy_function=safe_copy2)
    except Exception as e:
        HydrusData.ShowText('Trying to move ' + source + ' to ' + dest + ' caused the following problem:')
        HydrusData.ShowException(e)
        return False
    return True
def EventCopyExternalShareURL(self):
    internal_port = self._service.GetPort()
    if internal_port is None:
        QW.QMessageBox.warning(self, 'Warning', 'The local booru is not currently running!')
    try:
        url = self._service.GetExternalShareURL(self._share_key)
    except Exception as e:
        HydrusData.ShowException(e)
        QW.QMessageBox.critical(self, 'Error', 'Unfortunately, could not generate an external URL: {}'.format(e))
        return
    HG.client_controller.pub('clipboard', 'text', url)
def CommitChanges(self):
    services = self._services_listctrl.GetData()
    try:
        response = self._clientside_admin_service.Request(HC.POST, 'services', {'services': services})
    except Exception as e:
        HydrusData.ShowException(e)
        raise HydrusExceptions.VetoException('There was an error: {}'.format(str(e)))
    service_keys_to_access_keys = dict(response['service_keys_to_access_keys'])
    admin_service_key = self._clientside_admin_service.GetServiceKey()
    with HG.dirty_object_lock:
        HG.client_controller.WriteSynchronous('update_server_services', admin_service_key, services, service_keys_to_access_keys, self._deletee_service_keys)
        HG.client_controller.RefreshServices()
def REPEATINGWorkOnChecker(self):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(self._page_key):
            self._checker_repeating_job.Cancel()
            return
        checking_paused = self._checking_paused or HG.client_controller.new_options.GetBoolean('pause_all_watcher_checkers')
        able_to_check = self._checking_status == ClientImporting.CHECKER_STATUS_OK and self._HasURL() and not checking_paused
        check_due = HydrusData.TimeHasPassed(self._next_check_time)
        no_delays = HydrusData.TimeHasPassed(self._no_work_until)
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        time_to_check = able_to_check and check_due and no_delays and page_shown and network_engine_good
    if time_to_check:
        try:
            self._CheckWatchableURL()
        except Exception as e:
            HydrusData.ShowException(e)
def REPEATINGWorkOnFiles(self):
    while True:
        try:
            try:
                self.CheckCanDoFileWork()
            except HydrusExceptions.VetoException as e:
                with self._lock:
                    self._files_status = str(e)
                break
            self._WorkOnFiles()
            HG.client_controller.WaitUntilViewFree()
            self._SerialisableChangeMade()
        except Exception as e:
            with self._lock:
                self._files_status = 'stopping work: {}'.format(str(e))
            HydrusData.ShowException(e)
            return
def run(self):
    try:
        while True:
            while self._queue.empty():
                CheckIfThreadShuttingDown()
                self._event.wait(10.0)
                self._event.clear()
            CheckIfThreadShuttingDown()
            try:
                try:
                    (callable, args, kwargs) = self._queue.get(1.0)
                except queue.Empty:
                    # https://github.com/hydrusnetwork/hydrus/issues/750
                    # this shouldn't happen, but...
                    # even if we assume we'll never get this, we don't want to make a business of hanging forever on things
                    continue
                self._DoPreCall()
                self._callable = (callable, args, kwargs)
                callable(*args, **kwargs)
                self._callable = None
                del callable
            except HydrusExceptions.ShutdownException:
                return
            except Exception as e:
                HydrusData.Print(traceback.format_exc())
                HydrusData.ShowException(e)
            finally:
                self._currently_working = False
            time.sleep(0.00001)
    except HydrusExceptions.ShutdownException:
        return
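# A minimal producer-side sketch to pair with the consumer loop above, assuming the same
# self._queue and self._event attributes; the method name put_job is a hypothetical
# illustration, not the source's actual API.
def put_job(self, job_callable, *args, **kwargs):
    # enqueue the work tuple in the shape the run() loop unpacks...
    self._queue.put((job_callable, args, kwargs))
    # ...then wake the worker so it does not sit out the full 10-second wait
    self._event.set()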
def handle_e(page_tuple, e):
    HydrusData.ShowText('Attempting to save a page to the session failed! Its data tuple and error follows! Please close it or see if you can clear any potentially invalid data from it!')
    HydrusData.ShowText(page_tuple)
    HydrusData.ShowException(e)
def handle_e(serialisable_page_tuple, e):
    HydrusData.ShowText('A page failed to load! Its serialised data and error follows!')
    HydrusData.ShowText(serialisable_page_tuple)
    HydrusData.ShowException(e)
def SetHashedJSONDumps(self, hashes_to_objs):
    for (hash, obj) in hashes_to_objs.items():
        if self.HaveHashedJSONDump(hash):
            continue
        (dump_type, version, serialisable_info) = obj.GetSerialisableTuple()
        try:
            dump = json.dumps(serialisable_info)
        except Exception as e:
            HydrusData.ShowException(e)
            HydrusData.Print(obj)
            HydrusData.Print(serialisable_info)
            raise Exception('Trying to json dump the hashed object ' + str(obj) + ' caused an error. Its serialisable info has been dumped to the log.')
        maintenance_tracker = MaintenanceTracker.instance()
        maintenance_tracker.RegisterNewHashedSerialisable(len(dump))
        dump_buffer = GenerateBigSQLiteDumpBuffer(dump)
        try:
            self._c.execute('INSERT INTO json_dumps_hashed ( hash, dump_type, version, dump ) VALUES ( ?, ?, ?, ? );', (sqlite3.Binary(hash), dump_type, version, dump_buffer))
        except:
            HydrusData.DebugPrint(dump)
            HydrusData.ShowText('Had a problem saving a hashed JSON object. The dump has been printed to the log.')
            try:
                HydrusData.Print('Dump had length {}!'.format(HydrusData.ToHumanBytes(len(dump_buffer))))
            except:
                pass
            raise
def run( self ) -> None:
    while True:
        try:
            while self._NoWorkToStart():
                if IsThreadShuttingDown():
                    return
                #
                if self._cancel_filter_needed.is_set():
                    self._FilterCancelled()
                    self._cancel_filter_needed.clear()
                if self._sort_needed.is_set():
                    self._SortWaiting()
                    self._sort_needed.clear()
                    continue # if some work is now due, let's do it!
                #
                wait_time = self._GetLoopWaitTime()
                self._new_job_arrived.wait( wait_time )
                self._new_job_arrived.clear()
            self._StartWork()
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.Print( traceback.format_exc() )
            HydrusData.ShowException( e )
        time.sleep( 0.00001 )
def REPEATINGWorkOnFiles(self):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(self._page_key):
            self._files_repeating_job.Cancel()
            return
        files_paused = self._files_paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
        work_pending = self._file_seed_cache.WorkToDo() and not files_paused
        no_delays = HydrusData.TimeHasPassed(self._no_work_until)
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = work_pending and no_delays and page_shown and network_engine_good
    while ok_to_work:
        try:
            self._WorkOnFiles()
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(self._page_key):
                self._files_repeating_job.Cancel()
                return
            files_paused = self._files_paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
            work_pending = self._file_seed_cache.WorkToDo() and not files_paused
            no_delays = HydrusData.TimeHasPassed(self._no_work_until)
            page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = work_pending and no_delays and page_shown and network_engine_good
def run( self ):
    try:
        while True:
            while self._queue.empty():
                CheckIfThreadShuttingDown()
                self._event.wait( 10.0 )
                self._event.clear()
            CheckIfThreadShuttingDown()
            self._DoPreCall()
            try:
                ( callable, args, kwargs ) = self._queue.get()
                self._callable = ( callable, args, kwargs )
                callable( *args, **kwargs )
                self._callable = None
                del callable
            except HydrusExceptions.ShutdownException:
                return
            except Exception as e:
                HydrusData.Print( traceback.format_exc() )
                HydrusData.ShowException( e )
            finally:
                self._currently_working = False
            time.sleep( 0.00001 )
    except HydrusExceptions.ShutdownException:
        return
def work_callable():
    try:
        mappings = HydrusNATPunch.GetUPnPMappings()
    except Exception as e:
        HydrusData.ShowException(e)
        # hand the exception back so the caller can tell the fetch failed
        return e
    return mappings
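# A hedged sketch of the matching result handler, assuming the async job machinery delivers
# work_callable's return value unchanged: a success hands back the mappings, a failure hands
# back the caught exception object, so an isinstance check distinguishes the two. The name
# publish_callable is an assumption here, not confirmed by the source.
def publish_callable(result):
    if isinstance(result, Exception):
        # the error was already surfaced via HydrusData.ShowException in work_callable
        return
    mappings = result
    # ... proceed to display or otherwise process the mappings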
def REPEATINGWorkOnGallery(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._gallery_repeating_job.Cancel()
            return
        gallery_paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
        work_to_do = self._gallery_seed_log.WorkToDo() and not (gallery_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = work_to_do and network_engine_good
    while ok_to_work:
        try:
            self._WorkOnGallery(page_key)
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._gallery_repeating_job.Cancel()
                return
            gallery_paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
            work_to_do = self._gallery_seed_log.WorkToDo() and not (gallery_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = work_to_do and network_engine_good
def REPEATINGWorkOnFiles(self, page_key):
    while self.CanDoFileWork(page_key):
        try:
            self._WorkOnFiles(page_key)
            HG.client_controller.WaitUntilViewFree()
            self._SerialisableChangeMade()
        except Exception as e:
            HydrusData.ShowException(e)
def DoWork(self):
    regular_run_due = self._run_regularly and HydrusData.TimeHasPassed(self._last_checked + self._period)
    good_to_go = (regular_run_due or self._run_now) and not self._paused
    if not good_to_go:
        return
    try:
        if self._path == '':
            raise Exception('No path set for the folder!')
        if not os.path.exists(self._path):
            raise Exception('The path, "{}", does not exist!'.format(self._path))
        if not os.path.isdir(self._path):
            raise Exception('The path, "{}", is not a directory!'.format(self._path))
        self._DoExport()
        self._last_error = ''
    except Exception as e:
        self._paused = True
        HydrusData.ShowText('The export folder "' + self._name + '" encountered an error! It has now been paused. Please check the folder\'s settings and maybe report to hydrus dev if the error is complicated! The error follows:')
        HydrusData.ShowException(e)
        self._last_error = str(e)
    finally:
        self._last_checked = HydrusData.GetNow()
        self._run_now = False
        HG.client_controller.WriteSynchronous('serialisable', self)
def THREADdo_it():
    try:
        mappings = HydrusNATPunch.GetUPnPMappings()
    except Exception as e:
        HydrusData.ShowException( e )
        QP.CallAfter( QW.QMessageBox.critical, self, 'Error', 'Could not load mappings:'+os.linesep*2+str(e) )
        return
    QP.CallAfter( qt_code, mappings )
def DequantizePILImage(pil_image: PILImage.Image) -> PILImage.Image:
    if HasICCProfile(pil_image):
        try:
            pil_image = NormaliseICCProfilePILImageToSRGB(pil_image)
        except Exception as e:
            HydrusData.ShowException(e)
            HydrusData.ShowText('Failed to normalise image ICC profile.')
    pil_image = NormalisePILImageToRGB(pil_image)
    return pil_image
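# A hedged sketch of what an ICC-profile check such as HasICCProfile might look like, using
# PIL's standard 'icc_profile' entry in the image info dict; this is an illustration under
# that assumption, not the source's actual implementation.
def has_icc_profile(pil_image: PILImage.Image) -> bool:
    # PIL exposes an embedded ICC profile, when present, as raw bytes in image.info
    return bool(pil_image.info.get('icc_profile'))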
def DAEMONPubSub(self):
    while not HG.model_shutdown:
        if self._pubsub.WorkToDo():
            try:
                self._pubsub.Process()
            except Exception as e:
                HydrusData.ShowException(e, do_wait=True)
        else:
            self._pubsub.WaitOnPub()
def DumpToPNG(width, payload_bytes, title, payload_description, text, path):
    payload_bytes_length = len(payload_bytes)
    header_and_payload_bytes_length = payload_bytes_length + 4
    payload_height = int(header_and_payload_bytes_length / width)
    if (header_and_payload_bytes_length / width) % 1.0 > 0:
        payload_height += 1
    top_image = CreateTopImage(width, title, payload_description, text)
    payload_length_header = struct.pack('!I', payload_bytes_length)
    num_empty_bytes = payload_height * width - header_and_payload_bytes_length
    header_and_payload_bytes = payload_length_header + payload_bytes + b'\x00' * num_empty_bytes
    payload_image = numpy.fromstring(header_and_payload_bytes, dtype='uint8').reshape((payload_height, width))
    finished_image = numpy.concatenate((top_image, payload_image))
    # this is to deal with unicode paths, which cv2 can't handle
    (os_file_handle, temp_path) = HydrusTemp.GetTempPath(suffix='.png')
    try:
        cv2.imwrite(temp_path, finished_image, [cv2.IMWRITE_PNG_COMPRESSION, 9])
        HydrusPaths.MirrorFile(temp_path, path)
    except Exception as e:
        HydrusData.ShowException(e)
        raise Exception('Could not save the png!')
    finally:
        HydrusTemp.CleanUpTempPath(os_file_handle, temp_path)
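# DumpToPNG above frames the payload as a 4-byte big-endian length header, the payload itself,
# and zero-padding to fill the final row of width pixels. A minimal sketch of inverting that
# framing from the flattened payload rows; parse_payload_rows is a hypothetical helper name,
# not part of the source.
import struct

def parse_payload_rows(payload_rows_bytes: bytes) -> bytes:
    # the first four bytes give the true payload length, everything past that is padding
    (payload_bytes_length,) = struct.unpack('!I', payload_rows_bytes[:4])
    return payload_rows_bytes[4:4 + payload_bytes_length]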
def UpdateConf(self):
    mpv_config_path = HG.client_controller.GetMPVConfPath()
    if not os.path.exists(mpv_config_path):
        default_mpv_config_path = HG.client_controller.GetDefaultMPVConfPath()
        if not os.path.exists(default_mpv_config_path):
            HydrusData.ShowText('There is no default mpv configuration file to load! Perhaps there is a problem with your install?')
            return
        else:
            HydrusPaths.MirrorFile(default_mpv_config_path, mpv_config_path)
    # To load an existing config file (by default it doesn't load the user/global config like standalone mpv does):
    load_f = getattr(mpv, '_mpv_load_config_file', None)
    if load_f is not None and callable(load_f):
        try:
            load_f(self._player.handle, mpv_config_path.encode('utf-8')) # pylint: disable=E1102
        except Exception as e:
            HydrusData.ShowText('MPV could not load its configuration file! This was probably due to an invalid parameter value inside the conf. The error follows:')
            HydrusData.ShowException(e)
    else:
        HydrusData.Print('Was unable to load mpv.conf--has the MPV API changed?')
def LoadFromPNG( path ):
    # this is to deal with unicode paths, which cv2 can't handle
    ( os_file_handle, temp_path ) = HydrusTemp.GetTempPath()
    try:
        HydrusPaths.MirrorFile( path, temp_path )
        try:
            # unchanged because we want exact byte data, no conversions or other gubbins
            numpy_image = cv2.imread( temp_path, flags = IMREAD_UNCHANGED )
            if numpy_image is None:
                raise Exception()
        except Exception as e:
            try:
                # dequantize = False because we don't want to convert to RGB
                pil_image = HydrusImageHandling.GeneratePILImage( temp_path, dequantize = False )
                numpy_image = HydrusImageHandling.GenerateNumPyImageFromPILImage( pil_image )
            except Exception as e:
                HydrusData.ShowException( e )
                raise Exception( '"{}" did not appear to be a valid image!'.format( path ) )
    finally:
        HydrusTemp.CleanUpTempPath( os_file_handle, temp_path )
    return LoadFromNumPyImage( numpy_image )
def REPEATINGWorkOnFiles(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._files_repeating_job.Cancel()
            return
        paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
        work_to_do = self._file_seed_cache.WorkToDo() and not (paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
    while work_to_do:
        try:
            self._WorkOnFiles(page_key)
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._files_repeating_job.Cancel()
                return
            paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
            work_to_do = self._file_seed_cache.WorkToDo() and not (paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
def MirrorFile(source, dest):
    if not PathsHaveSameSizeAndDate(source, dest):
        try:
            MakeFileWriteable(dest)
            safe_copy2(source, dest)
        except Exception as e:
            HydrusData.ShowText('Trying to copy ' + source + ' to ' + dest + ' caused the following problem:')
            HydrusData.ShowException(e)
            return False
    return True
def run( self ) -> None:
    try:
        self._DoAWait( self._init_wait )
        while True:
            CheckIfThreadShuttingDown()
            self._DoAWait( self._pre_call_wait, event_can_wake = False )
            CheckIfThreadShuttingDown()
            self._WaitUntilCanStart()
            CheckIfThreadShuttingDown()
            self._DoPreCall()
            try:
                self._callable( self._controller )
            except HydrusExceptions.ShutdownException:
                return
            except Exception as e:
                HydrusData.ShowText( 'Daemon ' + self._name + ' encountered an exception:' )
                HydrusData.ShowException( e )
            self._DoAWait( self._period )
    except HydrusExceptions.ShutdownException:
        return
def MirrorFile(source, dest):
    if not PathsHaveSameSizeAndDate(source, dest):
        try:
            MakeFileWritable(dest)
            # this overwrites on conflict without hassle
            shutil.copy2(source, dest)
        except Exception as e:
            HydrusData.ShowText('Trying to copy ' + source + ' to ' + dest + ' caused the following problem:')
            HydrusData.ShowException(e)
            return False
    return True
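# Both MirrorFile variants skip the copy when PathsHaveSameSizeAndDate reports the files
# already match. A hedged sketch of what such a check might do with os.stat, comparing size
# and modified time; this is an illustration, not the source's actual helper.
import os

def paths_have_same_size_and_date(source: str, dest: str) -> bool:
    if not os.path.exists(dest):
        return False
    source_stat = os.stat(source)
    dest_stat = os.stat(dest)
    # compare byte size and whole-second mtime, which is enough to spot an unchanged copy
    return source_stat.st_size == dest_stat.st_size and int(source_stat.st_mtime) == int(dest_stat.st_mtime)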
def ProcessForceLogins():
    if len(self._domains_to_login) > 0 and self._current_login_process is None:
        try:
            login_domain = self._domains_to_login.pop(0)
            login_process = self.login_manager.GenerateLoginProcessForDomain(login_domain)
        except Exception as e:
            HydrusData.ShowException(e)
            return
        self.controller.CallToThread(login_process.Start)
        self._current_login_process = login_process