def HasVideoStream(path):
    try:
        info = Hydrusffmpeg_parse_infos(path)
    except IOError as e:
        HydrusData.ShowText('Determining the mime for the file at ' + path + ' caused the following problem:')
        HydrusData.ShowException(e)
        return False
    return info['video_found']
def LoadFromPng(path):
    # this is to deal with unicode paths, which cv2 can't handle
    (os_file_handle, temp_path) = ClientPaths.GetTempPath()
    try:
        shutil.copy2(path, temp_path)
        numpy_image = cv2.imread(temp_path, flags=IMREAD_UNCHANGED)
    except Exception as e:
        HydrusData.ShowException(e)
        raise Exception('That did not appear to be a valid image!')
    finally:
        HydrusPaths.CleanUpTempPath(os_file_handle, temp_path)
    try:
        (height, width) = numpy_image.shape
        complete_data = numpy_image.tostring()
        top_height_header = complete_data[:2]
        (top_height,) = struct.unpack('!H', top_height_header)
        full_payload_string = complete_data[width * top_height:]
        payload_length_header = full_payload_string[:4]
        (payload_length,) = struct.unpack('!I', payload_length_header)
        payload = full_payload_string[4:4 + payload_length]
    except Exception as e:
        HydrusData.ShowException(e)
        raise Exception('The image was fine, but it did not seem to have hydrus data encoded in it!')
    return payload
def REPEATINGWorkOnQueue(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._queue_repeating_job.Cancel()
            return
        queue_paused = self._queue_paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
        queue_good = not queue_paused
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = queue_good and page_shown and network_engine_good
    while ok_to_work:
        try:
            did_work = self._WorkOnQueue(page_key)
            if did_work:
                time.sleep(ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
            else:
                return
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._queue_repeating_job.Cancel()
                return
            queue_paused = self._queue_paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
            queue_good = not queue_paused
            page_shown = not HG.client_controller.PageClosedButNotDestroyed(page_key)
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = queue_good and page_shown and network_engine_good
def REPEATINGWorkOnChecker(self):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(self._page_key):
            self._checker_repeating_job.Cancel()
            return
        able_to_check = self._HasURL() and not self._checking_paused
        check_due = HydrusData.TimeHasPassed(self._next_check_time)
        no_delays = HydrusData.TimeHasPassed(self._no_work_until)
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
        time_to_check = able_to_check and check_due and no_delays and page_shown
    if time_to_check:
        try:
            self._CheckWatchableURL()
        except Exception as e:
            HydrusData.ShowException(e)
def MergeFile( source, dest ):
    if not os.path.isdir( source ):
        MakeFileWritable( source )
    if PathsHaveSameSizeAndDate( source, dest ):
        DeletePath( source )
    else:
        try:
            # this overwrites on conflict without hassle
            shutil.move( source, dest )
        except Exception as e:
            HydrusData.ShowText( 'Trying to move ' + source + ' to ' + dest + ' caused the following problem:' )
            HydrusData.ShowException( e )
            return False
    return True
def run(self):
    time.sleep(3)
    while True:
        if IsThreadShuttingDown():
            return
        try:
            self._callable(self._controller)
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.ShowText('Daemon ' + self._name + ' encountered an exception:')
            HydrusData.ShowException(e)
        if IsThreadShuttingDown():
            return
        self._event.wait(self._period)
        self._event.clear()
def THREADdo_it():
    def wx_code(mappings):
        if not self:
            return
        self._mappings = mappings
        for mapping in self._mappings:
            self._mappings_list_ctrl.Append(mapping, mapping)
        self._status_st.SetLabelText('')
    try:
        mappings = HydrusNATPunch.GetUPnPMappings()
    except Exception as e:
        HydrusData.ShowException(e)
        wx.CallAfter(wx.MessageBox, 'Could not load mappings:' + os.linesep * 2 + str(e))
        return
    wx.CallAfter(wx_code, mappings)
def run(self):
    time.sleep(3)
    while True:
        while self._queue.empty():
            if IsThreadShuttingDown():
                return
            self._event.wait(self._period)
            self._event.clear()
        items = []
        while not self._queue.empty():
            items.append(self._queue.get())
        try:
            self._callable(self._controller, items)
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.ShowException(e)
def run(self):
    while True:
        while self._queue.empty():
            if self._controller.ModelIsShutdown():
                return
            self._event.wait(1200)
            self._event.clear()
        try:
            (callable, args, kwargs) = self._queue.get()
            callable(*args, **kwargs)
            del callable
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.ShowException(e)
        time.sleep(0.00001)
def DeletePath( path ):
    if os.path.exists( path ):
        MakeFileWritable( path )
        try:
            if os.path.isdir( path ):
                shutil.rmtree( path )
            else:
                os.remove( path )
        except Exception as e:
            if 'Error 32' in HydrusData.ToUnicode( e ):
                # file in use by another process
                HydrusData.DebugPrint( 'Trying to delete ' + path + ' failed because it was in use by another process.' )
            else:
                HydrusData.ShowText( 'Trying to delete ' + path + ' caused the following error:' )
                HydrusData.ShowException( e )
def REPEATINGWorkOnChecker(self):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(self._page_key):
            self._checker_repeating_job.Cancel()
            return
        checking_paused = self._checking_paused or HG.client_controller.new_options.GetBoolean('pause_all_watcher_checkers')
        able_to_check = self._checking_status == ClientImporting.CHECKER_STATUS_OK and self._HasURL() and not checking_paused
        check_due = HydrusData.TimeHasPassed(self._next_check_time)
        no_delays = HydrusData.TimeHasPassed(self._no_work_until)
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        time_to_check = able_to_check and check_due and no_delays and page_shown and network_engine_good
    if time_to_check:
        try:
            self._CheckWatchableURL()
        except Exception as e:
            HydrusData.ShowException(e)
def HasVideoStream(path):
    try:
        info = Hydrusffmpeg_parse_infos(path)
    except IOError as e:
        HydrusData.ShowException(e)
        return False
    return info['video_found']
def REPEATINGWorkOnFiles(self):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(self._page_key):
            self._files_repeating_job.Cancel()
            return
        files_paused = self._files_paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
        work_pending = self._file_seed_cache.WorkToDo() and not files_paused
        no_delays = HydrusData.TimeHasPassed(self._no_work_until)
        page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = work_pending and no_delays and page_shown and network_engine_good
    while ok_to_work:
        try:
            self._WorkOnFiles()
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(self._page_key):
                self._files_repeating_job.Cancel()
                return
            files_paused = self._files_paused or HG.client_controller.new_options.GetBoolean('pause_all_file_queues')
            work_pending = self._file_seed_cache.WorkToDo() and not files_paused
            no_delays = HydrusData.TimeHasPassed(self._no_work_until)
            page_shown = not HG.client_controller.PageClosedButNotDestroyed(self._page_key)
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = work_pending and no_delays and page_shown and network_engine_good
def do_it():
    try:
        self._service.SyncAccount(force=True)
    except Exception as e:
        HydrusData.ShowException(e)
        wx.CallAfter(wx.MessageBox, HydrusData.ToUnicode(e))
    wx.CallAfter(self._Refresh)
def _SetError( self, e, error ):
    self._error_exception = e
    self._error_text = error
    if HG.network_report_mode:
        HydrusData.ShowText( 'Network error should follow:' )
        HydrusData.ShowException( e )
        HydrusData.ShowText( error )
    self._SetDone()
def REPEATINGWorkOnGallery(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._gallery_repeating_job.Cancel()
            return
        gallery_paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
        work_to_do = self._gallery_seed_log.WorkToDo() and not (gallery_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
        network_engine_good = not HG.client_controller.network_engine.IsBusy()
        ok_to_work = work_to_do and network_engine_good
    while ok_to_work:
        try:
            self._WorkOnGallery(page_key)
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._gallery_repeating_job.Cancel()
                return
            gallery_paused = self._paused or HG.client_controller.new_options.GetBoolean('pause_all_gallery_searches')
            work_to_do = self._gallery_seed_log.WorkToDo() and not (gallery_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
            network_engine_good = not HG.client_controller.network_engine.IsBusy()
            ok_to_work = work_to_do and network_engine_good
def GetFFMPEGVersion():
    # open the file in a pipe, provoke an error, read output
    cmd = [FFMPEG_PATH, '-version']
    try:
        proc = subprocess.Popen(
            cmd,
            bufsize=10**5,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            startupinfo=HydrusData.GetHideTerminalSubprocessStartupInfo())
    except Exception as e:
        if not os.path.exists(FFMPEG_PATH):
            return 'no ffmpeg found'
        else:
            HydrusData.ShowException(e)
            return 'unable to execute ffmpeg'
    infos = proc.stdout.read().decode('utf8')
    proc.terminate()
    del proc
    lines = infos.splitlines()
    if len(lines) > 0:
        # typically 'ffmpeg version [VERSION] Copyright ...'
        top_line = lines[0]
        if top_line.startswith('ffmpeg version '):
            top_line = top_line.replace('ffmpeg version ', '')
            if ' ' in top_line:
                version_string = top_line.split(' ')[0]
                return version_string
    return 'unknown'
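# Minimal usage sketch (not part of the original source): GetFFMPEGVersion above returns
# a version string on success, or the sentinel strings 'no ffmpeg found', 'unable to
# execute ffmpeg' or 'unknown'. The helper name _report_ffmpeg_version is hypothetical;
# HydrusData.Print is the same logging call used elsewhere in this section.
def _report_ffmpeg_version():
    version = GetFFMPEGVersion()
    HydrusData.Print('ffmpeg version: ' + version)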
def run(self):
    while True:
        try:
            while self._NoWorkToStart():
                if self._controller.ModelIsShutdown():
                    return
                #
                if self._cancel_filter_needed.is_set():
                    self._FilterCancelled()
                    self._cancel_filter_needed.clear()
                if self._sort_needed.is_set():
                    self._SortWaiting()
                    self._sort_needed.clear()
                    continue  # if some work is now due, let's do it!
                #
                wait_time = self._GetLoopWaitTime()
                self._new_job_arrived.wait(wait_time)
                self._new_job_arrived.clear()
            self._StartWork()
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.Print(traceback.format_exc())
            HydrusData.ShowException(e)
        time.sleep(0.00001)
def _Paste( self ):
    raw_text = HG.client_controller.GetClipboardText()
    try:
        tag_import_options = HydrusSerialisable.CreateFromString( raw_text )
        self._tag_import_options = tag_import_options
    except Exception as e:
        wx.MessageBox( 'I could not understand what was in the clipboard' )
        HydrusData.ShowException( e )
def DAEMONPubSub(self):
    while not HG.model_shutdown:
        if self._pubsub.WorkToDo():
            try:
                self.ProcessPubSub()
            except Exception as e:
                HydrusData.ShowException(e, do_wait=True)
        else:
            self._pubsub.WaitOnPub()
def REPEATINGWorkOnQueue(self, page_key):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking(page_key):
            self._queue_repeating_job.Cancel()
            return
        ok_to_work = not (self._queue_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
    while ok_to_work:
        try:
            did_work = self._WorkOnQueue(page_key)
            if did_work:
                time.sleep(ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
            else:
                return
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException(e)
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking(page_key):
                self._queue_repeating_job.Cancel()
                return
            ok_to_work = not (self._queue_paused or HG.client_controller.PageClosedButNotDestroyed(page_key))
def REPEATINGWorkOnFiles( self ):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking( self._page_key ):
            self._files_repeating_job.Cancel()
            return
        work_pending = self._file_seed_cache.WorkToDo() and not self._files_paused
        no_delays = HydrusData.TimeHasPassed( self._no_work_until )
        page_shown = not HG.client_controller.PageClosedButNotDestroyed( self._page_key )
        ok_to_work = work_pending and no_delays and page_shown
    while ok_to_work:
        try:
            self._WorkOnFiles()
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException( e )
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking( self._page_key ):
                self._files_repeating_job.Cancel()
                return
            work_pending = self._file_seed_cache.WorkToDo() and not self._files_paused
            no_delays = HydrusData.TimeHasPassed( self._no_work_until )
            page_shown = not HG.client_controller.PageClosedButNotDestroyed( self._page_key )
            ok_to_work = work_pending and no_delays and page_shown
def do_it(launch_path):
    if HC.PLATFORM_WINDOWS and launch_path is None:
        os.startfile(path)
    else:
        if launch_path is None:
            launch_path = GetDefaultLaunchPath()
        cmd = launch_path.replace('%path%', path)
        if HC.PLATFORM_WINDOWS:
            preexec_fn = None
        else:
            # setsid call un-childs this new process
            preexec_fn = os.setsid
        cmd = shlex.split(cmd)
        try:
            process = subprocess.Popen(
                cmd,
                preexec_fn=preexec_fn,
                startupinfo=HydrusData.GetHideTerminalSubprocessStartupInfo())
            process.wait()
            process.communicate()
        except Exception as e:
            HydrusData.ShowText('Could not launch a file! Command used was:' + os.linesep + HydrusData.ToUnicode(cmd))
            HydrusData.ShowException(e)
def DumpToPng(width, payload, title, payload_description, text, path):
    payload_length = len(payload)
    payload_string_length = payload_length + 4
    payload_height = int(float(payload_string_length) / width)
    if float(payload_string_length) / width % 1.0 > 0:
        payload_height += 1
    top_image = CreateTopImage(width, title, payload_description, text)
    payload_length_header = struct.pack('!I', payload_length)
    num_empty_bytes = payload_height * width - payload_string_length
    full_payload_string = payload_length_header + payload + '\x00' * num_empty_bytes
    payload_image = numpy.fromstring(full_payload_string, dtype='uint8').reshape((payload_height, width))
    finished_image = numpy.concatenate((top_image, payload_image))
    # this is to deal with unicode paths, which cv2 can't handle
    (os_file_handle, temp_path) = ClientPaths.GetTempPath(suffix='.png')
    try:
        cv2.imwrite(temp_path, finished_image, [cv2.IMWRITE_PNG_COMPRESSION, 9])
        shutil.copy2(temp_path, path)
    except Exception as e:
        HydrusData.ShowException(e)
        raise Exception('Could not save the png!')
    finally:
        HydrusPaths.CleanUpTempPath(os_file_handle, temp_path)
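# A minimal round-trip sketch (an assumption, not part of the original source): DumpToPng
# above writes a payload into a png and LoadFromPng in this section reads it back. It
# assumes both functions are imported together with their module-level dependencies
# (cv2, numpy, struct, CreateTopImage, ClientPaths, HydrusPaths). The function name,
# width, title strings and output path below are illustrative only; the payload is a
# Python 2-era byte string, matching the string concatenation used in DumpToPng.
def _example_png_round_trip():
    payload = 'example serialised payload'
    DumpToPng(512, payload, 'example title', 'example description', 'example text', 'payload.png')
    recovered = LoadFromPng('payload.png')
    assert recovered == payload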
def MirrorFile(source, dest):
    if not PathsHaveSameSizeAndDate(source, dest):
        try:
            shutil.copy2(source, dest)
        except Exception as e:
            HydrusData.ShowText('Trying to copy ' + source + ' to ' + dest + ' caused the following problem:')
            HydrusData.ShowException(e)
            return False
    return True
def run(self):
    while True:
        try:
            while self._queue.empty():
                if IsThreadShuttingDown():
                    return
                self._event.wait(1200)
                self._event.clear()
            self._DoPreCall()
            (callable, args, kwargs) = self._queue.get()
            self._callable = (callable, args, kwargs)
            callable(*args, **kwargs)
            self._callable = None
            del callable
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.Print(traceback.format_exc())
            HydrusData.ShowException(e)
        finally:
            self._currently_working = False
        time.sleep(0.00001)
def run( self ):
    self._event.wait( self._init_wait )
    while True:
        if IsThreadShuttingDown():
            return
        time_started_waiting = HydrusData.GetNow()
        while not self._CanStart( time_started_waiting ):
            time.sleep( 1 )
            if IsThreadShuttingDown():
                return
        try:
            self._callable( self._controller )
        except HydrusExceptions.ShutdownException:
            return
        except Exception as e:
            HydrusData.ShowText( 'Daemon ' + self._name + ' encountered an exception:' )
            HydrusData.ShowException( e )
        if IsThreadShuttingDown():
            return
        self._event.wait( self._period )
        self._event.clear()
def REPEATINGWorkOnFiles( self, page_key ):
    with self._lock:
        if ClientImporting.PageImporterShouldStopWorking( page_key ):
            self._files_repeating_job.Cancel()
            return
        paused = self._paused or HG.client_controller.new_options.GetBoolean( 'pause_all_file_queues' )
        work_to_do = self._file_seed_cache.WorkToDo() and not ( paused or HG.client_controller.PageClosedButNotDestroyed( page_key ) )
    while work_to_do:
        try:
            self._WorkOnFiles( page_key )
            HG.client_controller.WaitUntilViewFree()
        except Exception as e:
            HydrusData.ShowException( e )
        with self._lock:
            if ClientImporting.PageImporterShouldStopWorking( page_key ):
                self._files_repeating_job.Cancel()
                return
            paused = self._paused or HG.client_controller.new_options.GetBoolean( 'pause_all_file_queues' )
            work_to_do = self._file_seed_cache.WorkToDo() and not ( paused or HG.client_controller.PageClosedButNotDestroyed( page_key ) )
def DeletePath(path):
    if os.path.exists(path):
        MakeFileWritable(path)
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except Exception as e:
            HydrusData.ShowText('Trying to delete ' + path + ' caused the following error:')
            HydrusData.ShowException(e)
def MirrorFile(source, dest):
    if not PathsHaveSameSizeAndDate(source, dest):
        try:
            MakeFileWritable(dest)
            # this overwrites on conflict without hassle
            shutil.copy2(source, dest)
        except Exception as e:
            HydrusData.ShowText('Trying to copy ' + source + ' to ' + dest + ' caused the following problem:')
            HydrusData.ShowException(e)
            return False
    return True