def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
    """Step this object's serialised tuple from version 1 up to version 2.
    
    v1 -> v2: the old info was just the watcher list; v2 adds a highlighted
    watcher key and page-wide checker/file/tag import options.
    """
    
    if version == 1:
        serialisable_watchers = old_serialisable_info
        
        try:
            # prefer the client's configured defaults when a controller is available
            checker_options = HG.client_controller.new_options.GetDefaultWatcherCheckerOptions()
            file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
            tag_import_options = ClientImportOptions.TagImportOptions( is_default = True )
            
        except Exception:
            # was a bare 'except:', which would also swallow KeyboardInterrupt/SystemExit;
            # fall back to vanilla option objects when no controller defaults are reachable
            checker_options = ClientImportOptions.CheckerOptions()
            file_import_options = ClientImportOptions.FileImportOptions()
            tag_import_options = ClientImportOptions.TagImportOptions()
            
        
        serialisable_checker_options = checker_options.GetSerialisableTuple()
        serialisable_file_import_options = file_import_options.GetSerialisableTuple()
        serialisable_tag_import_options = tag_import_options.GetSerialisableTuple()
        
        # no watcher highlighted in a freshly-updated page
        highlighted_watcher_key = None
        
        serialisable_highlighted_watcher_key = highlighted_watcher_key
        
        new_serialisable_info = ( serialisable_watchers, serialisable_highlighted_watcher_key, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options )
        
        return ( 2, new_serialisable_info )
def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):
    """Round-trip a Subscription through serialisation, both fresh and fully populated."""
    
    def test( obj, dupe_obj ):
        # field-by-field comparison of the original against its dump/load duplicate
        self.assertEqual( obj.GetName(), dupe_obj.GetName() )
        
        self.assertEqual( obj._gug_key_and_name, dupe_obj._gug_key_and_name )
        
        self.assertEqual( len( obj._queries ), len( dupe_obj._queries ) )
        self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
        self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
        self.assertEqual( obj._paused, dupe_obj._paused )
        
        # options objects are compared via their serialised tuples
        self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
        self.assertEqual( obj._tag_import_options.GetSerialisableTuple(), dupe_obj._tag_import_options.GetSerialisableTuple() )
        
        self.assertEqual( obj._no_work_until, dupe_obj._no_work_until )
        
    
    # a default-constructed subscription should round-trip cleanly
    sub = ClientImportSubscriptions.Subscription( 'test sub' )
    
    self._dump_and_load_and_test( sub, test )
    
    # now populate non-default state and round-trip again
    gug_key_and_name = ( HydrusData.GenerateKey(), 'muh test gug' )
    
    queries = [ ClientImportSubscriptionQuery.SubscriptionQuery( 'test query' ), ClientImportSubscriptionQuery.SubscriptionQuery( 'test query 2' ) ]
    checker_options = ClientImportOptions.CheckerOptions()
    initial_file_limit = 100
    periodic_file_limit = 50
    paused = False
    
    file_import_options = ClientImportOptions.FileImportOptions()
    
    service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_tags = False, additional_tags = { 'test additional tag', 'and another' } )
    
    tag_import_options = ClientImportOptions.TagImportOptions( service_keys_to_service_tag_import_options = { HydrusData.GenerateKey() : service_tag_import_options } )
    
    # 20 days in the past, so the sub is clear to work
    no_work_until = HydrusData.GetNow() - 86400 * 20
    
    sub.SetTuple( gug_key_and_name, checker_options, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until )
    
    sub.SetQueries( queries )
    
    self.assertEqual( sub.GetGUGKeyAndName(), gug_key_and_name )
    self.assertEqual( sub.GetTagImportOptions(), tag_import_options )
    self.assertEqual( sub.GetQueries(), queries )
    
    # PauseResume should toggle the paused flag each call
    self.assertEqual( sub._paused, False )
    sub.PauseResume()
    self.assertEqual( sub._paused, True )
    sub.PauseResume()
    self.assertEqual( sub._paused, False )
    
    self._dump_and_load_and_test( sub, test )
def _import_and_find_dupes(self):
    """Fake-import every test hash with the same perceptual hash, then run the
    similar-files maintenance so the db pairs them up as potential duplicates."""
    
    shared_phash = os.urandom(8)
    
    # one common set of fake file metadata for all imports:
    # (size, mime, width, height, duration, num_frames, has_audio, num_words)
    file_info = (65535, HC.IMAGE_JPEG, 640, 480, None, None, False, None)
    
    for file_hash in self._all_hashes:
        
        job = ClientImportFileSeeds.FileImportJob('fake path')
        
        job._hash = file_hash
        job._file_info = file_info
        job._extra_hashes = (b'abcd', b'abcd', b'abcd')
        job._phashes = [shared_phash]
        job._file_import_options = ClientImportOptions.FileImportOptions()
        
        self._write('import_file', job)
        
    
    # rebuild the similar-files tree, then search it for potential duplicates
    self._write('maintain_similar_files_tree')
    self._write('maintain_similar_files_search_for_potential_duplicates', 0)
def _UpdateSerialisableInfo(self, version, old_serialisable_info):
    # Steps this object's serialised tuple up through historical versions; each
    # branch converts version n's tuple to n+1's and returns it.
    
    if version == 1:
        # v1 -> v2: a gallery seed log was added alongside the file seed cache
        (serialisable_file_seed_cache, serialisable_file_import_options, paused) = old_serialisable_info
        
        gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
        
        serialisable_gallery_seed_log = gallery_seed_log.GetSerialisableTuple()
        
        new_serialisable_info = (serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_file_import_options, paused)
        
        return (2, new_serialisable_info)
        
    
    if version == 2:
        # v2 -> v3: default tag import options were added
        (serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_file_import_options, paused) = old_serialisable_info
        
        tag_import_options = ClientImportOptions.TagImportOptions(is_default=True)
        
        serialisable_tag_import_options = tag_import_options.GetSerialisableTuple()
        
        new_serialisable_info = (serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_file_import_options, serialisable_tag_import_options, paused)
        
        return (3, new_serialisable_info)
def __init__(self):
    # Importer state: gallery/file work queues, import options, live network
    # jobs, and the repeating jobs that drive the downloads.
    
    HydrusSerialisable.SerialisableBase.__init__(self)
    
    # work queues
    self._gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
    self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()
    
    # start from the client's 'loud' file import defaults and default tag options
    self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions('loud')
    self._tag_import_options = ClientImportOptions.TagImportOptions(is_default=True)
    
    self._paused = False
    
    self._downloader_key = HydrusData.GenerateKey()
    
    self._lock = threading.Lock()
    
    # network jobs currently in flight, if any
    self._files_network_job = None
    self._gallery_network_job = None
    
    # repeating jobs that pump the two queues
    self._files_repeating_job = None
    self._gallery_repeating_job = None
    
    # route pubsub seed-update notifications to this object's Notify* handlers
    HG.client_controller.sub(self, 'NotifyFileSeedsUpdated', 'file_seed_cache_file_seeds_updated')
    HG.client_controller.sub(self, 'NotifyGallerySeedsUpdated', 'gallery_seed_log_gallery_seeds_updated')
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
    # Steps a subscription query's serialised tuple up through historical versions.
    
    if version == 1:
        # v1 -> v2: gallery seed log added
        ( query, check_now, last_check_time, next_check_time, paused, status, serialisable_file_seed_cache ) = old_serialisable_info
        
        gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
        
        serialisable_gallery_seed_log = gallery_seed_log.GetSerialisableTuple()
        
        new_serialisable_info = ( query, check_now, last_check_time, next_check_time, paused, status, serialisable_gallery_seed_log, serialisable_file_seed_cache )
        
        return ( 2, new_serialisable_info )
        
    
    if version == 2:
        # v2 -> v3: optional display name and per-query tag import options added
        ( query, check_now, last_check_time, next_check_time, paused, status, serialisable_gallery_seed_log, serialisable_file_seed_cache ) = old_serialisable_info
        
        display_name = None
        tag_import_options = ClientImportOptions.TagImportOptions()
        
        serialisable_tag_import_options = tag_import_options.GetSerialisableTuple()
        
        new_serialisable_info = ( query, display_name, check_now, last_check_time, next_check_time, paused, status, serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_tag_import_options )
        
        return ( 3, new_serialisable_info )
def __init__( self, name, path = '', file_import_options = None, tag_import_options = None, tag_service_keys_to_filename_tagging_options = None, mimes = None, actions = None, action_locations = None, period = 3600, check_regularly = True, show_working_popup = True, publish_files_to_popup_button = True, publish_files_to_page = False ):
    """An import folder: a filesystem path checked on a period, with per-outcome
    actions for the source files and options for import/tagging/publication.
    
    All optional collection/options parameters default to None and are filled in
    here so no mutable default is shared between instances.
    """
    
    if mimes is None:
        
        mimes = HC.ALLOWED_MIMES
        
    
    if file_import_options is None:
        
        # fall back to the client's 'quiet' file import defaults
        file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'quiet' )
        
    
    if tag_import_options is None:
        
        tag_import_options = ClientImportOptions.TagImportOptions()
        
    
    if tag_service_keys_to_filename_tagging_options is None:
        
        tag_service_keys_to_filename_tagging_options = {}
        
    
    if actions is None:
        
        # default: leave the source file alone whatever the import outcome
        actions = {}
        
        actions[ CC.STATUS_SUCCESSFUL_AND_NEW ] = CC.IMPORT_FOLDER_IGNORE
        actions[ CC.STATUS_SUCCESSFUL_BUT_REDUNDANT ] = CC.IMPORT_FOLDER_IGNORE
        actions[ CC.STATUS_DELETED ] = CC.IMPORT_FOLDER_IGNORE
        actions[ CC.STATUS_ERROR ] = CC.IMPORT_FOLDER_IGNORE
        
    
    if action_locations is None:
        
        action_locations = {}
        
    
    HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
    
    self._path = path
    self._mimes = mimes
    self._file_import_options = file_import_options
    self._tag_import_options = tag_import_options
    self._tag_service_keys_to_filename_tagging_options = tag_service_keys_to_filename_tagging_options
    self._actions = actions
    self._action_locations = action_locations
    # seconds between checks when checking regularly
    self._period = period
    self._check_regularly = check_regularly
    
    self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()
    self._last_checked = 0
    self._paused = False
    self._check_now = False
    
    self._show_working_popup = show_working_popup
    self._publish_files_to_popup_button = publish_files_to_popup_button
    self._publish_files_to_page = publish_files_to_page
def GetNewHashes(self):
    """Return the hashes from the file seed cache that count as newly imported,
    per a presentation filter built here (same flags the 'show new files' UI uses)."""
    
    with self._lock:
        
        presentation_options = ClientImportOptions.FileImportOptions()
        
        presentation_options.SetPresentationOptions(True, False, False)
        
        hashes = self._file_seed_cache.GetPresentedHashes(presentation_options)
        
    
    return hashes
def __init__(self):
    # A single watcher: checks a URL on a schedule and imports the files it finds.
    
    HydrusSerialisable.SerialisableBase.__init__(self)
    
    self._page_key = 'initialising page key'
    self._publish_to_page = False
    
    self._url = ''
    
    # work queues
    self._gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
    self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()
    
    self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
    
    # options -- start from the client's watcher/'loud' defaults
    self._checker_options = HG.client_controller.new_options.GetDefaultWatcherCheckerOptions()
    self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions('loud')
    self._tag_import_options = ClientImportOptions.TagImportOptions(is_default=True)
    
    # check scheduling state
    self._last_check_time = 0
    self._checking_status = ClientImporting.CHECKER_STATUS_OK
    self._subject = 'unknown subject'
    
    self._next_check_time = None
    
    # network jobs currently in flight, if any
    self._file_network_job = None
    self._checker_network_job = None
    
    self._check_now = False
    self._files_paused = False
    self._checking_paused = False
    
    # delay state after serious errors
    self._no_work_until = 0
    self._no_work_until_reason = ''
    
    self._creation_time = HydrusData.GetNow()
    
    # human-readable status strings for the UI
    self._file_velocity_status = ''
    self._file_status = ''
    self._watcher_status = ''
    
    self._watcher_key = HydrusData.GenerateKey()
    
    self._lock = threading.Lock()
    
    self._last_pubbed_page_name = ''
    
    # repeating jobs that drive file work and checking
    self._files_repeating_job = None
    self._checker_repeating_job = None
    
    # route pubsub seed-update notifications to this object's handler
    HG.client_controller.sub(self, 'NotifyFileSeedsUpdated', 'file_seed_cache_file_seeds_updated')
def __init__( self, query = 'query text' ):
    # A single subscription query: the search text plus its own check schedule,
    # work queues and tag import options.
    
    HydrusSerialisable.SerialisableBase.__init__( self )
    
    self._query = query
    # optional human-friendly name; None means 'use the query text'
    self._display_name = None
    self._check_now = False
    self._last_check_time = 0
    self._next_check_time = 0
    self._paused = False
    
    self._status = ClientImporting.CHECKER_STATUS_OK
    
    self._gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
    self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()
    
    self._tag_import_options = ClientImportOptions.TagImportOptions()
def _do_fake_imports(self):
    # Seeds the test db with 100 fake files (random hashes + metadata + tags);
    # the first 50 are actually written through the import pipeline, so they
    # land in 'my files'. Side tables map between the various hash types.
    
    self._md5_to_sha256 = {}
    self._sha256_to_md5 = {}
    self._sha256_to_sha1 = {}
    
    self._my_files_sha256 = set()
    
    self._hashes_to_current_tags = {}
    self._hashes_to_pending_tags = {}
    self._hashes_to_deleted_tags = {}
    
    # shared fake file metadata:
    # (size, mime, width, height, duration, num_frames, has_audio, num_words)
    (size, mime, width, height, duration, num_frames, has_audio, num_words) = (65535, HC.IMAGE_JPEG, 640, 480, None, None, False, None)
    
    for i in range(100):
        
        hash = HydrusData.GenerateKey()
        md5 = os.urandom(16)
        sha1 = os.urandom(20)
        sha512 = os.urandom(64)
        
        self._md5_to_sha256[md5] = hash
        self._sha256_to_md5[hash] = md5
        self._sha256_to_sha1[hash] = sha1
        
        # NOTE(review): current/pending/deleted tag pools look like module-level
        # test fixtures defined elsewhere in this file -- confirm
        self._hashes_to_current_tags[hash] = set(random.sample(current_tag_pool, 3))
        self._hashes_to_pending_tags[hash] = set(random.sample(pending_tag_pool, 3))
        self._hashes_to_deleted_tags[hash] = set(random.sample(deleted_tag_pool, 3))
        
        if i < 50:
            
            # push the first half through a fake import so they become local files
            fake_file_import_job = ClientImportFileSeeds.FileImportJob('fake path')
            
            fake_file_import_job._hash = hash
            fake_file_import_job._file_info = (size, mime, width, height, duration, num_frames, has_audio, num_words)
            fake_file_import_job._extra_hashes = (md5, sha1, sha512)
            fake_file_import_job._phashes = [os.urandom(8)]
            fake_file_import_job._file_import_options = ClientImportOptions.FileImportOptions()
            
            self.WriteSynchronous('import_file', fake_file_import_job)
            
            self._my_files_sha256.add(hash)
def GetValue( self ):
    """Build a CheckerOptions object from the panel's current widget state."""
    
    death_velocity = self._death_file_velocity.GetValue()
    files_per_check = self._intended_files_per_check.value()
    
    if self._flat_check_period_checkbox.isChecked():
        
        # flat period: the fastest and slowest check times are the same value
        fastest = self._flat_check_period.GetValue()
        slowest = fastest
        
    else:
        
        fastest = self._never_faster_than.GetValue()
        slowest = self._never_slower_than.GetValue()
        
    
    return ClientImportOptions.CheckerOptions( files_per_check, fastest, slowest, death_velocity )
def __init__(self, url=None):
    # A page managing multiple watchers. If a url is given, a watcher for it is
    # created immediately.
    
    HydrusSerialisable.SerialisableBase.__init__(self)
    
    self._lock = threading.Lock()
    
    self._page_key = 'initialising page key'
    
    self._watchers = HydrusSerialisable.SerialisableList()
    
    # url of the watcher currently highlighted in the UI, if any
    self._highlighted_watcher_url = None
    
    # page-wide defaults applied to new watchers
    self._checker_options = HG.client_controller.new_options.GetDefaultWatcherCheckerOptions()
    self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions('loud')
    self._tag_import_options = ClientImportOptions.TagImportOptions(is_default=True)
    
    # bookkeeping maps keyed by watcher key
    self._watcher_keys_to_watchers = {}
    self._watcher_keys_to_added_timestamps = {}
    self._watcher_keys_to_already_in_timestamps = {}
    
    self._watchers_repeating_job = None
    
    # cached aggregate status, regenerated when dirty
    self._status_dirty = True
    self._status_cache = None
    self._status_cache_generation_time = 0
    
    #
    
    if url is not None:
        
        watcher = WatcherImport()
        
        watcher.SetURL(url)
        
        self._AddWatcher(watcher)
        
    
    self._last_time_watchers_changed = HydrusData.GetNowPrecise()
    
    # publish throttling state for the UI value range
    self._last_pubbed_value_range = (0, 0)
    self._next_pub_value_check_time = 0
def __init__( self ):
    # Lightweight header for a subscription query; the heavyweight log data
    # lives in a separate query log container, referenced by name.
    
    HydrusSerialisable.SerialisableBase.__init__( self )
    
    self._query_log_container_name = GenerateQueryLogContainerName()
    
    self._query_text = 'query'
    # optional human-friendly name; None means 'use the query text'
    self._display_name = None
    self._check_now = False
    self._last_check_time = 0
    self._next_check_time = 0
    self._paused = False
    
    self._checker_status = ClientImporting.CHECKER_STATUS_OK
    
    # whether the log container has been loaded/synced yet
    self._query_log_container_status = LOG_CONTAINER_UNSYNCED
    
    self._file_seed_cache_status = ClientImportFileSeeds.FileSeedCacheStatus()
    
    self._tag_import_options = ClientImportOptions.TagImportOptions()
    
    # file velocity: raw (count, time_delta) and a display string
    self._raw_file_velocity = ( 0, 1 )
    self._pretty_file_velocity = 'unknown'
    
    self._example_file_seed = None
    self._example_gallery_seed = None
def _WorkOnFiles( self, page_key ):
    """Process the next unknown file seed: download/import it, present it to the
    page if appropriate, and sleep briefly when substantial work happened."""
    
    next_seed = self._file_seed_cache.GetNextFileSeed( CC.STATUS_UNKNOWN )
    
    if next_seed is None:
        
        # queue is empty -- nothing to do
        return
        
    
    def status_hook( text ):
        
        # publish only the first line of any multi-line status text
        with self._lock:
            
            self._current_action = text.splitlines()[0] if len( text ) > 0 else text
            
        
    
    default_tag_import_options = ClientImportOptions.TagImportOptions( is_default = True )
    
    did_substantial_work = next_seed.WorkOnURL( self._file_seed_cache, status_hook, self._NetworkJobFactory, self._FileNetworkJobPresentationContextFactory, self._file_import_options, default_tag_import_options )
    
    if next_seed.ShouldPresent( self._file_import_options ):
        
        next_seed.PresentToPage( page_key )
        
        did_substantial_work = True
        
    
    with self._lock:
        
        self._current_action = ''
        
    
    if did_substantial_work:
        
        # back off a little so heavy work does not saturate the worker
        time.sleep( ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME )
def _ShowFilesInNewPage( self, show = 'all' ):
    """Open a new page with files from this file seed cache.
    
    show: 'all' for every hash in the cache, 'new' for only the hashes that
    would be presented as new imports. Any other value shows nothing.
    """
    
    file_seed_cache = self._file_seed_cache_get_callable()
    
    if show == 'all':
        
        hashes = file_seed_cache.GetHashes()
        
    elif show == 'new':
        
        file_import_options = ClientImportOptions.FileImportOptions()
        
        file_import_options.SetPresentationOptions( True, False, False )
        
        hashes = file_seed_cache.GetPresentedHashes( file_import_options )
        
    else:
        
        # previously an unrecognised value left 'hashes' unbound and raised
        # NameError below; treat it as 'nothing to show' instead
        hashes = []
        
    
    if len( hashes ) > 0:
        
        HG.client_controller.pub( 'new_page_query', CC.LOCAL_FILE_SERVICE_KEY, initial_hashes = hashes )
def _UpdateSerialisableInfo(self, version, old_serialisable_info):
    # Steps an import folder's serialised tuple up through historical versions;
    # each branch converts version n's tuple to n+1's and returns it.
    
    if version == 1:
        # v1 -> v2: the legacy single 'tag' was replaced by tag import options
        (path, mimes, serialisable_file_import_options, action_pairs, action_location_pairs, period, open_popup, tag, serialisable_file_seed_cache, last_checked, paused) = old_serialisable_info
        
        # edited out tag carry-over to tio due to bit rot
        
        tag_import_options = ClientImportOptions.TagImportOptions()
        
        serialisable_tag_import_options = tag_import_options.GetSerialisableTuple()
        
        new_serialisable_info = (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused)
        
        return (2, new_serialisable_info)
        
    
    if version == 2:
        # v2 -> v3: list of .txt-parse tag service keys added (empty by default)
        (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused) = old_serialisable_info
        
        serialisable_txt_parse_tag_service_keys = []
        
        new_serialisable_info = (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_txt_parse_tag_service_keys, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused)
        
        return (3, new_serialisable_info)
        
    
    if version == 3:
        # v3 -> v4: 'check now' flag added
        (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_txt_parse_tag_service_keys, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused) = old_serialisable_info
        
        check_now = False
        
        new_serialisable_info = (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_txt_parse_tag_service_keys, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused, check_now)
        
        return (4, new_serialisable_info)
        
    
    if version == 4:
        # v4 -> v5: the flat list of txt-parse service keys becomes a map of
        # service key -> FilenameTaggingOptions (with txt-file loading enabled)
        (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_txt_parse_tag_service_keys, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused, check_now) = old_serialisable_info
        
        txt_parse_tag_service_keys = [bytes.fromhex(service_key) for service_key in serialisable_txt_parse_tag_service_keys]
        
        tag_service_keys_to_filename_tagging_options = {}
        
        for service_key in txt_parse_tag_service_keys:
            
            filename_tagging_options = ClientImportOptions.FilenameTaggingOptions()
            
            filename_tagging_options._load_from_neighbouring_txt_files = True
            
            tag_service_keys_to_filename_tagging_options[service_key] = filename_tagging_options
            
        
        serialisable_tag_service_keys_to_filename_tagging_options = [(service_key.hex(), filename_tagging_options.GetSerialisableTuple()) for (service_key, filename_tagging_options) in list(tag_service_keys_to_filename_tagging_options.items())]
        
        new_serialisable_info = (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_tag_service_keys_to_filename_tagging_options, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused, check_now)
        
        return (5, new_serialisable_info)
        
    
    if version == 5:
        # v5 -> v6: 'open popup' becomes finer-grained publication flags, and
        # the old paused flag also derives the new 'check regularly' flag
        (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_tag_service_keys_to_filename_tagging_options, action_pairs, action_location_pairs, period, open_popup, serialisable_file_seed_cache, last_checked, paused, check_now) = old_serialisable_info
        
        check_regularly = not paused
        show_working_popup = True
        publish_files_to_page = False
        publish_files_to_popup_button = open_popup
        
        new_serialisable_info = (path, mimes, serialisable_file_import_options, serialisable_tag_import_options, serialisable_tag_service_keys_to_filename_tagging_options, action_pairs, action_location_pairs, period, check_regularly, serialisable_file_seed_cache, last_checked, paused, check_now, show_working_popup, publish_files_to_popup_button, publish_files_to_page)
        
        return (6, new_serialisable_info)
def _UpdateSerialisableInfo(self, version, old_serialisable_info):
    # Steps a watcher's serialised tuple up through historical versions; each
    # branch converts version n's tuple to n+1's and returns it.
    
    if version == 1:
        # v1 -> v2: fixed times_to_check/check_period replaced by CheckerOptions;
        # the single paused flag split into files_paused and checking_paused
        (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_file_import_options, serialisable_tag_import_options, times_to_check, check_period, last_check_time, paused) = old_serialisable_info
        
        checker_options = ClientImportOptions.CheckerOptions(intended_files_per_check=8, never_faster_than=300, never_slower_than=86400, death_file_velocity=(1, 86400))
        
        serialisable_checker_options = checker_options.GetSerialisableTuple()
        
        files_paused = paused
        checking_paused = paused
        
        new_serialisable_info = (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused)
        
        return (2, new_serialisable_info)
        
    
    if version == 2:
        # v2 -> v3: checking status and watcher subject added
        (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused) = old_serialisable_info
        
        checking_status = ClientImporting.CHECKER_STATUS_OK
        subject = 'unknown subject'
        
        new_serialisable_info = (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject)
        
        return (3, new_serialisable_info)
        
    
    if version == 3:
        # v3 -> v4: 'no work until' delay fields added
        (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject) = old_serialisable_info
        
        no_work_until = 0
        no_work_until_reason = ''
        
        new_serialisable_info = (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason)
        
        return (4, new_serialisable_info)
        
    
    if version == 4:
        # v4 -> v5: creation time added, backfilled with 'now'
        (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason) = old_serialisable_info
        
        creation_time = HydrusData.GetNow()
        
        new_serialisable_info = (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time)
        
        return (5, new_serialisable_info)
        
    
    if version == 5:
        # v5 -> v6: gallery seed log added
        (url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time) = old_serialisable_info
        
        gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
        
        serialisable_gallery_seed_log = gallery_seed_log.GetSerialisableTuple()
        
        new_serialisable_info = (url, serialisable_gallery_seed_log, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time)
        
        return (6, new_serialisable_info)
        
    
    if version == 6:
        # v6 -> v7: legacy url->filename/md5 maps dropped in favour of a
        # fixed service-keys-to-tags object
        (url, serialisable_gallery_seed_log, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time) = old_serialisable_info
        
        fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
        
        serialisable_fixed_service_keys_to_tags = fixed_service_keys_to_tags.GetSerialisableTuple()
        
        new_serialisable_info = (url, serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_fixed_service_keys_to_tags, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time)
        
        return (7, new_serialisable_info)
def MainLoop(self):
    """Background worker: downloads pending hashes from remote services.
    
    Runs until thread/view shutdown. Hashes arrive via self._pending_hashes
    (guarded by self._lock) and are pulled one at a time; for each, the file's
    current services are tried in random order (file repository download or
    IPFS import) until one succeeds. Progress is reported via a popup job key
    that is only published if the run lasts more than a couple of seconds.
    """
    
    hashes_still_to_download_in_this_run = set()
    total_hashes_in_this_run = 0
    total_successful_hashes_in_this_run = 0
    
    while not (HydrusThreading.IsThreadShuttingDown() or self._shutting_down or HG.view_shutdown):
        
        with self._lock:
            
            if len(self._pending_hashes) > 0:
                
                if total_hashes_in_this_run == 0:
                    
                    # starting a fresh run -- prepare the progress popup, published
                    # after a short delay so very quick runs never show it
                    job_key = ClientThreading.JobKey(cancellable=True)
                    
                    job_key.SetStatusTitle('downloading')
                    
                    job_key.SetVariable('popup_text_1', 'initialising downloader')
                    
                    job_key_pub_job = self._controller.CallLater(2.0, self._controller.pub, 'message', job_key)
                    
                
                num_before = len(hashes_still_to_download_in_this_run)
                
                hashes_still_to_download_in_this_run.update(self._pending_hashes)
                
                num_after = len(hashes_still_to_download_in_this_run)
                
                total_hashes_in_this_run += num_after - num_before
                
                self._pending_hashes = set()
                
            
        
        if len(hashes_still_to_download_in_this_run) == 0:
            
            # idle: reset the run counters and wait for new work
            total_hashes_in_this_run = 0
            total_successful_hashes_in_this_run = 0
            
            self._new_files_event.wait(5)
            
            self._new_files_event.clear()
            
            continue
            
        
        if job_key.IsCancelled():
            
            hashes_still_to_download_in_this_run = set()
            
            continue
            
        
        # pick an arbitrary remaining hash; random.choice on a list works on all
        # Python versions (random.sample on a set raises TypeError from 3.11)
        hash = random.choice(list(hashes_still_to_download_in_this_run))
        
        hashes_still_to_download_in_this_run.discard(hash)
        
        total_done = total_hashes_in_this_run - len(hashes_still_to_download_in_this_run)
        
        job_key.SetVariable('popup_text_1', 'downloading files from remote services: {}'.format(HydrusData.ConvertValueRangeToPrettyString(total_done, total_hashes_in_this_run)))
        job_key.SetVariable('popup_gauge_1', (total_done, total_hashes_in_this_run))
        
        try:
            
            errors_occurred = []
            file_successful = False
            
            media_result = self._controller.Read('media_result', hash)
            
            service_keys = list(media_result.GetLocationsManager().GetCurrent())
            
            random.shuffle(service_keys)
            
            if CC.COMBINED_LOCAL_FILE_SERVICE_KEY in service_keys:
                
                # already a local file -- nothing to download
                total_successful_hashes_in_this_run += 1
                
                continue
                
            
            for service_key in service_keys:
                
                try:
                    
                    service = self._controller.services_manager.GetService(service_key)
                    
                except Exception:
                    
                    # service no longer exists -- try the next one
                    continue
                    
                
                try:
                    
                    if service.GetServiceType() == HC.FILE_REPOSITORY:
                        
                        file_repository = service
                        
                        if file_repository.IsFunctional():
                            
                            (os_file_handle, temp_path) = HydrusPaths.GetTempPath()
                            
                            try:
                                
                                file_repository.Request(HC.GET, 'file', {'hash': hash}, temp_path=temp_path)
                                
                                exclude_deleted = False # this is the important part here
                                do_not_check_known_urls_before_importing = False
                                do_not_check_hashes_before_importing = False
                                allow_decompression_bombs = True
                                min_size = None
                                max_size = None
                                max_gif_size = None
                                min_resolution = None
                                max_resolution = None
                                automatic_archive = False
                                associate_source_urls = True
                                
                                file_import_options = ClientImportOptions.FileImportOptions()
                                
                                file_import_options.SetPreImportOptions(exclude_deleted, do_not_check_known_urls_before_importing, do_not_check_hashes_before_importing, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution)
                                file_import_options.SetPostImportOptions(automatic_archive, associate_source_urls)
                                
                                file_import_job = ClientImportFileSeeds.FileImportJob(temp_path, file_import_options)
                                
                                file_import_job.DoWork()
                                
                                file_successful = True
                                
                                break
                                
                            finally:
                                
                                # always clean up the temp file, even on failure
                                HydrusPaths.CleanUpTempPath(os_file_handle, temp_path)
                                
                            
                        
                    elif service.GetServiceType() == HC.IPFS:
                        
                        multihashes = HG.client_controller.Read('service_filenames', service_key, {hash})
                        
                        if len(multihashes) > 0:
                            
                            multihash = multihashes[0]
                            
                            service.ImportFile(multihash, silent=True)
                            
                            file_successful = True
                            
                            break
                            
                        
                    
                except Exception as e:
                    
                    # remember the error but keep trying other services
                    errors_occurred.append(e)
                    
                
            
            if file_successful:
                
                total_successful_hashes_in_this_run += 1
                
            
            if len(errors_occurred) > 0:
                
                if not file_successful:
                    
                    # every candidate service failed -- surface the first error
                    raise errors_occurred[0]
                    
                
            
        except Exception as e:
            
            HydrusData.ShowException(e)
            
            # fixed: this was previously '= 0', which made the len() call in the
            # finally block raise TypeError; abandon the rest of the run instead
            hashes_still_to_download_in_this_run = set()
            
        finally:
            
            if len(hashes_still_to_download_in_this_run) == 0:
                
                # run finished (or abandoned) -- finalise and retire the popup
                job_key.DeleteVariable('popup_text_1')
                job_key.DeleteVariable('popup_gauge_1')
                
                if total_successful_hashes_in_this_run > 0:
                    
                    job_key.SetVariable('popup_text_1', HydrusData.ToHumanInt(total_successful_hashes_in_this_run) + ' files downloaded')
                    
                
                job_key_pub_job.Cancel()
                
                job_key.Finish()
                
                job_key.Delete(1)