def GetServiceKeysToContentUpdates(
    self,
    status: int,
    media_result: ClientMediaResult.MediaResult,
    filterable_tags: typing.Iterable[str],
    external_filterable_tags=None,
    external_additional_service_keys_to_tags=None
):
    """Build the per-tag-service content updates to apply for one imported file.

    For every real tag service, combines externally supplied additional tags
    with the tags produced by that service's tag import options (if any),
    then converts the result into content updates for the file's hash.

    :param status: import status constant passed through to GetTags
    :param media_result: the file's media result; only GetHash() is used here
        directly, the rest is passed to the service tag import options
    :param filterable_tags: parsed tags eligible for filtering; cleaned first
    :param external_filterable_tags: extra filterable tags, defaults to empty
    :param external_additional_service_keys_to_tags: extra per-service tags
        that are always added, defaults to an empty mapping
    :return: dict of service key -> list of content updates
    """
    if external_filterable_tags is None:
        external_filterable_tags = set()

    if external_additional_service_keys_to_tags is None:
        external_additional_service_keys_to_tags = ClientTags.ServiceKeysToTags()

    filterable_tags = HydrusTags.CleanTags(filterable_tags)

    service_keys_to_tags = ClientTags.ServiceKeysToTags()

    for service_key in HG.client_controller.services_manager.GetServiceKeys(HC.REAL_TAG_SERVICES):

        service_additional_tags = set()

        if service_key in external_additional_service_keys_to_tags:
            service_additional_tags.update(external_additional_service_keys_to_tags[service_key])

        if service_key in self._service_keys_to_service_tag_import_options:

            service_tag_import_options = self._service_keys_to_service_tag_import_options[service_key]

            # per-service copy so one service's options cannot affect another's input
            service_filterable_tags = set(filterable_tags)
            service_filterable_tags.update(external_filterable_tags)

            service_tags = service_tag_import_options.GetTags(
                service_key, status, media_result, service_filterable_tags, service_additional_tags)

        else:

            # no options for this service: only the externally supplied tags apply
            service_tags = service_additional_tags

        if len(service_tags) > 0:
            service_keys_to_tags[service_key] = service_tags

    # renamed from 'hash', which shadowed the builtin
    file_hash = media_result.GetHash()

    service_keys_to_content_updates = ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
        {file_hash}, service_keys_to_tags)

    return service_keys_to_content_updates
def __init__(self, url=None, can_generate_more_pages=True):
    # A gallery seed: one gallery-page url queued for download.
    if url is None:
        # placeholder example url used when none is supplied
        url = 'https://nostrils-central.cx/index.php?post=s&tag=hyper_nostrils&page=3'
    else:
        try:
            # normalise through the domain manager's url classes
            url = HG.client_controller.network_engine.domain_manager.NormaliseURL(url)
        except HydrusExceptions.URLClassException:
            # url does not match any known url class -- keep it as given
            pass
    HydrusSerialisable.SerialisableBase.__init__(self)
    self.url = url
    self._can_generate_more_pages = can_generate_more_pages
    # tags attached to every file produced by this seed
    self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
    self.created = HydrusData.GetNow()
    self.modified = self.created
    self.status = CC.STATUS_UNKNOWN
    self.note = ''
    self._referral_url = None
    self._force_next_page_url_generation = False
    # unique token for this run of the seed
    self._run_token = HydrusData.GenerateKey()
def GetRowsOfPresentationTextsWithNamespaces(
    self, render_for_user: bool, sibling_decoration_allowed: bool, child_rows_allowed: bool
) -> typing.List[typing.List[typing.Tuple[str, str]]]:
    """Return rows of (text, namespace) pairs for listbox presentation.

    The first row is this tag (optionally decorated with its ideal sibling);
    parent tags are appended either as extra rows or as a suffix on the
    first row, depending on the allowed flags.
    """
    # this should be with counts or whatever, but we need to think about this more lad
    (namespace, _subtag) = HydrusTags.SplitTag(self._tag)

    main_row = [(ClientTags.RenderTag(self._tag, render_for_user), namespace)]

    if sibling_decoration_allowed and self._ideal_tag is not None:
        self._AppendIdealTagTextWithNamespace(main_row, render_for_user)

    rows = [main_row]

    if self._parent_tags is not None:
        if child_rows_allowed:
            self._AppendParentsTextWithNamespaces(rows, render_for_user)
        elif sibling_decoration_allowed:
            self._AppendParentSuffixTagTextWithNamespace(main_row)

    return rows
def __init__(self, name='new api permissions', access_key=None, basic_permissions=None, search_tag_filter=None):
    """A named set of Client API permissions.

    :param name: human-readable label for this permission set
    :param access_key: 32-byte access key; a fresh one is generated if None
    :param basic_permissions: iterable of permission constants, default empty
    :param search_tag_filter: TagFilter limiting searchable tags, default allows all
    """
    access_key = HydrusData.GenerateKey() if access_key is None else access_key
    basic_permissions = set() if basic_permissions is None else basic_permissions

    if search_tag_filter is None:
        search_tag_filter = ClientTags.TagFilter()

    HydrusSerialisable.SerialisableBaseNamed.__init__(self, name)

    self._access_key = access_key
    # copy so the caller's collection is not shared
    self._basic_permissions = set(basic_permissions)
    self._search_tag_filter = search_tag_filter

    # cached search results and their expiry, guarded by the lock
    self._last_search_results = None
    self._search_results_timeout = 0

    self._lock = threading.Lock()
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
    # Migrates older serialised tuples up one version per step; tuple element
    # order is the on-disk format, so it must not be changed.
    if version == 1:
        ( url, can_generate_more_pages, created, modified, status, note, referral_url ) = old_serialisable_info
        # v2 adds an (empty) external additional service-keys-to-tags mapping
        external_additional_service_keys_to_tags = ClientTags.ServiceKeysToTags()
        serialisable_external_additional_service_keys_to_tags = external_additional_service_keys_to_tags.GetSerialisableTuple()
        new_serialisable_info = ( url, can_generate_more_pages, serialisable_external_additional_service_keys_to_tags, created, modified, status, note, referral_url )
        return ( 2, new_serialisable_info )
    if version == 2:
        ( url, can_generate_more_pages, serialisable_external_additional_service_keys_to_tags, created, modified, status, note, referral_url ) = old_serialisable_info
        # v3 adds an (empty) external filterable tags list
        external_filterable_tags = set()
        serialisable_external_filterable_tags = list( external_filterable_tags )
        new_serialisable_info = ( url, can_generate_more_pages, serialisable_external_filterable_tags, serialisable_external_additional_service_keys_to_tags, created, modified, status, note, referral_url )
        return ( 3, new_serialisable_info )
def PendURLs(self, urls, service_keys_to_tags=None):
    """Queue the given urls as file or gallery seeds and wake the workers.

    Urls that match a file/post url class (or no class at all) become file
    seeds; everything else becomes a non-paging gallery seed. The given
    service_keys_to_tags are fixed onto every seed.
    """
    if service_keys_to_tags is None:
        service_keys_to_tags = ClientTags.ServiceKeysToTags()

    with self._lock:

        # > _1_ to take out the occasional whitespace
        urls = [u for u in urls if len(u) > 1]

        new_file_seeds = []
        new_gallery_seeds = []

        for url in urls:

            try:
                url_class = HG.client_controller.network_engine.domain_manager.GetURLClass(url)
            except HydrusExceptions.URLClassException:
                # unparseable url -- skip it
                continue

            treat_as_file = url_class is None or url_class.GetURLType() in (HC.URL_TYPE_FILE, HC.URL_TYPE_POST)

            if treat_as_file:
                seed = ClientImportFileSeeds.FileSeed(ClientImportFileSeeds.FILE_SEED_TYPE_URL, url)
                seed.SetFixedServiceKeysToTags(service_keys_to_tags)
                new_file_seeds.append(seed)
            else:
                seed = ClientImportGallerySeeds.GallerySeed(url, can_generate_more_pages=False)
                seed.SetFixedServiceKeysToTags(service_keys_to_tags)
                new_gallery_seeds.append(seed)

        if new_gallery_seeds:
            self._gallery_seed_log.AddGallerySeeds(new_gallery_seeds)
            ClientImporting.WakeRepeatingJob(self._gallery_repeating_job)

        if new_file_seeds:
            self._file_seed_cache.AddFileSeeds(new_file_seeds)
            ClientImporting.WakeRepeatingJob(self._files_repeating_job)
def _UpdateTagDisplay(self):
    """Refresh the favourites control from the current suggested-tag favourites."""
    favourite_tags = list(HG.client_controller.new_options.GetSuggestedTagsFavourites(self._service_key))

    # sort in place per the user's default tag sort
    ClientTags.SortTags(HC.options['default_tag_sort'], favourite_tags)

    filtered = FilterSuggestedTagsForMedia(favourite_tags, self._media, self._service_key)

    self._favourite_tags.SetTags(filtered)
def _AppendIdealTagTextWithNamespace(self, texts_with_namespaces, render_for_user):
    """Append a '(displays as ...)' decoration for the ideal sibling tag."""
    (ideal_namespace, _ideal_subtag) = HydrusTags.SplitTag(self._ideal_tag)

    rendered_ideal = ClientTags.RenderTag(self._ideal_tag, render_for_user)

    texts_with_namespaces.append((' (displays as {})'.format(rendered_ideal), ideal_namespace))
def _AppendParentsTextWithNamespaces(self, rows_of_texts_with_namespaces, render_for_user):
    """Append one indented row per parent tag."""
    indent = '    '

    for parent_tag in self._parent_tags:
        (parent_namespace, _parent_subtag) = HydrusTags.SplitTag(parent_tag)
        rendered_parent = ClientTags.RenderTag(parent_tag, render_for_user)
        rows_of_texts_with_namespaces.append([(indent + rendered_parent, parent_namespace)])
def __init__( self ):
    # A watcher: repeatedly checks a thread/page url for new files.
    HydrusSerialisable.SerialisableBase.__init__( self )
    # page/publishing state
    self._page_key = 'initialising page key'
    self._publish_to_page = False
    self._url = ''
    # work queues
    self._gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
    self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()
    # tags attached to every file found by this watcher
    self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
    # options, seeded from the client defaults
    self._checker_options = HG.client_controller.new_options.GetDefaultWatcherCheckerOptions()
    self._file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions( 'loud' )
    self._tag_import_options = ClientImportOptions.TagImportOptions( is_default = True )
    # checking state
    self._last_check_time = 0
    self._checking_status = ClientImporting.CHECKER_STATUS_OK
    self._subject = 'unknown subject'
    self._next_check_time = None
    # live network jobs (None when idle)
    self._file_network_job = None
    self._checker_network_job = None
    # pause/override flags
    self._check_now = False
    self._files_paused = False
    self._checking_paused = False
    # back-off: no work until this time, for this reason
    self._no_work_until = 0
    self._no_work_until_reason = ''
    self._creation_time = HydrusData.GetNow()
    # human-readable status strings for the ui
    self._file_velocity_status = ''
    self._file_status = ''
    self._watcher_status = ''
    self._watcher_key = HydrusData.GenerateKey()
    self._lock = threading.Lock()
    self._last_pubbed_page_name = ''
    # repeating jobs are attached later, on start
    self._files_repeating_job = None
    self._checker_repeating_job = None
    HG.client_controller.sub( self, 'NotifyFileSeedsUpdated', 'file_seed_cache_file_seeds_updated' )
def GetRowsOfPresentationTextsWithNamespaces(
    self, render_for_user: bool, sibling_decoration_allowed: bool, child_rows_allowed: bool
) -> typing.List[typing.List[typing.Tuple[str, str]]]:
    """Return rows of (text, namespace) pairs for presenting this predicate.

    The first row is the predicate's own texts, optionally decorated with its
    ideal sibling; parent predicates are added as extra rows when child rows
    are allowed, otherwise summarised as an '(n parents)' suffix.
    """
    rows_of_texts_and_namespaces = []

    first_row_of_texts_and_namespaces = self._predicate.GetTextsAndNamespaces(
        render_for_user, or_under_construction=self._i_am_an_or_under_construction)

    if sibling_decoration_allowed and self._predicate.HasIdealSibling():

        ideal_sibling = self._predicate.GetIdealSibling()

        (ideal_namespace, ideal_subtag) = HydrusTags.SplitTag(ideal_sibling)

        ideal_text = ' (displays as {})'.format(ClientTags.RenderTag(ideal_sibling, render_for_user))

        first_row_of_texts_and_namespaces.append((ideal_text, ideal_namespace))

    rows_of_texts_and_namespaces.append(first_row_of_texts_and_namespaces)

    parent_preds = self._predicate.GetParentPredicates()

    if len(parent_preds) > 0:

        if child_rows_allowed:

            # fix: iterate the list we already fetched rather than calling
            # GetParentPredicates() a second time
            for parent_pred in parent_preds:
                rows_of_texts_and_namespaces.append(parent_pred.GetTextsAndNamespaces(render_for_user))

        elif sibling_decoration_allowed:

            parents_text = ' ({} parents)'.format(HydrusData.ToHumanInt(len(parent_preds)))

            first_row_of_texts_and_namespaces.append((parents_text, ''))

    return rows_of_texts_and_namespaces
def _UpdateSerialisableInfo(self, version, old_serialisable_info):
    # Migrates the v1 serialised tuple to v2; tuple element order is the
    # on-disk format, so it must not be changed.
    if version == 1:
        (url, can_generate_more_pages, created, modified, status, note,
         referral_url) = old_serialisable_info
        # v2 adds an (empty) fixed service-keys-to-tags mapping
        fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
        serialisable_fixed_service_keys_to_tags = fixed_service_keys_to_tags.GetSerialisableTuple()
        new_serialisable_info = (url, can_generate_more_pages,
                                 serialisable_fixed_service_keys_to_tags,
                                 created, modified, status, note, referral_url)
        return (2, new_serialisable_info)
def test_SERIALISABLE_TYPE_TAG_FILTER(self):
    # Round-trips TagFilter through serialisation and checks Filter() under
    # every combination of blacklist/whitelist rules.
    def test(obj, dupe_obj):
        # serialisation round-trip must preserve the rules
        self.assertEqual(obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules)
    tags = set()
    tags.add('title:test title')
    tags.add('series:neon genesis evangelion')
    tags.add('series:kill la kill')
    tags.add('smile')
    tags.add('blue eyes')
    # no rules: everything passes
    tag_filter = ClientTags.TagFilter()
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill'})
    # blacklist everything ('' = unnamespaced, ':' = namespaced)
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), set())
    # blacklist everything, whitelist the series namespace
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('series:', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'series:neon genesis evangelion', 'series:kill la kill'})
    # blacklist everything, whitelist one specific namespaced tag
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('series:kill la kill', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'series:kill la kill'})
    # blacklist everything, whitelist one specific unnamespaced tag
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('smile', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile'})
    # blacklist only namespaced tags
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes'})
    # blacklist namespaced, whitelist the series namespace back in
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('series:', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill'})
    # blacklist namespaced, whitelist one specific namespaced tag back in
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule(':', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('series:kill la kill', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes', 'series:kill la kill'})
    # blacklist one namespace only
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('series:', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes', 'title:test title'})
    # blacklist one namespace, whitelist one tag within it
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('series:', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('series:neon genesis evangelion', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion'})
    # blacklist only unnamespaced tags
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'title:test title', 'series:neon genesis evangelion', 'series:kill la kill'})
    # blacklist unnamespaced, whitelist one back in
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('blue eyes', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(tags), {'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes'})
    # blacklist namespace test
    blacklist_tags = {'nintendo', 'studio:nintendo'}
    # unnamespaced rule does not hit the namespaced tag by default
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('nintendo', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(blacklist_tags), {'studio:nintendo'})
    # ...unless apply_unnamespaced_rules_to_namespaced_tags is set
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('nintendo', CC.FILTER_BLACKLIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags=True), set())
    # whitelist overrides even the cross-namespace blacklist
    tag_filter = ClientTags.TagFilter()
    tag_filter.SetRule('nintendo', CC.FILTER_BLACKLIST)
    tag_filter.SetRule('studio:nintendo', CC.FILTER_WHITELIST)
    self._dump_and_load_and_test(tag_filter, test)
    self.assertEqual(tag_filter.Filter(blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags=True), {'studio:nintendo'})
def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS(self):
    # Round-trips three DuplicateActionOptions configurations (move/copy/merge)
    # through serialisation, then checks ProcessPairIntoContentUpdates against
    # media in a spread of states (values/empty, local/trashed/deleted).
    def test(obj, dupe_obj):
        self.assertEqual(obj.ToTuple(), dupe_obj.ToTuple())
    duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions(
        [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE)])
    duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions(
        [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY)])
    duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions(
        [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE)])
    # common file metadata for all the fixture media
    inbox = True
    size = 40960
    mime = HC.IMAGE_JPEG
    width = 640
    height = 480
    duration = None
    num_frames = None
    has_audio = False
    num_words = None
    local_locations_manager = ClientMediaManagers.LocationsManager(
        {CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), set(), inbox)
    trash_locations_manager = ClientMediaManagers.LocationsManager(
        {CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), set(), inbox)
    deleted_locations_manager = ClientMediaManagers.LocationsManager(
        set(), {CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), inbox)
    # duplicate to generate proper dicts
    one_tags_manager = ClientMediaManagers.TagsManager(
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'one'}}},
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'one'}}}).Duplicate()
    two_tags_manager = ClientMediaManagers.TagsManager(
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'two'}}},
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'two'}}}).Duplicate()
    substantial_tags_manager = ClientMediaManagers.TagsManager(
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'test tag', 'series:namespaced test tag'}}},
        {CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'test tag', 'series:namespaced test tag'}}}).Duplicate()
    empty_tags_manager = ClientMediaManagers.TagsManager({}, {}).Duplicate()
    one_ratings_manager = ClientMediaManagers.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 1.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.8})
    two_ratings_manager = ClientMediaManagers.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 0.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.6})
    substantial_ratings_manager = ClientMediaManagers.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 1.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.8})
    empty_ratings_manager = ClientMediaManagers.RatingsManager({})
    notes_manager = ClientMediaManagers.NotesManager({})
    file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()
    # media 1: local, has tags and ratings
    local_hash_has_values = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        1, local_hash_has_values, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, substantial_tags_manager, local_locations_manager,
        substantial_ratings_manager, notes_manager, file_viewing_stats_manager)
    local_media_has_values = ClientMedia.MediaSingleton(media_result)
    # media 2: another local file with the same tags/ratings
    other_local_hash_has_values = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        2, other_local_hash_has_values, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, substantial_tags_manager, local_locations_manager,
        substantial_ratings_manager, notes_manager, file_viewing_stats_manager)
    other_local_media_has_values = ClientMedia.MediaSingleton(media_result)
    # media 3: local, no tags or ratings
    local_hash_empty = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        3, local_hash_empty, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, empty_tags_manager, local_locations_manager,
        empty_ratings_manager, notes_manager, file_viewing_stats_manager)
    local_media_empty = ClientMedia.MediaSingleton(media_result)
    # media 4: trashed, empty
    trashed_hash_empty = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        4, trashed_hash_empty, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, empty_tags_manager, trash_locations_manager,
        empty_ratings_manager, notes_manager, file_viewing_stats_manager)
    trashed_media_empty = ClientMedia.MediaSingleton(media_result)
    # media 5: already deleted, empty
    deleted_hash_empty = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        5, deleted_hash_empty, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, empty_tags_manager, deleted_locations_manager,
        empty_ratings_manager, notes_manager, file_viewing_stats_manager)
    deleted_media_empty = ClientMedia.MediaSingleton(media_result)
    # media 6: tag 'one', like 1.0 / numerical 0.8
    one_hash = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        6, one_hash, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, one_tags_manager, local_locations_manager,
        one_ratings_manager, notes_manager, file_viewing_stats_manager)
    one_media = ClientMedia.MediaSingleton(media_result)
    # media 7: tag 'two', like 0.0 / numerical 0.6
    two_hash = HydrusData.GenerateKey()
    file_info_manager = ClientMediaManagers.FileInfoManager(
        7, two_hash, size, mime, width, height, duration,
        num_frames, has_audio, num_words)
    media_result = ClientMediaResult.MediaResult(
        file_info_manager, two_tags_manager, local_locations_manager,
        two_ratings_manager, notes_manager, file_viewing_stats_manager)
    two_media = ClientMedia.MediaSingleton(media_result)
    # serialisation round-trips
    self._dump_and_load_and_test(duplicate_action_options_delete_and_move, test)
    self._dump_and_load_and_test(duplicate_action_options_copy, test)
    self._dump_and_load_and_test(duplicate_action_options_merge, test)
    # compare content-update dicts ignoring list order
    def assertSCUEqual(one, two):
        self.assertEqual(
            TC.ConvertServiceKeysToContentUpdatesToComparable(one),
            TC.ConvertServiceKeysToContentUpdatesToComparable(two))
    file_deletion_reason = 'test delete'
    # move, second empty: only the file deletion is produced
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty,
        delete_second=True, file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE,
                                 {local_hash_empty}, reason=file_deletion_reason)]
    assertSCUEqual(result, scu)
    # move, second trashed: deletion goes to the trash service
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, trashed_media_empty,
        delete_second=True, file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.TRASH_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE,
                                 {trashed_hash_empty}, reason=file_deletion_reason)]
    assertSCUEqual(result, scu)
    # move, second already deleted: nothing to do
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, deleted_media_empty,
        delete_second=True, file_deletion_reason=file_deletion_reason)
    self.assertEqual(result, {})
    # move, both have values: tags/ratings are stripped from the second, file deleted
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, other_local_media_has_values,
        delete_second=True, file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                                 ('test tag', {other_local_hash_has_values})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                                 ('series:namespaced test tag', {other_local_hash_has_values}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (None, {other_local_hash_has_values}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (None, {other_local_hash_has_values}))]
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE,
                                 {other_local_hash_has_values}, reason=file_deletion_reason)]
    assertSCUEqual(result, scu)
    # move, first empty: values migrate from second to first, second deleted
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values,
        delete_second=True, file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('series:namespaced test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                                 ('test tag', {other_local_hash_has_values})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                                 ('series:namespaced test tag', {other_local_hash_has_values}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (1.0, {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (None, {other_local_hash_has_values}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (0.8, {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (None, {other_local_hash_has_values}))]
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE,
                                 {other_local_hash_has_values}, reason=file_deletion_reason)]
    assertSCUEqual(result, scu)
    # copy, second empty: nothing to copy
    result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty,
        file_deletion_reason=file_deletion_reason)
    self.assertEqual(result, {})
    # copy, first empty: values copied to first, second untouched
    result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values,
        file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('series:namespaced test tag', {local_hash_empty}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (1.0, {local_hash_empty}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (0.8, {local_hash_empty}))]
    assertSCUEqual(result, scu)
    # merge, second empty: values flow first -> second
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty,
        file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('series:namespaced test tag', {local_hash_empty}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (1.0, {local_hash_empty}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (0.8, {local_hash_empty}))]
    assertSCUEqual(result, scu)
    # merge, first empty: values flow second -> first
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values,
        file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('series:namespaced test tag', {local_hash_empty}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (1.0, {local_hash_empty}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (0.8, {local_hash_empty}))]
    assertSCUEqual(result, scu)
    # merge, disjoint values: tags exchanged both ways, higher ratings win
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        one_media, two_media, file_deletion_reason=file_deletion_reason)
    scu = {}
    scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('one', {two_hash})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                                 ('two', {one_hash}))]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (1.0, {two_hash}))]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                 (0.8, {two_hash}))]
    assertSCUEqual(result, scu)
def _ImportFiles(self, job_key):
    # Works through the file seed cache for this import folder, importing
    # unknown seeds until done, paused, shut down, or cancelled.
    # Returns True if any seed was processed.
    # NOTE(review): nesting of the HasHash/tagging section reconstructed from
    # a whitespace-mangled source -- confirm against upstream.
    did_work = False
    # periodically persist ourselves so progress survives a crash
    time_to_save = HydrusData.GetNow() + 600
    num_files_imported = 0
    presentation_hashes = []
    presentation_hashes_fast = set()
    i = 0
    # don't want to start at 23/100 because of carrying over failed results or whatever
    # num_to_do is num currently unknown
    num_total = self._file_seed_cache.GetFileSeedCount(CC.STATUS_UNKNOWN)
    while True:
        file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)
        # stop conditions: global/folder pause, thread shutdown, job cancel
        p1 = HG.client_controller.new_options.GetBoolean('pause_import_folders_sync') or self._paused
        p2 = HydrusThreading.IsThreadShuttingDown()
        p3 = job_key.IsCancelled()
        if file_seed is None or p1 or p2 or p3:
            break
        did_work = True
        if HydrusData.TimeHasPassed(time_to_save):
            HG.client_controller.WriteSynchronous('serialisable', self)
            time_to_save = HydrusData.GetNow() + 600
        gauge_num_done = num_files_imported + 1
        job_key.SetVariable('popup_text_1', 'importing file ' + HydrusData.ConvertValueRangeToPrettyString(gauge_num_done, num_total))
        job_key.SetVariable('popup_gauge_1', (gauge_num_done, num_total))
        path = file_seed.file_seed_data
        file_seed.ImportPath(self._file_seed_cache, self._file_import_options, limited_mimes=self._mimes)
        if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:
            hash = None
            if file_seed.HasHash():
                hash = file_seed.GetHash()
                if self._tag_import_options.HasAdditionalTags():
                    media_result = HG.client_controller.Read('media_result', hash)
                    downloaded_tags = []
                    service_keys_to_content_updates = self._tag_import_options.GetServiceKeysToContentUpdates(file_seed.status, media_result, downloaded_tags)  # additional tags
                    if len(service_keys_to_content_updates) > 0:
                        HG.client_controller.WriteSynchronous('content_updates', service_keys_to_content_updates)
                # per-service filename-derived tags
                service_keys_to_tags = ClientTags.ServiceKeysToTags()
                for (tag_service_key, filename_tagging_options) in list(self._tag_service_keys_to_filename_tagging_options.items()):
                    if not HG.client_controller.services_manager.ServiceExists(tag_service_key):
                        continue
                    try:
                        tags = filename_tagging_options.GetTags(tag_service_key, path)
                        if len(tags) > 0:
                            service_keys_to_tags[tag_service_key] = tags
                    except Exception as e:
                        # best-effort: a bad filename-tag parse should not stop the import
                        HydrusData.ShowText('Trying to parse filename tags in the import folder "' + self._name + '" threw an error!')
                        HydrusData.ShowException(e)
                if len(service_keys_to_tags) > 0:
                    service_keys_to_content_updates = ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates({hash}, service_keys_to_tags)
                    HG.client_controller.WriteSynchronous('content_updates', service_keys_to_content_updates)
            num_files_imported += 1
            if hash not in presentation_hashes_fast:
                if file_seed.ShouldPresent(self._file_import_options.GetPresentationImportOptions()):
                    presentation_hashes.append(hash)
                    presentation_hashes_fast.add(hash)
        elif file_seed.status == CC.STATUS_ERROR:
            HydrusData.Print('A file failed to import from import folder ' + self._name + ':' + path)
        i += 1
        # act on processed paths (move/delete) every ten files
        if i % 10 == 0:
            self._ActionPaths()
    if num_files_imported > 0:
        HydrusData.Print('Import folder ' + self._name + ' imported ' + HydrusData.ToHumanInt(num_files_imported) + ' files.')
        if len(presentation_hashes) > 0:
            ClientImporting.PublishPresentationHashes(self._name, presentation_hashes, self._publish_files_to_popup_button, self._publish_files_to_page)
    self._ActionPaths()
    return did_work
def GetRowsOfPresentationTextsWithNamespaces(
    self, render_for_user: bool, sibling_decoration_allowed: bool, child_rows_allowed: bool
) -> typing.List[typing.List[typing.Tuple[str, str]]]:
    """Return rows of (text, namespace) pairs, with count decorations.

    The tag text gets ' (n)', ' (+n)', ' (-n)', ' (Xn)' suffixes for current,
    pending, petitioned and deleted counts (or bare '+'/'-'/'X' markers when
    actual counts are not included); sibling and parent decorations follow.
    """
    # this should be with counts or whatever, but we need to think about this more lad
    (namespace, _subtag) = HydrusTags.SplitTag(self._tag)

    tag_text = ClientTags.RenderTag(self._tag, render_for_user)

    if self._include_actual_counts:
        count_templates = (
            (self._current_count, ' ({})'),
            (self._pending_count, ' (+{})'),
            (self._petitioned_count, ' (-{})'),
            (self._deleted_count, ' (X{})'),
        )
        for (count, template) in count_templates:
            if count > 0:
                tag_text += template.format(HydrusData.ToHumanInt(count))
    else:
        marker_suffixes = (
            (self._pending_count, ' (+)'),
            (self._petitioned_count, ' (-)'),
            (self._deleted_count, ' (X)'),
        )
        for (count, suffix) in marker_suffixes:
            if count > 0:
                tag_text += suffix

    main_row = [(tag_text, namespace)]

    if sibling_decoration_allowed and self._ideal_tag is not None:
        self._AppendIdealTagTextWithNamespace(main_row, render_for_user)

    rows = [main_row]

    if self._parent_tags is not None:
        if child_rows_allowed:
            self._AppendParentsTextWithNamespaces(rows, render_for_user)
        elif sibling_decoration_allowed:
            self._AppendParentSuffixTagTextWithNamespace(main_row)

    return rows
def SetFixedServiceKeysToTags( self, service_keys_to_tags ):
    """Store a defensive ServiceKeysToTags copy of the given mapping as this importer's fixed tags."""
    
    fixed_mapping = ClientTags.ServiceKeysToTags( service_keys_to_tags )
    
    self._fixed_service_keys_to_tags = fixed_mapping
def test_dict_to_content_updates( self ):
    """Check ConvertServiceKeysToTagsToServiceKeysToContentUpdates emits ADD updates for the local service and PEND updates for the remote one."""
    
    hash = HydrusData.GenerateKey()
    hashes = { hash }
    
    local_key = CC.DEFAULT_LOCAL_TAG_SERVICE_KEY
    remote_key = HG.test_controller.example_tag_repo_service_key
    
    def make_update( action, tag ):
        
        return HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, action, ( tag, hashes ) )
        
    
    # one tag on the local service -> a single ADD update
    
    service_keys_to_tags = ClientTags.ServiceKeysToTags( { local_key : { 'a' } } )
    
    expected_content_updates = { local_key : [ make_update( HC.CONTENT_UPDATE_ADD, 'a' ) ] }
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), expected_content_updates )
    
    # one tag on the remote service -> a single PEND update
    
    service_keys_to_tags = ClientTags.ServiceKeysToTags( { remote_key : { 'c' } } )
    
    expected_content_updates = { remote_key : [ make_update( HC.CONTENT_UPDATE_PEND, 'c' ) ] }
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), expected_content_updates )
    
    # mixed services and namespaced tags
    
    service_keys_to_tags = ClientTags.ServiceKeysToTags( { local_key : [ 'a', 'character:b' ], remote_key : [ 'c', 'series:d' ] } )
    
    expected_content_updates = {}
    
    expected_content_updates[ local_key ] = [ make_update( HC.CONTENT_UPDATE_ADD, 'a' ), make_update( HC.CONTENT_UPDATE_ADD, 'character:b' ) ]
    expected_content_updates[ remote_key ] = [ make_update( HC.CONTENT_UPDATE_PEND, 'c' ), make_update( HC.CONTENT_UPDATE_PEND, 'series:d' ) ]
    
    # sanity check that ContentUpdate objects compare by value, so the dict comparison below is meaningful
    
    self.assertEqual( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, 'c' ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, 'c' ) )
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), expected_content_updates )
def _UpdateSerialisableInfo(self, version, old_serialisable_info):
    # Serialisable version migration: each branch steps the stored tuple forward by
    # exactly one version; the serialisation framework calls this repeatedly until
    # the current version is reached. Tuple element ORDER is the on-disk contract.
    
    if version == 1:
        
        # v1 stored a flat list of ( service_key_hex, action ) pairs plus one delete flag
        (serialisable_service_actions, delete_second_file) = old_serialisable_info
        
        tag_service_actions = []
        rating_service_actions = []
        
        # As the client isn't booted when this is loaded in options, there isn't a good way to figure out tag from rating
        # So, let's just dupe and purge later on, in serialisation
        for (service_key_encoded, action) in serialisable_service_actions:
            
            service_key = bytes.fromhex(service_key_encoded)
            
            # tag services gain a fresh (permit-everything) tag filter in v2
            tag_filter = ClientTags.TagFilter()
            
            tag_service_actions.append((service_key, action, tag_filter))
            rating_service_actions.append((service_key, action))
            
        
        serialisable_tag_service_actions = [(service_key.hex(), action, tag_filter.GetSerialisableTuple()) for (service_key, action, tag_filter) in tag_service_actions]
        serialisable_rating_service_actions = [(service_key.hex(), action) for (service_key, action) in rating_service_actions]
        
        # the old single delete flag seeds the new archive-sync option
        sync_archive = delete_second_file
        delete_both_files = False
        
        new_serialisable_info = (serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files)
        
        return (2, new_serialisable_info)
        
    
    if version == 2:
        
        (serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files) = old_serialisable_info
        
        # v3 adds a url-sync option slot; None means 'not set'
        sync_urls_action = None
        
        new_serialisable_info = (serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files, sync_urls_action)
        
        return (3, new_serialisable_info)
        
    
    if version == 3:
        
        (serialisable_tag_service_actions, serialisable_rating_service_actions, delete_second_file, sync_archive, delete_both_files, sync_urls_action) = old_serialisable_info
        
        # v4 drops the two delete flags from the stored tuple
        new_serialisable_info = (serialisable_tag_service_actions, serialisable_rating_service_actions, sync_archive, sync_urls_action)
        
        return (4, new_serialisable_info)
def SetExternalAdditionalServiceKeysToTags( self, service_keys_to_tags ):
    """Store a defensive ServiceKeysToTags copy of the externally supplied additional tags."""
    
    additional_mapping = ClientTags.ServiceKeysToTags( service_keys_to_tags )
    
    self._external_additional_service_keys_to_tags = additional_mapping
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
    # Serialisable version migration for this importer: each branch upgrades the
    # stored tuple by exactly one version; the framework calls repeatedly until
    # current. Tuple element ORDER is the on-disk contract - do not reorder.
    
    if version == 1:
        
        ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_file_import_options, serialisable_tag_import_options, times_to_check, check_period, last_check_time, paused ) = old_serialisable_info
        
        # v2 replaces the raw times_to_check/check_period pair with a CheckerOptions object
        checker_options = ClientImportOptions.CheckerOptions( intended_files_per_check = 8, never_faster_than = 300, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
        
        serialisable_checker_options = checker_options.GetSerialisableTuple()
        
        # the single old pause flag seeds both of the new, separate pause flags
        files_paused = paused
        checking_paused = paused
        
        new_serialisable_info = ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused )
        
        return ( 2, new_serialisable_info )
        
    
    if version == 2:
        
        ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused ) = old_serialisable_info
        
        # v3 adds a checker status and a human-readable subject
        checking_status = ClientImporting.CHECKER_STATUS_OK
        
        subject = 'unknown subject'
        
        new_serialisable_info = ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject )
        
        return ( 3, new_serialisable_info )
        
    
    if version == 3:
        
        ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject ) = old_serialisable_info
        
        # v4 adds the delay-until-timestamp and its reason string
        no_work_until = 0
        no_work_until_reason = ''
        
        new_serialisable_info = ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason )
        
        return ( 4, new_serialisable_info )
        
    
    if version == 4:
        
        ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason ) = old_serialisable_info
        
        # v5 adds a creation timestamp; pre-existing objects are stamped 'now'
        creation_time = HydrusData.GetNow()
        
        new_serialisable_info = ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time )
        
        return ( 5, new_serialisable_info )
        
    
    if version == 5:
        
        ( url, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time ) = old_serialisable_info
        
        # v6 inserts an (initially empty) gallery seed log right after the url
        gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
        
        serialisable_gallery_seed_log = gallery_seed_log.GetSerialisableTuple()
        
        new_serialisable_info = ( url, serialisable_gallery_seed_log, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time ) 
        
        return ( 6, new_serialisable_info )
        
    
    if version == 6:
        
        ( url, serialisable_gallery_seed_log, serialisable_file_seed_cache, urls_to_filenames, urls_to_md5_base64, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time ) = old_serialisable_info
        
        # v7 drops the legacy urls_to_filenames/urls_to_md5_base64 dicts and
        # replaces them with an (initially empty) fixed service-keys-to-tags mapping
        fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()
        
        serialisable_fixed_service_keys_to_tags = fixed_service_keys_to_tags.GetSerialisableTuple()
        
        new_serialisable_info = ( url, serialisable_gallery_seed_log, serialisable_file_seed_cache, serialisable_fixed_service_keys_to_tags, serialisable_checker_options, serialisable_file_import_options, serialisable_tag_import_options, last_check_time, files_paused, checking_paused, checking_status, subject, no_work_until, no_work_until_reason, creation_time ) 
        
        return ( 7, new_serialisable_info )