def _FetchRelatedTags(self, max_time_to_take):
    
    # this panel works on a single media, so unpack it directly
    (m,) = self._media
    
    hash = m.GetHash()
    
    (current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count) = ClientData.GetMediasTagCount(self._media, tag_service_key=self._service_key, collapse_siblings=False)
    
    # only current and pending tags feed the related-tags search; the deleted and
    # petitioned counts are fetched but not used here
    tags_to_count = collections.Counter()
    
    tags_to_count.update(current_tags_to_count)
    tags_to_count.update(pending_tags_to_count)
    
    search_tags = set(tags_to_count.keys())
    
    max_results = 100
    
    # synchronous db read; max_time_to_take bounds how long the db spends on the search
    predicates = HydrusGlobals.client_controller.Read('related_tags', self._service_key, hash, search_tags, max_results, max_time_to_take)
    
    predicates = ClientSearch.SortPredicates(predicates)
    
    self._related_tags.SetPredicates(predicates)
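# A minimal, self-contained sketch of the tag-count merge above, with made-up
# example counts standing in for ClientData.GetMediasTagCount's output. It only
# shows that Counter.update sums the current and pending counts per tag and that
# the search set is the union of both key sets.

import collections

current_tags_to_count = collections.Counter({'samus aran': 3, 'series:metroid': 3})
pending_tags_to_count = collections.Counter({'samus aran': 1, 'creator:example': 2})

tags_to_count = collections.Counter()

tags_to_count.update(current_tags_to_count)
tags_to_count.update(pending_tags_to_count)

search_tags = set(tags_to_count.keys())

print(tags_to_count)  # Counter({'samus aran': 4, 'series:metroid': 3, 'creator:example': 2})
print(search_tags)    # e.g. {'samus aran', 'series:metroid', 'creator:example'} (set order varies)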
def do_it(service_key):
    
    # runs off the gui thread: do the db read and the sort here, then hand the
    # finished predicates back to wx
    
    def wx_code():
        
        # the widget may have been destroyed while the read was running
        if not self:
            
            return
        
        self._related_tags.SetPredicates(predicates)
    
    predicates = HG.client_controller.Read('related_tags', service_key, hash, search_tags, max_results, max_time_to_take)
    
    predicates = ClientSearch.SortPredicates(predicates)
    
    # wx_code closes over 'predicates', which is bound by the time it is scheduled
    wx.CallAfter(wx_code)
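# How do_it gets dispatched is not shown in this excerpt; the shape of the code
# (all the heavy work inside do_it, only SetPredicates marshalled back through
# wx.CallAfter) implies it is handed to a worker thread. A plain-threading
# stand-in for that dispatch, offered as an assumption rather than the client's
# actual call:

import threading

def start_background_fetch(do_it, service_key):
    
    # daemon thread so a slow db read cannot keep the process alive on exit
    threading.Thread(target=do_it, args=(service_key,), daemon=True).start()

# usage sketch: start_background_fetch(do_it, self._service_key)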
def _GenerateMatches(self):
    
    self._next_updatelist_is_probably_fast = False
    
    num_autocomplete_chars = HC.options['num_autocomplete_chars']
    
    (inclusive, search_text, explicit_wildcard, cache_text, entry_predicate) = self._ParseSearchText()
    
    if search_text in ('', ':', '*'):
        
        # nothing useful typed yet: show the system predicates instead of tag matches
        
        input_just_changed = self._cache_text is not None
        
        db_not_going_to_hang_if_we_hit_it = not HG.client_controller.DBCurrentlyDoingJob()
        
        if input_just_changed or db_not_going_to_hang_if_we_hit_it or not self._initial_matches_fetched:
            
            self._cache_text = None
            
            if self._file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
                
                search_service_key = self._tag_service_key
                
            else:
                
                search_service_key = self._file_service_key
            
            self._cached_results = HG.client_controller.Read('file_system_predicates', search_service_key)
        
        matches = self._cached_results
        
    else:
        
        (namespace, half_complete_subtag) = HydrusTags.SplitTag(search_text)
        
        siblings_manager = HG.client_controller.GetManager('tag_siblings')
        
        if False and half_complete_subtag == '':  # this branch is currently switched off by the leading False
            
            self._cache_text = None
            
            matches = []  # a query like 'namespace:'
            
        else:
            
            # prefer the db unless we are synchronised to a live media set we can count directly
            
            fetch_from_db = True
            
            if self._media_callable is not None:
                
                media = self._media_callable()
                
                can_fetch_from_media = media is not None and len(media) > 0
                
                if can_fetch_from_media and self._synchronised.IsOn():
                    
                    fetch_from_db = False
            
            if fetch_from_db:
                
                # if user searches 'blah', then we include 'blah (23)' for 'series:blah (10)', 'blah (13)'
                # if they search for 'series:blah', then we don't!
                
                add_namespaceless = ':' not in namespace
                
                include_current = self._file_search_context.IncludeCurrentTags()
                include_pending = self._file_search_context.IncludePendingTags()
                
                small_and_specific_search = cache_text is not None and len(cache_text) < num_autocomplete_chars
                
                if small_and_specific_search:
                    
                    # below the autocomplete character threshold, so only do a cheap exact-match lookup
                    predicates = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=cache_text, exact_match=True, inclusive=inclusive, include_current=include_current, include_pending=include_pending, add_namespaceless=add_namespaceless, collapse_siblings=True)
                    
                else:
                    
                    # reuse the cached results while the new text only extends the old one
                    cache_invalid_for_this_search = cache_text is None or self._cache_text is None or not cache_text.startswith(self._cache_text)
                    
                    if cache_invalid_for_this_search:
                        
                        self._cache_text = cache_text
                        
                        self._cached_results = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=search_text, inclusive=inclusive, include_current=include_current, include_pending=include_pending, add_namespaceless=add_namespaceless, collapse_siblings=True)
                    
                    predicates = self._cached_results
                    
                    self._next_updatelist_is_probably_fast = True
                
            else:
                
                # it is possible that media will change between calls to this, so don't cache it
                # it's also quick as hell, so who cares
                
                tags_managers = []
                
                for m in media:
                    
                    if m.IsCollection():
                        
                        tags_managers.extend(m.GetSingletonsTagsManagers())
                        
                    else:
                        
                        tags_managers.append(m.GetTagsManager())
                
                tags_to_do = set()
                
                current_tags_to_count = collections.Counter()
                pending_tags_to_count = collections.Counter()
                
                if self._file_search_context.IncludeCurrentTags():
                    
                    lists_of_current_tags = [list(tags_manager.GetCurrent(self._tag_service_key)) for tags_manager in tags_managers]
                    
                    current_tags_flat_iterable = itertools.chain.from_iterable(lists_of_current_tags)
                    
                    current_tags_flat = ClientSearch.FilterTagsBySearchText(self._tag_service_key, search_text, current_tags_flat_iterable)
                    
                    current_tags_to_count.update(current_tags_flat)
                    
                    tags_to_do.update(current_tags_to_count.keys())
                
                if self._file_search_context.IncludePendingTags():
                    
                    lists_of_pending_tags = [list(tags_manager.GetPending(self._tag_service_key)) for tags_manager in tags_managers]
                    
                    pending_tags_flat_iterable = itertools.chain.from_iterable(lists_of_pending_tags)
                    
                    pending_tags_flat = ClientSearch.FilterTagsBySearchText(self._tag_service_key, search_text, pending_tags_flat_iterable)
                    
                    pending_tags_to_count.update(pending_tags_flat)
                    
                    tags_to_do.update(pending_tags_to_count.keys())
                
                predicates = [ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, tag, inclusive, current_tags_to_count[tag], pending_tags_to_count[tag]) for tag in tags_to_do]
                
                if self._tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
                    
                    predicates = siblings_manager.CollapsePredicates(self._tag_service_key, predicates)
                
                if namespace == '':
                    
                    predicates = ClientData.MergePredicates(predicates, add_namespaceless=True)
                
                self._next_updatelist_is_probably_fast = True
            
            matches = ClientSearch.FilterPredicatesBySearchText(self._tag_service_key, search_text, predicates)
            
            matches = ClientSearch.SortPredicates(matches)
        
        if self._include_unusual_predicate_types:
            
            if explicit_wildcard:
                
                matches.insert(0, ClientSearch.Predicate(HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive))
                
            else:
                
                if namespace != '' and half_complete_subtag in ('', '*'):
                    
                    matches.insert(0, ClientSearch.Predicate(HC.PREDICATE_TYPE_NAMESPACE, namespace, inclusive))
        
        # make every match agree with the search's inclusive (+/-) state
        for match in matches:
            
            if match.GetInclusive() != inclusive:
                
                match.SetInclusive(inclusive)
    
    # if the exactly-entered predicate is among the matches, move it to the top
    try:
        
        index = matches.index(entry_predicate)
        
        predicate = matches[index]
        
        del matches[index]
        
        matches.insert(0, predicate)
        
    except:
        
        pass
    
    return matches
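# Both _GenerateMatches variants reuse their last db results whenever the new
# cache text merely extends the previous one: every tag matching the longer
# prefix also matches the shorter one, so the cached predicates are a superset
# that can be re-filtered locally instead of hitting the db again. A standalone
# restatement of that test, using illustrative names rather than the client's:

def cache_still_valid(old_cache_text, new_cache_text):
    
    # only reuse the cache when both texts exist and the new one narrows the old one
    return (
        old_cache_text is not None
        and new_cache_text is not None
        and new_cache_text.startswith(old_cache_text)
    )

print(cache_still_valid('sam', 'samu'))  # True: filter the cached predicates client-side
print(cache_still_valid('sam', 'ser'))   # False: a fresh 'autocomplete_predicates' read is needed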
def _GenerateMatches(self):
    
    self._next_updatelist_is_probably_fast = False
    
    num_autocomplete_chars = HC.options['num_autocomplete_chars']
    
    (search_text, cache_text, entry_predicate, sibling_predicate) = self._ParseSearchText()
    
    if search_text in ('', ':', '*'):
        
        # nothing useful typed yet
        
        self._cache_text = None
        
        matches = []
        
    else:
        
        must_do_a_search = False
        
        small_and_specific_search = cache_text is not None and len(cache_text) < num_autocomplete_chars
        
        if small_and_specific_search:
            
            # below the autocomplete character threshold, so only do a cheap exact-match lookup
            predicates = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=cache_text, exact_match=True, add_namespaceless=False, collapse_siblings=False)
            
        else:
            
            # reuse the cached results while the new text only extends the old one
            cache_invalid_for_this_search = cache_text is None or self._cache_text is None or not cache_text.startswith(self._cache_text)
            
            if must_do_a_search or cache_invalid_for_this_search:
                
                self._cache_text = cache_text
                
                self._cached_results = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=search_text, add_namespaceless=False, collapse_siblings=False)
            
            predicates = self._cached_results
            
            self._next_updatelist_is_probably_fast = True
        
        matches = ClientSearch.FilterPredicatesBySearchText(self._tag_service_key, search_text, predicates)
        
        matches = ClientSearch.SortPredicates(matches)
        
        # the exactly-entered predicate (and its sibling, if there is one) go to the top
        self._PutAtTopOfMatches(matches, entry_predicate)
        
        if sibling_predicate is not None:
            
            self._PutAtTopOfMatches(matches, sibling_predicate)
        
        if self._expand_parents:
            
            # expand the matches with their tag parents
            parents_manager = HG.client_controller.GetManager('tag_parents')
            
            matches = parents_manager.ExpandPredicates(self._tag_service_key, matches)
    
    return matches
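# _PutAtTopOfMatches is called above but not included in this excerpt. Judging
# from the inline try/except block at the end of the earlier _GenerateMatches,
# it presumably moves an already-present predicate to the front of the list and
# does nothing when the predicate is absent; a sketch under that assumption:

def _PutAtTopOfMatches(self, matches, predicate):
    
    try:
        
        index = matches.index(predicate)
        
    except ValueError:
        
        # as in the inline version: if the predicate is not among the matches, leave them be
        return
    
    # keep the instance already in the list (it carries the real counts); just move it
    existing_predicate = matches[index]
    
    del matches[index]
    
    matches.insert(0, existing_predicate)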