def _ParseSearchText(self):
    """Parse the current text-ctrl contents into search components.

    Returns ( search_text, cache_text, entry_predicate, sibling_predicate ).
    cache_text is None when the search text is a complex wildcard;
    sibling_predicate is None when the tag has no registered sibling.
    """
    raw_entry = self._text_ctrl.GetValue()
    tag = HydrusTags.CleanTag(raw_entry)
    search_text = ClientSearch.ConvertEntryTextToSearchText(raw_entry)
    # a complex wildcard cannot use the prefix cache; otherwise take off the
    # trailing '*' for the cache text
    cache_text = None if ClientSearch.IsComplexWildcard(search_text) else search_text[:-1]
    entry_predicate = ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, tag)
    sibling = HG.client_controller.GetManager('tag_siblings').GetSibling(self._tag_service_key, tag)
    sibling_predicate = ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, sibling) if sibling is not None else None
    return (search_text, cache_text, entry_predicate, sibling_predicate)
def test_basics(self):
    """Round-trip basic SerialisableDictionary and SerialisableBytesDictionary instances."""
    def compare_items(obj, dupe_obj):
        # the dupe must hold exactly the same key/value pairs as the original
        self.assertEqual(len(obj.items()), len(dupe_obj.items()))
        for (key, value) in obj.items():
            self.assertEqual(value, dupe_obj[key])
    #
    serialisable_dict = HydrusSerialisable.SerialisableDictionary()
    serialisable_dict[1] = 2
    serialisable_dict[3] = 'test1'
    serialisable_dict['test2'] = 4
    serialisable_dict['test3'] = 5
    # nested serialisable values must survive the round trip too
    serialisable_dict[6] = HydrusSerialisable.SerialisableDictionary({i: 'test' + str(i) for i in range(20)})
    serialisable_dict[ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, 'test pred 1')] = 56
    serialisable_dict[ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, 'test pred 2')] = HydrusSerialisable.SerialisableList([ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, 'test' + str(i)) for i in range(10)])
    self.assertEqual(len(serialisable_dict.keys()), 7)
    for (key, value) in serialisable_dict.items():
        self.assertEqual(serialisable_dict[key], value)
    self._dump_and_load_and_test(serialisable_dict, compare_items)
    #
    bytes_dict = HydrusSerialisable.SerialisableBytesDictionary()
    bytes_dict[HydrusData.GenerateKey()] = HydrusData.GenerateKey()
    bytes_dict[HydrusData.GenerateKey()] = [HydrusData.GenerateKey() for i in range(10)]
    bytes_dict[1] = HydrusData.GenerateKey()
    bytes_dict[2] = [HydrusData.GenerateKey() for i in range(10)]
    self.assertEqual(len(bytes_dict.keys()), 4)
    for (key, value) in bytes_dict.items():
        self.assertEqual(bytes_dict[key], value)
    self._dump_and_load_and_test(bytes_dict, compare_items)
def _ParseSearchText(self):
    """Parse the text-ctrl contents into autocomplete search components.

    Returns ( inclusive, search_text, explicit_wildcard, cache_text, entry_predicate ).
    A leading '-' marks the search as exclusive. cache_text is None for
    explicit wildcards, otherwise the search text minus its trailing '*'.
    """
    raw_entry = self._text_ctrl.GetValue()
    inclusive = not raw_entry.startswith('-')
    entry_text = raw_entry if inclusive else raw_entry[1:]
    tag = HydrusTags.CleanTag(entry_text)
    explicit_wildcard = '*' in entry_text
    search_text = ClientSearch.ConvertEntryTextToSearchText(entry_text)
    if explicit_wildcard:
        cache_text = None
        entry_predicate = ClientSearch.Predicate(HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive)
    else:
        # take off the trailing '*' for the cache text
        cache_text = search_text[:-1]
        sibling = HydrusGlobals.client_controller.GetManager('tag_siblings').GetSibling(self._tag_service_key, tag)
        # prefer the sibling form of the tag when one exists
        predicate_tag = tag if sibling is None else sibling
        entry_predicate = ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, predicate_tag, inclusive)
    return (inclusive, search_text, explicit_wildcard, cache_text, entry_predicate)
def GetPredicates(self):
    """Return a single-element tuple holding this object's predicate, built from GetInfo()."""
    return (ClientSearch.Predicate(self.PREDICATE_TYPE, self.GetInfo()),)
def GetPredicates(self):
    """Return a list with one predicate per info item from GetInfo()."""
    predicates = []
    for info in self.GetInfo():
        predicates.append(ClientSearch.Predicate(self.PREDICATE_TYPE, info))
    return predicates
def _GenerateMatches(self):
    """Build the list of autocomplete predicate matches for the current input.

    Parses the current search text, then either serves the cached system
    predicates (trivial input), reads autocomplete predicates from the db,
    or counts tags directly from the in-view media. The entry predicate,
    when present in the results, is promoted to the front of the list.

    Fix over the original: the bare ``except:`` guarding the entry-predicate
    promotion is narrowed to ``except ValueError``, which is what
    ``list.index`` raises when the element is absent; the bare form also
    hid unrelated errors.

    Returns a list of ClientSearch.Predicate.
    """
    self._next_updatelist_is_probably_fast = False
    num_autocomplete_chars = HC.options['num_autocomplete_chars']
    (inclusive, search_text, explicit_wildcard, cache_text, entry_predicate) = self._ParseSearchText()
    if search_text in ('', ':', '*'):
        # trivial input--only refresh the system predicate cache when the
        # input just changed, the db is idle, or we have never fetched
        input_just_changed = self._cache_text is not None
        db_not_going_to_hang_if_we_hit_it = not HG.client_controller.DBCurrentlyDoingJob()
        if input_just_changed or db_not_going_to_hang_if_we_hit_it or not self._initial_matches_fetched:
            self._cache_text = None
            if self._file_service_key == CC.COMBINED_FILE_SERVICE_KEY:
                search_service_key = self._tag_service_key
            else:
                search_service_key = self._file_service_key
            self._cached_results = HG.client_controller.Read('file_system_predicates', search_service_key)
        matches = self._cached_results
    else:
        (namespace, half_complete_subtag) = HydrusTags.SplitTag(search_text)
        siblings_manager = HG.client_controller.GetManager('tag_siblings')
        # NOTE: this branch is deliberately disabled by the 'False and'
        if False and half_complete_subtag == '':
            self._cache_text = None
            matches = []  # a query like 'namespace:'
        else:
            fetch_from_db = True
            if self._media_callable is not None:
                media = self._media_callable()
                # when synchronised, we can count tags from the in-view media instead of hitting the db
                can_fetch_from_media = media is not None and len(media) > 0
                if can_fetch_from_media and self._synchronised.IsOn():
                    fetch_from_db = False
            if fetch_from_db:
                # if user searches 'blah', then we include 'blah (23)' for 'series:blah (10)', 'blah (13)'
                # if they search for 'series:blah', then we don't!
                add_namespaceless = ':' not in namespace
                include_current = self._file_search_context.IncludeCurrentTags()
                include_pending = self._file_search_context.IncludePendingTags()
                small_and_specific_search = cache_text is not None and len(cache_text) < num_autocomplete_chars
                if small_and_specific_search:
                    # too short to be worth caching--do an exact-match read every time
                    predicates = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=cache_text, exact_match=True, inclusive=inclusive, include_current=include_current, include_pending=include_pending, add_namespaceless=add_namespaceless, collapse_siblings=True)
                else:
                    # reuse the cached read as long as the new text extends the cached prefix
                    cache_invalid_for_this_search = cache_text is None or self._cache_text is None or not cache_text.startswith(self._cache_text)
                    if cache_invalid_for_this_search:
                        self._cache_text = cache_text
                        self._cached_results = HG.client_controller.Read('autocomplete_predicates', file_service_key=self._file_service_key, tag_service_key=self._tag_service_key, search_text=search_text, inclusive=inclusive, include_current=include_current, include_pending=include_pending, add_namespaceless=add_namespaceless, collapse_siblings=True)
                    predicates = self._cached_results
                    self._next_updatelist_is_probably_fast = True
            else:
                # it is possible that media will change between calls to this, so don't cache it
                # it's also quick as hell, so who cares
                tags_managers = []
                for m in media:
                    if m.IsCollection():
                        tags_managers.extend(m.GetSingletonsTagsManagers())
                    else:
                        tags_managers.append(m.GetTagsManager())
                tags_to_do = set()
                current_tags_to_count = collections.Counter()
                pending_tags_to_count = collections.Counter()
                if self._file_search_context.IncludeCurrentTags():
                    lists_of_current_tags = [list(tags_manager.GetCurrent(self._tag_service_key)) for tags_manager in tags_managers]
                    current_tags_flat_iterable = itertools.chain.from_iterable(lists_of_current_tags)
                    current_tags_flat = ClientSearch.FilterTagsBySearchText(self._tag_service_key, search_text, current_tags_flat_iterable)
                    current_tags_to_count.update(current_tags_flat)
                    tags_to_do.update(current_tags_to_count.keys())
                if self._file_search_context.IncludePendingTags():
                    lists_of_pending_tags = [list(tags_manager.GetPending(self._tag_service_key)) for tags_manager in tags_managers]
                    pending_tags_flat_iterable = itertools.chain.from_iterable(lists_of_pending_tags)
                    pending_tags_flat = ClientSearch.FilterTagsBySearchText(self._tag_service_key, search_text, pending_tags_flat_iterable)
                    pending_tags_to_count.update(pending_tags_flat)
                    tags_to_do.update(pending_tags_to_count.keys())
                predicates = [ClientSearch.Predicate(HC.PREDICATE_TYPE_TAG, tag, inclusive, current_tags_to_count[tag], pending_tags_to_count[tag]) for tag in tags_to_do]
                if self._tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
                    predicates = siblings_manager.CollapsePredicates(self._tag_service_key, predicates)
                if namespace == '':
                    predicates = ClientData.MergePredicates(predicates, add_namespaceless=True)
                self._next_updatelist_is_probably_fast = True
            matches = ClientSearch.FilterPredicatesBySearchText(self._tag_service_key, search_text, predicates)
            matches = ClientSearch.SortPredicates(matches)
        if self._include_unusual_predicate_types:
            if explicit_wildcard:
                matches.insert(0, ClientSearch.Predicate(HC.PREDICATE_TYPE_WILDCARD, search_text, inclusive))
            else:
                if namespace != '' and half_complete_subtag in ('', '*'):
                    matches.insert(0, ClientSearch.Predicate(HC.PREDICATE_TYPE_NAMESPACE, namespace, inclusive))
        # results from cache/db may carry the opposite inclusivity--normalise them
        for match in matches:
            if match.GetInclusive() != inclusive:
                match.SetInclusive(inclusive)
        # promote the exact entry predicate, if present, to the top of the list
        try:
            index = matches.index(entry_predicate)
            predicate = matches[index]
            del matches[index]
            matches.insert(0, predicate)
        except ValueError:
            # entry predicate not among the matches--nothing to promote
            pass
    return matches