def populate_indices(self):
    """Populate keywords for fast test case list searching.

    Rebuilds every denormalized search-index field on this entity from its
    current attribute values:
      - `keywords`: union of tokens from crash/fuzzer/job/platform fields.
      - `bug_indices` / `has_bug_flag`: bug-tracker search tokens.
      - `is_a_duplicate_flag`: whether this test case duplicates another.
      - `fuzzer_name_indices`: deduplicated, non-empty fuzzer names.
      - `impact_*_indices`: impacted-version tokens, or wiped when impact
        data is absent or not applicable.
    """
    self.keywords = list(
        search_tokenizer.tokenize(self.crash_state)
        | search_tokenizer.tokenize(self.crash_type)
        | search_tokenizer.tokenize(self.fuzzer_name)
        | search_tokenizer.tokenize(self.overridden_fuzzer_name)
        | search_tokenizer.tokenize(self.job_type)
        | search_tokenizer.tokenize(self.platform_id))

    self.bug_indices = search_tokenizer.tokenize_bug_information(self)
    self.has_bug_flag = bool(self.bug_indices)
    self.is_a_duplicate_flag = bool(self.duplicate_of)
    # Set literal dedupes the two names; the comprehension drops empty/None.
    self.fuzzer_name_indices = [
        name for name in {self.fuzzer_name, self.overridden_fuzzer_name}
        if name
    ]

    # If the impact task hasn't been run (aka is_impact_set_flag=False) OR
    # if impact isn't applicable (aka has_impacts() is False), we wipe all
    # the impact fields' indices.
    if self.has_impacts() and self.is_impact_set_flag:
        self.impact_stable_version_indices = (
            search_tokenizer.tokenize_impact_version(
                self.impact_stable_version))
        self.impact_beta_version_indices = (
            search_tokenizer.tokenize_impact_version(
                self.impact_beta_version))
        self.impact_version_indices = list(
            set(self.impact_stable_version_indices +
                self.impact_beta_version_indices))

        # Channel keywords let users search by release channel directly.
        if self.impact_beta_version:
            self.impact_version_indices.append('beta')
        if self.impact_stable_version:
            self.impact_version_indices.append('stable')
        if not self.impacts_production():
            self.impact_version_indices.append('head')
    else:
        self.impact_version_indices = []
        self.impact_stable_version_indices = []
        self.impact_beta_version_indices = []
def populate_indices(self):
    """Populate keywords for fast job searching."""
    # Merge token sets from both searchable fields into one keyword list.
    combined = set()
    for field_value in (self.bot_name, self.task_payload):
        combined |= search_tokenizer.tokenize(field_value)
    self.keywords = list(combined)
def populate_indices(self):
    """Populate keywords for fast job searching."""
    # Keywords are the union of tokens from the name and project fields.
    name_tokens = search_tokenizer.tokenize(self.name)
    project_tokens = search_tokenizer.tokenize(self.project)
    self.keywords = list(name_tokens.union(project_tokens))
def test_non_ascii(self):
    """Test tokenizing a string containing non-ASCII characters."""
    value = 'IsString ¿ÓÞΤ utf'
    expected = {
        'is',
        'string',
        'utf',
        'isstring',
        'isstring utf',
        'string utf',
        value.lower(),
    }
    self.assertSetEqual(expected, search_tokenizer.tokenize(value))
def test_non_string(self):
    """Test non string."""
    # Non-string inputs are stringified before tokenizing; None yields nothing.
    cases = [
        (123, {'123'}),
        (True, {'true'}),
        (None, set()),
    ]
    for value, expected in cases:
        self.assertSetEqual(expected, search_tokenizer.tokenize(value))
def test_empty(self):
    """Tokenizing an empty string yields no tokens."""
    result = search_tokenizer.tokenize('')
    self.assertSetEqual(set(), result)