Code example #1
 def test_non_ascii(self):
     """Test a string containing non-ASCII characters."""
     s = 'IsString ¿ÓÞ·ž¤ utf'
     self.assertSetEqual(
         set([
             'is', 'string', 'utf', 'isstring', 'isstring utf',
             'string utf',
             s.lower()
         ]), search_tokenizer.tokenize(s))
Code example #2
File: data_types.py  Project: google/clusterfuzz
    def populate_indices(self):
        """Populate keywords for fast test case list searching."""
        self.keywords = list(
            search_tokenizer.tokenize(self.crash_state)
            | search_tokenizer.tokenize(self.crash_type)
            | search_tokenizer.tokenize(self.fuzzer_name)
            | search_tokenizer.tokenize(self.overridden_fuzzer_name)
            | search_tokenizer.tokenize(self.job_type)
            | search_tokenizer.tokenize(self.platform_id))

        self.bug_indices = search_tokenizer.tokenize_bug_information(self)
        self.has_bug_flag = bool(self.bug_indices)
        self.is_a_duplicate_flag = bool(self.duplicate_of)
        fuzzer_name_indices = list(
            set([self.fuzzer_name, self.overridden_fuzzer_name]))
        self.fuzzer_name_indices = [f for f in fuzzer_name_indices if f]

        # If the impact task hasn't been run (aka is_impact_set_flag=False) OR
        # if impact isn't applicable (aka has_impacts() is False), we wipe all
        # the impact fields' indices.
        if self.has_impacts() and self.is_impact_set_flag:
            self.impact_extended_stable_version_indices = (
                search_tokenizer.tokenize_impact_version(
                    self.impact_extended_stable_version))
            self.impact_stable_version_indices = (
                search_tokenizer.tokenize_impact_version(
                    self.impact_stable_version))
            self.impact_beta_version_indices = (
                search_tokenizer.tokenize_impact_version(
                    self.impact_beta_version))
            self.impact_head_version_indices = (
                search_tokenizer.tokenize_impact_version(
                    self.impact_head_version))
            self.impact_version_indices = list(
                set(self.impact_extended_stable_version_indices +
                    self.impact_stable_version_indices +
                    self.impact_head_version_indices +
                    self.impact_beta_version_indices))
            if self.impact_extended_stable_version:
                self.impact_version_indices.append('extended_stable')
            if self.impact_beta_version:
                self.impact_version_indices.append('beta')
            if self.impact_stable_version:
                self.impact_version_indices.append('stable')
            if not self.impacts_production():
                self.impact_version_indices.append('head')
        else:
            self.impact_version_indices = []
            self.impact_extended_stable_version_indices = []
            self.impact_stable_version_indices = []
            self.impact_beta_version_indices = []
            self.impact_head_version_indices = []
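
The method above can merge the per-field results with the | operator because search_tokenizer.tokenize() returns a set (the tests in this listing compare its output with assertSetEqual). The following is a minimal sketch of that pattern on its own, not code taken from ClusterFuzz: the field values are invented, and the import path is an assumption based on where data_types.py lives in the repository.

    # Minimal sketch, not taken from ClusterFuzz: merge token sets from a few
    # fields into one de-duplicated keyword list, as populate_indices() does.
    # The import path below is an assumption; adjust it to match your checkout.
    from clusterfuzz._internal.datastore import search_tokenizer

    crash_state = 'track 1 fast;'            # invented example values
    fuzzer_name = 'libFuzzer_test_fuzzer'

    keywords = list(
        search_tokenizer.tokenize(crash_state)
        | search_tokenizer.tokenize(fuzzer_name))
    print(sorted(keywords))
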
Code example #3
 def test_real_example(self):
     """Test real example."""
     crash_states = '\n'.join([
         'track 1 fast;',
         'android.media.MediaCodec.native_setup',
     ])
     expected = set([
         'track',
         '1',
         'fast',
         'android',
         'media',
         'codec',
         'native',
         'setup',
         'track 1',
         'track 1 fast',
         'track 1 fast;',
         '1 fast',
         '1 fast;',
         'fast;',
         'android.media',
         'android.media.media',
         'android.media.mediacodec',
         'android.media.mediacodec.native',
         'android.media.mediacodec.native_setup',
         'media.media',
         'media.mediacodec',
         'media.mediacodec.native',
         'media.mediacodec.native_setup',
         'mediacodec',
         'mediacodec.native',
         'mediacodec.native_setup',
         'codec.native',
         'codec.native_setup',
         'native_setup',
     ])
     self.assertSetEqual(expected, search_tokenizer.tokenize(crash_states))
Code example #4
File: data_types.py  Project: google/clusterfuzz
 def populate_indices(self):
     """Populate keywords for fast job searching."""
     self.keywords = list(
         search_tokenizer.tokenize(self.name)
         | search_tokenizer.tokenize(self.project))
Code example #5
File: data_types.py  Project: google/clusterfuzz
 def populate_indices(self):
     """Populate keywords for fast job searching."""
     self.keywords = list(
         search_tokenizer.tokenize(self.bot_name)
         | search_tokenizer.tokenize(self.task_payload))
Code example #6
 def test_non_string(self):
     """Test non string."""
     self.assertSetEqual(set(['123']), search_tokenizer.tokenize(123))
     self.assertSetEqual(set(['true']), search_tokenizer.tokenize(True))
     self.assertSetEqual(set([]), search_tokenizer.tokenize(None))
Code example #7
 def test_empty(self):
     """Test empty string."""
     self.assertSetEqual(set(), search_tokenizer.tokenize(''))
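
The docstrings above describe these keyword lists as indices for fast searching, but the query side is not part of this listing. The sketch below is a hypothetical illustration, not a ClusterFuzz API: it tokenizes the query with the same tokenizer and requires every query token to appear among the stored keywords. matches_query is an invented name, and the import path is the same assumption as in the earlier sketch.

    # Hypothetical illustration only, not ClusterFuzz code. Match a user query
    # against keywords produced by populate_indices() by tokenizing the query
    # the same way and checking that every query token was indexed.
    from clusterfuzz._internal.datastore import search_tokenizer  # path is an assumption

    def matches_query(keywords, query):
        query_tokens = search_tokenizer.tokenize(query)  # returns a set of tokens
        return query_tokens.issubset(set(keywords))

    # Under this rule, the query 'mediacodec' would match a test case whose
    # crash state was tokenized as in code example #3.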