  def test_real_example(self):
    """Test real example."""
    crash_state = 'void WTF::Vector<blink::Member, 64ul'
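    # _complex_tokenize lowercases the input and returns every contiguous
    # run of tokens up to the given length limit (6 here), keeping the
    # original delimiters between tokens.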
    expected = set([
        'void',
        'void wtf',
        'void wtf::vector',
        'void wtf::vector<blink',
        'void wtf::vector<blink::member',
        'void wtf::vector<blink::member, 64ul',
        'wtf',
        'wtf::vector',
        'wtf::vector<blink',
        'wtf::vector<blink::member',
        'wtf::vector<blink::member, 64ul',
        'vector',
        'vector<blink',
        'vector<blink::member',
        'vector<blink::member, 64ul',
        'blink',
        'blink::member',
        'blink::member, 64ul',
        'member',
        'member, 64ul',
        '64ul',
    ])
    self.assertSetEqual(expected,
                        search_tokenizer._complex_tokenize(crash_state, 6))

  def test_multiple_tokens(self):
    """Test multiple tokens."""
    self.assertSetEqual(
        set([
            'abcd', 'abcd::edfg', 'abcd::edfghijk', 'edfg', 'edfghijk',
            'hijk'
        ]), search_tokenizer._complex_tokenize('abcd::edfgHijk', 3))

  def test_multiple_tokens_with_empty_tokens(self):
    """Test multiple tokens with empty tokens."""
    self.assertSetEqual(
        set([
            '::abcd', '::abcd::edfg', '::abcd::edfghijk',
            '::abcd::edfghijk::', 'abcd', 'abcd::edfg', 'abcd::edfghijk',
            'abcd::edfghijk::', 'edfg', 'edfghijk', 'hijk', 'edfghijk::',
            'hijk::'
        ]), search_tokenizer._complex_tokenize('::abcd::edfgHijk::', 5))

  def test_exceed_limit(self):
    """Test exceeding limit."""
    crash_state = 'a:b:c'
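    # With a limit of 2, the full three-token run 'a:b:c' is excluded.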
    expected = set(['a', 'b', 'c', 'a:b', 'b:c'])
    self.assertSetEqual(expected,
                        search_tokenizer._complex_tokenize(crash_state, 2))

  def test_duplicate(self):
    """Test duplicate tokens."""
    crash_state = 'a:b:a:b'
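    # The result is a set, so the repeated 'a:b' run is collapsed.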
    expected = set(['a', 'b', 'a:b', 'a:b:a', 'a:b:a:b', 'b:a', 'b:a:b'])
    self.assertSetEqual(expected,
                        search_tokenizer._complex_tokenize(crash_state, 4))

  def test_one_token(self):
    """Test one token."""
    self.assertSetEqual(set(['abcd']),
                        search_tokenizer._complex_tokenize('abcd', 3))

  def test_empty(self):
    """Test empty string."""
    self.assertSetEqual(set(), search_tokenizer._complex_tokenize('', 3))