Example #1
0
 def test_tokenize(self):
     """Tokenizing "foo && bar" yields name, operator, name tokens in order."""
     want = [
         Token(kind, text)
         for kind, text in (("Name", "foo"), ("Operator", "&&"), ("Name", "bar"))
     ]
     self.assertEqual(self.tokenizer.tokenize_string("foo && bar"), want)
Example #2
0
 def transform_raw_token(self, raw_token: RawToken) -> Token:
     """Convert a raw (type, value) pair into a Token.

     The token type's "Token." prefix is stripped from its string form,
     and the value is dropped when ``self.include_values`` is false.
     """
     kind, value = raw_token
     kind = str(kind).replace("Token.", "")
     # Preserve the value only when this transformer is configured to.
     return Token(kind, value if self.include_values else None)
 def test_as_dict(self):
     """as_dict emits a "value" key only when the token carries a value."""
     valueless = Token("foo")
     valued = Token("foo", "bar")
     self.assertEqual(valueless.as_dict(), {"type": "foo"})
     self.assertEqual(valued.as_dict(), {"type": "foo", "value": "bar"})
 def test_repr(self):
     """str() renders a constructor-style representation of the token."""
     cases = (
         (Token("foo"), "Token(type='foo')"),
         (Token("foo", "bar"), "Token(type='foo', value='bar')"),
     )
     for token, rendered in cases:
         self.assertEqual(str(token), rendered)
 def test_hash(self):
     """Structurally equal tokens hash alike; a value changes the hash."""
     plain = Token("foo")
     valued = Token("foo", "bar")
     self.assertEqual(hash(plain), hash(Token("foo")))
     self.assertEqual(hash(valued), hash(Token("foo", "bar")))
     self.assertNotEqual(hash(valued), hash(plain))
 def test_eq(self):
     """Tokens compare equal by (type, value); a value breaks equality."""
     plain = Token("foo")
     valued = Token("foo", "bar")
     self.assertEqual(plain, Token("foo"))
     self.assertEqual(valued, Token("foo", "bar"))
     self.assertNotEqual(valued, plain)
Example #7
0
 def test_transform_and_token(self):
     """The '&&' operator is transformed into an Operator token."""
     actual = self._get_token("&& bar")
     self.assertEqual(actual, Token("Operator", "&&"))
Example #8
0
 def test_transform_normal_token(self):
     """A leading 'this' keyword is transformed into a Keyword token."""
     actual = self._get_token("this.foo")
     self.assertEqual(actual, Token("Keyword", "this"))