Example #1
0
 def test_parse_closer_error(self, tokenizer, formula, offset, opener):
     """A closing bracket that does not match the opener on the stack
     must raise TokenizerError."""
     instance = tokenizer.Tokenizer(formula)
     instance.items.clear()
     instance.offset = offset
     opening = tokenizer.Token(*opener)
     instance.token_stack.append(opening)
     with pytest.raises(tokenizer.TokenizerError):
         instance._parse_closer()
Example #2
0
 def test_parse_closer(self, tokenizer, formula, offset, opener):
     """A matching closer consumes exactly one character and records a
     single token with subtype CLOSE and the opener's type."""
     instance = tokenizer.Tokenizer(formula)
     instance.items.clear()
     instance.offset = offset
     opening = tokenizer.Token(*opener)
     instance.token_stack.append(opening)
     consumed = instance._parse_closer()
     assert consumed == 1
     assert len(instance.items) == 1
     recorded = instance.items[0]
     assert recorded.value == formula[offset]
     assert recorded.type == opener[1]
     assert recorded.subtype == CLOSE
Example #3
0
 def test_parse_separator(self, tokenizer, formula, offset, opener, type_,
                          subtype):
     """A separator consumes one character and records one token whose
     type/subtype depend on whether an opener is on the stack."""
     instance = tokenizer.Tokenizer(formula)
     instance.items.clear()
     instance.offset = offset
     if opener:
         instance.token_stack.append(tokenizer.Token(*opener))
     consumed = instance._parse_separator()
     assert consumed == 1
     assert len(instance.items) == 1
     recorded = instance.items[0]
     assert recorded.value == formula[offset]
     assert recorded.type == type_
     assert recorded.subtype == subtype
Example #4
0
 def test_get_closer(self, tokenizer, token, close_val):
     """get_closer returns a CLOSE-subtype counterpart of the opening
     token, sharing its type."""
     opening = tokenizer.Token(*token)
     paired = opening.get_closer()
     assert paired.value == close_val
     assert paired.type == token[1]
     assert paired.subtype == CLOSE
Example #5
0
 def test_init(self, tokenizer):
     tokenizer.Token('val', 'type', 'subtype')