Пример #1
0
 def test_parse_closer_error(self, tokenizer, formula, offset, opener):
     """A closing char with a mismatched opener on the stack must raise."""
     tkzr = tokenizer.Tokenizer(formula)
     tkzr.token_stack.append(tokenizer.Token(*opener))
     tkzr.offset = offset
     del tkzr.items[:]
     with pytest.raises(tokenizer.TokenizerError):
         tkzr._parse_closer()
Пример #2
0
 def test_parse_brackets(self, tokenizer, formula, offset, result):
     """_parse_brackets consumes the bracketed span into the pending token."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.offset = offset
     consumed = tkzr._parse_brackets()
     assert consumed == len(result)
     # Nothing is emitted yet; the text accumulates in the token buffer.
     assert tkzr.items == []
     assert tkzr.token == [result]
Пример #3
0
 def test_assert_empty_token(self, tokenizer):
     """assert_empty_token passes on an empty buffer and raises otherwise."""
     tkzr = tokenizer.Tokenizer("")
     try:
         tkzr.assert_empty_token()
     except tokenizer.TokenizerError:
         pytest.fail("assert_empty_token raised TokenizerError incorrectly")
     tkzr.token.append("test")
     with pytest.raises(tokenizer.TokenizerError):
         tkzr.assert_empty_token()
Пример #4
0
 def test_save_token(self, tokenizer):
     """save_token is a no-op when the buffer is empty; otherwise it
     stores the buffered text as a single OPERAND item."""
     tkzr = tokenizer.Tokenizer("")
     tkzr.save_token()
     assert tkzr.items == []
     tkzr.token.append("test")
     tkzr.save_token()
     saved, = tkzr.items  # exactly one item was emitted
     assert (saved.value, saved.type) == ("test", OPERAND)
Пример #5
0
 def test_parse_closer(self, tokenizer, formula, offset, opener):
     """A matching closer emits one CLOSE token of the opener's type."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.token_stack.append(tokenizer.Token(*opener))
     tkzr.offset = offset
     assert tkzr._parse_closer() == 1
     closer, = tkzr.items  # exactly one item was emitted
     assert closer.value == formula[offset]
     assert closer.type == opener[1]
     assert closer.subtype == CLOSE
Пример #6
0
 def test_parse_operator(self, tokenizer, formula, result, type_):
     """_parse_operator emits one operator token and leaves no pending text."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.offset = 0
     assert tkzr._parse_operator() == len(result)
     assert not tkzr.token
     op, = tkzr.items  # exactly one item was emitted
     assert (op.value, op.type, op.subtype) == (result, type_, '')
Пример #7
0
 def test_parse_whitespace(self, tokenizer, formula):
     """A run of whitespace collapses to a single-space WSPACE token."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.offset = 0
     assert tkzr._parse_whitespace() == len(formula)
     assert not tkzr.token
     ws, = tkzr.items  # exactly one item was emitted
     assert (ws.value, ws.type, ws.subtype) == (" ", WSPACE, "")
Пример #8
0
 def test_parse_error(self, tokenizer, error):
     """A recognised error literal becomes one OPERAND token of subtype ERROR."""
     tkzr = tokenizer.Tokenizer(error)
     del tkzr.items[:]
     tkzr.offset = 0
     assert tkzr._parse_error() == len(error)
     assert not tkzr.token
     err, = tkzr.items  # exactly one item was emitted
     assert (err.value, err.type, err.subtype) == (error, OPERAND, ERROR)
Пример #9
0
 def test_parse_separator(self, tokenizer, formula, offset, opener, type_,
                          subtype):
     """Separators consume one char; their classification depends on the
     enclosing open token (if any)."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     if opener:
         tkzr.token_stack.append(tokenizer.Token(*opener))
     tkzr.offset = offset
     assert tkzr._parse_separator() == 1
     sep, = tkzr.items  # exactly one item was emitted
     assert (sep.value, sep.type, sep.subtype) == (formula[offset], type_, subtype)
Пример #10
0
 def test_check_scientific_notation(self, tokenizer, formula, offset, token,
                                    ret):
     """On a match the exponent-sign char is absorbed into the token buffer
     and the offset advances by one; otherwise nothing changes."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.offset = offset
     tkzr.token[:] = token
     assert tkzr.check_scientific_notation() is ret
     if not ret:
         assert tkzr.offset == offset
         assert tkzr.token == token
     else:
         assert tkzr.offset == offset + 1
         assert tkzr.token[:-1] == token
         assert tkzr.token[-1] == formula[offset]
Пример #11
0
 def test_parse_opener(self, tokenizer, prefix, char, type_):
     """An opener flushes any buffered prefix into an OPEN token and pushes
     that same token onto the token stack."""
     tkzr = tokenizer.Tokenizer(prefix + char)
     del tkzr.items[:]
     if prefix:
         tkzr.token.append(prefix)
     tkzr.offset = len(prefix)
     assert tkzr._parse_opener() == 1
     assert not tkzr.token
     opened, = tkzr.items  # exactly one item was emitted
     assert (opened.value, opened.type, opened.subtype) == (prefix + char, type_, OPEN)
     # The stack holds the very same token object that was emitted.
     assert len(tkzr.token_stack) == 1
     assert tkzr.token_stack[0] is opened
Пример #12
0
 def test_parse_string(self, tokenizer, formula, offset, result):
     """Double-quoted text is emitted as a TEXT operand; single-quoted text
     accumulates in the token buffer; None means the string is unterminated."""
     tkzr = tokenizer.Tokenizer(formula)
     del tkzr.items[:]
     tkzr.offset = offset
     if result is None:
         with pytest.raises(tokenizer.TokenizerError):
             tkzr._parse_string()
         return
     assert tkzr._parse_string() == len(result)
     if formula[offset] != '"':
         # Link-style quote: text only buffers, nothing is emitted.
         assert not tkzr.items
         assert tkzr.token == [result]
     else:
         text, = tkzr.items  # exactly one item was emitted
         assert (text.value, text.type, text.subtype) == (result, OPERAND, TEXT)
         assert not tkzr.token
Пример #13
0
 def test_parse_opener_error(self, tokenizer):
     """An opening brace following pending token text must raise."""
     tkzr = tokenizer.Tokenizer('name{')
     tkzr.offset = 4
     tkzr.token[:] = ('name', )
     with pytest.raises(tokenizer.TokenizerError):
         tkzr._parse_opener()
Пример #14
0
 def test_parse(self, tokenizer, formula, tokens):
     """Full-formula parsing yields the expected (value, type, subtype) triples."""
     tkzr = tokenizer.Tokenizer(formula)
     observed = [(item.value, item.type, item.subtype) for item in tkzr.items]
     assert observed == tokens
Пример #15
0
 def test_parse_error_error(self, tokenizer):
     """An unrecognised error literal must raise."""
     tkzr = tokenizer.Tokenizer("#NotAnError")
     del tkzr.items[:]
     tkzr.offset = 0
     with pytest.raises(tokenizer.TokenizerError):
         tkzr._parse_error()
Пример #16
0
 def test_parse_brackets_error(self, tokenizer):
     """An unclosed bracket must raise."""
     tkzr = tokenizer.Tokenizer('[unfinished business')
     with pytest.raises(tokenizer.TokenizerError):
         tkzr._parse_brackets()
Пример #17
0
 def test_render(self, tokenizer, formula):
     """Rendering the parsed tokens reproduces the original formula text."""
     tkzr = tokenizer.Tokenizer(formula)
     assert formula == tkzr.render()
Пример #18
0
 def test_init(self, tokenizer):
     """The constructor stores the formula verbatim, '=' prefix or not."""
     for source in ("abcdefg", "=abcdefg"):
         tkzr = tokenizer.Tokenizer(source)
         assert tkzr.formula == source