def test_parse_closer_error(self, tokenizer, formula, offset, opener):
    # With the given opener on the token stack, _parse_closer on the character
    # at `offset` is expected to raise TokenizerError.
    tok = tokenizer.Tokenizer(formula)
    del tok.items[:]
    tok.offset = offset
    tok.token_stack.append(tokenizer.Token(*opener))
    with pytest.raises(tokenizer.TokenizerError):
        tok._parse_closer()
def test_parse_closer(self, tokenizer, formula, offset, opener):
    # With a matching opener on the stack, _parse_closer should consume one
    # character and emit a single CLOSE token of the opener's type.
    tok = tokenizer.Tokenizer(formula)
    del tok.items[:]
    tok.offset = offset
    tok.token_stack.append(tokenizer.Token(*opener))
    assert tok._parse_closer() == 1
    assert len(tok.items) == 1
    token = tok.items[0]
    assert token.value == formula[offset]
    assert token.type == opener[1]
    assert token.subtype == CLOSE
def test_parse_separator(self, tokenizer, formula, offset, opener, type_, subtype):
    # _parse_separator should consume one character and produce a token with
    # the expected type and subtype; an opener is pushed first when supplied.
    tok = tokenizer.Tokenizer(formula)
    del tok.items[:]
    tok.offset = offset
    if opener:
        tok.token_stack.append(tokenizer.Token(*opener))
    assert tok._parse_separator() == 1
    assert len(tok.items) == 1
    token = tok.items[0]
    assert token.value == formula[offset]
    assert token.type == type_
    assert token.subtype == subtype
def test_get_closer(self, tokenizer, token, close_val):
    # get_closer on an opening token should return the matching token with
    # subtype CLOSE and the same type as the opener.
    closer = tokenizer.Token(*token).get_closer()
    assert closer.value == close_val
    assert closer.type == token[1]
    assert closer.subtype == CLOSE
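# The fixtures used above (formula, offset, opener, token, close_val) are
# presumably supplied by pytest.mark.parametrize decorators elsewhere in the
# suite; the exact values are not shown here. As a purely hypothetical sketch,
# a parametrization for test_get_closer might pair an opening token with its
# expected closing value, e.g.:
#
#     @pytest.mark.parametrize('token, close_val', [
#         (('(', Token.PAREN, Token.OPEN), ')'),
#         (('{', Token.ARRAY, Token.OPEN), '}'),
#     ])
#
# where Token.PAREN, Token.ARRAY and Token.OPEN are assumed type/subtype
# constants and are not confirmed by the tests shown here.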
def test_init(self, tokenizer):
    # Constructing a Token with value, type and subtype should not raise.
    tokenizer.Token('val', 'type', 'subtype')
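# A minimal illustrative sketch (an assumption, not part of the original
# suite): the tests above read token.value, token.type and token.subtype back,
# so a hypothetical round-trip check for the constructor might look like this.
# The leading underscore keeps pytest from collecting it as a real test.
def _example_token_round_trip(self, tokenizer):
    token = tokenizer.Token('val', 'type', 'subtype')
    assert token.value == 'val'
    assert token.type == 'type'
    assert token.subtype == 'subtype'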