def test_parse_simple_tagkey(inp_str, expected):
    stream = io.StringIO(inp_str)
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_simple_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    tagkey_parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    assert next(tagkey_parser.parse_tagkey()) == expected


def test_drop_comment_eof():
    # An unterminated comment (no closing '#') should fail when the stream ends.
    comment_buffer = io.StringIO("#a comment")
    tokenizer = TextRoffBodyTokenizer(comment_buffer)
    with pytest.raises(TokenizationError, match="Reached end of stream"):
        next(tokenizer.tokenize_comment())


def test_parse_byte_array_values():
    stream = io.StringIO("array byte x 2 255 0")
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_array_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    parser.is_binary_file = False
    parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    assert next(iter(parser)) == ("x", b"\xff\x00")


def test_parse_byte_values():
    stream = io.StringIO("byte x 1")
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_simple_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    parser.is_binary_file = False
    parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    assert next(iter(parser)) == ("x", b"\x01")


def test_parse_tagkey_ascii_types(input_str, expected, expected_type):
    stream = io.StringIO(input_str)
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_tagkey()
    parser = roffparse.RoffTagKeyParser(
        tokens, stream, roffparse.RoffParser(tokens, stream)
    )
    val = next(iter(parser))
    assert val == expected
    assert isinstance(val[1], expected_type)


def test_tokenize_comment():
    # The '#'-delimited comment yields no tokens; once it has been consumed the
    # stream is positioned at the first character after the closing '#'.
    comment_buffer = io.StringIO("#a comment#1")
    tokenizer = TextRoffBodyTokenizer(comment_buffer)
    with pytest.raises(StopIteration):
        next(tokenizer.tokenize_comment())
    assert comment_buffer.read(1) == "1"


def test_parse_boolean_values_typing():
    stream = io.StringIO("bool x 2")
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_simple_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    parser.is_binary_file = False
    parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    with pytest.raises(roffparse.RoffTypeError, match="must be either 1 or 0"):
        next(iter(parser))


def test_parse_tagkey_ascii_array_types(input_str, expected):
    stream = io.StringIO(input_str)
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_array_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    parser.is_binary_file = False
    parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    val = next(iter(parser))
    assert val[0] == expected[0]
    assert np.array_equal(val[1], expected[1])


def test_parse_array_tagkey(inp_str, expected):
    stream = io.StringIO(inp_str)
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_array_tagkey()
    parser = roffparse.RoffParser(tokens, stream)
    tagkey_parser = roffparse.RoffTagKeyParser(tokens, stream, parser)
    varname, array = next(tagkey_parser.parse_tagkey())
    assert expected[0] == varname
    assert np.array_equal(expected[1], array)


def test_tokenize_space(character, whitespace):
    # The delimiter tokenizer consumes leading whitespace without yielding tokens,
    # leaving the stream positioned at the first non-whitespace character.
    starts_with_space = io.StringIO(whitespace + character)
    tokenizer = TextRoffBodyTokenizer(starts_with_space)
    test_tokenizer = tokenizer.tokenize_delimiter()
    with pytest.raises(StopIteration):
        next(test_tokenizer)
    assert starts_with_space.read(1) == character


def initialize_body_tokenizer(self, header_kind):
    if header_kind == TokenKind.ROFF_BIN:
        self.body_tokenizer = BinaryRoffBodyTokenizer(
            self.stream, endianess=self.endianess
        )
    elif header_kind == TokenKind.ROFF_ASC:
        self.body_tokenizer = TextRoffBodyTokenizer(self.stream)
    else:
        raise ValueError(f"Unexpected header kind {header_kind}")


def test_parse_simple_type(typestr, expected):
    stream = io.StringIO(typestr)
    tokenizer = TextRoffBodyTokenizer(stream)
    assert (
        next(roffparse.parse_simple_type(tokenizer.tokenize_keyword[expected]()))
        == expected
    )


def make_body_tokenizer(contents):
    contents = pad_with_space(pad_with_ascii_comment(contents))
    stream = io.StringIO(contents)
    return TextRoffBodyTokenizer(stream)
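

# The padding helpers used by make_body_tokenizer are not shown in this excerpt, so
# the stand-ins below are a minimal sketch of assumed behavior, not the real
# implementations. Based on the '#...#' comment syntax exercised in
# test_tokenize_comment, they are assumed to prepend a closed ASCII comment and to
# surround the contents with whitespace that the tokenizer has to skip over.
def pad_with_ascii_comment(contents):
    # Assumed: prefix the body with a '#'-delimited ROFF comment.
    return "#a padding comment#" + contents


def pad_with_space(contents):
    # Assumed: surround the body with whitespace that the delimiter tokenizer consumes.
    return " " + contents + " "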