def test_parse_simple_tagkey(inp_str, expected):
    """A simple (scalar) tagkey in ASCII form parses to the expected pair."""
    text_stream = io.StringIO(inp_str)
    token_stream = TextRoffBodyTokenizer(text_stream).tokenize_simple_tagkey()
    file_parser = roffparse.RoffParser(token_stream, text_stream)
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, text_stream, file_parser)
    assert next(tagkey_parser.parse_tagkey()) == expected
def test_parse_binary_file(binary_str):
    """Fuzz-style check: parsing arbitrary binary input either succeeds or
    raises RoffTypeError — nothing else should escape."""
    byte_stream = io.BytesIO(binary_str)
    token_iter = iter(RoffTokenizer(byte_stream, endianess="little"))
    file_parser = roffparse.RoffParser(token_iter, byte_stream)
    try:
        # Fully consume the parser output, materializing every tagkey.
        {tag: {key: value for key, value in tagkeys}
         for tag, tagkeys in iter(file_parser)}
    except roffparse.RoffTypeError:
        # Expected for malformed input; any other exception fails the test.
        pass
def test_parse_ascii_file(ascii_str):
    """Fuzz-style check: parsing arbitrary ASCII input either succeeds or
    raises RoffTypeError — nothing else should escape."""
    text_stream = io.StringIO(ascii_str)
    token_iter = iter(RoffTokenizer(text_stream))
    file_parser = roffparse.RoffParser(token_iter, text_stream)
    try:
        # Fully consume the parser output, materializing every tagkey.
        {tag: {key: value for key, value in tagkeys}
         for tag, tagkeys in iter(file_parser)}
    except roffparse.RoffTypeError:
        # Expected for malformed input; any other exception fails the test.
        pass
def test_parse_byte_array_values():
    """An ASCII `array byte` tagkey parses into a raw bytes value."""
    text_stream = io.StringIO("array byte x 2 255 0")
    token_stream = TextRoffBodyTokenizer(text_stream).tokenize_array_tagkey()
    file_parser = roffparse.RoffParser(token_stream, text_stream)
    file_parser.is_binary_file = False
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, text_stream, file_parser)
    assert next(iter(tagkey_parser)) == ("x", b"\xff\x00")
def test_parse_byten_values():
    """An ASCII scalar `byte` tagkey parses into a single raw byte."""
    text_stream = io.StringIO("byte x 1")
    token_stream = TextRoffBodyTokenizer(text_stream).tokenize_simple_tagkey()
    file_parser = roffparse.RoffParser(token_stream, text_stream)
    file_parser.is_binary_file = False
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, text_stream, file_parser)
    assert next(iter(tagkey_parser)) == ("x", b"\x01")
def test_parse_tagkey_binary_array_types(input_str, expected):
    """Binary array tagkeys parse to the expected name and array contents."""
    byte_stream = io.BytesIO(input_str)
    token_stream = BinaryRoffBodyTokenizer(byte_stream).tokenize_array_tagkey()
    file_parser = roffparse.RoffParser(token_stream, byte_stream)
    file_parser.is_binary_file = True
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, byte_stream, file_parser)
    name, values = next(iter(tagkey_parser))
    assert name == expected[0]
    # Arrays compare element-wise; == on numpy arrays is not a scalar.
    assert np.array_equal(values, expected[1])
def test_parse_tagkey_ascii_types(input_str, expected, expected_type):
    """ASCII tagkeys parse to the expected value with the expected Python type."""
    stream = io.StringIO(input_str)
    tokenizer = TextRoffBodyTokenizer(stream)
    tokens = tokenizer.tokenize_tagkey()
    # Fix: RoffParser takes (tokens, stream) — the original call swapped the
    # arguments, inconsistent with every other RoffParser call in this file.
    parser = roffparse.RoffTagKeyParser(
        tokens, stream, roffparse.RoffParser(tokens, stream)
    )
    val = next(iter(parser))
    assert val == expected
    assert isinstance(val[1], expected_type)
def test_parse_boolean_values_typing():
    """A bool tagkey whose value is not 0/1 raises RoffTypeError."""
    text_stream = io.StringIO("bool x 2")
    token_stream = TextRoffBodyTokenizer(text_stream).tokenize_simple_tagkey()
    file_parser = roffparse.RoffParser(token_stream, text_stream)
    file_parser.is_binary_file = False
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, text_stream, file_parser)
    with pytest.raises(roffparse.RoffTypeError, match="must be either 1 or 0"):
        next(iter(tagkey_parser))
def test_parse_tagkey_binary_types(input_str, expected, expected_type):
    """Binary scalar tagkeys parse to the expected value and Python type."""
    byte_stream = io.BytesIO(input_str)
    token_stream = BinaryRoffBodyTokenizer(byte_stream).tokenize_simple_tagkey()
    file_parser = roffparse.RoffParser(token_stream, byte_stream)
    file_parser.is_binary_file = True
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, byte_stream, file_parser)
    parsed = next(iter(tagkey_parser))
    assert parsed == expected
    assert isinstance(parsed[1], expected_type)
def test_parse_array_tagkey(inp_str, expected):
    """An ASCII array tagkey parses to the expected name and array contents."""
    text_stream = io.StringIO(inp_str)
    token_stream = TextRoffBodyTokenizer(text_stream).tokenize_array_tagkey()
    file_parser = roffparse.RoffParser(token_stream, text_stream)
    tagkey_parser = roffparse.RoffTagKeyParser(token_stream, text_stream, file_parser)
    varname, array = next(tagkey_parser.parse_tagkey())
    assert expected[0] == varname
    # Arrays compare element-wise; == on numpy arrays is not a scalar.
    assert np.array_equal(expected[1], array)
def lazy_read(filelike):
    """Yield a lazy tag iterator for a roff file or an already-open stream.

    Parameters
    ----------
    filelike : str | pathlib.Path | file-like
        Path to a roff file (opened here) or an open stream (left open).

    Yields
    ------
    iterator
        An ``EndianessHandler``-wrapped iterator over the parsed tags.

    Notes
    -----
    This is a generator used as a context: the caller iterates the yielded
    value, and the stream is closed when the generator resumes or is closed.
    """
    file_stream = filelike
    did_open = False
    if isinstance(filelike, (str, pathlib.Path)):
        did_open = True
        file_stream = make_filestream(filelike)
    try:
        tokenizer = rofftok.RoffTokenizer(file_stream)
        parser = roffparse.RoffParser(iter(tokenizer), file_stream)
        yield iter(EndianessHandler(parser, tokenizer))
    finally:
        # Fix: the original closed the stream only on a normal resume past the
        # yield. If the consumer raised, or the generator was closed/GC'd
        # mid-iteration, the file we opened leaked. finally guarantees closure
        # of streams *we* opened; caller-owned streams are never closed.
        if did_open:
            file_stream.close()
def test_endianess_swap():
    """Swapping endianess mid-parse changes how subsequent values decode.

    The stream holds two int tagkeys; ``x`` is read little-endian, then both
    tokenizer and parser are flipped to big-endian before ``y`` is read.
    """
    payload = b"roff-bin\0tag\0t\0int\0x\0\x01\0\0\0int\0y\0\0\0\0\xFFendtag\0"
    byte_stream = io.BytesIO(payload)
    tokenizer = RoffTokenizer(byte_stream)
    parser = roffparse.RoffParser(iter(tokenizer), byte_stream)

    tag = next(iter(parser))
    assert tag[0] == "t"
    # First value decoded with the default little-endian byte order.
    assert next(tag[1]) == ("x", 1)
    assert tokenizer.endianess == "little"
    assert parser.endianess == "little"

    tokenizer.swap_endianess()
    parser.swap_endianess()
    assert tokenizer.endianess == "big"
    assert parser.endianess == "big"

    # The already-yielded tag is unchanged, but the next value is now
    # decoded big-endian: b"\0\0\0\xFF" -> 255.
    assert tag[0] == "t"
    assert next(tag[1]) == ("y", 255)