def test_line_type_tokenizer_shapes():
    # Pattern 'A,.25,-.1,[BOX,ltypeshp.shx,x=-.1,s=.1],-.1,1' with the shape
    # name BOX replaced by its shape index 132.
    pattern = 'A,.25,-.1,[132,ltypeshp.shx,x=-.1,s=.1],-.1,1'
    expected = [
        'A',
        '.25',
        '-.1',
        '[132',
        'ltypeshp.shx',
        'x=-.1',
        's=.1]',
        '-.1',
        '1',
    ]
    # The bracketed shape section must stay split on commas, with the opening
    # and closing brackets attached to the first and last inner tokens.
    assert list(lin_tokenizer(pattern)) == expected
def test_line_type_tokenizer_strings():
    # Embedded text section: the quoted "GAS" token keeps its quotes and the
    # leading bracket, the trailing bracket sticks to the last parameter.
    pattern = 'A,.5,-.2,["GAS",STANDARD,S=.1,U=0.0,X=-0.1,Y=-.05],-.25'
    expected = [
        'A',
        '.5',
        '-.2',
        '["GAS"',
        'STANDARD',
        'S=.1',
        'U=0.0',
        'X=-0.1',
        'Y=-.05]',
        '-.25',
    ]
    assert list(lin_tokenizer(pattern)) == expected
def test_line_type_tokenizer_shapes():
    # A,.25,-.1,[BOX,ltypeshp.shx,x=-.1,s=.1],-.1,1 replacing BOX by
    # shape index 132
    tokens = list(
        lin_tokenizer("A,.25,-.1,[132,ltypeshp.shx,x=-.1,s=.1],-.1,1")
    )
    # Shape section is split on commas; brackets remain glued to the
    # first and last tokens inside the section.
    assert tokens == [
        "A",
        ".25",
        "-.1",
        "[132",
        "ltypeshp.shx",
        "x=-.1",
        "s=.1]",
        "-.1",
        "1",
    ]
def test_line_type_tokenizer_strings():
    # Text section with a quoted string: '"GAS"' keeps its quotes, and the
    # section brackets stay attached to the adjacent tokens.
    tokens = list(
        lin_tokenizer('A,.5,-.2,["GAS",STANDARD,S=.1,U=0.0,X=-0.1,Y=-.05],-.25')
    )
    assert tokens == [
        "A",
        ".5",
        "-.2",
        '["GAS"',
        "STANDARD",
        "S=.1",
        "U=0.0",
        "X=-0.1",
        "Y=-.05]",
        "-.25",
    ]
def test_line_type_tokenizer_just_numbers():
    # A plain dash/dot pattern with no shape or text sections splits on
    # every comma.
    result = list(lin_tokenizer('A,.25,-.125,.25,-.125,0,-.125'))
    expected = ['A', '.25', '-.125', '.25', '-.125', '0', '-.125']
    assert result == expected
def test_line_type_tokenizer_string_with_comma():
    # A comma inside a quoted string must not split the token; surrounding
    # whitespace outside the quotes is stripped.
    tokens = list(lin_tokenizer('A, "TEXT, TEXT", 0'))
    assert tokens == ['A', '"TEXT, TEXT"', '0']
def test_line_type_tokenizer_just_numbers():
    # Purely numeric dash pattern: every comma is a token boundary.
    pattern = "A,.25,-.125,.25,-.125,0,-.125"
    expected = ["A", ".25", "-.125", ".25", "-.125", "0", "-.125"]
    assert list(lin_tokenizer(pattern)) == expected