Example #1
def test_line_type_tokenizer_shapes():
    # A,.25,-.1,[BOX,ltypeshp.shx,x=-.1,s=.1],-.1,1 with BOX replaced by shape index 132
    ltype = 'A,.25,-.1,[132,ltypeshp.shx,x=-.1,s=.1],-.1,1'
    result = list(lin_tokenizer(ltype))
    assert result == [
        'A', '.25', '-.1', '[132', 'ltypeshp.shx', 'x=-.1', 's=.1]', '-.1', '1'
    ]
Example #2
def test_line_type_tokenizer_strings():
    ltype = 'A,.5,-.2,["GAS",STANDARD,S=.1,U=0.0,X=-0.1,Y=-.05],-.25'
    result = list(lin_tokenizer(ltype))
    assert result == [
        'A', '.5', '-.2', '["GAS"', 'STANDARD', 'S=.1', 'U=0.0', 'X=-0.1',
        'Y=-.05]', '-.25'
    ]
Example #3
def test_line_type_tokenizer_just_numbers():
    ltype = 'A,.25,-.125,.25,-.125,0,-.125'
    result = list(lin_tokenizer(ltype))
    assert result == ['A', '.25', '-.125', '.25', '-.125', '0', '-.125']
Example #4
def test_line_type_tokenizer_string_with_comma():
    ltype = 'A, "TEXT, TEXT", 0'
    result = list(lin_tokenizer(ltype))
    assert result == ['A', '"TEXT, TEXT"', '0']
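
Taken together, the expected outputs pin down the tokenizer's contract: a .lin linetype definition is split on commas, each token is stripped of surrounding whitespace, and commas inside double-quoted strings do not terminate a token (brackets get no special handling, which is why '[132' and 's=.1]' come out as separate tokens). The sketch below is a hypothetical reference implementation that satisfies the cases above; it is not the library's actual lin_tokenizer, and the name lin_tokenizer_sketch is made up for illustration.

def lin_tokenizer_sketch(definition: str):
    # Split on commas, but ignore commas inside double-quoted strings,
    # and strip whitespace around each emitted token.
    token = ""
    in_string = False
    for char in definition:
        if char == '"':
            in_string = not in_string
            token += char
        elif char == "," and not in_string:
            yield token.strip()
            token = ""
        else:
            token += char
    if token:
        yield token.strip()

For example, list(lin_tokenizer_sketch('A, "TEXT, TEXT", 0')) yields ['A', '"TEXT, TEXT"', '0'], matching the string-with-comma test above.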