Example #1
def parse(
    text: str,
    cells: Optional[Index] = None,
    surfaces: Optional[Index] = None,
    transformations: Optional[Index] = None,
    compositions: Optional[Index] = None,
) -> Body:
    # Any index argument left as None is replaced with a fresh strict index of
    # the matching type; indexes supplied by the caller are used as given.
    cells, surfaces, transformations, compositions = map(
        lambda x: x[1]() if x[0] is None else x[0],
        zip(
            [cells, surfaces, transformations, compositions],
            [
                CellStrictIndex,
                SurfaceStrictIndex,
                TransformationStrictIndex,
                CompositionStrictIndex,
            ],
        ),
    )
    original = text  # keep the untouched text; it is handed to the parser below
    text = pu.drop_c_comments(text)
    text, comments, trailing_comments = pu.extract_comments(text)
    lexer = Lexer()
    parser = Parser(
        cells,
        surfaces,
        transformations,
        compositions,
        comments,
        trailing_comments,
        original,
    )
    result = parser.parse(lexer.tokenize(text))
    return result
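# A minimal, self-contained sketch of the default-substitution idiom used in
# the function above: each argument left as None is replaced by a fresh object
# built from its paired factory class, while caller-supplied values pass
# through unchanged. dict and int below are illustrative placeholders, not the
# library's index classes.
def fill_defaults(values, factories):
    return [factory() if value is None else value
            for value, factory in zip(values, factories)]

print(fill_defaults([None, 5], [dict, int]))  # -> [{}, 5]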
Example #2
def parse(text):
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    parser = Parser(comments, trailing_comments)
    result = parser.parse(lexer.tokenize(text))
    return result
Example #3
def test_when_no_c_comments_in_text():
    text = """m1
    1001.21c -1.0
    """
    actual = m.drop_c_comments(text)
    assert actual is text, "drop_c_comments should return the text object without changes"
Example #4
def parse(text: str) -> Transformation:
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    parser = Parser()
    result: Transformation = parser.parse(lexer.tokenize(text))
    if trailing_comments:
        result.options["comment"] = trailing_comments
    return result
Example #5
def parse(text: str, transformations: Optional[Index] = None) -> Surface:
    if transformations is None:
        transformations = TransformationStrictIndex()
    else:
        assert isinstance(transformations, Index)
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    parser = Parser(transformations)
    result = parser.parse(lexer.tokenize(text))
    if trailing_comments:
        result.options["comment"] = trailing_comments
    return result
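# Hypothetical usage sketch for the surface parser above: instead of letting
# parse() create a fresh TransformationStrictIndex, the caller can pass one in,
# and presumably any transformation referenced by a surface card is then
# resolved against that shared index. The card strings are illustrative
# MCNP-style surfaces, not taken from the library's tests.
shared = TransformationStrictIndex()
s1 = parse("1 PZ 10.0", transformations=shared)
s2 = parse("2 SO 5.0", transformations=shared)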
Example #6
def test_drop_c_comments(text, expected):
    actual = m.drop_c_comments(text)
    assert actual == expected
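# Example #6 receives (text, expected) as arguments, which suggests a pytest
# parametrized test. A sketch of how it could be driven, with invented cases;
# the exact comment syntax recognised by drop_c_comments (lines beginning with
# "c ") is an assumption, and "m" is the module alias used in the test above.
import pytest

@pytest.mark.parametrize(
    "text,expected",
    [
        # nothing to drop: output equals input
        ("m1\n1001.21c -1.0\n", "m1\n1001.21c -1.0\n"),
        # a leading "c" comment line is removed
        ("c material card\nm1\n1001.21c -1.0\n", "m1\n1001.21c -1.0\n"),
    ],
)
def test_drop_c_comments(text, expected):
    actual = m.drop_c_comments(text)
    assert actual == expected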