Example 1
def parse(text):
    # Strip comment lines, then pull the remaining inline and trailing
    # comments out of the text so the lexer sees clean source.
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    # The extracted comments are handed to the parser so they can be
    # attached to the parsed objects.
    parser = Parser(comments, trailing_comments)
    result = parser.parse(lexer.tokenize(text))
    return result
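A minimal usage sketch, assuming parse() above is in scope and that, as in MCNP input, lines beginning with "c" are comment cards and "$" starts a trailing comment (both conventions are inferred from the helper names, not stated in this snippet):

sample = (
    "c geometry block\n"
    "1 0 -1  $ outer void cell\n"
)
result = parse(sample)  # comment card and "$" comment are stripped before lexing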
Example 2
def parse(
    text: str,
    cells: Optional[Index] = None,
    surfaces: Optional[Index] = None,
    transformations: Optional[Index] = None,
    compositions: Optional[Index] = None,
) -> Body:
    # For any index the caller did not supply, fall back to a fresh
    # strict index of the matching kind.
    cells, surfaces, transformations, compositions = [
        given if given is not None else default()
        for given, default in zip(
            [cells, surfaces, transformations, compositions],
            [
                CellStrictIndex,
                SurfaceStrictIndex,
                TransformationStrictIndex,
                CompositionStrictIndex,
            ],
        )
    ]
    # Keep the original text around for the parser, then strip comment
    # lines and extract the remaining inline and trailing comments.
    original = text
    text = pu.drop_c_comments(text)
    text, comments, trailing_comments = pu.extract_comments(text)
    lexer = Lexer()
    parser = Parser(
        cells,
        surfaces,
        transformations,
        compositions,
        comments,
        trailing_comments,
        original,
    )
    result = parser.parse(lexer.tokenize(text))
    return result
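The default-filling step can be read in isolation; here is a self-contained sketch of the same idiom, using a hypothetical stand-in class rather than the project's real Index types:

from typing import Optional

class StrictIndex(dict):
    """Hypothetical stand-in for the *StrictIndex classes above."""

def resolve(given: Optional[dict], default: type) -> dict:
    # Instantiate the strict default only when the caller passed nothing.
    return default() if given is None else given

print(resolve(None, StrictIndex))               # fresh StrictIndex()
print(resolve({"s1": "surface"}, StrictIndex))  # caller's index, unchanged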
Example 3
def test_extract_comments(
    text, expected_new_text, expected_comments, expected_trailing_comment
):
    actual_new_text, actual_comments, actual_trailing_comment = m.extract_comments(text)
    assert actual_new_text == expected_new_text
    assert actual_comments == expected_comments
    assert actual_trailing_comment == expected_trailing_comment
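The arguments are presumably supplied by pytest parametrization; a hypothetical wiring is sketched below, with the case list left as a placeholder because the exact shapes of the expected values are not shown in this snippet:

import pytest

@pytest.mark.parametrize(
    "text,expected_new_text,expected_comments,expected_trailing_comment",
    [
        # ("<raw text>", "<text without comments>", <comments>, <trailing comment>),
    ],
)
def test_extract_comments(
    text, expected_new_text, expected_comments, expected_trailing_comment
):
    ...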
Example 4
def parse(text: str) -> Transformation:
    # Strip comment lines and pull the remaining comments out of the card.
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    parser = Parser()
    result: Transformation = parser.parse(lexer.tokenize(text))
    # A trailing comment is preserved on the parsed transformation.
    if trailing_comments:
        result.options["comment"] = trailing_comments
    return result
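A hedged usage sketch: a trailing "$" comment survives in result.options. The sample card below follows MCNP TR-card syntax and is purely illustrative, not taken from the project:

tr = parse("tr1 0 0 1  $ shift along z\n")
print(tr.options.get("comment"))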
Example 5
def parse(text: str, transformations: Optional[Index] = None) -> Surface:
    # Use the caller's transformation index when given; otherwise start
    # from an empty strict index.
    if transformations is None:
        transformations = TransformationStrictIndex()
    else:
        assert isinstance(transformations, Index)
    text = drop_c_comments(text)
    text, comments, trailing_comments = extract_comments(text)
    lexer = Lexer()
    parser = Parser(transformations)
    result = parser.parse(lexer.tokenize(text))
    # As in Example 4, a trailing comment is preserved on the result.
    if trailing_comments:
        result.options["comment"] = trailing_comments
    return result
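Unlike Example 2, this variant validates an explicitly passed index with an assert instead of accepting it silently. A usage sketch, assuming the default strict index type can also be passed in explicitly; the surface card "1 SO 2.0" (a sphere of radius 2 at the origin) is illustrative:

index = TransformationStrictIndex()
surface = parse("1 SO 2.0", transformations=index)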