Example #1
def test_comment():
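    # an HTML comment should tokenize to a Comment start token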
    lexer = l.Lexer()
    tokens = lexer.tokenize(
        '<!-- In the interest of restricting article length, please limit this section to '
        'two or three short paragraphs and add any substantial information to the main Issues '
        'in anarchism article. Thank you. -->')

    assert tokens[0].token == l.Comment().start
Example #2
def test_tokenize():
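    # tokenize a wikitext fixture: first token is a Template start, penultimate a LineBreak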
    with open(TEST_DATA / 'wikitext_tokenize') as f:
        text = f.read()
        lexer = l.Lexer()
        tokens = lexer.tokenize(text)
        print(tokens)
        assert tokens[0].token == l.Template().start
        assert tokens[-2].token == l.LineBreak().start
Example #3
def test_parse_list():
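    # a nested wiki list should parse into a ListNode after the leading line break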
    text = """
* asd
** asd
"""
    lexer = l.Lexer()
    logger.info(lexer.tokenize(text))
    parser = p.Parser()
    ast = parser.parse(text)
    logger.info(ast)
    assert isinstance(ast.children[0].value, p.LineBreakP)
    assert isinstance(ast.children[1], p.ListNode)
    assert isinstance(ast.children[1].children[0], p.Node)
Example #4
    def __init__(self, source):
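        # lexer over the raw input source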
        self.lexer = lexer.Lexer(source)
        self.tokens = []
        self.nodes = [] # stack holding the AST nodes

        # debug flag
        self._debug = False

        # output
        self.output = {}

        # variables used during semantic analysis
        self.param = {'T': 0.0}

        # coordinate transform state
        self.origin_x = 0.0
        self.origin_y = 0.0
        self.scale_x = 1.0
        self.scale_y = 1.0
        self.rot_angle = 0.0
Example #5
    parse.add_argument('--html_cfg', dest='config_file_location',
                       help='Location of the json config file', required=False)
    parse.add_argument('--out_html', dest='out_html_location',
                       help='Location of the output html file', required=False)
    parse.add_argument('--out_dot', dest='out_dot_location',
                       help='Location of the output dot file', required=True)
    parse.add_argument('--input', dest='in_file_location',
                       help='Location of the input go file', required=True)
    return parse


parser = setup_parser()
res = parser.parse_args()

html_config = res.config_file_location
output_html = res.out_html_location
output_dot = res.out_dot_location
input_file = res.in_file_location

# fall back to defaults when optional arguments were not supplied
if html_config is None:
    html_config = 'config/html_config/color3.json'

if output_html is None:
    output_html = 'bin/default_html'

if output_dot is None:
    output_dot = 'bin/default_dot'

# run the lexer over the input file
lexer.Lexer(html_config, input_file, output_html)
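
A minimal usage sketch; the script filename below is a hypothetical placeholder:

# python generate_graph.py --input main.go --out_dot bin/main.dot \
#     --html_cfg config/html_config/color3.json --out_html bin/main.html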
Example #6
def test_newline():
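    # a trailing newline should yield a LineBreak as the penultimate token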
    text = """Anarchism is political movement.\n"""
    lexer = l.Lexer()
    tokens = lexer.tokenize(text)
    assert tokens[-2].token == l.LineBreak().start
Example #7
def test_redirect():
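    # a #REDIRECT page aborts tokenization by raising RedirectFound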
    lexer = l.Lexer()
    text = """#REDIRECT [[Ancient Greece]]{{Rcat shell|{{R move}}{{R related}}{{R unprintworthy}}}}"""
    with pytest.raises(l.RedirectFound):
        lexer.tokenize(text)
Example #8
def test_tokenize_errors():
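    # an unclosed template tag should raise MalformedTag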
    text = """{{Infobox"""
    lexer = l.Lexer()

    with pytest.raises(utils.MalformedTag):
        lexer.tokenize(text)