Example #1
def setUp(self):
    self.lexer = BSTLexer()
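
For context, this setUp fragment presumably lives inside a unittest.TestCase subclass. Below is a minimal self-contained sketch of that surrounding class; the class name BSTLexerTest and the test_integers_keyword case are illustrative assumptions, while BSTLexer itself is importable from pygments.lexers.bibtex and the expected first token mirrors the INTEGERS keyword seen in Example #2:

import unittest

from pygments.lexers.bibtex import BSTLexer
from pygments.token import Token


class BSTLexerTest(unittest.TestCase):  # hypothetical test-class name
    def setUp(self):
        # A fresh lexer per test keeps the cases independent of each other.
        self.lexer = BSTLexer()

    def test_integers_keyword(self):
        # Mirrors Example #2: `INTEGERS` is expected to lex as a keyword.
        first = list(self.lexer.get_tokens('INTEGERS { x }'))[0]
        self.assertEqual(first, (Token.Keyword, 'INTEGERS'))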
Example #2
import textwrap

from pygments.lexers.bibtex import BSTLexer
from pygments.token import Token


def test_basic_bst():
    lexer = BSTLexer()
    data = """
        % BibTeX standard bibliography style `plain'

        INTEGERS { output.state before.all }

        FUNCTION {sort.format.title}
        { 't :=
        "A " #2
            "An " #3
            "The " #4 t chop.word
            chop.word
        chop.word
        sortify
        #1 global.max$ substring$
        }

        ITERATE {call.type$}
    """
    tokens = [
        (Token.Comment.SingleLine,
         "% BibTeX standard bibliography style `plain'"),
        (Token.Text, '\n\n'),
        (Token.Keyword, 'INTEGERS'),
        (Token.Text, ' '),
        (Token.Punctuation, '{'),
        (Token.Text, ' '),
        (Token.Name.Variable, 'output.state'),
        (Token.Text, ' '),
        (Token.Name.Variable, 'before.all'),
        (Token.Text, ' '),
        (Token.Punctuation, '}'),
        (Token.Text, '\n\n'),
        (Token.Keyword, 'FUNCTION'),
        (Token.Text, ' '),
        (Token.Punctuation, '{'),
        (Token.Name.Variable, 'sort.format.title'),
        (Token.Punctuation, '}'),
        (Token.Text, '\n'),
        (Token.Punctuation, '{'),
        (Token.Text, ' '),
        (Token.Name.Function, "'t"),
        (Token.Text, ' '),
        (Token.Name.Variable, ':='),
        (Token.Text, '\n'),
        (Token.Literal.String, '"A "'),
        (Token.Text, ' '),
        (Token.Literal.Number, '#2'),
        (Token.Text, '\n    '),
        (Token.Literal.String, '"An "'),
        (Token.Text, ' '),
        (Token.Literal.Number, '#3'),
        (Token.Text, '\n    '),
        (Token.Literal.String, '"The "'),
        (Token.Text, ' '),
        (Token.Literal.Number, '#4'),
        (Token.Text, ' '),
        (Token.Name.Variable, 't'),
        (Token.Text, ' '),
        (Token.Name.Variable, 'chop.word'),
        (Token.Text, '\n    '),
        (Token.Name.Variable, 'chop.word'),
        (Token.Text, '\n'),
        (Token.Name.Variable, 'chop.word'),
        (Token.Text, '\n'),
        (Token.Name.Variable, 'sortify'),
        (Token.Text, '\n'),
        (Token.Literal.Number, '#1'),
        (Token.Text, ' '),
        (Token.Name.Builtin, 'global.max$'),
        (Token.Text, ' '),
        (Token.Name.Builtin, 'substring$'),
        (Token.Text, '\n'),
        (Token.Punctuation, '}'),
        (Token.Text, '\n\n'),
        (Token.Keyword, 'ITERATE'),
        (Token.Text, ' '),
        (Token.Punctuation, '{'),
        (Token.Name.Builtin, 'call.type$'),
        (Token.Punctuation, '}'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(data))) == tokens
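
Beyond the test itself, the same lexer plugs into the ordinary Pygments pipeline. A minimal usage sketch using the standard highlight()/TerminalFormatter API (the sample snippet is taken from the test data above):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.bibtex import BSTLexer

bst_source = 'INTEGERS { output.state before.all }'
# Renders the snippet with ANSI colors; swap in HtmlFormatter() for HTML.
print(highlight(bst_source, BSTLexer(), TerminalFormatter()))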