Example #1
0
 def setUp(self):
     # Create a fresh BSTLexer instance before each test so no lexer
     # state can leak between test methods.
     self.lexer = BSTLexer()
Example #2
0
class BSTTest(unittest.TestCase):
    """Tests for the Pygments lexer handling BibTeX style (.bst) files."""

    def setUp(self):
        # A fresh lexer per test keeps individual test methods independent.
        self.lexer = BSTLexer()

    def testBasicBST(self):
        """Tokenise a fragment of the standard `plain' style and compare
        the produced stream against the exact expected tokens."""
        source = """
            % BibTeX standard bibliography style `plain'

            INTEGERS { output.state before.all }

            FUNCTION {sort.format.title}
            { 't :=
            "A " #2
                "An " #3
                "The " #4 t chop.word
                chop.word
            chop.word
            sortify
            #1 global.max$ substring$
            }

            ITERATE {call.type$}
        """
        # Short aliases keep the expected-token table readable.
        text = Token.Text
        punct = Token.Punctuation
        var = Token.Name.Variable
        builtin = Token.Name.Builtin
        expected = [
            (Token.Comment.SingleLine,
             "% BibTeX standard bibliography style `plain'"),
            (text, '\n\n'),
            (Token.Keyword, 'INTEGERS'),
            (text, ' '),
            (punct, '{'),
            (text, ' '),
            (var, 'output.state'),
            (text, ' '),
            (var, 'before.all'),
            (text, ' '),
            (punct, '}'),
            (text, '\n\n'),
            (Token.Keyword, 'FUNCTION'),
            (text, ' '),
            (punct, '{'),
            (var, 'sort.format.title'),
            (punct, '}'),
            (text, '\n'),
            (punct, '{'),
            (text, ' '),
            (Token.Name.Function, "'t"),
            (text, ' '),
            (var, ':='),
            (text, '\n'),
            (Token.Literal.String, '"A "'),
            (text, ' '),
            (Token.Literal.Number, '#2'),
            (text, '\n    '),
            (Token.Literal.String, '"An "'),
            (text, ' '),
            (Token.Literal.Number, '#3'),
            (text, '\n    '),
            (Token.Literal.String, '"The "'),
            (text, ' '),
            (Token.Literal.Number, '#4'),
            (text, ' '),
            (var, 't'),
            (text, ' '),
            (var, 'chop.word'),
            (text, '\n    '),
            (var, 'chop.word'),
            (text, '\n'),
            (var, 'chop.word'),
            (text, '\n'),
            (var, 'sortify'),
            (text, '\n'),
            (Token.Literal.Number, '#1'),
            (text, ' '),
            (builtin, 'global.max$'),
            (text, ' '),
            (builtin, 'substring$'),
            (text, '\n'),
            (punct, '}'),
            (text, '\n\n'),
            (Token.Keyword, 'ITERATE'),
            (text, ' '),
            (punct, '{'),
            (builtin, 'call.type$'),
            (punct, '}'),
            (text, '\n'),
        ]
        actual = list(self.lexer.get_tokens(textwrap.dedent(source)))
        self.assertEqual(actual, expected)
Example #3
0
class BSTTest(unittest.TestCase):
    """Unit tests covering the lexer for BibTeX bibliography styles."""

    def setUp(self):
        # Build the lexer under test once per test method.
        self.lexer = BSTLexer()

    def testBasicBST(self):
        """The token stream for a representative .bst snippet must match
        the expected (token type, value) pairs exactly."""
        bst_source = """
            % BibTeX standard bibliography style `plain'

            INTEGERS { output.state before.all }

            FUNCTION {sort.format.title}
            { 't :=
            "A " #2
                "An " #3
                "The " #4 t chop.word
                chop.word
            chop.word
            sortify
            #1 global.max$ substring$
            }

            ITERATE {call.type$}
        """
        expected_tokens = [
            (Token.Comment.SingleLine,
             "% BibTeX standard bibliography style `plain'"),
            (Token.Text, '\n\n'),
            (Token.Keyword, 'INTEGERS'),
            (Token.Text, ' '),
            (Token.Punctuation, '{'),
            (Token.Text, ' '),
            (Token.Name.Variable, 'output.state'),
            (Token.Text, ' '),
            (Token.Name.Variable, 'before.all'),
            (Token.Text, ' '),
            (Token.Punctuation, '}'),
            (Token.Text, '\n\n'),
            (Token.Keyword, 'FUNCTION'),
            (Token.Text, ' '),
            (Token.Punctuation, '{'),
            (Token.Name.Variable, 'sort.format.title'),
            (Token.Punctuation, '}'),
            (Token.Text, '\n'),
            (Token.Punctuation, '{'),
            (Token.Text, ' '),
            (Token.Name.Function, "'t"),
            (Token.Text, ' '),
            (Token.Name.Variable, ':='),
            (Token.Text, '\n'),
            (Token.Literal.String, '"A "'),
            (Token.Text, ' '),
            (Token.Literal.Number, '#2'),
            (Token.Text, '\n    '),
            (Token.Literal.String, '"An "'),
            (Token.Text, ' '),
            (Token.Literal.Number, '#3'),
            (Token.Text, '\n    '),
            (Token.Literal.String, '"The "'),
            (Token.Text, ' '),
            (Token.Literal.Number, '#4'),
            (Token.Text, ' '),
            (Token.Name.Variable, 't'),
            (Token.Text, ' '),
            (Token.Name.Variable, 'chop.word'),
            (Token.Text, '\n    '),
            (Token.Name.Variable, 'chop.word'),
            (Token.Text, '\n'),
            (Token.Name.Variable, 'chop.word'),
            (Token.Text, '\n'),
            (Token.Name.Variable, 'sortify'),
            (Token.Text, '\n'),
            (Token.Literal.Number, '#1'),
            (Token.Text, ' '),
            (Token.Name.Builtin, 'global.max$'),
            (Token.Text, ' '),
            (Token.Name.Builtin, 'substring$'),
            (Token.Text, '\n'),
            (Token.Punctuation, '}'),
            (Token.Text, '\n\n'),
            (Token.Keyword, 'ITERATE'),
            (Token.Text, ' '),
            (Token.Punctuation, '{'),
            (Token.Name.Builtin, 'call.type$'),
            (Token.Punctuation, '}'),
            (Token.Text, '\n'),
        ]
        # Strip the common leading indentation before lexing, exactly as a
        # real .bst file would be laid out.
        dedented = textwrap.dedent(bst_source)
        self.assertEqual(list(self.lexer.get_tokens(dedented)), expected_tokens)
Example #4
0
 def setUp(self):
     # Instantiate the BibTeX style-file lexer used by the test below;
     # a fresh instance per test avoids cross-test state.
     self.lexer = BSTLexer()
def test_basic_bst():
    """Check the exact token stream produced for a small .bst fragment."""
    bst = BSTLexer()
    snippet = """
        % BibTeX standard bibliography style `plain'

        INTEGERS { output.state before.all }

        FUNCTION {sort.format.title}
        { 't :=
        "A " #2
            "An " #3
            "The " #4 t chop.word
            chop.word
        chop.word
        sortify
        #1 global.max$ substring$
        }

        ITERATE {call.type$}
    """
    # Aliases keep the expected-token table below compact.
    txt = Token.Text
    pct = Token.Punctuation
    var = Token.Name.Variable
    bi = Token.Name.Builtin
    expected = [
        (Token.Comment.SingleLine,
         "% BibTeX standard bibliography style `plain'"),
        (txt, '\n\n'),
        (Token.Keyword, 'INTEGERS'),
        (txt, ' '),
        (pct, '{'),
        (txt, ' '),
        (var, 'output.state'),
        (txt, ' '),
        (var, 'before.all'),
        (txt, ' '),
        (pct, '}'),
        (txt, '\n\n'),
        (Token.Keyword, 'FUNCTION'),
        (txt, ' '),
        (pct, '{'),
        (var, 'sort.format.title'),
        (pct, '}'),
        (txt, '\n'),
        (pct, '{'),
        (txt, ' '),
        (Token.Name.Function, "'t"),
        (txt, ' '),
        (var, ':='),
        (txt, '\n'),
        (Token.Literal.String, '"A "'),
        (txt, ' '),
        (Token.Literal.Number, '#2'),
        (txt, '\n    '),
        (Token.Literal.String, '"An "'),
        (txt, ' '),
        (Token.Literal.Number, '#3'),
        (txt, '\n    '),
        (Token.Literal.String, '"The "'),
        (txt, ' '),
        (Token.Literal.Number, '#4'),
        (txt, ' '),
        (var, 't'),
        (txt, ' '),
        (var, 'chop.word'),
        (txt, '\n    '),
        (var, 'chop.word'),
        (txt, '\n'),
        (var, 'chop.word'),
        (txt, '\n'),
        (var, 'sortify'),
        (txt, '\n'),
        (Token.Literal.Number, '#1'),
        (txt, ' '),
        (bi, 'global.max$'),
        (txt, ' '),
        (bi, 'substring$'),
        (txt, '\n'),
        (pct, '}'),
        (txt, '\n\n'),
        (Token.Keyword, 'ITERATE'),
        (txt, ' '),
        (pct, '{'),
        (bi, 'call.type$'),
        (pct, '}'),
        (txt, '\n'),
    ]
    result = list(bst.get_tokens(textwrap.dedent(snippet)))
    assert result == expected