Code Example #1
File: test_shell.py Project: Oire/gobyexample
class BashTest(unittest.TestCase):

    def setUp(self):
        self.lexer = BashLexer()
        self.maxDiff = None

    def testCurlyNoEscapeAndQuotes(self):
        fragment = u'echo "${a//["b"]/}"\n'
        tokens = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testCurlyWithEscape(self):
        fragment = u'echo ${a//[\\"]/}\n'
        tokens = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testParsedSingle(self):
        fragment = u"a=$'abc\\''\n"
        tokens = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
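
For reference, a minimal sketch of inspecting the token stream outside the test harness, assuming BashLexer here is pygments.lexers.BashLexer (the class these tests exercise) and Pygments is installed:

from pygments.lexers import BashLexer

# Print the (token type, value) pairs that the assertions above compare
# against, using the double-quoted pattern-substitution fragment.
for tok_type, value in BashLexer().get_tokens('echo "${a//["b"]/}"\n'):
    print(tok_type, repr(value))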
Code Example #2
File: test_lexers.py Project: fariasjr/CitiTuirer
class TestLexers(TestCase):
    """Collection of lexers tests"""
    def setUp(self):
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
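
A minimal sketch of driving the IPython lexer directly, assuming the `lexers` module imported in setUp is IPython.lib.lexers (an assumption; the snippet above does not show its imports):

from pygments.token import Token
from IPython.lib import lexers  # assumed source of IPythonLexer

lexer = lexers.IPythonLexer()
# A leading '!' is emitted as a lone Operator token, and the rest of
# the line is delegated to BashLexer, matching the first assertion above.
tokens = list(lexer.get_tokens('!echo $HOME\n'))
assert tokens[0] == (Token.Operator, '!')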
Code Example #3
File: test_shell.py Project: sol/pygments
class BashTest(unittest.TestCase):

    def setUp(self):
        self.lexer = BashLexer()
        self.maxDiff = None

    def testCurlyNoEscapeAndQuotes(self):
        fragment = u'echo "${a//["b"]/}"\n'
        tokens = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testCurlyWithEscape(self):
        fragment = u'echo ${a//[\\"]/}\n'
        tokens = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testParsedSingle(self):
        fragment = u"a=$'abc\\''\n"
        tokens = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testShortVariableNames(self):
        fragment = u'x="$"\ny="$_"\nz="$abc"\n'
        tokens = [
            # single lone $
            (Token.Name.Variable, u'x'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'$'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # single letter shell var
            (Token.Name.Variable, u'y'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$_'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # multi-letter user var
            (Token.Name.Variable, u'z'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$abc'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testArrayNums(self):
        fragment = u'a=(1 2 3)\n'
        tokens = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Operator, u'('),
            (Token.Literal.Number, u'1'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'3'),
            (Token.Operator, u')'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testEndOfLineNums(self):
        fragment = u'a=1\nb=2 # comment\n'
        tokens = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'1'),
            (Token.Text, u'\n'),
            (Token.Name.Variable, u'b'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Comment.Single, u'# comment\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
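
To run this test case on its own, a minimal sketch, assuming the class above is importable from a test_shell module as the file header suggests:

import unittest

# Hypothetical module path; adjust to wherever BashTest actually lives.
suite = unittest.defaultTestLoader.loadTestsFromName('test_shell.BashTest')
unittest.TextTestRunner(verbosity=2).run(suite)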
Code Example #4
class TestLexers(TestCase):
    """Collection of lexers tests"""
    def setUp(self):
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        fragment = "!echo $HOME\n"
        tokens = [(Token.Operator, "!")]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment_2 = "!" + fragment
        tokens_2 = [(Token.Operator, "!!")] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "\t %%!\n" + fragment[1:]
        tokens_2 = [
            (Token.Text, "\t "),
            (Token.Operator, "%%!"),
            (Token.Text, "\n"),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "x = " + fragment
        tokens_2 = [
            (Token.Name, "x"),
            (Token.Text, " "),
            (Token.Operator, "="),
            (Token.Text, " "),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "x, = " + fragment
        tokens_2 = [
            (Token.Name, "x"),
            (Token.Punctuation, ","),
            (Token.Text, " "),
            (Token.Operator, "="),
            (Token.Text, " "),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "x, = %sx " + fragment[1:]
        tokens_2 = [
            (Token.Name, "x"),
            (Token.Punctuation, ","),
            (Token.Text, " "),
            (Token.Operator, "="),
            (Token.Text, " "),
            (Token.Operator, "%"),
            (Token.Keyword, "sx"),
            (Token.Text, " "),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "f = %R function () {}\n"
        tokens_2 = [
            (Token.Name, "f"),
            (Token.Text, " "),
            (Token.Operator, "="),
            (Token.Text, " "),
            (Token.Operator, "%"),
            (Token.Keyword, "R"),
            (Token.Text, " function () {}\n"),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "\t%%xyz\n$foo\n"
        tokens_2 = [
            (Token.Text, "\t"),
            (Token.Operator, "%%"),
            (Token.Keyword, "xyz"),
            (Token.Text, "\n$foo\n"),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "%system?\n"
        tokens_2 = [
            (Token.Operator, "%"),
            (Token.Keyword, "system"),
            (Token.Operator, "?"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = "x != y\n"
        tokens_2 = [
            (Token.Name, "x"),
            (Token.Text, " "),
            (Token.Operator, "!="),
            (Token.Text, " "),
            (Token.Name, "y"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = " ?math.sin\n"
        tokens_2 = [
            (Token.Text, " "),
            (Token.Operator, "?"),
            (Token.Text, "math.sin"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment = " *int*?\n"
        tokens = [(Token.Text, " *int*"), (Token.Operator, "?"),
                  (Token.Text, "\n")]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = "%%writefile -a foo.py\nif a == b:\n    pass"
        tokens = [
            (Token.Operator, "%%writefile"),
            (Token.Text, " -a foo.py\n"),
            (Token.Keyword, "if"),
            (Token.Text, " "),
            (Token.Name, "a"),
            (Token.Text, " "),
            (Token.Operator, "=="),
            (Token.Text, " "),
            (Token.Name, "b"),
            (Token.Punctuation, ":"),
            (Token.Text, "\n"),
            (Token.Text, "    "),
            (Token.Keyword, "pass"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = "%%timeit\nmath.sin(0)"
        tokens = [
            (Token.Operator, "%%timeit\n"),
            (Token.Name, "math"),
            (Token.Operator, "."),
            (Token.Name, "sin"),
            (Token.Punctuation, "("),
            (Token.Literal.Number.Integer, "0"),
            (Token.Punctuation, ")"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = "%%HTML\n<div>foo</div>"
        tokens = [
            (Token.Operator, "%%HTML"),
            (Token.Text, "\n"),
            (Token.Punctuation, "<"),
            (Token.Name.Tag, "div"),
            (Token.Punctuation, ">"),
            (Token.Text, "foo"),
            (Token.Punctuation, "<"),
            (Token.Punctuation, "/"),
            (Token.Name.Tag, "div"),
            (Token.Punctuation, ">"),
            (Token.Text, "\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
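
The closing fragments exercise cell magics: %%writefile's arguments lex as plain text while its body is lexed as Python, and %%timeit and %%HTML delegate the cell body to the Python and HTML sub-lexers. A minimal sketch of rendering such a cell with a Pygments formatter, again assuming IPython.lib.lexers provides the lexer:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from IPython.lib import lexers  # assumed source of IPythonLexer, as above

cell = '%%timeit\nmath.sin(0)\n'
# highlight() runs get_tokens() internally and wraps each token in a
# styled <span>, so the output mirrors the token list asserted above.
print(highlight(cell, lexers.IPythonLexer(), HtmlFormatter()))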