Example #1
    def color_line(self, line):
        """
        Colorize one line of decompiled C code for display in IDA.

        The line is tokenized with the pygments CLexer, and each token
        is wrapped in an idaapi.COLSTR color tag chosen by token type.
        """
        lexer = CLexer()
        tokens = list(lexer.get_tokens(line))
        new_line = ""
        for t in tokens:
            ttype = t[0]
            ttext = str(t[1])
            if ttype == Token.Text:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_INSN)

            elif ttype == Token.Text.Whitespace:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_INSN)

            elif ttype == Token.Error:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_ERROR)

            elif ttype == Token.Other:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_DSTR)

            elif ttype == Token.Keyword:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_KEYWORD)

            elif ttype == Token.Name:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_LIBNAME)

            elif ttype == Token.Literal:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_LOCNAME)

            elif ttype == Token.Literal.String:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_STRING)

            elif ttype == Token.Literal.Number:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_DNUM)

            elif ttype == Token.Operator:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_ALTOP)

            elif ttype == Token.Punctuation:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_SYMBOL)

            elif ttype == Token.Comment:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_REGCMT)

            elif ttype == Token.Comment.Single:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_REGCMT)

            elif ttype == Token.Generic:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_CREFTAIL)

            else:
                new_line += idaapi.COLSTR(ttext, idaapi.SCOLOR_CREFTAIL)
        return new_line
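For reference, a minimal sketch that runs outside IDA (assuming only pygments is installed) and prints the (token type, text) pairs the loop above consumes. Pygments token types compare by exact equality, so ttype == Token.Literal matches only the Literal type itself, not subtypes such as Token.Literal.Number; that is why the chain above spells out each subtype it handles.

from pygments.lexers import CLexer

# Print the (token type, text) pairs produced for one line of C code.
line = 'int x = 0;  // init\n'
for ttype, ttext in CLexer().get_tokens(line):
    print(ttype, repr(ttext))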
Example #2
import unittest

from pygments.lexers import CLexer
from pygments.token import Number, Text


class CLexerTest(unittest.TestCase):

    def setUp(self):
        self.lexer = CLexer()

    def testNumbers(self):
        code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
        wanted = []
        for item in zip([Number.Integer, Number.Float, Number.Float,
                         Number.Float, Number.Oct, Number.Hex,
                         Number.Float, Number.Float], code.split()):
            wanted.append(item)
            wanted.append((Text, ' '))
        wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')]
        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)
Example #3
import unittest

from pygments.lexers import CLexer
from pygments.token import Number, Text


class CLexerTest(unittest.TestCase):
    def setUp(self):
        self.lexer = CLexer()

    def testNumbers(self):
        code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
        wanted = []
        for item in zip([
                Number.Integer, Number.Float, Number.Float, Number.Float,
                Number.Oct, Number.Hex, Number.Float, Number.Float
        ], code.split()):
            wanted.append(item)
            wanted.append((Text, ' '))
        wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')]
        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)
Example #4
    def add_comment(self):
        """
        Add a comment to the selected line.
        """
        print("GhIDA:: [DEBUG] add_comment called")
        colored_line = self.GetCurrentLine(notags=1)
        if not colored_line:
            idaapi.warning("Select a line")
            return False

        # Use pygments to parse the line to check if there are comments
        line = idaapi.tag_remove(colored_line)
        lexer = CLexer()
        tokens = list(lexer.get_tokens(line))
        text = ""
        text_comment = ""
        for t in tokens:
            ttype = t[0]
            ttext = str(t[1])
            if ttype == Token.Comment.Single:
                text_comment = ttext.replace('//', '').strip()
            else:
                text += ttext

        # Get the new comment
        comment = gl.display_comment_form(text_comment)
        if not comment:
            return False
        comment = comment.replace("//", "").replace("\n", " ")
        comment = comment.strip()

        # Create the new text
        full_comment = "    // %s" % comment
        text = text.rstrip()
        new_text = text + full_comment
        text_colored = self.color_line(new_text)

        num_line = self.GetLineNo()
        self.EditLine(num_line, text_colored)
        self.RefreshCurrent()

        # Add comment to cache
        COMMENTS_CACHE.add_comment_to_cache(self.__ea, num_line, full_comment)

        print("GhIDA:: [DEBUG] Added comment to #line: %d (%s)" %
              (num_line, new_text))
        return
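The same tokenization pass can be exercised outside IDA. A minimal sketch (assuming only pygments; the function name is hypothetical) that separates a trailing // comment from the code portion of a line, exactly as the loop above does:

from pygments.lexers import CLexer
from pygments.token import Token

def split_trailing_comment(line):
    # Accumulate non-comment tokens as code; capture the // comment text.
    code, comment = "", ""
    for ttype, ttext in CLexer().get_tokens(line):
        if ttype == Token.Comment.Single:
            comment = ttext.replace("//", "").strip()
        else:
            code += ttext
    return code.rstrip(), comment

print(split_trailing_comment("v = x + 1;  // increment\n"))
# Expected roughly: ('v = x + 1;', 'increment')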
Example #5
import textwrap
import unittest

from pygments.lexers import CLexer
from pygments.token import Number, Text, Token


class CLexerTest(unittest.TestCase):
    def setUp(self):
        self.lexer = CLexer()

    def testNumbers(self):
        code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
        wanted = []
        for item in zip([
                Number.Integer, Number.Float, Number.Float, Number.Float,
                Number.Oct, Number.Hex, Number.Float, Number.Float
        ], code.split()):
            wanted.append(item)
            wanted.append((Text, ' '))
        wanted = wanted[:-1] + [(Text, '\n')]
        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)

    def testSwitch(self):
        fragment = u'''\
        int main()
        {
            switch (0)
            {
                case 0:
                default:
                    ;
            }
        }
        '''
        tokens = [
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Keyword, u'switch'),
            (Token.Text, u' '),
            (Token.Punctuation, u'('),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'case'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'default'),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'            '),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(
            tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testSwitchSpaceBeforeColon(self):
        fragment = u'''\
        int main()
        {
            switch (0)
            {
                case 0 :
                default :
                    ;
            }
        }
        '''
        tokens = [
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Keyword, u'switch'),
            (Token.Text, u' '),
            (Token.Punctuation, u'('),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'case'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Text, u' '),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'default'),
            (Token.Text, u' '),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'            '),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(
            tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabel(self):
        fragment = u'''\
        int main()
        {
        foo:
          goto foo;
        }
        '''
        tokens = [
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Punctuation, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(
            tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelSpaceBeforeColon(self):
        fragment = u'''\
        int main()
        {
        foo :
          goto foo;
        }
        '''
        tokens = [
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Text, u' '),
            (Token.Punctuation, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(
            tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelFollowedByStatement(self):
        fragment = u'''\
        int main()
        {
        foo:return 0;
          goto foo;
        }
        '''
        tokens = [
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Punctuation, u':'),
            (Token.Keyword, u'return'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(
            tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testPreprocFile(self):
        fragment = u'#include <foo>\n'
        tokens = [
            (Token.Comment.Preproc, u'#'),
            (Token.Comment.Preproc, u'include'),
            (Token.Text, u' '),
            (Token.Comment.PreprocFile, u'<foo>'),
            (Token.Comment.Preproc, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testPreprocFile2(self):
        fragment = u'#include "foo.h"\n'
        tokens = [
            (Token.Comment.Preproc, u'#'),
            (Token.Comment.Preproc, u'include'),
            (Token.Text, u' '),
            (Token.Comment.PreprocFile, u'"foo.h"'),
            (Token.Comment.Preproc, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
Example #6
import textwrap
import unittest

from pygments.lexers import CLexer
from pygments.token import Number, Text, Token


class CLexerTest(unittest.TestCase):
    def setUp(self):
        self.lexer = CLexer()

    def testNumbers(self):
        code = "42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23"
        wanted = []
        for item in zip(
            [
                Number.Integer,
                Number.Float,
                Number.Float,
                Number.Float,
                Number.Oct,
                Number.Hex,
                Number.Float,
                Number.Float,
            ],
            code.split(),
        ):
            wanted.append(item)
            wanted.append((Text, " "))
        wanted = wanted[:-1] + [(Text, "\n")]
        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)

    def testSwitch(self):
        fragment = u"""\
        int main()
        {
            switch (0)
            {
                case 0:
                default:
                    ;
            }
        }
        """
        tokens = [
            (Token.Keyword.Type, u"int"),
            (Token.Text, u" "),
            (Token.Name.Function, u"main"),
            (Token.Punctuation, u"("),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Keyword, u"switch"),
            (Token.Text, u" "),
            (Token.Punctuation, u"("),
            (Token.Literal.Number.Integer, u"0"),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Text, u"        "),
            (Token.Keyword, u"case"),
            (Token.Text, u" "),
            (Token.Literal.Number.Integer, u"0"),
            (Token.Operator, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"        "),
            (Token.Keyword, u"default"),
            (Token.Operator, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"            "),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testSwitchSpaceBeforeColon(self):
        fragment = u"""\
        int main()
        {
            switch (0)
            {
                case 0 :
                default :
                    ;
            }
        }
        """
        tokens = [
            (Token.Keyword.Type, u"int"),
            (Token.Text, u" "),
            (Token.Name.Function, u"main"),
            (Token.Punctuation, u"("),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Keyword, u"switch"),
            (Token.Text, u" "),
            (Token.Punctuation, u"("),
            (Token.Literal.Number.Integer, u"0"),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Text, u"        "),
            (Token.Keyword, u"case"),
            (Token.Text, u" "),
            (Token.Literal.Number.Integer, u"0"),
            (Token.Text, u" "),
            (Token.Operator, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"        "),
            (Token.Keyword, u"default"),
            (Token.Text, u" "),
            (Token.Operator, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"            "),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Text, u"    "),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabel(self):
        fragment = u"""\
        int main()
        {
        foo:
          goto foo;
        }
        """
        tokens = [
            (Token.Keyword.Type, u"int"),
            (Token.Text, u" "),
            (Token.Name.Function, u"main"),
            (Token.Punctuation, u"("),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Name.Label, u"foo"),
            (Token.Punctuation, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"  "),
            (Token.Keyword, u"goto"),
            (Token.Text, u" "),
            (Token.Name, u"foo"),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelSpaceBeforeColon(self):
        fragment = u"""\
        int main()
        {
        foo :
          goto foo;
        }
        """
        tokens = [
            (Token.Keyword.Type, u"int"),
            (Token.Text, u" "),
            (Token.Name.Function, u"main"),
            (Token.Punctuation, u"("),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Name.Label, u"foo"),
            (Token.Text, u" "),
            (Token.Punctuation, u":"),
            (Token.Text, u"\n"),
            (Token.Text, u"  "),
            (Token.Keyword, u"goto"),
            (Token.Text, u" "),
            (Token.Name, u"foo"),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelFollowedByStatement(self):
        fragment = u"""\
        int main()
        {
        foo:return 0;
          goto foo;
        }
        """
        tokens = [
            (Token.Keyword.Type, u"int"),
            (Token.Text, u" "),
            (Token.Name.Function, u"main"),
            (Token.Punctuation, u"("),
            (Token.Punctuation, u")"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"{"),
            (Token.Text, u"\n"),
            (Token.Name.Label, u"foo"),
            (Token.Punctuation, u":"),
            (Token.Keyword, u"return"),
            (Token.Text, u" "),
            (Token.Literal.Number.Integer, u"0"),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Text, u"  "),
            (Token.Keyword, u"goto"),
            (Token.Text, u" "),
            (Token.Name, u"foo"),
            (Token.Punctuation, u";"),
            (Token.Text, u"\n"),
            (Token.Punctuation, u"}"),
            (Token.Text, u"\n"),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
Example #7

import textwrap
import unittest

from pygments.lexers import CLexer
from pygments.token import Number, Text, Token


class CLexerTest(unittest.TestCase):

    def setUp(self):
        self.lexer = CLexer()

    def testNumbers(self):
        code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
        wanted = []
        for item in zip([Number.Integer, Number.Float, Number.Float,
                         Number.Float, Number.Oct, Number.Hex,
                         Number.Float, Number.Float], code.split()):
            wanted.append(item)
            wanted.append((Text, ' '))
        wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')]
        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)

    def testSwitch(self):
        fragment = u'''\
        int main()
        {
            switch (0)
            {
                case 0:
                default:
                    ;
            }
        }
        '''
        expected = [
            (Token.Text, u''),
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Text, u''),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Keyword, u'switch'),
            (Token.Text, u' '),
            (Token.Punctuation, u'('),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'case'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'default'),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'            '),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testSwitchSpaceBeforeColon(self):
        fragment = u'''\
        int main()
        {
            switch (0)
            {
                case 0 :
                default :
                    ;
            }
        }
        '''
        expected = [
            (Token.Text, u''),
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Text, u''),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Keyword, u'switch'),
            (Token.Text, u' '),
            (Token.Punctuation, u'('),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'case'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Text, u' '),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'        '),
            (Token.Keyword, u'default'),
            (Token.Text, u' '),
            (Token.Operator, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'            '),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'    '),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabel(self):
        fragment = u'''\
        int main()
        {
        foo:
          goto foo;
        }
        '''
        expected = [
            (Token.Text, u''),
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Text, u''),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Punctuation, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelSpaceBeforeColon(self):
        fragment = u'''\
        int main()
        {
        foo :
          goto foo;
        }
        '''
        expected = [
            (Token.Text, u''),
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Text, u''),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Text, u' '),
            (Token.Punctuation, u':'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))

    def testLabelFollowedByStatement(self):
        fragment = u'''\
        int main()
        {
        foo:return 0;
          goto foo;
        }
        '''
        expected = [
            (Token.Text, u''),
            (Token.Keyword.Type, u'int'),
            (Token.Text, u' '),
            (Token.Name.Function, u'main'),
            (Token.Text, u''),
            (Token.Punctuation, u'('),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
            (Token.Punctuation, u'{'),
            (Token.Text, u'\n'),
            (Token.Name.Label, u'foo'),
            (Token.Punctuation, u':'),
            (Token.Keyword, u'return'),
            (Token.Text, u' '),
            (Token.Literal.Number.Integer, u'0'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Text, u'  '),
            (Token.Keyword, u'goto'),
            (Token.Text, u' '),
            (Token.Name, u'foo'),
            (Token.Punctuation, u';'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'}'),
            (Token.Text, u'\n'),
            (Token.Text, u''),
        ]
        self.assertEqual(expected, list(self.lexer.get_tokens(textwrap.dedent(fragment))))