Example #1
    def test_with_correct_code(self, MockNannyNag):
        """A python source code without any whitespace related problems."""

        with TemporaryPyFile(SOURCE_CODES["error_free"]) as file_path:
            with open(file_path) as f:
                tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
            self.assertFalse(MockNannyNag.called)
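TemporaryPyFile and SOURCE_CODES are helpers defined elsewhere in CPython's Lib/test/test_tabnanny.py; the test above assumes them. A self-contained sketch of the same check against an in-memory source, using only the standard library:

import io
import tokenize
import tabnanny

# Clean, consistently indented source: process_tokens() is expected to
# finish without raising tabnanny.NannyNag.
clean_source = "def greet(name):\n    return 'Hello, %s' % name\n"

tokens = tokenize.generate_tokens(io.StringIO(clean_source).readline)
tabnanny.process_tokens(tokens)  # no exception expected
print("no whitespace problems found")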
Example #2
 def tabnanny(self, filename):
     import tabnanny
     import tokenize
     f = open(filename, 'r')
     try:
         tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
     except tokenize.TokenError as msg:
         self.errorbox("Token error", "Token error:\n%s" % str(msg))
         return 0
Example #3
 def tabnanny(self, filename):
     f = open(filename, 'r')
     try:
         tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
     except tokenize.TokenError as msg:
         msgtxt, (lineno, start) = msg.args
         self.editwin.gotoline(lineno)
         self.errorbox("Tabnanny Tokenizing Error",
                       "Token Error: %s" % msgtxt)
         return False
Example #4
    def tabnanny(self, filename):
        f = open(filename, "r")
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
        except tokenize.TokenError as msg:
            msgtxt, (lineno, start) = msg.args
            self.editwin.gotoline(lineno)
            self.errorbox("Tabnanny Tokenizing Error", "Token Error: %s" % msgtxt)
            return False
        except tabnanny.NannyNag as nag:
            self.editwin.gotoline(nag.get_lineno())
            self.errorbox("Tab/space error", indent_message)
            return False

        return True
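The methods in this family call process_tokens() directly so a NannyNag can be mapped back to an editor line (indent_message here is a message string defined elsewhere in the surrounding module). For a quick standalone check, the module's public entry point tabnanny.check() prints its findings instead of raising; a minimal sketch with a placeholder path:

import tabnanny

# tabnanny's public, script-style API: check() walks a file (or directory)
# and prints any ambiguous-indentation findings rather than raising.
tabnanny.check("path/to/script.py")  # placeholder path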
Example #5
 def tabnanny(self, filename):
     with tokenize.open(filename) as f:
         try:
             tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
         except tokenize.TokenError as msg:
             msgtxt, (lineno, start) = msg.args
             self.editwin.gotoline(lineno)
             self.errorbox('Tabnanny Tokenizing Error',
                           'Token Error: %s' % msgtxt)
             return False
         except tabnanny.NannyNag as nag:
             self.editwin.gotoline(nag.get_lineno())
             self.errorbox('Tab/space error', indent_message)
             return False
     return True
Example #6
    def file_passes(self, temp_filename, original_filename=None):
        if original_filename is None:
            original_filename = temp_filename

        with open(temp_filename, "r") as temp_file:
            code = temp_file.read()

        # note that this uses non-public elements from stdlib's tabnanny, because tabnanny
        # is (very frustratingly) written only to be used as a script, but using it that way
        # in this context requires writing temporary files, running subprocesses, blah blah blah
        code_buffer = StringIO.StringIO(code)
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(code_buffer.readline))
        except tokenize.TokenError as e:
            return False, "# Could not parse code in %(f)s: %(e)s" % dict(e=e, f=original_filename)
Example #7
    def test_with_errored_codes_samples(self):
        """A python source code with whitespace related sampled problems."""

        # "tab_space_errored_1": executes block under type == tokenize.INDENT
        #                        at `tabnanny.process_tokens()`.
        # "tab space_errored_2": executes block under
        #                        `check_equal and type not in JUNK` condition at
        #                        `tabnanny.process_tokens()`.

        for key in ["tab_space_errored_1", "tab_space_errored_2"]:
            with self.subTest(key=key):
                with TemporaryPyFile(SOURCE_CODES[key]) as file_path:
                    with open(file_path) as f:
                        tokens = tokenize.generate_tokens(f.readline)
                        with self.assertRaises(tabnanny.NannyNag):
                            tabnanny.process_tokens(tokens)
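The SOURCE_CODES fixtures themselves are defined in the same test module and are not shown here. A hypothetical fixture in the same spirit mixes a tab and spaces so that the indentation is ambiguous; exact tokenizer behaviour varies slightly between Python versions, but on the versions this test targets process_tokens() raises NannyNag:

import io
import tokenize
import tabnanny

# Hypothetical sample: the second body line is indented with spaces while
# the previous line used a tab, so the two indents compare as equal only
# for some tab sizes -- exactly what tabnanny flags.
bad_source = "def f():\n\tif True:\n        pass\n"

tokens = tokenize.generate_tokens(io.StringIO(bad_source).readline)
try:
    tabnanny.process_tokens(tokens)
except tabnanny.NannyNag as nag:
    print("ambiguous indentation at line", nag.get_lineno())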
Example #8
def get_parse_error(code):
    """
    Checks code for ambiguous tabs or other basic parsing issues.

    :param code: a string containing a file's worth of Python code
    :returns: a string containing a description of the first parse error encountered,
              or None if the code is ok
    """
    # note that this uses non-public elements from stdlib's tabnanny, because tabnanny
    # is (very frustratingly) written only to be used as a script, but using it that way
    # in this context requires writing temporary files, running subprocesses, blah blah blah
    code_buffer = StringIO(code)
    try:
        tabnanny.process_tokens(tokenize.generate_tokens(code_buffer.readline))
    except tokenize.TokenError as err:
        return "Could not parse code: %s" % err
Example #9
 def tabnanny(self, filename):
     f = open(filename, 'r')
     try:
         tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
     except tokenize.TokenError as msg:
         msgtxt, (lineno, start) = msg.args
         self.editwin.gotoline(lineno)
         self.errorbox("Tabnanny Tokenizing Error",
                       "Token Error: %s" % msgtxt)
         return False
     except tabnanny.NannyNag as nag:
         # The error messages from tabnanny are too confusing...
         self.editwin.gotoline(nag.get_lineno())
         self.errorbox("Tab/space error", indent_message)
         return False
     return True
Example #10
 def tabnanny(self, filename):
     # XXX: tabnanny should work on binary files as well
     with tokenize.open(filename) as f:
         try:
             tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
         except tokenize.TokenError as msg:
             msgtxt, (lineno, start) = msg.args
             self.editwin.gotoline(lineno)
             self.errorbox("Tabnanny Tokenizing Error",
                           "Token Error: %s" % msgtxt)
             return False
         except tabnanny.NannyNag as nag:
             # The error messages from tabnanny are too confusing...
             self.editwin.gotoline(nag.get_lineno())
             self.errorbox("Tab/space error", indent_message)
             return False
     return True
Example #11
    def file_passes(self, temp_filename, original_filename=None):
        if original_filename is None:
            original_filename = temp_filename

        with open(temp_filename, "r") as temp_file:
            code = temp_file.read()

        # note that this uses non-public elements from stdlib's tabnanny, because tabnanny
        # is (very frustratingly) written only to be used as a script, but using it that way
        # in this context requires writing temporary files, running subprocesses, blah blah blah
        code_buffer = StringIO.StringIO(code)
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(code_buffer.readline))
        except tokenize.TokenError as e:
            return False, "# Could not parse code in {f}: {e}".format(e=e, f=original_filename)
        except IndentationError as e:
            return False, "# Indentation error in {f}: {e}".format(e=e, f=original_filename)
        except tabnanny.NannyNag as e:
            return False, "# Ambiguous tab in {f} at line {line}; line is '{contents}'.".format(line=e.get_lineno(),
                                                                                                contents=e.get_line().rstrip(),
                                                                                                f=original_filename)
        return True, None
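A hypothetical call site for file_passes(); "checker" stands in for an instance of the (unshown) class that defines it:

# Hypothetical driver: file_passes() returns (True, None) on success, or
# (False, "# <diagnostic comment>") when tokenizing or indentation fails.
ok, message = checker.file_passes("/tmp/candidate.py", original_filename="candidate.py")
if not ok:
    print(message)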
Example #12
 def tabnanny(self, filename):
     # XXX: tabnanny should work on binary files as well
     with open(filename, 'r', encoding='iso-8859-1') as f:
         two_lines = f.readline() + f.readline()
     encoding = IOBinding.coding_spec(two_lines)
     if not encoding:
         encoding = 'utf-8'
     f = open(filename, 'r', encoding=encoding)
     try:
         tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
     except tokenize.TokenError as msg:
         msgtxt, (lineno, start) = msg.args
         self.editwin.gotoline(lineno)
         self.errorbox("Tabnanny Tokenizing Error",
                       "Token Error: %s" % msgtxt)
         return False
     except tabnanny.NannyNag as nag:
         # The error messages from tabnanny are too confusing...
         self.editwin.gotoline(nag.get_lineno())
         self.errorbox("Tab/space error", indent_message)
         return False
     return True
Example #13
 def update_event(self, inp=-1):
     self.set_output_val(0, tabnanny.process_tokens(self.input(0)))
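Note that process_tokens() always returns None and reports problems by raising, so the node above effectively outputs None and surfaces errors only as exceptions. A framework-independent wrapper that turns the outcome into a value is sketched below (check_tokens is an illustrative name, not part of tabnanny):

import tokenize
import tabnanny

def check_tokens(readline):
    """Return (ok, message) instead of relying on exceptions."""
    try:
        tabnanny.process_tokens(tokenize.generate_tokens(readline))
    except tabnanny.NannyNag as nag:
        return False, "line %d: %s" % (nag.get_lineno(), nag.get_msg())
    except tokenize.TokenError as err:
        return False, "token error: %s" % err
    return True, "ok"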
Example #14
"""Extension to execute code outside the Python shell window.