def runPyflakes(script_code, script_path):
  from pyflakes.api import check
  from pyflakes import reporter
  from StringIO import StringIO
  stream = StringIO()
  check(script_code, script_path, reporter.Reporter(stream, stream))
  return stream.getvalue()
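
The example above targets Python 2 (it imports StringIO from the old StringIO module). A minimal Python 3 sketch of the same idea, assuming only the public pyflakes API, could look like this:

def run_pyflakes_py3(script_code, script_path):
    # Same approach as above, but with Python 3's io.StringIO as the in-memory stream.
    from io import StringIO
    from pyflakes.api import check
    from pyflakes import reporter
    stream = StringIO()
    # Route both warnings and errors to the same stream, then return the captured text.
    check(script_code, script_path, reporter.Reporter(stream, stream))
    return stream.getvalue()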
Example #2
def check_flake(filename, code, builtins=None):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a dictionary describing issues of code quality per line. See:

    https://github.com/PyCQA/pyflakes

    If a list of symbols is passed in as "builtins", these are assumed to be
    additional builtins available when run by Mu.
    """
    import_all = "from microbit import *" in code
    if import_all:
        # Massage code so "from microbit import *" is expanded so the symbols
        # are known to flake.
        code = code.replace("from microbit import *", EXPANDED_IMPORT)
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    if builtins:
        builtins_regex = re.compile(r"^undefined name '(" +
                                    '|'.join(builtins) + r")'")
    feedback = {}
    for log in reporter.log:
        if import_all:
            # Guard to stop unwanted "microbit.* imported but unused" messages.
            message = log['message']
            if EXPAND_FALSE_POSITIVE.match(message):
                continue
        if builtins:
            if builtins_regex.match(log['message']):
                continue
        if log['line_no'] not in feedback:
            feedback[log['line_no']] = []
        feedback[log['line_no']].append(log)
    return feedback
Example #3
File: logic.py Project: ntoll/mu
def check_flake(filename, code):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a dictionary describing issues of code quality per line. See:

    https://github.com/PyCQA/pyflakes
    """
    import_all = "from microbit import *" in code
    if import_all:
        # Massage code so "from microbit import *" is expanded so the symbols
        # are known to flake.
        code = code.replace("from microbit import *", EXPANDED_IMPORT)
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    feedback = {}
    for log in reporter.log:
        if import_all:
            # Guard to stop unwanted "microbit.* imported but unused" messages.
            message = log['message']
            if EXPAND_FALSE_POSITIVE.match(message):
                continue
        if log['line_no'] not in feedback:
            feedback[log['line_no']] = []
        feedback[log['line_no']].append(log)
    return feedback
Example #4
def check_flake(filename, code):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a dictionary describing issues of code quality per line. See:

    https://github.com/PyCQA/pyflakes
    """
    import_all = "from microbit import *" in code
    if import_all:
        # Massage code so "from microbit import *" is expanded so the symbols
        # are known to flake.
        code = code.replace("from microbit import *", EXPANDED_IMPORT)
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    feedback = {}
    for log in reporter.log:
        if import_all:
            # Guard to stop unwanted "microbit.* imported but unused" messages.
            message = log['message']
            if EXPAND_FALSE_POSITIVE.match(message):
                continue
        if log['line_no'] not in feedback:
            feedback[log['line_no']] = []
        feedback[log['line_no']].append(log)
    return feedback
Example #5
def check_flake(filename, code, builtins=None):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a dictionary describing issues of code quality per line. See:

    https://github.com/PyCQA/pyflakes

    If a list of symbols is passed in as "builtins", these are assumed to be
    additional builtins available when run by Mu.
    """
    import_all = "from microbit import *" in code
    if import_all:
        # Massage code so "from microbit import *" is expanded so the symbols
        # are known to flake.
        code = code.replace("from microbit import *", EXPANDED_IMPORT)
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    if builtins:
        builtins_regex = re.compile(r"^undefined name '(" +
                                    '|'.join(builtins) + r")'")
    feedback = {}
    for log in reporter.log:
        if import_all:
            # Guard to stop unwanted "microbit.* imported but unused" messages.
            message = log['message']
            if EXPAND_FALSE_POSITIVE.match(message):
                continue
        if builtins:
            if builtins_regex.match(log['message']):
                continue
        if log['line_no'] not in feedback:
            feedback[log['line_no']] = []
        feedback[log['line_no']].append(log)
    return feedback
Example #6
def pyflakes_report(code):
    """Inspect code using PyFlakes to detect any 'missing name' report.

    Args:
        code: A multiline string representing Python code

    Returns: a set of names that have been reported missing by Flakes
    """
    flakes_stdout = StreamList()
    flakes_stderr = StreamList()
    rep = pyflakes_reporter.Reporter(flakes_stdout.reset(),
                                     flakes_stderr.reset())
    pyflakes_api.check(code, filename="kale", reporter=rep)

    # the stderr stream should be used just for compilation errors, so if any
    # message is found in the stderr stream, raise an exception
    if rep._stderr():
        raise RuntimeError("Flakes reported the following error:"
                           "\n{}".format('\t' + '\t'.join(rep._stderr())))

    # Match names
    p = r"'(.+?)'"

    out = rep._stdout()
    # Using a `set` to avoid repeating the same var names in case they are
    # reported missing multiple times by flakes
    undef_vars = set()
    # iterate over all the flakes report output, keeping only lines
    # with 'undefined name' reports
    for line in filter(lambda a: a != '\n' and 'undefined name' in a, out):
        var_search = re.search(p, line)
        undef_vars.add(var_search.group(1))
    return undef_vars
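
The StreamList helper used above is not shown in this excerpt. Judging from how it is used (reset() is handed to Reporter, which only calls write() on its streams, and rep._stdout() is later iterated), a minimal sketch could look like the following; the original project's implementation may differ:

class StreamList:
    """List-backed stream: collects every chunk written to it."""

    def __init__(self):
        self.out = []

    def write(self, text):
        # pyflakes' Reporter only needs a write() method on its streams.
        self.out.append(text)

    def reset(self):
        # Empty the buffer and return self so it can be passed to Reporter inline.
        self.out = []
        return self

    def __call__(self):
        # Calling the instance (as rep._stdout() does above) yields the collected lines.
        return self.out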
Example #7
def check_flake(filename, code):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a list of items describing issues of code quality. See:

    https://github.com/PyCQA/pyflakes
    """
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    return reporter.log
Example #8
File: logic.py Project: eduvik/mu
def check_flake(filename, code):
    """
    Given a filename and some code to be checked, uses the PyFlakes module to
    return a list of items describing issues of code quality. See:

    https://github.com/PyCQA/pyflakes
    """
    reporter = MuFlakeCodeReporter()
    check(code, filename, reporter)
    return reporter.log
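
MuFlakeCodeReporter is not defined in these excerpts. The examples above only rely on its log attribute being a list of dicts with 'line_no' and 'message' keys, so a stand-in reporter could be sketched as below (pyflakes only requires the three methods shown; this is not Mu's actual implementation):

class SimpleFlakeReporter:
    """Collects pyflakes output as a list of dicts instead of writing to streams."""

    def __init__(self):
        self.log = []

    def unexpectedError(self, filename, msg):
        # Called when pyflakes itself fails, e.g. on unreadable source.
        self.log.append({'line_no': 0, 'message': str(msg)})

    def syntaxError(self, filename, msg, lineno, offset, text):
        # Called for syntax errors; lineno is 1-based.
        self.log.append({'line_no': lineno, 'message': str(msg)})

    def flake(self, message):
        # Called for ordinary pyflakes warnings; message carries lineno and the text.
        self.log.append({'line_no': message.lineno, 'message': str(message)})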
Example #9
def check_code(script):
    """Run pyflakes checks over the script and capture warnings/errors."""
    errors = io.StringIO()
    warnings = io.StringIO()
    reporter = Reporter(warnings, errors)
    check(script, "notebook", reporter)

    warnings.seek(0)
    errors.seek(0)

    return warnings, errors
Example #10
def run_pyflakes():
    """ check if the code wer'e about to import is fine"""
    returnSTRM = StringIO()
    reporter = modReporter.Reporter(returnSTRM, returnSTRM)
    with open("setup.py", "r") as file:
        pyflask_api.check(file.read(), 'line', reporter)

    output = returnSTRM.getvalue()
    returnSTRM.close()

    return output
Example #11
def _pyflakes_annotations(path, text):
    warning_stream = io.StringIO()
    error_stream = io.StringIO()
    reporter = pyflakes.modReporter.Reporter(warning_stream, error_stream)

    pyflakes.check(text, path, reporter)

    warnings = _get_annotations(path, warning_stream, _Style.warning)
    errors = _get_annotations(path, error_stream, _Style.error)

    return warnings + errors
Example #12
 def execute(self, finder):
     issues = []
     reporter = TidyPyReporter(self.config)
     for filepath in finder.files(self.config['filters']):
         try:
             source = finder.read_file(filepath)
         except EnvironmentError as exc:
             issues.append(AccessIssue(exc, filepath))
         else:
             check(source, filepath, reporter)
     return reporter.get_issues() + issues
Example #13
def publish_diagnostic(source: str,
                       file_name=None) -> Tuple[WarningMsg, ErrorMsg]:
    file_name = file_name or "<stdin>"

    warning_buffer = StringIO()
    error_buffer = StringIO()
    reporter = Reporter(warning_buffer, error_buffer)

    check(source, file_name, reporter)
    LOGGER.debug("warning message: \n%s", warning_buffer.getvalue())
    LOGGER.debug("error message: \n%s", error_buffer.getvalue())

    return (warning_buffer.getvalue(), error_buffer.getvalue())
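
A hedged usage sketch of the function above (the exact pyflakes message format varies slightly between versions, so the output shown is only indicative):

warnings, errors = publish_diagnostic("import os\n")
print(warnings)  # something like: <stdin>:1:1: 'os' imported but unused
print(errors)    # empty for syntactically valid source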
Example #14
def main(files):
    """Call main in all given files."""
    t1 = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        s = g.readFileIntoEncodedString(fn)
        if s and s.strip():
            r = reporter.Reporter(errorStream=sys.stderr, warningStream=sys.stderr)
            api.check(s, sfn, r)
    t2 = time.time()
    n = len(files)
    print(f"{n} file{g.plural(n)}, time: {t2 - t1:5.2f} sec.")
Example #15
 def check_all(self, log_flag, pyflakes_errors_only, roots):
     """Run pyflakes on all files in paths."""
     try:
         from pyflakes import api, reporter
     except Exception:  # ModuleNotFoundError
         return True  # Pretend all is fine.
     total_errors = 0
     for i, root in enumerate(roots):
         fn = self.finalize(root)
         sfn = g.shortFileName(fn)
         # #1306: nopyflakes
         if any([
                 z.strip().startswith('@nopyflakes')
                 for z in g.splitLines(root.b)
         ]):
             continue
         # Report the file name.
         s = g.readFileIntoEncodedString(fn)
         if s and s.strip():
             if not pyflakes_errors_only:
                 g.es(f"Pyflakes: {sfn}")
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(i, roots),
                 warningStream=self.LogStream(i, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors
Example #16
def test_snippets(snippet):
    snippet.log_action(action="testing")
    pyflakes_stream = StringIO()
    if check(snippet.source, "", Reporter(pyflakes_stream, pyflakes_stream)):
        logger.error(f"Pyflakes error: {pyflakes_stream.getvalue()}")
        pytest.fail()
    steadymark.Runner(text=f"```python\n{snippet.source}\n```").run()
Example #17
    def check_all(self, log_flag, paths):
        '''Run pyflakes on all files in paths.'''
        from pyflakes import api, reporter
        total_errors = 0
        for fn in sorted(paths):
            # Report the file name.
            sfn = g.shortFileName(fn)
            s = g.readFileIntoEncodedString(fn, silent=False)
            if s.strip():
                g.es('Pyflakes: %s' % sfn)

                # Send all output to the log pane.
                class LogStream:
                    def write(self, s):
                        if s.strip():
                            g.es_print(s)
                            # It *is* useful to send pyflakes errors to the console.

                r = reporter.Reporter(
                    errorStream=LogStream(),
                    warningStream=LogStream(),
                )
                errors = api.check(s, sfn, r)
                total_errors += errors
        return total_errors
Example #18
    def check_all(self, log_flag, paths, pyflakes_errors_only):
        '''Run pyflakes on all files in paths.'''
        from pyflakes import api, reporter
        total_errors = 0
        for fn in sorted(paths):
            # Report the file name.
            sfn = g.shortFileName(fn)
            s = g.readFileIntoEncodedString(fn)
            if s.strip():
                if not pyflakes_errors_only:
                    g.es('Pyflakes: %s' % sfn)
                # Send all output to the log pane.

                class LogStream:

                    def write(self, s):
                        if s.strip():
                            g.es_print(s)
                                # It *is* useful to send pyflakes errors to the console.

                r = reporter.Reporter(
                        errorStream=LogStream(),
                        warningStream=LogStream(),
                    )
                errors = api.check(s, sfn, r)
                total_errors += errors
        return total_errors
Example #19
def identify_missing(file_dict):
    local_imports = {}
    for key, file_names in file_dict.items():
        if key in USED_FOLDER_EMAN2:
            local_imports[key] = []
            continue
        input_dir = os.path.join(NO_IMPORTS_DIR, key)
        for file_path in file_names:
            basename = os.path.basename(file_path)
            input_file_path = os.path.join(input_dir, basename)
            reset_lists()
            with open(input_file_path) as read:
                file_content = read.read()
            pyfl.check(file_content, basename, GLOBAL_REPORTER)
            local_imports[basename] = GLOBAL_CHECKER.undefined_names

    return local_imports
Example #20
def main(files):
    '''Call run on all tables in tables_table.'''
    t1 = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        s = g.readFileIntoEncodedString(fn)
        if s and s.strip():
            r = reporter.Reporter(
                errorStream=sys.stderr,
                warningStream=sys.stderr,
            )
            api.check(s, sfn, r)
    t2 = time.time()
    n = len(files)
    print('%s file%s, time: %5.2f sec.' % (n, g.plural(n), t2 - t1))
Example #21
def main(files):
    '''Call run on all tables in tables_table.'''    
    t1 = time.time()
    for fn in files:
        # Report the file name.
        assert g.os_path_exists(fn), fn
        sfn = g.shortFileName(fn)
        s = g.readFileIntoEncodedString(fn, silent=False)
        if s and s.strip():
            r = reporter.Reporter(
                errorStream=sys.stderr,
                warningStream=sys.stderr,
                )
            api.check(s, sfn, r)
    t2 = time.time()
    n = len(files)
    print('%s file%s, time: %5.2f sec.' % (n, g.plural(n), t2-t1))
Example #22
 def check_script(self, p, script):
     from pyflakes import api, reporter
     r = reporter.Reporter(
         errorStream=self.LogStream(),
         warningStream=self.LogStream(),
     )
     errors = api.check(script, '', r)
     return errors == 0
Example #23
 def check_and_get_results(filename, file_content=None):
     rprter = PyFlakeReporter()
     chk = check(file_content, filename, reporter=rprter)
     if not chk:
         return iter(())
     for result in rprter.errors:
         err_code, line, col, msg = result
         msg = get_show_msg(err_code, msg, line, col)
         yield (line, msg)
Example #24
    def inspect_code(self, code):
        code = self._append_globals(code)
        rep = reporter.Reporter(self.flakes_stdout.reset(),
                                self.flakes_stderr.reset())
        api.check(code, filename="block", reporter=rep)

        # Match names
        p = r"'(.+?)'"

        out = rep._stdout()
        # Use a set to avoid repeating the same entry if the same missing name is reported multiple times
        undef_vars = set()
        for l in list(
                filter(lambda a: a != '\n' and 'undefined name' in a, out)):
            var_search = re.search(p, l)
            undef_vars.add(var_search.group(1))

        return set(undef_vars)
Example #25
def check_source(nb):
    """
    Run pyflakes on a notebook; will catch errors such as missing passed
    parameters that do not have default values.
    """
    # concatenate all cell's source code in a single string
    source_code = '\n'.join([
        _comment_if_ipython_magic(c['source']) for c in nb.cells
        if c.cell_type == 'code'
    ])

    # these objects are needed to capture pyflakes output
    reporter = MyReporter()

    # run pyflakes.api.check on the source code
    pyflakes_api.check(source_code, filename='', reporter=reporter)

    reporter._check()
Example #26
 def test_exception(self):
     codes = [code1, code2, code3]
     if sys.version_info < (2, 6):
         codes[0] = code0
     elif sys.version_info < (3,):
         codes.insert(0, code0)
     for code in codes:
         reporter = FlakesTestReporter()
         warnings = check(code, "(stdin)", reporter)
         self.assertFalse(reporter.messages)
         self.assertEqual(warnings, 0)
Example #27
 def check_script(self, p, script):
     try:
         from pyflakes import api, reporter
     except Exception: # ModuleNotFoundError
         return True # Pretend all is fine.
     r = reporter.Reporter(
         errorStream=self.LogStream(),
         warningStream=self.LogStream(),
     )
     errors = api.check(script, '', r)
     return errors == 0
Example #28
def revisar_sintaxis_codigo(codigo: str) -> (int, str, str):
    # pip install --upgrade pyflakes
    import io
    import pyflakes.api as api
    from pyflakes import reporter as modReporter

    warning_stream = io.StringIO()
    error_stream = io.StringIO()
    reporter = modReporter.Reporter(warning_stream, error_stream)
    cantidad = api.check(codigo, 'Envio usuario, línea', reporter)
    warnings = warning_stream.getvalue()
    errores = error_stream.getvalue()

    return (cantidad, warnings, errores)
Example #29
 def _handle_diagnosis(self):
     reporter = PyflakesDiagnosticReport('')
     self.document_id = -1
     while 1:
         try:
             context = self._diagnosis_queue.get()
             params = context['params']
             if params['buffer_id'] <= self.document_id:
                 continue
             self.document_id = params['buffer_id']
             text_string = '\n'.join(params['buffer_content'])
             reporter.SetContent(text_string)
             pyflakes_api.check(text_string,
                                params['buffer_path'],
                                reporter=reporter)
             res = reporter.GetDiagnosis()
             rpc.DoCall('ECY#diagnostics#PlaceSign', [{
                 'engine_name': self.engine_name,
                 'res_list': res
             }])
             time.sleep(1)
         except Exception as e:
             logger.exception(e)
             break
Example #30
 def check_script(self, p, script):
     """Call pyflakes to check the given script."""
     try:
         from pyflakes import api, reporter
     except Exception:  # ModuleNotFoundError
         return True  # Pretend all is fine.
     # #1306: nopyflakes
     lines = g.splitLines(p.b)
     for line in lines:
         if line.strip().startswith('@nopyflakes'):
             return True
     r = reporter.Reporter(
         errorStream=self.LogStream(),
         warningStream=self.LogStream(),
     )
     errors = api.check(script, '', r)
     return errors == 0
Example #31
 def check_all(self, log_flag, paths, pyflakes_errors_only, roots=None):
     '''Run pyflakes on all files in paths.'''
     from pyflakes import api, reporter
     total_errors = 0
     # pylint: disable=cell-var-from-loop
     for fn_n, fn in enumerate(sorted(paths)):
         # Report the file name.
         sfn = g.shortFileName(fn)
         s = g.readFileIntoEncodedString(fn)
         if s.strip():
             if not pyflakes_errors_only:
                 g.es('Pyflakes: %s' % sfn)
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(fn_n, roots),
                 warningStream=self.LogStream(fn_n, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors
Example #32
 def check_all(self, roots):
     """Run pyflakes on all files in paths."""
     total_errors = 0
     for i, root in enumerate(roots):
         fn = self.finalize(root)
         sfn = g.shortFileName(fn)
         # #1306: nopyflakes
         if any(z.strip().startswith('@nopyflakes')
                for z in g.splitLines(root.b)):
             continue
         # Report the file name.
         s = g.readFileIntoEncodedString(fn)
         if s and s.strip():
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(i, roots),
                 warningStream=self.LogStream(i, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors
Example #33
def _flake_line(line_n):
    undefined = 'undefined'
    unused = 'unused'
    redefinition = 'redefinition'

    warnings = StringIO()
    errors = StringIO()
    rep = reporter.Reporter(warnings, errors)

    code = '\n'.join(vim.current.buffer[:])

    if api.check(code, 'f', rep):
        warnings.seek(0)
        errors = {
            undefined: [],
            unused: []
        }
        for line, error in [(int(x.split(':')[1]), x) for x in warnings.readlines()]:
            if undefined in error and line == line_n:
                module = error.split()[-1].strip("\n|'")
                errors[undefined].append(module)
            elif unused in error and redefinition not in error:
                module = error.split()[1].strip(" |'")
                errors[unused].append(module)
        return errors
Example #34
 def check_all(self, log_flag, paths, pyflakes_errors_only, roots=None):
     '''Run pyflakes on all files in paths.'''
     try:
         from pyflakes import api, reporter
     except Exception: # ModuleNotFoundError
         return True # Pretend all is fine.
     total_errors = 0
     # pylint: disable=cell-var-from-loop
     for fn_n, fn in enumerate(sorted(paths)):
         # Report the file name.
         sfn = g.shortFileName(fn)
         s = g.readFileIntoEncodedString(fn)
         if s.strip():
             if not pyflakes_errors_only:
                 g.es('Pyflakes: %s' % sfn)
             # Send all output to the log pane.
             r = reporter.Reporter(
                 errorStream=self.LogStream(fn_n, roots),
                 warningStream=self.LogStream(fn_n, roots),
             )
             errors = api.check(s, sfn, r)
             total_errors += errors
     return total_errors
Example #35
def _flake_line(line_n):
    undefined = 'undefined'
    unused = 'unused'
    redefinition = 'redefinition'

    warnings = StringIO()
    errors = StringIO()
    rep = reporter.Reporter(warnings, errors)

    code = '\n'.join(vim.current.buffer[:])

    if api.check(code, 'f', rep):
        warnings.seek(0)
        errors = {undefined: [], unused: []}
        for line, error in [(int(x.split(':')[1]), x)
                            for x in warnings.readlines()]:
            if undefined in error and line == line_n:
                module = error.split()[-1].strip("\n|'")
                errors[undefined].append(module)
            elif unused in error and redefinition not in error:
                module = error.split()[1].strip(" |'")
                errors[unused].append(module)
        return errors
Example #36
def pylsp_lint(document):
    reporter = PyflakesDiagnosticReport(document.lines)
    pyflakes_api.check(document.source.encode('utf-8'),
                       document.path,
                       reporter=reporter)
    return reporter.diagnostics
Example #37
 def run(self, doc_uri):
     document = self.workspace.get_document(doc_uri)
     reporter = PyflakesDiagnosticReport(document.lines)
     pyflakes_api.check(document.source, doc_uri, reporter=reporter)
     return reporter.diagnostics
Example #38
 def test_import_exception_in_scope(self):
     reporter = FlakesTestReporter()
     warnings = check(code_import_exception, "(stdin)", reporter)
     self.assertFalse(reporter.messages)
     self.assertEqual(warnings, 0)
Example #39
def set_source_text(text):
    global _reporter
    assert isinstance(_reporter, Reporter)
    _reporter.reset()
    check(text, "<stdin>", _reporter)
    _reporter.improve_errors_positions(text)
Example #40
def set_source_text(text):
    global _reporter
    assert isinstance(_reporter, Reporter)
    _reporter.reset()
    check(text, "<stdin>", _reporter)
    _reporter.improve_errors_positions(text)
Example #41
            else:
                find_multiline = '\n{0}'.format(string).join(
                    [entry.strip() for entry in lines[idx].split(';')])
                no_import_lines[
                    idx] = '{0}pass#IMPORTIMPORTIMPORT {1}\n'.format(
                        string, find_multiline)
        correct_imports = list(set(correct_imports))

        while True:
            stop = False
            need_intervention = False
            Checker.undefined_names = []
            Checker.unused_imports = []
            reporter.undefined_names = []
            file_content = ''.join(no_from_import_lines)
            pyfl.check(file_content, file_name, reporter)

            if not options.silent:
                with open(os.path.join('tmp', os.path.basename(file_name)),
                          'w') as write:
                    write.write(file_content)

            if not options.silent:
                file_content = ''.join(no_import_lines)
                with open(
                        os.path.join('no_import', os.path.basename(file_name)),
                        'w') as write:
                    write.write(file_content)

            if not reporter.undefined_names:
                stop = True