示例#1
0
    def pyflakes_check(self, report, filename, content):
        """
        Run pyflakes on ``filename`` with the given ``content`` and add any
        resulting pyflakes messages to ``report``.

        :param report: collector exposing ``add(message, filename, line,
            column)``; receives one entry per enabled flake.
        :param filename: path of the file on disk that pyflakes will check.
        :param content: file contents, scanned for ``# pyflakes off`` /
            ``# pyflakes on`` marker lines that toggle reporting.
        """

        # Just exit silently if pyflakes is not available
        try:
            from pyflakes import api, reporter
        except ImportError:
            return

        # Build a 1-based map: lines_map[n] = (reporting enabled?, line text).
        # Index 0 is a placeholder so msg.lineno can index directly.
        lines_map = [(None, None)]
        current = True
        for line in content.splitlines():
            # NOTE(review): lines are compared against bytes literals, so
            # ``content`` is assumed to be bytes — confirm with callers.
            if line.strip() == b"# pyflakes off":
                current = False
            elif line.strip() == b"# pyflakes on":
                current = True
            lines_map.append((current, line))

        class CustomReporter(reporter.Reporter):
            # Syntax and unexpected errors are deliberately swallowed;
            # only genuine flakes are forwarded to ``report``.
            def syntaxError(self, _, msg, lineno, offset, text):
                pass

            def unexpectedError(self, filename, msg):
                pass

            def flake(self, msg):
                # Only report flakes on lines where checking is enabled.
                if lines_map[msg.lineno][0]:
                    report.add(msg.message % msg.message_args, filename,
                               msg.lineno, 0)

        api.checkPath(filename,
                      reporter=CustomReporter(sys.stdout, sys.stderr))
示例#2
0
    def check(self, filepaths: List[str]) -> List[Message]:
        """Run pyflakes over every path and return the collected messages,
        parse errors first, then flakes."""
        collector = PyFlakes.CollectingReporter()
        for path in filepaths:
            checkPath(path, reporter=collector)

        messages: List[Message] = [self._error_to_msg(err)
                                   for err in collector.errors]
        messages += [self._flake_to_msg(flake) for flake in collector.flakes]
        return messages
示例#3
0
    def run(self):
        """Check every non-ignored source file and return pyflakes messages."""
        collector = ProspectorReporter(ignore=self.ignore_codes)

        for path in iterSourceCode(self._paths):
            # Skip paths matching any of the configured ignore patterns.
            ignored = any(pattern.search(path) for pattern in self._ignores)
            if not ignored:
                checkPath(path, collector)

        return collector.get_messages()
示例#4
0
    def run(self):
        """Check every non-ignored source file and return pyflakes messages."""
        collector = ProspectorReporter(ignore=self.ignore_codes)

        for path in iterSourceCode(self._paths):
            # Ignore patterns are matched against the path relative to root.
            rel = os.path.relpath(path, self._rootpath)
            ignored = any(pattern.search(rel) for pattern in self._ignores)
            if not ignored:
                checkPath(path, collector)

        return collector.get_messages()
示例#5
0
def _get_error_density(_file_path: str) -> Union[float, bool]:
    """Return the pyflakes error density for a single file.

    Density is the number of pyflakes messages divided by the number of
    non-blank lines in the file.

    :param _file_path: path of the Python file to check.
    :return: ``0.0`` when pyflakes reports nothing, ``False`` when errors
        exist but the file has no non-blank lines (density undefined),
        otherwise the ratio of pyflakes messages to non-blank lines.
    """
    _output_handler = StringIO()
    _error_handler = StringIO()
    _pyflake_reporter = Reporter(_output_handler, _error_handler)

    try:
        checkPath(_file_path, _pyflake_reporter)
        _error_string = _output_handler.getvalue()
    finally:
        # Close both streams unconditionally; the original code leaked
        # _output_handler when it returned early on a clean file.
        _error_handler.close()
        _output_handler.close()

    # pyflakes writes one message per line; an empty string means no errors.
    if not _error_string:
        return 0.0

    # strip() avoids counting empty strings at the start/end of the split.
    _errors = len(_error_string.strip().split('\n'))

    # Count only non-blank lines so whitespace does not dilute the density.
    _line_number = 0
    with open(_file_path, 'r', encoding='utf-8', errors='ignore') as _file:
        for _line in _file:
            if re.match(r'^\s*$', _line):
                continue
            _line_number += 1

    if _line_number == 0:
        # Errors reported but no countable lines: density is undefined.
        return False

    return float(_errors) / _line_number
示例#6
0
 def handle(self, *args, **options):
     """Lint and (optionally) Black-format the repo's tracked .py files.

     Raises CommandError when Black reformatted anything or when pyflakes
     reported lint errors, so CI fails on an unclean tree.
     """
     root = pathlib.Path(settings.BASE_DIR)
     g_args = ["--exclude-standard", "-X", root.joinpath(".lintignore")]
     if not options["all"]:
         # Default to modified files only; --all checks every tracked file.
         g_args.append("-m")
     files_str = git.Git(settings.BASE_DIR).ls_files(g_args)
     del_files_str = git.Git(settings.BASE_DIR).ls_files("-d", "-k")
     # Tracked *.py files minus those deleted from the working tree.
     files = set(
         pathlib.Path(p)
         for p in files_str.splitlines() if p[-3:] == ".py") - set(
             pathlib.Path(p) for p in del_files_str.splitlines())
     num_lint_errors = 0
     num_black_formats = 0
     for p in files:
         if not options["no_fix"]:
             changed = black.format_file_in_place(p, False,
                                                  black.FileMode(),
                                                  black.WriteBack.YES)
             if changed:
                 num_black_formats += 1
                 self.stdout.write("%s: Black formatted file" % str(p))
         if not options["no_lint"]:
             num_lint_errors += pyflakes.checkPath(str(p))
     if num_black_formats:
         raise CommandError("%i file(s) formatted by Black" %
                            num_black_formats)
     if num_lint_errors:
         # Fixed typo in the user-facing message: "liniting" -> "linting".
         raise CommandError("%i linting error(s) found" % num_lint_errors)
示例#7
0
File: tests.py  Project: ojii/gettext.js
 def test_pyflakes(self):
     """Assert that pyflakes reports no problems for the gettext module."""
     paths = [GETTEXT_PY_FILE]
     buffer = io.StringIO()
     collecting = Reporter(buffer, buffer)
     errors = sum(api.checkPath(path, collecting) for path in paths)
     self.assertEqual(errors, 0, '\n' + buffer.getvalue())
示例#8
0
def test_pyflakes():
    """Run pyflakes checks recursively over every file under tweedr.root."""
    # Bug fix: the original used ``'''...''' % tweedr.root`` which is a
    # discarded expression, not a docstring, so the function had no
    # __doc__ and test runners showed no description.
    from pyflakes import api as pyflakes
    total_errors = 0
    for filepath in walk(tweedr.root, not_egg, not_git, is_python):
        total_errors += pyflakes.checkPath(filepath)

    assert total_errors == 0, 'Codebase does not pass pyflakes (%d errors)' % total_errors
示例#9
0
def test_pyflakes():
    """Run pyflakes checks recursively over every file under tweedr.root."""
    # Bug fix: the original used ``'''...''' % tweedr.root`` which is a
    # discarded expression, not a docstring, so the function had no
    # __doc__ and test runners showed no description.
    from pyflakes import api as pyflakes
    total_errors = 0
    for filepath in walk(tweedr.root, not_egg, not_git, is_python):
        total_errors += pyflakes.checkPath(filepath)

    assert total_errors == 0, 'Codebase does not pass pyflakes (%d errors)' % total_errors
示例#10
0
def _check_recursive(paths, reporter):
    """Recursively check Python sources under *paths*.

    The builtin recursive checker tries to check .pyc files, so anything
    not ending in '.py' is skipped here.
    """
    total = 0
    for source in api.iterSourceCode(paths):
        if not source.endswith('.py'):
            continue
        total += api.checkPath(source, reporter)
    return total
示例#11
0
 def test_pyflakes(self):
     """Assert pyflakes is clean for the gettext module and this test file."""
     paths = [GETTEXT_PY_FILE, THIS_FILE]
     buffer = io.StringIO()
     collecting = Reporter(buffer, buffer)
     errors = sum(api.checkPath(path, collecting) for path in paths)
     self.assertEqual(errors, 0, '\n' + buffer.getvalue())
示例#12
0
def _check_recursive(paths, reporter):
    """Recursively check Python sources under *paths*.

    The builtin recursive checker tries to check .pyc files, so anything
    not ending in '.py' is skipped here.
    """
    total = 0
    for source in api.iterSourceCode(paths):
        if not source.endswith('.py'):
            continue
        total += api.checkPath(source, reporter)
    return total
示例#13
0
    def getErrors(self, path):
        """
        Get any warnings or errors reported by pyflakes for the file at C{path}.

        @param path: The path to a Python file on disk that pyflakes will check.
        @return: C{(count, log)}, where C{count} is the number of warnings or
            errors generated, and log is a list of those warnings, presented
            as structured data.  See L{LoggingReporter} for more details.
        """
        collected = []
        total = checkPath(path, LoggingReporter(collected))
        return total, collected
示例#14
0
    def getErrors(self, path):
        """
        Get any warnings or errors reported by pyflakes for the file at C{path}.

        @param path: The path to a Python file on disk that pyflakes will check.
        @return: C{(count, log)}, where C{count} is the number of warnings or
            errors generated, and log is a list of those warnings, presented
            as structured data.  See L{LoggingReporter} for more details.
        """
        collected = []
        total = checkPath(path, LoggingReporter(collected))
        return total, collected
def check(data):
    """zest.releaser hook: warn about pyflakes findings before releasing.

    NOTE(review): this snippet is Python 2 (print statements).  ``data`` is
    the zest.releaser data dict shown in the comment below.  Exits the
    process with status 0 when the user declines to continue.
    """
    # data is {'tag_already_exists': False, 'version': ..., 'workingdir': ..., 'name': ...},

    python_sources = find('*.py', data['workingdir'])
    if not python_sources:
        return

    # Capture both pyflakes output streams into in-memory buffers.
    reporter = Reporter(StringIO(), StringIO())

    for source in python_sources:
        checkPath(source, reporter)

    # The Reporter keeps its stdout stream as the private _stdout attribute.
    output = reporter._stdout.getvalue()

    # No output means no warnings; otherwise ask before showing them.
    if not output or not zest.releaser.utils.ask('You have Pyflakes warning. Do you want to see them?'):
        return

    print '\n'
    print output

    if not zest.releaser.utils.ask('Do you want to continue anyway?', default=False):
        print "Fix your warnings and retry"
        sys.exit(0)
示例#16
0
def pyflakes(ticket, **kwds):
    """
    run pyflakes on the modified .py files

    we do not check the files names "all.py" and "__init__.py" that
    usually just contain unused import lines, always triggering non-pertinent
    pyflakes warnings

    same thing for files named "*catalog*.py"

    :param ticket: ticket identifier (unused here; part of the plugin API)
    :param kwds: extra keyword arguments (ignored)
    :raises ValueError: if pyflakes errors remain after the lazy-import filter
    """
    # Files touched between the base branch and the merged ticket branch.
    changed_files = list(
        subprocess.Popen([
            'git', 'diff', '--name-only',
            'patchbot/base..patchbot/ticket_merged'
        ],
                         stdout=subprocess.PIPE).stdout)
    changed_files = [f.decode('utf8').strip("\n") for f in changed_files]

    errors = 0
    msg_list = []
    msg_here = '{} pyflakes errors in file {}'
    for a_file in changed_files:
        if os.path.exists(a_file) and isPythonFile(a_file):
            filename = os.path.split(a_file)[1]
            if not (filename == "all.py" or filename == "__init__.py"
                    or "catalog" in filename):
                # Capture pyflakes messages so they can be filtered below.
                error_stream = io.StringIO()
                report = Reporter(error_stream, sys.stderr)
                errors_here = checkPath(a_file, report)  # run pyflakes
                if errors_here:
                    # here we try to care for lazy imported names
                    # (lazily imported names look undefined to pyflakes,
                    # so messages mentioning them are discounted)
                    lazys = list(find_lazy_imports(a_file))  # ok
                    if lazys:
                        for err in error_stream.getvalue().splitlines():
                            # The offending name is the last token of the
                            # pyflakes message line.
                            if any(x in err.split(' ')[-1] for x in lazys):
                                errors_here -= 1
                            else:
                                print(err)
                    else:
                        print(error_stream.getvalue())
                error_stream.close()
                if errors_here:
                    errors += errors_here
                    msg_list.append(msg_here.format(errors_here, a_file))

    full_msg = "found {} pyflakes errors in the modified files"
    full_msg = full_msg.format(errors)
    print(full_msg)
    if errors:
        raise ValueError(full_msg)
示例#17
0
    def run_linter(self, *files):
        """
        For each file, run pyflakes and capture its output; flag files that
        have more errors than the configured per-file threshold.

        :param files: paths of Python files to lint.
        """
        capture_stdout = StringIO()
        # Bug fix: the Reporter was previously constructed but never passed
        # to checkPath, so pyflakes wrote to the real stdout and
        # capture_stdout stayed empty — report_error always got "".
        flake_reporter = reporter.Reporter(capture_stdout, sys.stderr)
        for source_file in files:
            LOGGER.info("Pyflakes: {}".format(source_file))
            result = checkPath(source_file, flake_reporter)
            if result:
                LOGGER.warning("Found {} flakes".format(result))
            if result > self.errors_per_file:
                # NOTE(review): capture_stdout accumulates across files;
                # the reported text may include earlier files' flakes.
                self.report_error(source_file, capture_stdout.getvalue())
示例#18
0
def pyflakes(ticket, **kwds):
    """
    run pyflakes on the modified .py files

    we do not check the files names "all.py" and "__init__.py" that
    usually just contain unused import lines, always triggering non-pertinent
    pyflakes warnings

    same thing for files named "*catalog*.py"

    :param ticket: ticket identifier (unused here; part of the plugin API)
    :param kwds: extra keyword arguments (ignored)
    :raises ValueError: if pyflakes errors remain after the lazy-import filter
    """
    # Files touched between the base branch and the merged ticket branch.
    changed_files = list(subprocess.Popen(['git', 'diff', '--name-only', 'patchbot/base..patchbot/ticket_merged'], stdout=subprocess.PIPE).stdout)
    changed_files = [f.decode('utf8').strip("\n") for f in changed_files]

    errors = 0
    msg_list = []
    msg_here = '{} pyflakes errors in file {}'
    for a_file in changed_files:
        if os.path.exists(a_file) and isPythonFile(a_file):
            filename = os.path.split(a_file)[1]
            if not (filename == "all.py" or filename == "__init__.py" or
                    "catalog" in filename):
                # Capture pyflakes messages so they can be filtered below.
                error_stream = io.StringIO()
                report = Reporter(error_stream, sys.stderr)
                errors_here = checkPath(a_file, report)  # run pyflakes
                if errors_here:
                    # here we try to care for lazy imported names
                    # (lazily imported names look undefined to pyflakes,
                    # so messages mentioning them are discounted)
                    lazys = list(find_lazy_imports(a_file))  # ok
                    if lazys:
                        for err in error_stream.getvalue().splitlines():
                            # The offending name is the last token of the
                            # pyflakes message line.
                            if any(x in err.split(' ')[-1] for x in lazys):
                                errors_here -= 1
                            else:
                                print(err)
                    else:
                        print(error_stream.getvalue())
                error_stream.close()
                if errors_here:
                    errors += errors_here
                    msg_list.append(msg_here.format(errors_here, a_file))

    full_msg = "found {} pyflakes errors in the modified files"
    full_msg = full_msg.format(errors)
    print(full_msg)
    if errors:
        raise ValueError(full_msg)
示例#19
0
File: main.py  Project: ament/ament_lint
def main(argv=sys.argv[1:]):
    """Command-line entry point: run pyflakes over the given paths.

    Returns 0 when no problems were found, 1 otherwise.  Optionally writes
    an xunit-compatible XML report.

    NOTE(review): the default argument binds sys.argv[1:] once at import
    time; callers normally pass argv explicitly.
    """
    parser = argparse.ArgumentParser(
        description='Check code using pyflakes.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        'paths',
        nargs='*',
        default=[os.curdir],
        help='The files or directories to check. For directories files ending '
             "in '.py' will be considered.")
    parser.add_argument(
        '--exclude',
        metavar='filename',
        nargs='*',
        dest='excludes',
        help='The filenames to exclude.')
    # not using a file handle directly
    # in order to prevent leaving an empty file when something fails early
    parser.add_argument(
        '--xunit-file',
        help='Generate a xunit compliant XML file')
    args = parser.parse_args(argv)

    if args.xunit_file:
        # Record the start time so the xunit report can include a duration.
        start_time = time.time()

    filenames = get_files(args.paths)
    if args.excludes:
        filenames = [f for f in filenames if os.path.basename(f) not in args.excludes]
    if not filenames:
        print('No files found', file=sys.stderr)
        return 1

    # One (filename, errors) pair per checked file.
    report = []

    # invoke pyflakes for each file
    for filename in filenames:
        reporter = CustomReporter()
        print(filename)
        checkPath(filename, reporter=reporter)
        for error in reporter.errors:
            try:
                print(error, file=sys.stderr)
            except TypeError:
                # this can happen if the line contains percent characters
                print(error.__dict__, file=sys.stderr)
        report.append((filename, reporter.errors))
        print('')

    # output summary
    error_count = sum(len(r[1]) for r in report)
    if not error_count:
        print('No problems found')
        rc = 0
    else:
        print('%d errors' % error_count, file=sys.stderr)
        rc = 1

    # generate xunit file
    if args.xunit_file:
        # Derive the test name "<parent folder>.<file stem>", stripping a
        # trailing '.xml' and then a trailing '.xunit' suffix if present.
        folder_name = os.path.basename(os.path.dirname(args.xunit_file))
        file_name = os.path.basename(args.xunit_file)
        suffix = '.xml'
        if file_name.endswith(suffix):
            file_name = file_name[0:-len(suffix)]
            suffix = '.xunit'
            if file_name.endswith(suffix):
                file_name = file_name[0:-len(suffix)]
        testname = '%s.%s' % (folder_name, file_name)

        xml = get_xunit_content(report, testname, time.time() - start_time)
        path = os.path.dirname(os.path.abspath(args.xunit_file))
        if not os.path.exists(path):
            os.makedirs(path)
        with open(args.xunit_file, 'w') as f:
            f.write(xml)

    return rc
示例#20
0
File: pyflakes.py  Project: saimn/doit-py
def check_path(filename):
    """a doit action - execute pyflakes in a single file.
    :return bool: check succeeded (True when pyflakes found no problems)
    """
    error_count = checkPath(filename)
    return not error_count
示例#21
0
"""
示例#22
0
def pyflakes_check(filename):
    """Run pyflakes on *filename*; return (error log, warning log) lists."""
    warnings, errors = Logger(), Logger()
    checkPath(filename, Reporter(warnings, errors))
    return errors.log, warnings.log
示例#23
0
def run_by_single(index, base_url, path):
    """
    Load the information of a single test case and dump it to a YAML file.

    :param index: int or str: test case index (TestCaseInfo primary key)
    :param base_url: str: base URL of the target environment
    :param path: str: directory under which the test case files are written
    :return: dict
    """
    testcase_list = {
        'config': {
            'name': '',
            'verify': "false",
            'variables': {},
            'base_url': base_url
        }
    }
    testcase_list['teststeps'] = []

    try:
        obj = TestCaseInfo.objects.get(id=index)
    except ObjectDoesNotExist:
        # Unknown id: return the empty skeleton unchanged.
        return testcase_list

    # NOTE(review): eval() on database-stored text executes arbitrary code
    # if the stored value is untrusted — consider ast.literal_eval.
    include = eval(obj.include)
    request = eval(obj.request)
    name = obj.name
    project = obj.belong_project
    module = obj.belong_module.module_name
    # Replace the base_url for the current test case when one is configured
    if 'base_url' in request['request'] and request['request']['base_url']:
        testcase_list['config']['base_url'] = request['request']['base_url']
    testcase_list['config']['name'] = name
    testcase_dir_path = os.path.join(path, project)
    # Load global variables (config-style cases have an empty url)
    if request['request']['url'] == '' and "variables" in request.keys():
        testcase_list['config']['variables'] = request['variables']
    if not os.path.exists(testcase_dir_path):
        os.makedirs(testcase_dir_path)
        try:
            debugtalk = DebugTalk.objects.get(
                belong_project__project_name=project).debugtalk
        except ObjectDoesNotExist:
            debugtalk = ''
        dump_python_file(os.path.join(testcase_dir_path, 'debugtalk.py'),
                         debugtalk)

    # Check whether debugtalk.py parses cleanly; raise when it does not
    debugtalk_dir = os.path.join(testcase_dir_path, 'debugtalk.py')
    check_result = checkPath(debugtalk_dir)
    logger.info("check_result的值为:{}".format(check_result))
    if isinstance(check_result, int) and check_result == 1:
        shutil.rmtree(testcase_dir_path)
        raise AnalysisError("python文件解析异常,请检查debugtalk.py文件信息")
    else:
        # check_result is assumed to be a (count, messages) pair here —
        # TODO(review): confirm against the checkPath variant in use.
        msg = check_result[0]
        if msg > 0:
            errors = []
            for err in check_result[1]:
                errors.append(str(err.split(separator)[-1]) + "\n")
            shutil.rmtree(testcase_dir_path)
            raise SyntaxError(errors)

    testcase_dir_path = os.path.join(testcase_dir_path, module)
    if not os.path.exists(testcase_dir_path):
        os.mkdir(testcase_dir_path)

    for test_info in include:
        try:
            if isinstance(test_info, dict):
                # Dict entries reference a config case by id.
                config_id = test_info.pop('config')[0]
                config_request = eval(
                    TestCaseInfo.objects.get(id=config_id).request)['config']
                testcase_list['teststeps'].append(
                    modify_validate(config_request))
            else:
                # List entries reference a prerequisite case by id.
                id = test_info[0]
                pre_request = eval(TestCaseInfo.objects.get(id=id).request)
                testcase_list['teststeps'].append(modify_validate(pre_request))

        except ObjectDoesNotExist:
            return testcase_list

    if request['request']['url'] != '':
        testcase_list['teststeps'].append(modify_validate(request))
    testcase_dir_path = os.path.join(testcase_dir_path, name + '.yml')
    dump_yaml_file(testcase_dir_path, testcase_list)
示例#24
0
    def run(self, found_files):
        """Run pyflakes over every module path and return its messages."""
        collector = ProspectorReporter(ignore=self.ignore_codes)
        for module_path in found_files.iter_module_paths():
            checkPath(module_path, collector)

        return collector.get_messages()
示例#25
0
File: main.py  Project: InigoMonreal/rcc
def main(argv=sys.argv[1:]):
    """Command-line entry point: run pyflakes over the given paths.

    Returns 0 when no problems were found, 1 otherwise.  Optionally writes
    an xunit-compatible XML report.

    NOTE(review): the default argument binds sys.argv[1:] once at import
    time; callers normally pass argv explicitly.
    """
    parser = argparse.ArgumentParser(
        description='Check code using pyflakes.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        'paths',
        nargs='*',
        default=[os.curdir],
        help='The files or directories to check. For directories files ending '
        "in '.py' will be considered.")
    parser.add_argument('--exclude',
                        metavar='filename',
                        nargs='*',
                        dest='excludes',
                        help='The filenames to exclude.')
    # not using a file handle directly
    # in order to prevent leaving an empty file when something fails early
    parser.add_argument('--xunit-file',
                        help='Generate a xunit compliant XML file')
    args = parser.parse_args(argv)

    if args.xunit_file:
        # Record the start time so the xunit report can include a duration.
        start_time = time.time()

    filenames = get_files(args.paths)
    if args.excludes:
        filenames = [
            f for f in filenames if os.path.basename(f) not in args.excludes
        ]
    if not filenames:
        print('No files found', file=sys.stderr)
        return 1

    # One (filename, errors) pair per checked file.
    report = []

    # invoke pyflakes for each file
    for filename in filenames:
        reporter = CustomReporter()
        print(filename)
        checkPath(filename, reporter=reporter)
        for error in reporter.errors:
            try:
                print(error, file=sys.stderr)
            except TypeError:
                # this can happen if the line contains percent characters
                print(error.__dict__, file=sys.stderr)
        report.append((filename, reporter.errors))
        print('')

    # output summary
    error_count = sum(len(r[1]) for r in report)
    if not error_count:
        print('No problems found')
        rc = 0
    else:
        print('%d errors' % error_count, file=sys.stderr)
        rc = 1

    # generate xunit file
    if args.xunit_file:
        # Derive the test name "<parent folder>.<file stem>", stripping a
        # trailing '.xml' and then a trailing '.xunit' suffix if present.
        folder_name = os.path.basename(os.path.dirname(args.xunit_file))
        file_name = os.path.basename(args.xunit_file)
        suffix = '.xml'
        if file_name.endswith(suffix):
            file_name = file_name[0:-len(suffix)]
            suffix = '.xunit'
            if file_name.endswith(suffix):
                file_name = file_name[0:-len(suffix)]
        testname = '%s.%s' % (folder_name, file_name)

        xml = get_xunit_content(report, testname, time.time() - start_time)
        path = os.path.dirname(os.path.abspath(args.xunit_file))
        if not os.path.exists(path):
            os.makedirs(path)
        with open(args.xunit_file, 'w') as f:
            f.write(xml)

    return rc
示例#26
0
    def run(self, found_files):
        """Run pyflakes over every module path and return its messages."""
        collector = ProspectorReporter(ignore=self.ignore_codes)
        for module_path in found_files.iter_module_paths():
            checkPath(module_path, collector)

        return collector.get_messages()
示例#27
0
def check_path(filename):
    """a doit action - execute pyflakes in a single file.
    :return bool: check succeeded (True when pyflakes found no problems)
    """
    error_count = checkPath(filename)
    return not error_count