Example #1
# typing and pygount imports needed by this snippet; locate_files() and
# count_lines_jupyter() are project-specific helpers defined elsewhere.
from typing import Dict, List

from pygount import ProjectSummary, SourceAnalysis


def get_total_loc(
    path: str, file_types: List[str], custom_paths: List[str]
) -> Dict[str, int]:
    total_file_list: List[str] = []
    total_file_list.extend(custom_paths)
    jupyter_list: List[str] = []
    project_summary = ProjectSummary()

    # Collect files per extension; notebooks are counted separately below.
    for file_type in file_types:
        if file_type != ".ipynb":
            total_file_list.extend(locate_files(path, file_type))
        else:
            jupyter_list.extend(locate_files(path, file_type))

    for source_path in total_file_list:
        source_analysis = SourceAnalysis.from_file(
            source_path, "pygount", encoding="utf-8"
        )
        project_summary.add(source_analysis)

    # Per-language counts: code lines minus empty lines.
    language_stat_dict: Dict[str, int] = {}
    for language_summary in project_summary.language_to_language_summary_map.values():
        language_stat_dict[language_summary.language] = (
            language_summary.code_count - language_summary.empty_count
        )

    # Add code lines from Jupyter notebooks to the Python total.
    language_stat_dict["Python"] = (
        language_stat_dict.get("Python", 0) + count_lines_jupyter(jupyter_list)
    )

    # Drop pygount's pseudo-language bucket for empty files, if present.
    language_stat_dict.pop("__empty__", None)

    return language_stat_dict
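A minimal sketch of how this helper might be called; the path, the file-type list and the printed result are illustrative assumptions, and locate_files() / count_lines_jupyter() come from the surrounding project rather than from pygount:

# Hypothetical invocation; all arguments are placeholders.
stats = get_total_loc(
    path=".",
    file_types=[".py", ".md", ".ipynb"],
    custom_paths=[],
)
print(stats)  # e.g. {"Python": 1234, "Markdown": 56}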
Example #2
    def button_analyse_code(self):
        IrModuleAuthor = self.env["ir.module.author"]
        IrModuleTypeRule = self.env["ir.module.type.rule"]
        rules = IrModuleTypeRule.search([])

        cfg = self.env["ir.config_parameter"]
        val = cfg.get_param("module_analysis.exclude_directories", "")
        exclude_directories = [x.strip() for x in val.split(",") if x.strip()]
        val = cfg.get_param("module_analysis.exclude_files", "")
        exclude_files = [x.strip() for x in val.split(",") if x.strip()]

        for module in self:
            _logger.info("Analysing Code for module %s ...", module.name)

            # Update Authors, based on manifest key
            authors = []
            if module.author and module.author[0] == "[":
                author_txt_list = safe_eval(module.author)
            else:
                author_txt_list = (module.author
                                   and module.author.split(",")) or []

            author_txt_list = [x.strip() for x in author_txt_list]
            author_txt_list = [x for x in author_txt_list if x]
            for author_txt in author_txt_list:
                authors.append(IrModuleAuthor._get_or_create(author_txt))

            author_ids = [x.id for x in authors]
            module.author_ids = author_ids

            # Update Module Type, based on rules
            module_type_id = rules._get_module_type_id_from_module(module)
            module.module_type_id = module_type_id

            # Get Path of module folder and parse the code
            module_path = get_module_path(module.name)

            # Get Files
            analysed_datas = self._get_analyse_data_dict()
            file_extensions = analysed_datas.keys()
            file_list = self._get_files_to_analyse(module_path,
                                                   file_extensions,
                                                   exclude_directories,
                                                   exclude_files)

            for file_path, file_ext in file_list:
                file_res = SourceAnalysis.from_file(
                    file_path,
                    "",
                    encoding=self._get_module_encoding(file_ext))
                for k, v in analysed_datas.get(file_ext).items():
                    v["value"] += getattr(file_res, k)

            # Update the module with the collected data
            values = {}
            for analyses in analysed_datas.values():
                for v in analyses.values():
                    values[v["field"]] = v["value"]
            module.write(values)
Example #3
    def count(self):
        # Walk the solaris package and accumulate pygount's per-file counts.
        for subdir, _, files in os.walk(ROOT_DIR / "solaris"):
            for file in (f for f in files if f.endswith(".py")):
                analysis = SourceAnalysis.from_file(f"{subdir}/{file}",
                                                    "pygount",
                                                    encoding="utf-8")
                self.code += analysis.code_count
                self.docs += analysis.documentation_count
                self.empty += analysis.empty_count
Example #4
def length_code(solver_name):
    solvers_dir = "hackathonbaobab2020/solver/"
    project_summary = ProjectSummary()
    source_paths = glob(solvers_dir + solvers_to_pattern[solver_name])
    for source_path in source_paths:
        source_analysis = SourceAnalysis.from_file(source_path, "pygount")
        project_summary.add(source_analysis)
    lines_of_code = project_summary.language_to_language_summary_map[
        "Python"].code_count
    return lines_of_code
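In the snippet above, solvers_to_pattern is a module-level mapping from solver names to glob patterns defined elsewhere in the hackathonbaobab2020 project; a hypothetical shape, with made-up solver names, could be:

# Illustrative only; the real solver names and patterns are project-specific.
solvers_to_pattern = {
    "default": "default/*.py",
    "milp": "milp_*.py",
}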
Example #5
    def _analysis_file(self, path):
        # Reads pygount's internal, underscore-prefixed attributes directly.
        file_res = SourceAnalysis.from_file(path, "")
        return {
            "path": file_res._path,
            "language": file_res._language,
            "code": file_res._code,
            "documentation": file_res._documentation,
            "empty": file_res._empty,
            "string": file_res._string,
        }
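The underscore-prefixed attributes read above are pygount internals; the same figures are also available through public properties (a sketch assuming the property names of current pygount releases):

from pygount import SourceAnalysis

def analyse_file_public(path):
    # Same idea as the method above, but via the documented properties.
    result = SourceAnalysis.from_file(path, "example")
    return {
        "path": result.path,
        "language": result.language,
        "code": result.code_count,
        "documentation": result.documentation_count,
        "empty": result.empty_count,
        "string": result.string_count,
    }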
Example #6
    def calculate_loc(self) -> None:
        """
        Calculates the number of lines of code that are used by TNG.
        """
        project_summary = ProjectSummary()
        for source_path in (glob("*.py") + glob("modules/*.py") + glob("utils/*.py")):
            source_analysis = SourceAnalysis.from_file(source_path, "pygount", encoding="utf-8", fallback_encoding="cp850")
            project_summary.add(source_analysis)

        self.lines_of_code = 0
        for language_summary in project_summary.language_to_language_summary_map.values():
            self.lines_of_code += language_summary.code_count
Example #7
    def calculate_loc(self) -> None:
        """
        Calculates the number of lines of code that the bot uses.
        """
        project_summary = ProjectSummary()
        for source_path in glob("**/*.py", recursive=True) + glob("**/*.html", recursive=True):
            source_analysis = SourceAnalysis.from_file(source_path, "pygount", encoding="utf-8", fallback_encoding="cp850")
            project_summary.add(source_analysis)

        self.lines_of_code = 0
        for language_summary in project_summary.language_to_language_summary_map.values():
            self.lines_of_code += language_summary.code_count
Example #8
def analyse_file(source_path: Path):
    if not source_path.is_file():
        return

    source_analysis = SourceAnalysis.from_file(source_path, scan_path)

    if source_analysis.state != analysis.SourceState.analyzed:
        return

    i = lizard.analyze_file(str(source_path))

    result = AnalysedFile(source_analysis)
    result.add_complexity(i.average_CCN, i.CCN)

    return result
Example #9
    def count_loc(self, sources):
        if not sources:
            # Old version of coverage
            sources = [""]
        for source in sources:
            filename = os.path.join(source or "", self.filename)
            try:
                analysis = SourceAnalysis.from_file(
                    filename, group="clover", fallback_encoding="utf-8"
                )
                self.loc += analysis.code + analysis.documentation
            except IOError:
                # Try next file
                continue
            else:
                # The first file was found (optimistic loop)
                break
Example #10
    def testOpenCoverage(self):
        self.c.open(self.filename)
        cdata = deepcopy(self.c.__dict__)
        packages = cdata.pop("packages")
        package = deepcopy(list(packages.values())[0].__dict__)

        clover_analysis = SourceAnalysis.from_file(clover_file,
                                                   group="clover",
                                                   fallback_encoding="utf-8")
        clover_loc = clover_analysis.code + clover_analysis.documentation

        clover_bin_analysis = SourceAnalysis.from_file(
            clover_bin_file, group="clover", fallback_encoding="utf-8")
        bin_loc = clover_bin_analysis.code + clover_bin_analysis.documentation

        loc = clover_loc + bin_loc

        cversion = coverage.__version__

        # Initial values for coverage==6.2
        statements = ncloc = 166
        covered_conditions = 37
        covered_statements = 151
        conditions = 46

        expected = {
            "classes": 0,
            "conditions": conditions,
            "covered_conditions": covered_conditions,
            "covered_statements": covered_statements,
            "files": 2,
            "loc": loc,
            "ncloc": ncloc,
            "statements": statements,
            "version": cversion,
        }
        cdata.pop("timestamp")
        self.assertDictEqual(cdata, expected)

        expected = {
            "loc": loc,
            "statements": statements,
            "name": "",
            "ncloc": ncloc,
            "covered_conditions": covered_conditions,
            "conditions": conditions,
            "covered_statements": covered_statements,
        }

        classes = package.pop("classes")

        self.assertDictEqual(package, expected)

        cname = "clover/__init__" if cversion < "4.0" else "__init__.py"
        clover = deepcopy(classes[cname].__dict__)

        statements = ncloc = 149
        conditions = 42
        covered_conditions = 36
        covered_statements = 143

        expected = {
            "loc": clover_loc,
            "statements": statements,
            "name": cname,
            "filename": "clover/__init__.py",
            "ncloc": ncloc,
            "covered_conditions": covered_conditions,
            "conditions": conditions,
            "covered_statements": covered_statements,
        }

        self.assertDictEqual(clover, expected)
Example #11
    def create_file(self, filename, branch):
        pkey = str(Path(self.repo.base_directory) / Path(filename))
        key = pkey + '@' + branch.name
        if key in self.file_cache:
            file = self.file_cache[key]
            return self.file_cache[key], file.id is None

        p = Path(filename)
        parent = p.parent
        name = p.name
        if parent == name:
            parent = ''
        file_path = self.get_or_create_filepath(branch, parent)
        try:
            path = Path(pkey)
            exists = path.is_file()

            if not exists:
                file = self.create_file_object(filename, branch, file_path,
                                               name, False)
            else:
                try:
                    analysis = SourceAnalysis.from_file(pkey, self.repo.name)
                    empty = analysis.state == SourceState.empty.name
                    binary = analysis.state == SourceState.binary.name
                    indent_complexity = calculate_complexity_in(
                        pkey) if not empty and not binary else 0
                    is_code = (not binary) and (
                        not empty) and language_is_code(analysis.language)
                    lines = 0
                    if not binary:
                        encoding = detect_encoding(path)
                        with open(path,
                                  "r",
                                  newline='',
                                  encoding=encoding,
                                  errors='ignore') as fd:
                            lines = sum(1 for _ in fd)
                    file = File(filename=filename,
                                repository=self.repo,
                                branch=branch,
                                path=file_path,
                                name=name,
                                language=analysis.language,
                                code=analysis.code,
                                doc=analysis.documentation,
                                blanks=analysis.empty,
                                empty=empty,
                                strings=analysis.string,
                                binary=binary,
                                exists=True,
                                is_code=is_code,
                                indent_complexity=indent_complexity,
                                lines=lines)
                except Exception:
                    file = self.create_file_object(filename, branch, file_path,
                                                   name, True)
                    logger.info('error on {}'.format(pkey))
                    tb = traceback.format_exc()
                    logger.info(tb)

            self.file_cache[key] = file
        except Exception:
            tb = traceback.format_exc()
            logger.info(tb)
            file = self.create_file_object(filename, branch, file_path, name,
                                           False)
            self.file_cache[key] = file
        return self.file_cache[key], True