def analyse_repository(self, repository_name, repository_url, user_name):
    """Analyse a repository with lizard, streaming progress over Socket.IO.

    Creates a repository record, runs lizard over the checked-out tree
    under ``base_dir``, persists per-file and per-function results, then
    builds the aggregate tables and marks the repository complete.

    :param repository_name: repository directory name under ``base_dir``
    :param repository_url: clone URL (currently unused; clone step disabled)
    :param user_name: owner used for repository bookkeeping
    """
    print('started')
    celerysocketio = SocketIO(message_queue='redis://localhost:6379/')
    self.update_state(state='RUNNING', meta={'current': 0, 'total': 100})
    repository_id = create_repository(repository_name, self.request.id, user_name)
    print(self.request.id)
    files = lizard.analyze(
        paths=[base_dir + repository_name],
        exclude_pattern=['*/node_modules/*', '*/build/*', '*/build-api/*'],
        exts=lizard.get_extensions([]))
    files_list = list(files)
    total = len(files_list)
    for idx, repository_file in enumerate(files_list):
        # Guard against ZeroDivisionError when the repository has no
        # analysable files; the original `(idx if idx != 0 else idx)`
        # was a no-op conditional and is reduced to plain `idx`.
        progress = (idx / total) * 100 if total else 100
        celerysocketio.emit(
            'update',
            {
                'state': 'RUNNING',
                'complete': progress,
                'repositoryId': repository_id,
            },
            room=self.request.id)
        self.update_state(state='RUNNING',
                          meta={'current': idx, 'total': total})
        file_id = create_file(repository_name, repository_file, repository_id)
        create_functions(repository_file, file_id)
    create_aggregate_tables(repository_id)
    celerysocketio.emit('update', {
        'state': 'SUCCESS',
        'complete': 100,
        'repositoryId': repository_id
    }, room=self.request.id)
    update_repository_status(repository_name, user_name)
def scan(self, package: Package, level: str) -> Optional[List[Issue]]:
    """Run tool and gather output.

    Drives lizard programmatically (a modified copy of lizard's main()):
    builds a flag list, parses it with lizard's own argument parser,
    runs the analysis, captures the printer's stdout into a string, and
    parses that text into Issue objects.

    :param package: package under analysis; must expose ``path``
    :param level: analysis level used to look up extra user flags
    :return: parsed issues, or [] when the package has no path
    """
    if not package.path:
        return []
    # The following is a modification of lizard.py's main().
    raw_user_flags = ([lizard.__file__]
                      + [package.path]
                      + self.get_user_flags(level)
                      )  # Leading lizard file name is required.
    # Make sure we log warnings.
    if "-w" not in raw_user_flags:
        raw_user_flags += ["-w"]
    # Make sure unsupported arguments are not included.
    user_flags = self.remove_invalid_flags(raw_user_flags)
    options = lizard.parse_args(user_flags)
    printer = options.printer or lizard.print_result
    schema = lizard.OutputScheme(options.extensions)
    # NOTE(review): presumably required so extension-specific columns
    # appear in the printed report — confirm against lizard's docs.
    schema.patch_for_extensions()
    result = lizard.analyze(
        options.paths,
        options.exclude,
        options.working_threads,
        options.extensions,
        options.languages,
    )
    # Lizard's printer writes to stdout; capture it instead of emitting.
    lizard_output = io.StringIO()
    with redirect_stdout(lizard_output):
        printer(result, options, schema, lizard.AllResult)
    output = lizard_output.getvalue()
    lizard.print_extension_results(options.extensions)
    logging.debug("%s", output)
    # Optionally persist the raw tool output alongside other tool logs.
    if self.plugin_context and self.plugin_context.args.output_directory:
        with open(self.get_name() + ".log", "w", encoding="utf8") as fid:
            fid.write(output)
    issues: List[Issue] = self.parse_output(output)
    return issues
def __analyze_repository(self, repository_path, files_affected, details):
    """Add code complexity information for a given repository
    using Lizard and CLOC.

    Current information includes cyclomatic complexity (ccn),
    lines of code, number of functions, tokens, blanks and comments.

    :param repository_path: repository path
    :param files_affected: file paths (relative to the repository root)
        touched by the commit; each result is flagged with 'in_commit'
        when its path is in this collection
    :param details: if True, it returns fine-grained results

    :returns result: list of the results of the analysis
    """
    analysis_result = []

    repository_analysis = lizard.analyze(
        paths=[repository_path],
        threads=1,
        exts=lizard.get_extensions([]),
    )
    cloc = Cloc()

    for analysis in repository_analysis:
        cloc_analysis = cloc.analyze(file_path=analysis.filename)
        # Strip the repository prefix so paths match files_affected.
        file_path = analysis.filename.replace(repository_path + "/", '')
        result = {
            'loc': analysis.nloc,
            'ccn': analysis.CCN,
            'tokens': analysis.token_count,
            'num_funs': len(analysis.function_list),
            'file_path': file_path,
            # Direct membership test replaces the redundant
            # `True if ... else False` conditional.
            'in_commit': file_path in files_affected,
            'blanks': cloc_analysis['blanks'],
            'comments': cloc_analysis['comments']
        }
        analysis_result.append(result)

    # TODO: implement details option

    return analysis_result
def _calculate_metrics(self):
    """Run lizard over the Java sources (excluding test dirs) and
    accumulate repository-wide metric totals onto this instance."""
    if self._nloc is None:
        # First run: start every counter from zero.
        self._nloc = 0
        self._method_count = 0
        self._complexity = 0
        self._token_count = 0
        self._ND = 0
    analysis = lizard.analyze([self.path], ["*/test/*"], 1, None, ["java"])
    for file_info in analysis:
        self._file_list.append(File(file_info))
        self._func_list.extend(
            Function(fn) for fn in file_info.function_list)
        self._nloc += file_info.nloc
        self._method_count += len(file_info.function_list)
        self._complexity += file_info.CCN
        self._ND += file_info.ND
        self._token_count += file_info.token_count
def _calculate_metrics(self):
    """Analyse java/python/cpp sources with lizard, stamp the run time,
    and accumulate metric totals onto this instance."""
    self.timestamp = str(datetime.datetime.now().timestamp())
    if self._nloc is None:
        # First run: start every counter from zero.
        self._nloc = 0
        self._method_count = 0
        self._complexity = 0
        self._token_count = 0
        self._ND = 0
    analysis = lizard.analyze(
        [self.source_path], None, 1, None, ["java", "python", "cpp"])
    for file_info in analysis:
        self._file_list.append(LizardFile(file_info))
        self._func_list.extend(
            LizardFunction(fn) for fn in file_info.function_list)
        self._nloc += file_info.nloc
        self._method_count += len(file_info.function_list)
        self._complexity += file_info.CCN
        # ND accumulation intentionally disabled in the original:
        # self._ND += file_info.ND
        self._token_count += file_info.token_count
def main(path, function):
    """Entry point: analyse *path* with lizard and print the processed
    report.

    Note: *function* is accepted for interface compatibility but is not
    used by this routine.
    """
    options = argparse.Namespace(
        paths=path,
        extensions=lizard.get_extensions(["cpre"], False),
        warnings_only=False,
        whitelist="whitelizard.txt",
        verbose=True,
        sorting=[],
        CCN=30,           # default maximum cyclomatic complexity
        length=300,       # default maximum function length
        arguments=8,      # default maximum number of arguments
        number=0,
        working_threads=4,
    )
    report = lizard.analyze(path,
                            threads=options.working_threads,
                            extensions=options.extensions)
    print(process_code_info(report, options.extensions))
def analyse_repo(repo: git.Repo, analysis_settings: 'lizard_mon.config.AnalysisSettings', verbosity: int) -> 'TargetResultCache':
    """Run lizard over a git working tree and collect per-file limit
    violations.

    For each analysed file, counts functions whose metrics (cyclomatic
    complexity, NLOC, parameter count) exceed the configured thresholds,
    and merges per-file results into an overall summary.

    :param repo: repository whose working tree is analysed
    :param analysis_settings: exclusion patterns, languages and limits
    :param verbosity: 0 = silent, 1 = per-file lines, >1 = per-violation
    :return: cache holding the overall result and a per-file result map
    """
    result = TargetResultCache(AnalysisResult(), {})
    analysis_dir = os.path.relpath(repo.working_tree_dir)

    def patch_relative_exclude_patterns(pattern):
        # Re-anchor "./"- or ".\"-relative patterns onto the analysis
        # directory, then normalise separators to forward slashes.
        if pattern.startswith("./") or pattern.startswith(".\\"):
            patched = os.path.join(analysis_dir, pattern[2:])
        else:
            patched = pattern
        patched = patched.replace("\\", "/")
        return patched

    exclusion_patterns = [
        patch_relative_exclude_patterns(pattern)
        for pattern in analysis_settings.exclusion_patterns
    ]
    for pattern in exclusion_patterns:
        print(" excluding:", pattern)
    analysis = lizard.analyze(
        paths=[analysis_dir],
        exclude_pattern=exclusion_patterns,
        threads=os.cpu_count(),
        exts=lizard.get_extensions([]),
        lans=analysis_settings.languages,
    )
    # Cast is for type-checkers only; lizard yields FileInformation.
    file_analysis = typing.cast(typing.Iterator[lizard.FileInformation], analysis)
    thresholds = analysis_settings.limits
    for analysed_file in file_analysis:
        if verbosity > 0:
            print(f" - file: {analysed_file.filename} (NLOC={analysed_file.nloc})")
        violations_in_this_file = 0
        for fn in analysed_file.function_list:
            # Package this function's metrics in the same shape as the
            # configured limits so they can be compared directly.
            values = lizard_mon.config.AnalysisLimits(
                fn.cyclomatic_complexity,
                fn.nloc,
                len(fn.parameters),
            )
            if not values.exceeds(thresholds):
                continue
            violations = lizard_mon.config.list_limit_violations(values, thresholds)
            violations_in_this_file += 1
            if verbosity > 1:
                print(f" - {fn.long_name} [{fn.start_line}:{fn.end_line}]")
                print(f" violations: {', '.join(violations)}")
        file_result = AnalysisResult(
            violation_count=violations_in_this_file,
            lines_of_code=analysed_file.nloc,
            file_count=1,
        )
        if verbosity > 0:
            print(f" results for this file: {file_result}")
        # Fold this file into the running totals and record it by name.
        result.overall.merge_with(file_result)
        result.files[analysed_file.filename] = file_result
    return result
def analyze(repo, lang):
    """Single-threaded lizard analysis of *repo*, restricted to *lang*;
    no exclusion patterns and no extra extensions."""
    exclude = None
    threads = 1
    extensions = None
    return lizard.analyze(repo, exclude, threads, extensions, lang)
def analyze(file_paths):
    """Analyse *file_paths* with lizard (with the "cpre" extension
    loaded) on 4 threads and return the processed report."""
    loaded_extensions = lizard.get_extensions(["cpre"], False)
    report = lizard.analyze(file_paths,
                            threads=4,
                            extensions=loaded_extensions)
    return process_code_info(report, loaded_extensions)