def main():
    """Start of the program.

    Parses the command line arguments, reads the ReSharper issues from the
    given issue file, aggregates them per project, per issue type and per
    category, and saves the aggregates to the report directory.
    """
    args = parse_arguments()
    filename = args.issuefile

    # The previous pre-filtering step (writing a temporary filtered file and
    # removing it afterwards) was dead, commented-out code and has been removed.
    warnings = read_resharper_issues(filename)

    issues_per_project = determine_issues_per_project(warnings)
    issues_per_issue_type = determine_issues_per_issuetype(warnings)
    issues_per_category = determine_issues_per_category(warnings)

    report_dir = create_report_directory(args.reportdir)
    save_issues_per_project(issues_per_project, report_dir)
    save_issues_per_issue_type(issues_per_issue_type, report_dir)
    save_issues_per_category(issues_per_category, report_dir)
    save_as_json(warnings)

    show_issues_per_project(issues_per_project)
def perform_analysis(settings):
    """Run the code-duplication analysis and report the results."""
    duplication_metrics = analyze_duplication(settings)
    output_file = os.path.join(
        create_report_directory(settings["report_directory"]),
        "code_duplication.csv",
    )
    save_duplication_profile(output_file, duplication_metrics)
    show_duplication_profile(
        duplication_metrics["total_loc"], duplication_metrics["duplicated_loc"]
    )
def save_file_size_metrics(metrics, report_dir):
    """Write the file size metrics as CSV under <report_dir>/metrics."""
    target_dir = create_report_directory(os.path.join(report_dir, "metrics"))
    target_file = os.path.join(target_dir, "file_size_metrics.csv")
    with open(target_file, "w", encoding="utf-8") as stream:
        writer = csv.writer(
            stream, delimiter=",", lineterminator="\n", quoting=csv.QUOTE_ALL
        )
        write_file_size_header(writer)
        write_file_size_metrics(writer, metrics)
def analyze_fan_out(database, output):
    """Determine the fan-out profile, print it and save it as fan-out.csv."""
    print("Analyzing fan-out.")
    fan_out_profile = create_fan_out_profile()
    und_db = understand.open(database)
    fan_out_profile = determine_fan_out_profile(fan_out_profile, und_db)
    fan_out_profile.print()
    fan_out_profile.save(
        os.path.join(create_report_directory(output), "fan-out.csv")
    )
def analyze_file_size(settings):
    """Measure the file sizes, save the metrics and the derived profile."""
    report_dir = create_report_directory(settings["report_directory"])
    intermediate_file = os.path.join(
        report_dir, "intermediate", "file_size_metrics.csv"
    )
    size_filter = settings["file_size_filter"]
    measure_file_size(
        settings["analysis_directory"], intermediate_file, size_filter
    )
    size_metrics = get_file_size_metrics(intermediate_file)
    save_file_size_metrics(size_metrics, report_dir)
    # The intermediate results are only needed to extract the metrics.
    shutil.rmtree(os.path.join(report_dir, "intermediate"))

    size_profile = determine_profile(size_metrics)
    profile_dir = create_report_directory(os.path.join(report_dir, "profiles"))
    size_profile.save(os.path.join(profile_dir, "file_size_profile.csv"))
def test_create_report_directory_directory_exists(path_exists_mock):
    """Test that create_report_directory skips creation for an existing directory."""
    # arrange
    expected_dir = r"c:\temp\reports"
    path_exists_mock.return_value = True

    # act
    actual_dir = create_report_directory(expected_dir)

    # assert
    assert actual_dir == expected_dir
    path_exists_mock.assert_called_once()
def analyze_code_size(database, output):
    """Measure production and test code size, save both and print the ratio."""
    und_db = understand.open(database)
    report_dir = create_report_directory(output)

    production_metrics = measure_code_size(und_db)
    save_code_size(production_metrics, report_dir)

    test_metrics = measure_test_code_size(und_db)
    save_test_code_size(test_metrics, report_dir)

    print_test_code_ratio(production_metrics, test_metrics)
def analyze_function_parameters(database, output):
    """Determine the function-parameters profile, print and save it."""
    print("Analyzing function parameters.")
    parameters_profile = create_function_parameters_profile()
    und_db = understand.open(database)
    parameters_profile = determine_function_parameters_profile(
        parameters_profile, und_db
    )
    parameters_profile.print()
    parameters_profile.save(
        os.path.join(create_report_directory(output), "function_parameters.csv")
    )
def test_create_report_directory_directory_created(path_exists_mock, makedirs_mock):
    """Test that create_report_directory creates a directory that does not exist."""
    # arrange
    expected_dir = r"c:\temp\reports"
    path_exists_mock.return_value = False

    # act
    actual_dir = create_report_directory(expected_dir)

    # assert
    assert actual_dir == expected_dir
    path_exists_mock.assert_called_once()
    makedirs_mock.assert_called_once()
def analyze_size_per_code_type(settings):
    """Measure the code size per configured code type and save a combined profile.

    Returns the collected size metrics keyed by code type.
    """
    report_dir = create_report_directory(settings["report_directory"])
    metrics_per_type = {}
    for code_type in settings["code_type"]:
        report_file = os.path.join(
            report_dir, f"{code_type}_code_volume_profile.csv"
        )
        measure_lines_of_code(
            settings["analysis_directory"],
            report_file,
            settings[f"{code_type}_filter"],
        )
        metrics_per_type[code_type] = get_size_metrics(report_file)
        save_code_metrics(report_file, metrics_per_type[code_type])
    save_code_type_profile(report_dir, metrics_per_type)
    return metrics_per_type
def main():
    """Start of the program.

    Reads a coverage XML file, optionally prints the coverage of a single
    namespace, determines the coverage per namespace and saves it to the
    report directory. With --verbose, prints the coverage per namespace.
    """
    args = parse_arguments()
    xml_doc = read_coverage(args.filename)

    if args.namespace:
        coverage = determine_coverage_of_namespace(xml_doc, args.namespace)
        print(coverage)

    coverage_per_namespace = determine_coverage_per_namespace(xml_doc)
    report_dir = create_report_directory(args.reportdir)
    save_coverage_per_namespace(coverage_per_namespace, report_dir)

    if args.verbose:
        # Bug fix: the original iterated over .items() (yielding
        # (name, value) tuples) and then indexed the dict with the whole
        # tuple, which raised KeyError. Unpack the pair instead.
        for name, namespace_coverage in coverage_per_namespace.items():
            print(name, " : ", namespace_coverage)
def measure_lines_of_code(settings):
    """Measure the lines of code using cloc and return cloc's stdout as text."""
    report_dir = create_report_directory(settings["report_directory"])
    # NOTE(review): the report file is named "code_duplication" although this
    # function measures lines of code — looks like a copy/paste from the
    # duplication measurement; confirm with consumers before renaming.
    report_file = os.path.join(report_dir, "code_duplication")
    cloc_command = [
        "cloc",
        "--csv",
        "--hide-rate",
        "--quiet",
        "--exclude-dir=test,tst",
        settings["analysis_directory"],
    ]
    cloc_process = Subprocess(cloc_command, verbose=1)
    result = cloc_process.execute_pipe(
        report_dir, report_file, check_return_code=False
    )
    return result.stdout.decode("utf-8")
def perform_analysis(analysis):
    """Perform the requested analysis.

    Measures the function metrics, determines the profiles and runs each
    requested analysis. Each analysis now runs at most once: the original
    ran an analysis twice when both the "all" option and the corresponding
    individual option were selected.
    """
    report_dir = create_report_directory(analysis.output)
    metrics_file = measure_function_metrics(analysis.input, report_dir)

    profiles = create_profiles()
    determine_profiles(profiles, metrics_file)

    if analysis.all or analysis.complexity:
        analyze_complexity(report_dir, profiles)
    if analysis.all or analysis.function_size:
        analyze_function_size(report_dir, profiles)
    if analysis.all or analysis.parameters:
        analyze_parameters(report_dir, profiles)
def collect_function_metrics(database, output):
    """Collect per-function metrics from an Understand database into a CSV file.

    Writes one row per function/method/procedure with its long name, lines
    of code, cyclomatic complexity, fan-in, fan-out and parameter count.
    """
    understand_database = understand.open(database)
    report_file = os.path.join(
        create_report_directory(output), "function_metrics.csv"
    )
    with open(report_file, "w", encoding="utf-8") as output_file:
        csv_writer = csv.writer(
            output_file, delimiter=",", lineterminator="\n", quoting=csv.QUOTE_ALL
        )
        csv_writer.writerow([
            "FunctionName",
            "LinesOfCode",
            "CyclomaticComplexity",
            "Fan-in",
            "Fan-out",
            "NumberOfParameters",
        ])
        for func in understand_database.ents("function,method,procedure"):
            # Request only the metrics that are written: the original also
            # fetched blank/comment/inactive line counts it never used.
            metrics = func.metric([
                "CountLineCode",
                "Cyclomatic",
                "CountInput",
                "CountOutput",
            ])
            # Bug fix: "".split(",") yields [""], so the original counted
            # one parameter for parameterless functions.
            parameters = func.parameters()
            parameter_count = len(parameters.split(",")) if parameters else 0
            csv_writer.writerow([
                func.longname(),
                metrics["CountLineCode"],
                metrics["Cyclomatic"],
                metrics["CountInput"],
                metrics["CountOutput"],
                parameter_count,
            ])
def measure_code_duplication(settings):
    """Measure the amount of code duplication using cpd; return its stdout as text."""
    report_dir = create_report_directory(settings["report_directory"])
    report_file = os.path.join(report_dir, "code_duplication")
    # Renamed local: the original called this "measure_function_size_command",
    # which did not describe the cpd duplication command it holds.
    cpd_command = [
        "cpd",
        "--language",
        settings["language"],
        "--minimum-tokens",
        settings["tokens"],
        "--format",
        "csv",
        "--files",
        settings["analysis_directory"],
    ]
    cpd_process = Subprocess(cpd_command, verbose=1)
    result = cpd_process.execute_pipe(
        report_dir, report_file, check_return_code=False
    )
    return result.stdout.decode("utf-8")