def main():
    """Entry point to the QA Dashboard.

    Reads configuration and CLI flags, gathers CI-job / code-quality /
    liveness / SLA data for all configured repositories, dumps the collected
    metrics into ``results.json``, and uploads that file to Firebase storage.

    NOTE(review): a second ``def main()`` appears later in this module and
    rebinds the name, so this definition is shadowed at import time — confirm
    which variant is intended to run.
    """
    # NOTE(review): `log.critical` is used below for plain progress messages,
    # presumably so they print regardless of the configured level — the
    # level-setting call is commented out. Confirm this is intentional.
    #log.setLevel(log.critical)
    log.critical("Setup")
    #with log.indent():
    config = Config()
    cli_arguments = cli_parser.parse_args()
    repositories = Repositories(config)

    # some CLI arguments are used to DISABLE given feature of the dashboard,
    # but let's not use double negation everywhere :)
    ci_jobs_table_enabled = not cli_arguments.disable_ci_jobs
    code_quality_table_enabled = not cli_arguments.disable_code_quality
    liveness_table_enabled = not cli_arguments.disable_liveness
    sla_table_enabled = not cli_arguments.disable_sla
    clone_repositories_enabled = cli_arguments.clone_repositories
    cleanup_repositories_enabled = cli_arguments.cleanup_repositories

    log.critical("Environment variables check")
    #with log.indent():
    check_environment_variables()
    log.critical("Environment variables check done")
    log.critical("Setup done")

    results = Results()

    # list of repositories to check
    results.repositories = repositories.repolist

    # we need to know which tables are enabled or disabled to proper process the template
    results.sla_table_enabled = sla_table_enabled
    results.liveness_table_enabled = liveness_table_enabled
    results.code_quality_table_enabled = code_quality_table_enabled
    results.ci_jobs_table_enabled = ci_jobs_table_enabled

    results.teams = teams
    results.sprint = config.get_sprint()
    log.critical("Sprint: " + results.sprint)

    ci_jobs = CIJobs()

    job_statuses = read_job_statuses(ci_jobs, ci_jobs_table_enabled, liveness_table_enabled)

    results.smoke_tests_total_builds, results.smoke_tests_success_builds = \
        production_smoketests_status(ci_jobs)

    results.sprint_plan_url = config.get_sprint_plan_url()
    log.critical("Sprint plan URL: " + results.sprint_plan_url)

    code_coverage_threshold = get_code_coverage_threshold(cli_arguments, config)

    # per-team issue-list URLs for the template
    for team in teams:
        results.issues_list_url[team] = config.get_list_of_issues_url(team)

    if liveness_table_enabled:
        prepare_data_for_liveness_table(results, ci_jobs, job_statuses)

    # main data-collection pass over all repositories (may clone/clean them up)
    prepare_data_for_repositories(repositories.repolist, results, ci_jobs, job_statuses,
                                  clone_repositories_enabled, cleanup_repositories_enabled,
                                  code_quality_table_enabled, ci_jobs_table_enabled,
                                  code_coverage_threshold)

    if sla_table_enabled:
        prepare_data_for_sla_table(results)

    if code_quality_table_enabled and liveness_table_enabled:
        export_into_csv(results, repositories.repolist)

    # NOTE(review): HTML dashboard/chart generation is disabled in this variant;
    # this version only exports JSON and uploads it to Firebase.
    #generate_dashboard(results, ignored_files_for_pylint, ignored_files_for_pydocstyle)
    #print(results)
    #generate_charts(results)
    #generate_quality_labels(results)

    jobs = all_ci_badges(results)

    # non-per-repository metrics, stored under the 'others' key of the export
    results_sla = {
        'Stage': results.stage,
        'production': results.production,
        'perf_tests_statistic': results.perf_tests_statistic,
        'generated_on': results.generated_on,
        'ci_jobs': jobs
    }

    # flatten the per-repository metrics into a list of plain dicts for JSON export
    results_json = []
    for repo in results.repositories:
        data = {
            'repository': repo,
            'source_count': results.source_files[repo]['count'],
            'source_lines': results.source_files[repo]['total_lines'],
            'linter_total': results.repo_linter_checks[repo]['total'],
            'linter_passed': results.repo_linter_checks[repo]['passed'],
            'linter_failed': results.repo_linter_checks[repo]['failed'],
            'linter_passed_percent': results.repo_linter_checks[repo]['passed%'],
            'linter_failed_percent': results.repo_linter_checks[repo]['failed%'],
            'docstyle_total': results.repo_docstyle_checks[repo]['total'],
            'docstyle_passed': results.repo_docstyle_checks[repo]['passed'],
            'docstyle_failed': results.repo_docstyle_checks[repo]['failed'],
            'docstyle_passed_percent': results.repo_docstyle_checks[repo]['passed%'],
            'docstyle_failed_percent': results.repo_docstyle_checks[repo]['failed%'],
            'code_coverage': results.unit_test_coverage[repo],
            'cyclomatic_complexity': results.repo_cyclomatic_complexity[repo],
            'maintainability_index': results.repo_maintainability_index[repo],
            'status': results.overall_status[repo],
            'remarks': results.remarks[repo],
        }
        results_json.append(data)

    results_data = {'quality': results_json, 'others': results_sla}
    with open("results.json", "w") as f:
        json.dump(results_data, f)

    # Firebase credentials come from the environment.
    # NOTE(review): "DATABASEURL" has no underscore while the other variable
    # names do (FIREBASE_API_KEY, AUTH_DOMAIN, STORAGE_BUCKET) — verify the
    # deployment actually exports it under this exact name.
    firebase_api_key = os.environ.get("FIREBASE_API_KEY")
    auth_domain = os.environ.get("AUTH_DOMAIN")
    database_url = os.environ.get("DATABASEURL")
    storage_bucket = os.environ.get('STORAGE_BUCKET')

    # NOTE(review): this rebinds `config`, replacing the Config object used
    # above with the pyrebase configuration dict.
    config = {
        "apiKey": firebase_api_key,
        "authDomain": auth_domain,
        "databaseURL": database_url,
        "storageBucket": storage_bucket,
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    # upload the freshly written results.json into the Firebase storage bucket
    storage.child("dashboard_data/results.json").put("results.json")
def main():
    """Entry point to the QA Dashboard.

    Reads configuration and CLI flags, gathers CI-job / code-quality /
    liveness / SLA data for all configured repositories, and generates the
    HTML dashboard, charts, and quality labels.

    NOTE(review): this redefines ``main`` — an earlier ``def main()`` in this
    file (the Firebase-upload variant) is shadowed by this one. Confirm only
    one variant should remain.
    """
    log.setLevel(log.INFO)
    log.info("Setup")
    with log.indent():
        config = Config()
        cli_arguments = cli_parser.parse_args()
        repositories = Repositories(config)

        # some CLI arguments are used to DISABLE given feature of the dashboard,
        # but let's not use double negation everywhere :)
        ci_jobs_table_enabled = not cli_arguments.disable_ci_jobs
        code_quality_table_enabled = not cli_arguments.disable_code_quality
        liveness_table_enabled = not cli_arguments.disable_liveness
        sla_table_enabled = not cli_arguments.disable_sla
        clone_repositories_enabled = cli_arguments.clone_repositories
        cleanup_repositories_enabled = cli_arguments.cleanup_repositories

        log.info("Environment variables check")
        with log.indent():
            check_environment_variables()
            log.success("Environment variables check done")
        log.success("Setup done")

    results = Results()

    # list of repositories to check
    results.repositories = repositories.repolist

    # we need to know which tables are enabled or disabled to proper process the template
    results.sla_table_enabled = sla_table_enabled
    results.liveness_table_enabled = liveness_table_enabled
    results.code_quality_table_enabled = code_quality_table_enabled
    results.ci_jobs_table_enabled = ci_jobs_table_enabled

    results.teams = teams
    results.sprint = config.get_sprint()
    log.info("Sprint: " + results.sprint)

    ci_jobs = CIJobs()

    job_statuses = read_job_statuses(ci_jobs, ci_jobs_table_enabled, liveness_table_enabled)

    results.smoke_tests_total_builds, results.smoke_tests_success_builds = \
        production_smoketests_status(ci_jobs)

    results.sprint_plan_url = config.get_sprint_plan_url()
    log.info("Sprint plan URL: " + results.sprint_plan_url)

    code_coverage_threshold = get_code_coverage_threshold(cli_arguments, config)

    # per-team issue-list URLs for the template
    for team in teams:
        results.issues_list_url[team] = config.get_list_of_issues_url(team)

    if liveness_table_enabled:
        prepare_data_for_liveness_table(results, ci_jobs, job_statuses)

    # main data-collection pass over all repositories (may clone/clean them up)
    prepare_data_for_repositories(repositories.repolist, results, ci_jobs, job_statuses,
                                  clone_repositories_enabled, cleanup_repositories_enabled,
                                  code_quality_table_enabled, ci_jobs_table_enabled,
                                  code_coverage_threshold)

    if sla_table_enabled:
        prepare_data_for_sla_table(results)

    if code_quality_table_enabled and liveness_table_enabled:
        export_into_csv(results, repositories.repolist)

    # render the HTML dashboard plus charts and quality labels
    # (ignored_files_for_pylint / ignored_files_for_pydocstyle are
    # module-level names defined elsewhere in this file)
    generate_dashboard(results, ignored_files_for_pylint, ignored_files_for_pydocstyle)
    generate_charts(results)
    generate_quality_labels(results)