def insert_info(self, stage_id):
    """Store every texttest failure found by parse_info() for this stage."""
    failures = self.parse_info()
    if not failures:
        return
    for failure_group in failures.values():
        # Each failure is a (index, failure_type, document_name) triple.
        for index, failure_type, document_name in failure_group:
            get_connection().insert_texttest_failure(
                stage_id, index, failure_type, document_name)
def insert_info(self, stage_id):
    """Record each parsed texttest failure against the given stage id."""
    failures = self.parse_info()
    if failures:
        for failure_list in failures.values():
            for failure in failure_list:
                index, failure_type, document_name = failure
                get_connection().insert_texttest_failure(
                    stage_id, index, failure_type, document_name)
def main():
    """Initialise the go client (file- or server-backed), open the DB and
    warm the pipeline-group cache."""
    config = app.config
    if 'FILE_CLIENT' in config:
        go_client(config['FILE_CLIENT'])
    else:
        credentials = (config['GO_SERVER_USER'], config['GO_SERVER_PASSWD'])
        go_client(config['GO_SERVER_URL'], credentials)
    get_connection(config.get('DB_PATH'))
    get_all_pipeline_groups()
def main():
    """Set up the go client, open the database connection and load the
    pipeline groups once at startup."""
    if 'FILE_CLIENT' not in app.config:
        auth = (app.config['GO_SERVER_USER'], app.config['GO_SERVER_PASSWD'])
        go_client(app.config['GO_SERVER_URL'], auth)
    else:
        go_client(app.config['FILE_CLIENT'])
    get_connection(app.config.get('DB_PATH'))
    get_all_pipeline_groups()
def get_all_pipeline_groups():
    """Group the known pipelines by their pipeline group.

    Returns a list of [group_name, 'checked', pipeline_names] entries.
    Also refreshes the module-level group_of_pipeline mapping as a side
    effect.
    """
    for row in get_connection().list_pipelines():
        group_of_pipeline[row['pipeline_name']] = row['pipeline_group']
    grouped = defaultdict(list)
    for name, group in group_of_pipeline.items():
        grouped[group].append(name)
    return [[group, 'checked', names] for group, names in grouped.items()]
def get_all_pipeline_groups():
    """Build [group_name, 'checked', member_pipelines] lists for every
    pipeline group, updating the module-level group_of_pipeline map."""
    for pipeline in get_connection().list_pipelines():
        group_of_pipeline[pipeline['pipeline_name']] = pipeline['pipeline_group']
    by_group = defaultdict(list)
    for pipeline_name, pipeline_group in group_of_pipeline.items():
        by_group[pipeline_group].append(pipeline_name)
    pipeline_groups = []
    for pipeline_group, members in by_group.items():
        pipeline_groups.append([pipeline_group, 'checked', members])
    return pipeline_groups
def synchronize(pipelines):
    """Bring each (pipeline, begin_sync_index) pair up to date with GO."""
    for name, requested_start in pipelines:
        already_synced = data_access.get_connection().get_highest_pipeline_count(name)
        # Never re-fetch below what is already stored locally.
        start = max(requested_start, already_synced)
        newest = go_request.get_max_pipeline_status(name)[0]
        log("Will synchronize " + name + " from " + str(start) + " onwards.")
        # When already at the newest counter, still fetch a window of 10;
        # otherwise fetch exactly the missing range.
        count = 10 if start == newest else newest - start
        actions.fetch_pipelines(name, start, newest, count, 0)
def pipelines():
    """Render the pipelines overview page."""
    pipeline_list = get_connection().list_pipelines()
    html = render_template(
        'pipelines.html',
        go_server_url=app.config['PUBLIC_GO_SERVER_URL'],
        pipelines=pipeline_list,
        log_parsers=['junit', 'characterize'],
        now=datetime.now(),
        theme=get_bootstrap_theme(),
        footer=get_footer(),
        application_root=app.config['APPLICATION_ROOT'])
    return make_response(html)
def pipelines():
    """Build and return the HTTP response for the pipelines overview."""
    listed = get_connection().list_pipelines()
    template = render_template(
        'pipelines.html',
        go_server_url=app.config['PUBLIC_GO_SERVER_URL'],
        pipelines=listed,
        log_parsers=['junit', 'characterize'],
        now=datetime.now(),
        theme=get_bootstrap_theme(),
        footer=get_footer(),
        application_root=app.config['APPLICATION_ROOT'])
    response = make_response(template)
    return response
def pull(pipeline_name, subsequent_pipelines, start, dry_run):
    """Report sync status for a pipeline, then fetch from GO unless dry_run."""
    latest_pipeline = data_access.get_connection().get_highest_pipeline_count(pipeline_name)
    print("In pipeline: " + pipeline_name)
    max_pipeline_status, max_available_pipeline = go_request.get_max_pipeline_status(pipeline_name)
    print("Latest synced pipeline locally: " + str(latest_pipeline))
    print("Latest pipeline in GO: " + str(max_pipeline_status))
    print("Latest available pipeline: " + str(max_available_pipeline))
    # assert_correct_input may clamp/adjust every value before fetching.
    checked = assert_correct_input(
        pipeline_name, latest_pipeline, max_pipeline_status,
        subsequent_pipelines, max_available_pipeline, start=start)
    pipeline_name, latest_pipeline, max_pipeline_status, subsequent_pipelines, start = checked
    if dry_run:
        print("Dry run!")
    else:
        fetch_pipelines(pipeline_name, latest_pipeline, max_pipeline_status,
                        subsequent_pipelines, start)
        print("Done.")
def get_log_parser_name(pipeline_name):
    """Look up the log parser configured for the named pipeline."""
    record = get_connection().get_pipeline(pipeline_name)
    return record['log_parser']
def insert_info(self, stage_id):
    """Record every junit failure parsed from this stage's log."""
    failures = self.parse_info()
    if not failures:
        return
    for error in failures:
        # error[0] = failure type, error[1] = failure information.
        get_connection().insert_junit_failure_information(
            stage_id, error[0], error[1])
def insert_info(self, stage_id):
    """Push each parsed junit failure into the database for this stage."""
    failures = self.parse_info()
    if failures:
        for entry in failures:
            get_connection().insert_junit_failure_information(
                stage_id, entry[0], entry[1])
        # Leaf value: record it as a single (prefix, value) node.
        # NOTE(review): this statement is the tail of a constructor whose
        # `def` lies outside this view.
        self.nodes = [(prefix, json_structure)]

    @classmethod
    def json_nodes_list(cls, json_structure, prefix=None):
        """Flatten every element of a JSON list into (prefix, value) nodes.

        Each element is recursively expanded by constructing cls on it
        with the same prefix; the resulting nodes are concatenated.
        """
        result = []
        for elm in json_structure:
            result.extend(cls(elm, prefix).nodes)
        return result

    @classmethod
    def json_nodes_dict(cls, json_structure, prefix=None):
        """Flatten a JSON dict into nodes, joining nested keys with '.'.

        A key at the top level (no prefix yet) becomes the prefix itself;
        deeper keys are appended as 'parent.child'.
        """
        result = []
        for key, value in json_structure.items():
            if not prefix:
                new_prefix = key
            else:
                new_prefix = prefix + '.' + key
            result.extend(cls(value, new_prefix).nodes)
        return result


if __name__ == '__main__':
    # Script entry point: configure the go client from CLI arguments,
    # then run a full synchronization pass.
    setup_go_client(parse_args())
    go = go_client.go_client()
    db = data_access.get_connection(app_config.get_app_config().cfg['DB_PATH'])
    controller = SyncController(db, go)
    log("Starting synchronization.")
    controller.sync()
    log("Synchronization finished.")
    log('Done!')
def info(pipeline_name):
    """Print GO-server and local sync counters for one pipeline."""
    local_counter = data_access.get_connection().get_highest_pipeline_count(pipeline_name)
    print("In pipeline: " + pipeline_name)
    go_status = go_request.get_max_pipeline_status(pipeline_name)
    print("Current pipeline counter in GO, latest available pipeline: " + str(go_status))
    print("Latest synced pipeline locally: {}".format(local_counter))
def pipelines_log_parser(pipeline_name):
    """Persist the log-parser choice submitted in the request form."""
    chosen_parser = request.form['log_parser']
    get_connection().update_pipeline(pipeline_name, log_parser=chosen_parser)
    return '', 204
def pipelines_sync(pipeline_name):
    """Enable syncing for the pipeline on PUT, disable it otherwise."""
    enable = request.method == 'PUT'
    get_connection().update_pipeline(pipeline_name, sync=enable)
    return '', 204
def insights(pipeline_name):
    """Render the insights page for one pipeline: current/previous stage
    status, git history since the last passing stage, and links back into
    the GO server UI."""
    current_stage = get_current_stage(pipeline_name)
    if current_stage is None:
        abort(500, "Database error. Have you tried syncing some pipelines "
                   "using gocddash_sync.py? Current_stage is None.")
    current_status = pipeline_status.create_stage_info(current_stage)
    last_stage = get_previous_stage(current_stage)
    previous_status = pipeline_status.create_stage_info(last_stage)
    latest_passing_stage = get_latest_passing_stage(pipeline_name)
    # Position of the current stage within the pipeline's stage order.
    stage_name_index = (get_connection().get_stage_order(pipeline_name)).index(current_stage.stage_name)
    git_history = []
    perpetrator_data = []
    if not current_stage.is_success():
        if latest_passing_stage is None:
            # No passing stage synced yet: fall back to the oldest one we have.
            latest_passing_stage = get_first_synced_stage(pipeline_name)
        # More than one pipeline run between the last pass and the failure:
        # fetch the comparison that points at the likely breaking change.
        if not current_stage.pipeline_counter - latest_passing_stage.pipeline_counter == 1:
            perpetrator_data = get_git_comparison(pipeline_name, latest_passing_stage.pipeline_counter + 1,
                                                  latest_passing_stage.pipeline_counter,
                                                  app.config['PREFERRED_UPSTREAM'])
        git_history = get_git_comparison(pipeline_name, current_stage.pipeline_counter,
                                         latest_passing_stage.pipeline_counter,
                                         app.config['PREFERRED_UPSTREAM'])
    # NOTE(review): on the success path latest_passing_stage may still be
    # None (never reassigned above), which would break comparison_link and
    # the failure_tip call below — confirm get_latest_passing_stage's
    # contract for green pipelines.
    base_url = app.config['PUBLIC_GO_SERVER_URL']
    rerun_link = base_url + "pipelines/{}/{}/{}/{}".format(current_stage.pipeline_name,
                                                           current_stage.pipeline_counter,
                                                           current_stage.stage_name,
                                                           current_stage.stage_counter)
    job_to_display = get_job_to_display(current_stage.stage_id)
    if job_to_display:
        log_link = base_url + "tab/build/detail/{}/{}/{}/{}/{}#tab-tests".format(
            current_stage.pipeline_name, current_stage.pipeline_counter, current_stage.stage_name,
            current_stage.stage_counter, job_to_display.job_name)
    else:
        # Placeholder when no job can be chosen for the stage.
        log_link = 'FIX_THIS'
    main_pipeline_link = base_url + "tab/pipeline/history/{}".format(current_stage.pipeline_name)
    comparison_link = base_url + "compare/{}/{}/with/{}".format(current_stage.pipeline_name,
                                                                current_stage.pipeline_counter,
                                                                latest_passing_stage.pipeline_counter)
    dash_status = get_cctray_status()
    recommendation, last_claim = failure_tip.get_failure_tip(current_status, previous_status,
                                                             latest_passing_stage.pipeline_counter)
    # NOTE(review): 'perpretrator_data' is misspelled but is the keyword the
    # template apparently expects; renaming it requires a template change.
    template = render_template(
        'insights.html',
        go_server_url=app.config['PUBLIC_GO_SERVER_URL'],
        now=datetime.now(),
        theme=get_bootstrap_theme(),
        footer=get_footer(),
        current_status=current_status,
        git_history=git_history,
        rerun_link=rerun_link,
        comparison_link=comparison_link,
        live_info=dash_status.pipelines[pipeline_name],
        latest_passing_stage=latest_passing_stage,
        previous_status=previous_status,
        recommendation=recommendation,
        last_claim=last_claim,
        log_link=log_link,
        main_pipeline_link=main_pipeline_link,
        stage_name_index=stage_name_index,
        application_root=app.config['APPLICATION_ROOT'],
        username=app.config['GO_SERVER_USER'],
        passwd=app.config['GO_SERVER_PASSWD'],
        rerun_token=app.config['RERUN_TOKEN'],
        perpretrator_data=perpetrator_data
    )
    return make_response(template)
def pipelines_email_notifications(pipeline_name):
    """Toggle email notifications: on for PUT, off for any other method."""
    enabled = request.method == 'PUT'
    get_connection().update_pipeline(pipeline_name, email_notifications=enabled)
    return '', 204
def pipelines_email_notifications(pipeline_name):
    """Turn email notifications on (PUT) or off (other methods)."""
    get_connection().update_pipeline(
        pipeline_name,
        email_notifications=(request.method == 'PUT'))
    return '', 204
def get_email_notif(pipeline_name):
    """Return whether email notifications are enabled for the pipeline."""
    return get_connection().get_pipeline(pipeline_name)['email_notifications']
def insights(pipeline_name):
    """Render the insights view for a pipeline.

    Gathers the current and previous stage status, the git comparison back
    to the latest passing run (for failures), and deep links into the GO
    server, then renders insights.html.
    """
    current_stage = get_current_stage(pipeline_name)
    if current_stage is None:
        abort(
            500, "Database error. Have you tried syncing some pipelines "
                 "using gocddash_sync.py? Current_stage is None.")
    current_status = pipeline_status.create_stage_info(current_stage)
    last_stage = get_previous_stage(current_stage)
    previous_status = pipeline_status.create_stage_info(last_stage)
    latest_passing_stage = get_latest_passing_stage(pipeline_name)
    # Index of the current stage inside the pipeline's ordered stage list.
    stage_name_index = (get_connection().get_stage_order(pipeline_name)).index(
        current_stage.stage_name)
    git_history = []
    perpetrator_data = []
    if not current_stage.is_success():
        if latest_passing_stage is None:
            # Nothing has ever passed: use the first synced stage as baseline.
            latest_passing_stage = get_first_synced_stage(pipeline_name)
        # If the failure is not the very next run after the last pass, also
        # fetch the commits of the first failing run (the likely culprit).
        if not current_stage.pipeline_counter - latest_passing_stage.pipeline_counter == 1:
            perpetrator_data = get_git_comparison(
                pipeline_name, latest_passing_stage.pipeline_counter + 1,
                latest_passing_stage.pipeline_counter,
                app.config['PREFERRED_UPSTREAM'])
        git_history = get_git_comparison(pipeline_name,
                                         current_stage.pipeline_counter,
                                         latest_passing_stage.pipeline_counter,
                                         app.config['PREFERRED_UPSTREAM'])
    # NOTE(review): if the pipeline is green, latest_passing_stage is never
    # reassigned and could be None here — verify before it is dereferenced
    # in comparison_link / failure_tip below.
    base_url = app.config['PUBLIC_GO_SERVER_URL']
    rerun_link = base_url + "pipelines/{}/{}/{}/{}".format(
        current_stage.pipeline_name, current_stage.pipeline_counter,
        current_stage.stage_name, current_stage.stage_counter)
    job_to_display = get_job_to_display(current_stage.stage_id)
    if job_to_display:
        log_link = base_url + "tab/build/detail/{}/{}/{}/{}/{}#tab-tests".format(
            current_stage.pipeline_name, current_stage.pipeline_counter,
            current_stage.stage_name, current_stage.stage_counter,
            job_to_display.job_name)
    else:
        # Deliberate placeholder when no displayable job exists.
        log_link = 'FIX_THIS'
    main_pipeline_link = base_url + "tab/pipeline/history/{}".format(
        current_stage.pipeline_name)
    comparison_link = base_url + "compare/{}/{}/with/{}".format(
        current_stage.pipeline_name, current_stage.pipeline_counter,
        latest_passing_stage.pipeline_counter)
    dash_status = get_cctray_status()
    recommendation, last_claim = failure_tip.get_failure_tip(
        current_status, previous_status, latest_passing_stage.pipeline_counter)
    # NOTE(review): the misspelled 'perpretrator_data' keyword matches what
    # the template presumably reads; fixing it needs a coordinated change.
    template = render_template(
        'insights.html',
        go_server_url=app.config['PUBLIC_GO_SERVER_URL'],
        now=datetime.now(),
        theme=get_bootstrap_theme(),
        footer=get_footer(),
        current_status=current_status,
        git_history=git_history,
        rerun_link=rerun_link,
        comparison_link=comparison_link,
        live_info=dash_status.pipelines[pipeline_name],
        latest_passing_stage=latest_passing_stage,
        previous_status=previous_status,
        recommendation=recommendation,
        last_claim=last_claim,
        log_link=log_link,
        main_pipeline_link=main_pipeline_link,
        stage_name_index=stage_name_index,
        application_root=app.config['APPLICATION_ROOT'],
        username=app.config['GO_SERVER_USER'],
        passwd=app.config['GO_SERVER_PASSWD'],
        rerun_token=app.config['RERUN_TOKEN'],
        perpretrator_data=perpetrator_data)
    return make_response(template)
#!/usr/bin/env python3 from gocddash.analysis.data_access import get_connection, create_connection if __name__ == '__main__': conn = create_connection() get_connection().truncate_tables()
def all_info():
    """List every synced pipeline with its local counter and the counter
    reported by the GO server."""
    synced = data_access.get_connection().get_synced_pipelines()
    print("I have these pipelines: ")
    print("Pipeline \t\tLocal \tIn Go")
    for entry in synced:
        in_go = go_request.get_max_pipeline_status(entry[0])[1]
        print("{}\t{}\t{}".format(entry[0], entry[1], in_go))