Example #1
def trace_view():
    """Request handler for viewing perf trace profiles."""
    global all_stack_data
    global svgchart
    job = str(request.args.get("job"))
    trace_model.job = job
    trace_model.start = 0.0
    trace_model.stop = sys.maxsize
    trace_model.layout.results = GlobalData.results_files
    # Stacks already loaded - just update
    if job in all_stack_data:
        update_trace_model(job)
        all_stack_data[job].read_data(
            start=trace_model.start,
            stop=trace_model.stop,
            selected_ids=trace_model.selected_ids,
        )
    # Load new stack data into memory and set default parameters
    else:
        all_stack_data[job] = TraceData(
            GlobalData.results_files,
            GlobalData.local_data,
            GlobalData.loaded_cpu_definition,
            data_id=job,
            debug=GlobalData.debug,
            n_proc=GlobalData.n_proc,
        )
        update_trace_model(job)
    trace_model.process_names = all_stack_data[job].get_all_process_names()
    trace_model.jobs = all_stack_data[job].get_all_jobs()
    trace_model.system_wide = all_stack_data[job].get_system_wide_mode_enabled()

    # Prepare plots
    purge(GlobalData.local_data, ".svg")
    flamegraph_type = "cumulative"
    trace_model.layout.flamegraph = get_flamegraph(flamegraph_type, job,
                                                   trace_model.start,
                                                   trace_model.stop)
    trace_model.layout.timelines = get_timelines(job, trace_model.start,
                                                 trace_model.stop)
    # Setup general layout
    ids = all_stack_data[job].get_all_process_ids()
    trace_model.layout.reference_id = ids[0].label
    trace_model.layout.title = "Trace Analysis: " + job
    trace_model.layout.footer = "Loaded Results: " + " & ".join(
        trace_model.layout.results)
    return render_template(
        "TraceView.html",
        events=GlobalData.loaded_cpu_definition.get_active_events(),
        trace_jobs=GlobalData.trace_jobs,
        event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
        all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
        jobs=GlobalData.jobs,
        processes=GlobalData.processes,
        trace_model=trace_model,
        enabled_modes=GlobalData.enabled_modes,
        ids=ids,
    )
Example #2
def reset_timelines():
    global trace_model
    job = trace_model.job
    trace_model.start = -0.0000001
    trace_model.stop = sys.maxsize
    purge(GlobalData.local_data, ".svg")
    trace_model.layout.start = trace_model.start
    trace_model.layout.stop = trace_model.stop
    trace_model.layout.flamegraph = get_flamegraph(trace_model.flamegraph_type,
                                                   job, trace_model.start,
                                                   trace_model.stop)
    trace_model.layout.timelines = get_timelines(job, trace_model.start,
                                                 trace_model.stop)
    return jsonify(trace_model.layout.to_dict())
Example #3
def execute_perf(self, job_settings):
    local_data = GlobalData.local_data
    purge(local_data, job_settings.job_name + "_proc")
    purge(local_data, job_settings.job_name + "_host")
    purge(local_data, job_settings.job_name + ".results")
    purge(local_data, job_settings.job_name + ".done")
    self.scriptwriting_logger.info(u" Write perf script")
    perf_script = self.write_perf_script(local_data)
    self.scriptwriting_logger.info(u" Setup background threads")
    working_dir = job_settings.working_directory_linux
    try:
        background_thread = threading.Thread(
            target=self.run_perf_job,
            args=(
                working_dir,
                self.job.use_ssh,
                self.job.job_id,
                local_data,
                perf_script,
                self.job.system_wide,
                self.job.mpi_config_files,
                job_settings,
            ),
        )
        background_thread.daemon = True
        background_thread.start()
    except Exception:
        # Propagate thread-setup failures unchanged, preserving the original
        # exception type and traceback.
        raise
Example #4
def index():
    """Request handler for job submission and results/settings loading."""
    global status
    global layout
    global main_logger
    global logfile
    global submitted_jobs
    global initialise
    if request.method == "POST":
        # Load requested results - just check which events can be found in requested results files at this point
        load_perf_data = "perf_btn" in request.form
        load_hpc_data = "hpc_btn" in request.form
        load_profile_data = "profile_btn" in request.form
        if load_profile_data or load_hpc_data or load_perf_data:
            experiment_file = None
            perf_data_files = []
            allfiles = request.files.getlist("file[]")
            reset_data_structures()
            for foundfile in allfiles:
                if foundfile.filename == "":
                    return render_template(
                        "index.html",
                        layout=layout,
                        events=GlobalData.loaded_cpu_definition.get_active_events(),
                        trace_jobs=GlobalData.trace_jobs,
                        event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
                        all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
                        jobs=GlobalData.jobs,
                        processes=GlobalData.processes,
                        job_settings=GlobalData.job_settings.to_dict(),
                        enabled_modes=GlobalData.enabled_modes,
                    )
                if ((load_profile_data and allowed_file(foundfile.filename))
                        or load_hpc_data or load_perf_data):
                    filename = secure_filename(foundfile.filename)
                    if load_hpc_data:
                        if not os.path.exists(
                                os.path.join(GlobalData.local_data,
                                             foundfile.filename)):
                            path_to_dir = pathlib.Path(
                                pathlib.Path(GlobalData.local_data) /
                                pathlib.Path(foundfile.filename)).parent
                            pathlib.Path(path_to_dir).mkdir(parents=True,
                                                            exist_ok=True)
                            try:
                                foundfile.save(
                                    str(path_to_dir /
                                        pathlib.Path(foundfile.filename).name))
                            except Exception as e:
                                main_logger.info(
                                    u" Failed copy: " +
                                    str(path_to_dir /
                                        pathlib.Path(foundfile.filename).name)
                                    + ": " + str(e))
                        if is_hpc_experiment_file(foundfile.filename):
                            experiment_file = str(
                                pathlib.Path(
                                    pathlib.Path(GlobalData.local_data) /
                                    pathlib.Path(foundfile.filename)))
                    elif is_results_file(filename):
                        results_file = os.path.basename(foundfile.filename)
                        if results_file not in GlobalData.results_files:
                            GlobalData.results_files.append(filename)
                    elif load_perf_data:
                        perf_data_files.append(filename)
                        perf_working_directory = request.form[
                            "path_to_perf_data"]

            if len(perf_data_files) > 0:
                jobhandler = JobHandler(GlobalData.root_directory)
                results = jobhandler.convert_perf_data(perf_data_files,
                                                       GlobalData.local_data,
                                                       perf_working_directory)
                GlobalData.results_files.append(results)

            if experiment_file:
                analysis_level = request.form["analysis_level"]
                if re.match(".*Line", analysis_level):
                    include_loops = True
                    include_statements = True
                elif re.match(".*Loop", analysis_level):
                    include_loops = True
                    include_statements = False
                else:  # "Procedure"
                    include_loops = False
                    include_statements = False
                hpc_experiment = HPCExperimentHandler(GlobalData.local_data,
                                                      experiment_file)
                results = hpc_experiment.create_results(
                    include_loops, include_statements)
                main_logger.info(u"Created HPC Experiment results: " +
                                 hpc_experiment.get_results_file_name() +
                                 " from Experiment " +
                                 hpc_experiment.get_experiment_file_name())
                results_file = os.path.basename(results)
                if results_file not in GlobalData.results_files:
                    GlobalData.results_files.append(results_file)
                GlobalData.hpc_results.append(
                    HPCResultsHandler(GlobalData.local_data, results_file))
                main_logger.info(u"Loaded HPC Experiment results: " +
                                 results_file)
            else:
                for results_file in GlobalData.results_files:
                    full_path = os.path.join(GlobalData.local_data,
                                             results_file)
                    if is_hpc_result(full_path):
                        GlobalData.hpc_results.append(
                            HPCResultsHandler(GlobalData.local_data,
                                              results_file))
                        main_logger.info(u"Loaded HPC Experiment results: " +
                                         results_file)

            purge(GlobalData.local_data, "_compressed")
            layout["Results"] = GlobalData.results_files
            main_logger.info(u"Loaded Results " +
                             ", ".join(GlobalData.results_files))
            GlobalData.processes, raw_events = get_results_info(
                GlobalData.local_data, GlobalData.results_files)
            GlobalData.trace_jobs = get_trace_jobs(GlobalData.local_data,
                                                   GlobalData.results_files)
            GlobalData.cpu = get_cpu(GlobalData.local_data,
                                     GlobalData.results_files)
            GlobalData.loaded_cpu_definition = get_cpu_definition(
                GlobalData.cpu, raw_events)
            loaded_events = GlobalData.loaded_cpu_definition.get_active_events()
            GlobalData.jobs = get_jobs(GlobalData.results_files)
            GlobalData.enabled_modes = (
                GlobalData.loaded_cpu_definition.get_enabled_modes())
            main_logger.info(u"Found events: " + ", ".join(loaded_events))
            status = "Loaded " + " & ".join(layout["Results"])
            layout["title"] = status
            layout["footer"] = "Loaded Results: " + " & ".join(
                layout["Results"])

        # Get settings from previously submitted job
        elif "settings_btn" in request.form:
            foundfile = request.files.getlist("file")[0]
            if foundfile.filename == "":
                return render_template(
                    "index.html",
                    layout=layout,
                    jobs=GlobalData.jobs,
                    events=GlobalData.loaded_cpu_definition.get_active_events(),
                    trace_jobs=GlobalData.trace_jobs,
                    event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
                    all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
                    processes=GlobalData.processes,
                    job_settings=GlobalData.job_settings.to_dict(),
                    enabled_modes=GlobalData.enabled_modes,
                )
            if foundfile and allowed_file(foundfile.filename):
                filename = secure_filename(foundfile.filename)
                if not os.path.exists(
                        os.path.join(GlobalData.local_data, filename)):
                    foundfile.save(
                        os.path.join(app.config["UPLOAD_FOLDER"], filename))
                layout["Settings"] = filename
            try:
                restore_job_data(layout["Settings"])
            except Exception as e:
                main_logger.info(u"Error loading settings - missing Data " +
                                 str(e))
                main_logger.info(u"Loaded Settings: Aborted")
                layout["footer"] = "Loaded Settings: Aborted"
            else:
                main_logger.info(u"Loaded Settings: " + layout["Settings"])
                status = "Loaded " + layout["Settings"]
                layout["title"] = status
                layout["footer"] = "Loaded Settings: " + layout["Settings"]

        # Get details of job to be submitted, and submit job
        elif "run_btn" in request.form:
            GlobalData.job_settings.arguments = request.form["perf_args"]
            GlobalData.job_settings.job_name = request.form["perf_job_name"]
            GlobalData.job_settings.copy_files = request.form["copy_files"]
            if "use_ssh" in request.form:
                GlobalData.job_settings.use_ssh = True
                GlobalData.job_settings.server = request.form["server"]
                GlobalData.job_settings.username = request.form["username"]
                if "password" in request.form:
                    GlobalData.job_settings.password = request.form["password"]
                    GlobalData.job_settings.private_key = ""
                else:
                    GlobalData.job_settings.private_key = request.form[
                        "private_key"]
                    GlobalData.job_settings.password = ""
            else:
                GlobalData.job_settings.use_ssh = False
            if "use_lsf" in request.form:
                GlobalData.job_settings.use_lsf = True
                GlobalData.job_settings.lsf_params = request.form["lsf_params"]
                GlobalData.job_settings.queue = request.form["queue"]
                GlobalData.job_settings.processes_per_node = int(
                    request.form["processes_per_node"])
            else:
                GlobalData.job_settings.use_lsf = False
                GlobalData.job_settings.processes_per_node = 1
            if "use_mpirun" in request.form:
                GlobalData.job_settings.use_mpirun = True
                GlobalData.job_settings.global_mpirun_params = request.form[
                    "global_mpirun_params"]
                GlobalData.job_settings.local_mpirun_params = request.form[
                    "local_mpirun_params"]
                GlobalData.job_settings.mpirun_version = request.form[
                    "mpirun_version"]
                GlobalData.job_settings.processes = int(
                    request.form["processes"])
            else:
                GlobalData.job_settings.use_mpirun = False
                GlobalData.job_settings.processes = 1
            GlobalData.job_settings.run_parallel = "run_parallel" in request.form
            GlobalData.job_settings.run_system_wide = "run_system_wide" in request.form
            GlobalData.job_settings.run_as_root = "run_as_root" in request.form
            GlobalData.job_settings.perf_params = request.form["perf_params"]
            GlobalData.job_settings.frequency = request.form["frequency"]
            GlobalData.job_settings.period = request.form["period"]
            GlobalData.job_settings.working_directory_linux = request.form[
                "working_directory_linux"]
            GlobalData.job_settings.executable = request.form["executable"]
            GlobalData.job_settings.env_variables = request.form[
                "env_variables"]
            GlobalData.job_settings.bin_path = request.form["bin_path"]
            GlobalData.job_settings.lib_path = request.form["lib_path"]
            GlobalData.job_settings.preload = request.form["preload"]
            status = "Submitted Jobs: "
            main_logger.info(u"Preparing job " +
                             GlobalData.job_settings.job_name)

            job = Job(
                GlobalData.job_settings,
                GlobalData.selected_cpu_definition,
                GlobalData.selected_cpu_definition.get_active_raw_events(),
            )

            jobhandler = JobHandler(GlobalData.root_directory, job)

            report_error = False
            if GlobalData.job_settings.use_ssh:
                e = jobhandler.check_connection(GlobalData.job_settings)
                if e != "":
                    report_error = True
                    main_logger.info(u"Job " +
                                     GlobalData.job_settings.job_name + ": " +
                                     e)
                    status = "Error - connection error"
            failed_paths = jobhandler.get_failed_paths(job,
                                                       GlobalData.job_settings)
            if len(failed_paths) > 0:
                report_error = True
                for path in failed_paths:
                    main_logger.info(u"Job " +
                                     GlobalData.job_settings.job_name + ": " +
                                     path + " was not found")
                status = "Error - remote directory is invalid"
            if not GlobalData.selected_cpu_definition.get_active_raw_events():
                report_error = True
                main_logger.info(u"Job " + GlobalData.job_settings.job_name +
                                 ": " + "no performance events selected")
                status = "Error - no performance events selected"
            if report_error:
                main_logger.info(u"Job " + GlobalData.job_settings.job_name +
                                 " Aborted")
                layout["title"] = "Submit Jobs / Load Profiles: " + status
                return render_template(
                    "index.html",
                    layout=layout,
                    events=GlobalData.loaded_cpu_definition.get_active_events(),
                    trace_jobs=GlobalData.trace_jobs,
                    event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
                    all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
                    jobs=GlobalData.jobs,
                    processes=GlobalData.processes,
                    job_settings=GlobalData.job_settings.to_dict(),
                    enabled_modes=GlobalData.enabled_modes,
                )
            main_logger.info(
                u"perf_event_paranoid: " +
                jobhandler.check_perf_event_paranoid(GlobalData.job_settings))
            main_logger.debug(u" Finished preparing scripts")
            main_logger.debug(u" Executing scripts")
            save_job_data()
            try:
                jobhandler.execute_perf(GlobalData.job_settings)
            except Exception as e:
                main_logger.info(u"Error Running Perf Job. " + str(e))
            main_logger.debug(u" Finished executing scripts")
            status = "Submitted Job " + GlobalData.job_settings.job_name
            layout["title"] = "Submit Jobs / Load Profiles: " + status
            layout["footer"] = "Loaded Results: " + " & ".join(
                layout["Results"])
            start_time = get_datetime()
            main_logger.info(u"Job " + GlobalData.job_settings.job_name +
                             " submitted at " +
                             start_time.strftime("%Y-%m-%d %H:%M:%S"))
            submitted_jobs.append({
                "job_name": GlobalData.job_settings.job_name,
                "job_status": "running",
                "start_time": start_time,
            })
            main_logger.info(u"Job " + GlobalData.job_settings.job_name +
                             " is running")

    # Display
    if initialise:
        initialise_app()
        initialise = False
    else:
        status = ""
        layout["title"] = "Submit Jobs / Load Profiles: " + status
    return render_template(
        "index.html",
        layout=layout,
        events=GlobalData.loaded_cpu_definition.get_active_events(),
        trace_jobs=GlobalData.trace_jobs,
        event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
        all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
        jobs=GlobalData.jobs,
        processes=GlobalData.processes,
        job_settings=GlobalData.job_settings.to_dict(),
        enabled_modes=GlobalData.enabled_modes,
    )
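index() folds three POST actions into one handler: loading results (perf_btn, hpc_btn, profile_btn), restoring settings (settings_btn), and submitting a job (run_btn). The upload idiom it repeats (request.files.getlist, then secure_filename, then save) is standard Flask/werkzeug; a minimal sketch under assumed route and folder names:

import os
from flask import Flask, request
from werkzeug.utils import secure_filename

app = Flask(__name__)
app.config["UPLOAD_FOLDER"] = "/tmp/uploads"   # assumption for the sketch
os.makedirs(app.config["UPLOAD_FOLDER"], exist_ok=True)

@app.route("/upload", methods=["POST"])
def upload():
    saved = []
    for f in request.files.getlist("file[]"):
        if f.filename == "":
            continue                           # skip empty form parts
        name = secure_filename(f.filename)     # strip path tricks like ../
        f.save(os.path.join(app.config["UPLOAD_FOLDER"], name))
        saved.append(name)
    return {"saved": saved}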
Example #5
def process_view():
    """Request handler for viewing perf process profiles. All events and threads are loaded for a single process."""
    global process_model
    global all_stack_data
    global svgchart
    process = str(request.args.get("process"))
    process_model.process = process
    process_model.layout.results = GlobalData.results_files
    if process in all_stack_data:
        update_process_model(process)
        all_stack_data[process].read_data(
            start=process_model.start,
            stop=process_model.stop,
            text_filter=process_model.text_filter,
            selected_ids=process_model.selected_ids,
            base_case=process_model.reference_id,
        )
    else:
        all_stack_data[process] = StackData.create_process_data(
            GlobalData.results_files,
            GlobalData.local_data,
            GlobalData.loaded_cpu_definition,
            data_id=process,
            debug=GlobalData.debug,
            n_proc=GlobalData.n_proc,
        )
        update_process_model(process)
    process_model.event_names = all_stack_data[process].get_all_event_names()
    process_model.jobs = all_stack_data[process].get_all_jobs()
    process_model.system_wide = all_stack_data[process].get_system_wide_mode_enabled()
    # Set reference process
    reference_id = all_stack_data[process].get_base_case_id()
    process_model.reference_event_type = reference_id.event_type
    process_model.reference_id = reference_id.label
    if reference_id.event_type == "custom_event_ratio":
        process_model.reference_count = float(reference_id.count2) / float(
            reference_id.count1
        )
    else:
        process_model.reference_count = reference_id.count1
    process_model.num_custom_event_ratios = (
        GlobalData.loaded_cpu_definition.get_num_custom_event_ratios()
    )
    # Prepare plots
    purge(GlobalData.local_data, ".svg")
    process_model.layout.reference_id = process_model.reference_id
    (
        process_model.layout.event_totals_chart,
        process_model.layout.event_totals_table,
    ) = get_barchart(process, process_model.hotspots, svgchart)
    if process_model.num_custom_event_ratios > 0:
        process_model.layout.event_ratios_chart = get_custom_barchart(process, svgchart)
    process_model.layout.flamegraph = get_flamegraph(process)
    (
        process_model.layout.event_time_series,
        process_model.layout.event_ratio_time_series,
    ) = get__timechart(process, svgchart)
    process_model.layout.show_source = len(GlobalData.hpc_results) > 0
    (
        process_model.layout.source_code_table,
        process_model.layout.source_code_info,
        process_model.layout.source_code_line,
    ) = get_source_code("", process_model.reference_id)
    # Setup general layout
    ids = all_stack_data[process].get_all_process_ids()
    process_model.layout.diff = True
    process_model.layout.title = "Process: " + process
    process_model.layout.footer = "Loaded Results: " + " & ".join(
        process_model.layout.results
    )
    return render_template(
        "ProcessView.html",
        events=GlobalData.loaded_cpu_definition.get_active_events(),
        trace_jobs=GlobalData.trace_jobs,
        event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
        all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
        jobs=GlobalData.jobs,
        processes=GlobalData.processes,
        process_model=process_model,
        enabled_modes=GlobalData.enabled_modes,
        ids=ids,
    )
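process_view derives the reference count from the base-case id: a custom_event_ratio id reports count2 / count1, any other id reports the raw count1. A small worked sketch of that rule with hypothetical numbers:

def reference_count(event_type, count1, count2=None):
    # Mirrors the branch above: ratio events divide, plain events pass through.
    if event_type == "custom_event_ratio":
        return float(count2) / float(count1)
    return count1

print(reference_count("custom_event_ratio", 200, 50))   # 0.25
print(reference_count("cycles", 200))                   # 200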
Example #6
def update_all_charts():
    global process_model
    global svgchart
    process = process_model.process
    data = request.get_json()
    if "text_filter" in data:
        process_model.text_filter = data["text_filter"]
    if "new_ref_id" in data:  # Add reference id if not already in flamegraph_ids
        process_model.reference_id = data["new_ref_id"]
        old_ids = all_stack_data[process].get_flamegraph_process_ids()
        ids = []
        add_id = True
        for process_id in old_ids:
            if process_id.label == process_model.reference_id:
                add_id = False
                process_model.flamegraph_event_type = process_id.event_type
            ids.append(process_id)
        if add_id:
            for process_id in process_model.selected_ids:
                if process_id.label == process_model.reference_id:
                    ids.append(process_id)
                    process_model.flamegraph_event_type = process_id.event_type
        all_stack_data[process].set_flamegraph_process_ids(ids)
    if "start" in data:
        process_model.start = data["start"]
        process_model.stop = data["stop"]
    if "process_ids" in data:
        ids = all_stack_data[process].get_all_process_ids()
        process_model.selected_ids = []
        for process_id in ids:
            if process_id.label in data["process_ids"]:
                process_model.selected_ids.append(process_id)
    if "reference_id" in data:
        process_model.reference_id = data["reference_id"]
    if "direction" in data:
        if data["direction"] == "next":
            process_model.hotspots += 10
        else:
            if process_model.hotspots > 10:
                process_model.hotspots -= 10
    all_stack_data[process].read_data(
        start=process_model.start,
        stop=process_model.stop,
        text_filter=process_model.text_filter,
        selected_ids=process_model.selected_ids,
        base_case=process_model.reference_id,
    )
    process_model.reference_event_type = (
        all_stack_data[process].get_base_case_id().event_type
    )
    purge(GlobalData.local_data, ".svg")
    (
        process_model.layout.event_totals_chart,
        process_model.layout.event_totals_table,
    ) = get_barchart(process, process_model.hotspots, svgchart)
    if process_model.num_custom_event_ratios > 0:
        process_model.layout.event_ratios_chart = get_custom_barchart(process, svgchart)
    process_model.layout.flamegraph = get_flamegraph(
        process, flamegraph_event_type=process_model.flamegraph_event_type
    )
    reference_id = all_stack_data[process].get_base_case_id()
    if reference_id.event_type == "custom_event_ratio":
        process_model.reference_count = float(reference_id.count2) / float(
            reference_id.count1
        )
    else:
        process_model.reference_count = reference_id.count1
    (
        process_model.layout.event_time_series,
        process_model.layout.event_ratio_time_series,
    ) = get__timechart(process, svgchart)
    process_model.layout.reference_count = process_model.reference_count
    process_model.layout.reference_id = process_model.reference_id
    process_model.layout.text_filter = process_model.text_filter
    return jsonify(process_model.layout.to_dict())
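update_all_charts follows a JSON-in/JSON-out pattern: the client POSTs only the fields that changed, the server folds them into its model, rebuilds the SVG charts, and returns the refreshed layout. A minimal sketch of the same pattern (route name and state fields are assumptions), including the paging of hotspots in steps of 10:

from flask import Flask, jsonify, request

app = Flask(__name__)
state = {"hotspots": 10, "text_filter": ""}

@app.route("/update_all_charts", methods=["POST"])
def update_all_charts():
    data = request.get_json()
    if "text_filter" in data:
        state["text_filter"] = data["text_filter"]
    if "direction" in data:              # page through hotspots, 10 at a time
        if data["direction"] == "next":
            state["hotspots"] += 10
        elif state["hotspots"] > 10:
            state["hotspots"] -= 10
    return jsonify(state)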
Example #7
def general_analysis():
    """Request handler for general analysis of event data (event1 vs event2)."""
    global all_stack_data
    global all_analysis_data
    analysis_type = "general"
    analysis_model.analysis_type = analysis_type
    analysis_data, cluster_events = get_analysis(analysis_type)
    analysis_model.layout.results = GlobalData.results_files
    (
        analysis_model.event1,
        analysis_model.event2,
    ) = initialise_analysis_model_cluster_data(analysis_data)
    analysis_model.base_event = GlobalData.loaded_cpu_definition.get_base_event()
    base_event = analysis_model.base_event
    events = GlobalData.loaded_cpu_definition.get_active_events()
    # Load base event to generate complete list of available processes - for selection of required processes
    if base_event in all_stack_data:
        update_analysis_model_base_event_data(base_event, events)
        all_stack_data[base_event].read_data(
            start=all_stack_data[base_event].start,
            stop=all_stack_data[base_event].stop,
            text_filter=analysis_model.text_filter,
            selected_ids=analysis_model.base_event_selected_ids,
        )
    else:
        all_stack_data[base_event] = StackData.create_event_data(
            GlobalData.results_files,
            GlobalData.local_data,
            GlobalData.loaded_cpu_definition,
            data_id=base_event,
            debug=GlobalData.debug,
            n_proc=GlobalData.n_proc,
        )
        update_analysis_model_base_event_data(base_event, events)

    # Now load selected events on each of the selected processes
    for process in analysis_model.process_list:
        if process in all_stack_data:
            update_analysis_model_process_data(process)
            all_stack_data[process].read_data(
                start=all_stack_data[process].start,
                stop=all_stack_data[process].stop,
                text_filter=analysis_model.text_filter,
                selected_ids=analysis_model.selected_ids[process],
                base_case=analysis_model.reference_id,
            )
        else:
            all_stack_data[process] = StackData.create_process_data(
                GlobalData.results_files,
                GlobalData.local_data,
                GlobalData.loaded_cpu_definition,
                data_id=process,
                debug=GlobalData.debug,
                n_proc=GlobalData.n_proc,
            )
            update_analysis_model_process_data(process)
            # Update process ids and reference id
            all_stack_data[process].set_selected_process_ids(
                analysis_model.selected_ids[process])
            all_stack_data[process].set_base_case(
                "", analysis_model.selected_ids[process])
            ids = [all_stack_data[process].get_base_case_id()]
            all_stack_data[process].set_flamegraph_process_ids(ids)
        analysis_data.add_data(all_stack_data[process], process)
    # Setup General plot utility
    colours = get_top_ten_colours(return_hex=False)
    analysis_model.num_custom_event_ratios = (
        GlobalData.loaded_cpu_definition.get_num_custom_event_ratios())
    centred = analysis_model.centred_scatter_plot == "centred"
    append_cluster_labels = analysis_model.flamegraph_mode == "clusters"
    event1 = analysis_model.event1
    event2 = analysis_model.event2
    log_scale = analysis_model.log_scale
    analysis_model.cluster_labels = run_analysis(analysis_data, event1, event2,
                                                 centred,
                                                 append_cluster_labels,
                                                 log_scale)
    # Prepare plots
    purge(GlobalData.local_data, ".svg")
    analysis_model.layout.reference_id = analysis_model.reference_id
    analysis_model.layout.scatter_plot = get_hotspot_scatter_plot(
        analysis_data,
        event1,
        event2,
        svgchart,
        centred,
        analysis_model.hotspots,
        log_scale,
    )
    (
        analysis_model.layout.event_totals_chart,
        analysis_model.layout.event_totals_table,
    ) = get_barchart(analysis_model.process_list, analysis_model.hotspots,
                     svgchart)
    if analysis_model.num_custom_event_ratios > 0:
        analysis_model.layout.event_ratios_chart = get_custom_barchart(
            analysis_model.process_list, svgchart)
    analysis_model.layout.flamegraph = get_flamegraph(
        analysis_data, analysis_model.process_list,
        analysis_model.flamegraph_mode)
    analysis_model.layout.show_source = len(GlobalData.hpc_results) > 0
    (
        analysis_model.layout.source_code_table,
        analysis_model.layout.source_code_info,
        analysis_model.layout.source_code_line,
    ) = get_source_code("", analysis_model.reference_id)
    # Setup general layout
    analysis_model.layout.title = "Analysis: General"
    analysis_model.layout.footer = "Loaded Results: " + " & ".join(
        analysis_model.layout.results)
    analysis_model.group_names = analysis_data.get_group_names()
    ids = all_stack_data[base_event].get_all_process_ids()
    return render_template(
        "AnalysisView.html",
        events=events,
        trace_jobs=GlobalData.trace_jobs,
        event_group_map=GlobalData.loaded_cpu_definition.get_active_event_group_map(),
        all_event_groups=GlobalData.loaded_cpu_definition.get_event_groups(),
        jobs=GlobalData.jobs,
        processes=GlobalData.processes,
        analysis_model=analysis_model,
        enabled_modes=GlobalData.enabled_modes,
        ids=ids,
        colours=colours)
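run_analysis and the cluster labels it returns are project-specific. As a loudly hypothetical stand-in, clustering points in the event1-vs-event2 plane could be sketched with scikit-learn (an assumption; the project does not necessarily use KMeans):

import numpy as np
from sklearn.cluster import KMeans

rng = np.random.default_rng(0)
points = rng.random((100, 2))            # one (event1, event2) value per stack
labels = KMeans(n_clusters=3, n_init=10).fit_predict(points)
print(labels[:10])                       # one cluster label per point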
Example #8
def update_all_charts():
    global svgchart
    global analysis_model
    analysis_type = analysis_model.analysis_type
    analysis_data = all_analysis_data[analysis_type]
    analysis_data.reset_stack_maps()
    data = request.get_json()
    run_new_analysis = False
    if "minx" in data:
        minx = data["minx"]
        maxx = data["maxx"]
        miny = data["miny"]
        maxy = data["maxy"]
        analysis_model.xlower = 0.95 * minx
        analysis_model.xupper = 1.05 * maxx
        analysis_model.ylower = 0.95 * miny
        analysis_model.yupper = 1.05 * maxy
    if "text_filter" in data:
        match = data["text_filter"]
        if re.match(".*\[\[cluster", match):
            match = match.rpartition("[[cluster")[0]
        analysis_model.text_filter = match
    if "new_ref_id" in data:  # Add reference id if not already in flamegraph_ids
        analysis_model.reference_id = data["new_ref_id"]
        for process in analysis_model.process_list:
            old_ids = all_stack_data[process].get_flamegraph_process_ids()
            ids = []
            add_id = True
            for process_id in old_ids:
                if process_id.label == analysis_model.reference_id:
                    add_id = False
                    analysis_model.flamegraph_event_type = process_id.event_type
                ids.append(process_id)
            if add_id:
                for process_id in analysis_model.selected_ids[process]:
                    if process_id.label == analysis_model.reference_id:
                        ids.append(process_id)
                        analysis_model.flamegraph_event_type = process_id.event_type
            all_stack_data[process].set_flamegraph_process_ids(ids)
        analysis_model.reference_event = get_event(analysis_model.reference_id)
        analysis_model.reference_job = get_job(analysis_model.reference_id)
        analysis_model.reference_process = get_process(
            analysis_model.reference_id)
        analysis_model.reference_pid = get_pid(analysis_model.reference_id)
        analysis_model.reference_tid = get_tid(analysis_model.reference_id)
        analysis_model.base_event_reference_id = make_label(
            analysis_model.reference_job,
            analysis_model.reference_process,
            analysis_model.base_event,
            analysis_model.reference_pid,
            analysis_model.reference_tid,
        )
    if "process_ids" in data:
        base_event = analysis_model.base_event
        ids = all_stack_data[base_event].get_all_process_ids()
        analysis_model.base_event_selected_ids = []
        for process_id in ids:
            if process_id.label in data["process_ids"]:
                analysis_model.base_event_selected_ids.append(process_id)
        analysis_model.process_list = []
        for job in GlobalData.processes:
            for process in GlobalData.processes[job]:
                for process_id in analysis_model.base_event_selected_ids:
                    if job == process_id.job and process == process_id.process_name:
                        analysis_model.process_list.append(job + "_" + process)
                        break
        run_new_analysis = True
    if "base_event_reference_id" in data:
        analysis_model.base_event_reference_id = data[
            "base_event_reference_id"]
        analysis_model.reference_job = get_job(
            analysis_model.base_event_reference_id)
        analysis_model.reference_process = get_process(
            analysis_model.base_event_reference_id)
        analysis_model.reference_pid = get_pid(
            analysis_model.base_event_reference_id)
        analysis_model.reference_tid = get_tid(
            analysis_model.base_event_reference_id)
    if "selected_clusters" in data:
        nc = int(data["num_clusters"])
        analysis_model.clusters = [str(i) for i in range(0, nc)]
        analysis_model.selected_clusters = []
        for cluster in analysis_model.clusters:
            if cluster in data["selected_clusters"]:
                analysis_model.selected_clusters.append(cluster)
    if "selected_events" in data:
        analysis_model.selected_events = []
        for event in GlobalData.loaded_cpu_definition.get_active_events():
            if event in data["selected_events"]:
                analysis_model.selected_events.append(event)
        analysis_model.process_list = []
        for job in GlobalData.processes:
            for process in GlobalData.processes[job]:
                for process_id in analysis_model.base_event_selected_ids:
                    if job == process_id.job and process == process_id.process_name:
                        analysis_model.process_list.append(job + "_" + process)
                        break
    if "reset_filters" in data:
        run_new_analysis = True
    if "direction" in data:
        if data["direction"] == "next":
            analysis_model.hotspots += 10
        else:
            if analysis_model.hotspots > 10:
                analysis_model.hotspots -= 10
    for process in analysis_model.process_list:
        if process in all_stack_data:
            update_analysis_model_process_data(process)
            all_stack_data[process].read_data(
                start=all_stack_data[process].start,
                stop=all_stack_data[process].stop,
                text_filter=analysis_model.text_filter,
                selected_ids=analysis_model.selected_ids[process],
                base_case=analysis_model.reference_id,
            )
        else:
            all_stack_data[process] = StackData.create_process_data(
                GlobalData.results_files,
                GlobalData.local_data,
                GlobalData.loaded_cpu_definition,
                data_id=process,
                debug=GlobalData.debug,
                n_proc=GlobalData.n_proc,
            )
            update_analysis_model_process_data(process)
            # Update process ids and reference id
            all_stack_data[process].set_selected_process_ids(
                analysis_model.selected_ids[process])
            all_stack_data[process].set_base_case(
                "", analysis_model.selected_ids[process])
            ids = [all_stack_data[process].get_base_case_id()]
            all_stack_data[process].set_flamegraph_process_ids(ids)
        analysis_data.add_data(all_stack_data[process], process)
    purge(GlobalData.local_data, ".svg")
    event1 = analysis_model.event1
    event2 = analysis_model.event2
    raw_event1 = event_to_raw_event(analysis_model.event1,
                                    GlobalData.loaded_cpu_definition)
    raw_event2 = event_to_raw_event(analysis_model.event2,
                                    GlobalData.loaded_cpu_definition)
    centred = analysis_model.centred_scatter_plot == "centred"
    append_cluster_labels = analysis_model.flamegraph_mode == "clusters"
    log_scale = analysis_model.log_scale
    xlower = analysis_model.xlower
    xupper = analysis_model.xupper
    ylower = analysis_model.ylower
    yupper = analysis_model.yupper
    if run_new_analysis:
        analysis_model.cluster_labels = run_analysis(analysis_data, event1,
                                                     event2, centred,
                                                     append_cluster_labels,
                                                     log_scale)
    else:
        analysis_model.cluster_labels = analysis_data.get_cluster_labels()
        analysis_data.make_stack_map(
            [int(i) for i in analysis_model.selected_clusters],
            append_cluster_labels=append_cluster_labels,
            event1=raw_event1,
            event2=raw_event2,
            xlower=xlower,
            xupper=xupper,
            ylower=ylower,
            yupper=yupper,
        )
    if analysis_model.scatter_plot_type == "clusters":
        analysis_model.layout.scatter_plot = get_cluster_plot(
            analysis_data,
            event1,
            event2,
            svgchart,
            centred,
            log_scale,
            xlower=xlower,
            xupper=xupper,
            ylower=ylower,
            yupper=yupper,
        )
    else:
        analysis_model.layout.scatter_plot = get_hotspot_scatter_plot(
            analysis_data,
            event1,
            event2,
            svgchart,
            centred,
            analysis_model.hotspots,
            log_scale,
            xlower=xlower,
            xupper=xupper,
            ylower=ylower,
            yupper=yupper,
        )
    (
        analysis_model.layout.event_totals_chart,
        analysis_model.layout.event_totals_table,
    ) = get_barchart(analysis_model.process_list, analysis_model.hotspots,
                     svgchart)
    analysis_model.layout.flamegraph = get_flamegraph(
        analysis_data,
        analysis_model.process_list,
        analysis_model.flamegraph_mode,
        flamegraph_event_type=analysis_model.flamegraph_event_type,
    )
    if analysis_model.num_custom_event_ratios > 0:
        analysis_model.layout.event_ratios_chart = get_custom_barchart(
            analysis_model.process_list, svgchart)
    analysis_model.layout.reference_id = analysis_model.reference_id
    analysis_model.layout.reference_event = analysis_model.reference_event
    analysis_model.layout.base_event_reference_id = (
        analysis_model.base_event_reference_id)
    analysis_model.layout.event_name_ref = replace_operators(
        analysis_model.reference_event)
    analysis_model.layout.text_filter = analysis_model.text_filter
    analysis_model.layout.group_names = analysis_data.get_group_names()
    return jsonify(analysis_model.layout.to_dict())
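One detail worth calling out from the handler above: when the client sends a zoom box (minx through maxy), the bounds are padded by 5% on each side before the stack map is rebuilt. A worked sketch of that arithmetic (the multiplicative padding presumes non-negative coordinates, which holds for event counts):

def padded_bounds(minx, maxx, miny, maxy):
    # Widen the selection 5% per side, as in update_all_charts above.
    return 0.95 * minx, 1.05 * maxx, 0.95 * miny, 1.05 * maxy

print(padded_bounds(100.0, 200.0, 10.0, 20.0))
# (95.0, 210.0, 9.5, 21.0)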