def retry_crashed(pathserv, max_retries=5):
    """Re-run crashed samples until none crash or max_retries is exhausted.

    Resumes from a previously persisted "crashes_light" state file when one
    exists; otherwise seeds the candidate set from the full session playlist.
    After every pass the cumulative state (total_retries, sometimes_crashed,
    still_crashing) is pickled back so the run can be resumed later.
    """
    crashes_light_file = pathserv.special_raw_filename("crashes_light")
    if os.path.isfile(crashes_light_file):
        # Resume an interrupted retry session from its saved state.
        total_retries, sometimes_crashed, still_crashing = fs.pickle_load(
            crashes_light_file)
        samples_to_consider = still_crashing
    else:
        total_retries, sometimes_crashed = 0, set()
        playlist_gen = pathserv.session_playlist_generator()
        samples_to_consider = [s for s, f in playlist_gen]

    cnt_retries = 0
    while True:
        still_crashing = {s for s in samples_to_consider
                          if sample_crashed(pathserv, s)}
        sometimes_crashed |= still_crashing
        # Persist progress so a later invocation can resume from here.
        fs.pickle_save((total_retries, sometimes_crashed, still_crashing),
                       crashes_light_file)
        if cnt_retries >= max_retries or not still_crashing:
            break
        samples_to_consider = still_crashing
        playlist = pathserv.playlist_generator_from_samples_iterable(
            samples_to_consider)
        playmany.core_play_many(pathserv, playlist)
        cnt_retries += 1
        total_retries += 1
def bokeh_render_timeline(pathserv, sample):
    """Render a sample's postprocessed timeline as an HTML plot.

    Builds the pickled timeline on demand when it is not already cached,
    then unpacks it and writes the plot next to the other reports.
    """
    infile = pathserv.report_filename(sample, "timeline.pkl", False)
    if not os.path.isfile(infile):
        # The .pkl timeline has not been generated yet; build it first.
        timeline.save_timeline(pathserv, sample, ".pkl")
    info = unpack_timeline(fs.pickle_load(infile))
    outfile = pathserv.report_filename(sample, "timeline.html", False)
    make_plot(info, outfile)
def save_timeline(pathserv, sample, output_format):
    """Build a sample's postprocessed timeline and save it to disk.

    Args:
        pathserv: path service used to locate the debug recording and the
            output report file.
        sample: sample identifier.
        output_format: ".pkl" (supported) or ".csv" (not implemented yet).

    Raises:
        NotImplementedError: for the ".csv" output format.
        mpexceptions.ExceptionUnknownOutputFormat: for any other format.
    """
    dbg_file = pathserv.dbg_filename(sample)
    recorded_events = fs.pickle_load(dbg_file)
    tm = ins.TimelineBuilder(recorded_events, events_definition=ins.mp_events)
    timeline = tm.get_timeline()
    v = ins.timeline_postprocessing(timeline)
    if output_format == ".pkl":
        outfile = pathserv.report_filename(sample, "timeline.pkl", False)
        fs.pickle_save(v, outfile)
    elif output_format == ".csv":
        outfile = pathserv.report_filename(sample, "timeline.csv", False)
        #? todo: investigate packing multiple tables
        # Bug fix: `raise NotImplemented` raises the NotImplemented constant,
        # which is a TypeError in Python 3; NotImplementedError is intended.
        raise NotImplementedError("csv timeline export is not implemented")
    else:
        raise mpexceptions.ExceptionUnknownOutputFormat(output_format)
def count_defects(self, sample):
    """Classify a sample as "no_dbg", "crash", "perfect" or "non_perfect".

    Updates the bookkeeping sets on self (no_dbg_samples, crashed_samples),
    stores (sample, classification, counters) in self.last_count and
    returns the classification string. counters is None when no debug
    recording exists for the sample.
    """
    dbg_file = self.pathserv.dbg_filename(sample)
    if not os.path.isfile(dbg_file):
        # No debug recording was produced for this sample.
        self.no_dbg_samples.add(sample)
        classifies_as, counters = "no_dbg", None
    else:
        log_entries = fs.pickle_load(dbg_file)
        counters = self.fn_count_bads(log_entries)
        if counters["crash"]:
            self.crashed_samples.add(sample)
            classifies_as = "crash"
        else:
            is_perfect = all(counters[key] == 0 for key in counters)
            classifies_as = "perfect" if is_perfect else "non_perfect"
    self.last_count = sample, classifies_as, counters
    return classifies_as
def sample_crashed(pathserv, sample):
    """Return True when the sample's recorded debug log shows a crash."""
    log_entries = fs.pickle_load(pathserv.dbg_filename(sample))
    return ins.crash_detected(log_entries)
def report_sample(pathserv, sample, report_name):
    """Generate the named text report for a sample and save it to disk."""
    log_entries = fs.pickle_load(pathserv.dbg_filename(sample))
    render_report = reports.report_by_name(report_name)
    outfile = pathserv.report_filename(sample, report_name)
    fs.txt_save(render_report(log_entries), outfile)