def create_partial_graph(self, rows, cols):
    """Create the graph's data structure for the partial view.

    The structure is saved into the instance attribute __graph_data and then
    persisted with profiles.store_profile_at under 'partial.graf'.

    Arguments:
        rows(int): total number of the graph's rows
        cols(int): total number of the graph's columns
    """
    # clearing old data
    self.__graph_data = []
    # calculating total number of the bars in screen
    bars = cols // self.BAR_SPACE
    field_size = self.__get_field_size(rows)

    bars_cnt = 0
    for i, snap in enumerate(self.__heap['snapshots']):
        # heap representation transforming into graph data;
        # only snapshots from the current one onwards contribute
        if self.__current_snap > i + 1:
            continue
        bars_cnt += 1
        if bars_cnt > bars:
            # screen is full — the remaining snapshots can never be shown,
            # so there is no point in iterating further
            break
        data = {
            'fields': int(math.ceil(snap.get('sum_amount', 0) / field_size)),
            'time': snap['time'],
            'snapshot': i + 1,
        }
        if 'sum_amount' in snap:
            data['peak'] = snap['sum_amount'] == self.__peak
        else:
            # fix: was the int 0, which made 'peak' bool in one branch and
            # int in the other; keep the field consistently a bool
            data['peak'] = False
        self.__graph_data.append(data)

    profiles.store_profile_at(self.__graph_data, 'partial.graf')
def create_global_graph(self, rows, cols):
    """Create the graph's data structure for the global view.

    The structure is saved into the instance attribute __graph_data and then
    persisted with profiles.store_profile_at under 'complete.graf'. When there
    are more snapshots than available bars, consecutive snapshots are
    aggregated so that one bar represents the average of approx_bars snapshots.

    Arguments:
        rows(int): total number of the graph's rows
        cols(int): total number of the graph's columns
    """
    # clearing old data
    self.__graph_data = []
    # calculating total number of the bars in screen
    bars = cols // self.BAR_SPACE
    field_size = self.__get_field_size(rows)

    snapshots = len(self.__heap['snapshots'])
    if snapshots >= bars:
        # more snapshots than bars: aggregate approx_bars snapshots per bar
        approx_bars = int(math.ceil(snapshots / bars))
        bars_cnt = 0
        was_peak = False
        avg_sum = 0
        # heap representation transforming into graph data
        for i, snap in enumerate(self.__heap['snapshots']):
            bars_cnt += 1
            avg_sum += snap.get('sum_amount', 0)
            if 'sum_amount' in snap and snap['sum_amount'] == self.__peak:
                was_peak = True
            if bars_cnt == approx_bars:
                data = {
                    # number of the fields is average of the approximated bars
                    'fields': int(math.ceil(avg_sum / field_size / approx_bars)),
                    'time': snap['time'],
                    'snapshot': i + 1,
                    'peak': was_peak,
                }
                self.__graph_data.append(data)
                bars_cnt = 0
                avg_sum = 0
                was_peak = False
        # NOTE(review): a trailing group smaller than approx_bars is silently
        # dropped here — presumably intentional truncation, TODO confirm
    else:
        for i, snap in enumerate(self.__heap['snapshots']):
            # heap representation transforming into graph data
            data = {
                # fix: the lookup previously had no default, so a snapshot
                # without 'sum_amount' raised TypeError (None / field_size);
                # default to 0 like the aggregated branch and the partial view
                'fields': int(math.ceil(snap.get('sum_amount', 0) / field_size)),
                'time': snap['time'],
                'snapshot': i + 1,
            }
            if 'sum_amount' in snap:
                data['peak'] = snap['sum_amount'] == self.__peak
            else:
                # fix: was the int 0; keep 'peak' consistently a bool
                data['peak'] = False
            self.__graph_data.append(data)

    profiles.store_profile_at(self.__graph_data, 'complete.graf')
def store_generated_profile(pcs, prof, job):
    """Stores the generated profile in the pending jobs directory.

    Arguments:
        pcs(PCS): object with performance control system wrapper
        prof(dict): profile that we are storing in the repository
        job(Job): job with additional information about generated profiles
    """
    # Finalize the raw profile and derive its canonical file name
    finalized = profile.finalize_profile_for_job(pcs, prof, job)
    target_name = profile.generate_profile_name(job)
    # Persist it inside the pending jobs directory of the wrapper
    target_path = os.path.join(pcs.get_job_directory(), target_name)
    profile.store_profile_at(finalized, target_path)
    log.info("stored profile at: {}".format(os.path.relpath(target_path)))
def prepare_profile(dest_dir, profile, origin):
    """Copy the profile into the destination directory and stamp its origin.

    Arguments:
        dest_dir(str): destination of the prepared profile
        profile(str): name of the profile that is going to be stored in pending jobs
        origin(str): origin minor version for the given profile
    """
    # Copy to jobs and prepare origin for the current version
    shutil.copy2(profile, dest_dir)

    # Prepare origin for the current version: reload the copy, record where
    # it came from, and write it back in place
    basename = os.path.split(profile)[-1]
    target = os.path.join(dest_dir, basename)
    loaded = perun_profile.load_profile_from_file(target, is_raw_profile=True)
    loaded['origin'] = origin
    perun_profile.store_profile_at(loaded, target)
    # carry the original file's stat metadata over to the copy
    shutil.copystat(profile, target)
    return target
def prepare_profile(perun, profile, origin):
    """Prepare the given profile in the pending jobs directory.

    Copies the profile into the pending jobs directory of the control system
    wrapper, stamps it with its origin minor version, and preserves the
    source file's stat metadata on the copy.

    Arguments:
        perun(PCS): perun control system wrapper
        profile(str): name of the profile that is going to be stored in pending jobs
        origin(str): origin minor version for the given profile

    Returns:
        str: path of the copied, origin-stamped profile
    """
    # Copy to jobs and prepare origin for the current version
    dest_dir = perun.get_job_directory()
    shutil.copy2(profile, dest_dir)

    # Prepare origin for the current version
    copied_filename = os.path.join(dest_dir, os.path.split(profile)[-1])
    copied_profile = perun_profile.load_profile_from_file(
        copied_filename, is_raw_profile=True)
    copied_profile['origin'] = origin
    perun_profile.store_profile_at(copied_profile, copied_filename)
    shutil.copystat(profile, copied_filename)
    return copied_filename
def store_generated_profile(prof, job):
    """Stores the generated profile in the pending jobs directory.

    :param dict prof: profile that we are storing in the repository
    :param Job job: job with additional information about generated profiles
    """
    # Finalize the profile and write it into the pending jobs directory
    finalized = profile.finalize_profile_for_job(prof, job)
    out_path = os.path.join(
        pcs.get_job_directory(), profile.generate_profile_name(finalized))
    profile.store_profile_at(finalized, out_path)
    log.info("stored profile at: {}".format(os.path.relpath(out_path)))

    # Optionally register the stored profile right away (config-driven)
    register = str(
        config.lookup_key_recursively("profiles.register_after_run", "false"))
    if dutils.strtobool(register):
        # We either store the profile according to the origin, or we use the current head
        target_minor = prof.get('origin', vcs.get_minor_head())
        commands.add([out_path], target_minor, keep_profile=False)