def analyze(self):
    # Restore the last known-good configuration found during the tuning scan:
    # global threshold (Vthin_AltFine), per-pixel TDAC and the enable mask.
    self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold[self.increase_threshold])
    self.register.set_pixel_register_value('TDAC', self.last_good_tdac[self.increase_threshold])
    self.register.set_pixel_register_value('Enable', self.last_good_enable_mask[0])  # use enable mask from the lowest point to mask bad pixels
    # write configuration to avoid high current states
    commands = []
    commands.extend(self.register.get_commands("ConfMode"))
    commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltFine"]))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
    self.register_utils.send_commands(commands)
    # Interpret the recorded raw data and plot the intermediate/final masks.
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        # Occupancy and mask snapshots taken at the selected scan step.
        plot_occupancy(self.last_occupancy_hist[self.increase_threshold].T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_hist[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.last_occupancy_mask[self.increase_threshold].T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_mask[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
        # TDAC distribution (5-bit DAC, values 0..31).
        plot_three_way(self.last_good_tdac[self.increase_threshold].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
        plot_occupancy(self.last_good_tdac[self.increase_threshold].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=31, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.last_good_enable_mask[self.increase_threshold].T, title='Intermediate Enable Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_good_enable_mask[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
        # Index 0 is the enable mask from the lowest point (see register write above).
        plot_occupancy(self.last_good_enable_mask[0].T, title='Final Enable Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[0], self.last_step[0]), z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_good_enable_mask[0].T, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Interpret the recorded raw data and plot/print the interpreter results."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.interpreter.set_warning_output(True)
        analysis.create_tot_hist = False
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
def analyze_raw_data(input_files, output_file_hits, interpreter_plots, overwrite_output_files, align_at_trigger=True, align_at_tdc=False, use_tdc_trigger_time_stamp=False, max_tdc_delay=80, interpreter_warnings=False):
    """Interpret FE-I4 raw data files with TDC histogramming and clustering enabled.

    Skips the analysis when the output file already exists and
    overwrite_output_files is False. The enable mask of the first raw data
    file is copied into the ClusterHits table attributes for the later TDC
    analysis step.

    Parameters
    ----------
    input_files : list of raw data file names
    output_file_hits : analyzed (interpreted) output file name
    interpreter_plots : bool, plot all activated histograms into one pdf
    overwrite_output_files : bool, redo analysis even if output exists
    """
    logging.info('Analyze the raw FE data given in ' + str(len(input_files)) + ' files and store the needed data')
    if os.path.isfile(output_file_hits) and not overwrite_output_files:  # skip analysis if already done
        logging.info('Analyzed data file ' + output_file_hits + ' already exists. Skip analysis for this file.')
    else:
        with AnalyzeRawData(raw_data_file=input_files, analyzed_data_file=output_file_hits) as analyze_raw_data:
            analyze_raw_data.max_tdc_delay = max_tdc_delay  # max TDC delay to consider a valid in-time TDC word
            analyze_raw_data.use_tdc_trigger_time_stamp = use_tdc_trigger_time_stamp  # if you want to also measure the delay between trigger / hit-bus
            analyze_raw_data.align_at_trigger = align_at_trigger  # align events at trigger words
            analyze_raw_data.align_at_tdc = align_at_tdc  # align events at TDC words, first word of event has to be a tdc word
            analyze_raw_data.create_tdc_counter_hist = True  # create a histogram for all TDC words
            analyze_raw_data.create_tdc_hist = True  # histogram the hit TDC information
            analyze_raw_data.create_tdc_pixel_hist = True
            analyze_raw_data.create_tot_pixel_hist = True
            analyze_raw_data.create_cluster_hit_table = True  # enables the creation of a table with all cluster hits, std. setting is false
            analyze_raw_data.create_source_scan_hist = True  # create source scan hists
            analyze_raw_data.create_cluster_size_hist = True  # enables cluster size histogramming, can save some time, std. setting is false
            analyze_raw_data.create_cluster_tot_hist = True  # enables cluster ToT histogramming per cluster size, std. setting is false
            analyze_raw_data.interpreter.set_warning_output(interpreter_warnings)  # std. setting is True
            analyze_raw_data.interpret_word_table()  # the actual start conversion command
            analyze_raw_data.interpreter.print_summary()  # prints the interpreter summary
            # Store the enabled pixels for good pixel selection in the TDC analysis step.
            with tb.open_file(analyze_raw_data._analyzed_data_file, 'r+') as out_file_h5:
                # Use first raw data file to extract the enable mask.
                # FIX: dict.items() returns a non-subscriptable view on Python 3;
                # materialize it so indexing works on both Python 2 and 3.
                with tb.open_file(list(analyze_raw_data.files_dict.items())[0][0]) as in_file_h5:
                    out_file_h5.root.ClusterHits.attrs.enabled_pixels = in_file_h5.root.configuration.Enable[:]
            if interpreter_plots:
                analyze_raw_data.plot_histograms()  # plots all activated histograms into one pdf
def analyze_raw_data_per_scan_parameter(input_file, output_file_hits, scan_data_filename, scan_parameters=None):
    """Interpret raw data and plot hit histograms separately for each scan parameter step.

    Parameters
    ----------
    input_file : raw data file name
    output_file_hits : analyzed output file name
    scan_data_filename : base file name for the per-step plot output
    scan_parameters : list of scan parameter names, defaults to ['PlsrDAC']
    """
    # FIX: avoid a mutable default argument; the effective default is unchanged.
    if scan_parameters is None:
        scan_parameters = ['PlsrDAC']
    with AnalyzeRawData(raw_data_file=input_file, analyzed_data_file=output_file_hits) as analyze_raw_data:
        analyze_raw_data.create_hit_table = True  # can be set to false to omit hit table creation, std. setting is false
        analyze_raw_data.create_tot_hist = True  # creates a ToT histogram
        for data_one_step, one_step_parameter in analyze_hits_per_scan_parameter(analyze_data=analyze_raw_data, scan_parameters=scan_parameters):
            data_one_step.plot_histograms(scan_data_filename + '_' + one_step_parameter, create_hit_hists_only=True)
def analyze_raw_data(input_files, output_file_hits, interpreter_plots, overwrite_output_files, pdf_filename):
    # Interpret raw FE data with event alignment at TDC words and TDC/ToT
    # histogramming enabled. Skips the analysis when the output already exists.
    # NOTE(review): pdf_filename is accepted but never used in this body — confirm
    # whether plot_histograms should receive it.
    # Reads 'interpreter_warnings' from the module-level analysis_configuration dict.
    logging.info('Analyze the raw FE data given in ' + str(len(input_files)) + ' files and store the needed data')
    if os.path.isfile(output_file_hits) and not overwrite_output_files:  # skip analysis if already done
        logging.info('Analyzed data file ' + output_file_hits + ' already exists. Skip analysis for this file.')
    else:
        with AnalyzeRawData(raw_data_file=input_files, analyzed_data_file=output_file_hits) as analyze_raw_data:
            # analyze_raw_data.interpreter.debug_events(3645978, 3645978, True)
            # analyze_raw_data.interpreter.debug_events(100, 110, True)
            # analyze_raw_data.use_tdc_trigger_time_stamp = True  # if you want to also measure the delay between trigger / hit-bus
            # analyze_raw_data.max_tdc_delay = 80
            analyze_raw_data.align_at_trigger = False  # do NOT align events at trigger words here
            analyze_raw_data.align_at_tdc = True  # align events at TDC words, first word of event has to be a tdc word
            analyze_raw_data.create_tdc_counter_hist = True  # create a histogram for all TDC words
            analyze_raw_data.create_tdc_hist = True  # histogram the hit TDC information
            analyze_raw_data.create_tdc_pixel_hist = True
            analyze_raw_data.create_tot_pixel_hist = True
            analyze_raw_data.create_cluster_hit_table = True  # enables the creation of a table with all cluster hits, std. setting is false
            analyze_raw_data.create_source_scan_hist = True  # create source scan hists
            analyze_raw_data.create_cluster_size_hist = True  # enables cluster size histogramming, can save some time, std. setting is false
            analyze_raw_data.create_cluster_tot_hist = True  # enables cluster ToT histogramming per cluster size, std. setting is false
            analyze_raw_data.interpreter.set_warning_output(analysis_configuration['interpreter_warnings'])  # std. setting is True
            analyze_raw_data.clusterizer.set_warning_output(analysis_configuration['interpreter_warnings'])  # std. setting is True
            analyze_raw_data.interpreter.print_status()
            analyze_raw_data.interpret_word_table()  # the actual start conversion command
            analyze_raw_data.interpreter.print_summary()  # prints the interpreter summary
            if interpreter_plots:
                analyze_raw_data.plot_histograms()  # plots all activated histograms into one pdf
def analyze_raw_data(input_files, output_file_hits, scan_parameter):
    # Interpret raw FE data recorded with a scan parameter; produces hit and
    # cluster tables plus source-scan histograms. Skips the analysis when the
    # output exists and analysis_configuration['overwrite_output_files'] is False.
    logging.info('Analyze the raw FE data given in ' + str(len(input_files)) + ' files and store the needed data')
    if os.path.isfile(output_file_hits) and not analysis_configuration['overwrite_output_files']:  # skip analysis if already done
        logging.warning('Analyzed data file ' + output_file_hits + ' already exists. Skip analysis for this file.')
    else:
        with AnalyzeRawData(raw_data_file=input_files, analyzed_data_file=output_file_hits, scan_parameter_name=scan_parameter) as analyze_raw_data:
            analyze_raw_data.create_hit_table = True  # can be set to false to omit hit table creation, std. setting is false
            analyze_raw_data.create_cluster_table = True  # enables the creation of a table with all clusters, std. setting is false
            analyze_raw_data.create_source_scan_hist = True  # create source scan hists
            analyze_raw_data.create_cluster_size_hist = True  # enables cluster size histogramming, can save some time, std. setting is false
            analyze_raw_data.create_cluster_tot_hist = True  # enables cluster ToT histogramming per cluster size, std. setting is false
            analyze_raw_data.interpreter.set_warning_output(analysis_configuration['interpreter_warnings'])  # std. setting is True
            analyze_raw_data.clusterizer.set_warning_output(analysis_configuration['interpreter_warnings'])  # std. setting is True
            analyze_raw_data.interpreter.debug_events(0, 10, False)  # events to be printed onto the console for debugging, usually deactivated
            analyze_raw_data.interpret_word_table()  # the actual start conversion command
            analyze_raw_data.interpreter.print_summary()  # prints the interpreter summary
            analyze_raw_data.plot_histograms()  # plots all activated histograms into one pdf
def analyze(self):
    # Fit per-pixel thresholds from the scan data and derive the per-double-column
    # pulser correction, stored in the register calibration parameters.
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_tot_hist = False
        analyze_raw_data.create_threshold_hists = True
        analyze_raw_data.create_fitted_threshold_hists = True
        analyze_raw_data.create_threshold_mask = True
        analyze_raw_data.n_injections = 100
        analyze_raw_data.interpreter.set_warning_output(False)  # so far the data structure in a threshold scan was always bad, too many warnings given
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            thr = out_file_h5.root.HistThresholdFitted[:]
            # Mask pixels whose fit failed (threshold ~ 0) so they do not bias the mean.
            thr_masked = np.ma.masked_where(np.isclose(thr, 0), thr)
            # Mean threshold per column pair (double column), skipping the edge columns.
            # NOTE(review): 38 pairs over columns [1:77] — assumes the outermost
            # columns are excluded on purpose; confirm against the FE-I4 geometry.
            corr = [thr_masked[:, i * 2 + 1:i * 2 + 3].mean() for i in range(0, 38)]
            corr = np.array(corr)
            corr -= corr.min()  # correction is relative to the best column pair
            # corr = np.around(corr).astype(int)
            # Select the calibration slot matching the active injection capacitors.
            if "C_High".lower() in map(lambda x: x.lower(), self.enable_shift_masks) and "C_Low".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
                self.register.calibration_parameters['Pulser_Corr_C_Inj_High'] = list(corr)
            elif "C_High".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
                self.register.calibration_parameters['Pulser_Corr_C_Inj_Med'] = list(corr)
            elif "C_Low".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
                self.register.calibration_parameters['Pulser_Corr_C_Inj_Low'] = list(corr)
            else:
                raise ValueError('Unknown C_Inj')
def analyze_raw_data(input_file):  # FE-I4 raw data analysis
    '''Std. raw data analysis of FE-I4 data. A hit table is created for further analysis.

    Parameters
    ----------
    input_file : pytables file
    '''
    with AnalyzeRawData(raw_data_file=input_file, create_pdf=True) as analyze_raw_data:
        # analyze_raw_data.align_at_trigger_number = True  # if trigger number is at the beginning of each event activate this for event alignment
        analyze_raw_data.use_trigger_time_stamp = False  # the trigger number is a time stamp
        analyze_raw_data.use_tdc_word = False
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.create_meta_event_index = True
        analyze_raw_data.create_trigger_error_hist = True
        analyze_raw_data.create_rel_bcid_hist = True
        analyze_raw_data.create_error_hist = True
        analyze_raw_data.create_service_record_hist = True
        analyze_raw_data.create_occupancy_hist = True
        analyze_raw_data.create_tot_hist = False
        # analyze_raw_data.n_bcid = 16
        # analyze_raw_data.max_tot_value = 13
        analyze_raw_data.interpreter.create_empty_event_hits(False)
        # analyze_raw_data.interpreter.set_debug_output(False)
        # analyze_raw_data.interpreter.set_info_output(False)
        analyze_raw_data.interpreter.set_warning_output(False)
        # analyze_raw_data.interpreter.debug_events(0, 1, True)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
def analyze(self):
    """Interpret the raw data, build a stuck-pixel mask from the occupancy and
    update the configured pixel registers accordingly.

    Pixels that registered fewer hits than the number of injections are flagged
    as stuck (mask value 1). The flagged pixels are removed from the registers
    in self.disable_for_mask and added to the registers in self.enable_for_mask;
    with self.overwrite_mask the registers are replaced instead of combined.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.create_tot_hist = False
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        # occ_hist = make_occupancy_hist(*convert_data_array(data_array_from_data_dict_iterable(self.fifo_readout.data), filter_func=is_data_record, converter_func=get_col_row_array_from_data_record_array)).T
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
        self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # stuck pixels (fewer hits than injections) are set to 1
        self.occ_mask[occ_hist < self.n_injections] = 1
        # make inverse
        self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
        # FIX: removed the no-op self-assignments of disable_for_mask / enable_for_mask.
        if self.overwrite_mask:
            for mask in self.disable_for_mask:
                self.register.set_pixel_register_value(mask, self.inv_occ_mask)
        else:
            for mask in self.disable_for_mask:
                enable_mask = np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, enable_mask)
        if self.overwrite_mask:
            for mask in self.enable_for_mask:
                self.register.set_pixel_register_value(mask, self.occ_mask)
        else:
            for mask in self.enable_for_mask:
                disable_mask = np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, disable_mask)
        plot_occupancy(self.occ_mask.T, title='Stuck Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze(self):
    # Interpret the raw data, build a noisy-pixel mask from the occupancy and
    # fold it into the configured pixel registers.
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.create_hit_table = False
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
        self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # noisy pixels (occupancy above the absolute limit) are set to 1
        self.occ_mask[occ_hist > self.abs_occ_limit] = 1
        # make inverse
        self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
        # Remove noisy pixels from the disable-registers (or overwrite them entirely).
        if self.overwrite_mask:
            for mask in self.disable_for_mask:
                self.register.set_pixel_register_value(mask, self.inv_occ_mask)
        else:
            for mask in self.disable_for_mask:
                enable_mask = np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, enable_mask)
        # Add noisy pixels to the enable-registers (or overwrite them entirely).
        if self.overwrite_mask:
            for mask in self.enable_for_mask:
                self.register.set_pixel_register_value(mask, self.occ_mask)
        else:
            for mask in self.enable_for_mask:
                disable_mask = np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, disable_mask)
        plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze_hits(input_file, output_file_hits, scan_data_filename, output_file_hits_analyzed=None):
    """Interpret raw data with clustering enabled and plot the histograms."""
    cluster_flags = (
        'create_source_scan_hist',
        'create_cluster_hit_table',
        'create_cluster_table',
        'create_cluster_size_hist',
        'create_cluster_tot_hist',
    )
    with AnalyzeRawData(raw_data_file=input_file, analyzed_data_file=output_file_hits) as analysis:
        for flag in cluster_flags:
            setattr(analysis, flag, True)
        analysis.analyze_hit_table(analyzed_data_out_file=output_file_hits_analyzed)
        analysis.plot_histograms(scan_data_filename=scan_data_filename, analyzed_data_file=output_file_hits_analyzed)
def setUpClass(cls):
    # Prepare all interpreted/analyzed fixture files once for the test class.
    cls.interpreter = PyDataInterpreter()
    cls.histogram = PyDataHistograming()
    cls.clusterizer = PyDataClusterizer()
    # Digital scan: interpret raw data with full hit/cluster output.
    with AnalyzeRawData(raw_data_file=tests_data_folder + 'unit_test_data_1.h5', analyzed_data_file=tests_data_folder + 'unit_test_data_1_interpreted.h5', create_pdf=False) as analyze_raw_data:  # analyze the digital scan raw data, do not show any feedback (no prints to console, no plots)
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_hit_table = True  # can be set to false to omit hit table creation, std. setting is false
        analyze_raw_data.create_cluster_hit_table = True  # adds the cluster id and seed info to each hit, std. setting is false
        analyze_raw_data.create_cluster_table = True  # enables the creation of a table with all clusters, std. setting is false
        analyze_raw_data.create_trigger_error_hist = True  # creates a histogram summing up the trigger errors
        analyze_raw_data.create_cluster_size_hist = True  # enables cluster size histogramming, can save some time, std. setting is false
        analyze_raw_data.create_cluster_tot_hist = True  # enables cluster ToT histogramming per cluster size, std. setting is false
        analyze_raw_data.create_meta_word_index = True  # stores the start and stop raw data word index for every event, std. setting is false
        analyze_raw_data.create_meta_event_index = True  # stores the event number for each readout in an additional meta data array, default: False
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
    # Fast threshold scan: interpret raw data with threshold histograms.
    with AnalyzeRawData(raw_data_file=tests_data_folder + 'unit_test_data_2.h5', analyzed_data_file=tests_data_folder + 'unit_test_data_2_interpreted.h5', create_pdf=False) as analyze_raw_data:  # analyze the fast threshold scan raw data, do not show any feedback (no prints to console, no plots)
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_threshold_hists = True  # makes only sense if threshold scan data is analyzed, std. setting is false
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
    # Re-analyze the already interpreted digital scan hits (raw_data_file=None).
    with AnalyzeRawData(raw_data_file=None, analyzed_data_file=tests_data_folder + 'unit_test_data_1_interpreted.h5', create_pdf=False) as analyze_raw_data:  # analyze the digital scan hit data, do not show any feedback (no prints to console, no plots)
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_cluster_hit_table = True
        analyze_raw_data.create_cluster_table = True
        analyze_raw_data.create_cluster_size_hist = True
        analyze_raw_data.create_cluster_tot_hist = True
        analyze_raw_data.analyze_hit_table(analyzed_data_out_file=tests_data_folder + 'unit_test_data_1_analyzed.h5')
    # Digital scan with a scan parameter.
    with AnalyzeRawData(raw_data_file=tests_data_folder + 'unit_test_data_3.h5', analyzed_data_file=tests_data_folder + 'unit_test_data_3_interpreted.h5', create_pdf=False) as analyze_raw_data:  # analyze the digital scan raw data per scan parameter, do not show any feedback (no prints to console, no plots)
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_hit_table = True  # can be set to false to omit hit table creation, std. setting is false
        analyze_raw_data.create_cluster_hit_table = True  # adds the cluster id and seed info to each hit, std. setting is false
        analyze_raw_data.create_cluster_table = True  # enables the creation of a table with all clusters, std. setting is false
        analyze_raw_data.create_trigger_error_hist = True  # creates a histogram summing up the trigger errors
        analyze_raw_data.create_cluster_size_hist = True  # enables cluster size histogramming, can save some time, std. setting is false
        analyze_raw_data.create_cluster_tot_hist = True  # enables cluster ToT histogramming per cluster size, std. setting is false
        analyze_raw_data.create_meta_word_index = True  # stores the start and stop raw data word index for every event, std. setting is false
        analyze_raw_data.create_meta_event_index = True  # stores the event number for each readout in an additional meta data array, default: False
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
    # Threshold scan again, this time keeping the hit table for a second pass.
    with AnalyzeRawData(raw_data_file=tests_data_folder + 'unit_test_data_2.h5', analyzed_data_file=tests_data_folder + 'unit_test_data_2_hits.h5', create_pdf=False) as analyze_raw_data:  # analyze the fast threshold scan raw data, do not show any feedback (no prints to console, no plots)
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.create_threshold_hists = True  # makes only sense if threshold scan data is analyzed, std. setting is false
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
    # Second pass on the stored hits only.
    with AnalyzeRawData(raw_data_file=None, analyzed_data_file=tests_data_folder + 'unit_test_data_2_hits.h5', create_pdf=False) as analyze_raw_data:
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_threshold_hists = True
        analyze_raw_data.analyze_hit_table(analyzed_data_out_file=tests_data_folder + 'unit_test_data_2_analyzed.h5')
    # Single-file and multi-file (scan parameter) interpretation of data set 4.
    with AnalyzeRawData(raw_data_file=tests_data_folder + 'unit_test_data_4.h5', analyzed_data_file=tests_data_folder + 'unit_test_data_4_interpreted.h5', create_pdf=False) as analyze_raw_data:
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
    with AnalyzeRawData(raw_data_file=[tests_data_folder + 'unit_test_data_4_parameter_128.h5', tests_data_folder + 'unit_test_data_4_parameter_256.h5'], analyzed_data_file=tests_data_folder + 'unit_test_data_4_interpreted_2.h5', scan_parameter_name='parameter', create_pdf=False) as analyze_raw_data:
        analyze_raw_data.chunk_size = 2999999
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.interpret_word_table(use_settings_from_file=False, fei4b=False)  # the actual start conversion command
def analyze(self):
    """Interpret the recorded raw data with clustering and plot the results."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        # Enable cluster and source-scan histogramming before interpretation.
        analysis.create_cluster_size_hist = True
        analysis.create_source_scan_hist = True
        analysis.create_cluster_tot_hist = True
        analysis.interpreter.set_warning_output(False)
        analysis.interpret_word_table()
        analysis.interpreter.print_summary()
        analysis.plot_histograms()
def analyze(self):
    """Interpret the raw data; with TDC enabled, also histogram TDC words and
    align events at the TDC word."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_tot_hist = True
        if self.enable_tdc:
            # histogram all TDC words and the hit TDC information
            analysis.create_tdc_counter_hist = True
            analysis.create_tdc_hist = True
            analysis.interpreter.use_tdc_word(True)  # align events at the TDC word
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
def analyze_raw_data_file(file_name):
    # Interpret a threshold scan raw data file and produce fitted threshold histograms.
    with AnalyzeRawData(raw_data_file=file_name, create_pdf=False) as analyze_raw_data:
        analyze_raw_data.create_tot_hist = False
        analyze_raw_data.create_fitted_threshold_hists = True
        analyze_raw_data.create_threshold_mask = True
        # NOTE(review): True ENABLES warning output, but the original note here said
        # threshold scan data produces too many warnings (siblings pass False) —
        # confirm whether False was intended.
        analyze_raw_data.interpreter.set_warning_output(True)
        analyze_raw_data.interpret_word_table()
def analyze_raw_data(input_file, output_file_hits):
    # Fully spelled-out interpretation of one raw data file: every interpreter
    # option is set explicitly so the function doubles as a reference for the
    # available settings.
    with AnalyzeRawData(raw_data_file=input_file, analyzed_data_file=output_file_hits, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_hit_table = False  # can be set to false to omit hit table creation, std. setting is false
        analyze_raw_data.create_cluster_hit_table = False  # adds the cluster id and seed info to each hit, std. setting is false
        analyze_raw_data.create_cluster_table = False  # enables the creation of a table with all clusters, std. setting is false
        analyze_raw_data.create_occupancy_hist = True  # creates a colxrow histogram with accumulated hits for each scan parameter
        analyze_raw_data.create_tot_hist = True  # creates a ToT histogram
        analyze_raw_data.create_rel_bcid_hist = True  # creates a histogram with the relative BCID of the hits
        analyze_raw_data.create_service_record_hist = True  # creates a histogram with all SR send out from the FE
        analyze_raw_data.create_error_hist = True  # creates a histogram summing up the event errors that occurred
        analyze_raw_data.create_trigger_error_hist = True  # creates a histogram summing up the trigger errors
        analyze_raw_data.create_source_scan_hist = False  # create source scan hists
        analyze_raw_data.create_cluster_size_hist = False  # enables cluster size histogramming, can save some time, std. setting is false
        analyze_raw_data.create_cluster_tot_hist = False  # enables cluster ToT histogramming per cluster size, std. setting is false
        analyze_raw_data.create_threshold_hists = False  # makes only sense if threshold scan data is analyzed, std. setting is false
        analyze_raw_data.create_threshold_mask = False  # masking of noisy or black pixels during histogramming, only affecting fast-algorithm
        analyze_raw_data.create_fitted_threshold_hists = False  # makes only sense if threshold scan data is analyzed, std. setting is false
        analyze_raw_data.create_fitted_threshold_mask = False  # masking of noisy or black pixels during histogramming, only affecting S-curve fitting
        analyze_raw_data.create_meta_word_index = False  # stores the start and stop raw data word index for every event, std. setting is false
        analyze_raw_data.create_meta_event_index = True  # stores the event number for each readout in an additional meta data array, default: False
        analyze_raw_data.n_bcid = 16  # set the number of BCIDs per event, needed to judge the event structure, only active if settings are not taken from raw data file
        analyze_raw_data.n_injections = 100  # set the numbers of injections, needed for fast threshold/noise determination
        analyze_raw_data.max_tot_value = 13  # set the maximum ToT value considered to be a hit, 14 is a late hit
        analyze_raw_data.use_trigger_number = False
        analyze_raw_data.set_stop_mode = False  # special analysis if data was taken in stop mode
        analyze_raw_data.interpreter.use_tdc_word(False)  # use the TDC word to align the events, assume that they are first words in the event
        analyze_raw_data.interpreter.use_trigger_time_stamp(False)  # use the trigger number as a time stamp
        analyze_raw_data.interpreter.set_debug_output(False)  # std. setting is False
        analyze_raw_data.interpreter.set_info_output(False)  # std. setting is False
        analyze_raw_data.interpreter.set_warning_output(True)  # std. setting is True
        analyze_raw_data.clusterizer.set_warning_output(True)  # std. setting is True
        analyze_raw_data.interpreter.debug_events(3832, 3850, False)  # events to be printed onto the console for debugging, usually deactivated
        analyze_raw_data.interpret_word_table()  # the actual start conversion command
        analyze_raw_data.interpreter.print_summary()  # prints the interpreter summary
        analyze_raw_data.plot_histograms(pdf_filename=input_file)  # plots all activated histograms into one pdf
def analyze_raw_data_file(file_name):
    """Interpret a threshold-scan raw data file unless it was already analyzed."""
    interpreted_file = os.path.splitext(file_name)[0] + '_interpreted.h5'
    if os.path.isfile(interpreted_file):  # skip analysis if already done
        logging.warning('Analyzed data file ' + file_name + ' already exists. Skip analysis for this file.')
        return
    with AnalyzeRawData(raw_data_file=file_name, create_pdf=False) as analysis:
        analysis.create_tot_hist = False
        analysis.create_tot_pixel_hist = False
        analysis.create_fitted_threshold_hists = True
        analysis.create_threshold_mask = True
        analysis.interpreter.set_warning_output(False)  # RX errors would fill the console
        analysis.interpret_word_table()
def analyze(self):
    # Interpret the raw data, flag pixels with more than one hit as "merged"
    # and fold that mask into the configured pixel registers.
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_tot_hist = True
        if self.enable_tdc:
            analyze_raw_data.create_tdc_counter_hist = True  # histogram all TDC words
            analyze_raw_data.create_tdc_hist = True  # histogram the hit TDC information
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        occ_hist = analyze_raw_data.out_file_h5.root.HistOcc[:, :, 0].T
        occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # pixels with more than one hit are flagged
        occ_mask[occ_hist > 1] = 1
        inv_occ_mask = invert_pixel_mask(occ_mask)
        # Remove flagged pixels from the disable-registers (or overwrite them).
        if self.overwrite_mask:
            for mask in self.disable_for_mask:
                self.register.set_pixel_register_value(mask, inv_occ_mask)
        else:
            for mask in self.disable_for_mask:
                enable_mask = np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, enable_mask)
        # Add flagged pixels to the enable-registers (or overwrite them).
        if self.overwrite_mask:
            for mask in self.enable_for_mask:
                self.register.set_pixel_register_value(mask, occ_mask)
        else:
            for mask in self.enable_for_mask:
                disable_mask = np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, disable_mask)
        plot_occupancy(occ_mask.T, title='Merged Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(occ_mask.T, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Interpret the recorded stop-mode data, aligned at trigger words, and plot the results."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as raw_analysis:
        raw_analysis.create_hit_table = True
        raw_analysis.create_source_scan_hist = True
        raw_analysis.set_stop_mode = True
        raw_analysis.align_at_trigger = True
        raw_analysis.trigger_data_format = self.dut['TLU']['DATA_FORMAT']
        raw_analysis.interpreter.set_warning_output(False)  # suppress per-event interpreter warnings
        raw_analysis.interpret_word_table()
        raw_analysis.interpreter.print_summary()
        raw_analysis.plot_histograms()
def analyze(self):
    """Interpret the threshold scan data and produce fitted per-pixel threshold histograms."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as threshold_analysis:
        threshold_analysis.create_tot_hist = False
        threshold_analysis.create_fitted_threshold_hists = True
        threshold_analysis.create_threshold_mask = True
        threshold_analysis.n_injections = 100
        # So far the data structure in a threshold scan was always bad; too many warnings given.
        threshold_analysis.interpreter.set_warning_output(False)
        threshold_analysis.interpret_word_table()
        threshold_analysis.interpreter.print_summary()
        threshold_analysis.plot_histograms()
def analyze(self):
    """Interpret stop-mode data with trigger time stamps, aligned at trigger words."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as interpreted:
        interpreted.create_hit_table = True
        interpreted.create_source_scan_hist = True
        # Set number of BCID to overwrite the number deduced from the raw data file.
        interpreted.trig_count = self.trig_count
        interpreted.use_trigger_time_stamp = True
        interpreted.set_stop_mode = True
        interpreted.align_at_trigger = True
        interpreted.interpreter.set_warning_output(False)
        interpreted.interpret_word_table(use_settings_from_file=False)
        interpreted.interpreter.print_summary()
        interpreted.plot_histograms()
def analyze(self):
    """Write the tuned configuration back to the front end and plot the tuning results.

    Restores the lowest-point threshold/TDAC/Enable settings, interprets the
    recorded raw data and produces occupancy, TDAC and Poisson-statistics plots
    for every recorded tuning step.
    """
    # Apply the configuration found at the lowest scan point.
    self.register.set_global_register_value("Vthin_AltFine", self.threshold[0])
    self.register.set_pixel_register_value('TDAC', self.new_tdac[0])
    self.register.set_pixel_register_value('Enable', self.new_enable_mask[0])  # use enable mask from the lowest point to mask bad pixels
    # write configuration to avoid high current states
    commands = []
    commands.extend(self.register.get_commands("ConfMode"))
    commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltFine"]))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
    self.register_utils.send_commands(commands)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        last_step = None
        # Walk the recorded steps from highest to lowest; steps without data have threshold None.
        for step in range(self.plot_n_steps, -1, -1):
            if self.threshold[step] is not None:
                plot_occupancy(self.occupancy_hist[step].T, title='Occupancy at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), filename=analyze_raw_data.output_pdf)
                plot_fancy_occupancy(self.occupancy_hist[step].T, filename=analyze_raw_data.output_pdf)
                plot_occupancy(self.occupancy_mask[step].T, title='Noisy pixels at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=1, filename=analyze_raw_data.output_pdf)
                plot_fancy_occupancy(self.occupancy_mask[step].T, filename=analyze_raw_data.output_pdf)
                plot_three_way(self.tdac[step].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
                plot_occupancy(self.tdac[step].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=31, filename=analyze_raw_data.output_pdf)
                plot_occupancy(self.enable_mask[step].T, title='Enable mask at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=1, filename=analyze_raw_data.output_pdf)
                # adding Poisson statistics plots
                fig = Figure()
                FigureCanvas(fig)
                ax = fig.add_subplot(111)
                ax.set_title("Hit statistics")
                hist, bin_edges = np.histogram(self.occupancy_hist[step], bins=np.arange(0.0, np.max(self.occupancy_hist[step]) + 2, 1.0))
                try:
                    # Truncate the x-range at the 90% quantile of the measured occupancy.
                    _, idx = hist_quantiles(hist, [0.0, 0.9], return_indices=True)
                except IndexError:
                    idx = [0, 1]
                # Extend the binning far enough to also cover the expected Poisson distribution.
                bins = np.arange(0, np.maximum(bin_edges[idx[1]], stats.poisson.ppf(0.9999, mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1)) + 2, 1)
                ax.hist(self.occupancy_hist[step].flatten(), bins=bins, align='left', alpha=0.5, label="Measured occupancy")
                ax.bar(x=bins[:-1], height=stats.poisson.pmf(k=bins[:-1], mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1) * self.enable_mask[step].sum(), alpha=0.5, width=1.0, color="r", label="Expected occupancy (Poisson statistics)")
                # ax.hist(stats.poisson.rvs(mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1, size=self.enable_mask[step].sum()), bins=bins, align='left', alpha=0.5, label="Expected occupancy (Poisson statistics)")
                ax.set_xlabel('#Hits')
                ax.set_ylabel('#Pixels')
                ax.legend()
                analyze_raw_data.output_pdf.savefig(fig)
                last_step = step
        # last_step ends up as the lowest recorded step (loop counts downwards).
        if last_step is not None:
            plot_three_way(self.new_tdac[last_step].T, title='Final TDAC after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
            plot_occupancy(self.new_tdac[last_step].T, title='Final TDAC after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), z_max=31, filename=analyze_raw_data.output_pdf)
            plot_occupancy(self.new_enable_mask[last_step].T, title='Final Enable mask after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), z_max=1, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Interpret the scan data, flag stuck pixels and fold the mask into the pixel registers."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as interpreted:
        interpreted.create_source_scan_hist = True
        interpreted.interpreter.set_warning_output(False)
        interpreted.create_tot_hist = False
        interpreted.interpret_word_table()
        interpreted.plot_histograms()
        interpreted.interpreter.print_summary()
        occupancy = interpreted.out_file_h5.root.HistOcc[:, :, 0].T
        # Stuck pixels answered fewer times than injected; flag them with 1.
        occ_mask = np.zeros(shape=occupancy.shape, dtype=np.dtype('>u1'))
        occ_mask[occupancy < self.n_injections] = 1
        # make inverse
        inv_occ_mask = invert_pixel_mask(occ_mask)
        # Masks that disable stuck pixels: either overwrite or AND with the current value.
        for mask in self.disable_for_mask:
            if self.overwrite_mask:
                new_value = inv_occ_mask
            else:
                new_value = np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        # Masks that enable stuck pixels: either overwrite or OR with the current value.
        for mask in self.enable_for_mask:
            if self.overwrite_mask:
                new_value = occ_mask
            else:
                new_value = np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        plot_occupancy(occ_mask.T, title='Stuck Pixels', z_max=1, filename=interpreted.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=interpreted.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=interpreted.output_pdf)
def analyze(self):
    """Interpret the source scan data with clustering and optional TDC histograms."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as cluster_analysis:
        cluster_analysis.create_source_scan_hist = True
        cluster_analysis.create_cluster_size_hist = True
        cluster_analysis.create_cluster_tot_hist = True
        cluster_analysis.align_at_trigger = True
        if self.enable_tdc:
            cluster_analysis.create_tdc_counter_hist = True  # histogram all TDC words
            cluster_analysis.create_tdc_hist = True  # histogram the hit TDC information
            cluster_analysis.align_at_tdc = False  # align events at the TDC word
        cluster_analysis.interpreter.set_warning_output(False)
        cluster_analysis.interpret_word_table()
        cluster_analysis.interpreter.print_summary()
        cluster_analysis.plot_histograms()
def analyze(self):
    """Interpret and cluster stop-mode data with trigger time stamps."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as stop_mode_analysis:
        stop_mode_analysis.create_hit_table = True
        stop_mode_analysis.create_source_scan_hist = True
        stop_mode_analysis.create_cluster_size_hist = True
        stop_mode_analysis.n_bcid = self.bcid_window
        stop_mode_analysis.use_trigger_time_stamp = True
        stop_mode_analysis.set_stop_mode = True
        stop_mode_analysis.align_at_trigger = True
        # Silence both the interpreter and the clusterizer; stop-mode data is noisy.
        stop_mode_analysis.interpreter.set_warning_output(False)
        stop_mode_analysis.clusterizer.set_warning_output(False)
        stop_mode_analysis.interpret_word_table(use_settings_from_file=False)
        stop_mode_analysis.interpreter.print_summary()
        stop_mode_analysis.plot_histograms()
def analyze_hits(input_file_hits):
    """Cluster the given interpreted hit file and plot the resulting histograms.

    Parameters
    ----------
    input_file_hits : str
        Path to an ``.h5`` file containing the already-interpreted hit table.
    """
    # Derive the output name once; the original expression was repeated inline.
    analyzed_file = input_file_hits[:-3] + '_analyzed.h5'
    with AnalyzeRawData(raw_data_file=None, analyzed_data_file=input_file_hits) as hit_analysis:
        hit_analysis.create_source_scan_hist = True
        hit_analysis.create_cluster_hit_table = True
        hit_analysis.create_cluster_table = True
        hit_analysis.create_cluster_size_hist = True
        hit_analysis.create_cluster_tot_hist = True
        hit_analysis.create_tdc_hist = True
        hit_analysis.analyze_hit_table(analyzed_data_out_file=analyzed_file)
        hit_analysis.plot_histograms(pdf_filename=input_file_hits[:-3], analyzed_data_file=analyzed_file)
def analyze(self):
    """Interpret and cluster stop-mode data, aligning events by trigger number."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as interpreted:
        interpreted.create_hit_table = True
        interpreted.create_source_scan_hist = True
        interpreted.create_cluster_size_hist = True
        interpreted.n_bcid = self.bcid_window
        interpreted.use_trigger_time_stamp = True
        interpreted.set_stop_mode = True
        interpreted.interpreter.use_trigger_number(True)
        interpreted.interpreter.set_warning_output(False)
        interpreted.clusterizer.set_warning_output(False)
        # analyze_raw_data.interpreter.debug_events(0, 10, True)  # events to be printed onto the console for debugging, usually deactivated
        interpreted.interpret_word_table(use_settings_from_file=False)
        interpreted.interpreter.print_summary()
        interpreted.plot_histograms()
def analyse_selected_hits(input_file_hits, output_file_hits, output_file_hits_analyzed, scan_data_filenames, cluster_size_condition='cluster_size==1', n_cluster_condition='n_cluster==1'):
    """Select hits matching the cluster conditions and analyze the selection.

    Copies hits satisfying *cluster_size_condition* and *n_cluster_condition*
    into *output_file_hits*, analyzes them into *output_file_hits_analyzed*
    and copies the original meta data table over. Both stages are skipped if
    their output file exists and overwriting is disabled in
    ``analysis_configuration``.
    """
    # Lazy %-style logging args avoid building strings for suppressed records.
    logging.info('Analyze selected hits with %s and %s in %s', cluster_size_condition, n_cluster_condition, input_file_hits)
    if os.path.isfile(output_file_hits) and not analysis_configuration["overwrite_output_files"]:  # skip analysis if already done
        logging.warning('Selected hit data file %s already exists. Skip analysis for this file.', output_file_hits)
    else:
        analysis.select_hits_from_cluster_info(input_file_hits=input_file_hits, output_file_hits=output_file_hits, cluster_size_condition=cluster_size_condition, n_cluster_condition=n_cluster_condition)  # select hits and copy them to a new file
    if os.path.isfile(output_file_hits_analyzed) and not analysis_configuration["overwrite_output_files"]:  # skip analysis if already done
        logging.warning('Analyzed selected hit data file %s already exists. Skip analysis for this file.', output_file_hits_analyzed)
    else:
        logging.info('Analyze selected hits in %s', output_file_hits)
        with AnalyzeRawData(raw_data_file=None, analyzed_data_file=output_file_hits) as analyze_raw_data:
            analyze_raw_data.create_source_scan_hist = True
            analyze_raw_data.create_tot_hist = False
            analyze_raw_data.create_cluster_size_hist = True
            analyze_raw_data.create_cluster_tot_hist = True
            analyze_raw_data.analyze_hit_table(analyzed_data_out_file=output_file_hits_analyzed)
            analyze_raw_data.plot_histograms(scan_data_filename=output_file_hits_analyzed, analyzed_data_file=output_file_hits_analyzed)
        # Use tb.open_file (modern PyTables API, consistent with the rest of
        # this file) instead of the deprecated tb.openFile.
        with tb.open_file(input_file_hits, mode="r") as in_hit_file_h5:  # copy meta data to the new analyzed file
            with tb.open_file(output_file_hits_analyzed, mode="r+") as output_hit_file_h5:
                in_hit_file_h5.root.meta_data.copy(output_hit_file_h5.root)  # copy meta_data node to new file
def analyze(self):
    """Restore the last known-good configuration and plot the final masks and TDAC distribution."""
    # Write back the last good global threshold and per-pixel settings found by the tuning.
    self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold)
    self.register.set_pixel_register_value('TDAC', self.last_good_tdac)
    self.register.set_pixel_register_value('Enable', self.last_good_enable_mask)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        plot_occupancy(self.last_occupancy_hist.T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_hist.T, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.last_occupancy_mask.T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_mask.T, filename=analyze_raw_data.output_pdf)
        # NOTE(review): camelCase plotThreeWay differs from plot_three_way used elsewhere in
        # this file — presumably an older plotting API; confirm the name is actually imported.
        plotThreeWay(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
        plot_occupancy(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), z_max=31, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.register.get_pixel_register_value('Enable').T, title='Enable Mask', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.register.get_pixel_register_value('Enable').T, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Interpret TDC-aligned data, flag merged pixels and fold the mask into the pixel registers."""
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as interpreted:
        interpreted.create_tot_hist = True
        if self.enable_tdc:
            interpreted.create_tdc_counter_hist = True  # histogram all TDC words
            interpreted.create_tdc_hist = True  # histogram the hit TDC information
            interpreted.interpreter.use_tdc_word(True)  # align events at the TDC word
        interpreted.interpret_word_table()
        interpreted.plot_histograms()
        interpreted.interpreter.print_summary()
        with tb.open_file(interpreted._analyzed_data_file, 'r') as analyzed_h5:
            occupancy = analyzed_h5.root.HistOcc[:, :, 0].T
            # Any pixel with at least one hit is flagged as merged.
            occ_mask = np.zeros(shape=occupancy.shape, dtype=np.dtype('>u1'))
            occ_mask[occupancy > 0] = 1
            plot_occupancy(occ_mask.T, title='Merged Pixels', z_max=1, filename=interpreted.output_pdf)
            inv_occ_mask = invert_pixel_mask(occ_mask)
            # Masks that disable flagged pixels: either overwrite or AND with the current value.
            for mask in self.disable_for_mask:
                if self.overwrite_mask:
                    new_value = inv_occ_mask
                else:
                    new_value = np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            # Masks that enable flagged pixels: either overwrite or OR with the current value.
            for mask in self.enable_for_mask:
                if self.overwrite_mask:
                    new_value = occ_mask
                else:
                    new_value = np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)