def analyze(self):
    """Restore the last good FE configuration for the requested threshold step,
    write it to the front end and summarize the scan results in plots.
    """
    idx = self.increase_threshold
    self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold[idx])
    self.register.set_pixel_register_value('TDAC', self.last_good_tdac[idx])
    # use enable mask from the lowest point to mask bad pixels
    self.register.set_pixel_register_value('Enable', self.last_good_enable_mask[0])
    # write configuration to avoid high current states
    commands = []
    for command_batch in (self.register.get_commands("ConfMode"),
                          self.register.get_commands("WrRegister", name=["Vthin_AltFine"]),
                          self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"),
                          self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable")):
        commands.extend(command_batch)
    self.register_utils.send_commands(commands)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        pdf = analyze_raw_data.output_pdf
        reg_val, step = self.last_reg_val[idx], self.last_step[idx]
        plot_occupancy(self.last_occupancy_hist[idx].T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (reg_val, step), filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_hist[idx].T, filename=pdf)
        plot_occupancy(self.last_occupancy_mask[idx].T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (reg_val, step), z_max=1, filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_mask[idx].T, filename=pdf)
        plot_three_way(self.last_good_tdac[idx].T, title='TDAC at Vthin_AltFine %d Step %d' % (reg_val, step), x_axis_title="TDAC", filename=pdf, maximum=31, bins=32)
        plot_occupancy(self.last_good_tdac[idx].T, title='TDAC at Vthin_AltFine %d Step %d' % (reg_val, step), z_max=31, filename=pdf)
        plot_occupancy(self.last_good_enable_mask[idx].T, title='Intermediate Enable Mask at Vthin_AltFine %d Step %d' % (reg_val, step), z_max=1, filename=pdf)
        plot_fancy_occupancy(self.last_good_enable_mask[idx].T, filename=pdf)
        plot_occupancy(self.last_good_enable_mask[0].T, title='Final Enable Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[0], self.last_step[0]), z_max=1, filename=pdf)
        plot_fancy_occupancy(self.last_good_enable_mask[0].T, filename=pdf)
def analyze(self):
    """Interpret the raw data, flag every pixel with at least one hit
    ('Merged Pixels') and fold that flag into the configured pixel registers.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_tot_hist = True
        if self.enable_tdc:
            analyze_raw_data.create_tdc_counter_hist = True  # histogram all TDC words
            analyze_raw_data.create_tdc_hist = True  # histogram the hit TDC information
            analyze_raw_data.interpreter.use_tdc_word(True)  # align events at the TDC word
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            occ_mask[occ_hist > 0] = 1  # any hit flags the pixel
            plot_occupancy(occ_mask.T, title='Merged Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
            inv_occ_mask = invert_pixel_mask(occ_mask)
            # either overwrite the masks or AND/OR the new flags into the existing values
            for mask in self.disable_for_mask:
                new_value = inv_occ_mask if self.overwrite_mask else np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            for mask in self.enable_for_mask:
                new_value = occ_mask if self.overwrite_mask else np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
def analyze(self):
    """Write back the last good configuration — with the global threshold
    raised by ``increase_threshold`` — and plot the scan results.
    """
    reg = self.register
    reg.set_global_register_value("Vthin_AltFine", self.last_good_threshold + self.increase_threshold)
    reg.set_pixel_register_value('TDAC', self.last_good_tdac)
    reg.set_pixel_register_value('Enable', self.last_good_enable_mask)
    # write configuration to avoid high current states
    commands = []
    commands.extend(reg.get_commands("ConfMode"))
    commands.extend(reg.get_commands("WrRegister", name=["Vthin_AltFine"]))
    commands.extend(reg.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
    commands.extend(reg.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
    self.register_utils.send_commands(commands)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_source_scan_hist = True
        analysis.interpreter.set_warning_output(False)
        analysis.interpret_word_table()
        analysis.interpreter.print_summary()
        analysis.plot_histograms()
        pdf = analysis.output_pdf
        at_step = (self.last_reg_val, self.last_step)
        plot_occupancy(self.last_occupancy_hist.T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % at_step, filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_hist.T, filename=pdf)
        plot_occupancy(self.last_occupancy_mask.T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % at_step, z_max=1, filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_mask.T, filename=pdf)
        plot_three_way(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % at_step, x_axis_title="TDAC", filename=pdf, maximum=31, bins=32)
        plot_occupancy(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % at_step, z_max=31, filename=pdf)
        plot_occupancy(reg.get_pixel_register_value('Enable').T, title='Enable Mask', z_max=1, filename=pdf)
        plot_fancy_occupancy(reg.get_pixel_register_value('Enable').T, filename=pdf)
def analyze(self):
    """Interpret the raw data, mark all pixels that recorded hits as merged
    pixels and propagate the mask into the disable/enable pixel registers.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as raw_analysis:
        raw_analysis.create_tot_hist = True
        if self.enable_tdc:
            raw_analysis.create_tdc_counter_hist = True  # histogram all TDC words
            raw_analysis.create_tdc_hist = True  # histogram the hit TDC information
            raw_analysis.interpreter.use_tdc_word(True)  # align events at the TDC word
        raw_analysis.interpret_word_table()
        raw_analysis.plot_histograms()
        raw_analysis.interpreter.print_summary()
        with tb.open_file(raw_analysis._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            occ_mask[occ_hist > 0] = 1
            plot_occupancy(occ_mask.T, title='Merged Pixels', z_max=1, filename=raw_analysis.output_pdf)
            inv_occ_mask = invert_pixel_mask(occ_mask)
            if self.overwrite_mask:
                # replace the stored masks outright
                for mask in self.disable_for_mask:
                    self.register.set_pixel_register_value(mask, inv_occ_mask)
                for mask in self.enable_for_mask:
                    self.register.set_pixel_register_value(mask, occ_mask)
            else:
                # combine new flags with the existing register contents
                for mask in self.disable_for_mask:
                    self.register.set_pixel_register_value(mask, np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask)))
                for mask in self.enable_for_mask:
                    self.register.set_pixel_register_value(mask, np.logical_or(occ_mask, self.register.get_pixel_register_value(mask)))
def analyze(self):
    """Interpret the threshold scan data and build a crosstalk mask from the
    fitted-threshold histogram (pixels with a fitted threshold > 0 are flagged).
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_tot_hist = False
        analysis.create_fitted_threshold_hists = True
        analysis.create_threshold_mask = True
        analysis.n_injections = 100
        # so far the data structure in a threshold scan was always bad, too many warnings given
        analysis.interpreter.set_warning_output(False)
        analysis.interpret_word_table()
        analysis.interpreter.print_summary()
        analysis.plot_histograms()
        thr_hist = analysis.out_file_h5.root.HistThresholdFitted[:, :].T
        xtalk_mask = np.zeros(shape=thr_hist.shape, dtype=np.dtype('>u1'))
        xtalk_mask[thr_hist > 0.0] = 1
        plot_occupancy(xtalk_mask.T, title='Crosstalk', z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag pixels whose occupancy exceeds ``abs_occ_limit`` as noisy, fold the
    flag into the configured pixel registers and plot the resulting masks.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.interpreter.set_warning_output(False)
        analysis.create_source_scan_hist = True
        analysis.create_hit_table = False
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
        with tb.open_file(analysis._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # noisy pixels are set to 1
            self.occ_mask[occ_hist > self.abs_occ_limit] = 1
            # make inverse
            self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
            for mask in self.disable_for_mask:
                new_value = self.inv_occ_mask if self.overwrite_mask else np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            for mask in self.enable_for_mask:
                new_value = self.occ_mask if self.overwrite_mask else np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analysis.output_pdf)
            plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analysis.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag pixels that responded to fewer than ``n_injections`` injections as
    stuck, fold the flag into the configured pixel registers and plot the masks.

    Fixes over the original: removed the no-op self-assignments
    ``self.disable_for_mask = self.disable_for_mask`` and
    ``self.enable_for_mask = self.enable_for_mask`` and a commented-out
    dead-code line; behavior is otherwise unchanged.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.create_tot_hist = False
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # stuck pixels (fewer hits than injections) are set to 1
            self.occ_mask[occ_hist < self.n_injections] = 1
            # make inverse
            self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
            if self.overwrite_mask:
                for mask in self.disable_for_mask:
                    self.register.set_pixel_register_value(mask, self.inv_occ_mask)
            else:
                for mask in self.disable_for_mask:
                    enable_mask = np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, enable_mask)
            if self.overwrite_mask:
                for mask in self.enable_for_mask:
                    self.register.set_pixel_register_value(mask, self.occ_mask)
            else:
                for mask in self.enable_for_mask:
                    disable_mask = np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, disable_mask)
            plot_occupancy(self.occ_mask.T, title='Stuck Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Write the last good threshold/TDAC/Enable configuration back into the
    register object and plot the tuning results.

    Fix: use ``plot_three_way`` for the TDAC map, consistent with the other
    ``analyze()`` methods in this file; the original called the camelCase
    ``plotThreeWay`` spelling (NOTE(review): confirm only ``plot_three_way``
    is imported at file top).
    """
    self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold)
    self.register.set_pixel_register_value('TDAC', self.last_good_tdac)
    self.register.set_pixel_register_value('Enable', self.last_good_enable_mask)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        plot_occupancy(self.last_occupancy_hist.T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_hist.T, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.last_occupancy_mask.T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.last_occupancy_mask.T, filename=analyze_raw_data.output_pdf)
        plot_three_way(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
        plot_occupancy(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val, self.last_step), z_max=31, filename=analyze_raw_data.output_pdf)
        plot_occupancy(self.register.get_pixel_register_value('Enable').T, title='Enable Mask', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.register.get_pixel_register_value('Enable').T, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Store the last good configuration (threshold raised by
    ``increase_threshold``) in the register object and plot the scan results.
    """
    self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold + self.increase_threshold)
    self.register.set_pixel_register_value('TDAC', self.last_good_tdac)
    self.register.set_pixel_register_value('Enable', self.last_good_enable_mask)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        pdf = analyze_raw_data.output_pdf
        reg_val, step = self.last_reg_val, self.last_step
        plot_occupancy(self.last_occupancy_hist.T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (reg_val, step), filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_hist.T, filename=pdf)
        plot_occupancy(self.last_occupancy_mask.T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (reg_val, step), z_max=1, filename=pdf)
        plot_fancy_occupancy(self.last_occupancy_mask.T, filename=pdf)
        plot_three_way(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (reg_val, step), x_axis_title="TDAC", filename=pdf, maximum=31, bins=32)
        plot_occupancy(self.last_tdac_distribution.T, title='TDAC at Vthin_AltFine %d Step %d' % (reg_val, step), z_max=31, filename=pdf)
        plot_occupancy(self.register.get_pixel_register_value('Enable').T, title='Enable Mask', z_max=1, filename=pdf)
        plot_fancy_occupancy(self.register.get_pixel_register_value('Enable').T, filename=pdf)
def analyze(self):
    """Flag pixels with occupancy above 1 as merged pixels, fold the flag into
    the configured pixel registers and plot the resulting masks.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_tot_hist = True
        if self.enable_tdc:
            analysis.create_tdc_counter_hist = True  # histogram all TDC words
            analysis.create_tdc_hist = True  # histogram the hit TDC information
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
        # read the occupancy straight from the analyzer's open output file
        occ_hist = analysis.out_file_h5.root.HistOcc[:, :, 0].T
        occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        occ_mask[occ_hist > 1] = 1
        inv_occ_mask = invert_pixel_mask(occ_mask)
        for mask in self.disable_for_mask:
            new_value = inv_occ_mask if self.overwrite_mask else np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        for mask in self.enable_for_mask:
            new_value = occ_mask if self.overwrite_mask else np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        plot_occupancy(occ_mask.T, title='Merged Pixels', z_max=1, filename=analysis.output_pdf)
        plot_fancy_occupancy(occ_mask.T, z_max=1, filename=analysis.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag pixels with fewer hits than ``n_injections`` as stuck, fold the
    flag into the configured pixel registers and plot the resulting masks.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_source_scan_hist = True
        analysis.interpreter.set_warning_output(False)
        analysis.create_tot_hist = False
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
        # read the occupancy straight from the analyzer's open output file
        occ_hist = analysis.out_file_h5.root.HistOcc[:, :, 0].T
        occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # stuck pixels are set to 1
        occ_mask[occ_hist < self.n_injections] = 1
        # make inverse
        inv_occ_mask = invert_pixel_mask(occ_mask)
        for mask in self.disable_for_mask:
            new_value = inv_occ_mask if self.overwrite_mask else np.logical_and(inv_occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        for mask in self.enable_for_mask:
            new_value = occ_mask if self.overwrite_mask else np.logical_or(occ_mask, self.register.get_pixel_register_value(mask))
            self.register.set_pixel_register_value(mask, new_value)
        plot_occupancy(occ_mask.T, title='Stuck Pixels', z_max=1, filename=analysis.output_pdf)
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag the ``mask_high_count`` highest-occupancy pixels (above
    ``low_value``) as noisy, fold the flag into the configured pixel registers
    and plot the resulting masks.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.create_cluster_size_hist = False
        analysis.create_source_scan_hist = True
        analysis.create_cluster_tot_hist = False
        analysis.interpreter.set_warning_output(False)
        analysis.clusterizer.set_warning_output(False)
        analysis.interpret_word_table()
        analysis.interpreter.print_summary()
        analysis.plot_histograms()
        with tb.open_file(analysis._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # take the mask_high_count largest occupancy values above low_value
            n_largest_elements = np.sort(occ_hist[occ_hist > self.low_value])[-self.mask_high_count:]
            # noisy pixels are set to 1
            if n_largest_elements.shape[0] > 0:
                self.occ_mask[occ_hist >= n_largest_elements[0]] = 1
            # make inverse
            self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
            for mask in self.disable_for_mask:
                new_value = self.inv_occ_mask if self.overwrite_mask else np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            for mask in self.enable_for_mask:
                new_value = self.occ_mask if self.overwrite_mask else np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analysis.output_pdf)
            plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analysis.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag pixels whose occupancy exceeds
    ``occupancy_limit * n_triggers * consecutive_lvl1`` as noisy, fold the flag
    into the configured pixel registers and plot the resulting masks.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analysis:
        analysis.interpreter.set_warning_output(False)
        analysis.create_source_scan_hist = True
        analysis.create_hit_table = False
        analysis.interpret_word_table()
        analysis.plot_histograms()
        analysis.interpreter.print_summary()
        with tb.open_file(analysis._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # a trig_count of 0 selects the full range given by the Trig_Count register bit length
            if self.trig_count == 0:
                consecutive_lvl1 = (2 ** self.register.global_registers['Trig_Count']['bitlength'])
            else:
                consecutive_lvl1 = self.trig_count
            # noisy pixels are set to 1
            self.occ_mask[occ_hist > self.occupancy_limit * self.n_triggers * consecutive_lvl1] = 1
            # make inverse
            self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
            for mask in self.disable_for_mask:
                new_value = self.inv_occ_mask if self.overwrite_mask else np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            for mask in self.enable_for_mask:
                new_value = self.occ_mask if self.overwrite_mask else np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, new_value)
            plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analysis.output_pdf)
            plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analysis.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analysis.output_pdf)
def analyze(self):
    """Flag pixels that responded to fewer than ``n_injections`` injections as
    stuck, fold the flag into the configured pixel registers and plot the masks.

    Fixes over the original: removed the no-op self-assignments
    ``self.disable_for_mask = self.disable_for_mask`` and
    ``self.enable_for_mask = self.enable_for_mask`` and a commented-out
    dead-code line; behavior is otherwise unchanged.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.create_tot_hist = False
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        with tb.open_file(analyze_raw_data._analyzed_data_file, 'r') as out_file_h5:
            occ_hist = out_file_h5.root.HistOcc[:, :, 0].T
            self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
            # stuck pixels (fewer hits than injections) are set to 1
            self.occ_mask[occ_hist < self.n_injections] = 1
            # make inverse
            self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
            if self.overwrite_mask:
                for mask in self.disable_for_mask:
                    self.register.set_pixel_register_value(mask, self.inv_occ_mask)
            else:
                for mask in self.disable_for_mask:
                    enable_mask = np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, enable_mask)
            if self.overwrite_mask:
                for mask in self.enable_for_mask:
                    self.register.set_pixel_register_value(mask, self.occ_mask)
            else:
                for mask in self.enable_for_mask:
                    disable_mask = np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                    self.register.set_pixel_register_value(mask, disable_mask)
            plot_occupancy(self.occ_mask.T, title='Stuck Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.disable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
            for mask in self.enable_for_mask:
                mask_name = self.register.pixel_registers[mask]['name']
                plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze_injected_charge(data_analyzed_file):
    """Analyze the injected charge of a GDAC source scan.

    Reads the occupancy per GDAC setting from ``data_analyzed_file``, optionally
    normalizes rates and corrects for cluster sizes, converts per-pixel
    thresholds via the calibration file given in ``analysis_configuration``,
    builds a profile/charge histogram, writes the results to
    ``<data_analyzed_file>_result.h5`` and plots into ``<data_analyzed_file>.pdf``.

    Fixes over the original: uses the PyTables 3 ``tb.open_file`` API
    (consistent with the rest of this file and with the ``create_table`` calls
    below; ``tb.openFile`` is the deprecated camelCase alias) and iterates the
    configuration with ``items()`` instead of the Python-2-only ``iteritems()``.

    Relies on module-level names: ``analysis_configuration``, ``hit_file``,
    ``cluster_sizes_file``, ``plotting``, ``plot_cluster_sizes``, ``plot_result``.
    """
    logging.info('Analyze the injected charge')
    with tb.open_file(data_analyzed_file, mode="r") as in_file_h5:
        occupancy = in_file_h5.root.HistOcc[:].T
        gdacs = analysis_utils.get_scan_parameter(in_file_h5.root.meta_data[:])['GDAC']
        with PdfPages(data_analyzed_file[:-3] + '.pdf') as plot_file:
            plotting.plot_scatter(gdacs, occupancy.sum(axis=(0, 1)), title='Single pixel hit rate at different thresholds', x_label='Threshold setting [GDAC]', y_label='Single pixel hit rate', log_x=True, filename=plot_file)
            if analysis_configuration['input_file_calibration']:
                # read calibration file from calibrate_threshold_gdac scan
                with tb.open_file(analysis_configuration['input_file_calibration'], mode="r") as in_file_calibration_h5:
                    mean_threshold_calibration = in_file_calibration_h5.root.MeanThresholdCalibration[:]
                    threshold_calibration_array = in_file_calibration_h5.root.HistThresholdCalibration[:]
                    gdac_range_calibration = np.array(in_file_calibration_h5.root.HistThresholdCalibration._v_attrs.scan_parameter_values)
                    gdac_range_source_scan = gdacs
                    # Select data that is within the given GDAC range, (min_gdac, max_gdac)
                    sel = np.where(np.logical_and(gdac_range_source_scan >= analysis_configuration['min_gdac'], gdac_range_source_scan <= analysis_configuration['max_gdac']))[0]
                    gdac_range_source_scan = gdac_range_source_scan[sel]
                    occupancy = occupancy[:, :, sel]
                    sel = np.where(np.logical_and(gdac_range_calibration >= analysis_configuration['min_gdac'], gdac_range_calibration <= analysis_configuration['max_gdac']))[0]
                    gdac_range_calibration = gdac_range_calibration[sel]
                    threshold_calibration_array = threshold_calibration_array[:, :, sel]
                    logging.info('Analyzing source scan data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d', len(gdac_range_source_scan), np.min(gdac_range_source_scan), np.max(gdac_range_source_scan), np.min(np.gradient(gdac_range_source_scan)), np.max(np.gradient(gdac_range_source_scan)))
                    logging.info('Use calibration data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d', len(gdac_range_calibration), np.min(gdac_range_calibration), np.max(gdac_range_calibration), np.min(np.gradient(gdac_range_calibration)), np.max(np.gradient(gdac_range_calibration)))
                    # rate_normalization of the total hit number for each GDAC setting
                    rate_normalization = 1.
                    if analysis_configuration['normalize_rate']:
                        rate_normalization = analysis_utils.get_rate_normalization(hit_file=hit_file, cluster_file=hit_file, parameter='GDAC', reference=analysis_configuration['normalization_reference'], plot=analysis_configuration['plot_normalization'])
                    # correcting the hit numbers for the different cluster sizes
                    correction_factors = 1.
                    if analysis_configuration['use_cluster_rate_correction']:
                        correction_h5 = tb.open_file(cluster_sizes_file, mode="r")
                        cluster_size_histogram = correction_h5.root.AllHistClusterSize[:]
                        correction_factors = analysis_utils.get_hit_rate_correction(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_source_scan, cluster_size_histogram=cluster_size_histogram)
                        if analysis_configuration['plot_cluster_sizes']:
                            plot_cluster_sizes(correction_h5, in_file_calibration_h5, gdac_range=gdac_range_source_scan)
                    # interpolates the threshold at the source scan GDAC setting from the calibration
                    pixel_thresholds = analysis_utils.get_pixel_thresholds_from_calibration_array(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_calibration, threshold_calibration_array=threshold_calibration_array)
                    pixel_hits = occupancy  # create hit array with shape (col, row, ...)
                    pixel_hits = pixel_hits * correction_factors * rate_normalization
                    # choose region with pixels that have a sufficient occupancy but are not too hot
                    good_pixel = analysis_utils.select_good_pixel_region(pixel_hits, col_span=analysis_configuration['col_span'], row_span=analysis_configuration['row_span'], min_cut_threshold=analysis_configuration['min_cut_threshold'], max_cut_threshold=analysis_configuration['max_cut_threshold'])
                    pixel_mask = ~np.ma.getmaskarray(good_pixel)
                    selected_pixel_hits = pixel_hits[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
                    selected_pixel_thresholds = pixel_thresholds[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
                    plotting.plot_occupancy(good_pixel.T, title='Selected pixel for analysis (' + str(len(selected_pixel_hits)) + ')', filename=plot_file)
                    # reshape to one dimension
                    x = selected_pixel_thresholds.flatten()
                    y = selected_pixel_hits.flatten()
                    # nothing should be NAN/INF, NAN/INF is not supported yet
                    if np.isfinite(x).shape != x.shape or np.isfinite(y).shape != y.shape:
                        logging.warning('There are pixels with NaN or INF threshold or hit values, analysis will fail')
                    # calculated profile histogram
                    x_p, y_p, y_p_e = analysis_utils.get_profile_histogram(x, y, n_bins=analysis_configuration['n_bins'])  # profile histogram data
                    # select only the data point where the calibration worked
                    selected_data = np.logical_and(x_p > analysis_configuration['min_thr'] / analysis_configuration['vcal_calibration'], x_p < analysis_configuration['max_thr'] / analysis_configuration['vcal_calibration'])
                    x_p = x_p[selected_data]
                    y_p = y_p[selected_data]
                    y_p_e = y_p_e[selected_data]
                    if len(y_p_e[y_p_e == 0]) != 0:
                        logging.warning('There are bins without any data, guessing the error bars')
                        y_p_e[y_p_e == 0] = np.amin(y_p_e[y_p_e != 0])
                    smoothed_data = analysis_utils.smooth_differentiation(x_p, y_p, weigths=1 / y_p_e, order=3, smoothness=analysis_configuration['smoothness'], derivation=0)
                    smoothed_data_diff = analysis_utils.smooth_differentiation(x_p, y_p, weigths=1 / y_p_e, order=3, smoothness=analysis_configuration['smoothness'], derivation=1)
                    with tb.open_file(data_analyzed_file[:-3] + '_result.h5', mode="w") as out_file_h5:
                        result_1 = np.rec.array(np.column_stack((x_p, y_p, y_p_e)), dtype=[('charge', float), ('count', float), ('count_error', float)])
                        result_2 = np.rec.array(np.column_stack((x_p, smoothed_data)), dtype=[('charge', float), ('count', float)])
                        result_3 = np.rec.array(np.column_stack((x_p, -smoothed_data_diff)), dtype=[('charge', float), ('count', float)])
                        out_1 = out_file_h5.create_table(out_file_h5.root, name='ProfileHistogram', description=result_1.dtype, title='Single pixel count rate combined with a profile histogram', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        out_2 = out_file_h5.create_table(out_file_h5.root, name='ProfileHistogramSpline', description=result_2.dtype, title='Single pixel count rate combined with a profile histogram and spline smoothed', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        out_3 = out_file_h5.create_table(out_file_h5.root, name='ChargeHistogram', description=result_3.dtype, title='Charge histogram with threshold method and per pixel calibration', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        # store the analysis configuration as table attributes
                        for key, value in analysis_configuration.items():
                            out_1.attrs[key] = value
                            out_2.attrs[key] = value
                            out_3.attrs[key] = value
                        out_1.append(result_1)
                        out_2.append(result_2)
                        out_3.append(result_3)
                    plot_result(x_p, y_p, y_p_e, smoothed_data, smoothed_data_diff)
                    # calculate and plot mean results
                    x_mean = analysis_utils.get_mean_threshold_from_calibration(gdac_range_source_scan, mean_threshold_calibration)
                    y_mean = selected_pixel_hits.mean(axis=(0))
                    plotting.plot_scatter(np.array(gdac_range_source_scan), y_mean, log_x=True, plot_range=None, title='Mean single pixel cluster rate at different thresholds', x_label='threshold setting [GDAC]', y_label='mean single pixel cluster rate', filename=plot_file)
                    plotting.plot_scatter(x_mean * analysis_configuration['vcal_calibration'], y_mean, plot_range=(analysis_configuration['min_thr'], analysis_configuration['max_thr']), title='Mean single pixel cluster rate at different thresholds', x_label='mean threshold [e]', y_label='mean single pixel cluster rate', filename=plot_file)
                if analysis_configuration['use_cluster_rate_correction']:
                    correction_h5.close()
def analyze_beam_spot(
    scan_base,
    combine_n_readouts=1000,
    chunk_size=10000000,
    plot_occupancy_hists=False,
    output_pdf=None,
    output_file=None,
):
    """ Determines the mean x and y beam spot position as a function of time. Therefore the data of a fixed number of read outs are combined ('combine_n_readouts'). The occupancy is determined for the given combined events and stored into a pdf file. At the end the beam x and y is plotted into a scatter plot with absolute positions in um.

    Parameters
    ----------
    scan_base: list of str
        scan base names (e.g.:  ['//data//SCC_50_fei4_self_trigger_scan_390', ]
    combine_n_readouts: int
        the number of read outs to combine (e.g. 1000)
    max_chunk_size: int
        the maximum chunk size used during read, if too big memory error occurs, if too small analysis takes longer
    output_pdf: PdfPages
        PdfPages file object, if none the plot is printed to screen
    output_file: str or None
        if given, the (time_stamp, x, y) result is also appended to this HDF5 file as a 'Beamspot' table

    Returns
    -------
    (time_stamp, x, y): three lists, one entry per combined-readout slice; x/y are mean
    occupancy-projection positions in pixel units (scaled to um only in the plot).
    """
    time_stamp = []
    x = []
    y = []

    for data_file in scan_base:
        with tb.openFile(data_file + "_interpreted.h5", mode="r+") as in_hit_file_h5:
            # get data and data pointer
            meta_data_array = in_hit_file_h5.root.meta_data[:]
            hit_table = in_hit_file_h5.root.Hits

            # determine the event ranges to analyze (timestamp_start, start_event_number, stop_event_number)
            parameter_ranges = np.column_stack(
                (
                    analysis_utils.get_ranges_from_array(meta_data_array["timestamp_start"][::combine_n_readouts]),
                    analysis_utils.get_ranges_from_array(meta_data_array["event_number"][::combine_n_readouts]),
                )
            )

            # create a event_numer index (important)
            analysis_utils.index_event_number(hit_table)

            # initialize the analysis and set settings
            analyze_data = AnalyzeRawData()
            analyze_data.create_tot_hist = False
            analyze_data.create_bcid_hist = False
            analyze_data.histograming.set_no_scan_parameter()

            # variables for read speed up
            index = 0  # index where to start the read out, 0 at the beginning, increased during looping
            best_chunk_size = chunk_size

            progress_bar = progressbar.ProgressBar(
                widgets=[
                    "",
                    progressbar.Percentage(),
                    " ",
                    progressbar.Bar(marker="*", left="|", right="|"),
                    " ",
                    analysis_utils.ETA(),
                ],
                maxval=hit_table.shape[0],
                term_width=80,
            )
            progress_bar.start()

            # loop over the selected events
            for parameter_index, parameter_range in enumerate(parameter_ranges):
                logging.debug(
                    "Analyze time stamp "
                    + str(parameter_range[0])
                    + " and data from events = ["
                    + str(parameter_range[2])
                    + ","
                    + str(parameter_range[3])
                    + "[ "
                    + str(int(float(float(parameter_index) / float(len(parameter_ranges)) * 100.0)))
                    + "%"
                )
                analyze_data.reset()  # resets the data of the last analysis

                # loop over the hits in the actual selected events with optimizations: determine best chunk size, start word index given
                readout_hit_len = (
                    0
                )  # variable to calculate a optimal chunk size value from the number of hits for speed up
                for hits, index in analysis_utils.data_aligned_at_events(
                    hit_table,
                    start_event_number=parameter_range[2],
                    stop_event_number=parameter_range[3],
                    start=index,
                    chunk_size=best_chunk_size,
                ):
                    analyze_data.analyze_hits(hits)  # analyze the selected hits in chunks
                    readout_hit_len += hits.shape[0]
                progress_bar.update(index)
                best_chunk_size = (
                    int(1.5 * readout_hit_len) if int(1.05 * readout_hit_len) < chunk_size else chunk_size
                )  # to increase the readout speed, estimated the number of hits for one read instruction

                # get and store results
                occupancy_array = analyze_data.histograming.get_occupancy()
                # project the 2D occupancy onto the column (x) and row (y) axes
                projection_x = np.sum(occupancy_array, axis=0).ravel()
                projection_y = np.sum(occupancy_array, axis=1).ravel()
                # 80 columns / 336 rows — FE-I4 pixel matrix dimensions
                x.append(analysis_utils.get_mean_from_histogram(projection_x, bin_positions=range(0, 80)))
                y.append(analysis_utils.get_mean_from_histogram(projection_y, bin_positions=range(0, 336)))
                time_stamp.append(parameter_range[0])
                if plot_occupancy_hists:
                    plotting.plot_occupancy(
                        occupancy_array[:, :, 0],
                        title="Occupancy for events between "
                        + time.strftime("%H:%M:%S", time.localtime(parameter_range[0]))
                        + " and "
                        + time.strftime("%H:%M:%S", time.localtime(parameter_range[1])),
                        filename=output_pdf,
                    )
            progress_bar.finish()

    # 250 um / 50 um are the FE-I4 pixel pitches in x / y used to convert pixel index to um
    plotting.plot_scatter(
        [i * 250 for i in x],
        [i * 50 for i in y],
        title="Mean beam position",
        x_label="x [um]",
        y_label="y [um]",
        marker_style="-o",
        filename=output_pdf,
    )
    if output_file:
        with tb.openFile(output_file, mode="a") as out_file_h5:
            rec_array = np.array(zip(time_stamp, x, y), dtype=[("time_stamp", float), ("x", float), ("y", float)])
            try:
                beam_spot_table = out_file_h5.createTable(
                    out_file_h5.root,
                    name="Beamspot",
                    description=rec_array,
                    title="Beam spot position",
                    filters=tb.Filters(complib="blosc", complevel=5, fletcher32=False),
                )
                beam_spot_table[:] = rec_array
            except tb.exceptions.NodeError:
                # table already exists from a previous run — keep the old data
                logging.warning(output_file + " has already a Beamspot note, do not overwrite existing.")
    return time_stamp, x, y
def analyze_injected_charge(data_analyzed_file):
    """Analyze a GDAC threshold source scan: convert the per-pixel hit rate vs. GDAC
    into a rate vs. injected-charge profile using a per-pixel threshold calibration.

    Reads the occupancy histogram and GDAC scan parameters from *data_analyzed_file*,
    optionally normalizes/corrects rates, interpolates per-pixel thresholds from the
    calibration file given in the module-level ``analysis_configuration``, writes the
    resulting profile/charge histograms to ``<file>_result.h5`` and plots to ``<file>.pdf``.

    Parameters
    ----------
    data_analyzed_file : str
        Path to the interpreted scan HDF5 file (must end in '.h5'; the extension is
        stripped with ``[:-3]`` to derive output names).

    NOTE(review): relies on module-level names `analysis_configuration`, `hit_file` and
    `cluster_sizes_file` being defined elsewhere in this file — verify before reuse.
    """
    logging.info('Analyze the injected charge')
    with tb.openFile(data_analyzed_file, mode="r") as in_file_h5:
        occupancy = in_file_h5.root.HistOcc[:].T
        gdacs = analysis_utils.get_scan_parameter(in_file_h5.root.meta_data[:])['GDAC']
        with PdfPages(data_analyzed_file[:-3] + '.pdf') as plot_file:
            plotting.plot_scatter(gdacs, occupancy.sum(axis=(0, 1)), title='Single pixel hit rate at different thresholds', x_label='Threshold setting [GDAC]', y_label='Single pixel hit rate', log_x=True, filename=plot_file)
            if analysis_configuration['input_file_calibration']:
                with tb.openFile(analysis_configuration['input_file_calibration'], mode="r") as in_file_calibration_h5:  # read calibration file from calibrate_threshold_gdac scan
                    mean_threshold_calibration = in_file_calibration_h5.root.MeanThresholdCalibration[:]
                    threshold_calibration_array = in_file_calibration_h5.root.HistThresholdCalibration[:]
                    gdac_range_calibration = np.array(in_file_calibration_h5.root.HistThresholdCalibration._v_attrs.scan_parameter_values)
                    gdac_range_source_scan = gdacs

                    # Select data that is within the given GDAC range, (min_gdac, max_gdac)
                    sel = np.where(np.logical_and(gdac_range_source_scan >= analysis_configuration['min_gdac'], gdac_range_source_scan <= analysis_configuration['max_gdac']))[0]
                    gdac_range_source_scan = gdac_range_source_scan[sel]
                    occupancy = occupancy[:, :, sel]
                    sel = np.where(np.logical_and(gdac_range_calibration >= analysis_configuration['min_gdac'], gdac_range_calibration <= analysis_configuration['max_gdac']))[0]
                    gdac_range_calibration = gdac_range_calibration[sel]
                    threshold_calibration_array = threshold_calibration_array[:, :, sel]

                    logging.info('Analyzing source scan data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d', len(gdac_range_source_scan), np.min(gdac_range_source_scan), np.max(gdac_range_source_scan), np.min(np.gradient(gdac_range_source_scan)), np.max(np.gradient(gdac_range_source_scan)))
                    logging.info('Use calibration data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d', len(gdac_range_calibration), np.min(gdac_range_calibration), np.max(gdac_range_calibration), np.min(np.gradient(gdac_range_calibration)), np.max(np.gradient(gdac_range_calibration)))

                    # rate_normalization of the total hit number for each GDAC setting
                    rate_normalization = 1.
                    if analysis_configuration['normalize_rate']:
                        rate_normalization = analysis_utils.get_rate_normalization(hit_file=hit_file, cluster_file=hit_file, parameter='GDAC', reference=analysis_configuration['normalization_reference'], plot=analysis_configuration['plot_normalization'])

                    # correcting the hit numbers for the different cluster sizes
                    correction_factors = 1.
                    if analysis_configuration['use_cluster_rate_correction']:
                        correction_h5 = tb.openFile(cluster_sizes_file, mode="r")
                        cluster_size_histogram = correction_h5.root.AllHistClusterSize[:]
                        correction_factors = analysis_utils.get_hit_rate_correction(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_source_scan, cluster_size_histogram=cluster_size_histogram)
                        if analysis_configuration['plot_cluster_sizes']:
                            plot_cluster_sizes(correction_h5, in_file_calibration_h5, gdac_range=gdac_range_source_scan)

                    pixel_thresholds = analysis_utils.get_pixel_thresholds_from_calibration_array(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_calibration, threshold_calibration_array=threshold_calibration_array)  # interpolates the threshold at the source scan GDAC setting from the calibration
                    pixel_hits = occupancy  # create hit array with shape (col, row, ...)
                    pixel_hits = pixel_hits * correction_factors * rate_normalization

                    # choose region with pixels that have a sufficient occupancy but are not too hot
                    good_pixel = analysis_utils.select_good_pixel_region(pixel_hits, col_span=analysis_configuration['col_span'], row_span=analysis_configuration['row_span'], min_cut_threshold=analysis_configuration['min_cut_threshold'], max_cut_threshold=analysis_configuration['max_cut_threshold'])
                    pixel_mask = ~np.ma.getmaskarray(good_pixel)
                    selected_pixel_hits = pixel_hits[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
                    selected_pixel_thresholds = pixel_thresholds[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
                    plotting.plot_occupancy(good_pixel.T, title='Selected pixel for analysis (' + str(len(selected_pixel_hits)) + ')', filename=plot_file)

                    # reshape to one dimension
                    x = selected_pixel_thresholds.flatten()
                    y = selected_pixel_hits.flatten()

                    # nothing should be NAN/INF, NAN/INF is not supported yet
                    if np.isfinite(x).shape != x.shape or np.isfinite(y).shape != y.shape:
                        logging.warning('There are pixels with NaN or INF threshold or hit values, analysis will fail')

                    # calculated profile histogram
                    x_p, y_p, y_p_e = analysis_utils.get_profile_histogram(x, y, n_bins=analysis_configuration['n_bins'])  # profile histogram data

                    # select only the data point where the calibration worked
                    selected_data = np.logical_and(x_p > analysis_configuration['min_thr'] / analysis_configuration['vcal_calibration'], x_p < analysis_configuration['max_thr'] / analysis_configuration['vcal_calibration'])
                    x_p = x_p[selected_data]
                    y_p = y_p[selected_data]
                    y_p_e = y_p_e[selected_data]

                    if len(y_p_e[y_p_e == 0]) != 0:
                        logging.warning('There are bins without any data, guessing the error bars')
                        # replace zero errors with the smallest non-zero error to keep 1/y_p_e finite below
                        y_p_e[y_p_e == 0] = np.amin(y_p_e[y_p_e != 0])

                    smoothed_data = analysis_utils.smooth_differentiation(x_p, y_p, weigths=1 / y_p_e, order=3, smoothness=analysis_configuration['smoothness'], derivation=0)
                    smoothed_data_diff = analysis_utils.smooth_differentiation(x_p, y_p, weigths=1 / y_p_e, order=3, smoothness=analysis_configuration['smoothness'], derivation=1)

                    with tb.openFile(data_analyzed_file[:-3] + '_result.h5', mode="w") as out_file_h5:
                        result_1 = np.rec.array(np.column_stack((x_p, y_p, y_p_e)), dtype=[('charge', float), ('count', float), ('count_error', float)])
                        result_2 = np.rec.array(np.column_stack((x_p, smoothed_data)), dtype=[('charge', float), ('count', float)])
                        # negative derivative of the integrated rate gives the differential charge spectrum
                        result_3 = np.rec.array(np.column_stack((x_p, -smoothed_data_diff)), dtype=[('charge', float), ('count', float)])
                        out_1 = out_file_h5.create_table(out_file_h5.root, name='ProfileHistogram', description=result_1.dtype, title='Single pixel count rate combined with a profile histogram', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        out_2 = out_file_h5.create_table(out_file_h5.root, name='ProfileHistogramSpline', description=result_2.dtype, title='Single pixel count rate combined with a profile histogram and spline smoothed', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        out_3 = out_file_h5.create_table(out_file_h5.root, name='ChargeHistogram', description=result_3.dtype, title='Charge histogram with threshold method and per pixel calibration', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                        # store the full analysis configuration with each result table for reproducibility
                        for key, value in analysis_configuration.iteritems():
                            out_1.attrs[key] = value
                            out_2.attrs[key] = value
                            out_3.attrs[key] = value
                        out_1.append(result_1)
                        out_2.append(result_2)
                        out_3.append(result_3)

                    plot_result(x_p, y_p, y_p_e, smoothed_data, smoothed_data_diff)

                    # calculate and plot mean results
                    x_mean = analysis_utils.get_mean_threshold_from_calibration(gdac_range_source_scan, mean_threshold_calibration)
                    y_mean = selected_pixel_hits.mean(axis=(0))
                    plotting.plot_scatter(np.array(gdac_range_source_scan), y_mean, log_x=True, plot_range=None, title='Mean single pixel cluster rate at different thresholds', x_label='threshold setting [GDAC]', y_label='mean single pixel cluster rate', filename=plot_file)
                    plotting.plot_scatter(x_mean * analysis_configuration['vcal_calibration'], y_mean, plot_range=(analysis_configuration['min_thr'], analysis_configuration['max_thr']), title='Mean single pixel cluster rate at different thresholds', x_label='mean threshold [e]', y_label='mean single pixel cluster rate', filename=plot_file)

            if analysis_configuration['use_cluster_rate_correction']:
                correction_h5.close()
def analyze(self):
    """Interpret the raw data, mask the `self.mask_high_count` pixels with the highest
    occupancy (above `self.low_value`) as noisy, and fold the resulting mask into the
    chip's pixel registers.

    Side effects: sets `self.occ_mask` / `self.inv_occ_mask`, updates the pixel
    registers listed in `self.disable_for_mask` and `self.enable_for_mask`
    (overwriting or AND/OR-combining depending on `self.overwrite_mask`), and
    appends diagnostic plots to the analysis PDF.
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_cluster_size_hist = False
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.create_cluster_tot_hist = False
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        occ_hist = analyze_raw_data.out_file_h5.root.HistOcc[:, :, 0].T
        self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # n largest elements
        n_largest_elements = np.sort(occ_hist[occ_hist > self.low_value])[-self.mask_high_count:]
        # noisy pixels are set to 1
        if n_largest_elements.shape[0] > 0:
            # every pixel at least as hot as the smallest of the n largest values is flagged
            self.occ_mask[occ_hist >= n_largest_elements[0]] = 1
        # make inverse
        self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
        if self.overwrite_mask:
            for mask in self.disable_for_mask:
                self.register.set_pixel_register_value(mask, self.inv_occ_mask)
        else:
            # keep pixels disabled that were already disabled: AND with the existing mask
            for mask in self.disable_for_mask:
                enable_mask = np.logical_and(self.inv_occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, enable_mask)
        if self.overwrite_mask:
            for mask in self.enable_for_mask:
                self.register.set_pixel_register_value(mask, self.occ_mask)
        else:
            # keep pixels flagged that were already flagged: OR with the existing mask
            for mask in self.enable_for_mask:
                disable_mask = np.logical_or(self.occ_mask, self.register.get_pixel_register_value(mask))
                self.register.set_pixel_register_value(mask, disable_mask)
        plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analyze_raw_data.output_pdf)
        # document the final state of every touched pixel register in the PDF
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
def analyze(self):
    """Interpret the raw data, flag every pixel whose occupancy exceeds
    `self.abs_occ_limit` as noisy, fold the mask into the chip's pixel registers and
    append diagnostic plots (including a measured-vs-expected Poisson hit-statistics
    comparison) to the analysis PDF.

    Side effects: sets `self.occ_mask` / `self.inv_occ_mask` and updates the pixel
    registers listed in `self.disable_for_mask` / `self.enable_for_mask`
    (overwriting or AND/OR-combining depending on `self.overwrite_mask`).
    """
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.create_hit_table = False
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.plot_histograms()
        analyze_raw_data.interpreter.print_summary()
        # get occupancy hist
        occ_hist = analyze_raw_data.out_file_h5.root.HistOcc[:, :, 0].T
        self.occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
        # noisy pixels are set to 1
        self.occ_mask[occ_hist > self.abs_occ_limit] = 1
        # make inverse
        self.inv_occ_mask = invert_pixel_mask(self.occ_mask)
        # generate masked occupancy hist (noisy pixels zeroed, for the statistics plot below)
        masked_occ_hist = occ_hist.copy()
        masked_occ_hist[self.occ_mask == 1] = 0
        if self.overwrite_mask:
            for mask in self.disable_for_mask:
                self.register.set_pixel_register_value(mask, self.inv_occ_mask)
        else:
            # AND with the existing mask so previously disabled pixels stay disabled
            for mask in self.disable_for_mask:
                enable_mask = self.register.get_pixel_register_value(mask)
                new_enable_mask = np.logical_and(self.inv_occ_mask, enable_mask)
                self.register.set_pixel_register_value(mask, new_enable_mask)
        if self.overwrite_mask:
            for mask in self.enable_for_mask:
                self.register.set_pixel_register_value(mask, self.occ_mask)
        else:
            # OR with the existing mask so previously flagged pixels stay flagged
            for mask in self.enable_for_mask:
                disable_mask = self.register.get_pixel_register_value(mask)
                new_disable_mask = np.logical_or(self.occ_mask, disable_mask)
                self.register.set_pixel_register_value(mask, new_disable_mask)
        plot_occupancy(self.occ_mask.T, title='Noisy Pixels', z_max=1, filename=analyze_raw_data.output_pdf)
        plot_fancy_occupancy(self.occ_mask.T, z_max=1, filename=analyze_raw_data.output_pdf)
        # document the final state of every touched pixel register in the PDF
        for mask in self.disable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        for mask in self.enable_for_mask:
            mask_name = self.register.pixel_registers[mask]['name']
            plot_occupancy(self.register.get_pixel_register_value(mask).T, title='%s Mask' % mask_name, z_max=1, filename=analyze_raw_data.output_pdf)
        # adding Poisson statistics plots
        fig = Figure()
        FigureCanvas(fig)
        ax = fig.add_subplot(111)
        ax.set_title("Hit statistics")
        hist, bin_edges = np.histogram(occ_hist, bins=np.arange(0, np.max(occ_hist) + 2, 1))
        try:
            # cut the x range at the 90% quantile of the measured occupancy
            _, idx = hist_quantiles(hist, [0.0, 0.9], return_indices=True)
        except IndexError:
            idx = [0, 1]
        # extend the range so the expected Poisson distribution (99.99% quantile) is fully visible
        bins = np.arange(0, np.maximum(bin_edges[idx[1]], stats.poisson.ppf(0.9999, mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1)) + 2, 1)
        ax.hist(occ_hist.flatten(), bins=bins, align='left', alpha=0.5, label="Measured occupancy before masking noisy pixels")
        ax.hist(masked_occ_hist.flatten(), bins=bins, align='left', alpha=0.5, label="Measured occupancy after masking noisy pixels")
        ax.bar(x=bins[:-1], height=stats.poisson.pmf(k=bins[:-1], mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1) * self.register.get_pixel_register_value("Enable").sum(), alpha=0.5, width=1.0, color="r", label="Expected occupancy (Poisson statistics)")
        # ax.hist(stats.poisson.rvs(mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1, size=self.register.get_pixel_register_value("Enable").sum()), bins=bins, align='left', alpha=0.5, label="Expected occupancy (Poisson statistics)")
        ax.set_xlabel('#Hits')
        ax.set_ylabel('#Pixels')
        ax.legend()
        analyze_raw_data.output_pdf.savefig(fig)
def analyze(self):
    """Write the final tuning result (Vthin_AltFine threshold, TDAC and Enable mask
    from the first/lowest point) to the front end, interpret the raw data, and append
    per-step occupancy/TDAC/mask plots plus Poisson hit-statistics comparisons to the
    analysis PDF for the last `self.plot_n_steps` tuning steps.
    """
    self.register.set_global_register_value("Vthin_AltFine", self.threshold[0])
    self.register.set_pixel_register_value('TDAC', self.new_tdac[0])
    self.register.set_pixel_register_value('Enable', self.new_enable_mask[0])  # use enable mask from the lowest point to mask bad pixels
    # write configuration to avaoid high current states
    commands = []
    commands.extend(self.register.get_commands("ConfMode"))
    commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltFine"]))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
    self.register_utils.send_commands(commands)
    with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
        analyze_raw_data.create_source_scan_hist = True
        analyze_raw_data.interpreter.set_warning_output(False)
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        last_step = None
        # iterate from the highest plotted step down to step 0; steps without data
        # (threshold is None) are skipped
        for step in range(self.plot_n_steps, -1, -1):
            if self.threshold[step] is not None:
                plot_occupancy(self.occupancy_hist[step].T, title='Occupancy at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), filename=analyze_raw_data.output_pdf)
                plot_fancy_occupancy(self.occupancy_hist[step].T, filename=analyze_raw_data.output_pdf)
                plot_occupancy(self.occupancy_mask[step].T, title='Noisy pixels at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=1, filename=analyze_raw_data.output_pdf)
                plot_fancy_occupancy(self.occupancy_mask[step].T, filename=analyze_raw_data.output_pdf)
                # TDAC is a 5-bit DAC, hence maximum=31 / bins=32
                plot_three_way(self.tdac[step].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
                plot_occupancy(self.tdac[step].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=31, filename=analyze_raw_data.output_pdf)
                plot_occupancy(self.enable_mask[step].T, title='Enable mask at Vthin_AltFine %d Step %d' % (self.threshold[step], self.tdac_step[step]), z_max=1, filename=analyze_raw_data.output_pdf)
                # adding Poisson statistics plots
                fig = Figure()
                FigureCanvas(fig)
                ax = fig.add_subplot(111)
                ax.set_title("Hit statistics")
                hist, bin_edges = np.histogram(self.occupancy_hist[step], bins=np.arange(0.0, np.max(self.occupancy_hist[step]) + 2, 1.0))
                try:
                    # cut the x range at the 90% quantile of the measured occupancy
                    _, idx = hist_quantiles(hist, [0.0, 0.9], return_indices=True)
                except IndexError:
                    idx = [0, 1]
                # extend the range so the expected Poisson distribution (99.99% quantile) is fully visible
                bins = np.arange(0, np.maximum(bin_edges[idx[1]], stats.poisson.ppf(0.9999, mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1)) + 2, 1)
                ax.hist(self.occupancy_hist[step].flatten(), bins=bins, align='left', alpha=0.5, label="Measured occupancy")
                ax.bar(x=bins[:-1], height=stats.poisson.pmf(k=bins[:-1], mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1) * self.enable_mask[step].sum(), alpha=0.5, width=1.0, color="r", label="Expected occupancy (Poisson statistics)")
                # ax.hist(stats.poisson.rvs(mu=self.occupancy_limit * self.n_triggers * self.consecutive_lvl1, size=self.enable_mask[step].sum()), bins=bins, align='left', alpha=0.5, label="Expected occupancy (Poisson statistics)")
                ax.set_xlabel('#Hits')
                ax.set_ylabel('#Pixels')
                ax.legend()
                analyze_raw_data.output_pdf.savefig(fig)
                last_step = step
        # after the loop `last_step` is the lowest step that had data (loop counts down)
        if last_step is not None:
            plot_three_way(self.new_tdac[last_step].T, title='Final TDAC after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
            plot_occupancy(self.new_tdac[last_step].T, title='Final TDAC after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), z_max=31, filename=analyze_raw_data.output_pdf)
            plot_occupancy(self.new_enable_mask[last_step].T, title='Final Enable mask after Vthin_AltFine %d Step %d' % (self.threshold[last_step], self.tdac_step[last_step]), z_max=1, filename=analyze_raw_data.output_pdf)
def analyze_beam_spot(scan_base, combine_n_readouts=1000, chunk_size=10000000, plot_occupancy_hists=False, output_pdf=None, output_file=None):
    ''' Determines the mean x and y beam spot position as a function of time. Therefore the data of a fixed number of read outs are combined ('combine_n_readouts'). The occupancy is determined for the given combined events and stored into a pdf file. At the end the beam x and y is plotted into a scatter plot with absolute positions in um.

    Parameters
    ----------
    scan_base: list of str
        scan base names (e.g.:  ['//data//SCC_50_fei4_self_trigger_scan_390', ]
    combine_n_readouts: int
        the number of read outs to combine (e.g. 1000)
    max_chunk_size: int
        the maximum chunk size used during read, if too big memory error occurs, if too small analysis takes longer
    output_pdf: PdfPages
        PdfPages file object, if none the plot is printed to screen
    output_file: str or None
        if given, the (time_stamp, x, y) result is also appended to this HDF5 file as a 'Beamspot' table

    Returns
    -------
    (time_stamp, x, y): three lists, one entry per combined-readout slice; x/y are mean
    occupancy-projection positions in pixel units (scaled to um only in the plot).
    '''
    time_stamp = []
    x = []
    y = []

    for data_file in scan_base:
        with tb.open_file(data_file + '_interpreted.h5', mode="r+") as in_hit_file_h5:
            # get data and data pointer
            meta_data_array = in_hit_file_h5.root.meta_data[:]
            hit_table = in_hit_file_h5.root.Hits

            # determine the event ranges to analyze (timestamp_start, start_event_number, stop_event_number)
            parameter_ranges = np.column_stack((analysis_utils.get_ranges_from_array(meta_data_array['timestamp_start'][::combine_n_readouts]), analysis_utils.get_ranges_from_array(meta_data_array['event_number'][::combine_n_readouts])))

            # create a event_numer index (important)
            analysis_utils.index_event_number(hit_table)

            # initialize the analysis and set settings
            analyze_data = AnalyzeRawData()
            analyze_data.create_tot_hist = False
            analyze_data.create_bcid_hist = False
            analyze_data.histogram.set_no_scan_parameter()

            # variables for read speed up
            index = 0  # index where to start the read out, 0 at the beginning, increased during looping
            best_chunk_size = chunk_size

            progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=hit_table.shape[0], term_width=80)
            progress_bar.start()

            # loop over the selected events
            for parameter_index, parameter_range in enumerate(parameter_ranges):
                logging.debug('Analyze time stamp ' + str(parameter_range[0]) + ' and data from events = [' + str(parameter_range[2]) + ',' + str(parameter_range[3]) + '[ ' + str(int(float(float(parameter_index) / float(len(parameter_ranges)) * 100.0))) + '%')
                analyze_data.reset()  # resets the data of the last analysis

                # loop over the hits in the actual selected events with optimizations: determine best chunk size, start word index given
                readout_hit_len = 0  # variable to calculate a optimal chunk size value from the number of hits for speed up
                for hits, index in analysis_utils.data_aligned_at_events(hit_table, start_event_number=parameter_range[2], stop_event_number=parameter_range[3], start_index=index, chunk_size=best_chunk_size):
                    analyze_data.analyze_hits(hits)  # analyze the selected hits in chunks
                    readout_hit_len += hits.shape[0]
                progress_bar.update(index)
                best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 * readout_hit_len) < chunk_size else chunk_size  # to increase the readout speed, estimated the number of hits for one read instruction

                # get and store results
                occupancy_array = analyze_data.histogram.get_occupancy()
                # project the 2D occupancy onto the column (x) and row (y) axes
                projection_x = np.sum(occupancy_array, axis=0).ravel()
                projection_y = np.sum(occupancy_array, axis=1).ravel()
                # 80 columns / 336 rows — FE-I4 pixel matrix dimensions
                x.append(analysis_utils.get_mean_from_histogram(projection_x, bin_positions=range(0, 80)))
                y.append(analysis_utils.get_mean_from_histogram(projection_y, bin_positions=range(0, 336)))
                time_stamp.append(parameter_range[0])
                if plot_occupancy_hists:
                    plotting.plot_occupancy(occupancy_array[:, :, 0], title='Occupancy for events between ' + time.strftime('%H:%M:%S', time.localtime(parameter_range[0])) + ' and ' + time.strftime('%H:%M:%S', time.localtime(parameter_range[1])), filename=output_pdf)
            progress_bar.finish()

    # 250 um / 50 um are the FE-I4 pixel pitches in x / y used to convert pixel index to um
    plotting.plot_scatter([i * 250 for i in x], [i * 50 for i in y], title='Mean beam position', x_label='x [um]', y_label='y [um]', marker_style='-o', filename=output_pdf)
    if output_file:
        with tb.open_file(output_file, mode="a") as out_file_h5:
            rec_array = np.array(zip(time_stamp, x, y), dtype=[('time_stamp', float), ('x', float), ('y', float)])
            try:
                beam_spot_table = out_file_h5.create_table(out_file_h5.root, name='Beamspot', description=rec_array, title='Beam spot position', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                beam_spot_table[:] = rec_array
            except tb.exceptions.NodeError:
                # table already exists from a previous run — keep the old data
                logging.warning(output_file + ' has already a Beamspot note, do not overwrite existing.')
    return time_stamp, x, y
def analyze_injected_charge(data_analyzed_file):
    """Analyze a GDAC threshold source scan: convert the per-pixel hit rate vs. GDAC
    into a rate vs. injected-charge profile using a per-pixel threshold calibration.

    Reads the occupancy histogram and GDAC scan parameters from *data_analyzed_file*,
    optionally normalizes/corrects the rates, interpolates per-pixel thresholds from
    the calibration file given in the module-level ``analysis_configuration``, and
    plots profile/mean-rate results.

    Parameters
    ----------
    data_analyzed_file : str
        Path to the interpreted scan HDF5 file (must end in '.h5'; unused here for
        output naming, only read).

    NOTE(review): relies on module-level names `analysis_configuration`, `hit_file`
    and `cluster_sizes_file` being defined elsewhere in this file — verify before reuse.

    Fix: the leftover debug statement ``print correction_factors`` (raw Python 2
    print to stdout) is replaced by a ``logging.debug`` call, consistent with the
    logging used everywhere else in this function.
    """
    logging.info('Analyze the injected charge')
    with tb.openFile(data_analyzed_file, mode="r") as in_file_h5:
        occupancy = in_file_h5.root.HistOcc[:]
        gdacs = analysis_utils.get_scan_parameter(in_file_h5.root.meta_data[:])['GDAC']
        with tb.openFile(analysis_configuration['input_file_calibration'], mode="r") as in_file_calibration_h5:  # read calibration file from calibrate_threshold_gdac scan
            mean_threshold_calibration = in_file_calibration_h5.root.MeanThresholdCalibration[:]
            threshold_calibration_array = in_file_calibration_h5.root.HistThresholdCalibration[:]
            gdac_range_calibration = mean_threshold_calibration['gdac']
            gdac_range_source_scan = gdacs

            logging.info('Analyzing source scan data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d' % (len(gdac_range_source_scan), np.min(gdac_range_source_scan), np.max(gdac_range_source_scan), np.min(np.gradient(gdac_range_source_scan)), np.max(np.gradient(gdac_range_source_scan))))
            logging.info('Use calibration data with %d GDAC settings from %d to %d with minimum step sizes from %d to %d' % (len(gdac_range_calibration), np.min(gdac_range_calibration), np.max(gdac_range_calibration), np.min(np.gradient(gdac_range_calibration)), np.max(np.gradient(gdac_range_calibration))))

            # rate_normalization of the total hit number for each GDAC setting
            rate_normalization = 1.
            if analysis_configuration['normalize_rate']:
                rate_normalization = analysis_utils.get_rate_normalization(hit_file=hit_file, cluster_file=hit_file, parameter='GDAC', reference=analysis_configuration['normalization_reference'], plot=analysis_configuration['plot_normalization'])

            # correcting the hit numbers for the different cluster sizes
            correction_factors = 1.
            if analysis_configuration['use_cluster_rate_correction']:
                correction_h5 = tb.openFile(cluster_sizes_file, mode="r")
                cluster_size_histogram = correction_h5.root.AllHistClusterSize[:]
                correction_factors = analysis_utils.get_hit_rate_correction(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_source_scan, cluster_size_histogram=cluster_size_histogram)
                if analysis_configuration['plot_cluster_sizes']:
                    plot_cluster_sizes(correction_h5, in_file_calibration_h5, gdac_range=gdac_range_source_scan)
            # was: `print correction_factors` — keep the diagnostic but route it through logging
            logging.debug('Cluster rate correction factors: %s', correction_factors)

            pixel_thresholds = analysis_utils.get_pixel_thresholds_from_calibration_array(gdacs=gdac_range_source_scan, calibration_gdacs=gdac_range_calibration, threshold_calibration_array=threshold_calibration_array)  # interpolates the threshold at the source scan GDAC setting from the calibration
            pixel_hits = np.swapaxes(occupancy, 0, 1)  # create hit array with shape (col, row, ...)
            pixel_hits = pixel_hits * correction_factors * rate_normalization

            # choose region with pixels that have a sufficient occupancy but are not too hot
            good_pixel = analysis_utils.select_good_pixel_region(pixel_hits, col_span=analysis_configuration['col_span'], row_span=analysis_configuration['row_span'], min_cut_threshold=analysis_configuration['min_cut_threshold'], max_cut_threshold=analysis_configuration['max_cut_threshold'])
            pixel_mask = ~np.ma.getmaskarray(good_pixel)
            selected_pixel_hits = pixel_hits[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
            selected_pixel_thresholds = pixel_thresholds[pixel_mask, :]  # reduce the data to pixels that are in the good pixel region
            plotting.plot_occupancy(good_pixel.T, title='Select ' + str(len(selected_pixel_hits)) + ' pixels for analysis')

            # reshape to one dimension
            x = selected_pixel_thresholds.flatten()
            y = selected_pixel_hits.flatten()

            # nothing should be NAN, NAN is not supported yet
            if np.isfinite(x).shape != x.shape or np.isfinite(y).shape != y.shape:
                logging.warning('There are pixels with NaN or INF threshold or hit values, analysis will fail')

            # calculated profile histogram
            x_p, y_p, y_p_e = analysis_utils.get_profile_histogram(x, y, n_bins=analysis_configuration['n_bins'])  # profile histogram data

            # select only the data point where the calibration worked
            selected_data = np.logical_and(x_p > analysis_configuration['min_thr'] / analysis_configuration['vcal_calibration'], x_p < analysis_configuration['max_thr'] / analysis_configuration['vcal_calibration'])
            x_p = x_p[selected_data]
            y_p = y_p[selected_data]
            y_p_e = y_p_e[selected_data]

            plot_result(x_p, y_p, y_p_e)

            # calculate and plot mean results
            x_mean = analysis_utils.get_mean_threshold_from_calibration(gdac_range_source_scan, mean_threshold_calibration)
            y_mean = selected_pixel_hits.mean(axis=(0))
            plotting.plot_scatter(np.array(gdac_range_source_scan), y_mean, log_x=True, plot_range=None, title='Mean single pixel cluster rate at different thresholds', x_label='threshold setting [GDAC]', y_label='mean single pixel cluster rate')
            plotting.plot_scatter(x_mean * analysis_configuration['vcal_calibration'], y_mean, plot_range=(analysis_configuration['min_thr'], analysis_configuration['max_thr']), title='Mean single pixel cluster rate at different thresholds', x_label='mean threshold [e]', y_label='mean single pixel cluster rate')

        if analysis_configuration['use_cluster_rate_correction']:
            correction_h5.close()