def compute(self, peaks):
    """Group triggering peaks into events.

    A peak triggers when its area exceeds ``trigger_min_area`` and it has
    at most ``trigger_max_competing`` competing peaks.  Nearby triggers
    are joined into one event, extended by the configured left/right
    event extensions.
    """
    left_ext = self.config['left_event_extension']
    right_ext = self.config['right_event_extension']

    big_enough = peaks['area'] > self.config['trigger_min_area']
    isolated = peaks['n_competing'] <= self.config['trigger_max_competing']
    triggers = peaks[big_enough & isolated]

    # Join nearby triggers
    starts, stops = strax.find_peak_groups(
        triggers,
        gap_threshold=left_ext + right_ext + 1,
        left_extension=left_ext,
        right_extension=right_ext)

    result = np.zeros(len(starts), self.dtype)
    result['time'] = starts
    result['endtime'] = stops
    result['event_number'] = np.arange(len(result)) + self.events_seen

    if not len(result):
        print("Found chunk without events?!")

    self.events_seen += len(result)
    return result
def compute(self, peaks, start, end):
    """Group triggering peaks into events, clipped to the chunk bounds.

    A peak triggers when its area exceeds ``trigger_min_area`` and it has
    at most ``trigger_max_competing`` competing peaks.  Nearby triggers
    are joined into one event with the configured extensions, then the
    event boundaries are clipped to [start, end].
    """
    left_ext = self.config['left_event_extension']
    right_ext = self.config['right_event_extension']

    big_enough = peaks['area'] > self.config['trigger_min_area']
    isolated = peaks['n_competing'] <= self.config['trigger_max_competing']
    triggers = peaks[big_enough & isolated]

    # Join nearby triggers
    starts, stops = strax.find_peak_groups(
        triggers,
        gap_threshold=left_ext + right_ext + 1,
        left_extension=left_ext,
        right_extension=right_ext)

    # Don't extend beyond the chunk boundaries
    # This will often happen for events near the invalid boundary of the
    # overlap processing (which should be thrown away)
    starts = np.clip(starts, start, end)
    stops = np.clip(stops, start, end)

    result = np.zeros(len(starts), self.dtype)
    result['time'] = starts
    result['endtime'] = stops
    result['event_number'] = np.arange(len(result)) + self.events_seen

    if not len(result):
        print("Found chunk without events?!")

    self.events_seen += len(result)
    return result
def compute(self, peaklets):
    """Merge nearby S2 peaklets into merged S2 peaks.

    Returns an empty array when there are no peaklets, or when merging
    is disabled (``s2_merge_max_gap`` < 0).
    """
    if not len(peaklets):
        return peaklets[:0]

    max_gap = self.config['s2_merge_max_gap']
    if max_gap < 0:
        # Do not merge at all
        merged_s2s = np.zeros(0, dtype=peaklets.dtype)
    else:
        # Find all groups of peaklets separated by < the gap
        cluster_starts, cluster_stops = strax.find_peak_groups(
            peaklets, max_gap)

        merge_start, merge_stop = self.get_merge_instructions(
            peaklets['time'], strax.endtime(peaklets),
            areas=peaklets['area'],
            types=peaklets['type'],
            cluster_starts=cluster_starts,
            cluster_stops=cluster_stops,
            max_duration=self.config['s2_merge_max_duration'],
            max_area=self.config['s2_merge_max_area'])

        merged_s2s = strax.merge_peaks(
            peaklets, merge_start, merge_stop,
            max_buffer=int(self.config['s2_merge_max_duration']
                           // peaklets['dt'].min()))
        merged_s2s['type'] = 2
        strax.compute_widths(merged_s2s)

    return merged_s2s
def merge_vetos(channels, gap, dtype, t):
    """Combine veto intervals closer together than ``gap`` into one.

    :param channels: intervals (strax-style records) to merge
    :param gap: gap threshold passed to strax.find_peak_groups
    :param dtype: dtype of the result; must have 'time' and 'endtime'
    :param t: currently unused (kept for interface compatibility)
    :returns: array of merged intervals; empty when no input is given
    """
    if not len(channels):
        return np.zeros(0, dtype=dtype)

    starts, stops = strax.find_peak_groups(channels, gap_threshold=gap)
    merged = np.zeros(len(starts), dtype=dtype)
    merged['time'] = starts
    merged['endtime'] = stops
    return merged
def compute(self, peaklets):
    """Merge nearby S2 peaklets into merged S2 peaks.

    If no merged S2s result, emits one fake placeholder peak (type
    ``FAKE_MERGED_S2_TYPE``) so downstream strax processing does not
    stall; it is removed later.
    """
    if not len(peaklets):
        return peaklets[:0]

    max_gap = self.config['s2_merge_max_gap']
    if max_gap < 0:
        # Do not merge at all
        merged_s2s = np.zeros(0, dtype=peaklets.dtype)
    else:
        # Find all groups of peaklets separated by < the gap
        cluster_starts, cluster_stops = strax.find_peak_groups(
            peaklets, max_gap)

        merge_start, merge_stop = self.get_merge_instructions(
            peaklets['time'], strax.endtime(peaklets),
            areas=peaklets['area'],
            types=peaklets['type'],
            cluster_starts=cluster_starts,
            cluster_stops=cluster_stops,
            max_duration=self.config['s2_merge_max_duration'],
            max_area=self.config['s2_merge_max_area'])

        merged_s2s = strax.merge_peaks(
            peaklets, merge_start, merge_stop,
            max_buffer=int(self.config['s2_merge_max_duration']
                           // peaklets['dt'].min()))
        merged_s2s['type'] = 2
        strax.compute_widths(merged_s2s)

    if not len(merged_s2s):
        # Strax does not handle the case of no merged S2s well
        # If there are none in the entire dataset, it will just keep
        # waiting in Peaks forever.
        # Thus, this ugly hack of passing a single fake merged S2
        # in the middle of the chunk, which is removed later
        # (NOTE(review): the time below is the midpoint of the first
        # peaklet, not of the chunk — presumably intentional; confirm.)
        merged_s2s = np.zeros(1, merged_s2s.dtype)
        fake = merged_s2s[0]
        fake['type'] = FAKE_MERGED_S2_TYPE
        fake['time'] = (peaklets[0]['time']
                        + strax.endtime(peaklets[0])) / 2
        fake['dt'] = 1

    return merged_s2s
def compute(self, peaks, start, end):
    """Group triggering peaks into events, clipped to the chunk bounds.

    Triggering requires sufficient area and few competing peaks.
    Depending on configuration, S1s are either excluded outright
    (only type-2 peaks trigger) or allowed when their tight
    coincidence is high enough.
    """
    is_trigger = peaks['area'] > self.config['trigger_min_area']
    is_trigger &= (
        peaks['n_competing'] <= self.config['trigger_max_competing'])

    if self.config['exclude_s1_as_triggering_peaks']:
        # Only S2s (type 2) may trigger
        is_trigger &= peaks['type'] == 2
    else:
        # S1s may trigger too, but only with enough tight coincidence
        not_s1 = peaks['type'] != 1
        coincident = (peaks['tight_coincidence']
                      >= self.config['event_s1_min_coincidence'])
        is_trigger &= not_s1 | coincident

    triggers = peaks[is_trigger]

    # Join nearby triggers
    starts, stops = strax.find_peak_groups(
        triggers,
        gap_threshold=self.left_extension + self.right_extension + 1,
        left_extension=self.left_extension,
        right_extension=self.right_extension)

    # Don't extend beyond the chunk boundaries
    # This will often happen for events near the invalid boundary of the
    # overlap processing (which should be thrown away)
    starts = np.clip(starts, start, end)
    stops = np.clip(stops, start, end)

    result = np.zeros(len(starts), self.dtype)
    result['time'] = starts
    result['endtime'] = stops
    result['event_number'] = np.arange(len(result)) + self.events_seen

    if not len(result):
        print("Found chunk without events?!")

    self.events_seen += len(result)
    return result
def compute(self, peaks):
    """Group triggering peaks into events of bounded duration.

    Like the basic event builder, but passes ``max_event_duration``
    to strax.find_peak_groups so events cannot grow without bound.
    """
    left_ext = self.config['left_event_extension']
    right_ext = self.config['right_event_extension']

    selection = peaks['area'] > self.config['trigger_min_area']
    selection &= (
        peaks['n_competing'] <= self.config['trigger_max_competing'])
    triggers = peaks[selection]

    # Join nearby triggers
    starts, stops = strax.find_peak_groups(
        triggers,
        gap_threshold=left_ext + right_ext + 1,
        left_extension=left_ext,
        right_extension=right_ext,
        max_duration=self.config['max_event_duration'])

    result = np.zeros(len(starts), dtype=self.dtype)
    result['time'] = starts
    result['endtime'] = stops
    result['event_number'] = np.arange(len(result)) + self.events_seen
    self.events_seen += len(result)
    return result
def software_he_veto(records, to_pe,
                     area_threshold=int(1e5),
                     veto_length=int(3e6),
                     veto_res=int(1e3),
                     pass_veto_fraction=0.01,
                     pass_veto_extend=3):
    """Veto veto_length (time in ns) after peaks larger than
    area_threshold (in PE).

    Further large peaks inside the veto regions are still passed:
    We sum the waveform inside the veto region (with time resolution
    veto_res in ns) and pass regions within pass_veto_extend samples
    of samples with amplitude above pass_veto_fraction times the maximum.

    :returns: (preserved records, vetoed records, veto intervals).

    :param records: PMT records
    :param to_pe: ADC to PE conversion factors for the channels in records.
    :param area_threshold: Minimum peak area to trigger the veto.
        Note we use a much rougher clustering than in later processing.
    :param veto_length: Time in ns to veto after the peak
    :param veto_res: Resolution of the sum waveform inside the veto region.
        Do not make too large without increasing integer type in some strax
        dtypes...
    :param pass_veto_fraction: fraction of maximum sum waveform amplitude to
        trigger veto passing of further peaks
    :param pass_veto_extend: samples to extend (left and right) the pass
        veto regions.
    """
    veto_res = int(veto_res)
    if veto_res > np.iinfo(np.int16).max:
        raise ValueError("Veto resolution does not fit 16-bit int")
    # Round the veto length up to a whole number of resolution bins.
    # Fix: np.int was deprecated in numpy 1.20 and removed in 1.24;
    # use an explicit fixed-width integer type instead.
    veto_length = np.ceil(veto_length / veto_res).astype(np.int64) * veto_res
    veto_n = int(veto_length / veto_res) + 1

    # 1. Find large peaks in the data.
    # This will actually return big agglomerations of peaks and their tails
    peaks = strax.find_peaks(
        records, to_pe,
        gap_threshold=1,
        left_extension=0,
        right_extension=0,
        min_channels=100,
        min_area=area_threshold,
        result_dtype=strax.peak_dtype(n_channels=len(to_pe),
                                      n_sum_wv_samples=veto_n))

    # 2. Find initial veto regions around these peaks
    # (with a generous right extension)
    veto_start, veto_end = strax.find_peak_groups(
        peaks,
        gap_threshold=veto_length + 2 * veto_res,
        right_extension=veto_length,
        left_extension=veto_res)
    veto_end = veto_end.clip(0, strax.endtime(records[-1]))
    # Note: veto_length is reused here as the per-region duration array
    veto_length = veto_end - veto_start
    # dtype is like record (since we want to use hit-finding etc.)
    # but with float32 waveform
    regions = np.zeros(
        len(veto_start),
        dtype=strax.interval_dtype + [
            ("data", (np.float32, veto_n)),
            ("baseline", np.float32),
            ("reduction_level", np.int64),
            ("record_i", np.int64),
            ("pulse_length", np.int64),
        ])
    regions['time'] = veto_start
    regions['length'] = veto_length
    regions['pulse_length'] = veto_length
    regions['dt'] = veto_res

    if not len(regions):
        # No veto anywhere in this data
        return records, records[:0], np.zeros(0, strax.hit_dtype)

    # 3. Find pass_veto regions with big peaks inside the veto regions.
    # For this we compute a rough sum waveform (at low resolution,
    # without looping over the pulse data)
    rough_sum(regions, records, to_pe, veto_n, veto_res)
    # Normalize each region's waveform to its own maximum, so that
    # pass_veto_fraction is a relative threshold per region
    regions['data'] /= np.max(regions['data'], axis=1)[:, np.newaxis]
    pass_veto = strax.find_hits(regions, threshold=pass_veto_fraction)

    # 4. Extend these by a few samples and inverse to find veto regions
    regions['data'] = 1
    regions = strax.cut_outside_hits(
        regions,
        pass_veto,
        left_extension=pass_veto_extend,
        right_extension=pass_veto_extend)
    regions['data'] = 1 - regions['data']
    veto = strax.find_hits(regions, threshold=0.5)
    # Do not remove very tiny regions
    veto = veto[veto['length'] > 2 * pass_veto_extend]

    # 5. Apply the veto and return results
    veto_mask = strax.fully_contained_in(records, veto) == -1
    return tuple(list(_mask_and_not(records, veto_mask)) + [veto])