Example #1
    def get_livetimes(self, fi):
        with h5py.File(fi, 'r') as f:
            for ct in self.ctimes:
                # 'inclusive' time when at least the ifos specified by ct are on
                fgt = conv.sec_to_year(f[ct].attrs['foreground_time'])
                # index dict on chunk start time / coinc type
                self.incl_livetimes[(get_start_dur(fi)[0], ct)] = fgt
                # subtract times during which 1 more ifo was on,
                # i.e. subtract H1L1* time from H1L1; subtract H1* time from H1; etc.
                for combo in self.moreifotimes(ct):
                    if len(combo) == len(ct) + 2:
                        fgt -= conv.sec_to_year(
                            f[combo].attrs['foreground_time'])
                # index dict on chunk start time / coinc type
                self.livetimes[(get_start_dur(fi)[0], ct)] = fgt
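The loop above leans on a moreifotimes() helper that is not shown. A minimal sketch of what it plausibly does, assuming two-character ifo names (H1, L1, V1) and that self.ctimes holds every known coinc-type string; the real helper may differ:

    # Hypothetical sketch: return every known coinc type containing all the
    # ifos of `ct` plus at least one more, e.g. 'H1L1' -> ['H1L1V1'].
    # With 2-character ifo names, one extra ifo means
    # len(combo) == len(ct) + 2, matching the check above.
    def moreifotimes(self, ct):
        ifos = [ct[i:i + 2] for i in range(0, len(ct), 2)]
        return [c for c in self.ctimes
                if len(c) > len(ct) and all(i in c for i in ifos)]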
Example #2
    def calculate_ifar(self, newsnr, duration):
        if self.fit_info['fixed_ifar']:
            return self.fit_info['fixed_ifar']
        dur_bin = self.fit_info['bins'][duration]
        rate = self.fit_info['rates'][dur_bin]
        coeff = self.fit_info['coeffs'][dur_bin]
        rate_louder = rate * fits.cum_fit('exponential', [newsnr], coeff,
                                          self.fit_info['thresh'])[0]
        # apply a trials factor of the number of duration bins
        rate_louder *= len(self.fit_info['rates'])
        return conv.sec_to_year(1. / rate_louder)
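For context, here is a hypothetical fit_info mapping carrying exactly the keys the method reads; the values are illustrative only, and the 'bins' object is assumed to map a duration to a bin index (pycbc.bin_utils.IrregularBins behaves this way, as in Example #4):

    # Illustrative fit_info; keys are those read by calculate_ifar() above,
    # values are made up for the sketch.
    from pycbc import bin_utils

    fit_info = {
        'fixed_ifar': None,  # falsy, so the fitted estimate is used
        'bins': bin_utils.IrregularBins([0., 4., 16., 64.]),  # duration bins
        'rates': [1e-4, 5e-5, 2e-5],  # noise rate per duration bin (Hz)
        'coeffs': [4.5, 5.0, 5.5],    # exponential fit coefficient per bin
        'thresh': 6.,                 # fit threshold on the ranking statistic
    }
    # e.g. self.calculate_ifar(newsnr=9.0, duration=7.5) -> IFAR in years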
Example #3
    def add_background(self, full_file):
        start = get_start_dur(full_file)[0]
        self.get_livetimes(full_file)

        with h5py.File(full_file, 'r') as ff:
            # get stat values and threshold
            _bgstat = ff['background_exc/stat'][:]
            _keepstat = _bgstat > self.thr

            # get template ids and filter
            _bgtid = ff['background_exc/template_id'][:]
            # need the template filter to have already been made
            assert self.in_bin is not None
            _keep = np.logical_and(_keepstat, self.in_bin[_bgtid])
            _bgstat = _bgstat[_keep]
            _bgdec = ff['background_exc/decimation_factor'][:][_keep]

            # assign coinc types
            _times = {}
            for i in self.ifos:
                # NB times are time-shifted between ifos
                _times[i] = ff['background_exc/' + i + '/time'][:][_keep]
            _ctype = self.get_ctypes(_times)
            for cty in self.coinc_types:
                self.bg_vals[(start, cty)] = _bgstat[_ctype == cty]
                self.bg_dec[(start, cty)] = _bgdec[_ctype == cty]
                # get bg livetime for noise rate estimate
                # - convert to years
                self.bg_livetimes[(start, cty)] = conv.sec_to_year(
                    ff[cty].attrs['background_time_exc'])

                # make histogram
                bins = self.make_bins(np.max(_bgstat[_ctype == cty]), 'bg')
                # hack to make larger bins for H1L1V1
                if cty == 'H1L1V1':
                    if self.args.verbose:
                        print('Halving bg bins for triple bg hist')
                    bins = bins[::2].copy()  # take every 2nd bin edge
                self.bg_hist[(start, cty)] = \
                      np.histogram(_bgstat[_ctype == cty],
                                   weights=_bgdec[_ctype == cty], bins=bins)
                # get expected number of bg events for this chunk and coinc type
                self.exp_bg[(start, cty)] = _bgdec[_ctype == cty].sum() * \
                      self.incl_livetimes[(start, cty)] / \
                      self.bg_livetimes[(start, cty)]
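The get_ctypes() call above is also not shown; here is a rough sketch of the classification it presumably performs, labelling each event with the concatenated names of the participating ifos. The -1 sentinel marking an ifo absent from a coinc is an assumption, not confirmed by the snippet:

    # Hypothetical sketch of get_ctypes(): build one coinc-type string per
    # event by appending each ifo whose trigger time is valid (sorted, so
    # the result matches names like 'H1L1V1').
    def get_ctypes(self, times):
        nevents = len(next(iter(times.values())))
        ctype = np.array([''] * nevents, dtype=object)
        for ifo in sorted(times):
            present = times[ifo] != -1  # assumed sentinel for 'ifo absent'
            ctype[present] = ctype[present] + ifo
        return ctype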
Example #4
    def calculate_ifar(self, sngl_ranking, duration):
        if self.fixed_ifar:
            return self.fixed_ifar[self.ifo]

        with h5py.File(self.fit_file, 'r') as fit_file:
            bin_edges = fit_file['bins_edges'][:]
            live_time = fit_file[self.ifo].attrs['live_time']
            thresh = fit_file.attrs['fit_threshold']

            dist_grp = fit_file[self.ifo][self.sngl_ifar_est_dist]
            rates = dist_grp['counts'][:] / live_time
            coeffs = dist_grp['fit_coeff'][:]

        bins = bin_utils.IrregularBins(bin_edges)
        dur_bin = bins[duration]

        rate = rates[dur_bin]
        coeff = coeffs[dur_bin]
        rate_louder = rate * fits.cum_fit('exponential', [sngl_ranking], coeff,
                                          thresh)[0]
        # apply a trials factor of the number of duration bins
        rate_louder *= len(rates)
        return conv.sec_to_year(1. / rate_louder)
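The reads in this version imply a particular fit-file layout, reconstructed below from the method body. All names are taken directly from the code except the distribution group, whose name is whatever self.sngl_ifar_est_dist holds (the example name is a guess):

    # HDF5 layout assumed by calculate_ifar() above:
    #   /bins_edges                 dataset: duration bin edges
    #   @fit_threshold              file attr: ranking-statistic fit threshold
    #   /<ifo>                      one group per detector, e.g. 'H1'
    #       @live_time              group attr: analysed live time (seconds)
    #       /<sngl_ifar_est_dist>   distribution group, e.g. 'conservative'
    #                               (example name only)
    #           /counts             dataset: counts above threshold per bin
    #           /fit_coeff          dataset: exponential fit coefficient per bin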