def covers(self, proposal, min_match): """ Return True if any template in the bank has match with proposal greater than min_match. """ # find templates in the bank "near" this tmplt prop_nhd = getattr(proposal, self.nhood_param) low, high = _find_neighborhood(self._nhoods, prop_nhd, self.nhood_size) tmpbank = self._templates[low:high] if not tmpbank: return False # sort the bank by its nearness to tmplt in mchirp # NB: This sort comes up as a dominating cost if you profile, # but it cuts the number of match evaluations by 80%, so turns out # to be worth it even for metric match, where matches are cheap. tmpbank.sort(key=lambda b: abs( getattr(b, self.nhood_param) - prop_nhd)) # set parameters of match calculation that are optimized for this block df_end, f_max = get_neighborhood_df_fmax(tmpbank + [proposal], self.flow) df_start = max(df_end, self.iterative_match_df_max) # find and test matches for tmplt in tmpbank: self._nmatch += 1 df = df_start match_last = 0 if self.coarse_match_df: # Perform a match at high df to see if point can be quickly # ruled out as already covering the proposal PSD = get_PSD(self.coarse_match_df, self.flow, f_max, self.noise_model) match = self.compute_match(tmplt, proposal, self.coarse_match_df, PSD=PSD) if (1 - match) > 4.0*(1 - min_match): continue while df >= df_end: PSD = get_PSD(df, self.flow, f_max, self.noise_model) match = self.compute_match(tmplt, proposal, df, PSD=PSD) # if the result is a really bad match, trust it isn't # misrepresenting a good match if (1 - match) > 4.0*(1 - min_match): break # calculation converged if match_last > 0 and abs(match_last - match) < 0.001: break # otherwise, refine calculation match_last = match df /= 2.0 if match > min_match: return True return False
def covers(self, proposal, min_match, nhood=None):
    """
    Return (max_match, template) where max_match is either (i) the best
    found match if max_match < min_match or (ii) the match of the first
    template found with match >= min_match. template is the Template()
    object which yields max_match.

    Parameters
    ----------
    proposal : template-like object
        Candidate template; must expose the attribute named by
        self.nhood_param and be accepted by self.compute_match.
    min_match : float
        Match threshold in (0, 1].
    nhood : list or None, optional
        Explicit list of templates to test against. If falsy (default),
        the neighborhood is looked up in the bank via _find_neighborhood.

    Returns
    -------
    (float, Template or None)
        Best match found and the corresponding template, or (0, None)
        if the neighborhood is empty.

    Raises
    ------
    ValueError
        If a computed match is exactly 0, indicating df was too coarse.
    """
    max_match = 0
    template = None

    # find templates in the bank "near" this tmplt
    prop_nhd = getattr(proposal, self.nhood_param)
    if not nhood:
        low, high = _find_neighborhood(self._nhoods, prop_nhd,
                                       self.nhood_size)
        tmpbank = self._templates[low:high]
    else:
        tmpbank = nhood
    if not tmpbank:
        return (max_match, template)

    # sort the bank by its nearness to tmplt in mchirp
    # NB: This sort comes up as a dominating cost if you profile,
    # but it cuts the number of match evaluations by 80%, so turns out
    # to be worth it even for metric match, where matches are cheap.
    tmpbank.sort(key=lambda b: abs(
        getattr(b, self.nhood_param) - prop_nhd))

    # set parameters of match calculation that are optimized for this block
    df_end, f_max = get_neighborhood_df_fmax(tmpbank + [proposal], self.flow)
    if self.fhigh_max:
        f_max = min(f_max, self.fhigh_max)
    df_start = max(df_end, self.iterative_match_df_max)

    # find and test matches
    for tmplt in tmpbank:
        self._nmatch += 1
        df = df_start
        match_last = 0

        if self.coarse_match_df:
            # Perform a match at high df to see if point can be quickly
            # ruled out as already covering the proposal
            PSD = get_PSD(self.coarse_match_df, self.flow, f_max,
                          self.noise_model)
            match = self.compute_match(tmplt, proposal,
                                       self.coarse_match_df, PSD=PSD)
            if match == 0:
                err_msg = "Match is 0. This might indicate that you have "
                err_msg += "the df value too high. Please try setting the "
                err_msg += "coarse-value-df value lower."
                # FIXME: This could be dealt with dynamically??
                raise ValueError(err_msg)

            # BUGFIX: record the coarse match before possibly skipping this
            # template; otherwise templates rejected by the coarse cut never
            # contribute to max_match and the function can return (0, None)
            # even though matches were computed, contradicting the docstring.
            if match > max_match:
                max_match = match
                template = tmplt

            if (1 - match) > 0.05 + (1 - min_match):
                continue

        # iteratively halve df until the match estimate converges
        while df >= df_end:
            PSD = get_PSD(df, self.flow, f_max, self.noise_model)
            match = self.compute_match(tmplt, proposal, df, PSD=PSD)
            if match == 0:
                err_msg = "Match is 0. This might indicate that you have "
                err_msg += "the df value too high. Please try setting the "
                err_msg += "iterative-match-df-max value lower."
                # FIXME: This could be dealt with dynamically??
                raise ValueError(err_msg)

            # if the result is a really bad match, trust it isn't
            # misrepresenting a good match
            if (1 - match) > 0.05 + (1 - min_match):
                break

            # calculation converged
            if match_last > 0 and abs(match_last - match) < 0.001:
                break

            # otherwise, refine calculation
            match_last = match
            df /= 2.0

        if match > min_match:
            return (match, tmplt)

        # record match and template params for highest match
        if match > max_match:
            max_match = match
            template = tmplt

    return (max_match, template)
    else:
        # fhigh-max was not given on the command line; fall back to the
        # highest frequency available in the reference PSD
        print >> sys.stderr, "Warning: fhigh-max not specified, using maximum frequency in the PSD (%.3f Hz)" \
            % f_max_orig
        opts.fhigh_max = float(f_max_orig)

    # interpolate log(PSD) so the noise model can be evaluated at any df
    interpolator = UnivariateSpline(f_orig, np.log(psd.data), s=0)

    # spline extrapolation may lead to unexpected results,
    # so set the PSD to infinity above the max original frequency
    noise_model = lambda g: np.where(g < f_max_orig, np.exp(interpolator(g)), np.inf)
else:
    # no reference PSD supplied: use a named analytic noise model
    noise_model = noise_models[opts.noise_model]

# Set up PSD for metric computation
# calling into pylal, so need pylal types
# NOTE(review): 1570. Hz appears to be a hard-coded metric f_max — confirm
psd = REAL8FrequencySeries(name="psd", f0=0., deltaF=1.,
    data=get_PSD(1., opts.flow, 1570., noise_model))

#
# seed the bank, if applicable
#
if opts.bank_seed is None:
    # seed the process with an empty bank
    # the first proposal will always be accepted
    bank = Bank(waveform, noise_model, opts.flow, opts.use_metric,
        opts.cache_waveforms, opts.neighborhood_size,
        opts.neighborhood_param, coarse_match_df=opts.coarse_match_df,
        iterative_match_df_max=opts.iterative_match_df_max,
        fhigh_max=opts.fhigh_max)
else:
    # seed bank with input bank. we do not prune the bank
    # for overcoverage, but take it as is
    tmpdoc = utils.load_filename(opts.bank_seed,
        contenthandler=ContentHandler)
    sngl_inspiral = table.get_table(tmpdoc,
        lsctables.SnglInspiralTable.tableName)
    bank = Bank.from_sngls(sngl_inspiral, waveform, noise_model, opts.flow,
        opts.use_metric, opts.cache_waveforms, opts.neighborhood_size,
        opts.neighborhood_param, coarse_match_df=opts.coarse_match_df,
        iterative_match_df_max=opts.iterative_match_df_max,
        fhigh_max=opts.fhigh_max)
def covers(self, proposal, min_match, nhood=None):
    """
    Return (max_match, template) where max_match is either (i) the best
    found match if max_match < min_match or (ii) the match of the first
    template found with match >= min_match. template is the Template()
    object which yields max_match.

    Parameters
    ----------
    proposal : template-like object
        Candidate template; must expose the attribute named by
        self.nhood_param and be accepted by self.compute_match.
    min_match : float
        Match threshold in (0, 1].
    nhood : list or None, optional
        Explicit list of templates to test against. If falsy (default),
        the neighborhood is looked up in the bank via _find_neighborhood.

    Raises
    ------
    ValueError
        If a computed match is exactly 0, indicating df was too coarse.
    """
    max_match = 0
    template = None

    # find templates in the bank "near" this tmplt
    prop_nhd = getattr(proposal, self.nhood_param)
    if not nhood:
        low, high = _find_neighborhood(self._nhoods, prop_nhd,
                                       self.nhood_size)
        tmpbank = self._templates[low:high]
    else:
        tmpbank = nhood
    if not tmpbank:
        # empty neighborhood: no match possible
        return (max_match, template)

    # sort the bank by its nearness to tmplt in mchirp
    # NB: This sort comes up as a dominating cost if you profile,
    # but it cuts the number of match evaluations by 80%, so turns out
    # to be worth it even for metric match, where matches are cheap.
    tmpbank.sort(
        key=lambda b: abs(getattr(b, self.nhood_param) - prop_nhd))

    # set parameters of match calculation that are optimized for this block
    df_end, f_max = get_neighborhood_df_fmax(tmpbank + [proposal], self.flow)
    if self.fhigh_max:
        f_max = min(f_max, self.fhigh_max)
    df_start = max(df_end, self.iterative_match_df_max)

    # find and test matches
    for tmplt in tmpbank:
        self._nmatch += 1
        df = df_start
        match_last = 0

        if self.coarse_match_df:
            # Perform a match at high df to see if point can be quickly
            # ruled out as already covering the proposal
            PSD = get_PSD(self.coarse_match_df, self.flow, f_max,
                          self.noise_model)
            match = self.compute_match(tmplt, proposal,
                                       self.coarse_match_df, PSD=PSD)
            if match == 0:
                err_msg = "Match is 0. This might indicate that you have "
                err_msg += "the df value too high. Please try setting the "
                err_msg += "coarse-value-df value lower."
                # FIXME: This could be dealt with dynamically??
                raise ValueError(err_msg)

            # record match and template params for highest match
            if match > max_match:
                max_match = match
                template = tmplt

            # coarse mismatch exceeds threshold by > 0.05: skip template
            if (1 - match) > 0.05 + (1 - min_match):
                continue

        # iteratively halve df until the match estimate converges
        while df >= df_end:
            PSD = get_PSD(df, self.flow, f_max, self.noise_model)
            match = self.compute_match(tmplt, proposal, df, PSD=PSD)
            if match == 0:
                err_msg = "Match is 0. This might indicate that you have "
                err_msg += "the df value too high. Please try setting the "
                err_msg += "iterative-match-df-max value lower."
                # FIXME: This could be dealt with dynamically??
                raise ValueError(err_msg)

            # record match and template params for highest match
            if match > max_match:
                max_match = match
                template = tmplt

            # if the result is a really bad match, trust it isn't
            # misrepresenting a good match
            if (1 - match) > 0.05 + (1 - min_match):
                break

            # calculation converged
            if match_last > 0 and abs(match_last - match) < 0.001:
                break

            # otherwise, refine calculation
            match_last = match
            df /= 2.0

        # early exit: proposal is covered by this template
        if match > min_match:
            return (match, tmplt)

    return (max_match, template)
interpolator = UnivariateSpline(f_orig, np.log(psd.data), s=0) # spline extrapolation may lead to unexpected results, # so set the PSD to infinity above the max original frequency noise_model = lambda g: np.where(g < f_max_orig, np.exp(interpolator(g)), np.inf) else: noise_model = noise_models[opts.noise_model] # Set up PSD for metric computation # calling into pylal, so need pylal types psd = REAL8FrequencySeries(name="psd", f0=0., deltaF=1., data=get_PSD(1., opts.flow, 1570., noise_model)) # # seed the bank, if applicable # if opts.bank_seed is None: # seed the process with an empty bank # the first proposal will always be accepted bank = Bank(waveform, noise_model, opts.flow, opts.use_metric, opts.cache_waveforms, opts.neighborhood_size, opts.neighborhood_param, coarse_match_df=opts.coarse_match_df,
# This loads the edge list from the exported file # of the form: # [[0,1], [0, 2]... [2,3], [3,4]] print "Loading edge list..." edge_array = np.loadtxt("./edge_lists/edge_list_%s.ncol" % str(numTemplates)) edge_array = edge_array[generateFrom:generateTo] # Read in PSD and make it usable print "Reading PSD..." psd = read_psd('H1L1V1-REFERENCE_PSD-966386126-24805.xml.gz')['H1'] print "Preparing PSD..." f_orig = psd.f0 + np.arange(len(psd.data)) * psd.deltaF f_max_orig = max(f_orig) interpolator = UnivariateSpline(f_orig, np.log(psd.data), s=0) noise_model = lambda g: np.where(g < f_max_orig, np.exp(interpolator(g)), np.inf) PSD = get_PSD(1. / duration, f_low, f_high, noise_model) # Generate ASD print "Generating ASD" ASD = np.sqrt(PSD) print "Creating workspace..." # Create workspace for match calculation workspace_cache = CreateSBankWorkspaceCache() # Declare the array we are going to be using in match calculation fs = [0, 0] sigmasq = [0, 0] new = [0, 0] hplus = [0, 0] hcross = [0, 0]