def worker(channel):
    # Events from the background selections that landed in this channel.
    # NOTE: total_poly_energies, total_poly_events, t_start, t_stop,
    # poly_exposure and bayes are captured from the enclosing scope.
    mask_for_channel = total_poly_energies == channel
    events_in_channel = total_poly_events[mask_for_channel]

    # Fit this channel's arrival times with the globally chosen grade.
    fitted_poly, _ = unbinned_polyfit(events_in_channel,
                                      self._optimal_polynomial_grade,
                                      t_start,
                                      t_stop,
                                      poly_exposure,
                                      bayes=bayes)

    return fitted_poly
def _unbinned_fit_global_and_determine_optimum_grade(self, events, exposure):
    """
    Provides the ability to find the optimum polynomial grade for *unbinned*
    events by fitting the total (all channels) to 0-4 order polynomials and
    then comparing them via a likelihood ratio test.

    :param events: an event list
    :param exposure: the exposure per event
    :return: polynomial grade
    """

    # Candidate polynomial orders tried against the summed channels.
    min_grade, max_grade = 0, 4

    t_start = self._poly_intervals.start_times
    t_stop = self._poly_intervals.stop_times

    # Log-likelihood of each candidate order, in increasing order of grade.
    log_likelihoods = [
        unbinned_polyfit(events, grade, t_start, t_stop, exposure)[1]
        for grade in range(min_grade, max_grade + 1)
    ]

    # Likelihood-ratio statistic between each pair of consecutive orders.
    delta_loglike = np.array([
        2 * (lower - higher)
        for lower, higher in zip(log_likelihoods[:-1], log_likelihoods[1:])
    ])

    # Require an improvement of at least 9 in 2*log-likelihood before
    # accepting the higher order.
    delta_threshold = 9.0
    significant = (delta_loglike >= delta_threshold).nonzero()[0]

    if significant.size == 0:
        # No order beats a constant: best grade is zero.
        best_grade = 0
    else:
        # Highest order that still gave a significant improvement.
        best_grade = significant[-1] + 1

    return best_grade
def _unbinned_fit_global_and_determine_optimum_grade(self, events, exposure):
    """
    Provides the ability to find the optimum polynomial grade for *unbinned*
    events by fitting the total (all channels) to 0-4 order polynomials and
    then comparing them via a likelihood ratio test.

    :param events: an event list
    :param exposure: the exposure per event
    :return: polynomial grade
    """

    # Fit the sum of all the channels to determine the optimal polynomial
    # grade
    min_grade = 0
    max_grade = 4
    log_likelihoods = []

    t_start = self._poly_intervals.start_times
    t_stop = self._poly_intervals.stop_times

    for grade in range(min_grade, max_grade + 1):
        polynomial, log_like = unbinned_polyfit(events, grade, t_start,
                                                t_stop, exposure)

        log_likelihoods.append(log_like)

    # Found the best one.
    # BUG FIX: on Python 3 `map()` returns a lazy iterator, so the previous
    # np.array(map(...)) produced a 0-d object array wrapping the map object
    # and the `>=` comparison below did not operate elementwise. A list
    # comprehension materializes the deltas into a proper numeric array
    # (this also matches the sibling implementations of this method).
    delta_loglike = np.array([
        2 * (x[0] - x[1])
        for x in zip(log_likelihoods[:-1], log_likelihoods[1:])
    ])

    # Require an improvement of at least 9 in 2*log-likelihood before
    # accepting a higher order.
    delta_threshold = 9.0

    mask = (delta_loglike >= delta_threshold)

    if (len(mask.nonzero()[0]) == 0):

        # best grade is zero!
        best_grade = 0

    else:

        best_grade = mask.nonzero()[0][-1] + 1

    return best_grade
def _unbinned_fit_polynomials(self):
    """
    Fit an unbinned background polynomial to every channel over the
    currently selected background intervals and store the per-channel
    results in ``self._polynomials``.
    """

    self._poly_fit_exists = True

    # Record the kind of background fit that was performed.
    self._fit_method_info['bin type'] = 'Unbinned'
    self._fit_method_info['fit method'] = threeML_config['event list'][
        'unbinned fit method']

    # Build one boolean mask per background selection over the arrival
    # times, accumulating the total duration and exposure along the way.
    selection_masks = []

    total_duration = 0.
    poly_exposure = 0

    for interval in self._poly_intervals:
        total_duration += interval.duration

        poly_exposure += self.exposure_over_interval(interval.start_time,
                                                     interval.stop_time)

        selection_masks.append(
            np.logical_and(self._arrival_times >= interval.start_time,
                           self._arrival_times <= interval.stop_time))

    # OR the per-selection masks into a single background mask
    # (the loop is a no-op when there is only one selection).
    poly_mask = selection_masks[0]
    for extra_mask in selection_masks[1:]:
        poly_mask = np.logical_or(poly_mask, extra_mask)

    # Down-select events and energies once, up front, so the per-channel
    # loop below only has to mask on channel number.
    total_poly_events = self._arrival_times[poly_mask]
    total_poly_energies = self._measurement[poly_mask]

    # Find the best polynomial order, unless the user fixed one.
    if self._user_poly_order == -1:

        self._optimal_polynomial_grade = \
            self._unbinned_fit_global_and_determine_optimum_grade(
                total_poly_events, poly_exposure)

        if self._verbose:
            print("Auto-determined polynomial order: %d" %
                  self._optimal_polynomial_grade)
            print('\n')

    else:

        self._optimal_polynomial_grade = self._user_poly_order

    channels = range(self._first_channel,
                     self._n_channels + self._first_channel)

    t_start = self._poly_intervals.start_times
    t_stop = self._poly_intervals.stop_times

    # Fit each channel with the chosen grade, reporting progress.
    polynomials = []

    with progress_bar(self._n_channels,
                      title="Fitting %s background" % self._instrument) as p:

        for channel in channels:

            in_channel = total_poly_energies == channel
            channel_events = total_poly_events[in_channel]

            fitted_poly, _ = unbinned_polyfit(channel_events,
                                              self._optimal_polynomial_grade,
                                              t_start, t_stop, poly_exposure)

            polynomials.append(fitted_poly)
            p.increase()

    self._polynomials = polynomials
def _unbinned_fit_polynomials(self, bayes=False):
    """
    Fit the background polynomial of every channel using the unbinned events
    that fall inside the selected background (poly) intervals, storing the
    per-channel polynomials in ``self._polynomials``.

    :param bayes: if True, forwarded to ``unbinned_polyfit`` so the fits are
        performed with the Bayesian method instead of MLE
    :return: None (results are stored on the instance)
    """

    # A background fit will exist once this method completes.
    self._poly_fit_exists = True

    # Select all the events that are in the background regions
    # and make a mask

    all_bkg_masks = []

    total_duration = 0.0

    poly_exposure = 0

    for selection in self._poly_intervals:
        total_duration += selection.duration

        # Accumulate the exposure of each background selection.
        poly_exposure += self.exposure_over_interval(
            selection.start_time, selection.stop_time)

        all_bkg_masks.append(
            np.logical_and(
                self._arrival_times >= selection.start_time,
                self._arrival_times <= selection.stop_time,
            ))
    poly_mask = all_bkg_masks[0]

    # If there are multiple masks, OR them into one background mask:
    if len(all_bkg_masks) > 1:
        for mask in all_bkg_masks[1:]:
            poly_mask = np.logical_or(poly_mask, mask)

    # Select all the events in the poly selections.
    # We only need to do this once
    total_poly_events = self._arrival_times[poly_mask]

    # For the channel energies we will need to down select again.
    # We can go ahead and do this to avoid repeated computations
    total_poly_energies = self._measurement[poly_mask]

    # Now find the best poly order, unless the user specified one (-1 means
    # auto-determine via the likelihood-ratio scan).
    if self._user_poly_order == -1:
        self._optimal_polynomial_grade = (
            self._unbinned_fit_global_and_determine_optimum_grade(
                total_poly_events, poly_exposure, bayes=bayes))

        log.info("Auto-determined polynomial order: %d" %
                 self._optimal_polynomial_grade)

    else:
        self._optimal_polynomial_grade = self._user_poly_order

    channels = list(
        range(self._first_channel, self._n_channels + self._first_channel))

    t_start = self._poly_intervals.start_times
    t_stop = self._poly_intervals.stop_times

    # Parallel path: each channel is fit by a worker in the pool.
    if threeML_config["parallel"]["use_parallel"]:

        def worker(channel):
            # NOTE: captures total_poly_energies, total_poly_events,
            # t_start, t_stop, poly_exposure and bayes from this scope.
            channel_mask = total_poly_energies == channel

            # Select the masked events
            current_events = total_poly_events[channel_mask]

            polynomial, _ = unbinned_polyfit(
                current_events,
                self._optimal_polynomial_grade,
                t_start,
                t_stop,
                poly_exposure,
                bayes=bayes)

            return polynomial

        client = ParallelClient()

        polynomials = client.execute_with_progress_bar(
            worker, channels, name=f"Fitting {self._instrument} background")

    else:
        # Serial path: loop over channels with a progress bar.
        polynomials = []

        for channel in tqdm(channels,
                            desc=f"Fitting {self._instrument} background"):
            channel_mask = total_poly_energies == channel

            # Select the masked events
            current_events = total_poly_events[channel_mask]

            polynomial, _ = unbinned_polyfit(
                current_events,
                self._optimal_polynomial_grade,
                t_start,
                t_stop,
                poly_exposure,
                bayes=bayes)

            polynomials.append(polynomial)

    # We are now ready to return the polynomials
    self._polynomials = polynomials
def _unbinned_fit_global_and_determine_optimum_grade(
        self, events, exposure, bayes=False):
    """
    Provides the ability to find the optimum polynomial grade for *unbinned*
    events by fitting the total (all channels) to 0-2 order polynomials and
    then comparing them via a likelihood ratio test.

    :param events: an event list
    :param exposure: the exposure per event
    :param bayes: if True, forwarded to ``unbinned_polyfit`` for Bayesian fits
    :return: polynomial grade
    """

    # Candidate polynomial orders for the summed-channel fit.
    min_grade = 0
    max_grade = 2

    t_start = self._poly_intervals.start_times
    t_stop = self._poly_intervals.stop_times

    log.debug("attempting to find best fit poly with unbinned")

    if threeML_config["parallel"]["use_parallel"]:

        def fit_one_grade(grade):
            # Fit one candidate order and return only its log-likelihood.
            _, log_like = unbinned_polyfit(events,
                                           grade,
                                           t_start,
                                           t_stop,
                                           exposure,
                                           bayes=bayes)
            return log_like

        client = ParallelClient()

        log_likelihoods = client.execute_with_progress_bar(
            fit_one_grade,
            list(range(min_grade, max_grade + 1)),
            name="Finding best polynomial Order")

    else:

        log_likelihoods = []

        for grade in trange(min_grade, max_grade + 1,
                            desc="Finding best polynomial Order"):
            _, log_like = unbinned_polyfit(events,
                                           grade,
                                           t_start,
                                           t_stop,
                                           exposure,
                                           bayes=bayes)
            log_likelihoods.append(log_like)

    # Likelihood-ratio statistic between each pair of consecutive orders.
    delta_loglike = np.array([
        2 * (lower - higher)
        for lower, higher in zip(log_likelihoods[:-1], log_likelihoods[1:])
    ])

    log.debug(f"log likes {log_likelihoods}")
    log.debug(f" delta loglikes {delta_loglike}")

    # Require an improvement of at least 9 in 2*log-likelihood before
    # accepting the higher order.
    delta_threshold = 9.0

    significant = (delta_loglike >= delta_threshold).nonzero()[0]

    if significant.size == 0:
        # best grade is zero!
        best_grade = 0
    else:
        # Highest order that still gave a significant improvement.
        best_grade = significant[-1] + 1

    return best_grade