def get_scale_model(self, models):
    """Set scale model.

    Parameters
    ----------
    models : `Models`
        Models

    Returns
    -------
    model : `ScaleSpectralModel`
        Scale spectral model
    """
    ref_model = models[self.source].spectral_model
    scale_model = ScaleSpectralModel(ref_model)
    scale_model.norm.value = 1.0
    scale_model.norm.frozen = False

    # Derive the norm bounds from the parameter that carries the model
    # amplitude: "amplitude" when the model has one, "norm" otherwise.
    if hasattr(ref_model, "amplitude"):
        scaled_parameter = ref_model.amplitude
    else:
        scaled_parameter = ref_model.norm

    # Bounds are expressed relative to the current parameter value, since
    # the scale model multiplies the reference model by `norm`.
    scale_model.norm.min = scaled_parameter.min / scaled_parameter.value
    scale_model.norm.max = scaled_parameter.max / scaled_parameter.value
    scale_model.norm.interp = scaled_parameter.interp
    scale_model.norm.scan_values = self.norm_values
    scale_model.norm.scan_min = self.norm_min
    scale_model.norm.scan_max = self.norm_max
    scale_model.norm.scan_n_values = self.norm_n_values
    return scale_model
def run(self, datasets, steps="all"):
    """Estimate flux for a given energy range.

    The fit is performed in the energy range provided by the dataset masks.
    The input energy range is used only to compute the flux normalization.

    Parameters
    ----------
    datasets : list of `~gammapy.spectrum.SpectrumDataset`
        Spectrum datasets.
    steps : list of str
        Which steps to execute. Available options are:

            * "norm-err": estimate symmetric error.
            * "errn-errp": estimate asymmetric errors.
            * "ul": estimate upper limits.
            * "ts": estimate ts and sqrt(ts) values.
            * "norm-scan": estimate fit statistic profiles.

        By default all steps are executed.

    Returns
    -------
    result : dict
        Dict with results for the flux point.
    """
    datasets = self._check_datasets(datasets)

    # All datasets must share type and energy binning for a joint norm fit.
    if not (datasets.is_all_same_type and datasets.is_all_same_energy_shape):
        raise ValueError(
            "Flux point estimation requires a list of datasets"
            " of the same type and data shape."
        )

    reference = datasets[0].models[self.source].spectral_model

    # Wrap the reference model so only a multiplicative norm is fitted.
    scale_model = ScaleSpectralModel(reference)
    scale_model.norm.min = 0
    scale_model.norm.max = 1e5
    scale_model.norm.value = 1.0
    scale_model.norm.frozen = False

    self._set_model(datasets, scale_model)

    result = self._prepare_result(scale_model.model)
    fit_result = super().run(
        datasets,
        scale_model.norm,
        self._prepare_steps(steps),
        null_value=0,
        scan_values=self.norm_values,
    )
    result.update(fit_result)

    # Restore the unscaled model on the datasets before returning.
    self._set_model(datasets, reference)
    return result
def __init__(
    self,
    datasets,
    time_intervals=None,
    source="",
    norm_min=0.2,
    norm_max=5,
    norm_n_values=11,
    norm_values=None,
    sigma=1,
    sigma_ul=2,
    reoptimize=False,
):
    """Initialize the light curve estimator.

    Parameters
    ----------
    datasets : list of `Dataset`
        Input datasets; must all be of the same type and data shape.
    time_intervals : list of `~astropy.time.Time`, optional
        Start/stop pairs; defaults to one interval per dataset GTI.
    source : str
        Name of the source model component to estimate.
    norm_min, norm_max : float
        Bounds of the default norm scan (log-spaced).
    norm_n_values : int
        Number of scan values.
    norm_values : `~numpy.ndarray`, optional
        Explicit scan values; overrides the min/max/n parameters.
    sigma : int
        Sigma for flux error.
    sigma_ul : int
        Sigma for upper limits.
    reoptimize : bool
        Re-optimize other free parameters during the scan.
    """
    self.datasets = Datasets(datasets)

    # BUG FIX: the original condition `not A and B` only raised when the
    # types differed AND the shapes matched. The intent (cf. the sibling
    # flux-point estimator) is to require BOTH same type and same shape.
    if not (self.datasets.is_all_same_type and self.datasets.is_all_same_shape):
        raise ValueError(
            "Light Curve estimation requires a list of datasets"
            " of the same type and data shape.")

    if time_intervals is None:
        # Default: one time interval per dataset, spanning its full GTI.
        time_intervals = [
            Time([d.gti.time_start[0], d.gti.time_stop[-1]])
            for d in self.datasets
        ]

    self._check_and_sort_time_intervals(time_intervals)

    dataset = self.datasets[0]

    if isinstance(dataset, SpectrumDatasetOnOff):
        model = dataset.model
    else:
        model = dataset.model[source].spectral_model

    self.model = ScaleSpectralModel(model)
    self.model.norm.min = 0
    self.model.norm.max = 1e5

    if norm_values is None:
        norm_values = np.logspace(np.log10(norm_min), np.log10(norm_max),
                                  norm_n_values)

    self.norm_values = norm_values
    self.sigma = sigma
    self.sigma_ul = sigma_ul
    self.reoptimize = reoptimize
    self.source = source
    self.group_table_info = None
def get_scale_model(self, models):
    """Set scale model

    Parameters
    ----------
    models : `Models`
        Models

    Returns
    -------
    model : `ScaleSpectralModel`
        Scale spectral model
    """
    spectral_model = models[self.source].spectral_model
    parameters = spectral_model.parameters

    # Prefer the "amplitude" parameter when the model defines one,
    # otherwise fall back to "norm".
    par_name = "amplitude" if "amplitude" in parameters.names else "norm"

    scale_model = ScaleSpectralModel(spectral_model)
    scale_model.norm = self._set_norm_parameter(
        scale_model.norm, parameters[par_name]
    )
    return scale_model
def __init__(
    self,
    datasets,
    source="",
    norm_min=0.2,
    norm_max=5,
    norm_n_values=11,
    norm_values=None,
    sigma=1,
    sigma_ul=2,
    reoptimize=False,
):
    """Initialize the light curve estimator.

    Parameters
    ----------
    datasets : `Datasets` or list of `Dataset`
        Input datasets; must all be of the same type and data shape.
    source : str
        Name of the source model component to estimate.
    norm_min, norm_max : float
        Bounds of the default norm scan (log-spaced).
    norm_n_values : int
        Number of scan values.
    norm_values : `~numpy.ndarray`, optional
        Explicit scan values; overrides the min/max/n parameters.
    sigma : int
        Sigma for flux error.
    sigma_ul : int
        Sigma for upper limits.
    reoptimize : bool
        Re-optimize other free parameters during the scan.
    """
    if not isinstance(datasets, Datasets):
        datasets = Datasets(datasets)
    self.datasets = datasets

    # BUG FIX: the original condition `not A and B` only raised when the
    # types differed AND the shapes matched. The intent (cf. the sibling
    # flux-point estimator) is to require BOTH same type and same shape.
    if not (datasets.is_all_same_type and datasets.is_all_same_shape):
        raise ValueError(
            "Light Curve estimation requires a list of datasets"
            " of the same type and data shape."
        )

    dataset = self.datasets.datasets[0]

    if isinstance(dataset, SpectrumDatasetOnOff):
        model = dataset.model
    else:
        model = dataset.model[source].spectral_model

    self.model = ScaleSpectralModel(model)
    self.model.norm.min = 0
    self.model.norm.max = 1e5

    if norm_values is None:
        norm_values = np.logspace(
            np.log10(norm_min), np.log10(norm_max), norm_n_values
        )

    self.norm_values = norm_values
    self.sigma = sigma
    self.sigma_ul = sigma_ul
    self.reoptimize = reoptimize
    self.source = source
    self._set_scale_model()
def __init__(
    self,
    datasets,
    source,
    energy_range,
    norm_min=0.2,
    norm_max=5,
    norm_n_values=11,
    norm_values=None,
    sigma=1,
    sigma_ul=3,
    reoptimize=True,
):
    """Initialize the flux estimator.

    Validates that the datasets share type and energy binning, then builds
    a `ScaleSpectralModel` around the source's spectral model so that only
    a multiplicative norm is fitted.
    """
    datasets = self._check_datasets(datasets)

    # Joint estimation only makes sense for homogeneous datasets.
    if not (datasets.is_all_same_type and datasets.is_all_same_energy_shape):
        raise ValueError(
            "Flux point estimation requires a list of datasets"
            " of the same type and data shape."
        )

    # Work on a copy so the caller's datasets are left untouched.
    datasets = datasets.copy()

    reference = datasets[0].models[source].spectral_model
    self.model = ScaleSpectralModel(reference)
    self.model.norm.min = 0
    self.model.norm.max = 1e5

    if norm_values is None:
        norm_values = np.logspace(
            np.log10(norm_min), np.log10(norm_max), norm_n_values
        )
    self.norm_values = norm_values

    self.source = source
    self.energy_range = energy_range

    super().__init__(
        datasets,
        sigma,
        sigma_ul,
        reoptimize,
    )
    self._set_scale_model()
def __init__(
    self,
    datasets,
    e_edges,
    source=0,
    norm_min=0.2,
    norm_max=5,
    norm_n_values=11,
    norm_values=None,
    sigma=1,
    sigma_ul=2,
    reoptimize=False,
):
    """Initialize the flux point estimator.

    Parameters
    ----------
    datasets : `Datasets` or list of `Dataset`
        Input datasets; must all be of the same type and data shape.
    e_edges : `~astropy.units.Quantity`
        Energy edges of the flux point bins.
    source : str or int
        Source model component to estimate.
    norm_min, norm_max : float
        Bounds of the default norm scan (log-spaced).
    norm_n_values : int
        Number of scan values.
    norm_values : `~numpy.ndarray`, optional
        Explicit scan values; overrides the min/max/n parameters.
    sigma : int
        Sigma for flux error.
    sigma_ul : int
        Sigma for upper limits.
    reoptimize : bool
        Re-optimize other free parameters during the scan.
    """
    # make a copy to not modify the input datasets
    if not isinstance(datasets, Datasets):
        datasets = Datasets(datasets)

    # BUG FIX: the original condition `not A and B` only raised when the
    # types differed AND the shapes matched. The intent (cf. the sibling
    # estimators that use `not A or not B`) is to require BOTH same type
    # and same shape.
    if not (datasets.is_all_same_type and datasets.is_all_same_shape):
        raise ValueError(
            "Flux point estimation requires a list of datasets"
            " of the same type and data shape.")

    self.datasets = datasets.copy()
    self.e_edges = e_edges

    dataset = self.datasets[0]
    model = dataset.models[source].spectral_model

    self.model = ScaleSpectralModel(model)
    self.model.norm.min = 0
    self.model.norm.max = 1e3

    if norm_values is None:
        norm_values = np.logspace(np.log10(norm_min), np.log10(norm_max),
                                  norm_n_values)

    self.norm_values = norm_values
    self.sigma = sigma
    self.sigma_ul = sigma_ul
    self.reoptimize = reoptimize
    self.source = source
    self.fit = Fit(self.datasets)
    self._set_scale_model()
    self._contribute_to_stat = False
def get_scale_model(self, models):
    """Set scale model.

    Parameters
    ----------
    models : `Models`
        Models

    Returns
    -------
    model : `ScaleSpectralModel`
        Scale spectral model
    """
    ref_model = models[self.source].spectral_model

    # Wrap the reference model so only a multiplicative norm is fitted;
    # wide symmetric bounds leave the fit essentially unconstrained.
    scale_model = ScaleSpectralModel(ref_model)
    scale_model.norm.min = -1e5
    scale_model.norm.max = 1e5
    scale_model.norm.value = 1.0
    scale_model.norm.frozen = False
    return scale_model
def get_scale_model(self, models):
    """Set scale model.

    Parameters
    ----------
    models : `Models`
        Models

    Returns
    -------
    model : `ScaleSpectralModel`
        Scale spectral model
    """
    ref_model = models[self.source].spectral_model

    # Wrap the reference model so only a multiplicative norm is fitted.
    scale_model = ScaleSpectralModel(ref_model)
    scale_model.norm.value = 1.0
    scale_model.norm.frozen = False
    # Log interpolation: the norm scan is log-spaced by construction.
    scale_model.norm.interp = "log"
    scale_model.norm.scan_values = self.norm_values
    scale_model.norm.scan_min = self.norm_min
    scale_model.norm.scan_max = self.norm_max
    scale_model.norm.scan_n_values = self.norm_n_values
    return scale_model