Code example #1
class TestSimpleFit:
    """Test fit on counts spectra without any IRFs"""
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 / u.TeV,
                                                  reference=0.1 * u.TeV)
        self.bkg_model = PowerLawSpectralModel(index=3,
                                               amplitude=1e4 / u.TeV,
                                               reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:])
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        # Currently it's necessary to specify a livetime
        self.src.livetime = 1 * u.s

        npred_bkg = self.bkg_model.integral(binning[:-1], binning[1:])

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)

    def test_wstat(self):
        """WStat with on source and background spectrum"""
        on_vector = self.src.copy()
        on_vector.data += self.bkg.data
        obs = SpectrumDatasetOnOff(
            counts=on_vector,
            counts_off=self.off,
            acceptance=1,
            acceptance_off=1 / self.alpha,
        )
        obs.model = self.source_model

        self.source_model.parameters.index = 1.12

        fit = Fit(obs)
        result = fit.run()
        pars = self.source_model.parameters

        assert_allclose(pars["index"].value, 1.997342, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.187067, rtol=1e-3)
        assert_allclose(result.total_stat, 30.022316, rtol=1e-3)
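A quick aside on the ON/OFF construction in the setup above: the OFF counts are drawn with expectation npred_bkg / alpha, so scaling them by alpha recovers the ON-region background expectation on average. A minimal standalone sketch of that relation (the numbers here are illustrative, not taken from the test):

import numpy as np

alpha = 0.1            # ON/OFF exposure ratio, as in the setup above
npred_bkg = 1000.0     # illustrative ON-region background expectation
rng = np.random.default_rng(23)

off_counts = rng.poisson(npred_bkg / alpha)  # OFF region is 1/alpha times larger
print(alpha * off_counts)                    # fluctuates around npred_bkg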
Code example #2
File: test_spectrum.py (Project: AtreyeeS/gammapy)
    def setup(self):
        self.nbins = 30
        energy = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = SkyModel(
            spectral_model=PowerLawSpectralModel(index=2,
                                                 amplitude=1e5 *
                                                 u.Unit("cm-2 s-1 TeV-1"),
                                                 reference=0.1 * u.TeV))
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.spectral_model.integral(
            energy[:-1], energy[1:]).value
        source_counts = random_state.poisson(npred)

        axis = MapAxis.from_edges(energy, name="energy", interp="log")
        geom = RegionGeom(region=None, axes=[axis])

        self.src = RegionNDMap.from_geom(geom=geom, data=source_counts)
        self.exposure = RegionNDMap.from_geom(geom.as_energy_true,
                                              data=1,
                                              unit="cm2 s")

        npred_bkg = bkg_model.integral(energy[:-1], energy[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = RegionNDMap.from_geom(geom=geom, data=bkg_counts)
        self.off = RegionNDMap.from_geom(geom=geom, data=off_counts)
Code example #3
File: utils.py (Project: tobiaskleiner/gammapy)
def _map_spectrum_weight(map, spectrum=None):
    """Weight a map with a spectrum.

    This requires the map to have an "energy_true" axis.
    The weights are normalised so that they sum to 1.
    The mean and unit of the output image are the same as those of the input cube.

    At the moment this is used to get a weighted exposure image.

    Parameters
    ----------
    map : `~gammapy.maps.Map`
        Input map with an "energy_true" axis.
    spectrum : `~gammapy.modeling.models.SpectralModel`
        Spectral model to compute the weights.
        Default is power-law with spectral index of 2.

    Returns
    -------
    map_weighted : `~gammapy.maps.Map`
        Weighted image
    """
    if spectrum is None:
        spectrum = PowerLawSpectralModel(index=2.0)

    # Compute weights vector
    energy_edges = map.geom.axes["energy_true"].edges
    weights = spectrum.integral(energy_min=energy_edges[:-1],
                                energy_max=energy_edges[1:])
    weights /= weights.sum()
    shape = np.ones(len(map.geom.data_shape))
    shape[0] = -1
    return map * weights.reshape(shape.astype(int))
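For context, here is a minimal usage sketch of the helper above; the geometry, axis name and values are illustrative assumptions, not taken from the project. Each energy plane is scaled by its normalised spectral weight, and summing over energy then gives the weighted exposure image mentioned in the docstring.

from gammapy.maps import Map, MapAxis
from gammapy.modeling.models import PowerLawSpectralModel

# Dummy exposure cube with a true-energy axis (illustrative values)
energy_axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5, name="energy_true")
exposure = Map.create(npix=10, axes=[energy_axis], unit="m2 s")
exposure.data += 1.0

weighted = _map_spectrum_weight(exposure, spectrum=PowerLawSpectralModel(index=2.0))
image = weighted.sum_over_axes()  # weighted exposure image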
Code example #4
File: utils.py (Project: sanjaymsh/gammapy)
def estimate_exposure_reco_energy(dataset, spectral_model=None):
    """Estimate an exposure map in reconstructed energy.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset.
    spectral_model : `~gammapy.modeling.models.SpectralModel`
        Assumed spectral shape. If None, a power law with index 2 is assumed.

    Returns
    -------
    exposure : `Map`
        Exposure map in reconstructed energy
    """
    if spectral_model is None:
        spectral_model = PowerLawSpectralModel()

    model = SkyModel(spatial_model=ConstantFluxSpatialModel(),
                     spectral_model=spectral_model)

    energy_axis = dataset._geom.axes["energy"]

    edisp = None

    if dataset.edisp is not None:
        edisp = dataset.edisp.get_edisp_kernel(position=None,
                                               energy_axis=energy_axis)

    meval = MapEvaluator(model=model, exposure=dataset.exposure, edisp=edisp)
    npred = meval.compute_npred()
    ref_flux = spectral_model.integral(energy_axis.edges[:-1],
                                       energy_axis.edges[1:])
    reco_exposure = npred / ref_flux[:, np.newaxis, np.newaxis]
    return reco_exposure
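A hypothetical call of the function above; `dataset` is assumed to be an existing `~gammapy.datasets.MapDataset` with exposure and energy dispersion already set, and the spectral index is only illustrative.

from gammapy.modeling.models import PowerLawSpectralModel

# Exposure in reconstructed energy for an assumed spectral shape
reco_exposure = estimate_exposure_reco_energy(
    dataset, spectral_model=PowerLawSpectralModel(index=2.5)
)
print(reco_exposure.geom.axes["energy"].nbin)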
Code example #5
File: test_fit.py (Project: peroju/gammapy)
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 *
                                                  u.Unit("cm-2 s-1 TeV-1"),
                                                  reference=0.1 * u.TeV)
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:]).value
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)

        self.src.livetime = 1 * u.s
        self.aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")

        npred_bkg = bkg_model.integral(binning[:-1], binning[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)
Code example #6
def test_trapz_loglog():
    energy = Quantity([1, 10], "TeV")
    pwl = PowerLawSpectralModel(index=2.3)

    ref = pwl.integral(energy_min=energy[0], energy_max=energy[1])

    val = trapz_loglog(pwl(energy), energy)
    assert_quantity_allclose(val, ref)
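The reference value in this test is the closed-form integral of a power law, which PowerLawSpectralModel.integral evaluates analytically and trapz_loglog approximates numerically. A minimal cross-check of that closed form (not part of the original test; the default amplitude and reference energy of the model are assumed):

import astropy.units as u
from gammapy.modeling.models import PowerLawSpectralModel

pwl = PowerLawSpectralModel(index=2.3)
amplitude = pwl.amplitude.quantity   # default amplitude assumed
reference = pwl.reference.quantity   # default reference energy assumed
gamma = pwl.index.value

e1, e2 = 1 * u.TeV, 10 * u.TeV
# Closed form of the integral of A * (E / E0)**(-gamma) from e1 to e2
analytic = (
    amplitude * reference / (1 - gamma)
    * ((e2 / reference) ** (1 - gamma) - (e1 / reference) ** (1 - gamma))
)
print(analytic.to("cm-2 s-1"), pwl.integral(e1, e2).to("cm-2 s-1"))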
Code example #7
def test_integrate_spectrum():
    """
    Test numerical integration against analytical solution.
    """
    emin = Quantity(1, "TeV")
    emax = Quantity(10, "TeV")
    pwl = PowerLawSpectralModel(index=2.3)

    ref = pwl.integral(emin=emin, emax=emax)

    val = integrate_spectrum(pwl, emin, emax)
    assert_quantity_allclose(val, ref)
Code example #8
File: test_spectral.py (Project: maxnoe/gammapy)
def test_integral_exp_cut_off_power_law_large_number_of_bins():
    energy = np.geomspace(1, 10, 100) * u.TeV
    energy_min = energy[:-1]
    energy_max = energy[1:]

    exppowerlaw = ExpCutoffPowerLawSpectralModel(
        amplitude="1e-11 TeV-1 cm-2 s-1", index=2)
    exppowerlaw.parameters["lambda_"].value = 1e-3
    powerlaw = PowerLawSpectralModel(amplitude="1e-11 TeV-1 cm-2 s-1", index=2)
    expected_flux = powerlaw.integral(energy_min, energy_max)

    flux = exppowerlaw.integral(energy_min, energy_max)

    assert_allclose(flux.value, expected_flux.value, rtol=0.01)
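The 1% tolerance follows from simple arithmetic: with lambda_ = 1e-3 TeV-1 the cut-off factor exp(-lambda_ * E) stays above roughly 0.99 over the 1-10 TeV range, so the cut-off model can deviate from the pure power law by at most about 1%. An illustrative check:

import numpy as np

lambda_, e_max = 1e-3, 10.0      # TeV-1 and TeV, as in the test above
print(np.exp(-lambda_ * e_max))  # ~0.990, i.e. at most ~1% suppression below 10 TeV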
Code example #9
File: kernel.py (Project: registerrier/gammapy)
    def to_image(self, spectrum=None, exposure=None, keepdims=True):
        """Transform 3D PSFKernel into a 2D PSFKernel.

        Parameters
        ----------
        spectrum : `~gammapy.modeling.models.SpectralModel`
            Spectral model to compute the weights.
            Default is power-law with spectral index of 2.
        exposure : `~astropy.units.Quantity` or `~numpy.ndarray`
            1D array containing exposure in each true energy bin.
            It must have the same size as the PSFKernel energy axis.
            Default is uniform exposure over energy.
        keepdims : bool
            If True, the resulting PSFKernel will keep an energy axis with one bin.
            Default is True.

        Returns
        -------
        weighted_kernel : `~gammapy.irf.PSFKernel`
            The weighted kernel summed over energy.
        """
        map = self.psf_kernel_map

        if spectrum is None:
            spectrum = PowerLawSpectralModel(index=2.0)

        if exposure is None:
            exposure = np.ones(map.geom.axes[0].center.shape)

        exposure = u.Quantity(exposure)

        if exposure.shape != map.geom.axes[0].center.shape:
            raise ValueError("Incorrect exposure_array shape")

        # Compute weights vector
        energy_edges = map.geom.axes["energy_true"].edges
        weights = spectrum.integral(energy_min=energy_edges[:-1],
                                    energy_max=energy_edges[1:],
                                    intervals=True)
        weights *= exposure
        weights /= weights.sum()

        spectrum_weighted_kernel = map.copy()
        spectrum_weighted_kernel.quantity *= weights[:, np.newaxis, np.newaxis]

        return self.__class__(
            spectrum_weighted_kernel.sum_over_axes(keepdims=keepdims))
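A hypothetical usage sketch for the method above; `psf_kernel` is assumed to be an existing `~gammapy.irf.PSFKernel`, and the steeper weighting index is only illustrative.

from gammapy.modeling.models import PowerLawSpectralModel

# Collapse the 3D kernel to a single energy bin using a steeper weighting spectrum
kernel_image = psf_kernel.to_image(spectrum=PowerLawSpectralModel(index=3.0))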
Code example #10
def estimate_exposure_reco_energy(dataset,
                                  spectral_model=None,
                                  normalize=True):
    """Estimate an exposure map in reconstructed energy.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset` or `~gammapy.datasets.MapDatasetOnOff`
        The input dataset.
    spectral_model : `~gammapy.modeling.models.SpectralModel`
        Assumed spectral shape. If None, a power law with index 2 is assumed.
    normalize : bool
        Normalize the exposure to the total integrated flux of the spectral model.
        When not normalized it directly gives the predicted counts from the spectral
        model.

    Returns
    -------
    exposure : `Map`
        Exposure map in reconstructed energy
    """
    if spectral_model is None:
        spectral_model = PowerLawSpectralModel()

    model = SkyModel(spatial_model=ConstantFluxSpatialModel(),
                     spectral_model=spectral_model)

    energy_axis = dataset._geom.axes["energy"]

    if dataset.edisp is not None:
        edisp = dataset.edisp.get_edisp_kernel(position=None,
                                               energy_axis=energy_axis)
    else:
        edisp = None

    eval = MapEvaluator(model=model, exposure=dataset.exposure, edisp=edisp)
    reco_exposure = eval.compute_npred()

    if normalize:
        ref_flux = spectral_model.integral(energy_axis.edges[:-1],
                                           energy_axis.edges[1:])
        reco_exposure = reco_exposure / ref_flux[:, np.newaxis, np.newaxis]

    return reco_exposure
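An illustrative comparison of the two modes, again assuming an existing `MapDataset` called `dataset`: with normalize=False the result holds the predicted counts for the assumed spectrum, while normalize=True divides by the integrated reference flux to turn it into an exposure.

exposure_reco = estimate_exposure_reco_energy(dataset, normalize=True)
npred_reference = estimate_exposure_reco_energy(dataset, normalize=False)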
Code example #11
File: test_fit.py (Project: peroju/gammapy)
class TestFit:
    """Test fit on counts spectra without any IRFs"""
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 *
                                                  u.Unit("cm-2 s-1 TeV-1"),
                                                  reference=0.1 * u.TeV)
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:]).value
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)

        self.src.livetime = 1 * u.s
        self.aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")

        npred_bkg = bkg_model.integral(binning[:-1], binning[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            aeff=self.aeff,
            livetime=self.src.livetime,
        )

        npred = dataset.npred().data
        assert_allclose(npred[5], 660.5171, rtol=1e-5)

        stat_val = dataset.likelihood()
        assert_allclose(stat_val, -107346.5291, rtol=1e-5)

        self.source_model.parameters["index"].value = 1.12

        fit = Fit([dataset])
        result = fit.run()

        # These values are checked with sherpa fits, do not change
        pars = result.parameters
        assert_allclose(pars["index"].value, 1.995525, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.9, rtol=1e-3)

    def test_wstat(self):
        """WStat with on source and background spectrum"""
        on_vector = self.src.copy()
        on_vector.data += self.bkg.data
        dataset = SpectrumDatasetOnOff(
            counts=on_vector,
            counts_off=self.off,
            aeff=self.aeff,
            livetime=self.src.livetime,
            acceptance=1,
            acceptance_off=1 / self.alpha,
        )
        dataset.model = self.source_model

        self.source_model.parameters.index = 1.12

        fit = Fit([dataset])
        result = fit.run()
        pars = self.source_model.parameters

        assert_allclose(pars["index"].value, 1.997342, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.187067, rtol=1e-3)
        assert_allclose(result.total_stat, 30.022316, rtol=1e-3)

    def test_fit_range(self):
        """Test fit range without complication of thresholds"""
        dataset = SpectrumDatasetOnOff(counts=self.src,
                                       mask_safe=np.ones(self.src.energy.nbin,
                                                         dtype=bool))
        dataset.model = self.source_model

        assert np.sum(dataset.mask_safe) == self.nbins
        e_min, e_max = dataset.energy_range

        assert_allclose(e_max.value, 10)
        assert_allclose(e_min.value, 0.1)

    def test_likelihood_profile(self):
        dataset = SpectrumDataset(
            model=self.source_model,
            aeff=self.aeff,
            livetime=self.src.livetime,
            counts=self.src,
            mask_safe=np.ones(self.src.energy.nbin, dtype=bool),
        )
        fit = Fit([dataset])
        result = fit.run()
        true_idx = result.parameters["index"].value
        values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
        profile = fit.likelihood_profile("index", values=values)
        actual = values[np.argmin(profile["likelihood"])]
        assert_allclose(actual, true_idx, rtol=0.01)
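As an optional follow-up to test_likelihood_profile, the scanned profile can be visualised. A minimal sketch reusing `values` and `profile` from the test above (the plotting itself is an assumption, not part of the test):

import matplotlib.pyplot as plt

plt.plot(values, profile["likelihood"])
plt.xlabel("index")
plt.ylabel("fit statistic")
plt.show()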
Code example #12
class TestSpectrumDataset:
    """Test fit on counts spectra without any IRFs"""
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV

        self.source_model = PowerLawSpectralModel(index=2.1,
                                                  amplitude=1e5 / u.TeV / u.s,
                                                  reference=0.1 * u.TeV)

        self.livetime = 100 * u.s

        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = bkg_rate * self.livetime

        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)

        random_state = get_random_state(23)
        self.npred = (self.source_model.integral(binning[:-1], binning[1:]) *
                      self.livetime)
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)

        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            livetime=self.livetime,
            background=self.bkg,
        )

    def test_data_shape(self):
        assert self.dataset.data_shape[0] == self.nbins

    def test_energy_range(self):
        energy_range = self.dataset.energy_range
        assert energy_range.unit == u.TeV
        assert_allclose(energy_range.to_value("TeV"), [0.1, 10.0])

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        fit = Fit(self.dataset)
        result = fit.run()

        assert result.success
        assert "minuit" in repr(result)

        npred = self.dataset.npred().data.sum()
        assert_allclose(npred, self.npred.sum(), rtol=1e-3)
        assert_allclose(result.total_stat, -18087404.624, rtol=1e-3)

        pars = result.parameters
        assert_allclose(pars["index"].value, 2.1, rtol=1e-2)
        assert_allclose(pars.error("index"), 0.00127, rtol=1e-2)

        assert_allclose(pars["amplitude"].value, 1e5, rtol=1e-3)
        assert_allclose(pars.error("amplitude"), 153.450, rtol=1e-2)

    def test_fake(self):
        """Test the fake dataset"""
        real_dataset = self.dataset.copy()
        self.dataset.fake(314)
        assert real_dataset.counts.data.shape == self.dataset.counts.data.shape
        assert real_dataset.background.data.sum(
        ) == self.dataset.background.data.sum()
        assert int(real_dataset.counts.data.sum()) == 907010
        assert self.dataset.counts.data.sum() == 907331

    def test_incorrect_mask(self):
        mask_fit = np.ones(self.nbins, dtype=np.dtype("float"))
        with pytest.raises(ValueError):
            SpectrumDataset(
                model=self.source_model,
                counts=self.src,
                livetime=self.livetime,
                mask_fit=mask_fit,
                background=self.bkg,
            )

    def test_set_model(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EnergyDispersion.from_diagonal_response(
            self.src.energy.edges, self.src.energy.edges)
        dataset = SpectrumDataset(None, self.src, self.livetime, None, aeff,
                                  edisp, self.bkg)
        with pytest.raises(AttributeError):
            dataset.parameters

        dataset.model = self.source_model
        assert dataset.parameters[0] == self.source_model.parameters[0]

    def test_str(self):
        assert "SpectrumDataset" in str(self.dataset)

    def test_spectrumdataset_create(self):
        e_reco = u.Quantity([0.1, 1, 10.0], "TeV")
        e_true = u.Quantity([0.05, 0.5, 5, 20.0], "TeV")
        empty_dataset = SpectrumDataset.create(e_reco, e_true)

        assert empty_dataset.counts.total_counts == 0
        assert empty_dataset.data_shape[0] == 2
        assert empty_dataset.background.total_counts == 0
        assert empty_dataset.background.energy.nbin == 2
        assert empty_dataset.aeff.data.axis("energy").nbin == 3
        assert empty_dataset.edisp.data.axis("e_reco").nbin == 2
        assert empty_dataset.livetime.value == 0
        assert len(empty_dataset.gti.table) == 0
        assert empty_dataset.energy_range[0] is None
        assert_allclose(empty_dataset.mask_safe, 0)

    def test_spectrum_dataset_stack_diagonal_safe_mask(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp = EnergyDispersion.from_diagonal_response(
            self.src.energy.edges, self.src.energy.edges)
        livetime = self.livetime
        dataset1 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime,
            aeff=aeff,
            edisp=edisp,
            background=self.bkg.copy(),
        )

        livetime2 = 0.5 * livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:],
                                   2 * aeff.data.data)
        bkg2 = CountsSpectrum(
            self.src.energy.edges[:-1],
            self.src.energy.edges[1:],
            data=2 * self.bkg.data,
        )
        safe_mask2 = np.ones_like(self.src.data, bool)
        safe_mask2[0] = False
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp,
            background=bkg2,
            mask_safe=safe_mask2,
        )
        dataset1.stack(dataset2)

        assert_allclose(dataset1.counts.data[1:], self.src.data[1:] * 2)
        assert_allclose(dataset1.counts.data[0], self.src.data[0])
        assert dataset1.livetime == 1.5 * self.livetime
        assert_allclose(dataset1.background.data[1:], 3 * self.bkg.data[1:])
        assert_allclose(dataset1.background.data[0], self.bkg.data[0])
        assert_allclose(
            dataset1.aeff.data.data.to_value("m2"),
            4.0 / 3 * aeff.data.data.to_value("m2"),
        )
        assert_allclose(dataset1.edisp.pdf_matrix[1:], edisp.pdf_matrix[1:])
        assert_allclose(dataset1.edisp.pdf_matrix[0],
                        0.5 * edisp.pdf_matrix[0])

    def test_spectrum_dataset_stack_nondiagonal_no_bkg(self):
        aeff = EffectiveAreaTable.from_parametrization(self.src.energy.edges,
                                                       "HESS")
        edisp1 = EnergyDispersion.from_gauss(self.src.energy.edges,
                                             self.src.energy.edges, 0.1, 0.0)
        livetime = self.livetime
        dataset1 = SpectrumDataset(counts=None,
                                   livetime=livetime,
                                   aeff=aeff,
                                   edisp=edisp1,
                                   background=None)

        livetime2 = livetime
        aeff2 = EffectiveAreaTable(self.src.energy.edges[:-1],
                                   self.src.energy.edges[1:], aeff.data.data)
        edisp2 = EnergyDispersion.from_gauss(self.src.energy.edges,
                                             self.src.energy.edges, 0.2, 0.0)
        dataset2 = SpectrumDataset(
            counts=self.src.copy(),
            livetime=livetime2,
            aeff=aeff2,
            edisp=edisp2,
            background=None,
        )
        dataset1.stack(dataset2)

        assert dataset1.counts is None
        assert dataset1.background is None
        assert dataset1.livetime == 2 * self.livetime
        assert_allclose(dataset1.aeff.data.data.to_value("m2"),
                        aeff.data.data.to_value("m2"))
        assert_allclose(dataset1.edisp.get_bias(1 * u.TeV), 0.0, atol=1e-3)
        assert_allclose(dataset1.edisp.get_resolution(1 * u.TeV),
                        0.1581,
                        atol=1e-2)
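A short arithmetic note on test_spectrum_dataset_stack_diagonal_safe_mask above: the asserted 4/3 factor is consistent with a livetime-weighted average of the effective areas, since the two datasets have livetimes t and 0.5 t and areas a and 2 a. A minimal check of that arithmetic:

t1, t2 = 100.0, 50.0  # livetimes in seconds, as in the test above
a1, a2 = 1.0, 2.0     # relative effective areas
print((t1 * a1 + t2 * a2) / (t1 + t2))  # 4/3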
Code example #13
class TestFit:
    """Test fit on counts spectra without any IRFs"""
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 / u.TeV,
                                                  reference=0.1 * u.TeV)
        self.bkg_model = PowerLawSpectralModel(index=3,
                                               amplitude=1e4 / u.TeV,
                                               reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:])
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        # Currently it's necessary to specify a livetime
        self.src.livetime = 1 * u.s

        npred_bkg = self.bkg_model.integral(binning[:-1], binning[1:])

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)

    def test_cash(self):
        """Simple CASH fit to the on vector"""
        dataset = SpectrumDataset(model=self.source_model, counts=self.src)

        npred = dataset.npred().data
        assert_allclose(npred[5], 660.5171, rtol=1e-5)

        stat_val = dataset.likelihood()
        assert_allclose(stat_val, -107346.5291, rtol=1e-5)

        self.source_model.parameters["index"].value = 1.12

        fit = Fit([dataset])
        result = fit.run()

        # These values are checked with sherpa fits, do not change
        pars = result.parameters
        assert_allclose(pars["index"].value, 1.995525, rtol=1e-3)
        assert_allclose(pars["amplitude"].value, 100245.9, rtol=1e-3)

    def test_fit_range(self):
        """Test fit range without complication of thresholds"""
        dataset = SpectrumDatasetOnOff(counts=self.src,
                                       mask_safe=np.ones(self.src.energy.nbin,
                                                         dtype=bool))
        dataset.model = self.source_model

        assert np.sum(dataset.mask_safe) == self.nbins
        e_min, e_max = dataset.energy_range

        assert_allclose(e_max.value, 10)
        assert_allclose(e_min.value, 0.1)

    def test_likelihood_profile(self):
        dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            mask_safe=np.ones(self.src.energy.nbin, dtype=bool),
        )
        fit = Fit([dataset])
        result = fit.run()
        true_idx = result.parameters["index"].value
        values = np.linspace(0.95 * true_idx, 1.05 * true_idx, 100)
        profile = fit.likelihood_profile("index", values=values)
        actual = values[np.argmin(profile["likelihood"])]
        assert_allclose(actual, true_idx, rtol=0.01)
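Background note on the statistic the Cash fits in these examples minimise: up to terms that do not depend on the model, it is C = 2 * sum(mu - n * ln(mu)) over energy bins, with mu the predicted and n the observed counts. A minimal numpy sketch of that expression (not taken from the page):

import numpy as np

def cash_stat(n_observed, mu_predicted):
    """Cash fit statistic, up to model-independent terms."""
    mu = np.asarray(mu_predicted, dtype=float)
    n = np.asarray(n_observed, dtype=float)
    return 2.0 * np.sum(mu - n * np.log(mu))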