Example #1
    def sample_coord(self, n_events, random_state=0):
        """Sample position and energy of events.

        Parameters
        ----------
        n_events : int
            Number of events to sample.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        coords : `~gammapy.maps.MapCoord` object.
            Sequence of coordinates and energies of the sampled events.
        """

        random_state = get_random_state(random_state)
        sampler = InverseCDFSampler(pdf=self.data, random_state=random_state)

        coords_pix = sampler.sample(n_events)
        coords = self.geom.pix_to_coord(coords_pix[::-1])

        # TODO: pix_to_coord should return a MapCoord object
        axes_names = ["lon", "lat"] + self.geom.axes.names
        cdict = OrderedDict(zip(axes_names, coords))

        return MapCoord.create(cdict, frame=self.geom.frame)
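The sampling above delegates to InverseCDFSampler. As a rough, self-contained illustration of the underlying idea (a plain-numpy sketch, not the gammapy implementation), inverse-CDF sampling from a tabulated one-dimensional PDF looks like this:

import numpy as np

def sample_from_pdf(pdf, n_events, random_state):
    # Draw indices with probability proportional to the tabulated pdf
    cdf = np.cumsum(np.asarray(pdf, dtype=float))
    cdf /= cdf[-1]                        # normalise to a proper CDF
    u = random_state.uniform(size=n_events)
    return np.searchsorted(cdf, u)        # invert the CDF

rng = np.random.RandomState(0)
print(sample_from_pdf([0.1, 0.5, 0.3, 0.1], 10, rng))  # indices 0..3, mostly 1 and 2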
Example #2
def random_times(
    size,
    rate,
    dead_time=TimeDelta(0, format="sec"),
    return_diff=False,
    random_state="random-seed",
):
    """Make random times assuming a Poisson process.

    This function can be used when testing event time series,
    to provide a comparison with what completely random data looks like.

    It can be used in two ways (in either case the return type is `~astropy.time.TimeDelta`):

    * ``return_diff=False`` - Return absolute times, relative to zero (default)
    * ``return_diff=True`` - Return time differences between consecutive events.

    Parameters
    ----------
    size : int
        Number of samples
    rate : `~astropy.units.Quantity`
        Event rate (dimension: 1 / TIME)
    dead_time : `~astropy.units.Quantity` or `~astropy.time.TimeDelta`, optional
        Dead time after event (dimension: TIME)
    return_diff : bool
        Return time difference between events? (default: no, return absolute times)
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    time : `~astropy.time.TimeDelta`
        Absolute times after time zero (or time differences between consecutive events if ``return_diff=True``), in seconds.

    Examples
    --------
    Example showing how to simulate 100 events at a rate of 10 Hz.
    As expected, the last event occurs after about 10 seconds.

    >>> from astropy.units import Quantity
    >>> from gammapy.time import random_times
    >>> rate = Quantity(10, 'Hz')
    >>> times = random_times(size=100, rate=rate, random_state=0)
    >>> times[-1]
    <TimeDelta object: scale='None' format='sec' value=9.186484131475076>
    """
    random_state = get_random_state(random_state)

    dead_time = TimeDelta(dead_time)
    scale = (1 / rate).to("s").value
    time_delta = random_state.exponential(scale=scale, size=size)
    time_delta += dead_time.to("s").value

    if return_diff:
        return TimeDelta(time_delta, format="sec")
    else:
        time = time_delta.cumsum()
        return TimeDelta(time, format="sec")
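Stripped of the astropy wrappers, the core of random_times is exponential inter-arrival times with an optional constant dead time added to every interval. A minimal plain-numpy sketch of the same logic:

import numpy as np

rng = np.random.RandomState(0)
rate = 10.0        # events per second
dead_time = 0.0    # seconds added to every interval
size = 100

diffs = rng.exponential(scale=1.0 / rate, size=size) + dead_time
times = diffs.cumsum()
print(times[-1])   # roughly size / rate, i.e. about 10 s here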
Example #3
    def __call__(self, radius, blur=True, random_state="random-seed"):
        """Draw random position from spiral arm distribution.

        For each given radius [kpc] a spiral arm is chosen at random and the
        corresponding angle theta [rad] is computed. The arm indices are:

        * Norma = 0
        * Carina Sagittarius = 1
        * Perseus = 2
        * Crux Scutum = 3

        Parameters
        ----------
        radius : `~astropy.units.Quantity`
            Radius coordinate
        blur : bool, optional
            If True, blur the positions around the spiral arm centroid.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        radius, theta, spiralarm
            Radius and angle coordinates of the drawn positions, and the
            spiral arm each position belongs to.
        """
        random_state = get_random_state(random_state)

        # Choose spiral arm
        N = random_state.randint(0, 4, radius.size)
        theta = self.k[N] * np.log(radius / self.r_0[N]) + self.theta_0[N]
        spiralarm = self.spiralarms[N]

        if blur:  # Apply blurring model according to Faucher
            radius, theta = self._blur(radius, theta, random_state=random_state)
            radius, theta = self._gc_correction(
                radius, theta, random_state=random_state
            )
        return radius, theta, spiralarm
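The angle computation is just the equation of a logarithmic spiral, theta = k * ln(r / r_0) + theta_0, evaluated with the parameters of a randomly chosen arm. A stand-alone numpy sketch with illustrative arm parameters (the real values live in self.k, self.r_0 and self.theta_0):

import numpy as np

rng = np.random.RandomState(0)

# Illustrative parameters in the spirit of the Faucher-Giguere & Kaspi model;
# one entry per arm (Norma, Carina Sagittarius, Perseus, Crux Scutum)
k = np.array([4.25, 4.25, 4.89, 4.89])          # rad
r_0 = np.array([3.48, 3.48, 4.90, 4.90])        # kpc
theta_0 = np.array([1.57, 4.71, 4.09, 0.95])    # rad

radius = rng.uniform(3, 15, size=5)             # kpc
arm = rng.randint(0, 4, radius.size)            # arm index per source
theta = k[arm] * np.log(radius / r_0[arm]) + theta_0[arm]
print(arm, theta)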
Example #4
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 / u.TeV,
                                                  reference=0.1 * u.TeV)
        self.bkg_model = PowerLawSpectralModel(index=3,
                                               amplitude=1e4 / u.TeV,
                                               reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:])
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        # Currently it's necessary to specify a livetime
        self.src.livetime = 1 * u.s

        npred_bkg = self.bkg_model.integral(binning[:-1], binning[1:])

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)
Example #5
    def fake(self, background_model, random_state="random-seed"):
        """Simulate fake counts for the current model and reduced irfs.

        This method overwrites the counts and off counts defined on the dataset object.

        Parameters
        ----------
        background_model : `~gammapy.maps.RegionNDMap`
            Background model.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(random_state)

        npred = self.npred_sig()
        npred.data = random_state.poisson(npred.data)

        npred_bkg = background_model.evaluate().copy()
        npred_bkg.data = random_state.poisson(npred_bkg.data)

        self.counts = npred + npred_bkg

        npred_off = background_model.evaluate() / self.alpha
        npred_off.data = random_state.poisson(npred_off.data)
        self.counts_off = npred_off
Example #6
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = PowerLawSpectralModel(index=2,
                                                  amplitude=1e5 *
                                                  u.Unit("cm-2 s-1 TeV-1"),
                                                  reference=0.1 * u.TeV)
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.integral(binning[:-1], binning[1:]).value
        source_counts = random_state.poisson(npred)
        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)

        self.src.livetime = 1 * u.s
        self.aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")

        npred_bkg = bkg_model.integral(binning[:-1], binning[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_counts)
        self.off = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=off_counts)
Example #7
    def setup(self):
        self.nbins = 30
        energy = np.logspace(-1, 1, self.nbins + 1) * u.TeV
        self.source_model = SkyModel(
            spectral_model=PowerLawSpectralModel(index=2,
                                                 amplitude=1e5 *
                                                 u.Unit("cm-2 s-1 TeV-1"),
                                                 reference=0.1 * u.TeV))
        bkg_model = PowerLawSpectralModel(index=3,
                                          amplitude=1e4 *
                                          u.Unit("cm-2 s-1 TeV-1"),
                                          reference=0.1 * u.TeV)

        self.alpha = 0.1
        random_state = get_random_state(23)
        npred = self.source_model.spectral_model.integral(
            energy[:-1], energy[1:]).value
        source_counts = random_state.poisson(npred)

        axis = MapAxis.from_edges(energy, name="energy", interp="log")
        geom = RegionGeom(region=None, axes=[axis])

        self.src = RegionNDMap.from_geom(geom=geom, data=source_counts)
        self.exposure = RegionNDMap.from_geom(geom.as_energy_true,
                                              data=1,
                                              unit="cm2 s")

        npred_bkg = bkg_model.integral(energy[:-1], energy[1:]).value

        bkg_counts = random_state.poisson(npred_bkg)
        off_counts = random_state.poisson(npred_bkg * 1.0 / self.alpha)
        self.bkg = RegionNDMap.from_geom(geom=geom, data=bkg_counts)
        self.off = RegionNDMap.from_geom(geom=geom, data=off_counts)
Example #8
    def sample_coord(self, map_coord, random_state=0):
        """Apply the energy dispersion corrections on the coordinates of a set of simulated events.

        Parameters
        ----------
        map_coord : `~gammapy.maps.MapCoord` object.
            Sequence of coordinates and energies of sampled events.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        coords : `~gammapy.maps.MapCoord`
            Sequence of Edisp-corrected coordinates of the input map_coord.
        """
        random_state = get_random_state(random_state)
        migra_axis = self.edisp_map.geom.get_axis_by_name("migra")

        coord = {
            "skycoord": map_coord.skycoord.reshape(-1, 1),
            "energy_true": map_coord["energy_true"].reshape(-1, 1),
            "migra": migra_axis.center,
        }

        pdf_edisp = self.edisp_map.interp_by_coord(coord)

        sample_edisp = InverseCDFSampler(pdf_edisp, axis=1, random_state=random_state)
        pix_edisp = sample_edisp.sample_axis()
        migra = migra_axis.pix_to_coord(pix_edisp)

        energy_reco = map_coord["energy_true"] * migra

        return MapCoord.create({"skycoord": map_coord.skycoord, "energy": energy_reco})
Example #9
    def _gc_correction(radius,
                       theta,
                       r_corr=Quantity(2.857, "kpc"),
                       random_state="random-seed"):
        """Correction of source distribution towards the galactic center.

        To avoid spiral arm features near the Galactic Center, the position angle theta
        is blurred by a certain amount towards the GC.

        Parameters
        ----------
        radius : `~astropy.units.Quantity`
            Radius coordinate
        theta : `~astropy.units.Quantity`
            Angle coordinate
        r_corr : `~astropy.units.Quantity`, optional
            Scale of the correction towards the GC
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(random_state)

        theta_corr = Quantity(random_state.uniform(0, 2 * np.pi, radius.size),
                              "rad")
        return radius, theta + theta_corr * np.exp(-radius / r_corr)
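The correction itself is a uniform angular offset whose amplitude is damped exponentially with radius, so positions near the Galactic Center are strongly blurred while distant ones are left essentially untouched. A unit-free numpy sketch of the same weighting:

import numpy as np

rng = np.random.RandomState(0)
radius = np.array([0.5, 2.0, 8.0])     # kpc
theta = np.zeros_like(radius)          # rad
r_corr = 2.857                         # kpc

theta_corr = rng.uniform(0, 2 * np.pi, radius.size)
print(theta + theta_corr * np.exp(-radius / r_corr))  # large blur at small radii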
Example #10
def main():
    n_sources = 1000
    random_seed = 1
    random_state = get_random_state(random_seed)

    table_composites = make_composites(random_state=random_state)

    n_isolated_pwn = n_sources - len(table_composites)
    table_isolated = make_pwn_pos(n_sources=n_isolated_pwn, random_state=random_state)

    compute_glat_glon_distance(table_composites)

    table_composites = add_spectra(table_composites, random_state=random_state)
    polish_pwn_table(table_composites)

    select_those_to_removed(table_composites, tag='composite')

    filename_composite = 'ctadc_skymodel_gps_sources_composite.ecsv'
    print('Writing {}'.format(filename_composite))
    table_composites.write(filename_composite, format='ascii.ecsv', overwrite=True)

    compute_glat_glon_distance(table_isolated)
    table_isolated = add_spectra(table_isolated, random_state=random_state)
    polish_pwn_table(table_isolated)

    select_those_to_removed(table_isolated, tag='pwn')

    filename = 'ctadc_skymodel_gps_sources_pwn.ecsv'
    print('Writing {}'.format(filename))
    table_isolated.write(filename, format='ascii.ecsv', overwrite=True)
Example #11
    def _blur(radius, theta, amount=0.07, random_state="random-seed"):
        """Blur the positions around the centroid of the spiralarm.

        The given positions are blurred by drawing a displacement in radius from
        a normal distribution, with sigma = amount * radius. And a direction
        theta from a uniform distribution in the interval [0, 2 * pi].

        Parameters
        ----------
        radius : `~astropy.units.Quantity`
            Radius coordinate
        theta : `~astropy.units.Quantity`
            Angle coordinate
        amount: float, optional
            Amount of blurring of the position, given as a fraction of `radius`.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(random_state)

        dr = Quantity(
            abs(random_state.normal(0, amount * radius, radius.size)), "kpc")
        dtheta = Quantity(random_state.uniform(0, 2 * np.pi, radius.size),
                          "rad")
        x, y = cartesian(radius, theta)
        dx, dy = cartesian(dr, dtheta)
        return polar(x + dx, y + dy)
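Without astropy units, and with cartesian() and polar() replaced by the usual trigonometric identities, the same blurring reads as follows (a sketch, not the gammapy helpers):

import numpy as np

rng = np.random.RandomState(0)
radius = np.array([3.0, 8.0, 12.0])    # kpc
theta = np.array([0.3, 1.1, 2.0])      # rad
amount = 0.07

dr = np.abs(rng.normal(0, amount * radius, radius.size))
dtheta = rng.uniform(0, 2 * np.pi, radius.size)

x, y = radius * np.cos(theta), radius * np.sin(theta)
dx, dy = dr * np.cos(dtheta), dr * np.sin(dtheta)
print(np.hypot(x + dx, y + dy), np.arctan2(y + dy, x + dx))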
Example #12
    def fake(self, background_model, random_state="random-seed"):
        """Simulate fake counts for the current model and reduced irfs.

        This method overwrites the counts and off counts defined on the dataset object.

        Parameters
        ----------
        background_model : `~gammapy.spectrum.CountsSpectrum`
            Background model. In the future this will be part of the SpectrumDataset
            class; for the moment it is a CountsSpectrum.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(random_state)

        npred_sig = self.npred_sig()
        npred_sig.data = random_state.poisson(npred_sig.data)

        npred_bkg = background_model.copy()
        npred_bkg.data = random_state.poisson(npred_bkg.data)

        self.counts = npred_sig + npred_bkg

        npred_off = background_model / self.alpha
        npred_off.data = random_state.poisson(npred_off.data)
        self.counts_off = npred_off
Example #13
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV

        self.source_model = PowerLawSpectralModel(index=2.1,
                                                  amplitude=1e5 / u.TeV / u.s,
                                                  reference=0.1 * u.TeV)

        self.livetime = 100 * u.s

        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = bkg_rate * self.livetime

        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)

        random_state = get_random_state(23)
        self.npred = (self.source_model.integral(binning[:-1], binning[1:]) *
                      self.livetime)
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)

        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            livetime=self.livetime,
            background=self.bkg,
        )
Example #14
    def setup(self):
        self.nbins = 30
        binning = np.logspace(-1, 1, self.nbins + 1) * u.TeV

        self.source_model = PowerLawSpectralModel(index=2.1,
                                                  amplitude=1e5 *
                                                  u.Unit("cm-2 s-1 TeV-1"),
                                                  reference=0.1 * u.TeV)

        self.livetime = 100 * u.s
        aeff = EffectiveAreaTable.from_constant(binning, "1 cm2")

        bkg_rate = np.ones(self.nbins) / u.s
        bkg_expected = (bkg_rate * self.livetime).to_value("")

        self.bkg = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=bkg_expected)

        random_state = get_random_state(23)
        flux = self.source_model.integral(binning[:-1], binning[1:])
        self.npred = (flux * aeff.data.data[0] * self.livetime).to_value("")
        self.npred += bkg_expected
        source_counts = random_state.poisson(self.npred)

        self.src = CountsSpectrum(energy_lo=binning[:-1],
                                  energy_hi=binning[1:],
                                  data=source_counts)
        self.dataset = SpectrumDataset(
            model=self.source_model,
            counts=self.src,
            aeff=aeff,
            livetime=self.livetime,
            background=self.bkg,
        )
Example #15
    def sample_time(n_events, t_min, t_max, random_state=0):
        """Sample arrival times of events.

        Parameters
        ----------
        n_events : int
            Number of events to sample.
        t_min : `~astropy.time.Time`
            Start time of the sampling.
        t_max : `~astropy.time.Time`
            Stop time of the sampling.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        time : `~astropy.time.Time`
            Array with times of the sampled events.
        """
        random_state = get_random_state(random_state)

        t_min = Time(t_min)
        t_max = Time(t_max)

        t_stop = (t_max - t_min).sec

        time_delta = random_state.uniform(high=t_stop, size=n_events) * u.s

        return t_min + time_delta
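A self-contained usage sketch of the same pattern with plain numpy and astropy (note that, as in the method above, the returned times are not sorted):

import astropy.units as u
import numpy as np
from astropy.time import Time

rng = np.random.RandomState(0)

t_min = Time("2021-01-01T00:00:00")
t_max = Time("2021-01-01T01:00:00")
t_stop = (t_max - t_min).sec

times = t_min + rng.uniform(high=t_stop, size=5) * u.s
print(times.isot)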
Example #16
def add_pulsar_parameters(
    table,
    B_mean=12.05,
    B_stdv=0.55,
    P_mean=0.3,
    P_stdv=0.15,
    random_state="random-seed",
):
    """Add pulsar parameters to the table.

    The birth period and log10 of the magnetic field are drawn from normal
    distributions, by default with B_mean=12.05 [log10 Gauss], B_stdv=0.55,
    P_mean=0.3 [s] and P_stdv=0.15 [s].

    Parameters
    ----------
    table : `~astropy.table.Table`
        Table with an ``age`` column, to which the pulsar columns are added.
    B_mean, B_stdv : float, optional
        Mean and standard deviation of log10 of the birth magnetic field [Gauss].
    P_mean, P_stdv : float, optional
        Mean and standard deviation of the birth period [s].
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.
    """
    random_state = get_random_state(random_state)
    # Read relevant columns
    age = table["age"].quantity

    # Draw the initial values for the period and magnetic field
    def p_dist(x):
        return np.exp(-0.5 * ((x - P_mean) / P_stdv) ** 2)

    p0_birth = draw(0, 2, len(table), p_dist, random_state=random_state)
    p0_birth = Quantity(p0_birth, "s")

    log10_b_psr = random_state.normal(B_mean, B_stdv, len(table))
    b_psr = Quantity(10 ** log10_b_psr, "G")

    # Compute pulsar parameters
    psr = Pulsar(p0_birth, b_psr)
    p0 = psr.period(age)
    p1 = psr.period_dot(age)
    p1_birth = psr.P_dot_0
    tau = psr.tau(age)
    tau_0 = psr.tau_0
    l_psr = psr.luminosity_spindown(age)
    l0_psr = psr.L_0

    # Add columns to table
    table["P0"] = Column(p0, unit="s", description="Pulsar period")
    table["P1"] = Column(p1, unit="", description="Pulsar period derivative")
    table["P0_birth"] = Column(p0_birth, unit="s", description="Pulsar birth period")
    table["P1_birth"] = Column(
        p1_birth, unit="", description="Pulsar birth period derivative"
    )
    table["CharAge"] = Column(tau, unit="yr", description="Pulsar characteristic age")
    table["Tau0"] = Column(tau_0, unit="yr")
    table["L_PSR"] = Column(l_psr, unit="erg s-1")
    table["L0_PSR"] = Column(l0_psr, unit="erg s-1")
    table["B_PSR"] = Column(
        b_psr, unit="Gauss", description="Pulsar magnetic field at the poles"
    )
    return table
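The helper draw is not shown in this snippet; one simple way to obtain the same kind of truncated-Gaussian sample for the birth period is rejection sampling against the unnormalised density (a hypothetical stand-in, not the gammapy implementation):

import numpy as np

def draw_rejection(low, high, size, dist, random_state, f_max=1.0):
    # Rejection-sample values in [low, high] with density proportional to dist;
    # assumes dist(x) <= f_max on the interval.
    out = np.empty(0)
    while out.size < size:
        x = random_state.uniform(low, high, size)
        keep = random_state.uniform(0, f_max, size) < dist(x)
        out = np.append(out, x[keep])
    return out[:size]

rng = np.random.RandomState(0)
P_mean, P_stdv = 0.3, 0.15

def p_dist(x):
    return np.exp(-0.5 * ((x - P_mean) / P_stdv) ** 2)

p0_birth = draw_rejection(0, 2, 1000, p_dist, rng)
print(p0_birth.mean())   # close to P_mean, with the x < 0 tail cut away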
Example #17
def get_test_data():
    n_bins = 1
    random_state = get_random_state(3)
    model = random_state.rand(n_bins) * 20
    data = random_state.poisson(model)
    staterror = np.sqrt(data)
    off_vec = random_state.poisson(0.7 * model)
    alpha = np.array([0.2] * len(model))
    return data, model, staterror, off_vec, alpha
Example #18
def get_test_data():
    n_bins = 1
    random_state = get_random_state(3)
    model = random_state.rand(n_bins) * 20
    data = random_state.poisson(model)
    staterror = np.sqrt(data) 
    off_vec = random_state.poisson(0.7 * model)
    alpha = np.array([0.2] * len(model))
    return data, model, staterror, off_vec, alpha
Example #19
def make_catalog_random_positions_cube(size=100,
                                       dimension=3,
                                       distance_max="1 pc",
                                       random_state="random-seed"):
    """Make a catalog of sources randomly distributed on a line, square or cube.

    This can be used to study basic source population distribution effects,
    e.g. what the distance distribution looks like, or for a given luminosity
    function what the resulting flux distributions are for different spatial
    configurations.

    Parameters
    ----------
    size : int
        Number of sources
    dimension : {1, 2, 3}
        Number of dimensions
    distance_max : `~astropy.units.Quantity`
        Maximum distance
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    table : `~astropy.table.Table`
        Table with 3D position cartesian coordinates.
        Columns: x (pc), y (pc), z (pc)
    """
    distance_max = Quantity(distance_max).to_value("pc")
    random_state = get_random_state(random_state)

    # Generate positions 1D, 2D, or 3D
    if dimension == 1:
        x = random_state.uniform(-distance_max, distance_max, size)
        y, z = 0, 0
    elif dimension == 2:
        x = random_state.uniform(-distance_max, distance_max, size)
        y = random_state.uniform(-distance_max, distance_max, size)
        z = 0
    elif dimension == 3:
        x = random_state.uniform(-distance_max, distance_max, size)
        y = random_state.uniform(-distance_max, distance_max, size)
        z = random_state.uniform(-distance_max, distance_max, size)
    else:
        raise ValueError("Invalid dimension: {}".format(dimension))

    table = Table()
    table["x"] = Column(x, unit="pc", description="Cartesian coordinate")
    table["y"] = Column(y, unit="pc", description="Cartesian coordinate")
    table["z"] = Column(z, unit="pc", description="Cartesian coordinate")

    return table
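A usage sketch for the function above (in recent gammapy versions it is expected to be importable from gammapy.astro.population, but the exact path may differ):

import numpy as np

table = make_catalog_random_positions_cube(
    size=1000, dimension=2, distance_max="2 pc", random_state=0
)
r = np.hypot(table["x"].data, table["y"].data)
print(table.colnames, r.max())   # ['x', 'y', 'z'], r.max() <= 2 * sqrt(2) pc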
Example #20
    def sample_time(self,
                    n_events,
                    t_min,
                    t_max,
                    t_delta="1 s",
                    random_state=0):
        """Sample arrival times of events.

        Parameters
        ----------
        n_events : int
            Number of events to sample.
        t_min : `~astropy.time.Time`
            Start time of the sampling.
        t_max : `~astropy.time.Time`
            Stop time of the sampling.
        t_delta : `~astropy.units.Quantity`
            Time step used for sampling of the temporal model.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        time : `~astropy.time.Time`
            Array with times of the sampled events.
        """
        time_unit = getattr(u, self.table.meta["TIMEUNIT"])

        t_min = Time(t_min)
        t_max = Time(t_max)
        t_delta = u.Quantity(t_delta)
        random_state = get_random_state(random_state)

        ontime = u.Quantity((t_max - t_min).sec, "s")
        t_stop = ontime.to_value(time_unit)

        # TODO: the separate time unit handling is unfortunate, but the quantity support for np.arange and np.interp
        #  is still incomplete, refactor once we change to recent numpy and astropy versions
        t_step = t_delta.to_value(time_unit)
        t = np.arange(0, t_stop, t_step)

        pdf = self.evaluate(t)

        sampler = InverseCDFSampler(pdf=pdf, random_state=random_state)
        time_pix = sampler.sample(n_events)[0]
        time = np.interp(time_pix, np.arange(len(t)), t) * time_unit

        return t_min + time
Example #21
    def fake(self, random_state="random-seed"):
        """Simulate fake counts for the current model and reduced irfs.

        This method overwrites the counts defined on the dataset object.

        Parameters
        ----------
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(random_state)
        npred = self.npred()
        npred.data = random_state.poisson(npred.data)
        self.counts = npred
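The reproducibility contract boils down to how numpy's RandomState is seeded: the same integer seed reproduces the same fake counts, while 'random-seed' (understood here as RandomState(None)) gives a fresh realisation on every call. A plain-numpy sketch of the fluctuation step:

import numpy as np

npred = np.array([3.2, 10.5, 0.7])

counts_a = np.random.RandomState(42).poisson(npred)
counts_b = np.random.RandomState(42).poisson(npred)
assert (counts_a == counts_b).all()        # identical for the same seed

counts_c = np.random.RandomState(None).poisson(npred)
print(counts_a, counts_c)                  # counts_c differs from run to run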
Example #22
    def sample_coord(self, map_coord, random_state=0):
        """Apply PSF corrections on the coordinates of a set of simulated events.

        Parameters
        ----------
        map_coord : `~gammapy.maps.MapCoord` object.
            Sequence of coordinates and energies of sampled events.
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.

        Returns
        -------
        corr_coord : `~gammapy.maps.MapCoord` object.
            Sequence of PSF-corrected coordinates of the input map_coord map.
        """

        random_state = get_random_state(random_state)
        rad_axis = self.psf_map.geom.axes["rad"]

        coord = {
            "skycoord": map_coord.skycoord.reshape(-1, 1),
            "energy_true": map_coord["energy_true"].reshape(-1, 1),
            "rad": rad_axis.center,
        }

        pdf = (
            self.psf_map.interp_by_coord(coord)
            * rad_axis.center.value
            * rad_axis.bin_width.value
        )

        sample_pdf = InverseCDFSampler(pdf, axis=1, random_state=random_state)
        pix_coord = sample_pdf.sample_axis()
        separation = rad_axis.pix_to_coord(pix_coord)

        position_angle = random_state.uniform(0, 360, size=len(map_coord.lon)) * u.deg

        event_positions = map_coord.skycoord.directional_offset_by(
            position_angle=position_angle, separation=separation
        )
        return MapCoord.create(
            {"skycoord": event_positions, "energy_true": map_coord["energy_true"]}
        )
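The last step relies on `~astropy.coordinates.SkyCoord.directional_offset_by` to turn a sampled separation and a uniform position angle into new sky positions. A self-contained astropy sketch of that offsetting step:

import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord

rng = np.random.RandomState(0)

centers = SkyCoord(ra=[10.0, 10.0] * u.deg, dec=[20.0, 20.0] * u.deg)
separation = [0.1, 0.3] * u.deg
position_angle = rng.uniform(0, 360, size=2) * u.deg

offset = centers.directional_offset_by(position_angle=position_angle,
                                        separation=separation)
print(centers.separation(offset).deg)   # ~ [0.1, 0.3]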
Example #23
def make_catalog_random_positions_sphere(size=100,
                                         distance_min="0 pc",
                                         distance_max="1 pc",
                                         random_state="random-seed"):
    """Sample random source locations in a sphere.

    This can be used to generate an isotropic source population
    in a sphere, e.g. to represent extra-galactic sources.

    Parameters
    ----------
    size : int
        Number of sources
    distance_min, distance_max : `~astropy.units.Quantity`
        Minimum and maximum distance
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    catalog : `~astropy.table.Table`
        Table with 3D position spherical coordinates.
        Columns: lon (deg), lat (deg), distance(pc)
    """
    distance_min = Quantity(distance_min).to_value("pc")
    distance_max = Quantity(distance_max).to_value("pc")
    random_state = get_random_state(random_state)

    lon, lat = sample_sphere(size, random_state=random_state)
    distance = sample_sphere_distance(distance_min, distance_max, size,
                                      random_state)

    table = Table()

    table["lon"] = Column(lon, unit="rad", description="Spherical coordinate")
    table["lat"] = Column(lat, unit="rad", description="Spherical coordinate")
    table["distance"] = Column(distance,
                               unit="pc",
                               description="Spherical coordinate")

    return table
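For reference, one standard recipe that produces an isotropic, uniform-density population like the one sample_sphere and sample_sphere_distance are used for here (a numpy sketch, not the gammapy implementation):

import numpy as np

rng = np.random.RandomState(0)
size = 1000
d_min, d_max = 0.0, 1.0                     # pc

lon = rng.uniform(0, 2 * np.pi, size)       # uniform longitude
lat = np.arcsin(rng.uniform(-1, 1, size))   # uniform in sin(latitude)

# Uniform volume density between d_min and d_max: P(d) proportional to d**2
distance = rng.uniform(d_min ** 3, d_max ** 3, size) ** (1 / 3)
print(lon[:3], lat[:3], distance[:3])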
Example #24
def fill_poisson(map_in, mu, random_state="random-seed"):
    """Fill a map object with a poisson random variable.

    This can be useful for testing, to make a simulated counts image.
    E.g. filling with ``mu=0.5`` fills the map so that many pixels
    have value 0 or 1, and a few more "counts".

    Parameters
    ----------
    map_in : `~gammapy.maps.Map`
        Input map
    mu : scalar or `~numpy.ndarray`
        Expectation value
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.
    """
    random_state = get_random_state(random_state)
    idx = map_in.geom.get_idx(flat=True)
    mu = random_state.poisson(mu, idx[0].shape)
    map_in.fill_by_idx(idx, mu)
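For instance, with ``mu=0.5`` the Poisson counts are indeed dominated by zeros and ones, as described in the docstring (pure numpy, independent of the map machinery):

import numpy as np

rng = np.random.RandomState(0)
counts = rng.poisson(0.5, size=10000)
print(np.bincount(counts))   # mostly 0s and 1s, a handful of larger values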
Example #25
    def simulate_obs(self, obs_id, seed='random-seed'):
        """Simulate one `~gammapy.spectrum.SpectrumObservation`.

        The result is stored as the ``obs`` attribute.

        Parameters
        ----------
        obs_id : int
            Observation identifier
        seed : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            See :func:`~gammapy.utils.random.get_random_state`.
        """
        random_state = get_random_state(seed)
        self.simulate_source_counts(random_state)
        obs = SpectrumObservation(on_vector=self.on_vector,
                                  off_vector=self.off_vector,
                                  aeff=self.aeff,
                                  edisp=self.edisp)
        self.simulate_background_counts()
        obs.off_vector = self.off_vector
        obs.obs_id = obs_id
        self.obs = obs
Example #26
def generate_dataset(Eflux,
                     flux,
                     Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points, either as
    a SpectrumDataset or a SpectrumDatasetOnOff.

    Note:
    - in SpectrumDataset, the background counts are assumed to be precisely
      known and are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
      fluctuated from the values known from the IRF.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over all
        energies. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00',scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataSet.
        The default is True.
    seed : String or int, optional
        The seed for the random number generator; if an integer, the same
        random series is generated at each run. The default is 'random-seed'.
    debug : Boolean, optional
        If True, print some debugging information. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.

    """
    random_state = get_random_state(seed)

    ### Define on region
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg,
                           frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axis (see spectrum analysis notebook)
    # edges for SpectrumDataset - all dataset should have the same axes
    # Note that linear spacing is clearly problematic for powerlaw fluxes
    # Axes can also be defined using MapAxis
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value
    #     ereco = np.logspace(np.log10(1.1*E1v), np.log10(0.9*E2v), 20) * unit
    #     ereco_axis = MapAxis.from_edges(ereco.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy",
    #                                    interp="log")

    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")

    #     etrue = np.logspace(np.log10(    E1v), np.log10(    E2v), 50) * unit
    #     etrue_axis = MapAxis.from_edges(etrue.to("TeV").value,
    #                                    unit="TeV",
    #                                    name="energy_true",
    #                                    interp="log")
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if (debug):
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})

    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(
        e_reco=ereco_axis,  # Ereco.edges,
        e_true=etrue_axis,  #Etrue.edges,
        region=on_region,
        name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model
    # Restrict the safe energy range if requested (Erange=None keeps the default mask)
    if Erange is not None:
        mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                             energy_max=Erange[1])
        mask = mask & ds.mask_safe.data
        ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory ?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if (onoff):

        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 /
                                                        alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if (onoff):
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds
Example #27
npred = evaluator.compute_npred()
npred_map = WcsNDMap(geom, npred)

fig, ax, cbar = npred_map.sum_over_axes().plot(add_cbar=True)
ax.scatter(
    [lon_0_1, lon_0_2, pointing.galactic.l.degree],
    [lat_0_1, lat_0_2, pointing.galactic.b.degree],
    transform=ax.get_transform("galactic"),
    marker="+",
    color="cyan",
)
# plt.show()
plt.clf()

rng = get_random_state(42)
counts = rng.poisson(npred)
counts_map = WcsNDMap(geom, counts)

counts_map.sum_over_axes().plot()
# plt.show()
plt.clf()

models.parameters.set_error(2, 0.1 * u.deg)
models.parameters.set_error(4, 1e-12 * u.Unit("cm-2 s-1 TeV-1"))
models.parameters.set_error(8, 0.1 * u.deg)
models.parameters.set_error(10, 1e-12 * u.Unit("cm-2 s-1 TeV-1"))

fit = MapFit(model=models, counts=counts_map, exposure=exposure_map)
fit.run()
Example #28
# Define data shape
shape = (200, 200)
y, x = np.indices(shape)

# Create a new WCS object
w = WCS(naxis=2)

# Set up an Galactic projection
w.wcs.crpix = [99, 99]
w.wcs.cdelt = np.array([0.02, 0.02])
w.wcs.crval = [0, 0]
w.wcs.ctype = ["GLON-CAR", "GLAT-CAR"]

# Fake data
random_state = get_random_state(0)
data = random_state.poisson(model(x, y))

# Save data
header = w.to_header()

hdu = fits.PrimaryHDU(data=data.astype("int16"), header=header)
hdu.writeto("counts.fits.gz", clobber=True)

hdu = fits.PrimaryHDU(data=model(x, y).astype("float32"), header=header)
hdu.writeto("model.fits.gz", clobber=True)

hdu = fits.PrimaryHDU(data=background(x, y).astype("int16"), header=header)
hdu.writeto("background.fits.gz", clobber=True)

hdu = fits.PrimaryHDU(data=source(x, y).astype("float32"), header=header)
Example #29
    def __init__(self, random_state="random-seed"):
        self.random_state = get_random_state(random_state)
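For orientation, a minimal reimplementation of the dispatch that get_random_state is documented to perform throughout these examples (the actual gammapy.utils.random implementation may differ in details):

import numbers
import numpy as np

def get_random_state_sketch(init):
    if isinstance(init, np.random.RandomState):
        return init                          # pass an existing RNG through
    if isinstance(init, numbers.Integral):
        return np.random.RandomState(init)   # reproducible, seeded RNG
    if init == "random-seed":
        return np.random.RandomState(None)   # fresh seed from the OS
    if init == "global-rng":
        return np.random.mtrand._rand        # numpy's global RandomState
    raise ValueError("Invalid random_state: {!r}".format(init))

print(get_random_state_sketch(0).uniform())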
Example #30
def simulate_dataset(
    skymodel,
    geom,
    pointing,
    irfs,
    livetime=1 * u.h,
    offset=0 * u.deg,
    max_radius=0.8 * u.deg,
    random_state="random-seed",
):
    """Simulate a 3D dataset.

    Simulate a source defined with a sky model for a given pointing,
    geometry and irfs for a given exposure time.
    This will return a dataset object which includes the counts cube,
    the exposure cube, the psf cube, the background model and the sky model.

    Parameters
    ----------
    skymodel : `~gammapy.modeling.models.SkyModel`
        Sky model of the source to simulate
    geom : `~gammapy.maps.WcsGeom`
        Geometry object for the observation
    pointing : `~astropy.coordinates.SkyCoord`
        Pointing position
    irfs : dict
        Irfs used for simulating the observation
    livetime : `~astropy.units.Quantity`
        Livetime exposure of the simulated observation
    offset : `~astropy.units.Quantity`
        Offset from the center of the pointing position.
        This is used for the PSF and Edisp estimation
    max_radius : `~astropy.coordinates.Angle`
        The maximum radius of the PSF kernel.
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.

    Returns
    -------
    dataset : `~gammapy.cube.MapDataset`
        A dataset of the simulated observation.
    """
    background = make_map_background_irf(
        pointing=pointing, ontime=livetime, bkg=irfs["bkg"], geom=geom
    )

    background_model = BackgroundModel(background)

    psf = irfs["psf"].to_energy_dependent_table_psf(theta=offset)
    psf_kernel = PSFKernel.from_table_psf(psf, geom, max_radius=max_radius)

    exposure = make_map_exposure_true_energy(
        pointing=pointing, livetime=livetime, aeff=irfs["aeff"], geom=geom
    )

    if "edisp" in irfs:
        energy = geom.axes[0].edges
        edisp = irfs["edisp"].to_energy_dispersion(offset, e_reco=energy, e_true=energy)
    else:
        edisp = None

    dataset = MapDataset(
        model=skymodel,
        exposure=exposure,
        background_model=background_model,
        psf=psf_kernel,
        edisp=edisp,
    )

    npred_map = dataset.npred()
    rng = get_random_state(random_state)
    counts = rng.poisson(npred_map.data)
    dataset.counts = WcsNDMap(geom, counts)

    return dataset
Example #31
# MODEL
model = PowerLaw(index=2.3 * u.Unit(""), amplitude=2.5 * 1e-12 * u.Unit("cm-2 s-1 TeV-1"), reference=1 * u.TeV)

# COUNTS
livetime = 2 * u.h
npred = calculate_predicted_counts(model=model, aeff=aeff, edisp=edisp, livetime=livetime)

bkg = 0.2 * npred.data
alpha = 0.1
counts_kwargs = dict(
    energy=npred.energy,
    exposure=livetime,
    obs_id=31415,
    creator="Simulation",
    hi_threshold=50 * u.TeV,
    lo_threshold=lo_threshold,
)

rand = get_random_state(42)

on_counts = rand.poisson(npred.data) + rand.poisson(bkg)
on_vector = PHACountsSpectrum(data=on_counts, backscal=1, **counts_kwargs)

off_counts = rand.poisson(1.0 / alpha * bkg)
off_vector = PHACountsSpectrum(data=off_counts, backscal=1.0 / alpha, is_bkg=True, **counts_kwargs)


obs = SpectrumObservation(on_vector=on_vector, off_vector=off_vector, aeff=aeff, edisp=edisp)

obs.write()
Example #32
def make_images(psf_sigma):
    # Define width of the source and the PSF
    source_sigma = 4
    sigma = np.sqrt(psf_sigma ** 2 + source_sigma ** 2)
    amplitude = 1E3 / (2 * np.pi * sigma ** 2)

    source = Gaussian2D(amplitude, 99, 99, sigma, sigma)
    background = Const2D(1)
    model = source + background

    # Define data shape
    shape = (200, 200)
    y, x = np.indices(shape)

    # Create a new WCS object
    wcs = WCS(naxis=2)

    # Set up an Galactic projection
    wcs.wcs.crpix = [100.5, 100.5]
    wcs.wcs.cdelt = np.array([0.02, 0.02])
    wcs.wcs.crval = [0, 0]
    wcs.wcs.ctype = ['GLON-CAR', 'GLAT-CAR']

    # Fake data
    random_state = get_random_state(0)
    data = random_state.poisson(model(x, y))

    # Create exclusion mask
    center = SkyCoord(0, 0, frame='galactic', unit='deg')
    circle = CircleSkyRegion(center, 0.5 * u.deg)
    exclusion = SkyImage(data=x, wcs=wcs).region_mask(circle)

    # Save data
    header = wcs.to_header()

    mask = ~exclusion.data
    hdu = fits.PrimaryHDU(data=mask.astype('int32'), header=header)
    filename = 'exclusion.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)

    hdu = fits.PrimaryHDU(data=data.astype('int32'), header=header)
    filename = 'counts.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)

    hdu = fits.PrimaryHDU(data=model(x, y).astype('float32'), header=header)
    filename = 'model.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)

    hdu = fits.PrimaryHDU(data=background(x, y).astype('float32'), header=header)
    filename = 'background.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)

    hdu = fits.PrimaryHDU(data=source(x, y).astype('float32'), header=header)
    filename = 'source.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)

    exposure = 1E12 * np.ones(shape)
    hdu = fits.PrimaryHDU(data=exposure.astype('float32'), header=header)
    filename = 'exposure.fits.gz'
    print('Writing {}'.format(filename))
    hdu.writeto(filename, clobber=True)
Example #33
    def __init__(self,
                 niter  = 1,
                 method = 0,
                 debug  = 0,
                 fluctuate = True,
                 nosignal  = False,
                 seed = 'random-seed',
                 name = "Unknown"):
        """
        Initialize class members to default values

        Parameters
        ----------
        niter : Integer, optional
            Number of Monte Carlo iterations. The default is 1.
        method : integer, optional
            Aperture photometry if 0, energy on-off if 1. The default is 0.
        debug : Boolean, optional
            If True, verbose mode. The default is 0.
        fluctuate : Boolean, optional
            If False, generate one simulation with no fluctuation.
            The default is True.
        nosignal : Boolean, optional
            If True, force the signal to strictly zero. Default is False.
        seed : String or integer, optional
            The value of the seed to obtain the random state. Using a fixed
            number has the consequence that, for all GRBs, the same
            fluctuations are generated along the trials.
            This can systematically bias the fraction of iterations reaching 3
            sigma in the first trials, and make the acceleration option
            inoperative. It is also very dangerous if the number of iterations
            is low. The default is 'random-seed'.
        name : String, optional
            Name of the simulation (usually related to the GRB name and the
            site). The default is "Unknown".

        Returns
        -------
        None.

        """
        self.dbg       = debug

        # Input parameters and objects
        self.niter     = niter     # Number of trials
        self.method    = method    # Analysis method
        self.fluctuate = fluctuate # Poisson fluctuate the count numbers
        self.nosignal  = nosignal  # Force signal count to zero
        self.slot      = None      # The time slot (slices) of this simulation
        self.name      = name

        # The random state is reinitialised here and would lead to the
        # same sequence for all GRB if the seed is a fixed number
        self.rnd_state =  get_random_state(seed)

        # Data set list
        self.dset_list = [] # Not a Datasets object, just my own list so far

        # list of simulations (one per slice)
        self.simulations = None # For gammapy 0.12 compatibility

        # Significance over simulations
        self.id_smax_list  = [] # Slice indices to get back the time/ altaz
        self.smax_list     = [] # List of max. significances along trials
        self.nex_smax_list = [] # List of excess counts at max. signif.
        self.nb_smax_list  = [] # List of background counts at max. signif.

        self.id_3s_list    = [] # Slice indices ot get back the time/altaz
        self.nex_3s_list   = [] # List of excess
        self.nb_3s_list    = [] # List of background
        self.detect_3s     = 0  # Number of trials 3 sigma was reached

        self.id_5s_list    = [] # Slice indices to get back the time/altaz
        self.nex_5s_list   = [] # List of excess
        self.nb_5s_list    = [] # List of background
        self.detect_5s     = 0  # Number of trials 5 sigma was reached

        # Mean sigma versus time - one value per time slice
        self.sigma_mean = []
        self.sigma_std  = []

        # Slice number with error or warning
        self.err_slice  = []  # useful?

        self.mctime = 0.00
        self.err    = -999 # error code : default, simulation is not completed

        return
Example #34
    def simulate_obs(perf, target, obs_param, obs_id=0, random_state='random-seed'):
        """Simulate observation with given parameters.

        Parameters
        ----------
        perf : `~gammapy.scripts.CTAPerf`
            CTA performance
        target : `~gammapy.scripts.Target`
            Source
        obs_param : `~gammapy.scripts.ObservationParameters`
            Observation parameters
        obs_id : `int`, optional
            Observation Id
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        livetime = obs_param.livetime
        alpha = obs_param.alpha.value
        emin = obs_param.emin
        emax = obs_param.emax

        model = target.model

        # Compute expected counts
        reco_energy = perf.bkg.energy
        bkg_rate_values = perf.bkg.data.data * livetime.to('s')
        predicted_counts = CountsPredictor(model=model,
                                           aeff=perf.aeff,
                                           livetime=livetime,
                                           edisp=perf.rmf)
        predicted_counts.run()
        npred = predicted_counts.npred
        # set negative values to zero (interpolation issue)
        idx = np.where(npred.data.data < 0.)
        npred.data.data[idx] = 0

        # Randomise counts
        random_state = get_random_state(random_state)
        on_counts = random_state.poisson(npred.data.data.value)  # excess
        bkg_counts = random_state.poisson(bkg_rate_values.value)  # bkg in ON region
        off_counts = random_state.poisson(bkg_rate_values.value / alpha)  # bkg in OFF region

        on_counts += bkg_counts  # evts in ON region

        on_vector = PHACountsSpectrum(
            data=on_counts,
            backscal=1,
            energy_lo=reco_energy.lo,
            energy_hi=reco_energy.hi,
        )

        on_vector.livetime = livetime
        off_vector = PHACountsSpectrum(energy_lo=reco_energy.lo,
                                       energy_hi=reco_energy.hi,
                                       data=off_counts,
                                       backscal=1. / alpha,
                                       is_bkg=True,
                                       )
        off_vector.livetime = livetime

        obs = SpectrumObservation(on_vector=on_vector,
                                  off_vector=off_vector,
                                  aeff=perf.aeff,
                                  edisp=perf.rmf)
        obs.obs_id = obs_id

        # Set threshold according to the closest energy reco from bkg bins
        idx_min = np.abs(reco_energy.lo - emin).argmin()
        idx_max = np.abs(reco_energy.lo - emax).argmin()
        obs.lo_threshold = reco_energy.lo[idx_min]
        obs.hi_threshold = reco_energy.lo[idx_max]

        return obs
Example #35
npred = evaluator.compute_npred()
npred_map = WcsNDMap(geom, npred)

fig, ax, cbar = npred_map.sum_over_axes().plot(add_cbar=True)
ax.scatter(
    [lon_0_1, lon_0_2, pointing.galactic.l.degree],
    [lat_0_1, lat_0_2, pointing.galactic.b.degree],
    transform=ax.get_transform("galactic"),
    marker="+",
    color="cyan",
)
# plt.show()
plt.clf()

rng = get_random_state(42)
counts = rng.poisson(npred)
counts_map = WcsNDMap(geom, counts)

counts_map.sum_over_axes().plot()
# plt.show()
plt.clf()

compound_model.parameters.set_error(2, 0.1 * u.deg)
compound_model.parameters.set_error(4, 1e-12 * u.Unit("cm-2 s-1 TeV-1"))
compound_model.parameters.set_error(8, 0.1 * u.deg)
compound_model.parameters.set_error(10, 1e-12 * u.Unit("cm-2 s-1 TeV-1"))

fit = MapFit(model=compound_model, counts=counts_map, exposure=exposure_map)
fit.run()
Example #36
    def simulate_obs(perf,
                     target,
                     obs_param,
                     obs_id=0,
                     random_state='random-seed'):
        """Simulate observation with given parameters.

        Parameters
        ----------
        perf : `~gammapy.scripts.CTAPerf`
            CTA performance
        target : `~gammapy.scripts.Target`
            Source
        obs_param : `~gammapy.scripts.ObservationParameters`
            Observation parameters
        obs_id : `int`, optional
            Observation Id
        random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
            Defines random number generator initialisation.
            Passed to `~gammapy.utils.random.get_random_state`.
        """
        livetime = obs_param.livetime
        alpha = obs_param.alpha.value
        emin = obs_param.emin
        emax = obs_param.emax

        model = target.model

        # Compute expected counts
        reco_energy = perf.bkg.energy
        bkg_rate_values = perf.bkg.data.data * livetime.to('s')
        predicted_counts = CountsPredictor(model=model,
                                           aeff=perf.aeff,
                                           livetime=livetime,
                                           edisp=perf.rmf)
        predicted_counts.run()
        npred = predicted_counts.npred
        # set negative values to zero (interpolation issue)
        idx = np.where(npred.data.data < 0.)
        npred.data.data[idx] = 0

        # Randomise counts
        random_state = get_random_state(random_state)
        on_counts = random_state.poisson(npred.data.data.value)  # excess
        bkg_counts = random_state.poisson(
            bkg_rate_values.value)  # bkg in ON region
        off_counts = random_state.poisson(bkg_rate_values.value /
                                          alpha)  # bkg in OFF region

        on_counts += bkg_counts  # evts in ON region

        on_vector = PHACountsSpectrum(
            data=on_counts,
            backscal=1,
            energy_lo=reco_energy.lo,
            energy_hi=reco_energy.hi,
        )

        on_vector.livetime = livetime
        off_vector = PHACountsSpectrum(
            energy_lo=reco_energy.lo,
            energy_hi=reco_energy.hi,
            data=off_counts,
            backscal=1. / alpha,
            is_bkg=True,
        )
        off_vector.livetime = livetime

        obs = SpectrumObservation(on_vector=on_vector,
                                  off_vector=off_vector,
                                  aeff=perf.aeff,
                                  edisp=perf.rmf)
        obs.obs_id = obs_id

        # Set threshold according to the closest energy reco from bkg bins
        idx_min = np.abs(reco_energy.lo - emin).argmin()
        idx_max = np.abs(reco_energy.lo - emax).argmin()
        obs.lo_threshold = reco_energy.lo[idx_min]
        obs.hi_threshold = reco_energy.lo[idx_max]

        return obs