示例#1
0
def observations_magic():
    """Load the two MAGIC Crab DL3 runs shipped with ``$GAMMAPY_DATA``."""
    paths = (
        "$GAMMAPY_DATA/magic/rad_max/data/magic_dl3_run_05029747.fits",
        "$GAMMAPY_DATA/magic/rad_max/data/magic_dl3_run_05029748.fits",
    )
    return [Observation.read(path) for path in paths]
示例#2
0
def observations_magic_rad_max():
    """Load the two MAGIC DL3 Crab runs with energy-dependent RAD_MAX tables."""
    paths = (
        "$GAMMAPY_DATA/magic/rad_max/data/20131004_05029747_DL3_CrabNebula-W0.40+035.fits",
        "$GAMMAPY_DATA/magic/rad_max/data/20131004_05029748_DL3_CrabNebula-W0.40+215.fits",
    )
    return [Observation.read(path) for path in paths]
示例#3
0
def test_mde_run_switchoff(dataset, models):
    """Event sampling with PSF, energy dispersion and background disabled.

    With ``random_state=0`` the sampled event list and its metadata must
    stay byte-for-byte reproducible; the expected values below are the
    pinned regression reference.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    livetime = 1.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(obs_id=1001,
                             pointing=pointing,
                             livetime=livetime,
                             irfs=irfs)

    dataset.models = models

    # Switch off instrument effects and background so only the bare
    # source model contributes to the sampled events.
    dataset.psf = None
    dataset.edisp = None
    dataset.background = None

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)

    # Fixed seed -> exact event count and first-event coordinates.
    assert len(events.table) == 88
    assert_allclose(events.table["ENERGY"][0], 2.751205, rtol=1e-5)
    assert_allclose(events.table["RA"][0], 266.559566, rtol=1e-5)
    assert_allclose(events.table["DEC"][0], -28.742429, rtol=1e-5)

    meta = events.table.meta

    # Header keywords propagated from the observation.
    assert meta["RA_PNT"] == 266.4049882865447
    assert meta["ONTIME"] == 3600.0
    assert meta["OBS_ID"] == 1001
    assert meta["RADECSYS"] == "icrs"
示例#4
0
def test_mde_run(dataset):
    """Event sampling reproducibility, source+background and background-only.

    NOTE(review): another test with the same name appears later in this
    file; if both live in one module the later definition shadows this one.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    livetime = 10.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(obs_id=1001,
                             pointing=pointing,
                             livetime=livetime,
                             irfs=irfs)

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)

    # Background-only copy: keep just the second model of the fixture
    # (presumably the background model — confirm against the fixture).
    dataset_bkg = dataset.copy()
    dataset_bkg.models = dataset_bkg.models[1]
    events_bkg = sampler.run(dataset=dataset_bkg, observation=obs)

    # Fixed seed -> exact counts and first-event values.
    assert len(events.table) == 2422
    assert_allclose(events.table["ENERGY"][0], 1.56446303986587, rtol=1e-5)
    assert_allclose(events.table["RA"][0], 268.8180057255861, rtol=1e-5)
    assert_allclose(events.table["DEC"][0], -28.45051813404372, rtol=1e-5)

    assert len(events_bkg.table) == 12
    assert_allclose(events_bkg.table["ENERGY"][0], 1.377619454, rtol=1e-5)
    assert_allclose(events_bkg.table["RA"][0], 265.09135019, rtol=1e-5)
    assert_allclose(events_bkg.table["DEC"][0], -30.631115659801, rtol=1e-5)
    # Background-sampled events carry MC_ID == 0 here.
    assert_allclose(events_bkg.table["MC_ID"][0], 0, rtol=1e-5)

    meta = events.table.meta

    # Header keywords propagated from the observation.
    assert meta["RA_PNT"] == 266.4049882865447
    assert meta["ONTIME"] == 36000.0
    assert meta["OBS_ID"] == 1001
    assert meta["RADECSYS"] == "icrs"
示例#5
0
def test_events_datastore(tmp_path, dataset, models):
    """Sampled events written to a FITS file can be indexed by ``DataStore``."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    obs = Observation.create(
        obs_id=1001,
        pointing=SkyCoord(0, 0, unit="deg", frame="galactic"),
        livetime=10.0 * u.hr,
        irfs=irfs,
        location=LOCATION,
    )

    dataset.models = models
    events = MapDatasetEventSampler(random_state=0).run(
        dataset=dataset, observation=obs
    )

    # Assemble a minimal DL3 file: primary HDU + EVENTS + GTI.
    hdu_all = fits.HDUList([
        fits.PrimaryHDU(),
        fits.BinTableHDU(events.table),
        fits.BinTableHDU(dataset.gti.table, name="GTI"),
    ])
    out_path = str(tmp_path / "events.fits")
    hdu_all.writeto(out_path)

    DataStore.from_events_files([out_path])
示例#6
0
def prepare_dataset_simple(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")

    irfs = load_cta_irfs(IRF_FILE)

    # Replace the file's energy dispersion with an idealised Gaussian one
    # (10 % resolution, no bias) defined on the module-level axes.
    irfs["edisp"] = EnergyDispersion2D.from_gauss(
        e_true=ENERGY_AXIS_TRUE.edges,
        migra=MIGRA_AXIS.edges,
        sigma=0.1,
        bias=0,
        offset=[0, 2, 4, 6, 8] * u.deg,
    )

    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )

    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    maker = MapDatasetMaker(
        selection=["exposure", "edisp", "psf", "background"]
    )
    dataset = maker.run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
示例#7
0
def test_mde_sample_weak_src(dataset, models):
    """With a negligibly weak source, every sampled event is background.

    The source amplitude is forced to 1e-25 so the sampler should draw
    only background events (MC_ID == 0).
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    livetime = 10.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(
        obs_id=1001,
        pointing=pointing,
        livetime=livetime,
        irfs=irfs,
        location=LOCATION,
    )

    # Make the source effectively invisible.
    models[0].parameters["amplitude"].value = 1e-25

    dataset.models = models

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)

    assert len(events.table) == 18
    # Every event must have MC_ID == 0, i.e. be a background event.
    assert_allclose(len(np.where(events.table["MC_ID"] == 0)[0]),
                    len(events.table),
                    rtol=1e-5)
    def simulate_single(self,
                        pointing,
                        models=None,
                        empty=None,
                        random_state="random-seed"):
        """Simulate one observation: fill *empty* with IRFs, fluctuate the
        background normalisation, then Poisson-fluctuate the counts.

        The drawn background factor is recorded in ``self.bkg_norms`` and
        the background map is restored before returning, so the returned
        dataset has fluctuated counts on top of the nominal background.

        NOTE(review): *random_state* is accepted but never used — the
        background factor below is drawn from the global ``np.random``
        state; confirm whether it should be threaded through.
        """
        obs = Observation.create(pointing=pointing,
                                 livetime=self.livetime,
                                 irfs=self.get_irf())
        maker = MapDatasetMaker(
            selection=["exposure", "background", "psf", "edisp"])
        maker_safe_mask = SafeMaskMaker(methods=["offset-max"],
                                        offset_max=2.0 * u.deg)

        dataset = maker.run(empty, obs)
        dataset = maker_safe_mask.run(dataset, obs)

        dataset.models = Models([models])

        # Gaussian fluctuation of the background normalisation; redraw
        # until non-negative (i.e. a truncated Gaussian).
        bkg_factor = np.random.normal(self.mean_bkg, self.sigma_bkg)
        while bkg_factor < 0.0:
            bkg_factor = np.random.normal(self.mean_bkg, self.sigma_bkg)

        dataset.background.data = bkg_factor * dataset.background.data
        self.bkg_norms.append(bkg_factor)

        # Poisson-fluctuate source + background counts.
        dataset.fake()

        dataset.models = None  # remove the model on the dataset
        dataset.background.data /= bkg_factor  # restore the old background

        return dataset
        def empty_dataset(source_pos_radec, map_geom, e_reco_binning, livetime,
                          irf_file, offset):
            """Build a MapDataset (exposure, background, psf, edisp) for a
            source position, map geometry and reco-energy binning.

            The pointing is displaced from the source by *offset* in
            declination; reco and true energy use identical log axes.
            ``source_pos_radec``/``map_geom``/``e_reco_binning`` are dicts
            of quantity strings (keys visible below).
            """

            source_pos_ra = source_pos_radec["ra"]
            source_pos_dec = source_pos_radec["dec"]

            source = SkyCoord(source_pos_ra,
                              source_pos_dec,
                              unit="deg",
                              frame="icrs")

            # Reco-energy binning: bounds in TeV, n_e_reco log-spaced edges.
            e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to("TeV")
            e_reco_min = e_reco_min.value
            e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to("TeV")
            e_reco_max = e_reco_max.value
            n_e_reco = e_reco_binning["n_e_reco"]

            energy_axis = MapAxis.from_edges(np.logspace(
                np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
                                             unit="TeV",
                                             name="energy",
                                             interp="log")

            geom = WcsGeom.create(
                skydir=source,
                binsz=u.Quantity(map_geom["binsize"]).to("deg").value,
                width=(u.Quantity(map_geom["width"]).to("deg").value,
                       u.Quantity(map_geom["width"]).to("deg").value),
                frame="icrs",
                axes=[energy_axis])

            # True-energy axis uses the same edges as the reco axis.
            energy_axis_true = MapAxis.from_edges(np.logspace(
                np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
                                                  unit="TeV",
                                                  name="energy",
                                                  interp="log")

            # Wobble-style pointing: offset applied in declination only.
            pointing = SkyCoord(u.Quantity(source_pos_ra).to("deg"),
                                u.Quantity(source_pos_dec).to("deg") + offset,
                                frame="icrs",
                                unit="deg")

            irfs = load_cta_irfs(irf_file)

            obs = Observation.create(pointing=pointing,
                                     livetime=livetime,
                                     irfs=irfs)

            empty = MapDataset.create(geom, energy_axis_true=energy_axis_true)
            maker = MapDatasetMaker(
                selection=["exposure", "background", "psf", "edisp"])
            # Safe mask extends 1 deg beyond the map width.
            maker_safe_mask = SafeMaskMaker(
                methods=["offset-max"],
                offset_max=u.quantity.Quantity(map_geom["width"]) +
                1.0 * u.deg)

            dataset = maker.run(empty, obs)
            dataset = maker_safe_mask.run(dataset, obs)

            return dataset
    def __init__(self, lambda_true: u.quantity.Quantity, index_true: float,
                 normalization_true: u.quantity.Quantity,
                 livetime: u.quantity.Quantity, pointing_galactic: dict,
                 e_reco_binning: dict, on_region_radius: str, irf_file: str):
        """Store the true spectral parameters and set up pointing, energy
        axis, on region and observation for the simulation.

        ``pointing_galactic`` needs keys ``pointing_l``/``pointing_b`` (deg);
        ``e_reco_binning`` needs ``e_reco_min``/``e_reco_max`` (quantity
        strings) and ``n_e_reco`` (number of log-spaced edges).
        """

        normalization_true = normalization_true.to("cm-2 s-1 TeV-1")
        self.lambda_true = lambda_true
        self.index_true = index_true
        self.normalization_true = normalization_true

        pointing_l = pointing_galactic["pointing_l"]
        pointing_b = pointing_galactic["pointing_b"]
        pointing = SkyCoord(pointing_l,
                            pointing_b,
                            unit="deg",
                            frame="galactic")
        e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to("TeV").value
        e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to("TeV").value
        n_e_reco = e_reco_binning["n_e_reco"]
        # Log-spaced reco-energy edges, stored as a Quantity array.
        self.energy_axis = np.logspace(np.log10(e_reco_min),
                                       np.log10(e_reco_max), n_e_reco) * u.TeV

        # On region centred on the pointing position.
        on_region_radius = Angle(on_region_radius)
        self.on_region = CircleSkyRegion(center=pointing,
                                         radius=on_region_radius)

        irfs = load_cta_irfs(irf_file)

        self.obs = Observation.create(pointing=pointing,
                                      livetime=livetime,
                                      irfs=irfs)
示例#11
0
def test_observation_write(tmp_path):
    """Round-trip an observation through write/read, with and without IRFs,
    and reject an unsupported output format."""
    obs = Observation.read(
        "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_023523.fits.gz")
    path = tmp_path / "obs.fits.gz"
    obs.write(path)
    # NOTE(review): ``read`` is called on the instance; presumably a
    # classmethod, equivalent to Observation.read(path) — confirm.
    obs_read = obs.read(path)

    # Full write: events, GTI and all IRF components survive the round trip.
    assert obs_read.events is not None
    assert obs_read.gti is not None
    assert obs_read.aeff is not None
    assert obs_read.edisp is not None
    assert obs_read.bkg is not None
    assert obs_read.rad_max is None

    # unsupported format
    with pytest.raises(ValueError):
        obs.write(tmp_path / "foo.fits.gz", format="cool-new-format")

    # no irfs
    path = tmp_path / "obs_no_irfs.fits.gz"
    obs.write(path, include_irfs=False)
    obs_read = obs.read(path)
    assert obs_read.events is not None
    assert obs_read.gti is not None
    assert obs_read.aeff is None
    assert obs_read.edisp is None
    assert obs_read.bkg is None
    assert obs_read.rad_max is None
示例#12
0
def simulate_map_dataset(random_state=0, name=None):
    """Simulate a 4x4 deg Galactic-centre map dataset with one Gaussian source."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )

    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")
    energy_axis = MapAxis.from_edges(
        edges=np.logspace(-1, 2, 15) * u.TeV, name="energy", interp="log"
    )

    geom = WcsGeom.create(
        skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic"
    )

    # Extended Gaussian source with a power-law spectrum.
    skymodel = SkyModel(
        spatial_model=GaussianSpatialModel(
            lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
        ),
        spectral_model=PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1"),
        name="source",
    )

    obs = Observation.create(pointing=skydir, livetime=1 * u.h, irfs=irfs)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(MapDataset.create(geom, name=name), obs)

    dataset.models.append(skymodel)
    dataset.fake(random_state=random_state)
    return dataset
示例#13
0
def test_observation_cta_1dc():
    """Create an observation from explicit start/stop times, dead time,
    reference time and Earth location, and check the derived attributes."""
    ontime = 5.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )

    t_ref = Time('2020-01-01T00:00:00')
    tstart = 20 * u.hour
    # Site coordinates (presumably the CTA South site — confirm).
    location = EarthLocation(lon="-70d18m58.84s",
                             lat="-24d41m0.34s",
                             height="2000m")

    obs = Observation.create(
        pointing,
        irfs=irfs,
        deadtime_fraction=0.1,
        tstart=tstart,
        tstop=tstart + ontime,
        reference_time=t_ref,
        location=location,
    )

    assert_skycoord_allclose(obs.pointing_radec, pointing.icrs)
    # Live time = on time reduced by the 10 % dead-time fraction.
    assert_allclose(obs.observation_live_time_duration, 0.9 * ontime)
    # No target was set, so its coordinates are NaN.
    assert_allclose(obs.target_radec.ra, np.nan)
    # With a location and times, the pointing zenith angle is computable.
    assert not np.isnan(obs.pointing_zen)
    assert_allclose(obs.muoneff, 1)
示例#14
0
def test_observation_read_single_file_fixed_rad_max():
    """check that for a point-like observation without the RAD_MAX_2D table
    a RadMax2D object is generated from the RAD_MAX keyword"""
    obs = Observation.read(
        "$GAMMAPY_DATA/joint-crab/dl3/magic/run_05029748_DL3.fits")

    rad_max = obs.rad_max
    assert rad_max is not None
    # A single global cut: one energy bin x one offset bin.
    assert rad_max.quantity.shape == (1, 1)
    assert u.allclose(rad_max.quantity, 0.1414213 * u.deg)
示例#15
0
def simulate():
    """Simulate ``N_OBS`` spectrum datasets of a power-law source with an
    exponentially decaying light curve.

    Uses the module-level globals ``N_OBS`` (number of observations) and
    ``gti_t0`` (reference time); observations start 2 h apart, each with
    1 h livetime. Returns the list of faked datasets.
    """

    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )

    # Reconstructed and true energy axis
    center = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    energy_axis = MapAxis.from_edges(
        np.logspace(-0.5, 1.0, 10), unit="TeV", name="energy", interp="log",
    )
    energy_axis_true = MapAxis.from_edges(
        np.logspace(-1.2, 2.0, 31), unit="TeV", name="energy_true", interp="log",
    )

    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=center, radius=on_region_radius)

    # Pointing offset by (0.5, 0.5) deg from the on-region centre.
    pointing = SkyCoord(0.5, 0.5, unit="deg", frame="galactic")

    spectral_model = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    # Exponential decay with a 6 h time constant, anchored at gti_t0.
    temporal_model = ExpDecayTemporalModel(t0="6 h", t_ref=gti_t0.mjd * u.d)
    model_simu = SkyModel(
        spectral_model=spectral_model, temporal_model=temporal_model, name="model-simu",
    )

    lvtm = np.ones(N_OBS) * 1.0 * u.hr
    tstart = 1.0 * u.hr

    datasets = []
    for i in range(N_OBS):
        obs = Observation.create(
            pointing=pointing,
            livetime=lvtm[i],
            tstart=tstart,
            irfs=irfs,
            reference_time=gti_t0,
        )
        empty = SpectrumDataset.create(
            e_reco=energy_axis,
            e_true=energy_axis_true,
            region=on_region,
            name=f"dataset_{i}",
        )
        maker = SpectrumDatasetMaker(selection=["aeff", "background", "edisp"])
        dataset = maker.run(empty, obs)
        dataset.models = model_simu
        # Poisson-fluctuate the predicted counts.
        dataset.fake()
        datasets.append(dataset)
        # Next observation starts 2 h later.
        tstart = tstart + 2.0 * u.hr

    return datasets
示例#16
0
def test_observation():
    """Creating an observation applies the dead-time fraction to the live time."""
    livetime = 5.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )

    obs = Observation.create(
        pointing, livetime=livetime, irfs=irfs, deadtime_fraction=0.1
    )

    assert_skycoord_allclose(obs.pointing_radec, pointing.icrs)
    # 10 % dead time -> 90 % of the requested livetime remains.
    assert_allclose(obs.observation_live_time_duration, 0.9 * livetime)
示例#17
0
def test_observation_read_single_file():
    """read event list and irf components from the same DL3 files"""
    obs = Observation.read(
        "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_020136.fits.gz")

    aeff_value = obs.aeff.evaluate(
        energy_true=Quantity(1, "TeV"), offset=Quantity(0.5, "deg")
    )

    assert obs.obs_id == 20136
    assert len(obs.events.energy) == 11243
    assert obs.available_irfs == ["aeff", "edisp", "psf", "bkg"]
    assert_allclose(aeff_value.value, 273372.44851054, rtol=1e-5)
    assert aeff_value.unit == "m2"
示例#18
0
def test_irf_alpha_config(dataset, models):
    """Event sampling runs through with the Prod5 alpha-configuration IRFs."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-caldb/Prod5-South-20deg-AverageAz-14MSTs37SSTs.180000s-v0.1.fits.gz"
    )
    obs = Observation.create(
        obs_id=1001,
        pointing=SkyCoord(0, 0, unit="deg", frame="galactic"),
        livetime=1.0 * u.hr,
        irfs=irfs,
    )

    dataset.models = models
    events = MapDatasetEventSampler(random_state=0).run(
        dataset=dataset, observation=obs
    )
    assert events is not None
示例#19
0
def prepare_dataset(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")
    observation = Observation.create(
        obs_id=1001,
        pointing=POINTING,
        livetime=LIVETIME,
        irfs=load_cta_irfs(IRF_FILE),
    )

    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    dataset = MapDatasetMaker(
        selection=["exposure", "background", "psf", "edisp"]
    ).run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
示例#20
0
def test_event_det_coords(dataset):
    """Sampled events carry detector (DETX/DETY) coordinates in degrees,
    reproducible for ``random_state=0``."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    livetime = 1.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(
        obs_id=1001, pointing=pointing, livetime=livetime, irfs=irfs
    )

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)

    # Fixed seed -> exact event count and first-event detector coordinates.
    assert len(events.table) == 374
    assert_allclose(events.table["DETX"][0], -2.44563584, rtol=1e-5)
    assert events.table["DETX"].unit == "deg"

    assert_allclose(events.table["DETY"][0], 0.01414569, rtol=1e-5)
    assert events.table["DETY"].unit == "deg"
示例#21
0
def simulate_events(filename_model, filename_dataset, dataset, obs_id):
    """Simulate events for a given model and dataset.

    Parameters
    ----------
    filename_model : str
        Filename of the model definition.
    filename_dataset : str
        Filename of the dataset; only used here to build the output
        event-file path.
    dataset : MapDataset
        Dataset to use for the simulation (passed in, not read from disk).
    obs_id : int
        Identifier of the observation to simulate.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    #    log.info(f"Reading {filename_dataset}")
    #    dataset = MapDataset.read(filename_dataset)

    log.info(f"Reading {filename_model}")
    models = Models.read(filename_model)
    #    dataset.models = models
    dataset.models.extend(models)

    sampler = MapDatasetEventSampler(random_state=0)

    #    obs_id = np.arange(nobs)
    #    with multiprocessing.Pool(processes=core) as pool:
    #        args1 = zip(obs_id, repeat(POINTING), repeat(LIVETIME), repeat(irfs),
    #                    repeat(dataset), repeat(filename_dataset), repeat(filename_model))
    #        pool.starmap(simulate_parallel, args1)

    #    for obs_id in np.arange(nobs):
    observation = Observation.create(obs_id=obs_id,
                                     pointing=POINTING,
                                     livetime=LIVETIME,
                                     irfs=irfs)

    events = sampler.run(dataset, observation)

    # Write the sampled event table next to the dataset/model files.
    path = get_filename_events(filename_dataset, filename_model, obs_id)
    log.info(f"Writing {path}")
    path.parent.mkdir(exist_ok=True, parents=True)
    events.table.write(str(path), overwrite=True)
示例#22
0
def test_dataset_maker_spectrum_global_rad_max():
    """test the energy-dependent spectrum extraction"""

    observation = Observation.read('$GAMMAPY_DATA/joint-crab/dl3/magic/run_05029748_DL3.fits')

    spectrum_maker = SpectrumDatasetMaker(
        containment_correction=False, selection=["counts", "exposure", "edisp"]
    )
    dataset = spectrum_maker.run(get_spectrumdataset_rad_max("spec"), observation)

    # Three off regions obtained by wobbling around the pointing.
    bkg_maker = ReflectedRegionsBackgroundMaker(
        region_finder=WobbleRegionsFinder(n_off_regions=3)
    )
    dataset_on_off = bkg_maker.run(dataset, observation)

    on_counts = dataset_on_off.counts
    off_counts = dataset_on_off.counts_off
    assert on_counts.unit == ""
    assert off_counts.unit == ""
    assert_allclose(on_counts.data.sum(), 437, rtol=1e-5)
    assert_allclose(off_counts.data.sum(), 273, rtol=1e-5)
示例#23
0
def test_observation_read():
    """read event list and irf components from different DL3 files"""
    obs = Observation.read(
        event_file=
        "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_020136.fits.gz",
        irf_file=
        "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_020137.fits.gz",
    )

    aeff_value = obs.aeff.evaluate(
        energy_true=Quantity(1, "TeV"), offset=Quantity(0.5, "deg")
    )

    assert obs.obs_id == 20136
    assert len(obs.events.energy) == 11243
    expected_hdus = ["events", "gti", "aeff", "edisp", "psf", "bkg"]
    assert obs.available_hdus == expected_hdus
    assert_allclose(aeff_value.value, 278000.54120855, rtol=1e-5)
    assert aeff_value.unit == "m2"
示例#24
0
    def setup_class(self):
        """Build one observation with five hand-written events and a 2 s GTI
        for the tests of this class."""
        # Five events: four near the pointing, one 10 deg away.
        table = Table()
        table["RA"] = [0.0, 0.0, 0.0, 0.0, 10.0] * u.deg
        table["DEC"] = [0.0, 0.05, 0.9, 10.0, 10.0] * u.deg
        table["ENERGY"] = [1.0, 1.0, 1.5, 1.5, 10.0] * u.TeV
        table["OFFSET"] = [0.1, 0.1, 0.5, 1.0, 1.5] * u.deg

        # Pointing position stored both in the event-table and obs metadata.
        table.meta["RA_PNT"] = 0 * u.deg
        table.meta["DEC_PNT"] = 0.5 * u.deg

        meta_obs = dict()
        meta_obs["RA_PNT"] = 0 * u.deg
        meta_obs["DEC_PNT"] = 0.5 * u.deg
        meta_obs["DEADC"] = 1  # no dead time

        # Single GTI from t=1 s to t=3 s after the 2010-01-01 reference.
        meta = time_ref_to_dict("2010-01-01")
        gti_table = Table({"START": [1], "STOP": [3]}, meta=meta)
        gti = GTI(gti_table)

        self.observation = Observation(
            events=EventList(table), obs_info=meta_obs, gti=gti
        )
示例#25
0
    def setup_class(self):
        """Build two mirror-image observations (declinations flipped in
        sign) with five hand-written events each and a 2 s GTI."""
        self.observations = []
        for sign in [-1, 1]:
            events = Table()
            events["RA"] = [0.0, 0.0, 0.0, 0.0, 10.0] * u.deg
            # Declinations mirrored between the two observations.
            events["DEC"] = sign * ([0.0, 0.05, 0.9, 10.0, 10.0] * u.deg)
            events["ENERGY"] = [1.0, 1.0, 1.5, 1.5, 10.0] * u.TeV
            events["OFFSET"] = [0.1, 0.1, 0.5, 1.0, 1.5] * u.deg


            # Pointing flips with the events; DEADC=1 means no dead time.
            obs_info = dict(
                RA_PNT=0 * u.deg,
                DEC_PNT=sign * 0.5 * u.deg,
                DEADC=1,
            )
            events.meta.update(obs_info)
            # Single GTI from t=1 s to t=3 s after the 2010-01-01 reference.
            meta = time_ref_to_dict("2010-01-01")
            gti_table = Table({"START": [1], "STOP": [3]}, meta=meta)
            gti = GTI(gti_table)

            self.observations.append(Observation(
                events=EventList(events), obs_info=obs_info, gti=gti
            ))
示例#26
0
def test_observation_peek(data_store, caplog):
    """``peek`` produces a plot and logs warnings when background or PSF
    components are missing."""
    obs = Observation.read(
        "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_023523.fits.gz")

    # Full observation: plotting must succeed without warnings checked here.
    with mpl_plot_check():
        obs.peek()

    # Remove the background model; peek should still plot but warn.
    obs.bkg = None

    with mpl_plot_check():
        obs.peek()

    assert "WARNING" in [record.levelname for record in caplog.records]
    message = "No background model found for obs 23523."
    assert message in [record.message for record in caplog.records]

    # Remove the PSF too; again plot succeeds with a warning.
    obs.psf = None
    with mpl_plot_check():
        obs.peek()

    assert "WARNING" in [record.levelname for record in caplog.records]
    message = "No PSF found for obs 23523."
    assert message in [record.message for record in caplog.records]
示例#27
0
def simulate_events(filename_model, filename_dataset, nobs):
    """Simulate events for a given model and dataset.

    Parameters
    ----------
    filename_model : str
        Filename of the model definition.
    filename_dataset : str
        Filename of the dataset to use for simulation.
    nobs : int
        Number of obervations to simulate.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)

    log.info(f"Reading {filename_model}")
    models = Models.read(filename_model)
    # Attach a FoV background model tied to this dataset, then replace
    # the dataset models wholesale.
    models.append(FoVBackgroundModel(dataset_name=dataset.name))
    dataset.models = models
#    dataset.models.extend(models)

    sampler = MapDatasetEventSampler(random_state=0)

    # One event file per simulated observation, obs_id = 0..nobs-1.
    for obs_id in np.arange(nobs):
        observation = Observation.create(
            obs_id=obs_id, pointing=POINTING, livetime=LIVETIME, irfs=irfs
        )

        events = sampler.run(dataset, observation)

        path = get_filename_events(filename_dataset, filename_model, obs_id)
        log.info(f"Writing {path}")
        path.parent.mkdir(exist_ok=True, parents=True)
        events.table.write(str(path), overwrite=True)
示例#28
0
def test_mde_run(dataset):
    """Event sampling reproducibility (1 h livetime variant).

    NOTE(review): shares its name with an earlier ``test_mde_run`` in this
    file; if both end up in one module, this definition shadows the other.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    livetime = 1.0 * u.hr
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(obs_id=1001,
                             pointing=pointing,
                             livetime=livetime,
                             irfs=irfs)

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)

    # Background-only copy: keep just the second model of the fixture
    # (presumably the background model — confirm against the fixture).
    dataset_bkg = dataset.copy()
    dataset_bkg.models = dataset_bkg.models[1]
    events_bkg = sampler.run(dataset=dataset_bkg, observation=obs)

    # Fixed seed -> exact counts and first-event values.
    assert len(events.table) == 374
    assert_allclose(events.table["ENERGY"][0], 4.09979515940, rtol=1e-5)
    assert_allclose(events.table["RA"][0], 263.611383742, rtol=1e-5)
    assert_allclose(events.table["DEC"][0], -28.89318805, rtol=1e-5)

    assert len(events_bkg.table) == 10
    assert_allclose(events_bkg.table["ENERGY"][0], 2.84808850102, rtol=1e-5)
    assert_allclose(events_bkg.table["RA"][0], 266.6138405848, rtol=1e-5)
    assert_allclose(events_bkg.table["DEC"][0], -29.0489180785, rtol=1e-5)
    # Background-sampled events carry MC_ID == 0 here.
    assert_allclose(events_bkg.table["MC_ID"][0], 0, rtol=1e-5)

    meta = events.table.meta

    # Header keywords propagated from the observation; ALT/AZ are stored
    # as formatted strings in this header.
    assert meta["RA_PNT"] == 266.4049882865447
    assert meta["ONTIME"] == 3600.0
    assert meta["OBS_ID"] == 1001
    assert meta["RADECSYS"] == "icrs"
    assert meta["ALT_PNT"] == "20.000"
    assert meta["AZ_PNT"] == "0.000"
示例#29
0
def test_create_dl3_energy_dependent_cuts(
    temp_dir_observed_files, observed_dl2_file
):
    """
    Generating a DL3 file from a test DL2 file and test IRF file, using
    energy dependent cuts. Here the previously created IRF is used.
    """
    from lstchain.tools.lstchain_create_dl3_file import DataReductionFITSWriter
    from gammapy.data import Observation

    irf_file = temp_dir_observed_files / "pnt_irf.fits.gz"

    # Output DL3 path mirrors the DL2 filename with 'dl2' -> 'dl3'.
    dl2_name = observed_dl2_file.name
    observed_dl3_file = temp_dir_observed_files / dl2_name.replace('dl2', 'dl3')
    observed_dl3_file = observed_dl3_file.with_suffix(".fits")

    # The tool must exit with status 0 (success).
    assert (
        run_tool(
            DataReductionFITSWriter(),
            argv=[
                f"--input-dl2={observed_dl2_file}",
                f"--output-dl3-path={temp_dir_observed_files}",
                f"--input-irf={irf_file}",
                "--source-name=Crab",
                "--source-ra=83.633deg",
                "--source-dec=22.01deg",
                "--overwrite",
            ],
            cwd=temp_dir_observed_files,
        )
        == 0
    )

    # The written DL3 file must be readable and carry the expected obs_id.
    assert Observation.read(
        event_file=observed_dl3_file, irf_file=irf_file
    ).obs_id == 2008
示例#30
0
def generate_dataset(Eflux,
                     flux,
                     Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points either as
    a SpectrumDataset or a SpectrumDatasetOnOff.

    Note :
    - in SpectrumDataset, the background counts are assumed precisely known
    and are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
    fluctuated from the IRF known values.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over
        all energies. The default is None (no extra safe-mask restriction).
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00', scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataset.
        The default is True.
    seed : String, optional
        The seed for the random generator; if an integer, will generate the
        same random series at each run. The default is 'random-seed'.
    debug : Boolean
        If True, let's talk a bit. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.

    """
    random_state = get_random_state(seed)

    ### Define on region: fixed 0.5 deg circle at (RA, Dec) = (0, 0).
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg,
                           frame="icrs")  # Observing region
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axes bracketing the provided flux points.
    # The reco axis is slightly shrunk (factors 1.1 / 0.9) so it stays
    # inside the true-energy axis.
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value

    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")

    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if debug:
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF
    irf = load_cta_irfs(irf_file)

    # Log-interpolated template spectrum through the given flux points.
    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})

    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(e_reco=ereco_axis,
                                      e_true=etrue_axis,
                                      region=on_region,
                                      name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model

    # Restrict the safe-energy mask to Erange when one is given.
    # BUGFIX: Erange was previously indexed unconditionally, crashing
    # with a TypeError for the documented default Erange=None.
    if Erange is not None:
        mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                             energy_max=Erange[1])
        mask = mask & ds.mask_safe.data
        ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory ?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if onoff:
        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 /
                                                        alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if onoff:
            # Off counts fluctuated from the predicted background.
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)

    return ds