def test_stack(geom, geom_etrue):
    """In-place stacking of two map datasets must merge counts, background,
    exposure and safe mask as expected."""
    template = Map.from_geom(geom)
    template.quantity = 0.2 * np.ones(template.data.shape)
    bkg_model_1 = BackgroundModel(template)

    counts_1 = Map.from_geom(geom)
    counts_1.quantity = 0.3 * np.ones(counts_1.data.shape)

    mask_data_1 = np.ones(template.data.shape, dtype=bool)
    mask_data_1[0][0][0:10] = False
    safe_mask_1 = Map.from_geom(geom, data=mask_data_1)

    dataset1 = MapDataset(
        counts=counts_1,
        background_model=bkg_model_1,
        exposure=get_exposure(geom_etrue),
        mask_safe=safe_mask_1,
    )

    counts_2 = Map.from_geom(geom)
    counts_2.quantity = 0.1 * np.ones(counts_2.data.shape)
    bkg_model_2 = BackgroundModel(template, norm=0.5)

    mask_data_2 = np.ones(template.data.shape, dtype=bool)
    mask_data_2[0][3] = False
    safe_mask_2 = Map.from_geom(geom, data=mask_data_2)

    dataset2 = MapDataset(
        counts=counts_2,
        background_model=bkg_model_2,
        exposure=get_exposure(geom_etrue),
        mask_safe=safe_mask_2,
    )

    dataset1.stack(dataset2)

    # masked bins are zeroed before summation, hence the reduced totals
    assert_allclose(dataset1.counts.data.sum(), 7987)
    assert_allclose(dataset1.background_model.map.data.sum(), 5987)
    assert_allclose(dataset1.exposure.data, 2.0 * dataset2.exposure.data)
    assert_allclose(dataset1.mask_safe.data.sum(), 20000)
def to_spectrum_dataset(self, name=None):
    """Convert a SpectrumDatasetOnOff to a SpectrumDataset.

    The background model template is taken as ``alpha * counts_off``.

    Parameters
    ----------
    name : str
        Name of the new dataset.

    Returns
    -------
    dataset : `SpectrumDataset`
        SpectrumDataset with cash statistics.
    """
    name = make_name(name)

    # background template derived from the off counts scaled by alpha,
    # attached to the newly named dataset
    bkg = BackgroundModel(self.counts_off * self.alpha)
    bkg.datasets_names = [name]

    return SpectrumDataset(
        counts=self.counts,
        exposure=self.exposure,
        edisp=self.edisp,
        name=name,
        gti=self.gti,
        mask_fit=self.mask_fit,
        mask_safe=self.mask_safe,
        models=bkg,
        meta_table=self.meta_table,
    )
def test_background_model(background):
    """A plain norm of 2 must double the template; adding a tilt reweights
    the energy axis relative to the reference energy."""
    scaled = BackgroundModel(background, norm=2.0).evaluate()
    assert_allclose(scaled.data[0][0][0], background.data[0][0][0] * 2.0, rtol=1e-3)
    assert_allclose(scaled.data.sum(), background.data.sum() * 2.0, rtol=1e-3)

    tilted = BackgroundModel(
        background, norm=2.0, tilt=0.2, reference="1000 GeV"
    ).evaluate()
    assert_allclose(tilted.data[0][0][0], 2.254e-07, rtol=1e-3)
    assert_allclose(tilted.data.sum(), 7.352e-06, rtol=1e-3)
def test_background_model_io(tmpdir, background):
    """Round-trip a BackgroundModel through to_dict/from_dict with the map
    template stored in a FITS file."""
    filename = str(tmpdir / "test-bkg-file.fits")
    model = BackgroundModel(background, norm=2.0, filename=filename)
    model.map.write(filename, overwrite=True)

    restored = model.from_dict(model.to_dict())

    # the restored model must keep both the norm and the filename reference
    assert_allclose(
        restored.evaluate().data.sum(), background.data.sum() * 2.0, rtol=1e-3
    )
    assert restored.filename == filename
def backgrounds():
    """Return two flat background models attached to different datasets."""
    axis = MapAxis.from_edges(np.logspace(-1, 1, 3), unit=u.TeV, name="energy")
    geom = WcsGeom.create(skydir=(0, 0), npix=(5, 4), frame="galactic", axes=[axis])

    template = Map.from_geom(geom)
    template.quantity = np.ones(geom.data_shape) * 1e-7

    # datasets_names accepts either a plain string or a list of names
    return [
        BackgroundModel(template, name="bkg1", datasets_names="dataset-1"),
        BackgroundModel(template, name="bkg2", datasets_names=["dataset-2"]),
    ]
def stack(self, other):
    """Stack another dataset in place.

    Counts, background and exposure are zeroed outside the respective safe
    masks before being summed; PSF, energy dispersion and GTIs delegate to
    their own ``stack`` implementations.

    Parameters
    ----------
    other: `~gammapy.cube.MapDataset`
        Map dataset to be stacked with this one.
    """
    if self.counts and other.counts:
        # zero masked counts so they do not contribute to the stacked sum
        self.counts.data[~self.mask_safe.data] = 0
        self.counts.stack(other.counts, weights=other.mask_safe)

    if self.exposure and other.exposure:
        # TODO: apply energy dependent mask to exposure
        mask_image = self.mask_safe.reduce_over_axes(func=np.logical_or)
        self.exposure.data[..., ~mask_image.data] = 0
        mask_image_other = other.mask_safe.reduce_over_axes(func=np.logical_or)
        self.exposure.stack(other.exposure, weights=mask_image_other)

    if self.background_model and other.background_model:
        # evaluate both templates, blank unsafe bins, then freeze the
        # stacked map into a new template model
        bkg = self.background_model.evaluate()
        bkg.data[~self.mask_safe.data] = 0
        other_bkg = other.background_model.evaluate()
        bkg.stack(other_bkg, weights=other.mask_safe)
        self.background_model = BackgroundModel(bkg, name=self.background_model.name)

    if self.mask_safe is not None and other.mask_safe is not None:
        self.mask_safe.stack(other.mask_safe)

    if self.psf and other.psf:
        # bug fix: removed an unused `mask_image` computation here — the
        # reduced mask was never applied to the PSF stacking
        if isinstance(self.psf, PSFMap) and isinstance(other.psf, PSFMap):
            self.psf.stack(other.psf)
        else:
            raise ValueError("Stacking of PSF kernels not supported")

    if self.edisp and other.edisp:
        if isinstance(self.edisp, EDispMap) and isinstance(other.edisp, EDispMap):
            self.edisp.stack(other.edisp)
        else:
            raise ValueError("Stacking of edisp kernels not supported")

    if self.gti and other.gti:
        self.gti = self.gti.stack(other.gti).union()
def test_fake(self):
    """Test the fake dataset"""
    model = SkyModel(spectral_model=PowerLawSpectralModel())
    dataset = SpectrumDatasetOnOff(
        name="test",
        counts=self.on_counts,
        counts_off=self.off_counts,
        models=model,
        exposure=self.aeff * self.livetime,
        edisp=self.edisp,
        acceptance=1,
        acceptance_off=10,
    )
    original = dataset.copy()

    # unit background template over the counts geometry
    bkg_map = RegionNDMap.from_geom(dataset.counts.geom)
    bkg_map.data += 1
    bkg_model = BackgroundModel(bkg_map, name="test-bkg", datasets_names="test")

    dataset.fake(background_model=bkg_model, random_state=314)

    # faking must preserve shapes and reproduce the seeded totals
    assert original.counts.data.shape == dataset.counts.data.shape
    assert original.counts_off.data.shape == dataset.counts_off.data.shape
    assert dataset.counts_off.data.sum() == 39
    assert dataset.counts.data.sum() == 5
def fermi_dataset():
    """Assemble a Fermi 3FHL Galactic-centre cutout dataset and reduce it
    to an image dataset."""
    size = Angle("3 deg", "3.5 deg")

    counts = Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    counts = counts.cutout(counts.geom.center_skydir, size)

    bkg_map = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz"
    )
    bkg_map = bkg_map.cutout(bkg_map.geom.center_skydir, size)
    bkg_model = BackgroundModel(bkg_map, datasets_names=["fermi-3fhl-gc"])

    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz"
    )
    exposure = exposure.cutout(exposure.geom.center_skydir, size)
    exposure.unit = "cm2s"

    # all-True safe mask on the counts geometry
    mask_safe = counts.copy(data=np.ones_like(counts.data).astype("bool"))

    table_psf = EnergyDependentTablePSF.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-psf-cube.fits.gz"
    )
    psf_map = PSFMap.from_energy_dependent_table_psf(table_psf)

    dataset = MapDataset(
        counts=counts,
        models=[bkg_model],
        exposure=exposure,
        mask_safe=mask_safe,
        psf=psf_map,
        name="fermi-3fhl-gc",
    )
    return dataset.to_image()
def spectrum_dataset():
    """Spectrum dataset with a steep background profile and diagonal energy
    dispersion."""
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=20, name="energy_true")
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=4)

    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_reco])
    background.data += 3600
    background.data[0] *= 1e3    # boost the first energy bin
    background.data[-1] *= 1e-3  # suppress the last energy bin

    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis_true=e_true, energy_axis=e_reco, geom=background.geom
    )

    aeff = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[e_true], unit="m2")
    aeff.data += 1e6

    return SpectrumDataset(
        name="test",
        exposure=aeff * (1 * u.h),
        edisp=edisp,
        models=BackgroundModel(background, name="test-bkg", datasets_names="test"),
    )
def fermi_dataset():
    """Fermi 3FHL Galactic-centre cutout dataset with PSF map and diagonal
    energy dispersion."""
    size = Angle("3 deg", "3.5 deg")

    counts = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    counts = counts.cutout(counts.geom.center_skydir, size)

    bkg_map = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz")
    bkg_map = bkg_map.cutout(bkg_map.geom.center_skydir, size)
    bkg_model = BackgroundModel(bkg_map, datasets_names=["fermi-3fhl-gc"])

    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz")
    exposure = exposure.cutout(exposure.geom.center_skydir, size)
    exposure.unit = "cm2 s"

    table_psf = EnergyDependentTablePSF.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-psf-cube.fits.gz")
    psf_map = PSFMap.from_energy_dependent_table_psf(table_psf)

    # diagonal response matching reco (counts) and true (exposure) axes
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=counts.geom.axes["energy"],
        energy_axis_true=exposure.geom.axes["energy_true"],
    )

    return MapDataset(
        counts=counts,
        models=[bkg_model],
        exposure=exposure,
        psf=psf_map,
        name="fermi-3fhl-gc",
        edisp=edisp,
    )
def get_map_dataset(sky_model, geom, geom_etrue, edisp=True, **kwargs):
    """This computes the total npred"""
    # flat background template over the reco geometry
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = 0.2 * np.ones(bkg_map.data.shape)
    background_model = BackgroundModel(bkg_map)

    psf = get_psf(geom_etrue)
    exposure = get_exposure(geom_etrue)

    if edisp:
        # diagonal energy dispersion between the true and reco edges
        e_true = geom_etrue.get_axis_by_name("energy").edges
        e_reco = geom.get_axis_by_name("energy").edges
        edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)
    else:
        edisp = None

    # fit mask: 1 deg circle around the source position
    center = sky_model.spatial_model.position
    circle = CircleSkyRegion(center=center, radius=1 * u.deg)
    mask_fit = background_model.map.geom.region_mask([circle])

    return MapDataset(
        model=sky_model,
        exposure=exposure,
        background_model=background_model,
        psf=psf,
        edisp=edisp,
        mask_fit=mask_fit,
        **kwargs
    )
def test_names(geom, geom_etrue, sky_model):
    """Dataset and model copies must get fresh names unless one is given,
    while model identities stay independent."""
    template = Map.from_geom(geom)
    template.quantity = 0.2 * np.ones(template.data.shape)
    background_model1 = BackgroundModel(template, name="bkg1", datasets_names=["test"])
    assert background_model1.name == "bkg1"

    counts = Map.from_geom(geom)
    counts.quantity = 0.3 * np.ones(counts.data.shape)

    # an anonymous copy gets a new auto-generated name
    model1 = sky_model.copy()
    assert model1.name != sky_model.name
    model1 = sky_model.copy(name="model1")
    assert model1.name == "model1"
    model2 = sky_model.copy(name="model2")

    dataset1 = MapDataset(
        counts=counts,
        models=Models([model1, model2, background_model1]),
        exposure=get_exposure(geom_etrue),
        name="test",
    )

    dataset2 = dataset1.copy()
    assert dataset2.name != dataset1.name
    assert dataset2.background_model

    dataset2 = dataset1.copy(name="dataset2")
    assert dataset2.name == "dataset2"
    assert dataset2.background_model.name == "bkg1"
    # copies share names but never objects
    assert dataset1.background_model is not dataset2.background_model
    assert dataset1.models.names == dataset2.models.names
    assert dataset1.models is not dataset2.models
def get_map_dataset(sky_model, geom, geom_etrue, edisp=True, name="test", **kwargs):
    """Returns a MapDatasets"""
    # flat background template tied to this dataset name
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = 0.2 * np.ones(bkg_map.data.shape)
    background_model = BackgroundModel(bkg_map, datasets_names=[name])

    psf = get_psf()
    exposure = get_exposure(geom_etrue)

    if edisp:
        # diagonal energy dispersion on the true-energy axis
        e_true = geom_etrue.get_axis_by_name("energy_true")
        edisp = EDispMap.from_diagonal_response(energy_axis_true=e_true)
    else:
        edisp = None

    # fit mask: 1 deg circle around the source position
    center = sky_model.spatial_model.position
    circle = CircleSkyRegion(center=center, radius=1 * u.deg)
    mask_fit = background_model.map.geom.region_mask([circle])
    mask_fit = Map.from_geom(geom, data=mask_fit)

    return MapDataset(
        models=[sky_model, background_model],
        exposure=exposure,
        psf=psf,
        edisp=edisp,
        mask_fit=mask_fit,
        name=name,
        **kwargs
    )
def make_map_dataset(observations, target_pos, geom, geom_true, offset_max=2 * u.deg):
    """Run the map maker over the observations and bundle maps plus IRFs
    into a MapDataset."""
    maker = MapMaker(geom, offset_max, geom_true=geom_true)
    maps = maker.run(observations)
    table_psf = make_mean_psf(observations, target_pos)

    # PSF kernel used for the model convolution
    psf_kernel = PSFKernel.from_table_psf(table_psf, geom_true, max_radius="0.3 deg")

    edisp = make_mean_edisp(
        observations,
        target_pos,
        e_true=geom_true.axes[0].edges,
        e_reco=geom.axes[0].edges,
    )

    background_model = BackgroundModel(maps["background"])
    # fit the overall background normalisation; keep the tilt fixed
    background_model.parameters["norm"].frozen = False
    background_model.parameters["tilt"].frozen = True

    return MapDataset(
        counts=maps["counts"],
        exposure=maps["exposure"],
        background_model=background_model,
        psf=psf_kernel,
        edisp=edisp,
    )
def input_dataset():
    """Single-energy-bin map dataset built from the bundled Poisson stats
    image, with the first 40 image rows masked out."""
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    energy = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1, name="energy_true")

    counts_image = Map.read(filename, hdu="counts")
    exposure_image = Map.read(filename, hdu="exposure")
    exposure_image.unit = "cm2s"
    background_image = Map.read(filename, hdu="background")

    name = "test-dataset"
    background_model = BackgroundModel(
        background_image.to_cube([energy]), datasets_names=[name]
    )

    # mask out the first 40 image rows
    mask_data = np.ones_like(background_image.data).astype("bool")
    mask_data[0:40, :] = False
    mask = Map.from_geom(geom=counts_image.geom, data=mask_data).to_cube([energy])

    return MapDataset(
        counts=counts_image.to_cube([energy]),
        exposure=exposure_image.to_cube([energy_true]),
        models=background_model,
        mask_safe=mask,
        name=name,
    )
def from_hdulist(cls, hdulist, name=""):
    """Create map dataset from list of HDUs.

    Each component is optional: only the HDUs present in the list are
    parsed, and the matching keyword is passed on to the constructor.

    Parameters
    ----------
    hdulist : `~astropy.io.fits.HDUList`
        List of HDUs.
    name : str
        Name of the dataset.

    Returns
    -------
    dataset : `MapDataset`
        Map dataset.
    """
    kwargs = {"name": name}
    if "COUNTS" in hdulist:
        kwargs["counts"] = Map.from_hdulist(hdulist, hdu="counts")
    if "EXPOSURE" in hdulist:
        kwargs["exposure"] = Map.from_hdulist(hdulist, hdu="exposure")
    if "BACKGROUND" in hdulist:
        # the background map is wrapped into a template model
        background_map = Map.from_hdulist(hdulist, hdu="background")
        kwargs["background_model"] = BackgroundModel(background_map)
    if "EDISP_MATRIX" in hdulist:
        # 2D energy dispersion matrix (older serialization format)
        kwargs["edisp"] = EnergyDispersion.from_hdulist(
            hdulist, hdu1="EDISP_MATRIX", hdu2="EDISP_MATRIX_EBOUNDS"
        )
    if "EDISP" in hdulist:
        # full energy-dispersion map plus its associated exposure
        edisp_map = Map.from_hdulist(hdulist, hdu="edisp")
        exposure_map = Map.from_hdulist(hdulist, hdu="edisp_exposure")
        kwargs["edisp"] = EDispMap(edisp_map, exposure_map)
    if "PSF_KERNEL" in hdulist:
        psf_map = Map.from_hdulist(hdulist, hdu="psf_kernel")
        kwargs["psf"] = PSFKernel(psf_map)
    if "PSF" in hdulist:
        psf_map = Map.from_hdulist(hdulist, hdu="psf")
        exposure_map = Map.from_hdulist(hdulist, hdu="psf_exposure")
        kwargs["psf"] = PSFMap(psf_map, exposure_map)
    if "MASK_SAFE" in hdulist:
        # masks are stored numerically in FITS; cast back to bool
        mask_safe = Map.from_hdulist(hdulist, hdu="mask_safe")
        mask_safe.data = mask_safe.data.astype(bool)
        kwargs["mask_safe"] = mask_safe
    if "MASK_FIT" in hdulist:
        mask_fit = Map.from_hdulist(hdulist, hdu="mask_fit")
        mask_fit.data = mask_fit.data.astype(bool)
        kwargs["mask_fit"] = mask_fit
    if "GTI" in hdulist:
        gti = GTI(Table.read(hdulist, hdu="GTI"))
        kwargs["gti"] = gti
    return cls(**kwargs)
def from_geoms(
    cls,
    geom,
    geom_exposure,
    geom_psf,
    geom_edisp,
    reference_time="2000-01-01",
    name="",
    **kwargs,
):
    """
    Create a MapDataset object with zero filled maps according to the
    specified geometries

    Parameters
    ----------
    geom : `Geom`
        geometry for the counts and background maps
    geom_exposure : `Geom`
        geometry for the exposure map
    geom_psf : `Geom`
        geometry for the psf map
    geom_edisp : `Geom`
        geometry for the energy dispersion map
    reference_time : `~astropy.time.Time`
        the reference time to use in GTI definition
    name : str
        Name of the dataset.

    Returns
    -------
    empty_maps : `MapDataset`
        A MapDataset containing zero filled maps
    """
    # empty GTI table anchored at the reference time
    gti = GTI.create([] * u.s, [] * u.s, reference_time=reference_time)

    # zero-filled background template on the counts geometry
    bkg_map = Map.from_geom(geom, unit="")

    return cls(
        counts=Map.from_geom(geom, unit=""),
        exposure=Map.from_geom(geom_exposure, unit="m2 s"),
        psf=PSFMap.from_geom(geom_psf),
        edisp=EDispMap.from_geom(geom_edisp),
        background_model=BackgroundModel(bkg_map),
        gti=gti,
        mask_safe=Map.from_geom(geom, unit="", dtype=bool),
        name=name,
        **kwargs,
    )
def stack(self, other):
    """Stack another dataset in place.

    Counts and background are zeroed outside the safe masks before being
    summed; in this variant ``mask_safe`` is a plain boolean array, so it
    is temporarily wrapped in maps to reuse ``Map.stack``.

    Parameters
    ----------
    other: `~gammapy.cube.MapDataset`
        Map dataset to be stacked with this one.
    """
    if self.counts and other.counts:
        # zero counts outside the safe mask before summing
        self.counts.data[~self.mask_safe] = 0
        self.counts.stack(other.counts, weights=other.mask_safe)

    if self.exposure and other.exposure:
        self.exposure.stack(other.exposure)

    if self.background_model and other.background_model:
        # evaluate both templates, blank unsafe bins on each side, then
        # freeze the summed map into a new template model
        bkg = self.background_model.evaluate()
        bkg.data[~self.mask_safe] = 0
        other_bkg = other.background_model.evaluate()
        other_bkg.data[~other.mask_safe] = 0
        bkg.stack(other_bkg)
        self.background_model = BackgroundModel(bkg, name=self.background_model.name)

    if self.mask_safe is not None and other.mask_safe is not None:
        # TODO: make mask_safe a Map object
        mask_safe = Map.from_geom(self.counts.geom, data=self.mask_safe)
        mask_safe_other = Map.from_geom(other.counts.geom, data=other.mask_safe)
        mask_safe.stack(mask_safe_other)
        self.mask_safe = mask_safe.data

    if self.psf and other.psf:
        if isinstance(self.psf, PSFMap) and isinstance(other.psf, PSFMap):
            self.psf.stack(other.psf)
        else:
            raise ValueError("Stacking of PSF kernels not supported")

    if self.edisp and other.edisp:
        if isinstance(self.edisp, EDispMap) and isinstance(other.edisp, EDispMap):
            self.edisp.stack(other.edisp)
        else:
            raise ValueError("Stacking of edisp kernels not supported")

    if self.gti and other.gti:
        self.gti = self.gti.stack(other.gti).union()
def test_compute_lima_image():
    """
    Test Li & Ma image against TS image for Tophat kernel
    """
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    counts = image_to_cube(Map.read(filename, hdu="counts"), "1 GeV", "100 GeV")
    background = image_to_cube(Map.read(filename, hdu="background"), "1 GeV", "100 GeV")

    dataset = MapDataset(counts=counts)
    bkg_model = BackgroundModel(background)
    # attach the background model to the (auto-named) dataset
    bkg_model.datasets_names = [dataset.name]
    dataset.models = bkg_model

    estimator = ExcessMapEstimator("0.1 deg", selection_optional=None)
    result_lima = estimator.run(dataset)

    assert_allclose(result_lima["significance"].data[:, 100, 100], 30.814916, atol=1e-3)
    assert_allclose(result_lima["significance"].data[:, 1, 1], 0.164, atol=1e-3)
def test_stack(geom, geom_etrue):
    """Stacking two named datasets must merge counts, background, exposure
    and safe masks, leaving a single background model behind."""
    template = Map.from_geom(geom)
    template.quantity = 0.2 * np.ones(template.data.shape)
    bkg1 = BackgroundModel(template, name="dataset-1-bkg", datasets_names=["dataset-1"])

    counts1 = Map.from_geom(geom)
    counts1.quantity = 0.3 * np.ones(counts1.data.shape)

    mask_data1 = np.ones(template.data.shape, dtype=bool)
    mask_data1[0][0][0:10] = False
    mask1 = Map.from_geom(geom, data=mask_data1)

    dataset1 = MapDataset(
        counts=counts1,
        models=[bkg1],
        exposure=get_exposure(geom_etrue),
        mask_safe=mask1,
        name="dataset-1",
    )

    counts2 = Map.from_geom(geom)
    counts2.quantity = 0.1 * np.ones(counts2.data.shape)
    bkg2 = BackgroundModel(
        template, norm=0.5, name="dataset-2-bkg", datasets_names=["dataset-2"]
    )

    mask_data2 = np.ones(template.data.shape, dtype=bool)
    mask_data2[0][3] = False
    mask2 = Map.from_geom(geom, data=mask_data2)

    dataset2 = MapDataset(
        counts=counts2,
        models=[bkg2],
        exposure=get_exposure(geom_etrue),
        mask_safe=mask2,
        name="dataset-2",
    )

    dataset1.stack(dataset2)

    assert_allclose(dataset1.counts.data.sum(), 7987)
    assert_allclose(dataset1.background_model.map.data.sum(), 5987)
    assert_allclose(dataset1.exposure.data, 2.0 * dataset2.exposure.data)
    assert_allclose(dataset1.mask_safe.data.sum(), 20000)
    # the two background models are merged into one
    assert len(dataset1.models) == 1
def run(self, dataset, observation):
    """Make map dataset.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Reference dataset.
    observation : `~gammapy.data.Observation`
        Observation

    Returns
    -------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset.
    """
    kwargs = {"gti": observation.gti}
    kwargs["meta_table"] = self.make_meta_table(observation)

    # start from an all-True safe mask on the counts geometry
    mask_safe = Map.from_geom(dataset.counts.geom, dtype=bool)
    mask_safe.data |= True
    kwargs["mask_safe"] = mask_safe

    if "counts" in self.selection:
        counts = self.make_counts(dataset.counts.geom, observation)
        kwargs["counts"] = counts

    if "exposure" in self.selection:
        exposure = self.make_exposure(dataset.exposure.geom, observation)
        kwargs["exposure"] = exposure

    if "background" in self.selection:
        background_map = self.make_background(dataset.counts.geom, observation)
        # the background enters as a template model tied to this dataset
        kwargs["models"] = BackgroundModel(
            background_map,
            name=dataset.name + "-bkg",
            datasets_names=[dataset.name],
        )

    if "psf" in self.selection:
        psf = self.make_psf(dataset.psf.psf_map.geom, observation)
        kwargs["psf"] = psf

    if "edisp" in self.selection:
        # a MIGRA axis means a full edisp map; otherwise build a kernel map
        if dataset.edisp.edisp_map.geom.axes[0].name.upper() == "MIGRA":
            edisp = self.make_edisp(dataset.edisp.edisp_map.geom, observation)
        else:
            edisp = self.make_edisp_kernel(dataset.edisp.edisp_map.geom, observation)
        kwargs["edisp"] = edisp

    return MapDataset(name=dataset.name, **kwargs)
def input_dataset_simple():
    """One-energy-bin map dataset from the bundled Poisson stats image."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"

    counts = Map.read(filename, hdu="counts").to_cube(axes=[axis])
    bkg_map = Map.read(filename, hdu="background").to_cube(axes=[axis])
    bkg_model = BackgroundModel(bkg_map, datasets_names="test")

    return MapDataset(counts=counts, models=[bkg_model], name="test")
def test_to_image(geom):
    """Check MapDataset.to_image: summed counts/background, safe-mask
    reduction along energy, and tolerance to missing components."""
    counts = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    background = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz")
    background = BackgroundModel(background, datasets_names=["fermi"])
    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz")
    exposure = exposure.sum_over_axes(keepdims=True)
    dataset = MapDataset(
        counts=counts, models=[background], exposure=exposure, name="fermi"
    )
    dataset_im = dataset.to_image()

    assert dataset_im.mask_safe is None
    assert dataset_im.counts.data.sum() == dataset.counts.data.sum()
    assert_allclose(dataset_im.background_model.map.data.sum(), 28548.625, rtol=1e-5)

    ebounds = np.logspace(-1.0, 1.0, 3)
    axis = MapAxis.from_edges(ebounds, name="energy", unit=u.TeV, interp="log")
    geom = WcsGeom.create(
        skydir=(0, 0), binsz=0.5, width=(1, 1), frame="icrs", axes=[axis]
    )
    dataset = MapDataset.create(geom)

    # Check map_safe handling: a pixel is safe if safe in any energy bin
    data = np.array([[[False, True], [True, True]], [[False, False], [True, True]]])
    dataset.mask_safe = WcsNDMap.from_geom(geom=geom, data=data)

    dataset_im = dataset.to_image()
    assert dataset_im.mask_safe.data.dtype == bool

    desired = np.array([[False, True], [True, True]])
    assert (dataset_im.mask_safe.data == desired).all()

    # Check that missing entries in the dataset do not break
    dataset_copy = dataset.copy()
    dataset_copy.exposure = None
    dataset_copy._background_model = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.exposure is None
    # bug fix: identity comparison with None (was `== None`, PEP 8 E711)
    assert dataset_im.background_model is None

    dataset_copy = dataset.copy()
    dataset_copy.counts = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.counts is None
def get_fermi_3fhl_gc_dataset():
    """Fermi 3FHL Galactic-centre dataset: counts, background model and
    exposure read from $GAMMAPY_DATA."""
    counts = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    bkg_map = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz")
    bkg_model = BackgroundModel(bkg_map, datasets_names=["fermi-3fhl-gc"])
    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz")

    return MapDataset(
        counts=counts,
        models=[bkg_model],
        exposure=exposure,
        name="fermi-3fhl-gc",
    )
def run(self, dataset, observation, selection=None):
    """Make map dataset.

    Parameters
    ----------
    dataset : `MapDataset`
        Reference dataset.
    observation : `~gammapy.data.DataStoreObservation`
        Observation
    selection : list
        List of str, selecting which maps to make.
        Available: 'counts', 'exposure', 'background', 'psf', 'edisp'
        By default, all maps are made.

    Returns
    -------
    dataset : `MapDataset`
        Map dataset.
    """
    selection = _check_selection(selection)
    kwargs = {"name": f"obs_{observation.obs_id}", "gti": observation.gti}

    # cut the reference geometry around the observation pointing
    geom = self._cutout_geom(dataset.counts.geom, observation)

    # start from an all-True safe mask
    mask_safe = Map.from_geom(geom, dtype=bool)
    mask_safe.data |= True
    kwargs["mask_safe"] = mask_safe

    if "counts" in selection:
        counts = self.make_counts(dataset, observation)
        kwargs["counts"] = counts

    if "exposure" in selection:
        exposure = self.make_exposure(dataset, observation)
        kwargs["exposure"] = exposure

    if "background" in selection:
        background_map = self.make_background(dataset, observation)
        kwargs["background_model"] = BackgroundModel(background_map)

    if "psf" in selection:
        psf = self.make_psf(dataset, observation)
        kwargs["psf"] = psf

    if "edisp" in selection:
        edisp = self.make_edisp(dataset, observation)
        kwargs["edisp"] = edisp

    return MapDataset(**kwargs)
def spectrum_dataset():
    """Fake a spectrum dataset with a power-law source, a constant temporal
    model and a flat background, over three GTIs."""
    name = "test"
    energy = np.logspace(-1, 1, 31) * u.TeV
    livetime = 100 * u.s

    pwl = PowerLawSpectralModel(
        index=2.1,
        amplitude="1e5 cm-2 s-1 TeV-1",
        reference="0.1 TeV",
    )
    temp_mod = ConstantTemporalModel()
    source = SkyModel(spectral_model=pwl, temporal_model=temp_mod, name="test-source")

    axis = MapAxis.from_edges(energy, interp="log", name="energy")
    axis_true = MapAxis.from_edges(energy, interp="log", name="energy_true")

    # flat background rate of 1/s per bin, scaled by the livetime
    background = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis])
    background.quantity = (np.ones(30) / u.s) * livetime

    bkg_model = BackgroundModel(background, name=name + "-bkg", datasets_names=[name])
    bkg_model.spectral_model.norm.frozen = True

    exposure = RegionNDMap.create(region="icrs;circle(0, 0, 0.1)", axes=[axis_true])
    exposure.quantity = u.Quantity("1 cm2") * livetime

    gti = GTI.create(
        [1, 3, 5] * u.day,
        [2, 3.5, 6] * u.day,
        reference_time=Time(55555, format="mjd"),
    )

    dataset = SpectrumDataset(
        models=Models([bkg_model, source]),
        exposure=exposure,
        name=name,
        gti=gti,
    )
    dataset.fake(random_state=23)
    return dataset
def to_image(self, spectrum=None, keepdims=True):
    """Create images by summing over the energy axis.

    Exposure is weighted with an assumed spectrum,
    resulting in a weighted mean exposure image.

    Parameters
    ----------
    spectrum : `~gammapy.modeling.models.SpectralModel`
        Spectral model to compute the weights.
        Default is power-law with spectral index of 2.
    keepdims : bool, optional
        If this is set to True, the energy axes is kept with a single bin.
        If False, the energy axes is removed

    Returns
    -------
    dataset : `MapDataset`
        Map dataset containing images.
    """
    # apply the safe mask before any summation
    counts = self.counts * self.mask_safe
    background = self.background_model.evaluate() * self.mask_safe
    counts = counts.sum_over_axes(keepdims=keepdims)

    # weighted-mean exposure image under the assumed spectrum
    exposure = _map_spectrum_weight(self.exposure, spectrum)
    exposure = exposure.sum_over_axes(keepdims=keepdims)
    background = background.sum_over_axes(keepdims=keepdims)

    # collapse the mask along energy: a pixel is safe if safe in any bin
    idx = self.mask_safe.geom.get_axis_index_by_name("ENERGY")
    data = np.logical_or.reduce(self.mask_safe.data, axis=idx, keepdims=keepdims)
    mask_image = WcsNDMap(geom=counts.geom, data=data)

    # TODO: add edisp and psf
    edisp = None
    psf = None

    return self.__class__(
        counts=counts,
        exposure=exposure,
        background_model=BackgroundModel(background),
        mask_safe=mask_image,
        edisp=edisp,
        psf=psf,
        gti=self.gti,
        name=self.name,
    )
def from_dict(cls, data, components, models):
    """Create from dicts and models list generated from YAML serialization."""
    dataset = cls.read(data["filename"], name=data["name"])
    bkg_name = data["background"]
    model_names = data["models"]

    for component in components["components"]:
        is_matching_bkg = (
            component["type"] == "BackgroundModel"
            and component["name"] == bkg_name
        )
        if is_matching_bkg:
            if "filename" not in component:
                # template not serialized separately: reuse the map read
                # back from the dataset file itself
                component["map"] = dataset.background_model.map
            dataset.background_model = BackgroundModel.from_dict(component)

    dataset.model = SkyModels(
        [model for model in models if model.name in model_names]
    )
    if "likelihood" in data:
        dataset.likelihood_type = data["likelihood"]
    return dataset
def run(self, dataset, observation):
    """Make map dataset.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Reference dataset.
    observation : `~gammapy.data.Observation`
        Observation

    Returns
    -------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset.
    """
    kwargs = {"gti": observation.gti}

    # start from an all-True safe mask on the counts geometry
    mask_safe = Map.from_geom(dataset.counts.geom, dtype=bool)
    mask_safe.data |= True
    kwargs["mask_safe"] = mask_safe

    if "counts" in self.selection:
        counts = self.make_counts(dataset.counts.geom, observation)
        kwargs["counts"] = counts

    if "exposure" in self.selection:
        exposure = self.make_exposure(dataset.exposure.geom, observation)
        kwargs["exposure"] = exposure

    if "background" in self.selection:
        background_map = self.make_background(dataset.counts.geom, observation)
        kwargs["background_model"] = BackgroundModel(background_map)

    if "psf" in self.selection:
        psf = self.make_psf(dataset.psf.psf_map.geom, observation)
        kwargs["psf"] = psf

    if "edisp" in self.selection:
        edisp = self.make_edisp(dataset.edisp.edisp_map.geom, observation)
        kwargs["edisp"] = edisp

    return MapDataset(name=dataset.name, **kwargs)
def input_dataset():
    """Single-bin MapDataset from the bundled Poisson stats image, masking
    the first 40 image rows."""
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    energy = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1, name="energy_true")

    counts_image = Map.read(filename, hdu="counts")
    counts = Map.from_geom(
        counts_image.geom.to_cube([energy]),
        data=counts_image.data[np.newaxis, :, :],
        unit=counts_image.unit,
    )

    exposure_image = Map.read(filename, hdu="exposure")
    exposure = Map.from_geom(
        exposure_image.geom.to_cube([energy_true]),
        data=exposure_image.data[np.newaxis, :, :],
        unit="cm2s",  # no unit in header?
    )

    background_image = Map.read(filename, hdu="background")
    background = Map.from_geom(
        background_image.geom.to_cube([energy]),
        data=background_image.data[np.newaxis, :, :],
        unit=background_image.unit,
    )
    background_model = BackgroundModel(background)

    # mask out the first 40 image rows
    mask_image = np.ones_like(background_image.data).astype("bool")
    mask_image[0:40, :] = False
    mask = Map.from_geom(
        background_image.geom.to_cube([energy]),
        data=mask_image[np.newaxis, :, :],
    )

    return MapDataset(
        counts=counts,
        exposure=exposure,
        models=background_model,
        mask_safe=mask,
    )