def run(self, dataset, observation=None):
    """Create an on-off dataset from the ring-background estimate.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input map dataset.

    Returns
    -------
    dataset_on_off : `~gammapy.datasets.MapDatasetOnOff`
        On off dataset.
    """
    from gammapy.datasets import MapDatasetOnOff

    kwargs = self.make_maps_off(dataset)
    kwargs["acceptance"] = dataset.npred_background()

    # Pixels without any off acceptance cannot be used — drop them
    # from the safe mask.
    safe_mask = dataset.mask_safe.copy()
    invalid_off = kwargs["acceptance_off"].data <= 0
    safe_mask.data[invalid_off] = 0

    on_off = MapDatasetOnOff.from_map_dataset(
        dataset=dataset, name=dataset.name, **kwargs
    )
    on_off.mask_safe = safe_mask
    return on_off
def extract_ring_adaptive(
    datasets,
    exclusion_mask,
    r_in="0.6 deg",
    width="0.3 deg",
    r_out_max=2.3 * u.deg,
    method="fixed_width",
):
    """Run the adaptive ring background maker on each dataset and stack the results.

    Each dataset is reduced to a 2D image (single squashed energy bin)
    before ring extraction, then stacked into one on-off dataset.
    """
    counts_geom = datasets[0].counts.geom
    energy_axis = counts_geom.axes["energy"]
    geom_image = counts_geom.to_image().to_cube([energy_axis.squash()])

    maker = AdaptiveRingBackgroundMaker(
        r_in=r_in,
        width=width,
        exclusion_mask=exclusion_mask,
        r_out_max=r_out_max,
        method=method,
    )

    stacked = MapDatasetOnOff.create(
        geom=geom_image,
        energy_axis_true=datasets[0].exposure.geom.axes["energy_true"],
        name="stacked",
    )
    for dataset in datasets:
        stacked.stack(maker.run(dataset.to_image()))
    return stacked
def test_create_onoff(geom):
    """Empty MapDatasetOnOff has zero-filled maps and IRFs of the expected shape."""
    migra_axis = MapAxis(nodes=np.linspace(0.0, 3.0, 51), unit="", name="migra")
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")
    energy_axis = geom.get_axis_by_name("energy").copy(name="energy_true")

    dataset = MapDatasetOnOff.create(geom, energy_axis, migra_axis, rad_axis)

    # All data maps start out empty.
    for attr in ("counts", "counts_off", "acceptance", "acceptance_off"):
        assert_allclose(getattr(dataset, attr).data.sum(), 0.0)

    assert dataset.psf.psf_map.data.shape == (2, 50, 10, 10)
    assert dataset.psf.exposure_map.data.shape == (2, 1, 10, 10)
    assert dataset.edisp.edisp_map.data.shape == (2, 50, 10, 10)
    assert dataset.edisp.exposure_map.data.shape == (2, 1, 10, 10)
    assert_allclose(dataset.edisp.edisp_map.data.sum(), 200)
    assert_allclose(dataset.gti.time_delta, 0.0 * u.s)
def run(self, dataset, observation=None):
    """Create an on-off dataset from the adaptive ring-background estimate.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Input map dataset.

    Returns
    -------
    dataset_on_off : `~gammapy.datasets.MapDatasetOnOff`
        On off dataset.
    """
    from gammapy.datasets import MapDatasetOnOff

    cubes = self.make_cubes(dataset)
    acceptance, acceptance_off, counts_off = self._reduce_cubes(cubes, dataset)

    # Pixels without any off acceptance are unusable — remove them
    # from the safe mask.
    safe_mask = dataset.mask_safe.copy()
    safe_mask.data[acceptance_off.data <= 0] = 0

    on_off = MapDatasetOnOff.from_map_dataset(
        dataset=dataset,
        counts_off=counts_off,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name=dataset.name,
    )
    on_off.mask_safe = safe_mask
    return on_off
def test_asmooth_map_dataset_on_off():
    """Adaptive smoothing of a flat on-off dataset gives the expected pixel values."""
    kernel = Tophat2DKernel
    scales = ASmoothMapEstimator.get_scales(3, factor=2, kernel=kernel) * 0.1 * u.deg
    estimator = ASmoothMapEstimator(
        kernel=kernel, scales=scales, method="lima", threshold=2.5
    )

    def flat_map(value):
        # Constant-valued map on the shared test geometry.
        m = WcsNDMap.create(npix=(50, 50), binsz=0.02, unit="")
        m += value
        return m

    dataset = MapDatasetOnOff(
        counts=flat_map(2),
        counts_off=flat_map(3),
        acceptance=flat_map(1),
        acceptance_off=flat_map(3),
    )

    result = estimator.run(dataset)
    assert_allclose(result["counts"].data[25, 25], 2)
    assert_allclose(result["background"].data[25, 25], 1)
    assert_allclose(result["significance"].data[25, 25], 4.391334)
def create_stacked_dataset(analysis):
    """Stack ring-background on-off datasets for all datasets in *analysis*.

    An exclusion region of 0.3 deg around the source position is masked
    before running the ring maker on each 2D image.
    """
    # Hard-coded position instead of SkyCoord.from_name("MSH 15-52")
    # to avoid a name-resolution lookup.
    source_pos = SkyCoord(228.32, -59.08, unit="deg")

    # Reuse the geometry of the first dataset, squashed to a single energy bin.
    geom = analysis.datasets[0].counts.geom
    geom_image = geom.to_image().to_cube([geom.axes["energy"].squash()])

    # Exclusion mask around the source.
    exclusion_region = CircleSkyRegion(center=source_pos, radius=0.3 * u.deg)
    exclusion_mask = geom_image.region_mask([exclusion_region], inside=False)

    maker = RingBackgroundMaker(
        r_in="0.5 deg", width="0.3 deg", exclusion_mask=exclusion_mask
    )

    stacked = MapDatasetOnOff.create(
        geom=geom_image,
        energy_axis_true=analysis.datasets[0].exposure.geom.axes["energy_true"],
        name="stacked",
    )
    for dataset in analysis.datasets:
        # Ring extraction makes sense only for a 2D analysis.
        stacked.stack(maker.run(dataset.to_image()))
    return stacked
def test_plot_residual_onoff():
    """Spatial residual plotting on an on-off dataset runs without error."""
    axis = MapAxis.from_energy_bounds(1, 10, 2, unit="TeV")
    geom = WcsGeom.create(npix=(10, 10), binsz=0.05, axes=[axis])

    def unit_map(scale=1):
        # Fresh data array per map so the maps do not share memory.
        return Map.from_geom(geom, data=scale * np.ones((2, 10, 10)))

    dataset = MapDatasetOnOff(
        counts=unit_map(),
        counts_off=unit_map(),
        acceptance=unit_map(),
        acceptance_off=unit_map(2),
    )
    with mpl_plot_check():
        dataset.plot_residuals_spatial()
def simple_dataset_on_off():
    """Return a uniform on-off dataset: counts=2, counts_off=1, unit acceptances."""
    energy_axis = MapAxis.from_energy_bounds(0.1, 10, 2, unit="TeV")
    geom = WcsGeom.create(npix=20, binsz=0.02, axes=[energy_axis])

    dataset = MapDatasetOnOff.create(geom)
    dataset.mask_safe += np.ones(dataset.data_shape, dtype=bool)
    dataset.acceptance += 1
    dataset.acceptance_off += 1
    dataset.counts += 2
    dataset.counts_off += 1
    return dataset
def test_map_dataset_on_off_to_spectrum_dataset_weights():
    """Spectrum extraction honors the safe mask: masked bins contribute zero."""
    e_reco = MapAxis.from_bounds(1, 10, nbin=3, unit="TeV", name="energy")
    geom = WcsGeom.create(
        skydir=(0, 0), width=(2.5, 2.5), binsz=0.5, axes=[e_reco], frame="galactic"
    )

    def const_map(value):
        m = Map.from_geom(geom)
        m.data += value
        return m

    # Only a few pixels in the upper energy bins are flagged safe.
    weights = Map.from_geom(geom, dtype="bool")
    weights.data[1:, 2:4, 2] = True

    dataset = MapDatasetOnOff(
        counts=const_map(1),
        counts_off=const_map(2),
        acceptance=const_map(1),
        acceptance_off=const_map(4),
        mask_safe=weights,
        gti=GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00"),
    )

    on_region = CircleSkyRegion(
        center=dataset.counts.geom.center_skydir, radius=1.5 * u.deg
    )
    spec = dataset.to_spectrum_dataset(on_region)

    assert_allclose(spec.counts.data[:, 0, 0], [0, 2, 2])
    assert_allclose(spec.counts_off.data[:, 0, 0], [0, 4, 4])
    assert_allclose(spec.acceptance.data[:, 0, 0], [0, 0.08, 0.08])
    assert_allclose(spec.acceptance_off.data[:, 0, 0], [0, 0.32, 0.32])
    assert_allclose(spec.alpha.data[:, 0, 0], [0, 0.25, 0.25])
def get_simple_dataset_on_off():
    """Return a uniform on-off dataset with exposure and a 5 h GTI."""
    energy_axis = MapAxis.from_energy_bounds(0.1, 10, 2, unit="TeV")
    geom = WcsGeom.create(npix=40, binsz=0.01, axes=[energy_axis])

    dataset = MapDatasetOnOff.create(geom)
    dataset.mask_safe += 1
    dataset.acceptance += 1
    dataset.acceptance_off += 1
    dataset.counts += 5
    dataset.counts_off += 1
    dataset.exposure += 1000 * u.m ** 2 * u.s
    dataset.gti = GTI.create(
        [0 * u.s], [5 * u.h], reference_time="2010-01-01T00:00:00"
    )
    return dataset
def get_map_dataset_onoff(images, **kwargs):
    """Build a MapDatasetOnOff from a dict of images, with an all-True safe mask."""
    counts = images["counts"]
    mask_safe = Map.from_geom(
        counts.geom, data=np.ones(counts.data.shape, dtype=bool)
    )
    return MapDatasetOnOff(
        counts=counts,
        counts_off=images["counts_off"],
        acceptance=images["acceptance"],
        acceptance_off=images["acceptance_off"],
        exposure=images["exposure"],
        mask_safe=mask_safe,
        **kwargs,
    )
def test_to_map_dataset():
    """Converting an on-off dataset to a MapDataset keeps counts and background."""
    axis = MapAxis.from_energy_bounds(1, 10, 2, unit="TeV")
    geom = WcsGeom.create(npix=(10, 10), binsz=0.05, axes=[axis])

    def unit_map(scale=1):
        # Independent data buffer per map.
        return Map.from_geom(geom, data=scale * np.ones((2, 10, 10)))

    dataset_onoff = MapDatasetOnOff(
        counts=unit_map(),
        counts_off=unit_map(),
        acceptance=unit_map(),
        acceptance_off=unit_map(2),
    )

    dataset = dataset_onoff.to_map_dataset(name="ds")

    assert dataset.name == "ds"
    assert_allclose(dataset.npred_background().data.sum(), 100)
    assert isinstance(dataset, MapDataset)
    assert dataset.counts == dataset_onoff.counts
def test_downsample_onoff():
    """Downsampling along energy halves the bins while conserving counts and alpha."""
    axis = MapAxis.from_energy_bounds(1, 10, 4, unit="TeV")
    geom = WcsGeom.create(npix=(10, 10), binsz=0.05, axes=[axis])

    def unit_map(scale=1):
        return Map.from_geom(geom, data=scale * np.ones((4, 10, 10)))

    dataset_onoff = MapDatasetOnOff(
        counts=unit_map(),
        counts_off=unit_map(),
        acceptance=unit_map(),
        acceptance_off=unit_map(2),
    )

    downsampled = dataset_onoff.downsample(2, axis_name="energy")

    assert downsampled.counts.data.shape == (2, 10, 10)
    assert downsampled.counts.data.sum() == dataset_onoff.counts.data.sum()
    assert downsampled.counts_off.data.sum() == dataset_onoff.counts_off.data.sum()
    assert_allclose(downsampled.alpha.data, 0.5)
def test_map_dataset_on_off_fits_io(images, tmp_path):
    """Round-trip a MapDatasetOnOff through FITS and check all components survive."""
    dataset = get_map_dataset_onoff(images)
    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    dataset.gti = gti

    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]

    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "GTI",
        "COUNTS_OFF",
        "COUNTS_OFF_BANDS",
        "ACCEPTANCE",
        "ACCEPTANCE_BANDS",
        "ACCEPTANCE_OFF",
        "ACCEPTANCE_OFF_BANDS",
    ]
    assert actual == desired

    dataset.write(tmp_path / "test.fits")
    dataset_new = MapDatasetOnOff.read(tmp_path / "test.fits")

    assert len(dataset_new.models) == 0
    assert dataset_new.mask.dtype == bool

    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(dataset.counts_off.data, dataset_new.counts_off.data)
    assert_allclose(dataset.acceptance.data, dataset_new.acceptance.data)
    assert_allclose(dataset.acceptance_off.data, dataset_new.acceptance_off.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_safe, dataset_new.mask_safe)
    # Fixed: was `assert np.all(...) == True` — comparing to True is redundant
    # (flake8 E712); the truthy np.bool_ result asserts directly.
    assert np.all(dataset.mask_safe.data == dataset_new.mask_safe.data)

    assert dataset.mask_safe.geom == dataset_new.mask_safe.geom
    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert_allclose(
        dataset.gti.time_sum.to_value("s"), dataset_new.gti.time_sum.to_value("s")
    )
def test_compute_lima_on_off_image():
    """Test Li & Ma image with snippet from the H.E.S.S. survey data."""
    filename = "$GAMMAPY_DATA/tests/unbundled/hess/survey/hess_survey_snippet.fits.gz"
    n_on = Map.read(filename, hdu="ON")
    counts = image_to_cube(n_on, "1 TeV", "100 TeV")
    n_off = Map.read(filename, hdu="OFF")
    counts_off = image_to_cube(n_off, "1 TeV", "100 TeV")
    a_on = Map.read(filename, hdu="ONEXPOSURE")
    acceptance = image_to_cube(a_on, "1 TeV", "100 TeV")
    a_off = Map.read(filename, hdu="OFFEXPOSURE")
    acceptance_off = image_to_cube(a_off, "1 TeV", "100 TeV")

    dataset = MapDatasetOnOff(
        counts=counts,
        counts_off=counts_off,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
    )

    significance = Map.read(filename, hdu="SIGNIFICANCE")
    significance = image_to_cube(significance, "1 TeV", "10 TeV")
    estimator = ExcessMapEstimator("0.1 deg", correlate_off=False)
    results = estimator.run(dataset)

    # Reproduce safe significance threshold from HESS software
    results["sqrt_ts"].data[results["npred"].data < 5] = 0

    # Crop the image at the boundaries: because the reference image is cut out
    # from a large map, there is no way to reproduce the result with regular
    # boundary handling.
    actual = results["sqrt_ts"].crop((11, 11)).data
    desired = significance.crop((11, 11)).data

    # The absolute tolerance is low because the method used here is slightly
    # different from the one used in HGPS: n_off is convolved as well to
    # ensure the method applies to true ON-OFF datasets.
    assert_allclose(actual, desired, atol=0.2, rtol=1e-5)

    actual = np.nan_to_num(results["npred_background"].crop((11, 11)).data)
    background_corr = image_to_cube(
        Map.read(filename, hdu="BACKGROUNDCORRELATED"), "1 TeV", "100 TeV"
    )
    desired = background_corr.crop((11, 11)).data

    # Fixed: this trailing comment was broken across two physical lines in the
    # original, leaving the fragment "ON-OFF datasets" outside any comment and
    # making the final assert unreachable/syntactically invalid. Same tolerance
    # rationale as above.
    assert_allclose(actual, desired, atol=0.2, rtol=1e-5)
def test_stack_dataset_dataset_on_off():
    """Stacking an on-off dataset into a MapDataset produces the scaled background."""
    axis = MapAxis.from_edges([1, 10] * u.TeV, name="energy")
    geom = WcsGeom.create(width=1, axes=[axis])
    gti = GTI.create([0 * u.s], [1 * u.h])

    dataset = MapDataset.create(geom, gti=gti)

    on_off = MapDatasetOnOff.create(geom, gti=gti)
    on_off.mask_safe.data += True
    on_off.acceptance += 1
    on_off.acceptance_off += 5
    on_off.counts_off += 1

    dataset.stack(on_off)
    assert_allclose(dataset.npred_background().data, 0.166667, rtol=1e-3)
def test_map_dataset_on_off_fake(geom):
    """fake() fills counts/counts_off and leaves zero-acceptance pixels empty."""
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="rad")
    energy_true_axis = geom.axes["energy"].copy(name="energy_true")

    dataset = MapDataset.create(geom, energy_true_axis, rad_axis=rad_axis)
    dataset = MapDatasetOnOff.from_map_dataset(
        dataset, acceptance=1, acceptance_off=10.0
    )
    # One pixel with zero off acceptance: fake() must produce no counts there.
    dataset.acceptance_off.data[0, 50, 50] = 0

    dataset.fake(Map.from_geom(geom, data=1), random_state=42)

    assert_allclose(dataset.counts.data[0, 50, 50], 0)
    assert_allclose(dataset.counts.data.mean(), 0.99445, rtol=1e-3)
    assert_allclose(dataset.counts_off.data.mean(), 10.00055, rtol=1e-3)
def test_stack_onoff_cutout(geom_image):
    """Stacking a cutout back into its parent dataset conserves map sums."""
    dataset = MapDatasetOnOff.create(geom_image)
    dataset.gti = GTI.create(
        [0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00"
    )

    cutout_geom = geom_image.cutout(
        position=geom_image.center_skydir, width=1 * u.deg
    )
    cutout = dataset.create(cutout_geom)

    dataset.stack(cutout)

    for attr in ("counts", "counts_off", "alpha", "exposure"):
        assert_allclose(
            getattr(dataset, attr).data.sum(), getattr(cutout, attr).data.sum()
        )
    assert cutout.name != dataset.name
def get_map_dataset_onoff(images, **kwargs):
    """Build a MapDatasetOnOff from a dict of images, with an all-True safe mask and a 1 h GTI."""
    counts = images["counts"]
    mask_safe = Map.from_geom(
        counts.geom, data=np.ones(counts.data.shape, dtype=bool)
    )
    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    return MapDatasetOnOff(
        counts=counts,
        counts_off=images["counts_off"],
        acceptance=images["acceptance"],
        acceptance_off=images["acceptance_off"],
        exposure=images["exposure"],
        mask_safe=mask_safe,
        gti=gti,
        **kwargs,
    )
def test_asmooth_map_dataset_on_off():
    """Adaptive smoothing of a flat on-off dataset (with energy axis) gives expected values."""
    kernel = Tophat2DKernel
    scales = ASmoothMapEstimator.get_scales(3, factor=2, kernel=kernel) * 0.1 * u.deg
    estimator = ASmoothMapEstimator(
        kernel=kernel, scales=scales, method="lima", threshold=2.5
    )

    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=1)

    def flat_map(value):
        # Constant-valued map on the shared test geometry.
        m = WcsNDMap.create(npix=(50, 50), binsz=0.02, unit="", axes=[axis])
        m += value
        return m

    dataset = MapDatasetOnOff(
        counts=flat_map(2),
        counts_off=flat_map(3),
        acceptance=flat_map(1),
        acceptance_off=flat_map(3),
    )

    result = estimator.run(dataset)
    assert_allclose(result["counts"].data[25, 25], 2)
    assert_allclose(result["background"].data[25, 25], 1.25)
    assert_allclose(result["significance"].data[25, 25], 3.079799117645, rtol=1e-2)
def extract_ring(datasets, exclusion_mask, r_in="1.0 deg", width="0.3 deg"):
    """Run the ring background maker on each dataset and stack the results.

    Each dataset is reduced to a 2D image (single squashed energy bin)
    before ring extraction, then stacked into one on-off dataset.
    """
    counts_geom = datasets[0].counts.geom
    energy_axis = counts_geom.axes["energy"]
    geom_image = counts_geom.to_image().to_cube([energy_axis.squash()])

    maker = RingBackgroundMaker(
        r_in=r_in, width=width, exclusion_mask=exclusion_mask
    )

    stacked = MapDatasetOnOff.create(
        geom=geom_image,
        energy_axis_true=datasets[0].exposure.geom.axes["energy_true"],
        name="stacked",
    )
    for dataset in datasets:
        stacked.stack(maker.run(dataset.to_image()))
    return stacked