def prepare_dataset(filename_dataset):
    """Prepare and write an IRF-filled MapDataset to ``filename_dataset``.

    Relies on module-level constants: IRF_FILE, POINTING, LIVETIME,
    WCS_GEOM, ENERGY_AXIS_TRUE, MIGRA_AXIS.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)
    observation = Observation.create(obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs)
    empty = MapDataset.create(WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS)
    # IRF maps only — "counts" is deliberately not in the selection.
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, observation)
    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def fake_dataset():
    """Build a small in-memory MapDataset with counts faked from a point source."""
    axis = MapAxis.from_energy_bounds(0.1, 10, 5, unit="TeV", name="energy")
    axis_true = MapAxis.from_energy_bounds(0.05, 20, 10, unit="TeV", name="energy_true")
    geom = WcsGeom.create(npix=50, binsz=0.02, axes=[axis])
    dataset = MapDataset.create(geom)
    dataset.psf = PSFMap.from_gauss(axis_true, sigma="0.05 deg")
    # Mark every pixel safe; use flat background and exposure.
    dataset.mask_safe += np.ones(dataset.data_shape, dtype=bool)
    dataset.background += 1
    dataset.exposure += 1e12 * u.cm ** 2 * u.s
    spatial_model = PointSpatialModel()
    spectral_model = PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2)
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="source"
    )
    dataset.models = [model]
    # Deterministic Poisson realisation.
    dataset.fake(random_state=42)
    return dataset
def test_map_maker_obs_with_migra(observations):
    """Run MapDatasetMaker with an explicit migration axis and check EDispMap shapes."""
    # Test for different spatial geoms and etrue, ereco bins
    migra = MapAxis.from_edges(np.linspace(0, 2.0, 50), unit="", name="migra")
    geom_reco = geom(ebounds=[0.1, 1, 10])
    e_true = MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy_true", unit="TeV", interp="log"
    )
    reference = MapDataset.create(
        geom=geom_reco, energy_axis_true=e_true, migra_axis=migra, binsz_irf=1.0
    )
    maker_obs = MapDatasetMaker()
    map_dataset = maker_obs.run(reference, observations[0])
    assert map_dataset.counts.geom == geom_reco
    # With a migra axis the maker should produce an EDispMap (not a kernel map).
    assert isinstance(map_dataset.edisp, EDispMap)
    assert map_dataset.edisp.edisp_map.data.shape == (3, 49, 5, 10)
    assert map_dataset.edisp.exposure_map.data.shape == (3, 1, 5, 10)
def test_safe_mask_maker(observations):
    """Exercise each SafeMaskMaker mask method against reference pixel counts."""
    obs = observations[0]
    axis = MapAxis.from_bounds(0.1, 10, nbin=16, unit="TeV", name="energy", interp="log")
    axis_true = MapAxis.from_bounds(0.1, 50, nbin=30, unit="TeV", name="energy_true", interp="log")
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)
    empty_dataset = MapDataset.create(geom=geom, energy_axis_true=axis_true)
    dataset_maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(offset_max="3 deg", bias_percent=0.02, position=obs.pointing_radec)
    dataset = dataset_maker.run(empty_dataset, obs)
    # Spatial offset cut.
    mask_offset = safe_mask_maker.make_mask_offset_max(dataset=dataset, observation=obs)
    assert_allclose(mask_offset.sum(), 109)
    # Energy cuts from the various safe-range estimators.
    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs)
    assert_allclose(mask_energy_aeff_default.data.sum(), 1936)
    mask_aeff_max = safe_mask_maker.make_mask_energy_aeff_max(dataset)
    assert_allclose(mask_aeff_max.data.sum(), 1210)
    mask_edisp_bias = safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert_allclose(mask_edisp_bias.data.sum(), 1815)
    mask_bkg_peak = safe_mask_maker.make_mask_energy_bkg_peak(dataset)
    assert_allclose(mask_bkg_peak.data.sum(), 1815)
def test_map_maker(pars, observations):
    """Stack several observations and compare counts/exposure/background to references."""
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        migra_axis=pars["migra"],
    )
    maker = MapDatasetMaker(background_oversampling=pars.get("background_oversampling"))
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    for obs in observations:
        # Reduce each run in a cutout around its pointing, then stack.
        cutout = stacked.cutout(position=obs.pointing_radec, width="4 deg")
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)
    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)
    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)
    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
    # The totals must survive projection to a 2D image dataset.
    image_dataset = stacked.to_image()
    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-4)
    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=1e-3)
    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
def simulate_map_dataset(random_state=0, name=None):
    """Simulate a CTA MapDataset containing a faked Gaussian source.

    Parameters
    ----------
    random_state : int
        Seed passed to ``MapDataset.fake``.
    name : str, optional
        Name for the created dataset.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")
    energy_edges = np.logspace(-1, 2, 15) * u.TeV
    energy_axis = MapAxis.from_edges(edges=energy_edges, name="energy", interp="log")
    geom = WcsGeom.create(skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic")
    gauss = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic")
    pwl = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=gauss, spectral_model=pwl, name="source")
    obs = Observation.create(
        pointing=skydir,
        livetime=1 * u.h,
        irfs=irfs,
        location=EarthLocation(lon="-70d18m58.84s", lat="-24d41m0.34s", height="2000m"),
    )
    empty = MapDataset.create(geom, name=name)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, obs)
    # Fake counts from background model + source model.
    bkg_model = FoVBackgroundModel(dataset_name=dataset.name)
    dataset.models = [bkg_model, skymodel]
    dataset.fake(random_state=random_state)
    return dataset
def get_map_dataset(sky_model, geom, geom_etrue, edisp="edispmap", name="test", **kwargs): """Returns a MapDatasets""" # define background model m = Map.from_geom(geom) m.quantity = 0.2 * np.ones(m.data.shape) background_model = BackgroundModel(m, datasets_names=[name]) psf = get_psf() exposure = get_exposure(geom_etrue) e_reco = geom.get_axis_by_name("energy") e_true = geom_etrue.get_axis_by_name("energy_true") if edisp == "edispmap": edisp = EDispMap.from_diagonal_response(energy_axis_true=e_true) elif edisp == "edispkernelmap": edisp = EDispKernelMap.from_diagonal_response(energy_axis=e_reco, energy_axis_true=e_true) elif edisp == "edispkernel": edisp = EDispKernel.from_diagonal_response(e_true=e_true.edges, e_reco=e_reco.edges) else: edisp = None # define fit mask center = sky_model.spatial_model.position circle = CircleSkyRegion(center=center, radius=1 * u.deg) mask_fit = background_model.map.geom.region_mask([circle]) mask_fit = Map.from_geom(geom, data=mask_fit) return MapDataset(models=[sky_model, background_model], exposure=exposure, psf=psf, edisp=edisp, mask_fit=mask_fit, name=name, **kwargs)
def test_map_dataset_geom(geom, sky_model):
    """Check geometry fallbacks when counts/background/mask_safe are removed."""
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5, name="energy_true")
    dataset = MapDataset.create(geom, energy_axis_true=e_true)
    dataset.counts = None
    dataset._background_model = None
    # background_model is read-only; assigning must raise.
    with pytest.raises(AttributeError):
        dataset.background_model = None
    dataset.models = sky_model
    npred = dataset.npred()
    assert npred.geom == geom
    dataset.mask_safe = None
    # With counts, background and mask all gone, the geometry is undefined.
    with pytest.raises(ValueError):
        dataset._geom
def test_map_dataset_on_off_fake(geom):
    """Fake counts for a MapDatasetOnOff; zero-acceptance pixels must stay empty."""
    # NOTE(review): rad axis is named "theta" here and passed as rad_axis —
    # presumably an older axis-name convention; confirm against MapDataset.create.
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")
    energy_true_axis = geom.get_axis_by_name("energy").copy(name="energy_true")
    empty_dataset = MapDataset.create(geom, energy_true_axis, rad_axis=rad_axis)
    empty_dataset = MapDatasetOnOff.from_map_dataset(empty_dataset, acceptance=1, acceptance_off=10.0)
    # Zero off-acceptance at the centre pixel: faked counts there must be 0.
    empty_dataset.acceptance_off.data[0, 50, 50] = 0
    background_map = Map.from_geom(geom, data=1)
    empty_dataset.fake(background_map, random_state=42)
    assert_allclose(empty_dataset.counts.data[0, 50, 50], 0)
    assert_allclose(empty_dataset.counts.data.mean(), 0.99445, rtol=1e-3)
    assert_allclose(empty_dataset.counts_off.data.mean(), 10.00055, rtol=1e-3)
def get_map_dataset(geom, geom_etrue, edisp="edispmap", name="test", **kwargs): """Returns a MapDatasets""" # define background model background = Map.from_geom(geom) background.data += 0.2 psf = get_psf() exposure = get_exposure(geom_etrue) e_reco = geom.axes["energy"] e_true = geom_etrue.axes["energy_true"] if edisp == "edispmap": edisp = EDispMap.from_diagonal_response(energy_axis_true=e_true) elif edisp == "edispkernelmap": edisp = EDispKernelMap.from_diagonal_response(energy_axis=e_reco, energy_axis_true=e_true) elif edisp == "edispkernel": edisp = EDispKernel.from_diagonal_response(energy_true=e_true.edges, energy=e_reco.edges) else: edisp = None # define fit mask center = SkyCoord("0.2 deg", "0.1 deg", frame="galactic") circle = CircleSkyRegion(center=center, radius=1 * u.deg) mask_fit = geom.region_mask([circle]) mask_fit = Map.from_geom(geom, data=mask_fit) models = FoVBackgroundModel(dataset_name=name) return MapDataset( models=models, exposure=exposure, background=background, psf=psf, edisp=edisp, mask_fit=mask_fit, name=name, **kwargs, )
def input_dataset():
    """Read the Poisson stats image and assemble a single-energy-bin 3D MapDataset."""
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    energy = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1)
    energy_true = MapAxis.from_energy_bounds("0.1 TeV", "1 TeV", 1, name="energy_true")
    # Promote each 2D image to a one-bin cube.
    counts2D = Map.read(filename, hdu="counts")
    counts = Map.from_geom(
        counts2D.geom.to_cube([energy]),
        data=counts2D.data[np.newaxis, :, :],
        unit=counts2D.unit,
    )
    exposure2D = Map.read(filename, hdu="exposure")
    exposure = Map.from_geom(
        exposure2D.geom.to_cube([energy_true]),
        data=exposure2D.data[np.newaxis, :, :],
        unit="cm2s",  # no unit in header?
    )
    background2D = Map.read(filename, hdu="background")
    background = Map.from_geom(
        background2D.geom.to_cube([energy]),
        data=background2D.data[np.newaxis, :, :],
        unit=background2D.unit,
    )
    name = "test-dataset"
    background_model = BackgroundModel(background, datasets_names=[name])
    # add mask: exclude the first 40 image rows from the safe region
    mask2D = np.ones_like(background2D.data).astype("bool")
    mask2D[0:40, :] = False
    mask = Map.from_geom(
        background2D.geom.to_cube([energy]),
        data=mask2D[np.newaxis, :, :],
    )
    return MapDataset(
        counts=counts,
        exposure=exposure,
        models=background_model,
        mask_safe=mask,
        name=name,
    )
def prepare_dataset():
    """Create a Datasets container holding one flat-IRF MapDataset."""
    energy = MapAxis.from_energy_bounds(0.1, 100, 5, per_decade=True, unit="TeV")
    energy_true = MapAxis.from_energy_bounds(0.1, 100, 5, unit="TeV", per_decade=True, name="energy_true")
    geom = WcsGeom.create(npix=500, binsz=0.01, axes=[energy])
    dataset = MapDataset.create(geom, energy_axis_true=energy_true)
    # Unit exposure; Gaussian PSF; Gaussian edisp with 10% sigma, zero bias.
    dataset.exposure += "1 m2 s"
    dataset.psf = PSFMap.from_gauss(energy_true)
    dataset.edisp = EDispKernelMap.from_gauss(energy, energy_true, 0.1, 0.)
    return Datasets([dataset])
def test_minimal_datastore():
    """Check that a standard analysis runs on a minimal datastore."""
    energy_axis = MapAxis.from_energy_bounds(1, 10, nbin=3, per_decade=False, unit="TeV", name="energy")
    geom = WcsGeom.create(
        skydir=(83.633, 22.014),
        binsz=0.5,
        width=(2, 2),
        frame="icrs",
        proj="CAR",
        axes=[energy_axis],
    )
    data_store = DataStore.from_dir("$GAMMAPY_DATA/tests/minimal_datastore")
    observations = data_store.get_observations()
    maker = MapDatasetMaker()
    offset_max = 2.3 * u.deg
    maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    # Exclude the source region from the FoV background fit.
    circle = CircleSkyRegion(center=SkyCoord("83.63 deg", "22.14 deg"), radius=0.2 * u.deg)
    exclusion_mask = ~geom.region_mask(regions=[circle])
    maker_fov = FoVBackgroundMaker(method="fit", exclusion_mask=exclusion_mask)
    stacked = MapDataset.create(geom=geom, name="crab-stacked")
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = maker_safe_mask.run(dataset, obs)
        dataset = maker_fov.run(dataset)
        stacked.stack(dataset)
    assert_allclose(stacked.exposure.data.sum(), 6.01909e10)
    assert_allclose(stacked.counts.data.sum(), 1446)
    assert_allclose(stacked.background.data.sum(), 1445.9841)
def create_datasets_3d(observations, target_position):
    """Reduce observations into per-run 3D MapDatasets around ``target_position``."""
    # Target geometry definition
    e_reco = MapAxis.from_energy_bounds(0.23, 20, 12, "TeV")
    e_true = MapAxis.from_energy_bounds(0.1, 40, 40, "TeV", name="energy_true")
    geom = WcsGeom.create(skydir=target_position, width=(2, 2), binsz=0.02, axes=[e_reco])
    # Exclude the target from the background normalisation region.
    exclusion_region = CircleSkyRegion(target_position, 0.3 * u.deg)
    exclusion_mask = geom.region_mask([exclusion_region], inside=False)
    offset_max = 2.0 * u.deg
    # data reduction makers
    maker = MapDatasetMaker()
    bkg_maker = FoVBackgroundMaker(method="scale", exclusion_mask=exclusion_mask)
    safe_mask_maker = SafeMaskMaker(methods=["aeff-max", "offset-max"], aeff_percent=10, offset_max=offset_max)
    datasets = []
    dataset_empty = MapDataset.create(geom=geom, energy_axis_true=e_true)
    for obs in observations:
        cutout = dataset_empty.cutout(obs.pointing_radec, width=2 * offset_max)
        # A MapDataset is filled in this cutout geometry
        dataset = maker.run(cutout, obs)
        # The data quality cut is applied
        dataset = safe_mask_maker.run(dataset, obs)
        # fit background model
        dataset = bkg_maker.run(dataset)
        print(
            f"Background norm obs {obs.obs_id}: {dataset.background_model.spectral_model.norm.value:.2f}"
        )
        datasets.append(dataset)
    return datasets
def test_safe_mask_maker_bkg_invalid(observations_hess_dl3):
    """Check that SafeMaskMaker flags invalid (NaN) background pixels as unsafe."""
    obs = observations_hess_dl3[0]
    axis = MapAxis.from_bounds(0.1, 10, nbin=16, unit="TeV", name="energy", interp="log")
    axis_true = MapAxis.from_bounds(0.1, 50, nbin=30, unit="TeV", name="energy_true", interp="log")
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)
    empty_dataset = MapDataset.create(geom=geom, energy_axis_true=axis_true)
    dataset_maker = MapDatasetMaker()
    # No explicit methods: only the always-on bkg-invalid check applies.
    safe_mask_maker_nonan = SafeMaskMaker([])
    dataset = dataset_maker.run(empty_dataset, obs)
    bkg = dataset.background.data
    # Inject an invalid background value.
    bkg[0, 0, 0] = np.nan
    mask_nonan = safe_mask_maker_nonan.make_mask_bkg_invalid(dataset)
    # FIX: idiomatic truthiness check instead of `== False` (PEP 8, flake8 E712).
    assert not mask_nonan[0, 0, 0]
    assert_allclose(bkg[mask_nonan].max(), 3.615932e+28)
    # TODO: change after disable IRF extrapolation:
    # assert_allclose(bkg[mask_nonan].max(), 20.656366)
    # Running the maker must apply exactly the invalid-background mask.
    dataset = safe_mask_maker_nonan.run(dataset, obs)
    assert_allclose(dataset.mask_safe, mask_nonan)
def test_map_maker_obs(observations):
    """Check geometries and IRF map shapes produced by MapDatasetMaker for one run."""
    # Test for different spatial geoms and etrue, ereco bins
    geom_reco = geom(ebounds=[0.1, 1, 10])
    e_true = MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy_true", unit="TeV", interp="log"
    )
    reference = MapDataset.create(
        geom=geom_reco, energy_axis_true=e_true, binsz_irf=1.0
    )
    maker_obs = MapDatasetMaker()
    map_dataset = maker_obs.run(reference, observations[0])
    assert map_dataset.counts.geom == geom_reco
    assert map_dataset.background_model.map.geom == geom_reco
    # IRF maps are stored on the coarser binsz_irf spatial grid.
    assert map_dataset.edisp.edisp_map.data.shape == (3, 48, 5, 10)
    assert map_dataset.edisp.exposure_map.data.shape == (3, 1, 5, 10)
    assert map_dataset.psf.psf_map.data.shape == (3, 66, 5, 10)
    assert map_dataset.psf.exposure_map.data.shape == (3, 1, 5, 10)
    assert_allclose(map_dataset.gti.time_delta, 1800.0 * u.s)
def test_map_dataset_maker_hpx(geom_config_hpx, observations):
    """Run map-making on a HEALPix geometry and validate masked dataset contents."""
    reference = MapDataset.create(**geom_config_hpx, binsz_irf=5 * u.deg)
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(offset_max="2.5 deg", methods=["aeff-default", "offset-max"])
    dataset = maker.run(reference, observation=observations[0])
    dataset = safe_mask_maker.run(dataset, observation=observations[0]).to_masked()
    assert_allclose(dataset.counts.data.sum(), 4264)
    assert_allclose(dataset.background.data.sum(), 2964.5369, rtol=1e-5)
    assert_allclose(dataset.exposure.data[4, 1000], 5.987e09, rtol=1e-4)
    # Pixel inside the offset cut is safe, one 3 deg away is not.
    coords = SkyCoord([0, 3], [0, 0], frame="galactic", unit="deg")
    coords = {"skycoord": coords, "energy": 1 * u.TeV}
    assert_allclose(dataset.mask_safe.get_by_coord(coords), [True, False])
    # Energy dispersion kernel rows should be normalised to ~1.
    kernel = dataset.edisp.get_edisp_kernel()
    assert_allclose(kernel.data.sum(axis=1)[3], 1, rtol=0.01)
def test_slice_by_idx():
    """Slice a MapDataset along energy and energy_true and check resulting shapes."""
    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=17)
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=31, name="energy_true"
    )
    geom = WcsGeom.create(
        skydir=(0, 0), binsz=0.5, width=(2, 2), frame="icrs", axes=[axis],
    )
    dataset = MapDataset.create(geom=geom, energy_axis_true=axis_etrue, binsz_irf=0.5)
    # Slicing reco energy leaves true-energy components untouched.
    slices = {"energy": slice(5, 10)}
    sub_dataset = dataset.slice_by_idx(slices)
    assert sub_dataset.counts.geom.data_shape == (5, 4, 4)
    assert sub_dataset.mask_safe.geom.data_shape == (5, 4, 4)
    assert sub_dataset.npred_background().geom.data_shape == (5, 4, 4)
    assert sub_dataset.exposure.geom.data_shape == (31, 4, 4)
    assert sub_dataset.edisp.edisp_map.geom.data_shape == (31, 5, 4, 4)
    assert sub_dataset.psf.psf_map.geom.data_shape == (31, 66, 4, 4)
    axis = sub_dataset.counts.geom.axes["energy"]
    assert_allclose(axis.edges[0].value, 0.387468, rtol=1e-5)
    # Slicing true energy leaves reco-energy components untouched.
    slices = {"energy_true": slice(5, 10)}
    sub_dataset = dataset.slice_by_idx(slices)
    assert sub_dataset.counts.geom.data_shape == (17, 4, 4)
    assert sub_dataset.mask_safe.geom.data_shape == (17, 4, 4)
    assert sub_dataset.npred_background().geom.data_shape == (17, 4, 4)
    assert sub_dataset.exposure.geom.data_shape == (5, 4, 4)
    assert sub_dataset.edisp.edisp_map.geom.data_shape == (5, 17, 4, 4)
    assert sub_dataset.psf.psf_map.geom.data_shape == (5, 66, 4, 4)
    axis = sub_dataset.counts.geom.axes["energy"]
    assert_allclose(axis.edges[0].value, 0.1, rtol=1e-5)
    axis = sub_dataset.exposure.geom.axes["energy_true"]
    assert_allclose(axis.edges[0].value, 0.210175, rtol=1e-5)
def test_compute_lima_image():
    """
    Test Li & Ma image against TS image for Tophat kernel
    """
    filename = "$GAMMAPY_DATA/tests/unbundled/poisson_stats_image/input_all.fits.gz"
    counts = Map.read(filename, hdu="counts")
    counts = image_to_cube(counts, "1 GeV", "100 GeV")
    background = Map.read(filename, hdu="background")
    background = image_to_cube(background, "1 GeV", "100 GeV")
    dataset = MapDataset(counts=counts, background=background)
    # Tophat correlation radius of 0.1 deg.
    estimator = ExcessMapEstimator("0.1 deg")
    result_lima = estimator.run(dataset)
    assert_allclose(result_lima["sqrt_ts"].data[:, 100, 100], 30.814916, atol=1e-3)
    assert_allclose(result_lima["sqrt_ts"].data[:, 1, 1], 0.164, atol=1e-3)
    assert_allclose(result_lima["npred_background"].data[:, 1, 1], 37, atol=1e-3)
    assert_allclose(result_lima["npred"].data[:, 1, 1], 38, atol=1e-3)
    assert_allclose(result_lima["npred_excess"].data[:, 1, 1], 1, atol=1e-3)
def simulate_events(filename_model, filename_dataset, nobs):
    """Simulate events for a given model and dataset.

    Parameters
    ----------
    filename_model : str
        Filename of the model definition.
    filename_dataset : str
        Filename of the dataset to use for simulation.
    nobs : int
        Number of observations to simulate.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)
    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)
    log.info(f"Reading {filename_model}")
    models = Models.read(filename_model)
    models.append(FoVBackgroundModel(dataset_name=dataset.name))
    dataset.models = models
    sampler = MapDatasetEventSampler(random_state=0)
    # FIX: use built-in range() instead of np.arange() — obs_id is a plain
    # scalar loop counter, so building a NumPy array is unnecessary.
    for obs_id in range(nobs):
        observation = Observation.create(
            obs_id=obs_id, pointing=POINTING, livetime=LIVETIME, irfs=irfs
        )
        events = sampler.run(dataset, observation)
        path = get_filename_events(filename_dataset, filename_model, obs_id)
        log.info(f"Writing {path}")
        path.parent.mkdir(exist_ok=True, parents=True)
        events.table.write(str(path), overwrite=True)
def test_adaptive_ring_bkg_maker(pars, geom, observations, exclusion_mask):
    """Run the adaptive ring background maker on an image dataset and check outputs."""
    adaptive_ring_bkg_maker = AdaptiveRingBackgroundMaker(
        r_in="0.2 deg",
        width="0.3 deg",
        r_out_max="2 deg",
        stepsize="0.2 deg",
        exclusion_mask=exclusion_mask,
        method=pars["method"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    map_dataset_maker = MapDatasetMaker(selection=["counts", "background", "exposure"])
    obs = observations[pars["obs_idx"]]
    dataset = MapDataset.create(geom).cutout(obs.pointing_radec, width="4 deg")
    dataset = map_dataset_maker.run(dataset, obs)
    dataset = safe_mask_maker.run(dataset, obs)
    # Ring background estimation operates on the 2D (summed) image.
    dataset = dataset.to_image()
    dataset_on_off = adaptive_ring_bkg_maker.run(dataset)
    mask = dataset.mask_safe
    assert_allclose(dataset_on_off.counts_off.data[mask].sum(), pars["counts_off"])
    assert_allclose(
        dataset_on_off.acceptance_off.data[mask].sum(),
        pars["acceptance_off"],
        rtol=1e-5,
    )
    assert_allclose(dataset_on_off.alpha.data[0][100][100], pars["alpha"], rtol=1e-5)
    assert_allclose(dataset_on_off.exposure.data[0][100][100], pars["exposure"], rtol=1e-5)
def test_stack(sky_model):
    """Stack two MapDatasets and verify combined npred, counts, masks and exposure."""
    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=3)
    geom = WcsGeom.create(
        skydir=(266.40498829, -28.93617776),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis],
    )
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=5, name="energy_true"
    )
    geom_etrue = WcsGeom.create(
        skydir=(266.40498829, -28.93617776),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis_etrue],
    )
    # Shared diagonal energy dispersion with unit exposure.
    edisp = EDispKernelMap.from_diagonal_response(
        energy_axis=axis, energy_axis_true=axis_etrue, geom=geom
    )
    edisp.exposure_map.quantity = (
        1e0 * u.m ** 2 * u.s * np.ones(edisp.exposure_map.data.shape)
    )
    # First dataset: flat 0.2 background, unit counts, 1e7 m2 s exposure.
    bkg1 = Map.from_geom(geom)
    bkg1.data += 0.2
    cnt1 = Map.from_geom(geom)
    cnt1.data = 1.0 * np.ones(cnt1.data.shape)
    exp1 = Map.from_geom(geom_etrue)
    exp1.quantity = 1e7 * u.m ** 2 * u.s * np.ones(exp1.data.shape)
    mask1 = Map.from_geom(geom)
    mask1.data = np.ones(mask1.data.shape, dtype=bool)
    mask1.data[0][:][5:10] = False
    dataset1 = MapDataset(
        counts=cnt1,
        background=bkg1,
        exposure=exp1,
        mask_safe=mask1,
        name="dataset-1",
        edisp=edisp,
        meta_table=Table({"OBS_ID": [0]}),
    )
    # Second dataset: same layout, 0.1 background, extra masked band.
    bkg2 = Map.from_geom(geom)
    bkg2.data = 0.1 * np.ones(bkg2.data.shape)
    cnt2 = Map.from_geom(geom)
    cnt2.data = 1.0 * np.ones(cnt2.data.shape)
    exp2 = Map.from_geom(geom_etrue)
    exp2.quantity = 1e7 * u.m ** 2 * u.s * np.ones(exp2.data.shape)
    mask2 = Map.from_geom(geom)
    mask2.data = np.ones(mask2.data.shape, dtype=bool)
    mask2.data[0][:][5:10] = False
    mask2.data[1][:][10:15] = False
    dataset2 = MapDataset(
        counts=cnt2,
        background=bkg2,
        exposure=exp2,
        mask_safe=mask2,
        name="dataset-2",
        edisp=edisp,
        meta_table=Table({"OBS_ID": [1]}),
    )
    background_model2 = FoVBackgroundModel(dataset_name="dataset-2")
    background_model1 = FoVBackgroundModel(dataset_name="dataset-1")
    dataset1.models = [background_model1, sky_model]
    dataset2.models = [background_model2, sky_model]
    dataset1.stack(dataset2)
    dataset1.models = [sky_model]
    npred_b = dataset1.npred()
    assert_allclose(npred_b.data.sum(), 1459.985035, 1e-5)
    assert_allclose(dataset1.npred_background().data.sum(), 1360.00, 1e-5)
    assert_allclose(dataset1.counts.data.sum(), 9000, 1e-5)
    assert_allclose(dataset1.mask_safe.data.sum(), 4600)
    assert_allclose(dataset1.exposure.data.sum(), 1.6e11)
    # Stacking concatenates the meta tables.
    assert_allclose(dataset1.meta_table["OBS_ID"][0], [0, 1])
def test_map_dataset_fits_io(tmp_path, sky_model, geom, geom_etrue):
    """Round-trip a MapDataset through FITS and check HDU layout and contents."""
    dataset = get_map_dataset(geom, geom_etrue)
    bkg_model = FoVBackgroundModel(dataset_name=dataset.name)
    dataset.models = [sky_model, bkg_model]
    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit
    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    dataset.gti = gti
    # Check the HDU names in the serialised file.
    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]
    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP",
        "EDISP_BANDS",
        "EDISP_EXPOSURE",
        "EDISP_EXPOSURE_BANDS",
        "PSF",
        "PSF_BANDS",
        "PSF_EXPOSURE",
        "PSF_EXPOSURE_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
        "GTI",
    ]
    assert actual == desired
    # Write and re-read; all data and geometries must round-trip.
    dataset.write(tmp_path / "test.fits")
    dataset_new = MapDataset.read(tmp_path / "test.fits")
    assert dataset_new.mask.data.dtype == bool
    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(
        dataset.npred_background().data, dataset_new.npred_background().data
    )
    assert_allclose(dataset.edisp.edisp_map.data, dataset_new.edisp.edisp_map.data)
    assert_allclose(dataset.psf.psf_map.data, dataset_new.psf.psf_map.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit.data, dataset_new.mask_fit.data)
    assert_allclose(dataset.mask_safe.data, dataset_new.mask_safe.data)
    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.npred_background().geom == dataset_new.npred_background().geom
    assert dataset.edisp.edisp_map.geom == dataset_new.edisp.edisp_map.geom
    assert_allclose(
        dataset.gti.time_sum.to_value("s"), dataset_new.gti.time_sum.to_value("s")
    )
    # To test io of psf and edisp map
    stacked = MapDataset.create(geom)
    stacked.write(tmp_path / "test-2.fits", overwrite=True)
    stacked1 = MapDataset.read(tmp_path / "test-2.fits")
    assert stacked1.psf.psf_map is not None
    assert stacked1.psf.exposure_map is not None
    assert stacked1.edisp.edisp_map is not None
    assert stacked1.edisp.exposure_map is not None
    assert stacked.mask.data.dtype == bool
    assert_allclose(stacked1.psf.psf_map, stacked.psf.psf_map)
    assert_allclose(stacked1.edisp.edisp_map, stacked.edisp.edisp_map)
def reference(geom):
    """Return a fresh, empty MapDataset built on the given geometry."""
    empty = MapDataset.create(geom)
    return empty
def test_interpolate_map_dataset():
    """Check interpolation of map-based IRFs (HAWC-style) onto an analysis geometry."""
    energy = MapAxis.from_energy_bounds("1 TeV", "300 TeV", nbin=5, name="energy")
    energy_true = MapAxis.from_nodes(np.logspace(-1, 3, 20), name="energy_true", interp="log", unit="TeV")
    # make dummy map IRFs on a very coarse all-sky grid
    geom_allsky = WcsGeom.create(npix=(5, 3), proj="CAR", binsz=60, axes=[energy], skydir=(0, 0))
    geom_allsky_true = geom_allsky.drop('energy').to_cube([energy_true])
    # background: constant value everywhere
    value = 30
    bkg_map = Map.from_geom(geom_allsky, unit="")
    bkg_map.data = value*np.ones(bkg_map.data.shape)
    # effective area - with a gradient that also depends on energy
    aeff_map = Map.from_geom(geom_allsky_true, unit="cm2 s")
    ra_arr = np.arange(aeff_map.data.shape[1])
    dec_arr = np.arange(aeff_map.data.shape[2])
    for i in np.arange(aeff_map.data.shape[0]):
        aeff_map.data[i, :, :] = (i+1)*10*np.meshgrid(dec_arr, ra_arr)[0]+10*np.meshgrid(dec_arr, ra_arr)[1]+10
    # TELESCOP keyword triggers the map-IRF (HAWC-style) code path.
    aeff_map.meta["TELESCOP"] = "HAWC"
    # psf map
    width = 0.2*u.deg
    rad_axis = MapAxis.from_nodes(np.linspace(0, 2, 50), name="rad", unit="deg")
    psfMap = PSFMap.from_gauss(energy_true, rad_axis, width)
    # edispmap
    edispmap = EDispKernelMap.from_gauss(energy, energy_true, sigma=0.1, bias=0.0, geom=geom_allsky)
    # events and gti
    nr_ev = 10
    ev_t = Table()
    gti_t = Table()
    ev_t['EVENT_ID'] = np.arange(nr_ev)
    ev_t['TIME'] = nr_ev*[Time('2011-01-01 00:00:00', scale='utc', format='iso')]
    ev_t['RA'] = np.linspace(-1, 1, nr_ev)*u.deg
    ev_t['DEC'] = np.linspace(-1, 1, nr_ev)*u.deg
    ev_t['ENERGY'] = np.logspace(0, 2, nr_ev)*u.TeV
    gti_t['START'] = [Time('2010-12-31 00:00:00', scale='utc', format='iso')]
    gti_t['STOP'] = [Time('2011-01-02 00:00:00', scale='utc', format='iso')]
    events = EventList(ev_t)
    gti = GTI(gti_t)
    # define observation
    obs = Observation(
        obs_id=0,
        obs_info={},
        gti=gti,
        aeff=aeff_map,
        edisp=edispmap,
        psf=psfMap,
        bkg=bkg_map,
        events=events,
        obs_filter=None,
    )
    # define analysis geometry (much finer than the IRF grid)
    geom_target = WcsGeom.create(
        skydir=(0, 0), width=(10, 10), binsz=0.1*u.deg, axes=[energy]
    )
    maker = MapDatasetMaker(selection=["exposure", "counts", "background", "edisp", "psf"])
    dataset = MapDataset.create(geom=geom_target, energy_axis_true=energy_true, name="test")
    dataset = maker.run(dataset, obs)
    # test counts
    assert dataset.counts.data.sum() == nr_ev
    # test background
    coords_bg = {
        'skycoord' : SkyCoord("0 deg", "0 deg"),
        'energy' : energy.center[0]
    }
    assert_allclose(
        dataset.background_model.evaluate().get_by_coord(coords_bg)[0],
        value, atol=1e-7)
    # test effective area
    coords_aeff = {
        'skycoord' : SkyCoord("0 deg", "0 deg"),
        'energy_true' : energy_true.center[0]
    }
    assert_allclose(
        aeff_map.get_by_coord(coords_aeff)[0]/dataset.exposure.get_by_coord(coords_aeff)[0],
        1, atol=1e-3)
    # test edispmap
    pdfmatrix_preinterp = edispmap.get_edisp_kernel(SkyCoord("0 deg", "0 deg")).pdf_matrix
    pdfmatrix_postinterp = dataset.edisp.get_edisp_kernel(SkyCoord("0 deg", "0 deg")).pdf_matrix
    assert_allclose(pdfmatrix_preinterp, pdfmatrix_postinterp, atol=1e-7)
    # test psfmap
    geom_psf = geom_target.drop('energy').to_cube([energy_true])
    psfkernel_preinterp = psfMap.get_psf_kernel(SkyCoord("0 deg", "0 deg"), geom_psf, max_radius=2*u.deg).data
    psfkernel_postinterp = dataset.psf.get_psf_kernel(SkyCoord("0 deg", "0 deg"), geom_psf, max_radius=2*u.deg).data
    assert_allclose(psfkernel_preinterp, psfkernel_postinterp, atol=1e-4)
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    datasets_settings = self.config.datasets
    log.info("Creating geometry.")
    geom = self._create_geometry()
    geom_settings = datasets_settings.geom
    # Optional IRF geometry: true-energy axis plus coarser IRF spatial binning.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None)
    if geom_settings.axes.energy_true.min is not None:
        geom_irf["energy_axis_true"] = self._make_energy_axis(
            geom_settings.axes.energy_true, name="energy_true")
        geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
    offset_max = geom_settings.selection.offset_max
    log.info("Creating datasets.")
    maker = MapDatasetMaker(selection=datasets_settings.map_selection)
    safe_mask_selection = datasets_settings.safe_mask.methods
    safe_mask_settings = datasets_settings.safe_mask.parameters
    maker_safe_mask = SafeMaskMaker(methods=safe_mask_selection, **safe_mask_settings)
    # Configure the background maker from the settings.
    bkg_maker_config = {}
    if datasets_settings.background.exclusion:
        exclusion_region = Map.read(datasets_settings.background.exclusion)
        bkg_maker_config["exclusion_mask"] = exclusion_region
    bkg_maker_config.update(datasets_settings.background.parameters)
    bkg_method = datasets_settings.background.method
    if bkg_method == "fov_background":
        log.debug(
            f"Creating FoVBackgroundMaker with arguments {bkg_maker_config}"
        )
        bkg_maker = FoVBackgroundMaker(**bkg_maker_config)
    elif bkg_method == "ring":
        bkg_maker = RingBackgroundMaker(**bkg_maker_config)
        log.debug(
            f"Creating RingBackgroundMaker with arguments {bkg_maker_config}"
        )
        # Ring background only makes sense for a single energy bin.
        if datasets_settings.geom.axes.energy.nbins > 1:
            raise ValueError(
                "You need to define a single-bin energy geometry for your dataset."
            )
    else:
        bkg_maker = None
        log.warning(
            f"No background maker set for 3d analysis. Check configuration."
        )
    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
    if datasets_settings.stack:
        # Stacked analysis: reduce each observation in a cutout, then stack.
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset)
                # Ring maker returns an on-off dataset; convert before stacking.
                if bkg_method == "ring":
                    dataset = dataset.to_map_dataset()
            log.debug(dataset)
            stacked.stack(dataset)
        datasets = [stacked]
    else:
        # Joint analysis: keep one dataset per observation.
        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
            dataset = maker.run(cutout, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            if bkg_maker is not None:
                dataset = bkg_maker.run(dataset)
            log.debug(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def simulate():
    """Simulate N_OBS time-binned datasets of an exponentially decaying source.

    Relies on module-level constants N_OBS and gti_t0.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    center = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    energy_reco = MapAxis.from_edges(np.logspace(-1.0, 1.0, 10), unit="TeV", name="energy", interp="log")
    pointing = SkyCoord(0.5, 0.5, unit="deg", frame="galactic")
    geom = WcsGeom.create(
        skydir=center, binsz=0.02, width=(4, 4), frame="galactic", axes=[energy_reco],
    )
    energy_true = MapAxis.from_edges(np.logspace(-1.5, 1.5, 30), unit="TeV", name="energy_true", interp="log")
    spectral_model = PowerLawSpectralModel(index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV")
    temporal_model = ExpDecayTemporalModel(t0="6 h", t_ref=gti_t0.mjd * u.d)
    spatial_model = GaussianSpatialModel(lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.3 deg", frame="galactic")
    model_simu = SkyModel(
        spectral_model=spectral_model,
        spatial_model=spatial_model,
        temporal_model=temporal_model,
        name="model-simu",
    )
    lvtm = np.ones(N_OBS) * 1.0 * u.hr
    tstart = 1.0 * u.hr
    datasets = []
    for i in range(N_OBS):
        obs = Observation.create(
            pointing=pointing, livetime=lvtm[i], tstart=tstart, irfs=irfs, reference_time=gti_t0,
        )
        empty = MapDataset.create(geom, name=f"dataset_{i}", energy_axis_true=energy_true)
        maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
        maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=4.0 * u.deg)
        dataset = maker.run(empty, obs)
        dataset = maker_safe_mask.run(dataset, obs)
        dataset.models = [
            model_simu, FoVBackgroundModel(dataset_name=dataset.name)
        ]
        dataset.fake()
        datasets.append(dataset)
        # Observations start 2 h apart.
        tstart = tstart + 2.0 * u.hr
    return datasets
def make_datasets_example():
    """Create the example 'gc'/'g09' stacked datasets and write them to disk."""
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )

    def _make_geom(skydir):
        # Both example geometries share everything except the sky direction.
        return WcsGeom.create(
            skydir=skydir,
            binsz=0.1,
            width=(2, 2),
            frame="galactic",
            proj="CAR",
            axes=[energy_axis],
        )

    geoms = [_make_geom((0, 0)), _make_geom((1, 0))]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]

    models = Models()
    for name, (lon, lat) in zip(names, sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        models.append(
            SkyModel(
                spatial_model=spatial_model,
                spectral_model=spectral_model,
                name=name,
            )
        )

    # Link the two sources so they share a single reference-energy parameter.
    models["gc"].spectral_model.reference = models["g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_spatial = TemplateSpatialModel.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz"
    )
    diffuse_model = SkyModel(PowerLawSpectralModel(), diffuse_spatial)

    maker = MapDatasetMaker()
    datasets = Datasets()
    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])
        for obs in observations:
            stacked.stack(maker.run(stacked, obs))
        # Keep the background model produced by stacking, but put the source
        # model and the diffuse component in front of it.
        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )
def test_safe_mask_maker(observations, caplog):
    """Exercise each individual mask-construction method of SafeMaskMaker."""
    obs = observations[0]

    axis = MapAxis.from_bounds(
        0.1, 10, nbin=16, unit="TeV", name="energy", interp="log"
    )
    axis_true = MapAxis.from_bounds(
        0.1, 50, nbin=30, unit="TeV", name="energy_true", interp="log"
    )
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)
    empty_dataset = MapDataset.create(geom=geom, energy_axis_true=axis_true)

    dataset_maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(
        offset_max="3 deg", bias_percent=0.02, position=obs.pointing_radec
    )
    fixed_offset = 1.5 * u.deg
    safe_mask_maker_offset = SafeMaskMaker(
        offset_max="3 deg", bias_percent=0.02, fixed_offset=fixed_offset
    )

    dataset = dataset_maker.run(empty_dataset, obs)

    # Maximum-offset mask.
    mask_offset = safe_mask_maker.make_mask_offset_max(
        dataset=dataset, observation=obs
    )
    assert_allclose(mask_offset.sum(), 109)

    # Default effective-area threshold mask.
    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs
    )
    assert_allclose(mask_energy_aeff_default.data.sum(), 1936)

    # Effective-area-maximum mask, with and without a fixed offset.
    mask_aeff_max = safe_mask_maker.make_mask_energy_aeff_max(dataset)
    mask_aeff_max_offset = safe_mask_maker_offset.make_mask_energy_aeff_max(
        dataset, obs
    )
    assert_allclose(mask_aeff_max.data.sum(), 1210)
    assert_allclose(mask_aeff_max_offset.data.sum(), 1210)

    # Energy-dispersion-bias mask, with and without a fixed offset.
    mask_edisp_bias = safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    mask_edisp_bias_offset = safe_mask_maker_offset.make_mask_energy_edisp_bias(
        dataset, obs
    )
    assert_allclose(mask_edisp_bias.data.sum(), 1815)
    assert_allclose(mask_edisp_bias_offset.data.sum(), 1694)

    # Background-peak mask.
    mask_bkg_peak = safe_mask_maker.make_mask_energy_bkg_peak(dataset)
    assert_allclose(mask_bkg_peak.data.sum(), 1815)

    # A warning about missing default thresholds must have been logged.
    levels = [record.levelname for record in caplog.records]
    messages = [record.message for record in caplog.records]
    assert "WARNING" in levels
    assert "No default thresholds defined for obs 110380" in messages

    # Negative percent thresholds: the resulting masks match the totals above.
    safe_mask_maker_noroot = SafeMaskMaker(
        offset_max="3 deg", aeff_percent=-10, bias_percent=-10
    )
    mask_aeff_max_noroot = safe_mask_maker_noroot.make_mask_energy_aeff_max(dataset)
    mask_edisp_bias_noroot = safe_mask_maker_noroot.make_mask_energy_edisp_bias(
        dataset
    )
    assert_allclose(mask_aeff_max_noroot.data.sum(), 1815)
    assert_allclose(mask_edisp_bias_noroot.data.sum(), 1936)
def test_stack_npred():
    """Stacking two datasets must reproduce the sum of their masked npred maps."""
    model = SkyModel(PowerLawSpectralModel(), GaussianSpatialModel(sigma="0.2 deg"))

    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=5)
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=11, name="energy_true"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis],
    )

    def _make_dataset(name, tstart, tstop, emin):
        # Build a dataset with unit exposure, a flat background template,
        # the shared sky model, and a safe-energy cut starting at `emin`.
        dataset = MapDataset.create(
            geom,
            energy_axis_true=axis_etrue,
            name=name,
            gti=GTI.create(tstart, tstop),
        )
        dataset.psf = None
        dataset.exposure.data += 1
        dataset.mask_safe.data = geom.energy_mask(emin=emin)
        dataset.models[f"{name}-bkg"].map.data += 1
        dataset.models.append(model)
        return dataset

    dataset_1 = _make_dataset("dataset-1", "0 min", "30 min", 1 * u.TeV)
    dataset_2 = _make_dataset("dataset-2", "30 min", "60 min", 0.2 * u.TeV)

    def _masked_npred(dataset):
        # Predicted counts with everything outside the safe mask zeroed out.
        npred = dataset.npred()
        npred.data[~dataset.mask_safe.data] = 0
        return npred

    # Reference: sum the per-dataset masked npred maps by hand.
    stacked_npred = Map.from_geom(geom)
    stacked_npred.stack(_masked_npred(dataset_1))
    stacked_npred.stack(_masked_npred(dataset_2))

    # Stack the datasets themselves and compare the resulting npred.
    stacked = MapDataset.create(geom, energy_axis_true=axis_etrue, name="stacked")
    stacked.stack(dataset_1)
    stacked.stack(dataset_2)
    npred_stacked = stacked.npred()

    assert_allclose(npred_stacked.data, stacked_npred.data)