def test_mask_shape():
    """FluxPointsEstimator must raise ValueError for datasets with mismatched geometries."""
    energy_axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")

    geom_small = WcsGeom.create(binsz=1, width=3, axes=[energy_axis])
    geom_large = WcsGeom.create(binsz=1, width=5, axes=[energy_axis])

    dataset_small = MapDataset.create(geom_small)
    dataset_large = MapDataset.create(geom_large)

    # Strip IRFs so only the mask-shape mismatch matters.
    for dataset in (dataset_small, dataset_large):
        dataset.psf = None
        dataset.edisp = None

    model = SkyModel(
        spectral_model=PowerLawSpectralModel(),
        spatial_model=GaussianSpatialModel(),
    )
    dataset_small.model = model
    dataset_large.model = model

    fpe = FluxPointsEstimator(
        datasets=[dataset_large, dataset_small],
        e_edges=[1, 10] * u.TeV,
        source="source",
    )

    with pytest.raises(ValueError):
        fpe.run()
def test_mask_shape():
    """Flux points can be estimated from datasets with different spatial geometries."""
    energy_axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")

    geom_small = WcsGeom.create(binsz=1, width=3, axes=[energy_axis])
    geom_large = WcsGeom.create(binsz=1, width=5, axes=[energy_axis])

    dataset_small = MapDataset.create(geom_small)
    dataset_large = MapDataset.create(geom_large)

    # Strip IRFs; they are irrelevant for this shape-compatibility check.
    for dataset in (dataset_small, dataset_large):
        dataset.psf = None
        dataset.edisp = None

    model = SkyModel(
        spectral_model=PowerLawSpectralModel(),
        spatial_model=GaussianSpatialModel(),
        name="source",
    )
    dataset_small.models = model
    dataset_large.models = model

    fpe = FluxPointsEstimator(
        datasets=[dataset_large, dataset_small],
        e_edges=[1, 10] * u.TeV,
        source="source",
    )
    fp = fpe.run()
    # Empty datasets: all measured counts are zero.
    assert_allclose(fp.table["counts"], 0)
def test_datasets_io_no_model(tmpdir):
    """Datasets without attached models can still be written to FITS files."""
    energy_axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    geom = WcsGeom.create(npix=(5, 5), axes=[energy_axis])

    datasets = Datasets(
        [MapDataset.create(geom, name="1"), MapDataset.create(geom, name="2")]
    )
    datasets.write(path=tmpdir, prefix="test")

    # One data file per dataset must have been produced.
    for filename in ("test_data_1.fits", "test_data_2.fits"):
        assert (tmpdir / filename).exists()
def test_safe_mask_maker(observations):
    """Test SafeMaskMaker offset and aeff-default masks on one observation.

    Fix: the final ``pytest.raises(NotImplementedError)`` block was duplicated
    verbatim (copy-paste); the redundant second copy is removed — it asserted
    exactly the same call and message again.
    """
    obs = observations[0]

    axis = MapAxis.from_edges([0.1, 1, 10], name="energy", interp="log", unit="TeV")
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)

    empty_dataset = MapDataset.create(geom=geom)
    dataset_maker = MapDatasetMaker(offset_max="3 deg")
    safe_mask_maker = SafeMaskMaker(offset_max="3 deg")

    dataset = dataset_maker.run(empty_dataset, obs)

    mask_offset = safe_mask_maker.make_mask_offset_max(dataset=dataset, observation=obs)
    assert_allclose(mask_offset.sum(), 109)

    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs
    )
    assert_allclose(mask_energy_aeff_default.sum(), 242)

    # The edisp-bias based safe mask is not implemented for map datasets.
    with pytest.raises(NotImplementedError) as excinfo:
        safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert "only supported" in str(excinfo.value)
def simulate_map_dataset(random_state=0):
    """Simulate a CTA map dataset containing a single Gaussian source.

    Parameters
    ----------
    random_state : int
        Seed forwarded to ``MapDataset.fake``.
    """
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    pointing = SkyCoord("0 deg", "0 deg", frame="galactic")

    energy_axis = MapAxis.from_edges(
        edges=np.logspace(-1, 2, 15) * u.TeV, name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=pointing, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic"
    )

    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="source"
    )

    obs = Observation.create(pointing=pointing, livetime=1 * u.h, irfs=irfs)
    empty = MapDataset.create(geom)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, obs)

    dataset.models = skymodel
    dataset.fake(random_state=random_state)
    return dataset
def test_create(geom, geom_etrue):
    """Empty MapDataset.create with custom migra, rad and true-energy axes."""
    # NOTE(review): the `geom` fixture argument is shadowed below and
    # `geom_etrue` is unused — the test builds its own geometries.
    migra_axis = MapAxis(nodes=np.linspace(0.0, 3.0, 51), unit="", name="migra")
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")
    e_reco = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 3), name="energy", unit=u.TeV, interp="log"
    )
    e_true = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), name="energy", unit=u.TeV, interp="log"
    )
    geom = WcsGeom.create(binsz=0.02, width=(2, 2), axes=[e_reco])

    dataset = MapDataset.create(
        geom=geom, energy_axis_true=e_true, migra_axis=migra_axis, rad_axis=rad_axis
    )

    # Counts follow reco binning (2 bins), exposure true binning (3 bins).
    assert dataset.counts.data.shape == (2, 100, 100)
    assert dataset.exposure.data.shape == (3, 100, 100)

    assert dataset.psf.psf_map.data.shape == (3, 50, 10, 10)
    assert dataset.psf.exposure_map.data.shape == (3, 1, 10, 10)

    assert dataset.edisp.edisp_map.data.shape == (3, 50, 10, 10)
    assert dataset.edisp.exposure_map.data.shape == (3, 1, 10, 10)
    assert_allclose(dataset.edisp.edisp_map.data.sum(), 300)

    assert_allclose(dataset.gti.time_delta, 0.0 * u.s)
def prepare_dataset_simple(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    # Replace the energy dispersion by a simple bias-free Gaussian.
    irfs["edisp"] = EnergyDispersion2D.from_gauss(
        e_true=ENERGY_AXIS_TRUE.edges,
        migra=MIGRA_AXIS.edges,
        sigma=0.1,
        bias=0,
        offset=[0, 2, 4, 6, 8] * u.deg,
    )

    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )

    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    maker = MapDatasetMaker(selection=["exposure", "edisp", "psf", "background"])
    dataset = maker.run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def test_adaptive_ring_bkg_maker(pars, geom, observations, exclusion_mask):
    """Adaptive ring background estimation on a single observation."""
    ring_maker = AdaptiveRingBackgroundMaker(
        r_in="0.2 deg",
        width="0.3 deg",
        r_out_max="2 deg",
        stepsize="0.2 deg",
        exclusion_mask=exclusion_mask,
        method=pars["method"],
    )
    mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    dataset_maker = MapDatasetMaker(offset_max="2 deg")

    obs = observations[pars["obs_idx"]]
    dataset = dataset_maker.run(MapDataset.create(geom), obs)
    dataset = mask_maker.run(dataset, obs)
    dataset = dataset.to_image()

    dataset_on_off = ring_maker.run(dataset)
    mask = dataset.mask_safe

    assert_allclose(dataset_on_off.counts_off.data[mask].sum(), pars["counts_off"])
    assert_allclose(
        dataset_on_off.acceptance_off.data[mask].sum(), pars["acceptance_off"]
    )
    assert_allclose(dataset_on_off.alpha.data[0][100][100], pars["alpha"])
    assert_allclose(dataset_on_off.exposure.data[0][100][100], pars["exposure"])
def test_ring_bkg_maker(geom, observations, exclusion_mask):
    """Ring background estimation over several observations."""
    ring_bkg_maker = RingBackgroundMaker(
        r_in="0.2 deg", width="0.3 deg", exclusion_mask=exclusion_mask
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    map_dataset_maker = MapDatasetMaker(offset_max="2 deg")
    reference = MapDataset.create(geom)

    datasets = []
    for obs in observations:
        dataset = map_dataset_maker.run(reference, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        dataset = dataset.to_image()
        datasets.append(ring_bkg_maker.run(dataset))

    # NOTE(review): `mask` is taken from the *last* loop dataset but applied to
    # all results; fine here since the observations share the same geometry.
    mask = dataset.mask_safe
    assert_allclose(datasets[0].counts_off.data[mask].sum(), 2511333)
    assert_allclose(datasets[1].counts_off.data[mask].sum(), 2143577.0)
    assert_allclose(datasets[0].acceptance_off.data[mask].sum(), 2961300)
    assert_allclose(datasets[1].acceptance_off.data[mask].sum(), 2364657.2)
    assert_allclose(datasets[0].alpha.data[0][100][100], 0.00063745599)
    assert_allclose(datasets[0].exposure.data[0][100][100], 806254444.8480084)
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    datasets_settings = self.settings["datasets"]
    geom = self._create_geometry(datasets_settings["geom"])

    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in datasets_settings:
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(
            **datasets_settings["energy-axis-true"]
        )
    geom_irf["binsz_irf"] = datasets_settings.get("binsz", None)
    geom_irf["margin_irf"] = datasets_settings.get("margin", None)

    offset_max = Angle(datasets_settings["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(geom=geom, offset_max=offset_max, **geom_irf)

    if datasets_settings["stack-datasets"]:
        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
        for obs in self.observations:
            stacked.stack(maker.run(obs))
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)

    self.datasets = Datasets(datasets)
def empty_dataset(source_pos_radec, map_geom, e_reco_binning, livetime, irf_file, offset):
    """Create an IRF-only MapDataset for a pointing offset from the source.

    Fixes: the identical log-spaced edge array was built twice (once for the
    reco axis, once for the true axis) — now computed once; the nonstandard
    ``u.quantity.Quantity`` spelling is replaced by the public ``u.Quantity``;
    ``.to(...).value`` two-steps are collapsed to ``.to_value(...)``.

    Parameters are plain config mappings: ``source_pos_radec`` has "ra"/"dec",
    ``map_geom`` has "binsize"/"width", ``e_reco_binning`` has
    "e_reco_min"/"e_reco_max"/"n_e_reco".
    """
    source = SkyCoord(
        source_pos_radec["ra"], source_pos_radec["dec"], unit="deg", frame="icrs"
    )

    e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to_value("TeV")
    e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to_value("TeV")
    n_e_reco = e_reco_binning["n_e_reco"]

    # Reco and true energy share the same log-spaced edges (two separate axes).
    edges = np.logspace(np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco)
    energy_axis = MapAxis.from_edges(edges, unit="TeV", name="energy", interp="log")
    energy_axis_true = MapAxis.from_edges(edges, unit="TeV", name="energy", interp="log")

    width = u.Quantity(map_geom["width"]).to_value("deg")
    geom = WcsGeom.create(
        skydir=source,
        binsz=u.Quantity(map_geom["binsize"]).to_value("deg"),
        width=(width, width),
        frame="icrs",
        axes=[energy_axis],
    )

    # Pointing displaced from the source by `offset` in declination.
    pointing = SkyCoord(
        u.Quantity(source_pos_radec["ra"]).to("deg"),
        u.Quantity(source_pos_radec["dec"]).to("deg") + offset,
        frame="icrs",
        unit="deg",
    )

    irfs = load_cta_irfs(irf_file)
    obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)

    empty = MapDataset.create(geom, energy_axis_true=energy_axis_true)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    maker_safe_mask = SafeMaskMaker(
        methods=["offset-max"],
        offset_max=u.Quantity(map_geom["width"]) + 1.0 * u.deg,
    )
    dataset = maker.run(empty, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    return dataset
def test_map_maker_obs(observations):
    """Map making for one observation with distinct e_true/e_reco binnings."""
    geom_reco = geom(ebounds=[0.1, 1, 10])
    e_true = MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy", unit="TeV", interp="log"
    )
    geom_exp = geom(ebounds=[0.1, 0.5, 2.5, 10.0])
    reference = MapDataset.create(
        geom=geom_reco, energy_axis_true=e_true, binsz_irf=1.0, margin_irf=1.0
    )

    maker_obs = MapDatasetMaker(offset_max=2.0 * u.deg, cutout=False)
    dataset = maker_obs.run(reference, observations[0])

    # Counts/background follow the reco geometry, exposure the true-energy one.
    assert dataset.counts.geom == geom_reco
    assert dataset.background_model.map.geom == geom_reco
    assert dataset.exposure.geom == geom_exp

    assert dataset.edisp.edisp_map.data.shape == (3, 48, 6, 11)
    assert dataset.edisp.exposure_map.data.shape == (3, 1, 6, 11)
    assert dataset.psf.psf_map.data.shape == (3, 66, 6, 11)
    assert dataset.psf.exposure_map.data.shape == (3, 1, 6, 11)

    assert_allclose(dataset.gti.time_delta, 1800.0 * u.s)
    assert dataset.name == "obs_110380"
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    settings = self.settings["datasets"]
    geom = self._create_geometry(settings["geom"])

    # IRF geometry: explicit config entry, otherwise a coarser copy of `geom`.
    if "geom-irf" in settings:
        geom_irf = self._create_geometry(settings["geom-irf"])
    else:
        geom_irf = geom.to_binsz(binsz=BINSZ_IRF)

    offset_max = Angle(settings["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(geom=geom, geom_true=geom_irf, offset_max=offset_max)

    if settings["stack-datasets"]:
        stacked = MapDataset.create(geom=geom, geom_irf=geom_irf, name="stacked")
        for obs in self.observations:
            stacked.stack(maker.run(obs))
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)

    self.datasets = Datasets(datasets)
def simple_dataset():
    """A 50x50 pixel, single-energy-bin dataset with flat counts and background."""
    energy_axis = MapAxis.from_energy_bounds(0.1, 10, 1, unit="TeV")
    geom = WcsGeom.create(npix=50, binsz=0.02, axes=[energy_axis])
    dataset = MapDataset.create(geom)
    # Fill uniform values: safe mask everywhere, 2 counts and 1 background per pixel.
    dataset.mask_safe += 1
    dataset.counts += 2
    dataset.background_model.map += 1
    return dataset
def obs_dataset(geom, observation):
    """Run map and safe-mask makers on a 4 deg cutout around the pointing."""
    mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    dataset_maker = MapDatasetMaker(selection=["counts", "background", "exposure"])

    reference = MapDataset.create(geom)
    cutout = reference.cutout(observation.pointing_radec, width="4 deg")

    dataset = dataset_maker.run(cutout, observation)
    return mask_maker.run(dataset, observation)
def test_to_image(geom):
    """MapDataset.to_image: data sums, mask_safe handling and missing entries."""
    counts = Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    background = BackgroundModel(
        Map.read("$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz")
    )
    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz"
    ).sum_over_axes(keepdims=True)

    dataset = MapDataset(counts=counts, background_model=background, exposure=exposure)
    dataset_im = dataset.to_image()
    assert dataset_im.mask_safe is None
    # Summing over the energy axis must preserve total counts.
    assert dataset_im.counts.data.sum() == dataset.counts.data.sum()
    assert_allclose(dataset_im.background_model.map.data.sum(), 28548.625, rtol=1e-5)

    axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 3), name="energy", unit=u.TeV, interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0), binsz=0.5, width=(1, 1), frame="icrs", axes=[axis]
    )
    dataset = MapDataset.create(geom)

    # Check mask_safe handling
    data = np.array([[[False, True], [True, True]], [[False, False], [True, True]]])
    dataset.mask_safe = WcsNDMap.from_geom(geom=geom, data=data)
    dataset_im = dataset.to_image()
    assert dataset_im.mask_safe.data.dtype == bool
    expected = np.array([[False, True], [True, True]])
    assert (dataset_im.mask_safe.data == expected).all()

    # Check that missing entries in the dataset do not break
    dataset_copy = dataset.copy()
    dataset_copy.exposure = None
    dataset_copy.background_model = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.exposure is None
    assert dataset_im.background_model is None

    dataset_copy = dataset.copy()
    dataset_copy.counts = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.counts is None
def data_prep():
    """Reduce N_OBS copies of obs 110380 into per-observation cutout datasets."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    obs_ids = 110380 * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )

    offset_max = 4 * u.deg
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    stacked = MapDataset.create(geom=geom)

    model = SkyModel(
        spatial_model=PointSpatialModel(
            lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic"
        ),
        spectral_model=ExpCutoffPowerLawSpectralModel(
            index=2,
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        ),
        name="gc-source",
    )

    datasets = Datasets([])
    for idx, obs in enumerate(observations):
        cutout = stacked.cutout(
            obs.pointing_radec, width=2 * offset_max, name=f"dataset{idx}"
        )
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        dataset.models = model
        datasets.append(dataset)
    return datasets
def test_map_dataset_geom(geom, sky_model):
    """MapDataset._geom raises ValueError once no reference map is left."""
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5)
    dataset = MapDataset.create(geom, energy_axis_true=e_true)
    dataset.counts = None
    dataset.background_model = None
    dataset.models = sky_model

    # npred still works without counts and background
    npred = dataset.npred()
    assert npred.geom == geom

    dataset.mask_safe = None
    with pytest.raises(ValueError):
        dataset._geom
def test_map_maker(pars, observations):
    """Stacked map making: check counts, exposure and background totals,
    both on the cube and after reduction to an image."""
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    maker = MapDatasetMaker(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")

    for obs in observations:
        dataset = maker.run(obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    assert stacked.counts.unit == ""
    assert_allclose(stacked.counts.data.sum(), pars["counts"], rtol=1e-5)

    assert stacked.exposure.unit == "m2 s"
    assert_allclose(stacked.exposure.data.mean(), pars["exposure"], rtol=3e-3)

    assert stacked.background_model.map.unit == ""
    assert_allclose(
        stacked.background_model.map.data.sum(), pars["background"], rtol=1e-4
    )

    image_dataset = stacked.to_image()

    assert image_dataset.counts.unit == ""
    assert_allclose(image_dataset.counts.data.sum(), pars["counts"], rtol=1e-4)

    assert image_dataset.exposure.unit == "m2 s"
    assert_allclose(
        image_dataset.exposure.data.sum(), pars["exposure_image"], rtol=1e-3
    )

    assert image_dataset.background_model.map.unit == ""
    assert_allclose(
        image_dataset.background_model.map.data.sum(), pars["background"], rtol=1e-4
    )
def data_prep():
    """Stack N_OBS copies of obs 110380 into one dataset and attach a model."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    obs_ids = 110380 * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )

    stacked = MapDataset.create(geom)
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")

    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    model = SkyModel(
        spatial_model=PointSpatialModel(
            lon_0="0.01 deg", lat_0="0.01 deg", frame="galactic"
        ),
        spectral_model=ExpCutoffPowerLawSpectralModel(
            index=2,
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        ),
        name="gc-source",
    )
    stacked.models = model
    stacked.name = "stacked_ds"
    return Datasets([stacked])
def _map_making(self):
    """Make maps and datasets for 3d analysis.

    Fix: the per-observation reduction pipeline (run maker, apply safe mask,
    rename background model, log) was duplicated verbatim in the stacked and
    non-stacked branches; it is now a single loop with the branch decided at
    the end of each iteration.
    """
    log.info("Creating geometry.")
    datasets_settings = self.settings["datasets"]
    geom = self._create_geometry(datasets_settings["geom"])

    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in datasets_settings:
        axis_params = datasets_settings["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = datasets_settings.get("binsz", None)
    geom_irf["margin_irf"] = datasets_settings.get("margin", None)

    offset_max = Angle(datasets_settings["offset-max"])
    log.info("Creating datasets.")

    maker = MapDatasetMaker(offset_max=offset_max)
    maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
    stack_datasets = datasets_settings["stack-datasets"]

    datasets = []
    for obs in self.observations:
        log.info(f"Processing observation {obs.obs_id}")
        dataset = maker.run(stacked, obs)
        dataset = maker_safe_mask.run(dataset, obs)
        # TODO remove this once dataset and model have unique identifiers
        dataset.background_model.name = f"bkg_{dataset.name}"
        if stack_datasets:
            log.debug(dataset)
            stacked.stack(dataset)
        else:
            self._extract_irf_kernels(dataset)
            log.debug(dataset)
            datasets.append(dataset)

    if stack_datasets:
        self._extract_irf_kernels(stacked)
        datasets = [stacked]

    self.datasets = Datasets(datasets)
def test_to_image(geom):
    """to_image reduces mask_safe over energy (logical OR) and keeps bool dtype."""
    axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 3), name="energy", unit=u.TeV, interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0), binsz=0.5, width=(1, 1), coordsys="CEL", axes=[axis]
    )
    dataset = MapDataset.create(geom)

    # Two energy layers with different per-pixel validity.
    data = np.array([[[False, True], [True, True]], [[False, False], [True, True]]])
    dataset.mask_safe = WcsNDMap.from_geom(geom=geom, data=data)

    dataset_im = dataset.to_image()
    assert dataset_im.mask_safe.data.dtype == bool
    expected = np.array([[False, True], [True, True]])
    assert (dataset_im.mask_safe.data == expected).all()
def prepare_dataset(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)
    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )

    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def _map_making(self):
    """Make maps and datasets for 3d analysis.

    Fix: the per-observation reduction (cutout, maker, safe mask, log) was
    duplicated verbatim in the stacked and non-stacked branches; it is now a
    single loop with the stacking decision at the end of each iteration.
    """
    log.info("Creating geometry.")
    geom = self._create_geometry()
    geom_settings = self.config.datasets.geom

    geom_irf = dict(energy_axis_true=None, binsz_irf=None)
    if geom_settings.axes.energy_true.min is not None:
        geom_irf["energy_axis_true"] = self._make_energy_axis(
            geom_settings.axes.energy_true
        )
    geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
    offset_max = geom_settings.selection.offset_max

    log.info("Creating datasets.")
    maker = MapDatasetMaker(selection=self.config.datasets.map_selection)
    maker_safe_mask = SafeMaskMaker(
        methods=self.config.datasets.safe_mask.methods,
        **self.config.datasets.safe_mask.settings,
    )

    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
    stack = self.config.datasets.stack

    datasets = []
    for obs in self.observations:
        log.info(f"Processing observation {obs.obs_id}")
        cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
        dataset = maker.run(cutout, obs)
        dataset = maker_safe_mask.run(dataset, obs)
        log.debug(dataset)
        if stack:
            stacked.stack(dataset)
        else:
            datasets.append(dataset)

    if stack:
        datasets = [stacked]
    self.datasets = Datasets(datasets)
def test_create(geom, geom_etrue):
    """Empty dataset creation with explicit migra and rad axes."""
    migra_axis = MapAxis(nodes=np.linspace(0.0, 3.0, 51), unit="", name="migra")
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")

    dataset = MapDataset.create(geom, geom_etrue, migra_axis, rad_axis)

    # A freshly created dataset holds no counts or background.
    assert_allclose(dataset.counts.data.sum(), 0.0)
    assert_allclose(dataset.background_model.map.data.sum(), 0.0)

    assert dataset.psf.psf_map.data.shape == (3, 50, 100, 100)
    assert dataset.psf.exposure_map.data.shape == (3, 1, 100, 100)

    assert dataset.edisp.edisp_map.data.shape == (3, 50, 100, 100)
    assert dataset.edisp.exposure_map.data.shape == (3, 1, 100, 100)
    assert_allclose(dataset.edisp.edisp_map.data.sum(), 30000)

    assert_allclose(dataset.gti.time_delta, 0.0 * u.s)
def data_prep():
    """Per-observation datasets with edisp/psf reduced at the source position."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    obs_ids = 110380 * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )

    src_pos = SkyCoord(0, 0, unit="deg", frame="galactic")
    offset_max = 4 * u.deg
    maker = MapDatasetMaker(offset_max=offset_max)
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")
    stacked = MapDataset.create(geom=geom)

    datasets = []
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        # Collapse IRF maps to the response at the source position.
        dataset.edisp = dataset.edisp.get_energy_dispersion(
            position=src_pos, e_reco=energy_axis.edges
        )
        dataset.psf = dataset.psf.get_psf_kernel(
            position=src_pos, geom=geom, max_radius="0.3 deg"
        )
        datasets.append(dataset)
    return datasets
def test_map_maker(pars, observations, keepdims):
    """Stack per-observation datasets and verify summed counts, exposure and
    background, both on the cube and on its image reduction.

    Fix: the ``MapDatasetMaker`` was re-instantiated on every loop iteration
    although its configuration is loop-invariant; it is now built once before
    the loop.
    """
    stacked = MapDataset.create(geom=pars["geom"], geom_irf=pars["geom_true"])
    maker = MapDatasetMaker(
        geom=pars["geom"],
        geom_true=pars["geom_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
    )
    for obs in observations:
        dataset = maker.run(obs)
        stacked.stack(dataset)

    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)

    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-5)

    image_dataset = stacked.to_image()

    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=3e-3)

    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-5)
def make_datasets_example():
    """Build two example map datasets ("gc" and "g09") with a linked spectral
    parameter and serialize them to YAML."""
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )

    geoms = [
        WcsGeom.create(
            skydir=skydir,
            binsz=0.1,
            width=(1, 1),
            coordsys="GAL",
            proj="CAR",
            axes=[energy_axis],
        )
        for skydir in [(0, 0), (1, 0)]
    ]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]

    models = []
    for name, (lon, lat) in zip(names, sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        models.append(
            SkyModel(
                spatial_model=spatial_model, spectral_model=spectral_model, name=name
            )
        )

    # Link the "reference" spectral parameter of model 0 to that of model 1 ...
    params0 = models[0].spectral_model.parameters
    params1 = models[1].spectral_model.parameters
    idx = params0.parameters.index(params0["reference"])
    params0.parameters[idx] = params1["reference"]
    # ... and mirror the link at the sky-model parameter level.
    idx = models[0].parameters.parameters.index(models[0].parameters["reference"])
    models[0].parameters.parameters[idx] = params1["reference"]

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    diffuse_model = SkyDiffuseCube.read(
        "$GAMMAPY_DATA/fermi_3fhl/gll_iem_v06_cutout.fits"
    )

    datasets_list = []
    for idx, geom in enumerate(geoms):
        observations = data_store.get_observations(obs_ids)

        stacked = MapDataset.create(geom=geom)
        stacked.background_model.name = "background_irf_" + names[idx]

        maker = MapDatasetMaker(geom=geom, offset_max=4.0 * u.deg)
        for obs in observations:
            stacked.stack(maker.run(obs))

        # Reduce the IRF maps at the geometry center.
        stacked.psf = stacked.psf.get_psf_kernel(
            position=geom.center_skydir, geom=geom, max_radius="0.3 deg"
        )
        stacked.edisp = stacked.edisp.get_energy_dispersion(
            position=geom.center_skydir, e_reco=energy_axis.edges
        )
        stacked.name = names[idx]
        stacked.model = models[idx] + diffuse_model
        datasets_list.append(stacked)

    datasets = Datasets(datasets_list)
    dataset0 = datasets.datasets[0]
    print("dataset0")
    print("counts sum : ", dataset0.counts.data.sum())
    print("expo sum : ", dataset0.exposure.data.sum())
    print("bkg0 sum : ", dataset0.background_model.evaluate().data.sum())

    path = "$GAMMAPY_DATA/tests/models/gc_example_"
    datasets.to_yaml(path, overwrite=True)
def test_map_dataset_fits_io(tmp_path, sky_model, geom, geom_etrue):
    """Round-trip a MapDataset through FITS and compare all components.

    Fix: the empty-dataset round-trip at the end previously wrote
    ``"test.fits"`` into the current working directory, leaving a file behind
    and risking collisions between parallel runs; it now uses ``tmp_path``
    like the rest of the test.
    """
    dataset = get_map_dataset(sky_model, geom, geom_etrue)
    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit
    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    dataset.gti = gti

    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]
    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP",
        "EDISP_BANDS",
        "EDISP_EXPOSURE",
        "EDISP_EXPOSURE_BANDS",
        "PSF",
        "PSF_BANDS",
        "PSF_EXPOSURE",
        "PSF_EXPOSURE_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
        "GTI",
    ]
    assert actual == desired

    dataset.write(tmp_path / "test.fits")
    dataset_new = MapDataset.read(tmp_path / "test.fits")

    assert dataset_new.models is None
    assert dataset_new.mask.dtype == bool

    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(
        dataset.background_model.map.data, dataset_new.background_model.map.data
    )
    assert_allclose(dataset.edisp.edisp_map.data, dataset_new.edisp.edisp_map.data)
    assert_allclose(dataset.psf.psf_map.data, dataset_new.psf.psf_map.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit.data, dataset_new.mask_fit.data)
    assert_allclose(dataset.mask_safe.data, dataset_new.mask_safe.data)

    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.background_model.map.geom == dataset_new.background_model.map.geom
    assert dataset.edisp.edisp_map.geom == dataset_new.edisp.edisp_map.geom

    assert_allclose(
        dataset.gti.time_sum.to_value("s"), dataset_new.gti.time_sum.to_value("s")
    )

    # To test io of psf and edisp map of an empty dataset
    stacked = MapDataset.create(geom)
    stacked.write(tmp_path / "test-empty.fits", overwrite=True)
    stacked1 = MapDataset.read(tmp_path / "test-empty.fits")
    assert stacked1.psf.psf_map is not None
    assert stacked1.psf.exposure_map is not None
    assert stacked1.edisp.edisp_map is not None
    assert stacked1.edisp.exposure_map is not None
    assert stacked.mask.dtype == bool

    assert_allclose(stacked1.psf.psf_map, stacked.psf.psf_map)
    assert_allclose(stacked1.edisp.edisp_map, stacked.edisp.edisp_map)
proj="CAR", axes=[energy_axis], ) # Reduced IRFs are defined in true energy (i.e. not measured energy). energy_axis_true = MapAxis.from_edges(np.logspace(-0.3, 1.3, 10), unit="TeV", name="energy", interp="log") # Now we can define the target dataset with this geometry. # In[ ]: stacked = MapDataset.create(geom=geom, energy_axis_true=energy_axis_true, name="crab-stacked") # ## Data reduction # # ### Create the maker classes to be used # # The `~gammapy.cube.MapDatasetMaker` object is initialized as well as the `~gammapy.cube.SafeMaskMaker` that carries here a maximum offset selection. # In[ ]: offset_max = 2.5 * u.deg maker = MapDatasetMaker() maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max) # ### Perform the data reduction loop