def test_stack(geom, geom_etrue):
    """Stacking two MapDatasets sums counts/background/exposure and combines masks."""
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = 0.2 * np.ones(bkg_map.data.shape)

    counts_1 = Map.from_geom(geom)
    counts_1.quantity = 0.3 * np.ones(counts_1.data.shape)
    mask_1 = np.ones(bkg_map.data.shape, dtype=bool)
    mask_1[0][0][0:10] = False
    dataset_1 = MapDataset(
        counts=counts_1,
        background_model=BackgroundModel(bkg_map),
        exposure=get_exposure(geom_etrue),
        mask_safe=Map.from_geom(geom, data=mask_1),
    )

    counts_2 = Map.from_geom(geom)
    counts_2.quantity = 0.1 * np.ones(counts_2.data.shape)
    mask_2 = np.ones(bkg_map.data.shape, dtype=bool)
    mask_2[0][3] = False
    dataset_2 = MapDataset(
        counts=counts_2,
        background_model=BackgroundModel(bkg_map, norm=0.5),
        exposure=get_exposure(geom_etrue),
        mask_safe=Map.from_geom(geom, data=mask_2),
    )

    # stack() mutates dataset_1 in place.
    dataset_1.stack(dataset_2)
    assert_allclose(dataset_1.counts.data.sum(), 7987)
    assert_allclose(dataset_1.background_model.map.data.sum(), 5987)
    assert_allclose(dataset_1.exposure.data, 2.0 * dataset_2.exposure.data)
    assert_allclose(dataset_1.mask_safe.data.sum(), 20000)
def test_names(geom, geom_etrue, sky_model):
    """Copying datasets and models preserves or regenerates names as requested."""
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = 0.2 * np.ones(bkg_map.data.shape)
    background_model1 = BackgroundModel(bkg_map, name="bkg1")
    assert background_model1.name == "bkg1"

    counts_map = Map.from_geom(geom)
    counts_map.quantity = 0.3 * np.ones(counts_map.data.shape)

    # A plain copy gets a fresh auto-generated name; an explicit name is kept.
    model1 = sky_model.copy()
    assert model1.name != sky_model.name
    model1 = sky_model.copy(name="model1")
    assert model1.name == "model1"
    model2 = sky_model.copy(name="model2")

    dataset1 = MapDataset(
        counts=counts_map,
        background_model=background_model1,
        models=Models([model1, model2]),
        exposure=get_exposure(geom_etrue),
    )

    dataset2 = dataset1.copy()
    assert dataset2.name != dataset1.name
    assert dataset2.background_model

    dataset2 = dataset1.copy(name="dataset2")
    assert dataset2.name == "dataset2"
    assert dataset2.background_model.name == "bkg1"
    # The copy must be deep: equal names, distinct objects.
    assert dataset1.background_model is not dataset2.background_model
    assert dataset1.models.names == dataset2.models.names
    assert dataset1.models is not dataset2.models
def test_mask_shape():
    """Flux-point estimation over datasets with mismatched geoms should raise.

    NOTE(review): another function with this exact name appears later in this
    file; if both end up in the same module the later definition shadows this
    one and this test never runs -- confirm and rename one of them.
    """
    axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")
    # Two datasets with different spatial widths (3 deg vs 5 deg).
    geom_1 = WcsGeom.create(binsz=1, width=3, axes=[axis])
    geom_2 = WcsGeom.create(binsz=1, width=5, axes=[axis])
    dataset_1 = MapDataset.create(geom_1)
    dataset_2 = MapDataset.create(geom_2)
    # Drop the IRFs so only the counts/geometry handling is exercised.
    dataset_1.psf = None
    dataset_2.psf = None
    dataset_1.edisp = None
    dataset_2.edisp = None
    model = SkyModel(
        spectral_model=PowerLawSpectralModel(), spatial_model=GaussianSpatialModel()
    )
    # NOTE(review): this assigns `.model` (singular) and builds the SkyModel
    # without a name, while the sibling version of this test uses `.models`
    # and name="source" -- verify which attribute the MapDataset API expects.
    dataset_1.model = model
    dataset_2.model = model
    fpe = FluxPointsEstimator(
        datasets=[dataset_2, dataset_1], e_edges=[1, 10] * u.TeV, source="source"
    )
    with pytest.raises(ValueError):
        fpe.run()
def test_mask_shape():
    """Flux-point estimation over datasets with different geoms yields zero counts."""
    axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")

    # Two empty datasets with different spatial widths, no IRFs attached.
    datasets = []
    for width in (3, 5):
        empty = MapDataset.create(WcsGeom.create(binsz=1, width=width, axes=[axis]))
        empty.psf = None
        empty.edisp = None
        datasets.append(empty)
    dataset_1, dataset_2 = datasets

    model = SkyModel(
        spectral_model=PowerLawSpectralModel(),
        spatial_model=GaussianSpatialModel(),
        name="source",
    )
    dataset_1.models = model
    dataset_2.models = model

    fpe = FluxPointsEstimator(
        datasets=[dataset_2, dataset_1], e_edges=[1, 10] * u.TeV, source="source"
    )
    fp = fpe.run()
    assert_allclose(fp.table["counts"], 0)
def test_datasets_io_no_model(tmpdir):
    """Datasets without models still serialize one FITS file per dataset."""
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    geom = WcsGeom.create(npix=(5, 5), axes=[axis])

    datasets = Datasets(
        [MapDataset.create(geom, name="1"), MapDataset.create(geom, name="2")]
    )
    datasets.write(path=tmpdir, prefix="test")

    # One "<prefix>_data_<name>.fits" file per dataset.
    for name in ("1", "2"):
        assert (tmpdir / f"test_data_{name}.fits").exists()
def test_from_geoms_onoff():
    """MapDataset.from_geoms builds empty maps with the expected shapes."""
    migra_axis = MapAxis(nodes=np.linspace(0.0, 3.0, 51), unit="", name="migra")
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")
    e_reco = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 3), name="energy", unit=u.TeV, interp="log"
    )
    e_true = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), name="energy", unit=u.TeV, interp="log"
    )

    # Counts/exposure on the fine grid, IRF maps on a coarser one.
    wcs = WcsGeom.create(binsz=0.02, width=(2, 2))
    wcs_irf = WcsGeom.create(binsz=0.1, width=(2.5, 2.5))

    empty_dataset = MapDataset.from_geoms(
        wcs.to_cube([e_reco]),
        wcs.to_cube([e_true]),
        wcs_irf.to_cube([rad_axis, e_true]),
        wcs_irf.to_cube([migra_axis, e_true]),
    )

    assert empty_dataset.counts.data.shape == (2, 100, 100)
    assert empty_dataset.exposure.data.shape == (3, 100, 100)
    assert empty_dataset.psf.psf_map.data.shape == (3, 50, 25, 25)
    assert empty_dataset.psf.exposure_map.data.shape == (3, 1, 25, 25)
    assert empty_dataset.edisp.edisp_map.data.shape == (3, 50, 25, 25)
    assert empty_dataset.edisp.exposure_map.data.shape == (3, 1, 25, 25)
    assert_allclose(empty_dataset.edisp.edisp_map.data.sum(), 1875)
    assert_allclose(empty_dataset.gti.time_delta, 0.0 * u.s)
def empty_dataset(source_pos_radec, map_geom, e_reco_binning, livetime, irf_file, offset):
    """Build a MapDataset filled with IRFs for a simulated pointed observation.

    Parameters
    ----------
    source_pos_radec : dict
        Source position with "ra" and "dec" entries (interpreted in ICRS).
    map_geom : dict
        Map geometry settings with "binsize" and "width" angle quantities.
    e_reco_binning : dict
        Reconstructed-energy binning: "e_reco_min", "e_reco_max", "n_e_reco".
    livetime : Quantity
        Observation livetime passed to `Observation.create`.
    irf_file : str
        Path to the CTA IRF file loaded via `load_cta_irfs`.
    offset : Quantity
        Pointing offset added to the source declination.

    Returns
    -------
    dataset : MapDataset
        Dataset with exposure, background, PSF and energy dispersion filled,
        and a safe mask from the "offset-max" criterion.
    """
    source_pos_ra = source_pos_radec["ra"]
    source_pos_dec = source_pos_radec["dec"]
    source = SkyCoord(source_pos_ra, source_pos_dec, unit="deg", frame="icrs")
    # Log-spaced energy edges in TeV.
    e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to("TeV")
    e_reco_min = e_reco_min.value
    e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to("TeV")
    e_reco_max = e_reco_max.value
    n_e_reco = e_reco_binning["n_e_reco"]
    energy_axis = MapAxis.from_edges(np.logspace(
        np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
        unit="TeV", name="energy", interp="log")
    # Square map centred on the source.
    geom = WcsGeom.create(
        skydir=source,
        binsz=u.Quantity(map_geom["binsize"]).to("deg").value,
        width=(u.Quantity(map_geom["width"]).to("deg").value,
               u.Quantity(map_geom["width"]).to("deg").value),
        frame="icrs",
        axes=[energy_axis])
    # True-energy axis reuses the reconstructed-energy binning.
    energy_axis_true = MapAxis.from_edges(np.logspace(
        np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
        unit="TeV", name="energy", interp="log")
    # Pointing is offset from the source in declination only.
    pointing = SkyCoord(u.Quantity(source_pos_ra).to("deg"),
                        u.Quantity(source_pos_dec).to("deg") + offset,
                        frame="icrs", unit="deg")
    irfs = load_cta_irfs(irf_file)
    obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)
    empty = MapDataset.create(geom, energy_axis_true=energy_axis_true)
    maker = MapDatasetMaker(
        selection=["exposure", "background", "psf", "edisp"])
    # Safe-mask radius: map width plus a 1 deg margin.
    maker_safe_mask = SafeMaskMaker(
        methods=["offset-max"],
        offset_max=u.quantity.Quantity(map_geom["width"]) + 1.0 * u.deg)
    dataset = maker.run(empty, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    return dataset
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])
    # Optional separate geometry for the IRF maps; otherwise derive one by
    # rebinning the main geometry to BINSZ_IRF.
    if "geom-irf" in self.settings["datasets"]:
        geom_irf = self._create_geometry(self.settings["datasets"]["geom-irf"])
    else:
        geom_irf = geom.to_binsz(binsz=BINSZ_IRF)
    offset_max = Angle(self.settings["datasets"]["offset-max"])
    stack_datasets = self.settings["datasets"]["stack-datasets"]
    log.info("Creating datasets.")
    maker = MapDatasetMaker(
        geom=geom,
        geom_true=geom_irf,
        offset_max=offset_max,
    )
    if stack_datasets:
        # Accumulate every observation into one stacked dataset.
        stacked = MapDataset.create(geom=geom, geom_irf=geom_irf, name="stacked")
        for obs in self.observations:
            dataset = maker.run(obs)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        # Keep one dataset per observation.
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def make_map_dataset(observations, target_pos, geom, geom_true, offset_max=2 * u.deg):
    """Run map-making over the observations and bundle the results in a MapDataset."""
    maps = MapMaker(geom, offset_max, geom_true=geom_true).run(observations)

    # PSF kernel used for the model convolution
    table_psf = make_mean_psf(observations, target_pos)
    psf_kernel = PSFKernel.from_table_psf(table_psf, geom_true, max_radius="0.3 deg")

    edisp = make_mean_edisp(
        observations,
        target_pos,
        e_true=geom_true.axes[0].edges,
        e_reco=geom.axes[0].edges,
    )

    # Background: fit the normalisation, keep the tilt fixed.
    bkg_model = BackgroundModel(maps["background"])
    bkg_model.parameters["norm"].frozen = False
    bkg_model.parameters["tilt"].frozen = True

    return MapDataset(
        counts=maps["counts"],
        exposure=maps["exposure"],
        background_model=bkg_model,
        psf=psf_kernel,
        edisp=edisp,
    )
def test_safe_mask_maker(observations):
    """SafeMaskMaker offset/energy masks and the not-implemented edisp-bias path.

    Fix: the original repeated the exact same ``pytest.raises`` block for
    ``make_mask_energy_edisp_bias`` twice; the duplicate added no coverage
    and has been removed.
    """
    obs = observations[0]
    axis = MapAxis.from_edges([0.1, 1, 10], name="energy", interp="log", unit="TeV")
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)
    empty_dataset = MapDataset.create(geom=geom)
    dataset_maker = MapDatasetMaker(offset_max="3 deg")
    safe_mask_maker = SafeMaskMaker(offset_max="3 deg")

    dataset = dataset_maker.run(empty_dataset, obs)

    mask_offset = safe_mask_maker.make_mask_offset_max(dataset=dataset, observation=obs)
    assert_allclose(mask_offset.sum(), 109)

    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs)
    assert_allclose(mask_energy_aeff_default.sum(), 242)

    # The edisp-bias criterion is not implemented for this dataset type.
    with pytest.raises(NotImplementedError) as excinfo:
        safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert "only supported" in str(excinfo.value)
def test_map_maker_obs(observations):
    """Maker output geoms and shapes for distinct spatial/e_true/e_reco binnings."""
    # Test for different spatial geoms and etrue, ereco bins
    geom_reco = geom(ebounds=[0.1, 1, 10])
    e_true = MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy", unit="TeV", interp="log"
    )
    geom_exp = geom(ebounds=[0.1, 0.5, 2.5, 10.0])

    reference = MapDataset.create(
        geom=geom_reco, energy_axis_true=e_true, binsz_irf=1.0, margin_irf=1.0
    )
    maker_obs = MapDatasetMaker(offset_max=2.0 * u.deg, cutout=False)
    dataset = maker_obs.run(reference, observations[0])

    assert dataset.counts.geom == geom_reco
    assert dataset.background_model.map.geom == geom_reco
    assert dataset.exposure.geom == geom_exp
    assert dataset.edisp.edisp_map.data.shape == (3, 48, 6, 11)
    assert dataset.edisp.exposure_map.data.shape == (3, 1, 6, 11)
    assert dataset.psf.psf_map.data.shape == (3, 66, 6, 11)
    assert dataset.psf.exposure_map.data.shape == (3, 1, 6, 11)
    assert_allclose(dataset.gti.time_delta, 1800.0 * u.s)
    assert dataset.name == "obs_110380"
def test_create(geom, geom_etrue):
    """MapDataset.create builds empty maps with the requested axes."""
    # tests empty datasets created
    migra_axis = MapAxis(nodes=np.linspace(0.0, 3.0, 51), unit="", name="migra")
    rad_axis = MapAxis(nodes=np.linspace(0.0, 1.0, 51), unit="deg", name="theta")
    e_reco = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 3), name="energy", unit=u.TeV, interp="log"
    )
    e_true = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), name="energy", unit=u.TeV, interp="log"
    )
    # Local cube geom (the `geom` fixture argument is intentionally unused here).
    cube_geom = WcsGeom.create(binsz=0.02, width=(2, 2), axes=[e_reco])

    empty = MapDataset.create(
        geom=cube_geom,
        energy_axis_true=e_true,
        migra_axis=migra_axis,
        rad_axis=rad_axis,
    )

    assert empty.counts.data.shape == (2, 100, 100)
    assert empty.exposure.data.shape == (3, 100, 100)
    assert empty.psf.psf_map.data.shape == (3, 50, 10, 10)
    assert empty.psf.exposure_map.data.shape == (3, 1, 10, 10)
    assert empty.edisp.edisp_map.data.shape == (3, 50, 10, 10)
    assert empty.edisp.exposure_map.data.shape == (3, 1, 10, 10)
    assert_allclose(empty.edisp.edisp_map.data.sum(), 300)
    assert_allclose(empty.gti.time_delta, 0.0 * u.s)
def test_adaptive_ring_bkg_maker(pars, geom, observations, exclusion_mask):
    """Adaptive ring background estimates for the parametrised observation."""
    bkg_maker = AdaptiveRingBackgroundMaker(
        r_in="0.2 deg",
        width="0.3 deg",
        r_out_max="2 deg",
        stepsize="0.2 deg",
        exclusion_mask=exclusion_mask,
        method=pars["method"],
    )
    obs = observations[pars["obs_idx"]]

    # Build the image dataset: fill maps, apply the safe mask, reduce to 2D.
    dataset = MapDatasetMaker(offset_max="2 deg").run(MapDataset.create(geom), obs)
    dataset = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg").run(dataset, obs)
    dataset = dataset.to_image()
    dataset_on_off = bkg_maker.run(dataset)

    mask = dataset.mask_safe
    assert_allclose(dataset_on_off.counts_off.data[mask].sum(), pars["counts_off"])
    assert_allclose(dataset_on_off.acceptance_off.data[mask].sum(), pars["acceptance_off"])
    assert_allclose(dataset_on_off.alpha.data[0][100][100], pars["alpha"])
    assert_allclose(dataset_on_off.exposure.data[0][100][100], pars["exposure"])
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])
    # Optional IRF-geometry overrides; None entries fall back to the defaults
    # of MapDatasetMaker / MapDataset.create.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in self.settings["datasets"]:
        axis_params = self.settings["datasets"]["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
    geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)
    offset_max = Angle(self.settings["datasets"]["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(geom=geom, offset_max=offset_max, **geom_irf)
    if self.settings["datasets"]["stack-datasets"]:
        # Accumulate all observations into a single stacked dataset.
        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
        for obs in self.observations:
            dataset = maker.run(obs)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        # One dataset per observation.
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def prepare_dataset_simple(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    # Replace the IRF energy dispersion by a simple Gaussian
    # (10% resolution, no bias) so the dataset is easier to reason about.
    irfs["edisp"] = EnergyDispersion2D.from_gauss(
        e_true=ENERGY_AXIS_TRUE.edges,
        migra=MIGRA_AXIS.edges,
        sigma=0.1,
        bias=0,
        offset=[0, 2, 4, 6, 8] * u.deg,
    )

    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )
    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    maker = MapDatasetMaker(selection=["exposure", "edisp", "psf", "background"])
    dataset = maker.run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def test_ring_bkg_maker(geom, observations, exclusion_mask):
    """Ring background estimates for two observations."""
    ring_bkg_maker = RingBackgroundMaker(
        r_in="0.2 deg", width="0.3 deg", exclusion_mask=exclusion_mask
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    map_dataset_maker = MapDatasetMaker(offset_max="2 deg")
    reference = MapDataset.create(geom)

    datasets = []
    for obs in observations:
        filled = map_dataset_maker.run(reference, obs)
        filled = safe_mask_maker.run(filled, obs)
        dataset = filled.to_image()
        datasets.append(ring_bkg_maker.run(dataset))

    # As in the original: the mask comes from the LAST observation's image.
    mask = dataset.mask_safe
    assert_allclose(datasets[0].counts_off.data[mask].sum(), 2511333)
    assert_allclose(datasets[1].counts_off.data[mask].sum(), 2143577.0)
    assert_allclose(datasets[0].acceptance_off.data[mask].sum(), 2961300)
    assert_allclose(datasets[1].acceptance_off.data[mask].sum(), 2364657.2)
    assert_allclose(datasets[0].alpha.data[0][100][100], 0.00063745599)
    assert_allclose(datasets[0].exposure.data[0][100][100], 806254444.8480084)
def simulate_map_dataset(random_state=0):
    """Simulate a CTA map dataset with a Gaussian source at the Galactic centre."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")
    energy_axis = MapAxis.from_edges(
        edges=np.logspace(-1, 2, 15) * u.TeV, name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic"
    )

    skymodel = SkyModel(
        spatial_model=GaussianSpatialModel(
            lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
        ),
        spectral_model=PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1"),
        name="source",
    )

    obs = Observation.create(pointing=skydir, livetime=1 * u.h, irfs=irfs)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(MapDataset.create(geom), obs)

    # Attach the model and draw Poisson counts reproducibly.
    dataset.models = skymodel
    dataset.fake(random_state=random_state)
    return dataset
def get_map_dataset(sky_model, geom, geom_etrue, edisp=True, **kwargs):
    """Return a MapDataset with background, PSF, exposure, optional edisp and a fit mask."""
    # define background model
    bkg_map = Map.from_geom(geom)
    bkg_map.quantity = 0.2 * np.ones(bkg_map.data.shape)
    background_model = BackgroundModel(bkg_map)

    if edisp:
        # define energy dispersion (diagonal response on the true-energy axis)
        e_true = geom_etrue.get_axis_by_name("energy")
        edisp_map = EDispMap.from_diagonal_response(energy_axis_true=e_true)
    else:
        edisp_map = None

    # define fit mask: a 1 deg circle around the model position
    center = sky_model.spatial_model.position
    mask_data = background_model.map.geom.region_mask(
        [CircleSkyRegion(center=center, radius=1 * u.deg)]
    )

    return MapDataset(
        models=sky_model,
        exposure=get_exposure(geom_etrue),
        background_model=background_model,
        psf=get_psf(),
        edisp=edisp_map,
        mask_fit=Map.from_geom(geom, data=mask_data),
        **kwargs,
    )
def simple_dataset():
    """A 50x50-pixel single-energy-bin dataset: mask on, counts=2, background=1."""
    energy = MapAxis.from_energy_bounds(0.1, 10, 1, unit="TeV")
    dataset = MapDataset.create(WcsGeom.create(npix=50, binsz=0.02, axes=[energy]))
    dataset.mask_safe += 1
    dataset.counts += 2
    dataset.background_model.map += 1
    return dataset
def test_to_image(geom):
    """MapDataset.to_image: summed counts/background, mask reduction, None members."""
    counts = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-counts-cube.fits.gz")
    background = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-background-cube.fits.gz")
    background = BackgroundModel(background)
    exposure = Map.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/fermi-3fhl-gc-exposure-cube.fits.gz")
    exposure = exposure.sum_over_axes(keepdims=True)
    dataset = MapDataset(counts=counts, background_model=background,
                         exposure=exposure)
    dataset_im = dataset.to_image()
    # No mask on the input -> no mask on the image dataset.
    assert dataset_im.mask_safe is None
    # Reducing to an image must conserve the total counts and background.
    assert dataset_im.counts.data.sum() == dataset.counts.data.sum()
    assert_allclose(dataset_im.background_model.map.data.sum(), 28548.625, rtol=1e-5)
    ebounds = np.logspace(-1.0, 1.0, 3)
    axis = MapAxis.from_edges(ebounds, name="energy", unit=u.TeV, interp="log")
    geom = WcsGeom.create(skydir=(0, 0), binsz=0.5, width=(1, 1), frame="icrs",
                          axes=[axis])
    dataset = MapDataset.create(geom)
    # Check map_safe handling
    data = np.array([[[False, True], [True, True]], [[False, False], [True, True]]])
    dataset.mask_safe = WcsNDMap.from_geom(geom=geom, data=data)
    dataset_im = dataset.to_image()
    assert dataset_im.mask_safe.data.dtype == bool
    # For this input the reduced mask equals the OR over the energy bins.
    desired = np.array([[False, True], [True, True]])
    assert (dataset_im.mask_safe.data == desired).all()
    # Check that missing entries in the dataset do not break
    dataset_copy = dataset.copy()
    dataset_copy.exposure = None
    dataset_copy.background_model = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.exposure is None
    assert dataset_im.background_model is None
    dataset_copy = dataset.copy()
    dataset_copy.counts = None
    dataset_im = dataset_copy.to_image()
    assert dataset_im.counts is None
def test_map_dataset_fits_io(tmpdir, sky_model, geom, geom_etrue):
    """Round-trip a MapDataset through FITS and compare all members.

    Fix: the final assertion compared ``e_true`` units twice; following the
    ``e_reco`` edge comparison it clearly intends to compare ``e_reco`` units.
    """
    dataset = get_map_dataset(sky_model, geom, geom_etrue)
    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit

    # Serialisation produces one HDU (plus a bands table) per member.
    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]
    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP_MATRIX",
        "EDISP_MATRIX_EBOUNDS",
        "PSF_KERNEL",
        "PSF_KERNEL_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
    ]
    assert actual == desired

    dataset.write(tmpdir / "test.fits")
    dataset_new = MapDataset.read(tmpdir / "test.fits")
    # Models are not serialised to FITS.
    assert dataset_new.model is None
    assert dataset_new.mask.dtype == bool

    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(dataset.background_model.map.data,
                    dataset_new.background_model.map.data)
    assert_allclose(dataset.edisp.data.data.value, dataset_new.edisp.data.data.value)
    assert_allclose(dataset.psf.data, dataset_new.psf.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit, dataset_new.mask_fit)
    assert_allclose(dataset.mask_safe, dataset_new.mask_safe)

    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.background_model.map.geom == dataset_new.background_model.map.geom

    assert_allclose(dataset.edisp.e_true.edges.value,
                    dataset_new.edisp.e_true.edges.value)
    assert dataset.edisp.e_true.unit == dataset_new.edisp.e_true.unit
    assert_allclose(dataset.edisp.e_reco.edges.value,
                    dataset_new.edisp.e_reco.edges.value)
    assert dataset.edisp.e_reco.unit == dataset_new.edisp.e_reco.unit
def obs_dataset(geom, observation):
    """Counts/background/exposure dataset for one observation, offset-max masked."""
    reference = MapDataset.create(geom)
    cutout = reference.cutout(observation.pointing_radec, width="4 deg")

    maker = MapDatasetMaker(selection=["counts", "background", "exposure"])
    dataset = maker.run(cutout, observation)

    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    return safe_mask_maker.run(dataset, observation)
def read_dataset(filename_dataset, filename_model, obs_id):
    """Read a dataset and replace its counts with the simulated event list."""
    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)

    filename_events = get_filename_events(filename_dataset, filename_model, obs_id)
    log.info(f"Reading {filename_events}")
    events = EventList.read(filename_events)

    # Bin the simulated events into a fresh counts map on the global geometry.
    counts = Map.from_geom(WCS_GEOM)
    counts.fill_events(events)
    dataset.counts = counts
    return dataset
def all_cmd(model, obs_ids, obs_all, simple, core):
    """Prepare the dataset, simulate events and fit for one model or all models.

    Parameters
    ----------
    model : str
        Model name, or "all" to run every entry of AVAILABLE_MODELS.
    obs_ids : int
        Number of observations to simulate/fit.
    obs_all : bool
        If True, fan the per-observation work out over a multiprocessing pool.
    simple : bool
        Use the simplified dataset preparation.
    core : int
        Number of worker processes for the pool.

    Fixes vs the original:
    - ``obs_ids`` was clobbered inside the model loop (re-wrapped in
      ``f"0:{...}"`` on the second iteration), corrupting the range for every
      model after the first; a per-iteration local is used instead.
    - ``filename_model`` was computed once from the raw ``model`` argument, so
      with ``model="all"`` every iteration pointed at "models/all.yaml"; it is
      now recomputed per model.
    """
    if model == "all":
        models = AVAILABLE_MODELS
    else:
        models = [model]
    binned = False

    filename_dataset = get_filename_dataset(LIVETIME)
    if simple:
        filename_dataset = Path(
            str(filename_dataset).replace("dataset", "dataset_simple"))
        prepare_dataset_simple(filename_dataset)
    else:
        prepare_dataset(filename_dataset)
    dataset = MapDataset.read(filename_dataset)

    if obs_all:
        for model_name in models:
            filename_model = BASE_PATH / f"models/{model_name}.yaml"
            # Parse afresh each iteration from the original argument.
            model_obs_ids = parse_obs_ids(f"0:{obs_ids}", model_name)
            with multiprocessing.Pool(processes=core) as pool:
                args = zip(repeat(filename_model), repeat(filename_dataset),
                           repeat(dataset), model_obs_ids)
                pool.starmap(simulate_events, args)
            with multiprocessing.Pool(processes=core) as pool:
                args = zip(repeat(filename_model), repeat(filename_dataset),
                           model_obs_ids, repeat(binned), repeat(simple))
                pool.starmap(fit_model, args)
            fit_gather(model_name)
            plot_pull_distribution(model_name)
    else:
        for model_name in models:
            filename_model = BASE_PATH / f"models/{model_name}.yaml"
            simulate_events(filename_model=filename_model,
                            filename_dataset=filename_dataset, nobs=obs_ids)
            fit_model(filename_model=filename_model,
                      filename_dataset=filename_dataset,
                      obs_id=str(obs_ids - 1), binned=binned, simple=simple)
            plot_results(filename_model=filename_model,
                         filename_dataset=filename_dataset,
                         obs_id=str(obs_ids - 1))
def data_prep():
    """Build per-observation cutout datasets around the Galactic centre."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # The same run repeated N_OBS times.
    observations = data_store.get_observations(OBS_ID * np.ones(N_OBS))

    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    offset_max = 4 * u.deg

    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    stacked = MapDataset.create(geom=geom)

    model = SkyModel(
        spatial_model=PointSpatialModel(
            lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic"
        ),
        spectral_model=ExpCutoffPowerLawSpectralModel(
            index=2,
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        ),
        name="gc-source",
    )

    datasets = Datasets([])
    for idx, obs in enumerate(observations):
        # Cut out a region around each pointing to keep per-obs maps small.
        cutout = stacked.cutout(
            obs.pointing_radec, width=2 * offset_max, name=f"dataset{idx}"
        )
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        dataset.models = model
        datasets.append(dataset)
    return datasets
def test_map_dataset_geom(geom, sky_model):
    """npred works without counts/background; with no maps at all _geom raises."""
    e_true = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=5)
    dataset = MapDataset.create(geom, energy_axis_true=e_true)

    # Strip counts and background so only models/exposure remain.
    dataset.counts = None
    dataset.background_model = None
    dataset.models = sky_model
    assert dataset.npred().geom == geom

    # With the safe mask gone too, no member defines a geometry any more.
    dataset.mask_safe = None
    with pytest.raises(ValueError):
        dataset._geom
def test_map_maker(pars, observations):
    """Stacked map making: counts/exposure/background totals, also after to_image()."""
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    maker = MapDatasetMaker(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    # Fill, mask and stack every observation into the cumulative dataset.
    for obs in observations:
        dataset = maker.run(obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)
    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)
    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)
    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
    # The image reduction must conserve counts and background totals.
    image_dataset = stacked.to_image()
    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-4)
    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=1e-3)
    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
def data_prep():
    """Build one stacked dataset from N_OBS copies of CTA-1DC run 110380."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # The same run repeated N_OBS times to scale the benchmark workload.
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)
    energy_axis = MapAxis.from_bounds(0.1, 10, nbin=10, unit="TeV", name="energy",
                                      interp="log")
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )
    stacked = MapDataset.create(geom)
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")
    # Fill, mask and stack every observation into the cumulative dataset.
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)
    spatial_model = PointSpatialModel(lon_0="0.01 deg", lat_0="0.01 deg",
                                      frame="galactic")
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model,
                     name="gc-source")
    stacked.models = model
    stacked.name = "stacked_ds"
    return Datasets([stacked])
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])
    # Optional IRF-geometry overrides; None entries fall back to defaults.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in self.settings["datasets"]:
        axis_params = self.settings["datasets"]["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
    geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)
    offset_max = Angle(self.settings["datasets"]["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(offset_max=offset_max)
    maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    # The empty reference dataset also serves as the stacking target below.
    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
    if self.settings["datasets"]["stack-datasets"]:
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = maker.run(stacked, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            # TODO remove this once dataset and model have unique identifiers
            dataset.background_model.name = f"bkg_{dataset.name}"
            log.debug(dataset)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = maker.run(stacked, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            # TODO remove this once dataset and model have unique identifiers
            dataset.background_model.name = f"bkg_{dataset.name}"
            self._extract_irf_kernels(dataset)
            log.debug(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def simulate_events_cmd(model, nobs, core=None):
    """Simulate event lists for one model or for all available models.

    Parameters
    ----------
    model : str
        Model name, or "all" to run every entry of AVAILABLE_MODELS.
    nobs : int
        Number of observations to simulate (obs ids 0..nobs-1).
    core : int, optional
        Worker-process count for the pool; None lets multiprocessing
        use os.cpu_count().

    Fixes vs the original: ``obs_ids`` was read before assignment (the range
    must be built from ``nobs``), and ``core`` was an undefined name -- it is
    now a backward-compatible keyword argument.
    """
    if model == "all":
        models = AVAILABLE_MODELS
    else:
        models = [model]
    filename_dataset = get_filename_dataset(LIVETIME)
    dataset = MapDataset.read(filename_dataset)
    obs_ids = parse_obs_ids(f"0:{nobs}", model)
    for model in models:
        filename_model = BASE_PATH / f"models/{model}.yaml"
        with multiprocessing.Pool(processes=core) as pool:
            args = zip(repeat(filename_model), repeat(filename_dataset),
                       repeat(dataset), obs_ids)
            pool.starmap(simulate_events, args)