def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])

    # Optional IRF-geometry overrides; all default to None so MapDatasetMaker
    # falls back to its own defaults when a key is absent.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in self.settings["datasets"]:
        axis_params = self.settings["datasets"]["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
    geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)

    offset_max = Angle(self.settings["datasets"]["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(geom=geom, offset_max=offset_max, **geom_irf)
    if self.settings["datasets"]["stack-datasets"]:
        # Stacked mode: reduce each observation and add it into one
        # cumulative dataset.
        stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
        for obs in self.observations:
            dataset = maker.run(obs)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        # Joint mode: keep one dataset per observation.
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])
    # IRF geometry: either user-supplied via "geom-irf", or the counts
    # geometry re-binned to the coarser default IRF bin size.
    if "geom-irf" in self.settings["datasets"]:
        geom_irf = self._create_geometry(self.settings["datasets"]["geom-irf"])
    else:
        geom_irf = geom.to_binsz(binsz=BINSZ_IRF)
    offset_max = Angle(self.settings["datasets"]["offset-max"])
    stack_datasets = self.settings["datasets"]["stack-datasets"]
    log.info("Creating datasets.")
    maker = MapDatasetMaker(
        geom=geom,
        geom_true=geom_irf,
        offset_max=offset_max,
    )
    if stack_datasets:
        # Stacked mode: one cumulative dataset over all observations.
        stacked = MapDataset.create(geom=geom, geom_irf=geom_irf, name="stacked")
        for obs in self.observations:
            dataset = maker.run(obs)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        # Joint mode: one dataset per observation.
        datasets = []
        for obs in self.observations:
            dataset = maker.run(obs)
            self._extract_irf_kernels(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def test_map_maker_ring(observations):
    # Ring-background analysis: reduce each observation, estimate OFF counts
    # with a ring around each pixel, and stack into a MapDatasetOnOff.
    geomd = geom(ebounds=[0.1, 10])
    map_dataset_maker = MapDatasetMaker(offset_max="2 deg")
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")
    stacked = MapDatasetOnOff.create(geomd)

    regions = CircleSkyRegion(
        SkyCoord(0, 0, unit="deg", frame="galactic"), radius=0.5 * u.deg
    )
    # Exclusion mask is True outside the source region, so the ring estimator
    # does not use counts from the source itself as background.
    exclusion = Map.from_geom(geomd)
    exclusion.data = exclusion.geom.region_mask([regions], inside=False)
    ring_bkg = RingBackgroundMaker(
        r_in="0.5 deg", width="0.4 deg", exclusion_mask=exclusion
    )

    for obs in observations:
        dataset = map_dataset_maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        # The ring background estimator works on 2D images only.
        dataset = dataset.to_image()
        dataset_on_off = ring_bkg.run(dataset)
        stacked.stack(dataset_on_off)

    # Reference values pinned from a previous run of this test.
    assert_allclose(np.nansum(stacked.counts.data), 34366, rtol=1e-2)
    assert_allclose(np.nansum(stacked.acceptance_off.data), 434.36, rtol=1e-2)
def test_safe_mask_maker(observations):
    """Check SafeMaskMaker offset and aeff-default masks on one observation."""
    obs = observations[0]

    axis = MapAxis.from_edges([0.1, 1, 10], name="energy", interp="log", unit="TeV")
    geom = WcsGeom.create(npix=(11, 11), axes=[axis], skydir=obs.pointing_radec)
    empty_dataset = MapDataset.create(geom=geom)

    dataset_maker = MapDatasetMaker(offset_max="3 deg")
    safe_mask_maker = SafeMaskMaker(offset_max="3 deg")

    dataset = dataset_maker.run(empty_dataset, obs)

    mask_offset = safe_mask_maker.make_mask_offset_max(dataset=dataset, observation=obs)
    assert_allclose(mask_offset.sum(), 109)

    mask_energy_aeff_default = safe_mask_maker.make_mask_energy_aeff_default(
        dataset=dataset, observation=obs
    )
    assert_allclose(mask_energy_aeff_default.sum(), 242)

    # The edisp-bias safe-energy method is not implemented for map datasets,
    # so it must raise. (The original repeated this exact check twice
    # verbatim; the copy-pasted duplicate was removed.)
    with pytest.raises(NotImplementedError) as excinfo:
        safe_mask_maker.make_mask_energy_edisp_bias(dataset)
    assert "only supported" in str(excinfo.value)
def prepare_dataset_simple(filename_dataset):
    """Prepare dataset for a given skymodel and write it to disk.

    Parameters
    ----------
    filename_dataset : pathlib.Path
        Output path; parent directories are created if needed.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    # Replace the file's energy dispersion with an idealized Gaussian one
    # (10% resolution, no bias) so the simulated migration is simple.
    edisp_gauss = EnergyDispersion2D.from_gauss(
        e_true=ENERGY_AXIS_TRUE.edges,
        migra=MIGRA_AXIS.edges,
        sigma=0.1,
        bias=0,
        offset=[0, 2, 4, 6, 8] * u.deg,
    )
    irfs["edisp"] = edisp_gauss

    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )

    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    # Dead commented-out selection variants removed; the full selection is used.
    maker = MapDatasetMaker(selection=["exposure", "edisp", "psf", "background"])
    dataset = maker.run(empty, observation)

    filename_dataset.parent.mkdir(exist_ok=True, parents=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def empty_dataset(source_pos_radec, map_geom, e_reco_binning, livetime, irf_file, offset):
    """Build an empty (no counts) MapDataset centered on the source.

    Parameters
    ----------
    source_pos_radec : dict
        Keys "ra" and "dec" (degrees, ICRS).
    map_geom : dict
        Keys "binsize" and "width" as angle quantities/strings.
    e_reco_binning : dict
        Keys "e_reco_min", "e_reco_max" (energy quantities) and "n_e_reco".
    livetime : `~astropy.units.Quantity`
        Observation livetime.
    irf_file : str
        Path to the CTA IRF file.
    offset : `~astropy.units.Quantity`
        Pointing offset in declination from the source.
    """
    source_pos_ra = source_pos_radec["ra"]
    source_pos_dec = source_pos_radec["dec"]
    source = SkyCoord(source_pos_ra, source_pos_dec, unit="deg", frame="icrs")

    # Consolidate the .to(...)/.value pairs into .to_value calls.
    e_reco_min = u.Quantity(e_reco_binning["e_reco_min"]).to_value("TeV")
    e_reco_max = u.Quantity(e_reco_binning["e_reco_max"]).to_value("TeV")
    n_e_reco = e_reco_binning["n_e_reco"]

    energy_axis = MapAxis.from_edges(
        np.logspace(np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
        unit="TeV",
        name="energy",
        interp="log",
    )

    # Convert the map width once instead of three times.
    width_deg = u.Quantity(map_geom["width"]).to_value("deg")
    geom = WcsGeom.create(
        skydir=source,
        binsz=u.Quantity(map_geom["binsize"]).to_value("deg"),
        width=(width_deg, width_deg),
        frame="icrs",
        axes=[energy_axis],
    )

    # True-energy axis intentionally uses the same binning as the reco axis.
    energy_axis_true = MapAxis.from_edges(
        np.logspace(np.log10(e_reco_min), np.log10(e_reco_max), n_e_reco),
        unit="TeV",
        name="energy",
        interp="log",
    )

    pointing = SkyCoord(
        u.Quantity(source_pos_ra).to("deg"),
        u.Quantity(source_pos_dec).to("deg") + offset,
        frame="icrs",
        unit="deg",
    )
    irfs = load_cta_irfs(irf_file)
    obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs)

    empty = MapDataset.create(geom, energy_axis_true=energy_axis_true)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    # Use u.Quantity consistently (original mixed in u.quantity.Quantity).
    maker_safe_mask = SafeMaskMaker(
        methods=["offset-max"],
        offset_max=u.Quantity(map_geom["width"]) + 1.0 * u.deg,
    )
    dataset = maker.run(empty, obs)
    dataset = maker_safe_mask.run(dataset, obs)
    return dataset
def simulate_map_dataset(random_state=0):
    """Simulate a CTA map dataset with a Gaussian source and fake counts."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")

    # 14 log-spaced energy bins between 0.1 and 100 TeV.
    energy_edges = np.logspace(-1, 2, 15) * u.TeV
    energy_axis = MapAxis.from_edges(edges=energy_edges, name="energy", interp="log")
    geom = WcsGeom.create(
        skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic"
    )

    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="source"
    )

    # Reduce a one-hour observation into an empty dataset, then fake counts.
    obs = Observation.create(pointing=skydir, livetime=1 * u.h, irfs=irfs)
    empty = MapDataset.create(geom)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, obs)

    dataset.models = skymodel
    dataset.fake(random_state=random_state)
    return dataset
def test_map_maker_obs(observations):
    # Test for different spatial geoms and etrue, ereco bins
    geom_reco = geom(ebounds=[0.1, 1, 10])
    e_true = MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy", unit="TeV", interp="log"
    )
    # Expected exposure geometry: reco spatial grid with the true-energy axis.
    geom_exp = geom(ebounds=[0.1, 0.5, 2.5, 10.0])
    maker_obs = MapDatasetMaker(
        geom=geom_reco,
        energy_axis_true=e_true,
        binsz_irf=1.0,
        margin_irf=1.0,
        offset_max=2.0 * u.deg,
        cutout=False,
    )
    map_dataset = maker_obs.run(observations[0])
    assert map_dataset.counts.geom == geom_reco
    assert map_dataset.background_model.map.geom == geom_reco
    assert map_dataset.exposure.geom == geom_exp
    # IRF maps are computed on a coarser spatial grid controlled by
    # binsz_irf/margin_irf above, hence the 6 x 11 spatial shape.
    assert map_dataset.edisp.edisp_map.data.shape == (3, 48, 6, 11)
    assert map_dataset.edisp.exposure_map.data.shape == (3, 1, 6, 11)
    assert map_dataset.psf.psf_map.data.shape == (3, 66, 6, 11)
    assert map_dataset.psf.exposure_map.data.shape == (3, 1, 6, 11)
    assert_allclose(map_dataset.gti.time_delta, 1800.0 * u.s)
    assert map_dataset.name == "obs_110380"
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry(self.settings["datasets"]["geom"])
    # Optional IRF-geometry overrides; None values fall back to defaults.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None, margin_irf=None)
    if "energy-axis-true" in self.settings["datasets"]:
        axis_params = self.settings["datasets"]["energy-axis-true"]
        geom_irf["energy_axis_true"] = MapAxis.from_bounds(**axis_params)
    geom_irf["binsz_irf"] = self.settings["datasets"].get("binsz", None)
    geom_irf["margin_irf"] = self.settings["datasets"].get("margin", None)
    offset_max = Angle(self.settings["datasets"]["offset-max"])
    log.info("Creating datasets.")
    maker = MapDatasetMaker(offset_max=offset_max)
    maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    # `stacked` also serves as the reference dataset passed to maker.run()
    # in the non-stacked branch.
    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)
    if self.settings["datasets"]["stack-datasets"]:
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = maker.run(stacked, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            dataset.background_model.name = f"bkg_{dataset.name}"
            # TODO remove this once dataset and model have unique identifiers
            log.debug(dataset)
            stacked.stack(dataset)
        self._extract_irf_kernels(stacked)
        datasets = [stacked]
    else:
        datasets = []
        for obs in self.observations:
            log.info(f"Processing observation {obs.obs_id}")
            dataset = maker.run(stacked, obs)
            dataset = maker_safe_mask.run(dataset, obs)
            dataset.background_model.name = f"bkg_{dataset.name}"
            # TODO remove this once dataset and model have unique identifiers
            self._extract_irf_kernels(dataset)
            log.debug(dataset)
            datasets.append(dataset)
    self.datasets = Datasets(datasets)
def _map_making(self):
    """Make maps and datasets for 3d analysis."""
    log.info("Creating geometry.")
    geom = self._create_geometry()
    geom_settings = self.config.datasets.geom
    # Optional IRF-geometry overrides; None values fall back to defaults.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None)
    if geom_settings.axes.energy_true.min is not None:
        geom_irf["energy_axis_true"] = self._make_energy_axis(
            geom_settings.axes.energy_true
        )
    geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to("deg").value
    offset_max = geom_settings.selection.offset_max

    log.info("Creating datasets.")
    maker = MapDatasetMaker(selection=self.config.datasets.map_selection)
    safe_mask_selection = self.config.datasets.safe_mask.methods
    safe_mask_settings = self.config.datasets.safe_mask.settings
    maker_safe_mask = SafeMaskMaker(methods=safe_mask_selection, **safe_mask_settings)
    stacked = MapDataset.create(geom=geom, name="stacked", **geom_irf)

    # The original duplicated the whole per-observation loop in both
    # branches; the only difference is what happens to the reduced dataset,
    # so use a single loop and branch on `stack` inside it.
    stack = self.config.datasets.stack
    datasets = []
    for obs in self.observations:
        log.info(f"Processing observation {obs.obs_id}")
        # Cutout around the pointing keeps per-observation maps small.
        cutout = stacked.cutout(obs.pointing_radec, width=2 * offset_max)
        dataset = maker.run(cutout, obs)
        dataset = maker_safe_mask.run(dataset, obs)
        log.debug(dataset)
        if stack:
            stacked.stack(dataset)
        else:
            datasets.append(dataset)
    if stack:
        datasets = [stacked]
    self.datasets = Datasets(datasets)
def data_prep():
    # Build one cutout dataset per observation (joint analysis, no stacking)
    # and attach the same sky model to each.
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    # N_OBS copies of the same observation id.
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)
    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    offset_max = 4 * u.deg
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    # Reference dataset used only as a template for per-observation cutouts.
    stacked = MapDataset.create(geom=geom)
    spatial_model = PointSpatialModel(
        lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic"
    )
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="gc-source"
    )
    datasets = Datasets([])
    for idx, obs in enumerate(observations):
        # Cutout around the pointing keeps the per-observation maps small.
        cutout = stacked.cutout(
            obs.pointing_radec, width=2 * offset_max, name=f"dataset{idx}"
        )
        dataset = maker.run(cutout, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        dataset.models = model
        datasets.append(dataset)
    return datasets
def test_map_maker(pars, observations):
    # Stack all observations into one dataset and compare summary statistics
    # against reference values supplied by the `pars` fixture.
    stacked = MapDataset.create(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )

    maker = MapDatasetMaker(
        geom=pars["geom"],
        energy_axis_true=pars["e_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
        binsz_irf=pars["binsz_irf"],
        margin_irf=pars["margin_irf"],
    )
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="2 deg")

    for obs in observations:
        dataset = maker.run(obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)

    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)

    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)

    # The same reference values must hold after reducing the cube to 2D.
    image_dataset = stacked.to_image()

    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-4)

    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=1e-3)

    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-4)
def data_prep():
    # Stack N_OBS copies of the same CTA-1DC observation into one MapDataset
    # and attach a point-source model.
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)
    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )
    stacked = MapDataset.create(geom)
    maker = MapDatasetMaker()
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max="4 deg")
    for obs in observations:
        # `stacked` is passed as the reference geometry for the reduction.
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        stacked.stack(dataset)
    spatial_model = PointSpatialModel(
        lon_0="0.01 deg", lat_0="0.01 deg", frame="galactic"
    )
    spectral_model = ExpCutoffPowerLawSpectralModel(
        index=2,
        amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
        reference=1.0 * u.TeV,
        lambda_=0.1 / u.TeV,
    )
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="gc-source"
    )
    stacked.models = model
    stacked.name = "stacked_ds"
    return Datasets([stacked])
def prepare_dataset(filename_dataset):
    """Prepare dataset for a given skymodel."""
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    observation = Observation.create(
        obs_id=1001, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )

    # Reduce the observation into an empty dataset with the full selection.
    empty = MapDataset.create(
        WCS_GEOM, energy_axis_true=ENERGY_AXIS_TRUE, migra_axis=MIGRA_AXIS
    )
    components = ["exposure", "background", "psf", "edisp"]
    maker = MapDatasetMaker(selection=components)
    dataset = maker.run(empty, observation)

    # Persist the result, creating the output directory if necessary.
    filename_dataset.parent.mkdir(parents=True, exist_ok=True)
    log.info(f"Writing {filename_dataset}")
    dataset.write(filename_dataset, overwrite=True)
def test_map_maker_obs(observations):
    # Test for different spatial geoms and etrue, ereco bins
    geom_reco = geom(ebounds=[0.1, 1, 10])
    # Coarser true-energy geometry used for the IRF maps.
    geom_true = geom(ebounds=[0.1, 0.5, 2.5, 10.0], binsz=1.0)
    # Expected exposure geometry: reco spatial grid with the true-energy axis.
    geom_exp = geom(ebounds=[0.1, 0.5, 2.5, 10.0])
    maker_obs = MapDatasetMaker(
        geom=geom_reco, geom_true=geom_true, offset_max=2.0 * u.deg, cutout=False
    )
    map_dataset = maker_obs.run(observations[0])
    assert map_dataset.counts.geom == geom_reco
    assert map_dataset.background_model.map.geom == geom_reco
    assert map_dataset.exposure.geom == geom_exp
    # IRF maps follow the coarse geom_true spatial binning (5 x 10 pixels).
    assert map_dataset.edisp.edisp_map.data.shape == (3, 48, 5, 10)
    assert map_dataset.edisp.exposure_map.data.shape == (3, 1, 5, 10)
    assert map_dataset.psf.psf_map.data.shape == (3, 66, 5, 10)
    assert map_dataset.psf.exposure_map.data.shape == (3, 1, 5, 10)
    assert_allclose(map_dataset.gti.time_delta, 1800.0 * u.s)
    assert map_dataset.name == "obs_110380"
def data_prep():
    """Build per-observation MapDatasets with precomputed edisp/psf kernels."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    OBS_ID = 110380
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    energy_axis = MapAxis.from_bounds(
        0.1, 10, nbin=10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.02,
        width=(10, 8),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )

    src_pos = SkyCoord(0, 0, unit="deg", frame="galactic")

    offset_max = 4 * u.deg
    maker = MapDatasetMaker(offset_max=offset_max)
    # Reuse offset_max here — the original duplicated the value as a
    # hard-coded "4 deg" string, which could silently drift out of sync.
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)
    stacked = MapDataset.create(geom=geom)

    datasets = []
    for obs in observations:
        dataset = maker.run(stacked, obs)
        dataset = safe_mask_maker.run(dataset, obs)
        # Reduce the IRF maps to kernels at the source position so fitting
        # does not have to evaluate the full IRF maps.
        dataset.edisp = dataset.edisp.get_energy_dispersion(
            position=src_pos, e_reco=energy_axis.edges
        )
        dataset.psf = dataset.psf.get_psf_kernel(
            position=src_pos, geom=geom, max_radius="0.3 deg"
        )
        datasets.append(dataset)
    return datasets
def test_map_maker(pars, observations, keepdims):
    """Stack all observations and check summary stats against `pars`.

    `keepdims` is an unused fixture parameter kept for the parametrization.
    """
    stacked = MapDataset.create(geom=pars["geom"], geom_irf=pars["geom_true"])
    # The maker does not depend on the observation, so build it once —
    # the original re-created an identical maker on every loop iteration.
    maker = MapDatasetMaker(
        geom=pars["geom"],
        geom_true=pars["geom_true"],
        offset_max="2 deg",
        background_oversampling=pars.get("background_oversampling"),
    )
    for obs in observations:
        dataset = maker.run(obs)
        stacked.stack(dataset)

    counts = stacked.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = stacked.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.mean(), pars["exposure"], rtol=3e-3)

    background = stacked.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-5)

    # The same reference values must hold after reduction to a 2D image.
    image_dataset = stacked.to_image()

    counts = image_dataset.counts
    assert counts.unit == ""
    assert_allclose(counts.data.sum(), pars["counts"], rtol=1e-5)

    exposure = image_dataset.exposure
    assert exposure.unit == "m2 s"
    assert_allclose(exposure.data.sum(), pars["exposure_image"], rtol=3e-3)

    background = image_dataset.background_model.map
    assert background.unit == ""
    assert_allclose(background.data.sum(), pars["background"], rtol=1e-5)
def data_prep():
    """Build one stacked MapDataset per time interval, with a Crab model."""
    data_store = DataStore.from_dir("$GAMMAPY_DATA/hess-dl3-dr1/")
    OBS_ID = 23523
    obs_ids = OBS_ID * np.ones(N_OBS)
    observations = data_store.get_observations(obs_ids)

    time_intervals = [(obs.tstart, obs.tstop) for obs in observations]
    target_position = SkyCoord(ra=83.63308, dec=22.01450, unit="deg")

    emin, emax = [0.7, 10] * u.TeV
    energy_axis = MapAxis.from_bounds(
        emin.value, emax.value, 10, unit="TeV", name="energy", interp="log"
    )
    geom = WcsGeom.create(
        skydir=target_position,
        binsz=0.02,
        width=(2, 2),
        coordsys="CEL",
        proj="CAR",
        axes=[energy_axis],
    )
    energy_axis_true = MapAxis.from_bounds(
        0.1, 20, 20, unit="TeV", name="energy", interp="log"
    )
    offset_max = 2 * u.deg

    datasets = []
    maker = MapDatasetMaker(offset_max=offset_max)
    safe_mask_maker = SafeMaskMaker(methods=["offset-max"], offset_max=offset_max)

    for time_interval in time_intervals:
        # BUG FIX: keep the selection in a separate variable. The original
        # reassigned `observations`, so each interval filtered the
        # already-filtered result of the previous iteration instead of the
        # full observation list.
        obs_selected = observations.select_time(time_interval)
        # Proceed with further analysis only if there are observations
        # in the selected time window
        if len(obs_selected) == 0:
            log.warning(f"No observations in time interval: {time_interval}")
            continue

        stacked = MapDataset.create(geom=geom, energy_axis_true=energy_axis_true)
        for obs in obs_selected:
            dataset = maker.run(stacked, obs)
            dataset = safe_mask_maker.run(dataset, obs)
            stacked.stack(dataset)

        # Reduce the IRF maps to kernels at the target position.
        stacked.edisp = stacked.edisp.get_energy_dispersion(
            position=target_position, e_reco=energy_axis.edges
        )
        stacked.psf = stacked.psf.get_psf_kernel(
            position=target_position, geom=stacked.exposure.geom, max_radius="0.3 deg"
        )
        datasets.append(stacked)

    spatial_model = PointSpatialModel(
        lon_0=target_position.ra, lat_0=target_position.dec, frame="icrs"
    )
    spatial_model.lon_0.frozen = True
    spatial_model.lat_0.frozen = True

    spectral_model = PowerLawSpectralModel(
        index=2.6, amplitude=2.0e-11 * u.Unit("1 / (cm2 s TeV)"), reference=1 * u.TeV
    )
    spectral_model.index.frozen = False

    sky_model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name=""
    )

    # Each dataset gets its own copy of the model.
    for dataset in datasets:
        model = sky_model.copy(name="crab")
        dataset.model = model

    return datasets
def make_datasets_example():
    # Define which data to use and print some information
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )
    # Two small 1x1 deg geometries: one at the GC, one near G0.9+0.1.
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(1, 1),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(1, 1),
        coordsys="GAL",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]
    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = []
    # One point source with an exponential-cutoff power law per geometry.
    for ind, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(
            spatial_model=spatial_model, spectral_model=spectral_model, name=names[ind]
        )
        models.append(model_ecpl)
    # test to link a spectral parameter
    # NOTE(review): direct mutation of `.parameters.parameters` relies on the
    # old parameter-list API — verify against the gammapy version in use.
    params0 = models[0].spectral_model.parameters
    params1 = models[1].spectral_model.parameters
    ind = params0.parameters.index(params0["reference"])
    params0.parameters[ind] = params1["reference"]
    # update the sky model
    ind = models[0].parameters.parameters.index(models[0].parameters["reference"])
    models[0].parameters.parameters[ind] = params1["reference"]
    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")
    diffuse_model = SkyDiffuseCube.read(
        "$GAMMAPY_DATA/fermi_3fhl/gll_iem_v06_cutout.fits"
    )
    datasets_list = []
    for idx, geom in enumerate(geoms):
        observations = data_store.get_observations(obs_ids)
        stacked = MapDataset.create(geom=geom)
        stacked.background_model.name = "background_irf_" + names[idx]
        maker = MapDatasetMaker(geom=geom, offset_max=4.0 * u.deg)
        for obs in observations:
            dataset = maker.run(obs)
            stacked.stack(dataset)
        # Reduce the IRF maps to kernels at the map center.
        stacked.psf = stacked.psf.get_psf_kernel(
            position=geom.center_skydir, geom=geom, max_radius="0.3 deg"
        )
        stacked.edisp = stacked.edisp.get_energy_dispersion(
            position=geom.center_skydir, e_reco=energy_axis.edges
        )
        stacked.name = names[idx]
        stacked.model = models[idx] + diffuse_model
        datasets_list.append(stacked)
    datasets = Datasets(datasets_list)
    dataset0 = datasets.datasets[0]
    print("dataset0")
    print("counts sum : ", dataset0.counts.data.sum())
    print("expo sum : ", dataset0.exposure.data.sum())
    print("bkg0 sum : ", dataset0.background_model.evaluate().data.sum())
    path = "$GAMMAPY_DATA/tests/models/gc_example_"
    datasets.to_yaml(path, overwrite=True)
name="model_simu", ) print(model_simu) # Now, comes the main part of dataset simulation. We create an in-memory observation and an empty dataset. We then predict the number of counts for the given model, and Poission fluctuate it using `fake()` to make a simulated counts maps. Keep in mind that it is important to specify the `selection` of the maps that you want to produce # In[ ]: # Create an in-memory observation obs = Observation.create(pointing=pointing, livetime=livetime, irfs=irfs) print(obs) # Make the MapDataset empty = MapDataset.create(geom) maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"]) maker_safe_mask = SafeMaskMaker(methods=["offset-max"], offset_max=4.0 * u.deg) dataset = maker.run(empty, obs) dataset = maker_safe_mask.run(dataset, obs) print(dataset) # In[ ]: # Add the model on the dataset and Poission fluctuate dataset.models = model_simu dataset.fake() # Do a print on the dataset - there is now a counts maps print(dataset) # Now use this dataset as you would in all standard analysis. You can plot the maps, or proceed with your custom analysis. # In the next section, we show the standard 3D fitting as in [analysis_3d](analysis_3d.ipynb). # In[ ]: