def test_spectrum_dataset_on_off_to_yaml(tmpdir):
    spectrum_datasets_on_off = make_observation_list()
    datasets = Datasets(spectrum_datasets_on_off)
    datasets.write(path=tmpdir)

    datasets_read = Datasets.read(tmpdir / "_datasets.yaml", tmpdir / "_models.yaml")
    assert len(datasets_read) == len(datasets)
    assert datasets_read[0].name == datasets[0].name
    assert datasets_read[1].name == datasets[1].name
    assert datasets_read[1].counts.data.sum() == datasets[1].counts.data.sum()
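
# Companion check (sketch): the read call above implies that Datasets.write()
# with the default, empty prefix produces "_datasets.yaml" and "_models.yaml";
# asserting their existence makes that naming assumption explicit.
def test_spectrum_dataset_on_off_write_files(tmpdir):
    datasets = Datasets(make_observation_list())
    datasets.write(path=tmpdir)
    assert (tmpdir / "_datasets.yaml").exists()
    assert (tmpdir / "_models.yaml").exists()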
def test_datasets_io_no_model(tmpdir):
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    geom = WcsGeom.create(npix=(5, 5), axes=[axis])

    dataset_1 = MapDataset.create(geom, name="1")
    dataset_2 = MapDataset.create(geom, name="2")
    datasets = Datasets([dataset_1, dataset_2])
    datasets.write(path=tmpdir, prefix="test")

    filename_1 = tmpdir / "test_data_1.fits"
    assert filename_1.exists()

    filename_2 = tmpdir / "test_data_2.fits"
    assert filename_2.exists()
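
# Companion check (sketch): assuming the "<prefix>_datasets.yaml" naming
# implied by the previous test (default prefix "" -> "_datasets.yaml"),
# writing with prefix="test" should also produce a "test_datasets.yaml"
# index file next to the per-dataset FITS files.
def test_datasets_io_yaml_filename(tmpdir):
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    geom = WcsGeom.create(npix=(5, 5), axes=[axis])
    datasets = Datasets([MapDataset.create(geom, name="1")])
    datasets.write(path=tmpdir, prefix="test")
    assert (tmpdir / "test_datasets.yaml").exists()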
def run_region(self, kr, lon, lat, radius):
    # TODO: for now we have to read/create the all-sky maps in each job,
    # because a <functools._lru_cache_wrapper> object cannot be pickled.
    # Move this back to __init__ once that is fixed.

    # Exposure
    exposure_hpx = Map.read(
        "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
    )
    exposure_hpx.unit = "cm2 s"

    # IEM (interstellar emission model)
    iem_fermi_extra = Map.read("data/gll_iem_v06_extrapolated.fits")
    # norm=1.1, tilt=0.03: see paper, appendix A
    model_iem = SkyDiffuseCube(
        iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
    )

    # ROI
    roi_time = time()
    ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
    width = 2 * (radius + self.psf_margin)

    # Counts
    counts = Map.create(
        skydir=ROI_pos,
        width=width,
        proj="CAR",
        coordsys="GAL",
        binsz=1 / 8.0,
        axes=[self.energy_axis],
        dtype=float,
    )
    counts.fill_by_coord(
        {"skycoord": self.events.radec, "energy": self.events.energy}
    )

    axis = MapAxis.from_nodes(
        counts.geom.axes[0].center, name="energy", unit="GeV", interp="log"
    )
    wcs = counts.geom.wcs
    geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
    coords = counts.geom.get_coord()

    # Exposure interpolated onto the counts geometry
    data = exposure_hpx.interp_by_coord(coords)
    exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

    # PSF
    psf_kernel = PSFKernel.from_table_psf(
        self.psf, counts.geom, max_radius=self.psf_margin * u.deg
    )

    # Energy dispersion
    e_true = exposure.geom.axes[0].edges
    e_reco = counts.geom.axes[0].edges
    edisp = EnergyDispersion.from_diagonal_response(e_true=e_true, e_reco=e_reco)

    # Fit mask
    if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
        coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
    mask = (
        (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
        & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
        & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
        & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
    )
    mask_fermi = WcsNDMap(counts.geom, mask)

    # IEM background
    eval_iem = MapEvaluator(
        model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
    )
    bkg_iem = eval_iem.compute_npred()

    # Isotropic background
    eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
    bkg_iso = eval_iso.compute_npred()

    # Merge IEM and isotropic backgrounds: only one local normalization is fitted
    background_total = bkg_iem + bkg_iso
    background_model = BackgroundModel(background_total)
    background_model.parameters["norm"].min = 0.0

    # Source models
    in_roi = self.FHL3.positions.galactic.contained_by(wcs)
    FHL3_roi = []
    for ks in range(len(self.FHL3.table)):
        if in_roi[ks]:
            model = self.FHL3[ks].sky_model()
            model.spatial_model.parameters.freeze_all()  # freeze the spatial model
            model.spectral_model.parameters["amplitude"].min = 0.0
            if isinstance(model.spectral_model, PowerLawSpectralModel):
                model.spectral_model.parameters["index"].min = 0.1
                model.spectral_model.parameters["index"].max = 10.0
            else:
                model.spectral_model.parameters["alpha"].min = 0.1
                model.spectral_model.parameters["alpha"].max = 10.0
            FHL3_roi.append(model)
    model_total = SkyModels(FHL3_roi)

    # Dataset
    dataset = MapDataset(
        models=model_total,
        counts=counts,
        exposure=exposure,
        psf=psf_kernel,
        edisp=edisp,
        background_model=background_model,
        mask_fit=mask_fermi,
        name="3FHL_ROI_num" + str(kr),
    )
    cat_stat = dataset.stat_sum()

    datasets = Datasets([dataset])
    fit = Fit(datasets)
    results = fit.run(optimize_opts=self.optimize_opts)
    print("ROI_num", str(kr), "\n", results)
    fit_stat = datasets.stat_sum()

    if results.message != "Optimization failed.":
        datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
        np.save(
            self.resdir / f"3FHL_ROI_num{kr}_covariance.npy",
            results.parameters.covariance,
        )
        np.savez(
            self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
            message=results.message,
            stat=[cat_stat, fit_stat],
        )

    exec_time = time() - roi_time
    print("ROI", kr, " time (s): ", exec_time)

    # Flux points for the significant sources of this ROI
    for model in FHL3_roi:
        if (
            self.FHL3[model.name].data["ROI_num"] == kr
            and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
        ):
            flux_points = FluxPointsEstimator(
                datasets=datasets,
                e_edges=self.El_flux,
                source=model.name,
                sigma_ul=2.0,
            ).run()
            filename = self.resdir / f"{model.name}_flux_points.fits"
            flux_points.write(filename, overwrite=True)

    exec_time = time() - roi_time - exec_time
    print("ROI", kr, " Flux points time (s): ", exec_time)
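
# The TODO at the top of run_region notes that the all-sky maps are re-read in
# every job because lru_cache wrappers cannot be pickled; that re-reading is
# what keeps the object picklable for per-ROI worker processes. A minimal
# driver sketch, where `validation` (the object exposing run_region) and
# `regions` (an iterable of (lon, lat, radius) tuples) are hypothetical names
# not present in the original code:
def run_all_regions(validation, regions, n_jobs=4):
    from multiprocessing import Pool

    args = [(kr, lon, lat, radius) for kr, (lon, lat, radius) in enumerate(regions)]
    with Pool(n_jobs) as pool:
        # starmap pickles `validation` into each worker; run_region itself
        # re-creates the maps, so no unpicklable state crosses process borders
        pool.starmap(validation.run_region, args)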
filename = "$GAMMAPY_DATA/hawc_crab/HAWC19_flux_points.fits"
flux_points_hawc = FluxPoints.read(filename)
dataset_hawc = FluxPointsDataset(crab_model, flux_points_hawc, name="HAWC")


# ## Datasets serialization
#
# The `datasets` object contains each dataset previously defined.
# It can be saved on disk as datasets.yaml, models.yaml, and several data files
# specific to each dataset. The `datasets` can then be rebuilt later from these files.

# In[ ]:


datasets = Datasets([dataset_fermi, dataset_hess, dataset_hawc])

path = Path("crab-3datasets")
path.mkdir(exist_ok=True)

datasets.write(path=path, prefix="crab_10GeV_100TeV", overwrite=True)
filedata = path / "crab_10GeV_100TeV_datasets.yaml"
filemodel = path / "crab_10GeV_100TeV_models.yaml"
datasets = Datasets.read(filedata=filedata, filemodel=filemodel)


# ## Joint analysis
#
# We run the fit on the `Datasets` object, which includes one dataset per instrument.

# In[ ]:


get_ipython().run_cell_magic(
    'time',
    '',
    'fit_joint = Fit(datasets)\n'
    'results_joint = fit_joint.run()\n'
    'print(results_joint)\n'
    'print(results_joint.parameters.to_table())',
)
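
# Plain-Python equivalent of the %%time cell above (a sketch for running this
# exported notebook as a regular script, without IPython); time.perf_counter
# stands in for the magic's timing output:
import time

def run_joint_fit(datasets):
    t0 = time.perf_counter()
    fit_joint = Fit(datasets)
    results_joint = fit_joint.run()
    print(results_joint)
    print(results_joint.parameters.to_table())
    print(f"Wall time: {time.perf_counter() - t0:.1f} s")
    return results_joint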
def make_datasets_example():
    # Define which data to use and print some information
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )

    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(1, 1),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(1, 1),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = []

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(
            spatial_model=spatial_model, spectral_model=spectral_model, name=names[idx]
        )
        models.append(model_ecpl)

    # Test linking a spectral parameter
    params0 = models[0].spectral_model.parameters
    params1 = models[1].spectral_model.parameters
    params0.link("reference", params1["reference"])
    # Update the sky model
    models[0].parameters.link("reference", params1["reference"])

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_model = SkyDiffuseCube.read(
        "$GAMMAPY_DATA/fermi_3fhl/gll_iem_v06_cutout.fits"
    )

    datasets_list = []
    for idx, geom in enumerate(geoms):
        observations = data_store.get_observations(obs_ids)

        stacked = MapDataset.create(geom=geom)
        stacked.background_model.name = "background_irf_" + names[idx]

        maker = MapDatasetMaker(offset_max=4.0 * u.deg)
        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        stacked.psf = stacked.psf.get_psf_kernel(
            position=geom.center_skydir, geom=geom, max_radius="0.3 deg"
        )
        stacked.name = names[idx]
        stacked.models = models[idx] + diffuse_model
        datasets_list.append(stacked)

    datasets = Datasets(datasets_list)

    dataset0 = datasets[0]
    print("dataset0")
    print("counts sum : ", dataset0.counts.data.sum())
    print("expo sum : ", dataset0.exposure.data.sum())
    print("bkg0 sum : ", dataset0.background_model.evaluate().data.sum())

    datasets.write("$GAMMAPY_DATA/tests/models", prefix="gc_example_", overwrite=True)
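
# Script entry point (sketch). With prefix="gc_example_", the write call above
# presumably produces "gc_example__datasets.yaml" and "gc_example__models.yaml"
# under $GAMMAPY_DATA/tests/models, following the "<prefix>_datasets.yaml"
# naming seen in the tests above (an assumption, not verified here).
if __name__ == "__main__":
    make_datasets_example()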