def test_map_dataset_fits_io(tmpdir, sky_model, geom, geom_etrue):
    """Round-trip a MapDataset through FITS and verify HDU layout and contents."""
    dataset = get_map_dataset(sky_model, geom, geom_etrue)
    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit

    # Serialized HDU names must match the expected layout exactly, in order.
    hdus = dataset.to_hdulist()
    hdu_names = [hdu.name for hdu in hdus]
    expected_names = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP_MATRIX",
        "EDISP_MATRIX_EBOUNDS",
        "PSF_KERNEL",
        "PSF_KERNEL_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
    ]
    assert hdu_names == expected_names

    # Write to disk and read back; the model is not serialized.
    path = tmpdir / "test.fits"
    dataset.write(path)
    dataset_new = MapDataset.read(path)
    assert dataset_new.model is None
    assert dataset_new.mask.dtype == bool

    # Array contents survive the round trip.
    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(dataset.background_model.map.data, dataset_new.background_model.map.data)
    assert_allclose(dataset.edisp.data.data.value, dataset_new.edisp.data.data.value)
    assert_allclose(dataset.psf.data, dataset_new.psf.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit, dataset_new.mask_fit)
    assert_allclose(dataset.mask_safe, dataset_new.mask_safe)

    # Geometries and energy axes survive as well.
    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.background_model.map.geom == dataset_new.background_model.map.geom
    assert_allclose(dataset.edisp.e_true.edges.value, dataset_new.edisp.e_true.edges.value)
    assert dataset.edisp.e_true.unit == dataset_new.edisp.e_true.unit
    assert_allclose(dataset.edisp.e_reco.edges.value, dataset_new.edisp.e_reco.edges.value)
    assert dataset.edisp.e_true.unit == dataset_new.edisp.e_true.unit
def read_dataset(filename_dataset, filename_model, obs_id):
    """Read a MapDataset from disk and fill its counts from the matching event list.

    Parameters
    ----------
    filename_dataset : path-like
        Path of the serialized MapDataset.
    filename_model : path-like
        Model file used to locate the corresponding events file.
    obs_id : int or str
        Observation identifier used to locate the events file.

    Returns
    -------
    MapDataset
        The dataset with its ``counts`` map filled from the events.
    """
    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)

    filename_events = get_filename_events(filename_dataset, filename_model, obs_id)
    log.info(f"Reading {filename_events}")
    event_list = EventList.read(filename_events)

    # Bin the simulated events into a counts map on the analysis geometry.
    counts_map = Map.from_geom(WCS_GEOM)
    counts_map.fill_events(event_list)
    dataset.counts = counts_map
    return dataset
def all_cmd(model, obs_ids, obs_all, simple, core):
    """Run the full simulate / fit / plot pipeline for one model or all models.

    Parameters
    ----------
    model : str
        Model name, or ``"all"`` to run every entry in ``AVAILABLE_MODELS``.
    obs_ids : int
        Number of observations to simulate per model.
    obs_all : bool
        If True, simulate and fit every observation in parallel and produce
        pull distributions; otherwise run a single observation end-to-end.
    simple : bool
        If True, use the simplified dataset preparation.
    core : int
        Number of worker processes for the multiprocessing pools.
    """
    models = AVAILABLE_MODELS if model == "all" else [model]
    binned = False

    filename_dataset = get_filename_dataset(LIVETIME)
    if simple:
        filename_dataset = Path(str(filename_dataset).replace("dataset", "dataset_simple"))
        prepare_dataset_simple(filename_dataset)
    else:
        prepare_dataset(filename_dataset)

    dataset = MapDataset.read(filename_dataset)

    if obs_all:
        for model in models:
            # Build the model path per iteration: the original computed it once
            # from the initial `model` argument, so with model == "all" every
            # iteration incorrectly used models/all.yaml.
            filename_model = BASE_PATH / f"models/{model}.yaml"
            # Parse into a fresh local: the original overwrote `obs_ids`
            # inside the loop, corrupting it for subsequent models.
            model_obs_ids = parse_obs_ids(f"0:{obs_ids}", model)

            with multiprocessing.Pool(processes=core) as pool:
                args = zip(
                    repeat(filename_model),
                    repeat(filename_dataset),
                    repeat(dataset),
                    model_obs_ids,
                )
                pool.starmap(simulate_events, args)

            with multiprocessing.Pool(processes=core) as pool:
                args = zip(
                    repeat(filename_model),
                    repeat(filename_dataset),
                    model_obs_ids,
                    repeat(binned),
                    repeat(simple),
                )
                pool.starmap(fit_model, args)

            fit_gather(model)
            plot_pull_distribution(model)
    else:
        for model in models:
            # Per-model path here as well (same bug as above in the original).
            filename_model = BASE_PATH / f"models/{model}.yaml"
            simulate_events(
                filename_model=filename_model,
                filename_dataset=filename_dataset,
                nobs=obs_ids,
            )
            fit_model(
                filename_model=filename_model,
                filename_dataset=filename_dataset,
                obs_id=str(obs_ids - 1),
                binned=binned,
                simple=simple,
            )
            plot_results(
                filename_model=filename_model,
                filename_dataset=filename_dataset,
                obs_id=str(obs_ids - 1),
            )
def simulate_events_cmd(model, nobs, core=None):
    """Simulate event lists for ``nobs`` observations of one model (or all).

    Parameters
    ----------
    model : str
        Model name, or ``"all"`` to run every entry in ``AVAILABLE_MODELS``.
    nobs : int
        Number of observations to simulate per model.
    core : int, optional
        Number of worker processes. ``None`` (the default) lets
        ``multiprocessing.Pool`` use all available cores. The original body
        referenced an undefined name ``core``; this parameter is added in a
        backward-compatible way.
    """
    models = AVAILABLE_MODELS if model == "all" else [model]

    filename_dataset = get_filename_dataset(LIVETIME)
    dataset = MapDataset.read(filename_dataset)

    for model in models:
        filename_model = BASE_PATH / f"models/{model}.yaml"
        # Build the observation ids from `nobs` per model: the original read
        # an undefined `obs_ids` (UnboundLocalError) and never used `nobs`.
        obs_ids = parse_obs_ids(f"0:{nobs}", model)
        with multiprocessing.Pool(processes=core) as pool:
            args = zip(
                repeat(filename_model),
                repeat(filename_dataset),
                repeat(dataset),
                obs_ids,
            )
            pool.starmap(simulate_events, args)
def read():
    """Load the ``N_OBS`` simulated datasets and attach the GC point-source model.

    Returns
    -------
    list of MapDataset
        One dataset per observation, each with the same sky model assigned.
    """
    source_model = SkyModel(
        spatial_model=PointSpatialModel(
            lon_0="-0.05 deg", lat_0="-0.05 deg", frame="galactic"
        ),
        spectral_model=ExpCutoffPowerLawSpectralModel(
            index=2,
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        ),
        name="gc-source",
    )

    datasets = []
    for idx in range(N_OBS):
        dataset = MapDataset.read(f"dataset-{idx}.fits")
        dataset.model = source_model
        datasets.append(dataset)
    return datasets
def test_map_dataset_fits_io(tmp_path, sky_model, geom, geom_etrue):
    """Round-trip a MapDataset (with GTI) through FITS and verify the contents."""
    dataset = get_map_dataset(sky_model, geom, geom_etrue)
    dataset.counts = dataset.npred()
    dataset.mask_safe = dataset.mask_fit

    gti = GTI.create([0 * u.s], [1 * u.h], reference_time="2010-01-01T00:00:00")
    dataset.gti = gti

    # Serialized HDU names must match the expected layout exactly, in order.
    hdulist = dataset.to_hdulist()
    actual = [hdu.name for hdu in hdulist]

    desired = [
        "PRIMARY",
        "COUNTS",
        "COUNTS_BANDS",
        "EXPOSURE",
        "EXPOSURE_BANDS",
        "BACKGROUND",
        "BACKGROUND_BANDS",
        "EDISP",
        "EDISP_BANDS",
        "EDISP_EXPOSURE",
        "EDISP_EXPOSURE_BANDS",
        "PSF",
        "PSF_BANDS",
        "PSF_EXPOSURE",
        "PSF_EXPOSURE_BANDS",
        "MASK_SAFE",
        "MASK_SAFE_BANDS",
        "MASK_FIT",
        "MASK_FIT_BANDS",
        "GTI",
    ]

    assert actual == desired

    dataset.write(tmp_path / "test.fits")
    dataset_new = MapDataset.read(tmp_path / "test.fits")

    assert dataset_new.models is None
    assert dataset_new.mask.dtype == bool

    assert_allclose(dataset.counts.data, dataset_new.counts.data)
    assert_allclose(dataset.background_model.map.data, dataset_new.background_model.map.data)
    assert_allclose(dataset.edisp.edisp_map.data, dataset_new.edisp.edisp_map.data)
    assert_allclose(dataset.psf.psf_map.data, dataset_new.psf.psf_map.data)
    assert_allclose(dataset.exposure.data, dataset_new.exposure.data)
    assert_allclose(dataset.mask_fit.data, dataset_new.mask_fit.data)
    assert_allclose(dataset.mask_safe.data, dataset_new.mask_safe.data)

    assert dataset.counts.geom == dataset_new.counts.geom
    assert dataset.exposure.geom == dataset_new.exposure.geom
    assert dataset.background_model.map.geom == dataset_new.background_model.map.geom
    assert dataset.edisp.edisp_map.geom == dataset_new.edisp.edisp_map.geom

    assert_allclose(dataset.gti.time_sum.to_value("s"), dataset_new.gti.time_sum.to_value("s"))

    # To test io of psf and edisp map
    stacked = MapDataset.create(geom)
    # Write under tmp_path: the original wrote "test.fits" into the current
    # working directory, leaving a file artifact behind after the test run.
    filename_stacked = tmp_path / "test-stacked.fits"
    stacked.write(filename_stacked, overwrite=True)
    stacked1 = MapDataset.read(filename_stacked)
    assert stacked1.psf.psf_map is not None
    assert stacked1.psf.exposure_map is not None
    assert stacked1.edisp.edisp_map is not None
    assert stacked1.edisp.exposure_map is not None
    # NOTE(review): this checks the freshly created `stacked`, not the
    # read-back `stacked1` — possibly intentional, but worth confirming.
    assert stacked.mask.dtype == bool

    assert_allclose(stacked1.psf.psf_map, stacked.psf.psf_map)
    assert_allclose(stacked1.edisp.edisp_map, stacked.edisp.edisp_map)
def read(filename):
    """Load a MapDataset from ``filename`` and return it."""
    dataset = MapDataset.read(filename)
    return dataset