def test_mde_run_switchoff(dataset):
    """Sample events with PSF, EDISP and background model all switched off."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(
        obs_id=1001, pointing=pointing, livetime=10.0 * u.hr, irfs=irfs
    )

    # Disable the IRF components so only the source signal is sampled.
    dataset.psf = None
    dataset.edisp = None
    dataset.background_model = None

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)
    table = events.table

    assert len(table) == 2407
    assert_allclose(table["ENERGY"][0], 2.2450239000119323, rtol=1e-5)
    assert_allclose(table["RA"][0], 266.9128884464542, rtol=1e-5)
    assert_allclose(table["DEC"][0], -29.034641131874313, rtol=1e-5)

    meta = table.meta
    assert meta["RA_PNT"] == 266.4049882865447
    assert meta["ONTIME"] == 36000.0
    assert meta["OBS_ID"] == 1001
    assert meta["RADECSYS"] == "icrs"
def test_mde_run(dataset):
    """Run the full event sampler (signal + IRFs) and pin the seeded output."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    pointing = SkyCoord(0, 0, unit="deg", frame="galactic")
    obs = Observation.create(
        obs_id=1001, pointing=pointing, livetime=10.0 * u.hr, irfs=irfs
    )

    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.run(dataset=dataset, observation=obs)
    table = events.table

    assert len(table) == 2422
    assert_allclose(table["ENERGY"][0], 1.56446303986587, rtol=1e-5)
    assert_allclose(table["RA"][0], 268.8180057255861, rtol=1e-5)
    assert_allclose(table["DEC"][0], -28.45051813404372, rtol=1e-5)

    meta = table.meta
    assert meta["RA_PNT"] == 266.4049882865447
    assert meta["ONTIME"] == 36000.0
    assert meta["OBS_ID"] == 1001
    assert meta["RADECSYS"] == "icrs"
def test_mde_sample_psf(dataset):
    """Check that applying the PSF shifts the sampled source coordinates."""
    sampler = MapDatasetEventSampler(random_state=0)
    events = sampler.sample_psf(dataset.psf, sampler.sample_sources(dataset=dataset))
    table = events.table

    assert len(table) == 2407

    assert_allclose(table["ENERGY_TRUE"][0], 2.245024, rtol=1e-5)
    assert table["ENERGY_TRUE"].unit == "TeV"

    assert_allclose(table["RA"][0], 266.909362, rtol=1e-5)
    assert table["RA"].unit == "deg"

    assert_allclose(table["DEC"][0], -29.039877, rtol=1e-5)
    assert table["DEC"].unit == "deg"
def test_mde_sample_background(dataset):
    """Check seeded background sampling: event count, first event, units, MC_ID."""
    sampler = MapDatasetEventSampler(random_state=0)
    table = sampler.sample_background(dataset=dataset).table

    assert len(table["ENERGY"]) == 15

    assert_allclose(table["ENERGY"][0], 1.894698, rtol=1e-5)
    assert table["ENERGY"].unit == "TeV"

    assert_allclose(table["RA"][0], 266.454448, rtol=1e-5)
    assert table["RA"].unit == "deg"

    assert_allclose(table["DEC"][0], -30.870316, rtol=1e-5)
    assert table["DEC"].unit == "deg"

    # Background events are tagged with MC_ID == 0.
    assert_allclose(table["MC_ID"][0], 0, rtol=1e-5)
def test_mde_sample_sources(dataset):
    """Check seeded source sampling: true coordinates, units, and MC_ID tag."""
    sampler = MapDatasetEventSampler(random_state=0)
    table = sampler.sample_sources(dataset=dataset).table

    assert len(table["ENERGY_TRUE"]) == 2407

    assert_allclose(table["ENERGY_TRUE"][0], 2.245024, rtol=1e-5)
    assert table["ENERGY_TRUE"].unit == "TeV"

    assert_allclose(table["RA_TRUE"][0], 266.912888, rtol=1e-5)
    assert table["RA_TRUE"].unit == "deg"

    assert_allclose(table["DEC_TRUE"][0], -29.034641, rtol=1e-5)
    assert table["DEC_TRUE"].unit == "deg"

    # Source events are tagged with MC_ID == 1 (0 is reserved for background).
    assert_allclose(table["MC_ID"][0], 1, rtol=1e-5)
def simulate_events(filename_model, filename_dataset, dataset, obs_id):
    """Simulate events for a given model and dataset.

    Parameters
    ----------
    filename_model : str
        Filename of the model definition.
    filename_dataset : str
        Filename of the dataset; used only to derive the output events path.
    dataset : `~gammapy.datasets.MapDataset`
        Dataset to sample events from. Its model list is extended in place
        with the models read from ``filename_model``.
    obs_id : int
        Identifier of the observation to simulate.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    log.info(f"Reading {filename_model}")
    models = Models.read(filename_model)
    # Extend rather than assign, so models already on the dataset are kept.
    dataset.models.extend(models)

    sampler = MapDatasetEventSampler(random_state=0)
    observation = Observation.create(
        obs_id=obs_id, pointing=POINTING, livetime=LIVETIME, irfs=irfs
    )
    events = sampler.run(dataset, observation)

    path = get_filename_events(filename_dataset, filename_model, obs_id)
    log.info(f"Writing {path}")
    path.parent.mkdir(exist_ok=True, parents=True)
    events.table.write(str(path), overwrite=True)
def simulate_events(filename_model, filename_dataset, nobs):
    """Simulate events for a given model and dataset.

    One event list is sampled and written per observation, for observation
    ids ``0 .. nobs - 1``.

    Parameters
    ----------
    filename_model : str
        Filename of the model definition.
    filename_dataset : str
        Filename of the dataset to use for simulation.
    nobs : int
        Number of observations to simulate.
    """
    log.info(f"Reading {IRF_FILE}")
    irfs = load_cta_irfs(IRF_FILE)

    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)

    log.info(f"Reading {filename_model}")
    models = Models.read(filename_model)
    # Extend rather than assign, so models already on the dataset are kept.
    dataset.models.extend(models)

    sampler = MapDatasetEventSampler(random_state=0)

    for obs_id in np.arange(nobs):
        observation = Observation.create(
            obs_id=obs_id, pointing=POINTING, livetime=LIVETIME, irfs=irfs
        )
        events = sampler.run(dataset, observation)

        path = get_filename_events(filename_dataset, filename_model, obs_id)
        log.info(f"Writing {path}")
        path.parent.mkdir(exist_ok=True, parents=True)
        events.table.write(str(path), overwrite=True)