def spectrum_dataset_crab_fine():
    """Return an empty `SpectrumDataset` on the Crab nebula with fine binning."""
    center = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")
    on_region = CircleSkyRegion(center, Angle(0.11, "deg"))
    energy_reco = np.logspace(-2, 2, 73) * u.TeV
    energy_true = np.logspace(-2, 2.5, 109) * u.TeV
    return SpectrumDataset.create(energy_reco, energy_true, region=on_region)
def spectrum_dataset_crab():
    """Return an empty `SpectrumDataset` on the Crab nebula region."""
    axis_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    axis_true = MapAxis.from_edges(
        np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true"
    )
    # Region defined via a DS9-style string; fine WCS binning for the geom.
    region_geom = RegionGeom.create(
        "icrs;circle(83.63, 22.01, 0.11)",
        axes=[axis_reco],
        binsz_wcs="0.01deg",
    )
    return SpectrumDataset.create(geom=region_geom, energy_axis_true=axis_true)
def create_datasets_1d(observations, target_position):
    """Run the 1D spectral data reduction chain for a list of observations.

    Parameters
    ----------
    observations : iterable
        Observations to reduce (presumably gammapy `Observation` objects —
        TODO confirm against caller).
    target_position : `~astropy.coordinates.SkyCoord`
        Center of the circular ON region.

    Returns
    -------
    datasets : list
        One ON/OFF spectrum dataset per observation that has OFF counts.
    """
    on_region_radius = Angle("0.11 deg")
    on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)
    # Target geometry definition
    e_reco = MapAxis.from_energy_bounds(0.23, 20, 12, "TeV")
    e_true = MapAxis.from_energy_bounds(0.1, 40, 40, "TeV", name="energy_true")
    # Data reduction makers
    dataset_maker = SpectrumDatasetMaker(
        containment_correction=True, selection=["counts", "exposure", "edisp"])
    bkg_maker = ReflectedRegionsBackgroundMaker()
    safe_mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
    datasets = []
    geom = RegionGeom(on_region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)
    for obs in observations:
        # Copy the empty template so each observation fills a fresh dataset.
        dataset = dataset_maker.run(dataset_empty.copy(), obs)
        dataset_on_off = bkg_maker.run(dataset, obs)
        # Keep only runs where at least one OFF count was found.
        if dataset_on_off.counts_off.data.sum() > 0:
            dataset_on_off = safe_mask_maker.run(dataset_on_off, obs)
            datasets.append(dataset_on_off)
    return datasets
def test_reflected_bkg_maker_with_wobble_finder(on_region, observations, exclusion_mask):
    """Check that WobbleRegionsFinder yields the requested number of OFF regions."""
    datasets = []
    reflected_bkg_maker = ReflectedRegionsBackgroundMaker(
        region_finder=WobbleRegionsFinder(n_off_regions=3),
        exclusion_mask=exclusion_mask,
    )
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    geom = RegionGeom(region=on_region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom)
    spectrum_dataset_maker = SpectrumDatasetMaker(selection=["counts"])
    for obs in observations:
        dataset = spectrum_dataset_maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)
    # The OFF geometry is a compound region; both runs must contain
    # exactly the 3 wobble OFF regions requested above.
    regions_0 = compound_region_to_regions(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_regions(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 3)
    assert_allclose(len(regions_1), 3)
def test_reflected_bkg_maker(on_region, reflected_bkg_maker, observations):
    """Check OFF counts and the number of reflected OFF regions per observation."""
    datasets = []
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")
    dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=on_region)
    maker = SpectrumDatasetMaker(selection=["counts"])
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)
    # Reference values for the two fixture observations.
    assert_allclose(datasets[0].counts_off.data.sum(), 76)
    assert_allclose(datasets[1].counts_off.data.sum(), 60)
    # Both runs should find 11 reflected OFF regions.
    regions_0 = compound_region_to_list(datasets[0].counts_off.geom.region)
    regions_1 = compound_region_to_list(datasets[1].counts_off.geom.region)
    assert_allclose(len(regions_0), 11)
    assert_allclose(len(regions_1), 11)
def get_masked_dataset(ds0): """ This is a trick to get a masked datset from a dataset. Parameters ---------- ds0 : Dataset object The original complete unmasked dataset. Returns ------- masked_dataset : Dataset object The original dataset with masking applied. """ # print("dataset_tools/get_maked_dataset(ds) : masking disabled") # return ds0 e_true = ds0.exposure.geom.axes["energy_true"] e_reco = ds0.counts.geom.axes[0] region = ds0.counts.geom.region masked_dataset = SpectrumDataset.create(e_true=e_true, e_reco=e_reco, region=region, name=ds0.name) masked_dataset.models = ds0.models masked_dataset.stack(ds0) return masked_dataset
def spectrum_dataset_crab_fine():
    """Empty spectrum dataset on the Crab position with fine MapAxis binning."""
    center = SkyCoord(83.63, 22.01, unit="deg", frame="icrs")
    on_region = CircleSkyRegion(center, Angle(0.11, "deg"))
    axis_reco = MapAxis.from_edges(np.logspace(-2, 2, 73) * u.TeV, name="energy")
    axis_true = MapAxis.from_edges(
        np.logspace(-2, 2.5, 109) * u.TeV, name="energy_true"
    )
    return SpectrumDataset.create(axis_reco, axis_true, region=on_region)
def spectrum_dataset_gc():
    """Empty spectrum dataset centered on the Galactic Center."""
    gc_position = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    on_region = CircleSkyRegion(gc_position, Angle(0.11, "deg"))
    axis_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    axis_true = MapAxis.from_edges(
        np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true"
    )
    return SpectrumDataset.create(axis_reco, axis_true, region=on_region)
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations, caplog):
    """When no OFF region can be found, the maker must warn and blank the dataset."""
    # Position deliberately chosen so the reflected-region search fails.
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)
    maker = SpectrumDatasetMaker(selection=["counts"])
    datasets = []
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)
    # No OFF counts, zero OFF acceptance, and the whole dataset masked out.
    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
    assert_allclose(datasets[0].mask_safe.data, False)
    # Failure must be reported through logging as warnings.
    assert "WARNING" in [record.levelname for record in caplog.records]
    message1 = f"ReflectedRegionsBackgroundMaker failed. " \
               f"No OFF region found outside exclusion mask for {datasets[0].name}."
    message2 = f"ReflectedRegionsBackgroundMaker failed. " \
               f"Setting {datasets[0].name} mask to False."
    assert message1 in [record.message for record in caplog.records]
    assert message2 in [record.message for record in caplog.records]
def spectrum_dataset_gc():
    """Empty spectrum dataset on the Galactic Center (quantity-edge API)."""
    gc_position = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
    on_region = CircleSkyRegion(gc_position, Angle(0.11, "deg"))
    energy_reco = np.logspace(0, 2, 5) * u.TeV
    energy_true = np.logspace(-0.5, 2, 11) * u.TeV
    return SpectrumDataset.create(energy_reco, energy_true, region=on_region)
def test_stack_no_livetime():
    """Stacking a dataset whose livetime is unset must raise ValueError."""
    axis = MapAxis.from_energy_bounds(1, 10, 3, name="energy", unit="TeV")
    first = SpectrumDataset.create(e_reco=axis)
    first.livetime = None
    second = first.copy()
    with pytest.raises(ValueError):
        first.stack(second)
def test_stack_no_livetime():
    """Stacking a dataset whose livetime is unset must raise ValueError."""
    edges = np.logspace(0, 1, 3) * u.TeV
    first = SpectrumDataset.create(e_reco=edges)
    first.livetime = None
    second = first.copy()
    with pytest.raises(ValueError):
        first.stack(second)
def simulate(): irfs = load_cta_irfs( "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits" ) # Reconstructed and true energy axis center = SkyCoord(0.0, 0.0, unit="deg", frame="galactic") energy_axis = MapAxis.from_edges( np.logspace(-0.5, 1.0, 10), unit="TeV", name="energy", interp="log", ) energy_axis_true = MapAxis.from_edges( np.logspace(-1.2, 2.0, 31), unit="TeV", name="energy_true", interp="log", ) on_region_radius = Angle("0.11 deg") on_region = CircleSkyRegion(center=center, radius=on_region_radius) pointing = SkyCoord(0.5, 0.5, unit="deg", frame="galactic") spectral_model = PowerLawSpectralModel( index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV" ) temporal_model = ExpDecayTemporalModel(t0="6 h", t_ref=gti_t0.mjd * u.d) model_simu = SkyModel( spectral_model=spectral_model, temporal_model=temporal_model, name="model-simu", ) lvtm = np.ones(N_OBS) * 1.0 * u.hr tstart = 1.0 * u.hr datasets = [] for i in range(N_OBS): obs = Observation.create( pointing=pointing, livetime=lvtm[i], tstart=tstart, irfs=irfs, reference_time=gti_t0, ) empty = SpectrumDataset.create( e_reco=energy_axis, e_true=energy_axis_true, region=on_region, name=f"dataset_{i}", ) maker = SpectrumDatasetMaker(selection=["aeff", "background", "edisp"]) dataset = maker.run(empty, obs) dataset.models = model_simu dataset.fake() datasets.append(dataset) tstart = tstart + 2.0 * u.hr return datasets
def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab, observations_hess_dl3):
    """Check exposure, background and edisp for HESS DL3 data reduction
    without region-center averaging (``use_region_center=False``).

    Fix: the original initialized ``datasets = []`` twice; the first
    assignment was dead and has been removed.
    """
    maker = SpectrumDatasetMaker(use_region_center=False)
    datasets = []
    for obs in observations_hess_dl3:
        dataset = maker.run(spectrum_dataset_crab, obs)
        datasets.append(dataset)
    # Exposure
    assert_allclose(datasets[0].exposure.data.sum(), 7374718644.757894)
    assert_allclose(datasets[1].exposure.data.sum(), 6691006466.659032)
    # Background
    assert_allclose(datasets[0].npred_background().data.sum(), 7.7429157, rtol=1e-5)
    assert_allclose(datasets[1].npred_background().data.sum(), 5.7314076, rtol=1e-5)
    # Compare background with using bigger region
    e_reco = datasets[0].background.geom.axes["energy"]
    e_true = datasets[0].exposure.geom.axes["energy_true"]
    geom_bigger = RegionGeom.create("icrs;circle(83.63, 22.01, 0.22)", axes=[e_reco])
    datasets_big_region = []
    bigger_region_dataset = SpectrumDataset.create(
        geom=geom_bigger, energy_axis_true=e_true
    )
    for obs in observations_hess_dl3:
        dataset = maker.run(bigger_region_dataset, obs)
        datasets_big_region.append(dataset)
    # NOTE(review): this mixes datasets[0] with datasets_big_region[1];
    # the region geometries are per-dataset-template (not per-observation),
    # so the ratio should be index-independent — but matching indices would
    # read better. Confirm before "fixing".
    ratio_regions = (
        datasets[0].counts.geom.solid_angle()
        / datasets_big_region[1].counts.geom.solid_angle()
    )
    # Background should scale with the solid angle of the extraction region.
    ratio_bg_1 = (
        datasets[0].npred_background().data.sum()
        / datasets_big_region[0].npred_background().data.sum()
    )
    ratio_bg_2 = (
        datasets[1].npred_background().data.sum()
        / datasets_big_region[1].npred_background().data.sum()
    )
    assert_allclose(ratio_bg_1, ratio_regions, rtol=1e-2)
    assert_allclose(ratio_bg_2, ratio_regions, rtol=1e-2)
    # Edisp -> it isn't exactly 8, is that right? it also isn't without averaging
    assert_allclose(
        datasets[0].edisp.edisp_map.data[:, :, 0, 0].sum(), e_reco.nbin * 2, rtol=1e-1
    )
    assert_allclose(
        datasets[1].edisp.edisp_map.data[:, :, 0, 0].sum(), e_reco.nbin * 2, rtol=1e-1
    )
def test_npred_models():
    """npred of two identical power-law models should be twice one model's npred."""
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
    spectrum_dataset = SpectrumDataset.create(e_reco=e_reco)
    spectrum_dataset.livetime = 1 * u.h
    # Flat effective area so npred is analytically predictable.
    spectrum_dataset.aeff.data.data += 1e10 * u.Unit("cm2")
    pwl_1 = PowerLawSpectralModel(index=2)
    pwl_2 = PowerLawSpectralModel(index=2)
    model_1 = SkyModel(spectral_model=pwl_1)
    model_2 = SkyModel(spectral_model=pwl_2)
    spectrum_dataset.models = Models([model_1, model_2])
    npred = spectrum_dataset.npred()
    assert_allclose(npred.data.sum(), 64.8)
def test_spectrum_dataset_create():
    """An empty dataset should have zero counts/background and empty GTI/mask."""
    e_reco = u.Quantity([0.1, 1, 10.0], "TeV")
    e_true = u.Quantity([0.05, 0.5, 5, 20.0], "TeV")
    empty_spectrum_dataset = SpectrumDataset.create(e_reco, e_true, name="test")
    assert empty_spectrum_dataset.name == "test"
    assert empty_spectrum_dataset.counts.data.sum() == 0
    # 2 reco bins from the 3 reco edges above.
    assert empty_spectrum_dataset.data_shape[0] == 2
    assert empty_spectrum_dataset.background.data.sum() == 0
    assert empty_spectrum_dataset.background.geom.axes[0].nbin == 2
    # 3 true-energy bins from the 4 true edges above.
    assert empty_spectrum_dataset.aeff.data.axis("energy_true").nbin == 3
    assert empty_spectrum_dataset.edisp.data.axis("energy").nbin == 2
    assert empty_spectrum_dataset.livetime.value == 0
    assert len(empty_spectrum_dataset.gti.table) == 0
    assert empty_spectrum_dataset.energy_range[0] is None
    assert_allclose(empty_spectrum_dataset.mask_safe, 0)
def get_spectrumdataset(name):
    """Build an empty Crab-centered spectrum dataset with the given name."""
    crab_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region = CircleSkyRegion(center=crab_position, radius=Angle("0.11 deg"))
    axis_reco = MapAxis.from_energy_bounds(
        0.1, 40, nbin=15, per_decade=True, unit="TeV", name="energy"
    )
    axis_true = MapAxis.from_energy_bounds(
        0.05, 100, nbin=20, per_decade=True, unit="TeV", name="energy_true"
    )
    geometry = RegionGeom.create(region=on_region, axes=[axis_reco])
    return SpectrumDataset.create(
        geom=geometry, energy_axis_true=axis_true, name=name
    )
def test_fov_bkg_maker_spectrumdataset(obs_dataset):
    """FoVBackgroundMaker must reject 1D spectrum / region datasets with TypeError."""
    from regions import CircleSkyRegion

    maker = FoVBackgroundMaker()
    energy_axis = MapAxis.from_edges([1, 10], unit="TeV", name="energy", interp="log")
    region = CircleSkyRegion(obs_dataset._geom.center_skydir, Angle('0.1 deg'))
    geom = RegionGeom.create(region, axes=[energy_axis])
    dataset = SpectrumDataset.create(geom)
    # A SpectrumDataset is not a MapDataset: must raise.
    with pytest.raises(TypeError):
        maker.run(dataset)
    # A region map dataset derived from the map dataset must also raise.
    region_dataset = obs_dataset.to_region_map_dataset(region)
    with pytest.raises(TypeError):
        maker.run(region_dataset)
def get_spectrumdataset_rad_max(name, e_min=0.005 * u.TeV):
    """Empty point-like dataset for energy-dependent (RAD_MAX) extraction.

    Parameters
    ----------
    name : str
        Name assigned to the created dataset.
    e_min : `~astropy.units.Quantity`, optional
        Lower bound of both energy axes. Default 0.005 TeV.
    """
    crab_position = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    # Point region: the ON extent comes from the RAD_MAX IRF, not a circle.
    on_center = PointSkyRegion(crab_position)
    axis_reco = MapAxis.from_energy_bounds(
        e_min, 50, nbin=28, per_decade=False, unit="TeV", name="energy"
    )
    axis_true = MapAxis.from_energy_bounds(
        e_min, 50, nbin=20, per_decade=False, unit="TeV", name="energy_true"
    )
    geometry = RegionGeom.create(region=on_center, axes=[axis_reco])
    return SpectrumDataset.create(
        geom=geometry, energy_axis_true=axis_true, name=name
    )
def _create_reference_dataset(self, name=None):
    """Create the reference dataset for the current analysis.

    Builds a `SpectrumDataset` for 1D analysis or a `MapDataset` otherwise,
    forwarding optional true-energy axis and IRF bin size from the config.

    Parameters
    ----------
    name : str, optional
        Name for the created dataset.
    """
    log.debug("Creating target Dataset.")
    geom = self._create_geometry()
    geom_settings = self.config.datasets.geom
    # Optional IRF geometry overrides; only set when configured.
    geom_irf = dict(energy_axis_true=None, binsz_irf=None)
    if geom_settings.axes.energy_true.min is not None:
        geom_irf["energy_axis_true"] = self._make_energy_axis(
            geom_settings.axes.energy_true, name="energy_true")
    if geom_settings.wcs.binsize_irf is not None:
        geom_irf["binsz_irf"] = geom_settings.wcs.binsize_irf.to(
            "deg").value
    if self.config.datasets.type == '1d':
        return SpectrumDataset.create(geom, name=name, **geom_irf)
    else:
        return MapDataset.create(geom, name=name, **geom_irf)
def test_spectrum_dataset_create():
    """An empty dataset should have zero counts/background and empty GTI/mask."""
    e_reco = MapAxis.from_edges(u.Quantity([0.1, 1, 10.0], "TeV"), name="energy")
    e_true = MapAxis.from_edges(
        u.Quantity([0.05, 0.5, 5, 20.0], "TeV"), name="energy_true"
    )
    empty_spectrum_dataset = SpectrumDataset.create(e_reco, e_true, name="test")
    assert empty_spectrum_dataset.name == "test"
    assert empty_spectrum_dataset.counts.data.sum() == 0
    # 2 reco bins from the 3 reco edges above.
    assert empty_spectrum_dataset.data_shape[0] == 2
    assert empty_spectrum_dataset.npred_background().data.sum() == 0
    assert empty_spectrum_dataset.npred_background().geom.axes[0].nbin == 2
    # 3 true-energy bins from the 4 true edges above.
    assert empty_spectrum_dataset.exposure.geom.axes[0].nbin == 3
    assert empty_spectrum_dataset.edisp.edisp_map.geom.axes["energy"].nbin == 2
    assert empty_spectrum_dataset.gti.time_sum.value == 0
    assert len(empty_spectrum_dataset.gti.table) == 0
    assert empty_spectrum_dataset.energy_range[0] is None
    assert_allclose(empty_spectrum_dataset.mask_safe, 0)
def test_npred_models():
    """npred of two identical models is twice one model's npred_sig."""
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
    spectrum_dataset = SpectrumDataset.create(e_reco=e_reco)
    # Flat exposure so npred is analytically predictable.
    spectrum_dataset.exposure.quantity = 1e10 * u.Unit("cm2 h")
    pwl_1 = PowerLawSpectralModel(index=2)
    pwl_2 = PowerLawSpectralModel(index=2)
    model_1 = SkyModel(spectral_model=pwl_1)
    model_2 = SkyModel(spectral_model=pwl_2)
    spectrum_dataset.models = Models([model_1, model_2])
    npred = spectrum_dataset.npred()
    assert_allclose(npred.data.sum(), 64.8)
    # With no background, total signal equals total npred.
    npred_sig = spectrum_dataset.npred_sig()
    assert_allclose(npred_sig.data.sum(), 64.8)
    # Selecting one of the two identical models halves the prediction.
    npred_sig_model1 = spectrum_dataset.npred_sig(model=model_1)
    assert_allclose(npred_sig_model1.data.sum(), 32.4)
def test_spectrum_dataset_create():
    """An empty geom-based dataset should have zero counts/background and empty GTI/mask."""
    e_reco = MapAxis.from_edges(u.Quantity([0.1, 1, 10.0], "TeV"), name="energy")
    e_true = MapAxis.from_edges(u.Quantity([0.05, 0.5, 5, 20.0], "TeV"), name="energy_true")
    # Region-less geometry: only the energy axis matters here.
    geom = RegionGeom(region=None, axes=[e_reco])
    empty_spectrum_dataset = SpectrumDataset.create(geom, energy_axis_true=e_true, name="test")
    assert empty_spectrum_dataset.name == "test"
    assert empty_spectrum_dataset.counts.data.sum() == 0
    # 2 reco bins from the 3 reco edges above.
    assert empty_spectrum_dataset.data_shape[0] == 2
    assert empty_spectrum_dataset.background.data.sum() == 0
    assert empty_spectrum_dataset.background.geom.axes[0].nbin == 2
    # 3 true-energy bins from the 4 true edges above.
    assert empty_spectrum_dataset.exposure.geom.axes[0].nbin == 3
    assert empty_spectrum_dataset.edisp.edisp_map.geom.axes["energy"].nbin == 2
    assert empty_spectrum_dataset.gti.time_sum.value == 0
    assert len(empty_spectrum_dataset.gti.table) == 0
    # No safe range defined yet, so the energy range is NaN.
    assert np.isnan(empty_spectrum_dataset.energy_range[0])
    assert_allclose(empty_spectrum_dataset.mask_safe, 0)
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    """When no OFF region is found, counts_off is None and acceptance_off is 0."""
    # Position deliberately chosen so the reflected-region search fails.
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)
    maker = SpectrumDatasetMaker(selection=["counts"])
    datasets = []
    e_reco = np.logspace(0, 2, 5) * u.TeV
    e_true = np.logspace(-0.5, 2, 11) * u.TeV
    dataset_empty = SpectrumDataset.create(e_reco=e_reco, e_true=e_true, region=region)
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)
    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
def test_run(observations, phase_bkg_maker):
    """Check phase-resolved background extraction on the Vela pulsar run."""
    maker = SpectrumDatasetMaker()
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")
    # Vela pulsar position.
    pos = SkyCoord("08h35m20.65525s", "-45d10m35.1545s", frame="icrs")
    radius = Angle(0.2, "deg")
    region = SphericalCircleSkyRegion(pos, radius)
    dataset_empty = SpectrumDataset.create(e_reco, e_true, region=region)
    obs = observations["111630"]
    dataset = maker.run(dataset_empty, obs)
    dataset_on_off = phase_bkg_maker.run(dataset, obs)
    # ON/OFF phase-interval fractions and reference count sums.
    assert_allclose(dataset_on_off.acceptance, 0.1)
    assert_allclose(dataset_on_off.acceptance_off, 0.3)
    assert_allclose(dataset_on_off.counts.data.sum(), 28)
    assert_allclose(dataset_on_off.counts_off.data.sum(), 57)
def test_reflected_bkg_maker_no_off(reflected_bkg_maker, observations):
    """When no OFF region is found, counts_off is None and acceptance_off is 0."""
    # Position deliberately chosen so the reflected-region search fails.
    pos = SkyCoord(83.6333313, 21.51444435, unit="deg", frame="icrs")
    radius = Angle(0.11, "deg")
    region = CircleSkyRegion(pos, radius)
    maker = SpectrumDatasetMaker(selection=["counts"])
    datasets = []
    e_reco = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    e_true = MapAxis.from_edges(np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true")
    geom = RegionGeom.create(region=region, axes=[e_reco])
    dataset_empty = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)
    for obs in observations:
        dataset = maker.run(dataset_empty, obs)
        dataset_on_off = reflected_bkg_maker.run(dataset, obs)
        datasets.append(dataset_on_off)
    assert datasets[0].counts_off is None
    assert_allclose(datasets[0].acceptance_off, 0)
def test_npred_models():
    """npred of two identical models is twice one model's npred_signal."""
    e_reco = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=3)
    geom = RegionGeom(region=None, axes=[e_reco])
    spectrum_dataset = SpectrumDataset.create(geom=geom)
    # Flat exposure so npred is analytically predictable.
    spectrum_dataset.exposure.quantity = 1e10 * u.Unit("cm2 h")
    pwl_1 = PowerLawSpectralModel(index=2)
    pwl_2 = PowerLawSpectralModel(index=2)
    model_1 = SkyModel(spectral_model=pwl_1)
    model_2 = SkyModel(spectral_model=pwl_2)
    spectrum_dataset.models = Models([model_1, model_2])
    npred = spectrum_dataset.npred()
    assert_allclose(npred.data.sum(), 64.8)
    # With no background, total signal equals total npred.
    npred_sig = spectrum_dataset.npred_signal()
    assert_allclose(npred_sig.data.sum(), 64.8)
    # Selecting one of the two identical models halves the prediction.
    npred_sig_model1 = spectrum_dataset.npred_signal(model_name=model_1.name)
    assert_allclose(npred_sig_model1.data.sum(), 32.4)
def generate_dataset(Eflux, flux, Erange=None,
                     tstart=Time('2000-01-01 02:00:00', scale='utc'),
                     tobs=100 * u.s,
                     irf_file=None,
                     alpha=1 / 5,
                     name=None,
                     fake=True,
                     onoff=True,
                     seed='random-seed',
                     debug=False):
    """
    Generate a dataset from a list of energies and flux points either as
    a SpectrumDataset or a SpectrumDatasetOnOff.

    Note :
    - in SpectrumDataset, the background counts are assumed precisely known
    and are not fluctuated.
    - in SpectrumDatasetOnOff, the background counts (off counts) are
    fluctuated from the IRF known values.

    Fix: ``Erange=None`` (the documented default) previously crashed with
    ``TypeError`` because ``Erange[0]`` was indexed unconditionally; the
    energy mask is now applied only when ``Erange`` is given.

    Parameters
    ----------
    Eflux : Quantity
        Energies at which the flux is given.
    flux : Quantity
        Flux corresponding to the given energies.
    Erange : List, optional
        The energy boundaries within which the flux is defined, if not over
        all energies. The default is None.
    tstart : Time object, optional
        Start date of the dataset.
        The default is Time('2000-01-01 02:00:00', scale='utc').
    tobs : Quantity, optional
        Duration of the observation. The default is 100*u.s.
    irf_file : String, optional
        The IRF file name. The default is None.
    alpha : Float, optional
        The on over off surface ratio for the On-Off analysis.
        The default is 1/5.
    name : String, optional
        The dataset name, also used to name the spectrum. The default is None.
    fake : Boolean, optional
        If True, the dataset counts are fluctuated. The default is True.
    onoff : Boolean, optional
        If True, use SpectrumDatasetOnOff, otherwise SpectrumDataset.
        The default is True.
    seed : String, optional
        The seed for the random generator; if an integer will generate the
        same random series at each run. The default is 'random-seed'.
    debug : Boolean
        If True, let's talk a bit. The default is False.

    Returns
    -------
    ds : Dataset object
        The dataset.
    """
    random_state = get_random_state(seed)

    # --- Define ON region
    on_pointing = SkyCoord(ra=0 * u.deg, dec=0 * u.deg, frame="icrs")
    on_region = CircleSkyRegion(center=on_pointing, radius=0.5 * u.deg)

    # Define energy axes (see spectrum analysis notebook).
    # All datasets should share the same axes; note that linear spacing is
    # clearly problematic for power-law fluxes.
    unit = u.GeV
    E1v = min(Eflux).to(unit).value
    E2v = max(Eflux).to(unit).value
    # Reco axis slightly narrower than the true axis to limit edge effects.
    ereco_axis = MapAxis.from_energy_bounds(1.1 * E1v * unit,
                                            0.9 * E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy")
    etrue_axis = MapAxis.from_energy_bounds(E1v * unit,
                                            E2v * unit,
                                            nbin=4,
                                            per_decade=True,
                                            name="energy_true")
    if debug:
        print("Dataset ", name)
        print("Etrue : ", etrue_axis.edges)
        print("Ereco : ", ereco_axis.edges)

    # Load IRF and build the source model from the tabulated flux points.
    irf = load_cta_irfs(irf_file)
    spec = TemplateSpectralModel(energy=Eflux,
                                 values=flux,
                                 interp_kwargs={"values_scale": "log"})
    model = SkyModel(spectral_model=spec, name="Spec" + str(name))
    obs = Observation.create(obs_id=1,
                             pointing=on_pointing,
                             livetime=tobs,
                             irfs=irf,
                             deadtime_fraction=0,
                             reference_time=tstart)

    ds_empty = SpectrumDataset.create(e_reco=ereco_axis,
                                      e_true=etrue_axis,
                                      region=on_region,
                                      name=name)
    maker = SpectrumDatasetMaker(containment_correction=False,
                                 selection=["exposure", "background", "edisp"])
    ds = maker.run(ds_empty, obs)
    ds.models = model

    # Restrict the safe mask to Erange, only when an explicit range is given.
    mask = ds.mask_safe.data
    if Erange is not None:
        e_mask = ds.mask_safe.geom.energy_mask(energy_min=Erange[0],
                                               energy_max=Erange[1])
        mask = e_mask & mask
    ds.mask_safe = RegionNDMap(ds.mask_safe.geom, data=mask)

    ds.fake(random_state=random_state)  # Fake is mandatory ?

    # Transform SpectrumDataset into SpectrumDatasetOnOff if needed
    if onoff:
        ds = SpectrumDatasetOnOff.from_spectrum_dataset(dataset=ds,
                                                        acceptance=1,
                                                        acceptance_off=1 / alpha)
        print("Transformed in ONOFF")

    if fake:
        print(" Fluctuations : seed = ", seed)
        if onoff:
            ds.fake(npred_background=ds.npred_background())
        else:
            ds.fake(random_state=random_state)

    print("ds.energy_range = ", ds.energy_range)
    return ds
bias=0, sigma=0.2, ) observation = Observation.create( obs_id=0, pointing=SkyCoord("0d", "0d", frame="icrs"), irfs={"aeff": aeff, "edisp": edisp}, tstart=0 * u.h, tstop=0.5 * u.h, location=observatory_locations["hess"], ) geom = RegionGeom.create("icrs;circle(0, 0, 0.1)", axes=[energy_reco]) stacked = SpectrumDataset.create(geom=geom, energy_axis_true=energy_true) maker = SpectrumDatasetMaker(selection=["edisp", "exposure"]) dataset_1 = maker.run(stacked.copy(), observation=observation) dataset_2 = maker.run(stacked.copy(), observation=observation) pwl = PowerLawSpectralModel() model = SkyModel(spectral_model=pwl, name="test-source") dataset_1.mask_safe = geom.energy_mask(energy_min=2 * u.TeV) dataset_2.mask_safe = geom.energy_mask(energy_min=0.6 * u.TeV) dataset_1.models = model dataset_2.models = model dataset_1.counts = dataset_1.npred()
def _spectrum_extraction(self):
    """Run all steps for the spectrum extraction.

    Builds the ON region, dataset/background/safe-mask makers from the
    analysis config, reduces each observation to a spectrum dataset and
    stores the result in ``self.datasets`` (stacked if configured).
    """
    log.info("Reducing spectrum datasets.")
    datasets_settings = self.config.datasets
    # ON region from the configured center and radius.
    on_lon = datasets_settings.on_region.lon
    on_lat = datasets_settings.on_region.lat
    on_center = SkyCoord(on_lon, on_lat, frame=datasets_settings.on_region.frame)
    on_region = CircleSkyRegion(on_center, datasets_settings.on_region.radius)
    # Dataset maker configuration.
    maker_config = {}
    if datasets_settings.containment_correction:
        maker_config[
            "containment_correction"] = datasets_settings.containment_correction
    e_reco = self._make_energy_axis(datasets_settings.geom.axes.energy)
    maker_config["selection"] = ["counts", "exposure", "edisp"]
    dataset_maker = SpectrumDatasetMaker(**maker_config)
    # Background maker: only the reflected-regions method is supported here.
    bkg_maker_config = {}
    if datasets_settings.background.exclusion:
        exclusion_region = Map.read(datasets_settings.background.exclusion)
        bkg_maker_config["exclusion_mask"] = exclusion_region
    bkg_maker_config.update(datasets_settings.background.parameters)
    bkg_method = datasets_settings.background.method
    if bkg_method == "reflected":
        bkg_maker = ReflectedRegionsBackgroundMaker(**bkg_maker_config)
        log.debug(
            f"Creating ReflectedRegionsBackgroundMaker with arguments {bkg_maker_config}"
        )
    else:
        bkg_maker = None
        log.warning(
            f"No background maker set for 1d analysis. Check configuration."
        )
    # Safe-mask maker from configured methods/parameters.
    safe_mask_selection = datasets_settings.safe_mask.methods
    safe_mask_settings = datasets_settings.safe_mask.parameters
    safe_mask_maker = SafeMaskMaker(methods=safe_mask_selection,
                                    **safe_mask_settings)
    # Reference (empty) dataset copied for each observation.
    e_true = self._make_energy_axis(
        datasets_settings.geom.axes.energy_true, name="energy_true")
    geom = RegionGeom.create(region=on_region, axes=[e_reco])
    reference = SpectrumDataset.create(geom=geom, energy_axis_true=e_true)
    datasets = []
    for obs in self.observations:
        log.info(f"Processing observation {obs.obs_id}")
        dataset = dataset_maker.run(reference.copy(), obs)
        if bkg_maker is not None:
            dataset = bkg_maker.run(dataset, obs)
            # Observations with no OFF region found are dropped.
            if dataset.counts_off is None:
                log.info(
                    f"No OFF region found for observation {obs.obs_id}. Discarding."
                )
                continue
        dataset = safe_mask_maker.run(dataset, obs)
        log.debug(dataset)
        datasets.append(dataset)
    self.datasets = Datasets(datasets)
    # Optionally stack everything into a single dataset.
    if datasets_settings.stack:
        stacked = self.datasets.stack_reduce(name="stacked")
        self.datasets = Datasets([stacked])