def to_wcs_nd_map(self, energy_axis_mode='center'):
    """Convert to a `gammapy.maps.WcsNDMap`.

    There is no copy of the ``data`` or ``wcs`` object, this conversion
    is cheap. This is meant to help migrate code using `SkyCube`
    over to the new maps classes.
    """
    from gammapy.maps import WcsNDMap, WcsGeom, MapAxis

    # Dispatch on the axis mode; both the energies() call and the
    # MapAxis factory take the same mode/constructor pairing.
    axis_factories = {
        'center': MapAxis.from_nodes,
        'edges': MapAxis.from_edges,
    }
    if energy_axis_mode not in axis_factories:
        raise ValueError('Invalid energy_axis_mode: {}'.format(energy_axis_mode))

    energy = self.energies(mode=energy_axis_mode)
    energy_axis = axis_factories[energy_axis_mode](energy.value, unit=energy.unit)

    # Axis order in SkyCube: energy, lat, lon
    npix = (self.data.shape[2], self.data.shape[1])

    geom = WcsGeom(wcs=self.wcs, npix=npix, axes=[energy_axis])

    # TODO: change maps and SkyCube to have a unit attribute
    # For now, SkyCube is a mix of numpy array and quantity in `data`
    # and we just strip the unit here
    data = np.asarray(self.data)
    # unit = getattr(self.data, 'unit', None)

    return WcsNDMap(geom=geom, data=data)
# Second source: power law spectrum (spatial_model_1/2 and spectral_model_1
# are defined earlier, outside this excerpt)
spectral_model_2 = PowerLawSpectralModel(
    index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
)

sky_model_1 = SkyModel(
    spatial_model=spatial_model_1, spectral_model=spectral_model_1, name="source-1"
)

sky_model_2 = SkyModel(
    spatial_model=spatial_model_2, spectral_model=spectral_model_2, name="source-2"
)

# combined model with both sources
models = sky_model_1 + sky_model_2

# Define map geometry
axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 10), unit="TeV", name="energy")
geom = WcsGeom.create(
    skydir=(0, 0), binsz=0.02, width=(2, 2), coordsys="GAL", axes=[axis]
)

# Define some observation parameters
# we are not simulating many pointings / observations
pointing = SkyCoord(0.2, 0.5, unit="deg", frame="galactic")
livetime = 20 * u.hour

exposure_map = make_map_exposure_true_energy(
    pointing=pointing, livetime=livetime, aeff=aeff, geom=geom
)

dataset = MapDataset(model=models, exposure=exposure_map)
def spectrum_dataset_crab():
    """Empty `SpectrumDataset` for a circular ON region at the Crab position."""
    reco_axis = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    true_axis = MapAxis.from_edges(
        np.logspace(-0.5, 2, 11) * u.TeV, name="energy_true"
    )
    region_geom = RegionGeom.create(
        "icrs;circle(83.63, 22.01, 0.11)", axes=[reco_axis], binsz_wcs="0.01deg"
    )
    return SpectrumDataset.create(geom=region_geom, energy_axis_true=true_axis)
# In[ ]: for e_min in [10, 100, 1000] * u.GeV: n = (events.energy > e_min).sum() print("Events above {0:4.0f}: {1:5.0f}".format(e_min, n)) # ## Counts # # Let us start to prepare things for an 3D map analysis of the Galactic center region with Gammapy. The first thing we do is to define the map geometry. We chose a TAN projection centered on position ``(glon, glat) = (0, 0)`` with pixel size 0.1 deg, and four energy bins. # In[ ]: gc_pos = SkyCoord(0, 0, unit="deg", frame="galactic") energy_axis = MapAxis.from_edges([10, 30, 100, 300, 2000], name="energy", unit="GeV", interp="log") counts = Map.create( skydir=gc_pos, npix=(100, 80), proj="TAN", coordsys="GAL", binsz=0.1, axes=[energy_axis], dtype=float, ) # We put this call into the same Jupyter cell as the Map.create # because otherwise we could accidentally fill the counts # multiple times when executing the ``fill_by_coord`` multiple times. counts.fill_by_coord({"skycoord": events.radec, "energy": events.energy})
def make_observation_list():
    """Create two `SpectrumDatasetOnOff` observations with dummy IRFs.

    Returns
    -------
    obs_list : list of `SpectrumDatasetOnOff`
        Two observations sharing the same ON counts but different OFF
        counts, acceptances and GTIs.
    """
    nbin = 3
    energy = np.logspace(-1, 1, nbin + 1) * u.TeV
    livetime = 2 * u.h

    data_on = np.arange(nbin)
    dataoff_1 = np.ones(3)
    dataoff_2 = np.ones(3) * 3
    # middle bin left empty in both OFF vectors
    dataoff_1[1] = 0
    dataoff_2[1] = 0

    axis = MapAxis.from_edges(energy, name="energy", interp="log")
    axis_true = axis.copy(name="energy_true")

    geom = RegionGeom(region=None, axes=[axis])
    geom_true = RegionGeom(region=None, axes=[axis_true])

    on_vector = RegionNDMap.from_geom(geom=geom, data=data_on)
    off_vector1 = RegionNDMap.from_geom(geom=geom, data=dataoff_1)
    off_vector2 = RegionNDMap.from_geom(geom=geom, data=dataoff_2)
    mask_safe = RegionNDMap.from_geom(geom, dtype=bool)
    mask_safe.data += True

    aeff = RegionNDMap.from_geom(geom_true, data=1, unit="m2")
    # BUG FIX: the true-energy axis must be ``axis_true`` (name
    # "energy_true"), not the reco axis -- it was built above for this.
    edisp = EDispKernelMap.from_gauss(energy_axis=axis,
                                      energy_axis_true=axis_true,
                                      sigma=0.2,
                                      bias=0,
                                      geom=geom)

    time_ref = Time("2010-01-01")
    gti1 = make_gti({
        "START": [5, 6, 1, 2],
        "STOP": [8, 7, 3, 4]
    }, time_ref=time_ref)
    gti2 = make_gti({"START": [14], "STOP": [15]}, time_ref=time_ref)

    obs1 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector1,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=2,
        name="1",
        gti=gti1,
    )
    obs2 = SpectrumDatasetOnOff(
        counts=on_vector,
        counts_off=off_vector2,
        aeff=aeff,
        edisp=edisp,
        livetime=livetime,
        mask_safe=mask_safe,
        acceptance=1,
        acceptance_off=4,
        name="2",
        gti=gti2,
    )
    obs_list = [obs1, obs2]
    return obs_list
def test_up_downsample_consistency(factor):
    """Upsampling then downsampling by the same factor must round-trip the edges."""
    original = MapAxis.from_edges([0, 1, 3, 7, 13], name="test", interp="lin")
    round_tripped = original.upsample(factor).downsample(factor)
    assert_allclose(original.edges, round_tripped.edges)
def __init__(self, selection="short", savefig=True):
    """Set up the 3FHL validation run.

    Loads the event list, builds the energy axes, derives the PSF
    margin for masking, configures the isotropic diffuse model and
    selects the catalog ROIs to process.

    Parameters
    ----------
    selection : {"short", "long", "debug"}
        Which predefined set of ROIs to analyse.
    savefig : bool
        Whether figures should be saved during the run.
    """
    log.info("Executing __init__()")
    self.resdir = BASE_PATH / "results"
    self.savefig = savefig

    # event list
    self.events = EventList.read(
        "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_events_selected.fits.gz")

    # energies
    self.El_flux = [10.0, 20.0, 50.0, 150.0, 500.0, 2000.0] * u.GeV
    El_fit = 10**np.arange(1, 3.31, 0.1) * u.GeV
    self.energy_axis = MapAxis.from_edges(El_fit,
                                          name="energy",
                                          unit="GeV",
                                          interp="log")

    # psf margin for mask
    psf = PSFMap.read("$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz",
                      format="gtpsf")
    psf_r99max = np.max(
        psf.containment_radius(fraction=0.99, energy_true=El_fit))
    # round the 99% containment radius up to the next 0.1 deg
    self.psf_margin = np.ceil(psf_r99max.value * 10) / 10.0

    # iso norm=0.92 see paper appendix A
    self.model_iso = create_fermi_isotropic_diffuse_model(
        filename="data/iso_P8R2_SOURCE_V6_v06_extrapolated.txt",
        interp_kwargs={"fill_value": None},
    )
    self.model_iso.spectral_model.model2.norm.value = 0.92

    # regions selection
    file3fhl = "$GAMMAPY_DATA/catalogs/fermi/gll_psch_v13.fit.gz"
    self.FHL3 = SourceCatalog3FHL(file3fhl)
    hdulist = fits.open(make_path(file3fhl))
    self.ROIs = hdulist["ROIs"].data
    Scat = hdulist[1].data
    # ROI numbers ordered by decreasing average significance
    order = np.argsort(Scat.Signif_Avg)[::-1]
    ROIs_ord = Scat.ROI_num[order]

    if selection == "short":
        self.ROIs_sel = [430, 135, 118, 212, 277, 42, 272, 495]
        # Crab, Vela, high-lat, +some fast regions
    elif selection == "long":
        # get small regions with few sources among the most significant
        indexes = np.unique(ROIs_ord, return_index=True)[1]
        ROIs_ord = [ROIs_ord[index] for index in sorted(indexes)]
        self.ROIs_sel = [
            kr for kr in ROIs_ord
            if sum(Scat.ROI_num == kr) <= 4 and self.ROIs.RADIUS[kr] < 6
        ][:100]
    elif selection == "debug":
        self.ROIs_sel = [135]  # Vela region
    else:
        raise ValueError(f"Invalid selection: {selection!r}")

    # fit options
    self.fit_opts = {
        "backend": "minuit",
        "optimize_opts": {
            "tol": 10.0,
            "strategy": 2
        },
    }
    # calculate flux points only for sources significant above this threshold
    self.sig_cut = 8.0

    # diagnostics stored to produce plots and outputs
    self.diags = {
        "message": [],
        "stat": [],
        "params": {},
        "errel": {},
        "compatibility": {},
        "cat_fp_sel": [],
    }
    self.diags["errel"]["flux_points"] = []
    # per-model parameter accumulators, filled during the runs
    keys = [
        "PL_tags",
        "PL_index",
        "PL_amplitude",
        "LP_tags",
        "LP_alpha",
        "LP_beta",
        "LP_amplitude",
    ]
    for key in keys:
        self.diags["params"][key] = []
"e_true": None, "exclusion_mask": Map.from_geom(geom(ebounds=[0.1, 10])), "counts": 34366, "exposure": 5.843302e08, "exposure_image": 1.16866e11, "background": 30424.451, "binsz_irf": 0.5, "migra": None, }, { # Test for different e_true and e_reco bins "geom": geom(ebounds=[0.1, 1, 10]), "e_true": MapAxis.from_edges([0.1, 0.5, 2.5, 10.0], name="energy_true", unit="TeV", interp="log"), "counts": 34366, "exposure": 9.951827e08, "exposure_image": 5.971096e11, "background": 28760.283, "background_oversampling": 2, "binsz_irf": 0.5, "migra": None,
def geom_image(): 
    """Galactic WCS geometry with a single log energy bin (0.1-10 TeV)."""
    edges = np.logspace(-1.0, 1.0, 2)
    energy_axis = MapAxis.from_edges(edges, name="energy", unit=u.TeV, interp="log")
    return WcsGeom.create(
        skydir=(0, 0), binsz=0.02, width=(2, 2), frame="galactic", axes=[energy_axis]
    )
def spectrum_dataset_gc():
    """Empty `SpectrumDataset` for a circular ON region at the GC."""
    reco_axis = MapAxis.from_edges(np.logspace(0, 2, 5) * u.TeV, name="energy")
    true_axis = MapAxis.from_edges(np.logspace(-1, 2, 13) * u.TeV, name="energy_true")
    region_geom = RegionGeom.create("galactic;circle(0, 0, 0.11)", axes=[reco_axis])
    return SpectrumDataset.create(geom=region_geom, energy_axis_true=true_axis)
def spectrum_dataset_crab_fine():
    """Empty Crab `SpectrumDataset` with fine reco and true energy binning."""
    true_axis = MapAxis.from_edges(np.logspace(-2, 2.5, 109) * u.TeV, name="energy_true")
    reco_axis = MapAxis.from_energy_edges(np.logspace(-2, 2, 73) * u.TeV)
    region_geom = RegionGeom.create("icrs;circle(83.63, 22.01, 0.11)", axes=[reco_axis])
    return SpectrumDataset.create(geom=region_geom, energy_axis_true=true_axis)
def make_datasets_example():
    """Build and write two example `MapDataset`s (GC and G0.9).

    Each dataset stacks three CTA-1DC observations on its own geometry
    and carries a point source, a diffuse template model and the
    stacked background model. Results are written under
    ``$GAMMAPY_DATA/tests/models``.
    """
    # Define which data to use and print some information
    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    # one point source per geometry: GC and G0.9
    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = Models()

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(
            spatial_model=spatial_model, spectral_model=spectral_model, name=names[idx]
        )
        models.append(model_ecpl)

    # link the reference energy parameter of the two sources
    models["gc"].spectral_model.reference = models["g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_spatial = TemplateSpatialModel.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz"
    )
    diffuse_model = SkyModel(PowerLawSpectralModel(), diffuse_spatial)

    maker = MapDatasetMaker()
    datasets = Datasets()

    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        # keep the stacked background, then attach source + diffuse models
        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )
def make_all_models():
    """Make an instance of each model, for testing.

    Generator yielding one instance per registered model class; the
    yield order is part of the fixture contract for downstream tests.
    """
    yield Model.create("ConstantSpatialModel", "spatial")
    map_constantmodel = Map.create(npix=(10, 20), unit="sr-1")
    yield Model.create("TemplateSpatialModel", "spatial", map=map_constantmodel)
    yield Model.create(
        "DiskSpatialModel", "spatial", lon_0="1 deg", lat_0="2 deg", r_0="3 deg"
    )
    # "gauss" is the registry tag for the Gaussian spatial model
    yield Model.create("gauss", "spatial", lon_0="1 deg", lat_0="2 deg", sigma="3 deg")
    yield Model.create("PointSpatialModel", "spatial", lon_0="1 deg", lat_0="2 deg")
    yield Model.create(
        "ShellSpatialModel",
        "spatial",
        lon_0="1 deg",
        lat_0="2 deg",
        radius="3 deg",
        width="4 deg",
    )
    yield Model.create("ConstantSpectralModel", "spectral", const="99 cm-2 s-1 TeV-1")
    yield Model.create(
        "CompoundSpectralModel",
        "spectral",
        model1=Model.create("PowerLawSpectralModel", "spectral"),
        model2=Model.create("PowerLawSpectralModel", "spectral"),
        operator=np.add,
    )
    yield Model.create("PowerLawSpectralModel", "spectral")
    yield Model.create("PowerLawNormSpectralModel", "spectral")
    yield Model.create("PowerLaw2SpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLawSpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLawNormSpectralModel", "spectral")
    yield Model.create("ExpCutoffPowerLaw3FGLSpectralModel", "spectral")
    yield Model.create("SuperExpCutoffPowerLaw3FGLSpectralModel", "spectral")
    yield Model.create("SuperExpCutoffPowerLaw4FGLSpectralModel", "spectral")
    yield Model.create("LogParabolaSpectralModel", "spectral")
    yield Model.create("LogParabolaNormSpectralModel", "spectral")
    # deliberately nonsensical units (cm) to exercise model creation
    yield Model.create(
        "TemplateSpectralModel", "spectral", energy=[1, 2] * u.cm, values=[3, 4] * u.cm
    )  # TODO: add unit validation?
    yield Model.create("GaussianSpectralModel", "spectral")
    # TODO: yield Model.create("AbsorbedSpectralModel")
    # TODO: yield Model.create("NaimaSpectralModel")
    # TODO: yield Model.create("ScaleSpectralModel")
    yield Model.create("ConstantTemporalModel", "temporal")
    yield Model.create("LightCurveTemplateTemporalModel", "temporal", Table())
    yield Model.create(
        "SkyModel",
        spatial_model=Model.create("ConstantSpatialModel", "spatial"),
        spectral_model=Model.create("PowerLawSpectralModel", "spectral"),
    )
    m1 = Map.create(
        npix=(10, 20, 30), axes=[MapAxis.from_nodes([1, 2] * u.TeV, name="energy")]
    )
    yield Model.create("SkyDiffuseCube", map=m1)
    m2 = Map.create(
        npix=(10, 20, 30), axes=[MapAxis.from_edges([1, 2] * u.TeV, name="energy")]
    )
    yield Model.create("BackgroundModel", map=m2)
data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/") # In[ ]: # Select some observations from these dataset by hand obs_ids = [110380, 111140, 111159] observations = data_store.get_observations(obs_ids) # ### Prepare input maps # # Now we define a reference geometry for our analysis, We choose a WCS based gemoetry with a binsize of 0.02 deg and also define an energy axis: # In[ ]: energy_axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 10), unit="TeV", name="energy", interp="log") geom = WcsGeom.create( skydir=(0, 0), binsz=0.02, width=(10, 8), coordsys="GAL", proj="CAR", axes=[energy_axis], ) # In addition we define the center coordinate and the FoV offset cut: # In[ ]: # Source position
# # # Define map geometry GLON = hdul[0].header['PSI_0'] * u.Unit("deg") GLAT = hdul[0].header['THETA_0'] * u.Unit("deg") src_pos = SkyCoord(GLON, GLAT, frame="galactic") emin = 0.03 emax = 100 unit = "TeV" lg_emin = np.log10(emin) lg_emax = np.log10(emax) ENERGY_BINS = 31 axis = MapAxis.from_edges( np.logspace(lg_emin, lg_emax, ENERGY_BINS), unit=unit, name="energy", interp="log", ) geom = WcsGeom.create(skydir=src_pos, binsz=0.02, width=(2, 2), frame="galactic", axes=[axis]) # ## Build model and create dataset # **Declare constants and parameters for our DM model** #JFAC = 2.00e19 * u.Unit("GeV2 cm-5") # <--- Reticulum II Point Source JFAC = 3.27e19 * u.Unit("GeV2 cm-5") # <--- Reticulum II Extended #JFAC = 1.26e20 * u.Unit("GeV2 cm-5") # <--- Draco I Extended
def to_cube(image):
    """Lift a 2D image map onto a cube with a single dummy energy bin."""
    # introduce a fake energy axis for now
    fake_axis = MapAxis.from_edges([1, 10] * u.TeV, name="energy")
    cube_geom = image.geom.to_cube([fake_axis])
    return WcsNDMap.from_geom(geom=cube_geom, data=image.data)
def test_get_axis_index_by_name():
    """Axis lookup by name returns its index; unknown names raise ValueError."""
    energy_axis = MapAxis.from_edges([1, 5], name="energy")
    wcs_geom = WcsGeom.create(width=5, binsz=1.0, axes=[energy_axis])

    assert wcs_geom.axes.index("energy") == 0

    with pytest.raises(ValueError):
        wcs_geom.axes.index("time")
def test_downsample(region):
    """Downsampling the energy axis by 2 merges the two log bins into one."""
    energy_axis = MapAxis.from_edges(
        [1, 3.162278, 10] * u.TeV, name="energy", interp="log"
    )
    region_geom = RegionGeom.create(region, axes=[energy_axis])
    downsampled = region_geom.downsample(factor=2, axis_name="energy")
    assert_allclose(downsampled.axes[0].edges.value, [1.0, 10.0], rtol=1e-5)
def energy_axis_ref():
    """Reference energy axis: nine 1 TeV wide bins from 1 to 10 TeV."""
    bin_edges = np.arange(1, 11) * u.TeV
    return MapAxis.from_edges(bin_edges, name="energy")
def test_repr(region):
    """repr() of a region geometry names both the geom and the region class."""
    energy_axis = MapAxis.from_edges(
        [1, 3.162278, 10] * u.TeV, name="energy", interp="log"
    )
    region_geom = RegionGeom.create(region, axes=[energy_axis])
    text = repr(region_geom)
    assert "RegionGeom" in text
    assert "CircleSkyRegion" in text
def geom():
    """Small galactic WCS geometry with two log energy bins (0.1-10 TeV)."""
    energy_axis = MapAxis.from_edges(np.logspace(-1, 1, 3), unit=u.TeV, name="energy")
    return WcsGeom.create(
        skydir=(0, 0), npix=(5, 4), frame="galactic", axes=[energy_axis]
    )
# NOTE(review): excerpt from a list of expected-value dicts used to
# parametrize a MapDataset test; it is cut off mid-dict at the end.
{
    # Test single energy bin with exclusion mask
    "geom": geom(ebounds=[0.1, 10]),
    "e_true": None,
    "exclusion_mask": Map.from_geom(geom(ebounds=[0.1, 10])),
    "counts": 34366,
    "exposure": 5.843302e08,
    "exposure_image": 1.16866e11,
    "background": 30424.451,
    "binsz_irf": 0.5,
},
{
    # Test for different e_true and e_reco bins
    "geom": geom(ebounds=[0.1, 1, 10]),
    "e_true": MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy_true", unit="TeV", interp="log"
    ),
    "counts": 34366,
    "exposure": 9.951827e08,
    "exposure_image": 6.492968e10,
    "background": 28760.283,
    "background_oversampling": 2,
    "binsz_irf": 0.5,
},
{
    # Test for different e_true and e_reco and spatial bins
    "geom": geom(ebounds=[0.1, 1, 10]),
    "e_true": MapAxis.from_edges(
        [0.1, 0.5, 2.5, 10.0], name="energy_true", unit="TeV", interp="log"
    ),
    "counts": 34366,
def setup(self):
    """Build a small `SpectrumDatasetOnOff` fixture.

    Creates reco/true energy axes, ON/OFF regions, a flat effective
    area, counts with an empty last bin (to test statistics with empty
    bins), a diagonal energy dispersion and the assembled dataset.

    Note: the original version built two ``data = np.ones(elo.shape)``
    arrays (and ``elo``/``ehi`` slices) that were never used; they have
    been removed — behavior is unchanged.
    """
    etrue = np.logspace(-1, 1, 10) * u.TeV
    self.e_true = MapAxis.from_edges(etrue, name="energy_true")
    ereco = np.logspace(-1, 1, 5) * u.TeV
    self.e_reco = MapAxis.from_edges(ereco, name="energy")

    start = u.Quantity([0], "s")
    stop = u.Quantity([1000], "s")
    time_ref = Time("2010-01-01 00:00:00.0")
    self.gti = GTI.create(start, stop, time_ref)
    self.livetime = self.gti.time_sum

    self.on_region = make_region("icrs;circle(0.,1.,0.1)")
    off_region = make_region("icrs;box(0.,1.,0.1, 0.2,30)")
    self.off_region = off_region.union(
        make_region("icrs;box(-1.,-1.,0.1, 0.2,150)"))
    self.wcs = WcsGeom.create(npix=300, binsz=0.01, frame="icrs").wcs

    self.aeff = RegionNDMap.create(region=self.on_region,
                                   wcs=self.wcs,
                                   axes=[self.e_true],
                                   unit="cm2")
    self.aeff.data += 1

    axis = MapAxis.from_edges(ereco, name="energy", interp="log")
    # last reco bin left empty to test stats calculation with empty bins
    self.on_counts = RegionNDMap.create(region=self.on_region,
                                        wcs=self.wcs,
                                        axes=[axis])
    self.on_counts.data += 1
    self.on_counts.data[-1] = 0

    self.off_counts = RegionNDMap.create(region=self.off_region,
                                         wcs=self.wcs,
                                         axes=[axis])
    self.off_counts.data += 10

    acceptance = RegionNDMap.from_geom(self.on_counts.geom)
    acceptance.data += 1

    acceptance_off = RegionNDMap.from_geom(self.off_counts.geom)
    acceptance_off.data += 10

    self.edisp = EDispKernelMap.from_diagonal_response(
        self.e_reco, self.e_true, self.on_counts.geom)
    self.dataset = SpectrumDatasetOnOff(
        counts=self.on_counts,
        counts_off=self.off_counts,
        aeff=self.aeff,
        edisp=self.edisp,
        livetime=self.livetime,
        acceptance=acceptance,
        acceptance_off=acceptance_off,
        name="test",
        gti=self.gti,
    )
def geom(ebounds, binsz=0.5):
    """Galactic 10x5 deg WCS geometry with a log energy axis from ``ebounds``."""
    center = SkyCoord(0, -1, unit="deg", frame="galactic")
    energy_axis = MapAxis.from_edges(ebounds, name="energy", unit="TeV", interp="log")
    return WcsGeom.create(
        skydir=center, binsz=binsz, width=(10, 5), frame="galactic", axes=[energy_axis]
    )
def _create_offset_axis(fov_offset_bins):
    """Wrap the field-of-view offset bin edges in a `MapAxis` named "offset"."""
    return MapAxis.from_edges(fov_offset_bins, name="offset")
reference="1 TeV") spectral_model_2 = PowerLaw(index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV") sky_model_1 = SkyModel(spatial_model=spatial_model_1, spectral_model=spectral_model_1) sky_model_2 = SkyModel(spatial_model=spatial_model_2, spectral_model=spectral_model_2) models = sky_model_1 + sky_model_2 # Define map geometry axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 10), unit="TeV") geom = WcsGeom.create(skydir=(0, 0), binsz=0.02, width=(2, 2), coordsys="GAL", axes=[axis]) # Define some observation parameters # we are not simulating many pointings / observations pointing = SkyCoord(0.2, 0.5, unit="deg", frame="galactic") livetime = 20 * u.hour exposure_map = make_map_exposure_true_energy(pointing=pointing, livetime=livetime, aeff=aeff, geom=geom)
# Two identical power-law spectra; the spatial models are defined
# earlier, outside this excerpt.
spectral_model_1 = PowerLaw(
    index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
)
spectral_model_2 = PowerLaw(
    index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
)
sky_model_1 = SkyModel(spatial_model=spatial_model_1,
                       spectral_model=spectral_model_1)
sky_model_2 = SkyModel(spatial_model=spatial_model_2,
                       spectral_model=spectral_model_2)
models = sky_model_1 + sky_model_2

# Define map geometry
axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 10), unit="TeV")
geom = WcsGeom.create(
    skydir=(0, 0), binsz=0.02, width=(2, 2), coordsys="GAL", axes=[axis]
)

# Define some observation parameters
# we are not simulating many pointings / observations
pointing = SkyCoord(0.2, 0.5, unit="deg", frame="galactic")
livetime = 20 * u.hour

exposure_map = make_map_exposure_true_energy(
    pointing=pointing, livetime=livetime, aeff=aeff, geom=geom
)

evaluator = MapEvaluator(model=models, exposure=exposure_map)
def plot_theta_squared_table(table):
    """Plot the theta2 distribution of ON, OFF counts, excess and signifiance
    in each theta2 bin.

    Take the table containing the ON counts, the OFF counts, the acceptance,
    the off acceptance and the alpha (normalisation between ON and OFF)
    for each theta2 bin.

    Parameters
    ----------
    table : `~astropy.table.Table`
        Required columns: theta2_min, theta2_max, counts, counts_off and alpha.
        Also reads: excess, excess_errn, excess_errp, sqrt_ts.
    """
    import matplotlib.pyplot as plt

    # rebuild the bin edges from the per-row lo/hi columns
    theta2_edges = edges_from_lo_hi(
        table["theta2_min"].quantity, table["theta2_max"].quantity
    )
    theta2_axis = MapAxis.from_edges(theta2_edges, interp="lin", name="theta_squared")

    # top panel: counts, counts off and excess vs theta2
    ax0 = plt.subplot(2, 1, 1)

    x = theta2_axis.center.value
    x_edges = theta2_axis.edges.value
    # asymmetric x error bars spanning each bin
    xerr = (x - x_edges[:-1], x_edges[1:] - x)

    ax0.errorbar(
        x,
        table["counts"],
        xerr=xerr,
        yerr=np.sqrt(table["counts"]),
        linestyle="None",
        label="Counts",
    )

    ax0.errorbar(
        x,
        table["counts_off"],
        xerr=xerr,
        yerr=np.sqrt(table["counts_off"]),
        linestyle="None",
        label="Counts Off",
    )

    ax0.errorbar(
        x,
        table["excess"],
        xerr=xerr,
        # errn is negated so that (lower, upper) are both positive offsets
        yerr=(-table["excess_errn"], table["excess_errp"]),
        fmt="+",
        linestyle="None",
        label="Excess",
    )

    ax0.set_ylabel("Counts")
    ax0.set_xticks([])
    ax0.set_xlabel("")
    ax0.legend()

    # bottom panel: significance vs theta2
    ax1 = plt.subplot(2, 1, 2)
    ax1.errorbar(x, table["sqrt_ts"], xerr=xerr, linestyle="None")
    ax1.set_xlabel(f"Theta [{theta2_axis.unit}]")
    ax1.set_ylabel("Significance")