def test_evaluate_fk5_model():
    """A model defined in FK5 must evaluate to positive flux on an ICRS geometry."""
    geom = WcsGeom.create(width=(5, 5), binsz=0.1, frame="icrs")
    model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.1 deg", frame="fk5"
    )
    data = model.evaluate_geom(geom)
    assert data.sum() > 0
def test_integrate_wcs_geom():
    """Flux of narrow Gaussians integrated over a WCS geometry must sum to ~1."""
    center = SkyCoord("0d", "0d", frame="icrs")
    # Fix: GaussianSpatialModel's position parameters are named `lon_0`/`lat_0`
    # (as used everywhere else in this file); `lon`/`lat` are not valid
    # parameter names and would not place the model at the intended position.
    model_0_0d = GaussianSpatialModel(
        lon_0="0.234d", lat_0="-0.172d", sigma=1e-4 * u.deg, frame="icrs"
    )
    model_0_01d = GaussianSpatialModel(
        lon_0="0.234d", lat_0="-0.172d", sigma=0.01 * u.deg, frame="icrs"
    )
    model_0_005d = GaussianSpatialModel(
        lon_0="0.234d", lat_0="-0.172d", sigma=0.005 * u.deg, frame="icrs"
    )
    geom = WcsGeom.create(skydir=center, npix=100, binsz=0.02)

    # TODO: solve issue with small radii
    integrated_0_0d = model_0_0d.integrate_geom(geom)
    integrated_0_01d = model_0_01d.integrate_geom(geom)
    integrated_0_005d = model_0_005d.integrate_geom(geom)

    assert_allclose(integrated_0_0d.data.sum(), 1, atol=2e-4)
    assert_allclose(integrated_0_01d.data.sum(), 1, atol=2e-4)
    assert_allclose(integrated_0_005d.data.sum(), 1, atol=2e-4)
def test_sky_gaussian():
    """Check normalization, evaluation radius, elongation and rotation."""
    # Test symmetric model
    sigma = 1 * u.deg
    model = GaussianSpatialModel(lon_0="5 deg", lat_0="15 deg", sigma=sigma)
    assert model.parameters["sigma"].min == 0
    val_0 = model(5 * u.deg, 15 * u.deg)
    val_sigma = model(5 * u.deg, 16 * u.deg)
    assert val_0.unit == "sr-1"
    ratio = val_0 / val_sigma
    assert_allclose(ratio, np.exp(0.5))
    radius = model.evaluation_radius
    assert radius.unit == "deg"
    assert_allclose(radius.value, 5 * sigma.value)

    # test the normalization for an elongated Gaussian near the Galactic Plane
    m_geom_1 = WcsGeom.create(
        binsz=0.05, width=(20, 20), skydir=(2, 2), coordsys="GAL", proj="AIT"
    )
    coords = m_geom_1.get_coord()
    solid_angle = m_geom_1.solid_angle()
    lon = coords.lon
    lat = coords.lat
    sigma = 3 * u.deg
    model_1 = GaussianSpatialModel(2 * u.deg, 2 * u.deg, sigma, 0.8, 30 * u.deg)
    vals_1 = model_1(lon, lat)
    assert vals_1.unit == "sr-1"
    assert_allclose(np.sum(vals_1 * solid_angle), 1, rtol=1.0e-3)
    radius = model_1.evaluation_radius
    assert radius.unit == "deg"
    assert_allclose(radius.value, 5 * sigma.value)

    # check the ratio between the value at the peak and on the 1-sigma isocontour
    sigma = 4 * u.deg
    semi_minor = 2 * u.deg
    e = np.sqrt(1 - (semi_minor / sigma) ** 2)
    model_2 = GaussianSpatialModel(0 * u.deg, 0 * u.deg, sigma, e, 0 * u.deg)
    val_0 = model_2(0 * u.deg, 0 * u.deg)
    val_major = model_2(0 * u.deg, 4 * u.deg)
    val_minor = model_2(2 * u.deg, 0 * u.deg)
    assert val_0.unit == "sr-1"
    ratio_major = val_0 / val_major
    ratio_minor = val_0 / val_minor
    assert_allclose(ratio_major, np.exp(0.5))
    assert_allclose(ratio_minor, np.exp(0.5))

    # check the rotation
    model_3 = GaussianSpatialModel(0 * u.deg, 0 * u.deg, sigma, e, 90 * u.deg)
    val_minor_rotated = model_3(0 * u.deg, 2 * u.deg)
    ratio_minor_rotated = val_0 / val_minor_rotated
    assert_allclose(ratio_minor_rotated, np.exp(0.5))
def test_integrate_geom_energy_axis():
    """Integral of a narrow Gaussian over a region geometry with an energy axis is ~1."""
    center = SkyCoord("0d", "0d", frame='icrs')
    # Fix: the parameter names are `lon_0`/`lat_0`; `lon`/`lat` are not
    # GaussianSpatialModel parameters. Values equal the defaults (0 deg),
    # so the evaluated model is unchanged.
    model = GaussianSpatialModel(
        lon_0="0d", lat_0="0d", sigma=0.1 * u.deg, frame='icrs'
    )

    radius = 1 * u.deg
    square = RectangleSkyRegion(center, radius, radius)

    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=10)
    geom = RegionGeom(region=square, axes=[axis])

    integral = model.integrate_geom(geom).data
    assert_allclose(integral, 1, rtol=0.01)
def test_integrate_geom():
    """Integral over a large circle is ~1; over a 1-sigma circle it is ~0.3953."""
    center = SkyCoord("0d", "0d", frame='icrs')
    # Fix: the parameter names are `lon_0`/`lat_0`; `lon`/`lat` are not
    # GaussianSpatialModel parameters. Values equal the defaults (0 deg),
    # so the evaluated model is unchanged.
    model = GaussianSpatialModel(
        lon_0="0d", lat_0="0d", sigma=0.1 * u.deg, frame='icrs'
    )

    radius_large = 1 * u.deg
    circle_large = CircleSkyRegion(center, radius_large)
    radius_small = 0.1 * u.deg
    circle_small = CircleSkyRegion(center, radius_small)

    geom_large = RegionGeom(region=circle_large)
    geom_small = RegionGeom(region=circle_small)
    integral_large = model.integrate_geom(geom_large).data
    integral_small = model.integrate_geom(geom_small).data

    assert_allclose(integral_large[0], 1, rtol=0.01)
    assert_allclose(integral_small[0], 0.3953, rtol=0.01)
def simulate_map_dataset(random_state=0, name=None):
    """Simulate a CTA map dataset containing a Gaussian source with a power-law spectrum."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")
    edges = np.logspace(-1, 2, 15) * u.TeV
    energy_axis = MapAxis.from_edges(edges=edges, name="energy", interp="log")

    geom = WcsGeom.create(
        skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], frame="galactic"
    )

    gauss = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
    )
    pwl = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=gauss, spectral_model=pwl, name="source")

    obs = Observation.create(pointing=skydir, livetime=1 * u.h, irfs=irfs)
    empty = MapDataset.create(geom, name=name)
    maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
    dataset = maker.run(empty, obs)

    dataset.models.append(skymodel)
    dataset.fake(random_state=random_state)
    return dataset
def test_excess_map_estimator_map_dataset_on_off_reco_exposure(
    simple_dataset_on_off,
):
    # TODO: this has never worked...
    model = SkyModel(
        PowerLawSpectralModel(amplitude="1e-9 cm-2 s-1TeV-1"),
        GaussianSpatialModel(
            lat_0=0.0 * u.deg, lon_0=0.0 * u.deg, sigma=0.1 * u.deg, frame="icrs"
        ),
        name="sky_model",
    )
    simple_dataset_on_off.models = [model]

    # A very steep spectral assumption for the flux conversion.
    spectral_model = PowerLawSpectralModel(index=15)
    estimator_mod = ExcessMapEstimator(
        0.11 * u.deg,
        correlate_off=True,
        spectral_model=spectral_model,
    )
    result_mod = estimator_mod.run(simple_dataset_on_off)

    assert result_mod["flux"].unit == "cm-2s-1"
    assert_allclose(result_mod["flux"].data.sum(), 5.254442e-06, rtol=1e-3)

    reco_exposure = estimate_exposure_reco_energy(
        simple_dataset_on_off, spectral_model=spectral_model
    )
    assert_allclose(reco_exposure.data.sum(), 7.977796e12, rtol=0.001)
def test_mask_shape():
    """Flux point extraction over two datasets with different map widths."""
    axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")
    geom_1 = WcsGeom.create(binsz=1, width=3, axes=[axis])
    geom_2 = WcsGeom.create(binsz=1, width=5, axes=[axis])

    dataset_1 = MapDataset.create(geom_1)
    dataset_2 = MapDataset.create(geom_2)
    dataset_1.psf = None
    dataset_2.psf = None
    dataset_1.edisp = None
    dataset_2.edisp = None

    model = SkyModel(
        spectral_model=PowerLawSpectralModel(),
        spatial_model=GaussianSpatialModel(),
        name="source",
    )
    dataset_1.models = model
    dataset_2.models = model

    fpe = FluxPointsEstimator(e_edges=[1, 10] * u.TeV, source="source")
    fp = fpe.run([dataset_2, dataset_1])
    assert_allclose(fp.table["counts"], 0)
def test_compute_ts_map_downsampled(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.11 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model,
        downsampling_factor=2,
        kernel_width="1 deg",
        selection_optional=["ul"],
    )
    result = ts_estimator.run(input_dataset)

    assert_allclose(result["ts"].data[0, 99, 99], 1661.49, rtol=1e-2)
    assert_allclose(result["niter"].data[0, 99, 99], 7)
    assert_allclose(result["flux"].data[0, 99, 99], 1.065988e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[0, 99, 99], 4.005628e-11, rtol=1e-2)
    assert_allclose(result["flux_ul"].data[0, 99, 99], 8.220152e-11, rtol=1e-2)
    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")
    assert result["flux_ul"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[0, 30, 40])
def test_excess_map_estimator_map_dataset_on_off_with_correlation_model(
    simple_dataset_on_off,
):
    """Excess map estimation on an on-off dataset with a source model set."""
    model = SkyModel(
        PowerLawSpectralModel(amplitude="1e-9 cm-2 s-1TeV-1"),
        GaussianSpatialModel(
            lat_0=0.0 * u.deg, lon_0=0.0 * u.deg, sigma=0.1 * u.deg, frame="icrs"
        ),
        name="sky_model",
    )
    simple_dataset_on_off.models = [model]

    estimator_mod = ExcessMapEstimator(0.11 * u.deg, correlate_off=True)
    result_mod = estimator_mod.run(simple_dataset_on_off)

    assert result_mod["npred"].data.shape == (1, 20, 20)
    assert_allclose(result_mod["sqrt_ts"].data[0, 10, 10], 6.240846, atol=1e-3)
    assert_allclose(result_mod["npred"].data[0, 10, 10], 388)
    assert_allclose(result_mod["npred_excess"].data[0, 10, 10], 148.68057)
    assert result_mod["flux"].unit == "cm-2s-1"
    assert_allclose(result_mod["flux"].data[0, 10, 10], 1.486806e-08, rtol=1e-3)
    assert_allclose(result_mod["flux"].data.sum(), 5.254442e-06, rtol=1e-3)
def test_compute_ts_map(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.1 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model, threshold=1, kernel_width="1 deg", selection_optional=[]
    )
    result = ts_estimator.run(input_dataset)

    assert_allclose(result["ts"].data[0, 99, 99], 1704.23, rtol=1e-2)
    assert_allclose(result["niter"].data[0, 99, 99], 8)
    assert_allclose(result["flux"].data[0, 99, 99], 1.02e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[0, 99, 99], 3.84e-11, rtol=1e-2)
    assert_allclose(result["npred"].data[0, 99, 99], 3627.874063, rtol=1e-2)
    assert_allclose(result["npred_null"].data[0, 99, 99], 2601, rtol=1e-2)
    assert_allclose(result["npred_excess"].data[0, 99, 99], 1026.874063, rtol=1e-2)
    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[0, 30, 40])

    energy_axis = result["ts"].geom.axes["energy"]
    assert_allclose(energy_axis.edges.value, [0.1, 1])
def test_contributes_region_mask():
    """`SkyModel.contributes` must honor the region mask and the margin."""
    axis = MapAxis.from_edges(np.logspace(-1, 1, 3), unit=u.TeV, name="energy")
    geom = RegionGeom.create(
        "galactic;circle(0, 0, 0.2)", axes=[axis], binsz_wcs="0.05 deg"
    )
    mask = Map.from_geom(geom, unit="", dtype="bool")
    mask.data[...] = True

    spatial_model1 = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0 deg", sigma="0.1 deg", frame="galactic"
    )
    spatial_model2 = PointSpatialModel(
        lon_0="0.3 deg", lat_0="0.3 deg", frame="galactic"
    )
    model1 = SkyModel(
        spatial_model=spatial_model1,
        spectral_model=PowerLawSpectralModel(),
        name="source-1",
    )
    model2 = SkyModel(
        spatial_model=spatial_model2,
        spectral_model=PowerLawSpectralModel(),
        name="source-2",
    )

    assert model1.contributes(mask, margin=0 * u.deg)
    assert not model2.contributes(mask, margin=0 * u.deg)
    assert model2.contributes(mask, margin=0.3 * u.deg)
def get_lc(datasets):
    """Extract a light curve with a frozen spatial model in one energy band."""
    spatial_model1 = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.3 deg", frame="galactic"
    )
    # Freeze all spatial parameters so only the spectrum varies per time bin.
    spatial_model1.parameters["lon_0"].frozen = True
    spatial_model1.parameters["lat_0"].frozen = True
    spatial_model1.parameters["sigma"].frozen = True

    spectral_model1 = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model_fit = SkyModel(
        spatial_model=spatial_model1,
        spectral_model=spectral_model1,
        name="model_fit",
    )

    for dataset in datasets:
        dataset.models = [model_fit, FoVBackgroundModel(dataset_name=dataset.name)]

    lc_maker = LightCurveEstimator(
        energy_edges=[1.0, 10.0] * u.TeV, source="model_fit", reoptimize=False
    )
    lc = lc_maker.run(datasets)
    print(lc.to_table(format="lightcurve", sed_type="flux")["flux"])
def test_compute_ts_map_newton(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="0.1 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(
        model=model, method="root newton", threshold=1, kernel_width="1 deg"
    )
    result = ts_estimator.run(input_dataset)

    assert "root newton" in repr(ts_estimator)
    assert_allclose(result["ts"].data[99, 99], 1714.23, rtol=1e-2)
    assert_allclose(result["niter"].data[99, 99], 0)
    assert_allclose(result["flux"].data[99, 99], 1.02e-09, rtol=1e-2)
    assert_allclose(result["flux_err"].data[99, 99], 3.84e-11, rtol=1e-2)
    assert_allclose(result["flux_ul"].data[99, 99], 1.10e-09, rtol=1e-2)
    assert result["flux"].unit == u.Unit("cm-2s-1")
    assert result["flux_err"].unit == u.Unit("cm-2s-1")
    assert result["flux_ul"].unit == u.Unit("cm-2s-1")

    # Check mask is correctly taken into account
    assert np.isnan(result["ts"].data[30, 40])
def test_sky_model_init():
    """Non-model components passed to SkyModel must raise TypeError."""
    with pytest.raises(TypeError):
        spatial_model = GaussianSpatialModel()
        SkyModel(spectral_model=1234, spatial_model=spatial_model)

    with pytest.raises(TypeError):
        SkyModel(spectral_model=PowerLawSpectralModel(), spatial_model=1234)
def test_compute_flux_spatial_no_psf():
    # check that spatial integration is not performed in the absence of a psf
    center = SkyCoord("0 deg", "0 deg", frame="galactic")
    region = CircleSkyRegion(center=center, radius=0.1 * u.deg)

    nbin = 2
    energy_axis_true = MapAxis.from_energy_bounds(
        ".1 TeV", "10 TeV", nbin=nbin, name="energy_true"
    )

    spectral_model = ConstantSpectralModel()
    spatial_model = GaussianSpatialModel(
        lon_0=0 * u.deg, lat_0=0 * u.deg, frame="galactic", sigma="0.1 deg"
    )
    models = SkyModel(spectral_model=spectral_model, spatial_model=spatial_model)
    model = Models(models)

    exposure_region = RegionNDMap.create(region, axes=[energy_axis_true])
    exposure_region.data += 1.0
    exposure_region.unit = "m2 s"

    evaluator = MapEvaluator(model=model[0], exposure=exposure_region)
    flux = evaluator.compute_flux_spatial()
    assert_allclose(flux, 1.0)
def make_example_2():
    """Write a Gaussian + power-law sky model to ``example2.yaml``."""
    spatial = GaussianSpatialModel(lon_0="0 deg", lat_0="0 deg", sigma="1 deg")
    model = SkyModel(PowerLawSpectralModel(), spatial, name="example_2")
    models = Models([model])
    models.write(DATA_PATH / "example2.yaml", overwrite=True, write_covariance=False)
def spatial_model(self):
    """Spatial model (`~gammapy.modeling.models.SpatialModel`)."""
    d = self.data
    ra = d["RAJ2000"]
    dec = d["DEJ2000"]

    if self.is_pointlike:
        model = PointSpatialModel(lon_0=ra, lat_0=dec, frame="icrs")
    else:
        de = self.data_extended
        morph_type = de["Model_Form"].strip()
        # Eccentricity from the catalog's semi-minor / semi-major axes.
        e = (1 - (de["Model_SemiMinor"] / de["Model_SemiMajor"]) ** 2.0) ** 0.5
        sigma = de["Model_SemiMajor"]
        phi = de["Model_PosAng"]
        if morph_type in ["Disk", "Elliptical Disk"]:
            r_0 = de["Model_SemiMajor"]
            model = DiskSpatialModel(
                lon_0=ra, lat_0=dec, r_0=r_0, e=e, phi=phi, frame="icrs"
            )
        elif morph_type in ["Map", "Ring", "2D Gaussian x2"]:
            # Template morphologies are read from file and returned directly
            # (no spatial errors are set for them).
            filename = de["Spatial_Filename"].strip()
            path = make_path(
                "$GAMMAPY_DATA/catalogs/fermi/Extended_archive_v15/Templates/"
            )
            return TemplateSpatialModel.read(path / filename)
        elif morph_type in ["2D Gaussian", "Elliptical 2D Gaussian"]:
            model = GaussianSpatialModel(
                lon_0=ra, lat_0=dec, sigma=sigma, e=e, phi=phi, frame="icrs"
            )
        else:
            raise ValueError(f"Invalid spatial model: {morph_type!r}")
    self._set_spatial_errors(model)
    return model
def from_gauss(cls, geom, sigma, max_radius=None, factor=4):
    """Create Gaussian PSF.

    This is used for testing and examples.

    The map geometry parameters (pixel size, energy bins) are taken
    from ``geom``. The Gaussian width ``sigma`` is a scalar.

    TODO : support array input if it should vary along the energy axis.

    Parameters
    ----------
    geom : `~gammapy.maps.WcsGeom`
        Map geometry
    sigma : `~astropy.coordinates.Angle`
        Gaussian width.
    max_radius : `~astropy.coordinates.Angle`
        Desired kernel map size.
    factor : int
        Oversample factor to compute the PSF

    Returns
    -------
    kernel : `~gammapy.irf.PSFKernel`
        the kernel Map with reduced geometry according to the max_radius
    """
    # Imported locally, matching the original's deferred import.
    from gammapy.modeling.models import GaussianSpatialModel

    gauss = GaussianSpatialModel(sigma=sigma)
    return cls.from_spatial_model(
        model=gauss, geom=geom, max_radius=max_radius, factor=factor
    )
def get_npred_map():
    """Build a MapEvaluator for a Gaussian source and return it with its npred map."""
    position = SkyCoord(0.0, 0.0, frame="galactic", unit="deg")
    energy_axis = MapAxis.from_bounds(
        1, 100, nbin=30, unit="TeV", name="energy_true", interp="log"
    )
    exposure = Map.create(
        binsz=0.02,
        map_type="wcs",
        skydir=position,
        width="2 deg",
        axes=[energy_axis],
        frame="galactic",
        unit="cm2 s",
    )

    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    # Flat exposure everywhere.
    exposure.data = 1e14 * np.ones(exposure.data.shape)
    evaluator = MapEvaluator(model=skymodel, exposure=exposure)
    npred = evaluator.compute_npred()
    return evaluator, npred
def spatial_model(self):
    """Spatial model (`~gammapy.modeling.models.SpatialModel`)."""
    d = self.data
    ra = d["RAJ2000"]
    dec = d["DEJ2000"]

    if self.is_pointlike:
        model = PointSpatialModel(lon_0=ra, lat_0=dec, frame="icrs")
    else:
        de = self.data_extended
        morph_type = de["Model_Form"].strip()
        # Eccentricity from the catalog's semi-minor / semi-major axes.
        e = (1 - (de["Model_SemiMinor"] / de["Model_SemiMajor"]) ** 2.0) ** 0.5
        sigma = de["Model_SemiMajor"]
        phi = de["Model_PosAng"]
        if morph_type == "Disk":
            r_0 = de["Model_SemiMajor"]
            model = DiskSpatialModel(
                lon_0=ra, lat_0=dec, r_0=r_0, e=e, phi=phi, frame="icrs"
            )
        elif morph_type in ["Map", "Ring", "2D Gaussian x2"]:
            filename = de["Spatial_Filename"].strip()
            path = make_path(
                "$GAMMAPY_DATA/catalogs/fermi/LAT_extended_sources_8years/Templates/"
            )
            with warnings.catch_warnings():  # ignore FITS units warnings
                warnings.simplefilter("ignore", FITSFixedWarning)
                model = TemplateSpatialModel.read(path / filename)
        elif morph_type == "2D Gaussian":
            model = GaussianSpatialModel(
                lon_0=ra, lat_0=dec, sigma=sigma, e=e, phi=phi, frame="icrs"
            )
        else:
            raise ValueError(f"Invalid spatial model: {morph_type!r}")
    self._set_spatial_errors(model)
    return model
def test_significance_map_estimator_map_dataset(simple_dataset):
    """Excess map estimation, before and after adding exposure/PSF and a model."""
    estimator = ExcessMapEstimator(0.1 * u.deg, selection_optional=["all"])
    result = estimator.run(simple_dataset)

    assert_allclose(result["counts"].data[0, 10, 10], 162)
    assert_allclose(result["excess"].data[0, 10, 10], 81)
    assert_allclose(result["background"].data[0, 10, 10], 81)
    assert_allclose(result["sqrt_ts"].data[0, 10, 10], 7.910732, atol=1e-5)
    assert_allclose(result["err"].data[0, 10, 10], 12.727922, atol=1e-3)
    assert_allclose(result["errp"].data[0, 10, 10], 13.063328, atol=1e-3)
    assert_allclose(result["errn"].data[0, 10, 10], -12.396716, atol=1e-3)
    assert_allclose(result["ul"].data[0, 10, 10], 107.806275, atol=1e-3)

    simple_dataset.exposure += 1e10 * u.cm**2 * u.s
    axis = simple_dataset.exposure.geom.axes[0]
    simple_dataset.psf = PSFMap.from_gauss(axis, sigma="0.05 deg")

    model = SkyModel(
        PowerLawSpectralModel(amplitude="1e-9 cm-2 s-1 TeV-1"),
        GaussianSpatialModel(
            lat_0=0.0 * u.deg, lon_0=0.0 * u.deg, sigma=0.1 * u.deg, frame="icrs"
        ),
        name="sky_model",
    )
    simple_dataset.models = [model]
    simple_dataset.npred()

    estimator = ExcessMapEstimator(0.1 * u.deg, selection_optional="all")
    result = estimator.run(simple_dataset)

    assert_allclose(result["excess"].data.sum(), 19733.602, rtol=1e-3)
    assert_allclose(result["background"].data.sum(), 31818.398, rtol=1e-3)
    assert_allclose(result["sqrt_ts"].data[0, 10, 10], 4.217129, rtol=1e-3)
def models():
    """Fixture: sky model with an exponential-decay light-curve template plus a FoV background model."""
    spatial_model = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")

    # Build a normalized exponential-decay light-curve table.
    t_max = 1000 * u.s
    time = np.arange(t_max.value) * u.s
    tau = u.Quantity("2e2 s")
    norm = np.exp(-time / tau)

    table = Table()
    table["TIME"] = time
    table["NORM"] = norm / norm.max()
    t_ref = Time("2000-01-01")
    table.meta = dict(MJDREFI=t_ref.mjd, MJDREFF=0, TIMEUNIT="s")
    temporal_model = LightCurveTemplateTemporalModel(table)

    model = SkyModel(
        spatial_model=spatial_model,
        spectral_model=spectral_model,
        temporal_model=temporal_model,
        name="test-source",
    )
    bkg_model = FoVBackgroundModel(dataset_name="test")
    return [model, bkg_model]
def simulate_map_dataset(random_state=0):
    """Simulate a CTA map dataset with a Gaussian source via `simulate_dataset`."""
    irfs = load_cta_irfs(
        "$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
    )
    skydir = SkyCoord("0 deg", "0 deg", frame="galactic")
    edges = np.logspace(-1, 2, 15) * u.TeV
    energy_axis = MapAxis.from_edges(edges=edges, name="energy", interp="log")

    geom = WcsGeom.create(
        skydir=skydir, width=(4, 4), binsz=0.1, axes=[energy_axis], coordsys="GAL"
    )

    gauss = GaussianSpatialModel(
        lon_0="0 deg", lat_0="0 deg", sigma="0.4 deg", frame="galactic"
    )
    pwl = PowerLawSpectralModel(amplitude="1e-11 cm-2 s-1 TeV-1")
    skymodel = SkyModel(spatial_model=gauss, spectral_model=pwl, name="source")

    dataset = simulate_dataset(
        skymodel=skymodel,
        geom=geom,
        pointing=skydir,
        irfs=irfs,
        random_state=random_state,
    )
    return dataset
def test_mask_shape():
    """Flux point estimation over mismatched-width datasets must raise."""
    axis = MapAxis.from_edges([1, 3, 10], unit="TeV", interp="log", name="energy")
    geom_1 = WcsGeom.create(binsz=1, width=3, axes=[axis])
    geom_2 = WcsGeom.create(binsz=1, width=5, axes=[axis])

    dataset_1 = MapDataset.create(geom_1)
    dataset_2 = MapDataset.create(geom_2)
    dataset_1.psf = None
    dataset_2.psf = None
    dataset_1.edisp = None
    dataset_2.edisp = None

    model = SkyModel(
        spectral_model=PowerLawSpectralModel(), spatial_model=GaussianSpatialModel()
    )
    dataset_1.model = model
    dataset_2.model = model

    fpe = FluxPointsEstimator(
        datasets=[dataset_2, dataset_1], e_edges=[1, 10] * u.TeV, source="source"
    )
    # running the estimator on this setup is expected to fail
    with pytest.raises(ValueError):
        fpe.run()
def fit_lc(datasets):
    """Fit a Gaussian + power-law + exponential-decay sky model to all datasets."""
    spatial_model1 = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.3 deg", frame="galactic"
    )
    # Position is free, extension is fixed.
    spatial_model1.parameters["lon_0"].frozen = False
    spatial_model1.parameters["lat_0"].frozen = False
    spatial_model1.parameters["sigma"].frozen = True

    spectral_model1 = PowerLawSpectralModel(
        index=3, amplitude="2e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    temporal_model1 = ExpDecayTemporalModel(t0="10 h", t_ref=gti_t0.mjd * u.d)

    model_fit = SkyModel(
        spatial_model=spatial_model1,
        spectral_model=spectral_model1,
        temporal_model=temporal_model1,
        name="fit",
    )

    for dataset in datasets:
        dataset.models = [model_fit, FoVBackgroundModel(dataset_name=dataset.name)]
        dataset.background_model.parameters["norm"].frozen = True

    fit = Fit()
    result = fit.run(datasets=datasets)
    print(result.success)
    print(result.parameters.to_table())
def test_fov_bkg_maker_fit_with_source_model(obs_dataset, exclusion_mask):
    """FoV background fit with a source model present in the dataset."""
    fov_bkg_maker = FoVBackgroundMaker(method="fit", exclusion_mask=exclusion_mask)
    test_dataset = obs_dataset.copy(name="test-fov")

    spatial_model = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    model = SkyModel(
        spatial_model=spatial_model, spectral_model=spectral_model, name="test-source"
    )
    bkg_model = FoVBackgroundModel(dataset_name="test-fov")
    test_dataset.models = [model, bkg_model]

    dataset = fov_bkg_maker.run(test_dataset)

    # Here we check that source parameters are correctly thawed after fit.
    assert not dataset.models.parameters["index"].frozen
    assert not dataset.models.parameters["lon_0"].frozen

    model = dataset.models[f"{dataset.name}-bkg"].spectral_model
    assert not model.norm.frozen
    assert_allclose(model.norm.value, 0.830789, rtol=1e-4)
    assert_allclose(model.tilt.value, 0.0, rtol=1e-4)
def sky_model():
    """Fixture: Gaussian spatial model with a power-law spectrum."""
    spatial_model = GaussianSpatialModel(
        lon_0="0.2 deg", lat_0="0.1 deg", sigma="0.2 deg", frame="galactic"
    )
    spectral_model = PowerLawSpectralModel(
        index=3, amplitude="1e-11 cm-2 s-1 TeV-1", reference="1 TeV"
    )
    return SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)
def test_stack_npred():
    """Stacking datasets then computing npred must match stacking the npred maps."""
    pwl = PowerLawSpectralModel()
    gauss = GaussianSpatialModel(sigma="0.2 deg")
    model = SkyModel(pwl, gauss)

    axis = MapAxis.from_energy_bounds("0.1 TeV", "10 TeV", nbin=5)
    axis_etrue = MapAxis.from_energy_bounds(
        "0.1 TeV", "10 TeV", nbin=11, name="energy_true"
    )
    geom = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.05,
        width=(2, 2),
        frame="icrs",
        axes=[axis],
    )

    dataset_1 = MapDataset.create(
        geom,
        energy_axis_true=axis_etrue,
        name="dataset-1",
        gti=GTI.create("0 min", "30 min"),
    )
    dataset_1.psf = None
    dataset_1.exposure.data += 1
    dataset_1.mask_safe.data = geom.energy_mask(energy_min=1 * u.TeV)
    dataset_1.background.data += 1

    bkg_model_1 = FoVBackgroundModel(dataset_name=dataset_1.name)
    dataset_1.models = [model, bkg_model_1]

    dataset_2 = MapDataset.create(
        geom,
        energy_axis_true=axis_etrue,
        name="dataset-2",
        gti=GTI.create("30 min", "60 min"),
    )
    dataset_2.psf = None
    dataset_2.exposure.data += 1
    dataset_2.mask_safe.data = geom.energy_mask(energy_min=0.2 * u.TeV)
    dataset_2.background.data += 1

    bkg_model_2 = FoVBackgroundModel(dataset_name=dataset_2.name)
    dataset_2.models = [model, bkg_model_2]

    # Zero out predicted counts outside each dataset's safe mask, then
    # stack the npred maps by hand.
    npred_1 = dataset_1.npred()
    npred_1.data[~dataset_1.mask_safe.data] = 0
    npred_2 = dataset_2.npred()
    npred_2.data[~dataset_2.mask_safe.data] = 0

    stacked_npred = Map.from_geom(geom)
    stacked_npred.stack(npred_1)
    stacked_npred.stack(npred_2)

    stacked = MapDataset.create(geom, energy_axis_true=axis_etrue, name="stacked")
    stacked.stack(dataset_1)
    stacked.stack(dataset_2)

    npred_stacked = stacked.npred()
    assert_allclose(npred_stacked.data, stacked_npred.data)
def test_large_kernel(input_dataset):
    """Minimal test of compute_ts_image"""
    spatial_model = GaussianSpatialModel(sigma="4 deg")
    spectral_model = PowerLawSpectralModel(index=2)
    model = SkyModel(spatial_model=spatial_model, spectral_model=spectral_model)

    ts_estimator = TSMapEstimator(model=model, kernel_width="4 deg")
    # an oversized kernel is expected to be rejected
    with pytest.raises(ValueError):
        ts_estimator.run(input_dataset)