Example 1
def test_spectrum_dataset_on_off_to_yaml(tmpdir):
    spectrum_datasets_on_off = make_observation_list()
    datasets = Datasets(spectrum_datasets_on_off)
    datasets.write(path=tmpdir)
    datasets_read = Datasets.read(tmpdir / "_datasets.yaml", tmpdir / "_models.yaml")
    assert len(datasets_read) == len(datasets)
    assert datasets_read[0].name == datasets[0].name
    assert datasets_read[1].name == datasets[1].name
    assert datasets_read[1].counts.data.sum() == datasets[1].counts.data.sum()
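Note: Datasets.write(path=tmpdir) in this Gammapy version writes a pair of files, _datasets.yaml and _models.yaml, under path (the default prefix is empty, hence the leading underscore), which is why the read call above targets exactly those names.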
Example 2
def test_datasets_io_no_model(tmpdir):
    axis = MapAxis.from_energy_bounds("1 TeV", "10 TeV", nbin=2)
    geom = WcsGeom.create(npix=(5, 5), axes=[axis])
    dataset_1 = MapDataset.create(geom, name="dataset_1")
    dataset_2 = MapDataset.create(geom, name="dataset_2")

    datasets = Datasets([dataset_1, dataset_2])

    datasets.write(filename=tmpdir / "datasets.yaml")

    filename_1 = tmpdir / "dataset_1.fits"
    assert filename_1.exists()

    filename_2 = tmpdir / "dataset_2.fits"
    assert filename_2.exists()
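To close the loop, the written files can be loaded again. A minimal read-back sketch, assuming the same Gammapy version as above, where Datasets.read accepts the datasets YAML (plus an optional models YAML) and tmpdir is the directory used in the test:

from gammapy.datasets import Datasets

# Read back the two datasets written above; per-dataset FITS files
# are resolved relative to the YAML's directory.
datasets_read = Datasets.read(tmpdir / "datasets.yaml")
assert [d.name for d in datasets_read] == ["dataset_1", "dataset_2"]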
Example 3
def data_reduction_fermi():
    log.info(f"data_reduction: fermi")
    containment_correction = instrument_opts["fermi"]["containment"]
    radius = instrument_opts["fermi"]["on_radius"]
    emin = u.Quantity(instrument_opts["fermi"]["emin"]).to_value("TeV")
    emax = u.Quantity(instrument_opts["fermi"]["emax"]).to_value("TeV")

    crab_pos = SkyCoord(ra=83.63, dec=22.01, unit="deg", frame="icrs")
    on_region = CircleSkyRegion(crab_pos, radius=Angle(radius))
    off_region = CircleAnnulusSkyRegion(crab_pos,
                                        inner_radius=1 * u.deg,
                                        outer_radius=2 * u.deg)

    energy = MapAxis.from_bounds(emin,
                                 emax,
                                 36,
                                 unit="TeV",
                                 name="energy",
                                 interp="log")
    dataset = extract_spectrum_fermi(on_region, off_region, energy,
                                     containment_correction)
    datasets = Datasets([dataset])

    datasets.write(f"reduced_fermi", overwrite=True)
Example 4
def make_datasets_example():
    # Define which data to use and print some information

    energy_axis = MapAxis.from_edges(np.logspace(-1.0, 1.0, 4),
                                     unit="TeV",
                                     name="energy",
                                     interp="log")
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(2, 2),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = Models()

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(lon_0=lon * u.deg,
                                          lat_0=lat * u.deg,
                                          frame="galactic")
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(spatial_model=spatial_model,
                              spectral_model=spectral_model,
                              name=names[idx])
        models.append(model_ecpl)

    models["gc"].spectral_model.reference = models[
        "g09"].spectral_model.reference

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_spatial = TemplateSpatialModel.read(
        "$GAMMAPY_DATA/fermi-3fhl-gc/gll_iem_v06_gc.fits.gz")
    diffuse_model = SkyModel(PowerLawSpectralModel(), diffuse_spatial)

    maker = MapDatasetMaker()
    datasets = Datasets()

    observations = data_store.get_observations(obs_ids)

    for idx, geom in enumerate(geoms):
        stacked = MapDataset.create(geom=geom, name=names[idx])

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        bkg = stacked.models.pop(0)
        stacked.models = [models[idx], diffuse_model, bkg]
        datasets.append(stacked)

    datasets.write(
        "$GAMMAPY_DATA/tests/models",
        prefix="gc_example",
        overwrite=True,
        write_covariance=False,
    )
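Note the linking idiom above: assigning the Parameter object of one model to the attribute of another makes both models share a single Parameter instance, so the fit sees one common reference energy. A quick identity check, assuming Gammapy's shared-Parameter semantics:

# After the assignment, both spectral models hold the same Parameter object.
assert models["gc"].spectral_model.reference is models["g09"].spectral_model.reference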
Example 5
        on_region = CircleSkyRegion(center=geom.center_skydir,
                                    radius=on_radius)

        # Load the 1D data set
        dataset_1d_file = config['global']['iact_dataset_1d'].replace("*", src)
        if os.path.exists(dataset_1d_file.replace(".yaml", "_stacked.yaml")):
            logging.info("Loading stacked dataset...")
            dataset_stack = Datasets.read(
                dataset_1d_file.replace(".yaml", "_stacked.yaml"))
        else:
            dataset_1d = Datasets.read(
                config['global']['iact_dataset_1d'].replace("*", src))
            # stack reduce the data set
            logging.info("stacking datasets...")
            dataset_stack = Datasets([dataset_1d.stack_reduce()])
            dataset_stack.write(
                dataset_1d_file.replace(".yaml", "_stacked.yaml"))

        # load fermi SED for plotting
        logging.info("Loading Fermi files")
        sed_file = glob.glob(config['global']['fermi_sed'].replace(
            "*", src, 1))[0]
        sed = np.load(sed_file, allow_pickle=True, encoding='latin1').flat[0]

        # load fermi best fit
        avg_file = config['global']['fermi_avg'].replace("*", src)
        d = np.load(avg_file, allow_pickle=True, encoding="latin1").flat[0]
        src_fgl_name = d['config']['selection']['target']
        src_dict = convert(d['sources'])[src_fgl_name]

        # important to get the scale
        # since interpolation of fermi llh was done with prefactor
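The read-or-build logic at the top of this excerpt is a small caching pattern worth isolating. A self-contained sketch, assuming Gammapy's Datasets.read/write and stack_reduce APIs (the path handling is illustrative):

from pathlib import Path
from gammapy.datasets import Datasets

def load_stacked(path_1d):
    """Return stacked datasets, reusing a cached *_stacked.yaml if present."""
    stacked_path = Path(str(path_1d).replace(".yaml", "_stacked.yaml"))
    if stacked_path.exists():
        return Datasets.read(stacked_path)
    datasets = Datasets.read(path_1d)
    # Stack all runs into a single dataset and cache the result.
    stacked = Datasets([datasets.stack_reduce()])
    stacked.write(stacked_path)
    return stacked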
Example 6
def make_datasets_example():
    # Define which data to use and print some information

    energy_axis = MapAxis.from_edges(
        np.logspace(-1.0, 1.0, 4), unit="TeV", name="energy", interp="log"
    )
    geom0 = WcsGeom.create(
        skydir=(0, 0),
        binsz=0.1,
        width=(1, 1),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geom1 = WcsGeom.create(
        skydir=(1, 0),
        binsz=0.1,
        width=(1, 1),
        frame="galactic",
        proj="CAR",
        axes=[energy_axis],
    )
    geoms = [geom0, geom1]

    sources_coords = [(0, 0), (0.9, 0.1)]
    names = ["gc", "g09"]
    models = []

    for idx, (lon, lat) in enumerate(sources_coords):
        spatial_model = PointSpatialModel(
            lon_0=lon * u.deg, lat_0=lat * u.deg, frame="galactic"
        )
        spectral_model = ExpCutoffPowerLawSpectralModel(
            index=2 * u.Unit(""),
            amplitude=3e-12 * u.Unit("cm-2 s-1 TeV-1"),
            reference=1.0 * u.TeV,
            lambda_=0.1 / u.TeV,
        )
        model_ecpl = SkyModel(
            spatial_model=spatial_model, spectral_model=spectral_model, name=names[idx]
        )
        models.append(model_ecpl)

    # test to link a spectral parameter
    params0 = models[0].spectral_model.parameters
    params1 = models[1].spectral_model.parameters
    params0.link("reference", params1["reference"])
    # update the sky model
    models[0].parameters.link("reference", params1["reference"])

    obs_ids = [110380, 111140, 111159]
    data_store = DataStore.from_dir("$GAMMAPY_DATA/cta-1dc/index/gps/")

    diffuse_model = SkyDiffuseCube.read(
        "$GAMMAPY_DATA/fermi_3fhl/gll_iem_v06_cutout.fits"
    )

    datasets_list = []
    for idx, geom in enumerate(geoms):
        observations = data_store.get_observations(obs_ids)

        stacked = MapDataset.create(geom=geom)
        stacked.background_model.name = "background_irf_" + names[idx]

        maker = MapDatasetMaker(offset_max=4.0 * u.deg)

        for obs in observations:
            dataset = maker.run(stacked, obs)
            stacked.stack(dataset)

        stacked.psf = stacked.psf.get_psf_kernel(
            position=geom.center_skydir, geom=geom, max_radius="0.3 deg"
        )

        stacked.name = names[idx]
        stacked.models = models[idx] + diffuse_model
        datasets_list.append(stacked)

    datasets = Datasets(datasets_list)

    dataset0 = datasets[0]
    print("dataset0")
    print("counts sum : ", dataset0.counts.data.sum())
    print("expo sum : ", dataset0.exposure.data.sum())
    print("bkg0 sum : ", dataset0.background_model.evaluate().data.sum())

    datasets.write("$GAMMAPY_DATA/tests/models", prefix="gc_example_", overwrite=True)
Example 7
    def run_region(self, kr, lon, lat, radius):
        # TODO: for now we have to read/create the all-sky maps in each job,
        # because a <functools._lru_cache_wrapper> object cannot be pickled;
        # move this back to __init__ when fixed

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz"
        )
        exposure_hpx.unit = "cm2 s"

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyDiffuseCube(
            iem_fermi_extra, norm=1.1, tilt=0.03, name="iem_extrapolated"
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord(
            {"skycoord": self.events.radec, "energy": self.events.energy}
        )

        axis = MapAxis.from_nodes(
            counts.geom.axes[0].center, name="energy_true", unit="GeV", interp="log"
        )
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # read PSF
        psf_kernel = PSFKernel.from_table_psf(
            self.psf, geom, max_radius=self.psf_margin * u.deg
        )

        # Energy Dispersion
        e_true = exposure.geom.axes[0].edges
        e_reco = counts.geom.axes[0].edges
        edisp = EDispKernel.from_diagonal_response(e_true=e_true, e_reco=e_reco)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max() > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg)
        )
        mask_fermi = WcsNDMap(counts.geom, mask)

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem, exposure=exposure, psf=psf_kernel, edisp=edisp
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso, exposure=exposure, edisp=edisp)
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso
        background_model = BackgroundModel(
            background_total, name="bkg_iem+iso", datasets_names=[dataset_name]
        )
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models([background_model] + FHL3_roi)

        # Dataset
        dataset = MapDataset(
            models=model_total,
            counts=counts,
            exposure=exposure,
            psf=psf_kernel,
            edisp=edisp,
            mask_fit=mask_fermi,
            name=dataset_name,
        )
        cat_stat = dataset.stat_sum()

        datasets = Datasets([dataset])
        fit = Fit(datasets)
        results = fit.run(**self.optimize_opts)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            datasets.write(path=Path(self.resdir), prefix=dataset.name, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            for model in FHL3_roi:
                if (
                    self.FHL3[model.name].data["ROI_num"] == kr
                    and self.FHL3[model.name].data["Signif_Avg"] >= self.sig_cut
                ):
                    flux_points = FluxPointsEstimator(
                        e_edges=self.El_flux, source=model.name, n_sigma_ul=2,
                    ).run(datasets=datasets)
                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)
Example 8
    def run_region(self, kr, lon, lat, radius):
        # TODO: for now we have to read/create the all-sky maps in each job,
        # because a <functools._lru_cache_wrapper> object cannot be pickled;
        # move this back to __init__ when fixed

        log.info(f"ROI {kr}: loading data")

        # exposure
        exposure_hpx = Map.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_exposure_cube_hpx.fits.gz")
        exposure_hpx.unit = "cm2 s"

        # psf
        psf_map = PSFMap.read(
            "$GAMMAPY_DATA/fermi_3fhl/fermi_3fhl_psf_gc.fits.gz",
            format="gtpsf")
        # reduce size of the PSF
        axis = psf_map.psf_map.geom.axes["rad"].center.to_value(u.deg)
        indmax = np.argmin(np.abs(self.psf_margin - axis))
        psf_map = psf_map.slice_by_idx(slices={"rad": slice(0, indmax)})

        # iem
        iem_filepath = BASE_PATH / "data" / "gll_iem_v06_extrapolated.fits"
        iem_fermi_extra = Map.read(iem_filepath)
        # norm=1.1, tilt=0.03 see paper appendix A
        model_iem = SkyModel(
            PowerLawNormSpectralModel(norm=1.1, tilt=0.03),
            TemplateSpatialModel(iem_fermi_extra, normalize=False),
            name="iem_extrapolated",
        )

        # ROI
        roi_time = time()
        ROI_pos = SkyCoord(lon, lat, frame="galactic", unit="deg")
        width = 2 * (radius + self.psf_margin)

        # Counts
        counts = Map.create(
            skydir=ROI_pos,
            width=width,
            proj="CAR",
            frame="galactic",
            binsz=1 / 8.0,
            axes=[self.energy_axis],
            dtype=float,
        )
        counts.fill_by_coord({
            "skycoord": self.events.radec,
            "energy": self.events.energy
        })

        axis = MapAxis.from_nodes(counts.geom.axes[0].center,
                                  name="energy_true",
                                  unit="GeV",
                                  interp="log")
        wcs = counts.geom.wcs
        geom = WcsGeom(wcs=wcs, npix=counts.geom.npix, axes=[axis])
        coords = geom.get_coord()
        # expo
        data = exposure_hpx.interp_by_coord(coords)
        exposure = WcsNDMap(geom, data, unit=exposure_hpx.unit, dtype=float)

        # Energy Dispersion
        edisp = EDispKernelMap.from_diagonal_response(
            energy_axis_true=axis, energy_axis=self.energy_axis)

        # fit mask
        if coords["lon"].min() < 90 * u.deg and coords["lon"].max(
        ) > 270 * u.deg:
            coords["lon"][coords["lon"].value > 180] -= 360 * u.deg
        mask = (
            (coords["lon"] >= coords["lon"].min() + self.psf_margin * u.deg)
            & (coords["lon"] <= coords["lon"].max() - self.psf_margin * u.deg)
            & (coords["lat"] >= coords["lat"].min() + self.psf_margin * u.deg)
            & (coords["lat"] <= coords["lat"].max() - self.psf_margin * u.deg))
        mask_fermi = WcsNDMap(counts.geom, mask)
        mask_safe_fermi = WcsNDMap(counts.geom, np.ones(mask.shape,
                                                        dtype=bool))

        log.info(f"ROI {kr}: pre-computing diffuse")

        # IEM
        eval_iem = MapEvaluator(
            model=model_iem,
            exposure=exposure,
            psf=psf_map.get_psf_kernel(geom),
            edisp=edisp.get_edisp_kernel(),
        )
        bkg_iem = eval_iem.compute_npred()

        # ISO
        eval_iso = MapEvaluator(model=self.model_iso,
                                exposure=exposure,
                                edisp=edisp.get_edisp_kernel())
        bkg_iso = eval_iso.compute_npred()

        # merge iem and iso, only one local normalization is fitted
        dataset_name = "3FHL_ROI_num" + str(kr)
        background_total = bkg_iem + bkg_iso

        # Dataset
        dataset = MapDataset(
            counts=counts,
            exposure=exposure,
            background=background_total,
            psf=psf_map,
            edisp=edisp,
            mask_fit=mask_fermi,
            mask_safe=mask_safe_fermi,
            name=dataset_name,
        )

        background_model = FoVBackgroundModel(dataset_name=dataset_name)
        background_model.parameters["norm"].min = 0.0

        # Sources model
        in_roi = self.FHL3.positions.galactic.contained_by(wcs)
        FHL3_roi = []
        for ks in range(len(self.FHL3.table)):
            if in_roi[ks]:
                model = self.FHL3[ks].sky_model()
                model.spatial_model.parameters.freeze_all()  # freeze spatial
                model.spectral_model.parameters["amplitude"].min = 0.0
                if isinstance(model.spectral_model, PowerLawSpectralModel):
                    model.spectral_model.parameters["index"].min = 0.1
                    model.spectral_model.parameters["index"].max = 10.0
                else:
                    model.spectral_model.parameters["alpha"].min = 0.1
                    model.spectral_model.parameters["alpha"].max = 10.0

                FHL3_roi.append(model)
        model_total = Models(FHL3_roi + [background_model])
        dataset.models = model_total

        cat_stat = dataset.stat_sum()
        datasets = Datasets([dataset])

        log.info(f"ROI {kr}: running fit")
        fit = Fit(**self.fit_opts)
        results = fit.run(datasets=datasets)
        print("ROI_num", str(kr), "\n", results)
        fit_stat = datasets.stat_sum()

        if results.message != "Optimization failed.":
            filedata = Path(self.resdir) / f"3FHL_ROI_num{kr}_datasets.yaml"
            filemodel = Path(self.resdir) / f"3FHL_ROI_num{kr}_models.yaml"
            datasets.write(filedata, filemodel, overwrite=True)
            np.savez(
                self.resdir / f"3FHL_ROI_num{kr}_fit_infos.npz",
                message=results.message,
                stat=[cat_stat, fit_stat],
            )

            exec_time = time() - roi_time
            print("ROI", kr, " time (s): ", exec_time)

            log.info(f"ROI {kr}: running flux points")
            for model in FHL3_roi:
                if (self.FHL3[model.name].data["ROI_num"] == kr
                        and self.FHL3[model.name].data["Signif_Avg"] >=
                        self.sig_cut):
                    print(model.name)
                    flux_points = FluxPointsEstimator(
                        energy_edges=self.El_flux,
                        source=model.name,
                        n_sigma_ul=2,
                        selection_optional=["ul"],
                    ).run(datasets=datasets)
                    flux_points.meta["sqrt_ts_threshold_ul"] = 1

                    filename = self.resdir / f"{model.name}_flux_points.fits"
                    flux_points.write(filename, overwrite=True)

            exec_time = time() - roi_time - exec_time
            print("ROI", kr, " Flux points time (s): ", exec_time)