Code example #1
    run_list=None,
    obs_table=obs_table_with_group_id,
    ntot_group=obs_groups.n_groups,
    excluded_sources=exclusion_table,
)

# Define the energy and offset binning to use
ebounds = EnergyBounds.equal_log_spacing(0.1, 100, 15, 'TeV')
# offset = sqrt_space(start=0, stop=2.5, num=20) * u.deg
offset = np.linspace(0, 2.5, 20) * u.deg

# Make the model (i.e. stack counts and livetime)
bgmaker.make_model("2D", ebounds=ebounds, offset=offset)

# Smooth the model
bgmaker.smooth_models("2D")
# Write the raw and smoothed models to disk
bgmaker.save_models("2D")
bgmaker.save_models(modeltype="2D", smooth=True)

# Now copy the background files as "bkg" into the source runs
data_dir = "data_new"
shutil.move(outdir, data_dir)
datastore = DataStore.from_dir("$HESS_DATA")
datastore.copy_obs(datastore.obs_table, data_dir)

group_filename = data_dir + '/background/group-def.fits'
data_store = DataStore.from_dir(data_dir)
hdu_index_table = bgmaker.make_total_index_table(
    data_store=data_store,
    modeltype='2D',
    out_dir_background_model="background",
    filename_obs_group_table=group_filename,
    smooth=True)
hdu_index_table.write(data_dir + '/hdu-index.fits.gz', overwrite=True)
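To check that the background models were linked correctly, the resulting HDU index table can be read back with astropy and filtered for the background rows. This is a quick sanity check that is not part of the original snippet, and it assumes the standard HDU_TYPE column of the HDU index table format:

# Quick sanity check (not in the original snippet): list the background entries
from astropy.table import Table

hdu_index = Table.read(data_dir + '/hdu-index.fits.gz')
print(hdu_index[hdu_index['HDU_TYPE'] == 'bkg'])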
Code example #2
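The function below is an excerpt from a larger script, so its imports and the make_empty_cube, make_mean_psf_cube and make_mean_rmf helpers live elsewhere in that script. A minimal import block it would need might look like the sketch below; the exact module paths are assumptions based on the Gammapy 0.6-era layout this code targets.

# Assumed imports for the excerpt below (module paths are assumptions,
# based on the Gammapy 0.6-era API used in this script)
import os
import shutil
from pathlib import Path

import numpy as np
from astropy.coordinates import Angle
from astropy.table import Table

from gammapy.data import DataStore, ObservationList
from gammapy.data import ObservationGroups, ObservationGroupAxis
from gammapy.catalog import SourceCatalogGammaCat
from gammapy.background import OffDataBackgroundMaker
from gammapy.image import SkyMask
from gammapy.cube import StackedObsCubeMaker

# make_empty_cube, make_mean_psf_cube and make_mean_rmf are local helpers
# defined in the same script (not shown here).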
def make_cubes(ereco, etrue, use_etrue, center):
    tmpdir = os.path.expandvars('$GAMMAPY_EXTRA') + "/test_datasets/cube/data"
    outdir = tmpdir
    outdir2 = os.path.expandvars(
        '$GAMMAPY_EXTRA') + '/test_datasets/cube/background'

    if os.path.isdir("data"):
        shutil.rmtree("data")
    if os.path.isdir("background"):
        shutil.rmtree("background")
    Path(outdir2).mkdir()

    ds = DataStore.from_dir("$GAMMAPY_EXTRA/datasets/hess-crab4-hd-hap-prod2")
    ds.copy_obs(ds.obs_table, tmpdir)
    data_store = DataStore.from_dir(tmpdir)
    # Create a background model from the 4 Crab runs, using the counts outside
    # the exclusion regions. This is just for testing; normally such a model is
    # built from thousands of off (e.g. AGN) runs.
    axes = [ObservationGroupAxis('ZEN_PNT', [0, 49, 90], fmt='edges')]
    obs_groups = ObservationGroups(axes)
    obs_table_with_group_id = obs_groups.apply(data_store.obs_table)
    obs_groups.obs_groups_table.write(outdir2 + "/group-def.fits",
                                      overwrite=True)
    # Exclusion sources table
    cat = SourceCatalogGammaCat()
    exclusion_table = cat.table
    exclusion_table.rename_column('ra', 'RA')
    exclusion_table.rename_column('dec', 'DEC')
    radius = exclusion_table['morph_sigma']
    radius.value[np.isnan(radius)] = 0.3
    exclusion_table['Radius'] = radius
    exclusion_table = Table(exclusion_table)

    bgmaker = OffDataBackgroundMaker(data_store,
                                     outdir2,
                                     run_list=None,
                                     obs_table=obs_table_with_group_id,
                                     ntot_group=obs_groups.n_groups,
                                     excluded_sources=exclusion_table)
    bgmaker.make_model("2D")
    bgmaker.smooth_models("2D")
    bgmaker.save_models("2D")
    bgmaker.save_models(modeltype="2D", smooth=True)

    shutil.move(str(outdir2), str(outdir))
    fn = outdir + '/background/group-def.fits'
    hdu_index_table = bgmaker.make_total_index_table(
        data_store=data_store,
        modeltype='2D',
        out_dir_background_model="background",
        filename_obs_group_table=fn,
        smooth=True)
    fn = outdir + '/hdu-index.fits.gz'
    hdu_index_table.write(fn, overwrite=True)

    offset_band = Angle([0, 2.49], 'deg')

    ref_cube_images = make_empty_cube(image_size=50,
                                      energy=ereco,
                                      center=center)
    ref_cube_exposure = make_empty_cube(image_size=50,
                                        energy=etrue,
                                        center=center,
                                        data_unit="m2 s")

    data_store = DataStore.from_dir(tmpdir)

    refheader = ref_cube_images.sky_image_ref.to_image_hdu().header
    exclusion_mask = SkyMask.read(
        '$GAMMAPY_EXTRA/datasets/exclusion_masks/tevcat_exclusion.fits')
    exclusion_mask = exclusion_mask.reproject(reference=refheader)

    # Problem: loading the PSF table for one of the runs is not implemented yet,
    # so drop that row from the HDU index table.
    data_store.hdu_table.remove_row(14)

    cube_maker = StackedObsCubeMaker(empty_cube_images=ref_cube_images,
                                     empty_exposure_cube=ref_cube_exposure,
                                     offset_band=offset_band,
                                     data_store=data_store,
                                     obs_table=data_store.obs_table,
                                     exclusion_mask=exclusion_mask,
                                     save_bkg_scale=True)
    cube_maker.make_cubes(make_background_image=True, radius=10.)
    obslist = [data_store.obs(obs_id)
               for obs_id in data_store.obs_table["OBS_ID"]]
    ObsList = ObservationList(obslist)
    mean_psf_cube = make_mean_psf_cube(image_size=50,
                                       energy_cube=etrue,
                                       center_maps=center,
                                       center=center,
                                       ObsList=ObsList,
                                       spectral_index=2.3)
    if use_etrue:
        mean_rmf = make_mean_rmf(energy_true=etrue,
                                 energy_reco=ereco,
                                 center=center,
                                 ObsList=ObsList)

    filename_mask = 'exclusion_mask.fits'
    filename_counts = 'counts_cube.fits'
    filename_bkg = 'bkg_cube.fits'
    filename_significance = 'significance_cube.fits'
    filename_excess = 'excess_cube.fits'
    if use_etrue:
        filename_exposure = 'exposure_cube_etrue.fits'
        filename_psf = 'psf_cube_etrue.fits'
        filename_rmf = 'rmf.fits'
        mean_rmf.write(filename_rmf, clobber=True)
    else:
        filename_exposure = 'exposure_cube.fits'
        filename_psf = 'psf_cube.fits'
    exclusion_mask.write(filename_mask, clobber=True)
    cube_maker.counts_cube.write(filename_counts,
                                 format="fermi-counts",
                                 clobber=True)
    cube_maker.bkg_cube.write(filename_bkg,
                              format="fermi-counts",
                              clobber=True)
    cube_maker.significance_cube.write(filename_significance,
                                       format="fermi-counts",
                                       clobber=True)
    cube_maker.excess_cube.write(filename_excess,
                                 format="fermi-counts",
                                 clobber=True)
    cube_maker.exposure_cube.write(filename_exposure,
                                   format="fermi-counts",
                                   clobber=True)
    mean_psf_cube.write(filename_psf, format="fermi-counts", clobber=True)
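A sketch of how this function might be driven, assuming energy binnings built with EnergyBounds.equal_log_spacing (as in code example #1) and the Crab position as the map center; the binning values and the use_etrue choice are illustrative, not taken from the original script.

# Illustrative driver (assumed values, not from the original script)
from astropy.coordinates import SkyCoord
from gammapy.utils.energy import EnergyBounds

center = SkyCoord(83.63, 22.01, unit='deg', frame='icrs')    # Crab position
ereco = EnergyBounds.equal_log_spacing(0.5, 100, 5, 'TeV')   # reconstructed-energy bins
etrue = EnergyBounds.equal_log_spacing(0.1, 100, 20, 'TeV')  # true-energy bins

# Build the cubes in true energy (writes the FITS files listed above)
make_cubes(ereco=ereco, etrue=etrue, use_etrue=True, center=center)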