def convert_obs_groups_binning_def_michi_to_default():
    """Convert observation groups binning definition "michi" to "default".

    Builds the "michi" alt/az binning as an `ObservationGroups` object and
    writes it to ``bg_observation_groups_michi.ecsv``, then builds a lookup
    table mapping ``GROUP_ID`` -> (``ALT_ID``, ``AZ_ID``) and writes it to
    ``lookup_obs_groups_michi.ecsv``.
    """
    # observation groups binning definition "michi"
    # alt az bin edges definitions
    altitude_edges = Angle(
        [0, 20, 23, 27, 30, 33, 37, 40, 44, 49, 53, 58, 64, 72, 90], 'degree')
    azimuth_edges = Angle([-90, 90, 270], 'degree')

    # convert observation groups binning definition "michi" to "default"
    list_obs_group_axis = [
        ObservationGroupAxis('ALT', altitude_edges, fmt='edges'),
        ObservationGroupAxis('AZ', azimuth_edges, fmt='edges'),
    ]
    obs_groups_michi = ObservationGroups(list_obs_group_axis)
    print("Observation groups 'michi':")
    print(obs_groups_michi.obs_groups_table)
    # save
    outfile = 'bg_observation_groups_michi.ecsv'
    print('Writing {}'.format(outfile))
    obs_groups_michi.write(outfile)

    # lookup table: equivalences in group/file naming "default" <-> "michi"
    # 3 columns: GROUP_ID, ALT_ID, AZ_ID
    # 28 rows: 1 per GROUP_ID
    lookup_obs_groups_michi = Table()
    n_rows = obs_groups_michi.n_groups
    # BUGFIX: `np.int` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `int` is the documented replacement.
    lookup_obs_groups_michi['GROUP_ID'] = np.zeros(n_rows, dtype=int)
    lookup_obs_groups_michi['ALT_ID'] = np.zeros(n_rows, dtype=int)
    lookup_obs_groups_michi['AZ_ID'] = np.zeros(n_rows, dtype=int)

    # Fill one row per (alt, az) bin combination; GROUP_ID is the running
    # index so the table row order defines the group numbering.
    count_groups = 0
    for alt_id in np.arange(len(altitude_edges) - 1):
        for az_id in np.arange(len(azimuth_edges) - 1):
            lookup_obs_groups_michi['GROUP_ID'][count_groups] = count_groups
            lookup_obs_groups_michi['ALT_ID'][count_groups] = alt_id
            lookup_obs_groups_michi['AZ_ID'][count_groups] = az_id
            count_groups += 1

    print("lookup table:")
    print(lookup_obs_groups_michi)

    # save
    outfile = 'lookup_obs_groups_michi.ecsv'
    print('Writing {}'.format(outfile))
    # `~astropy.io.ascii` always overwrites the file
    ascii.write(lookup_obs_groups_michi, outfile,
                format='ecsv', fast_writer=False)
def create_dummy_observation_grouping():
    """Define dummy observation grouping.

    Define an observation grouping with only one group.

    Returns
    -------
    obs_groups : `~gammapy.data.ObservationGroups`
        Observation grouping.
    """
    axes = [
        ObservationGroupAxis('ALT', ALT_RANGE, fmt='edges'),
        ObservationGroupAxis('AZ', AZ_RANGE, fmt='edges'),
    ]
    grouping = ObservationGroups(axes)
    # Tag the single group with the module-level GROUP_ID constant.
    grouping.obs_groups_table['GROUP_ID'][0] = GROUP_ID
    return grouping
# Runs pointing within 2 deg of the target (sep computed earlier) are LMC runs.
LMCrun=datastore.obs_table[sep<2.0*u.deg]
LMCid=LMCrun['OBS_ID'].data
# get required runs: keep only AGN runs, i.e. drop LMC and SN run IDs
# from the full observation ID list.
agnid=list(set(obsid)-set(LMCid)-set(SNid))
mylist=datastore.obs_list(agnid)
# Zenith pointing angle (in deg, values only) of each selected observation.
zen_ang=[o1.pointing_zen.value for o1 in mylist]
# Define the grouping
# NOTE(review): `nbins` is not used below — the binning is the explicit
# `zenith_bins` edge list; confirm whether nbins is used in later cells.
nbins=10
zenith_bins=[0,10,20,30,40,50,90]
#zenith_bins=[min(zen_ang), 10.0, 20.0, 30.0, 45.0, 60.0, max(zen_ang)]
zenith_bins=zenith_bins*u.deg
axes = [ObservationGroupAxis('ZEN_PNT', zenith_bins, fmt='edges')]
# Create the ObservationGroups object
obs_groups = ObservationGroups(axes)
# write it to file
filename = str(outdir + "/group-def.fits")
obs_groups.obs_groups_table.write(filename, overwrite=True)
# Assign a GROUP_ID to every AGN run according to its zenith bin.
obs_table_with_group_id = obs_groups.apply(datastore.obs_table.select_obs_id(agnid))
# gammacat exclusion mask: load the gamma-cat source catalog to build
# the table of known sources to exclude.
# NOTE(review): hard-coded absolute user path — consider an env var.
fil_gammacat="/Users/asinha/Gammapy-dev/gammapy-extra/datasets/catalogs/gammacat/gammacat.fits.gz"
cat = SourceCatalogGammaCat(filename=fil_gammacat)
exclusion_table = cat.table.copy()
exclusion_table.rename_column('ra', 'RA')
def make_cubes(ereco, etrue, use_etrue, center):
    """Build test cube datasets from the 4 Crab runs and write them to FITS.

    Copies the ``hess-crab4-hd-hap-prod2`` observations into
    ``$GAMMAPY_EXTRA/test_datasets/cube``, builds a 2D off-run background
    model, stacks the runs into counts/bkg/significance/excess/exposure
    cubes plus a mean PSF cube, and (optionally) a mean RMF.

    Parameters
    ----------
    ereco : reconstructed-energy binning forwarded to `make_empty_cube`
        (presumably energy bin edges as an astropy Quantity — TODO confirm)
    etrue : true-energy binning, used for the exposure/PSF cubes and RMF
    use_etrue : bool
        If true, also compute and write the mean RMF and use the
        ``*_etrue`` output file names for exposure and PSF cubes.
    center : sky position used as the cube center (passed to helpers)
    """
    tmpdir = os.path.expandvars('$GAMMAPY_EXTRA') + "/test_datasets/cube/data"
    outdir = tmpdir
    outdir2 = os.path.expandvars(
        '$GAMMAPY_EXTRA') + '/test_datasets/cube/background'
    # Start from a clean slate: remove leftover output dirs from a previous run.
    if os.path.isdir("data"):
        shutil.rmtree("data")
    if os.path.isdir("background"):
        shutil.rmtree("background")
    Path(outdir2).mkdir()
    ds = DataStore.from_dir("$GAMMAPY_EXTRA/datasets/hess-crab4-hd-hap-prod2")
    ds.copy_obs(ds.obs_table, tmpdir)
    data_store = DataStore.from_dir(tmpdir)
    # Create a background model from the 4 Crab runs using the counts outside
    # the exclusion region. This is just for tests; normally one would use
    # thousands of AGN runs to build this kind of model.
    axes = [ObservationGroupAxis('ZEN_PNT', [0, 49, 90], fmt='edges')]
    obs_groups = ObservationGroups(axes)
    obs_table_with_group_id = obs_groups.apply(data_store.obs_table)
    obs_groups.obs_groups_table.write(outdir2 + "/group-def.fits",
                                      overwrite=True)
    # Exclusion sources table: known gamma-ray sources whose regions are
    # masked out when estimating the background.
    cat = SourceCatalogGammaCat()
    exclusion_table = cat.table
    exclusion_table.rename_column('ra', 'RA')
    exclusion_table.rename_column('dec', 'DEC')
    radius = exclusion_table['morph_sigma']
    # Sources with no measured extension get a default 0.3 (deg, presumably
    # — TODO confirm unit) exclusion radius.
    radius.value[np.isnan(radius)] = 0.3
    exclusion_table['Radius'] = radius
    exclusion_table = Table(exclusion_table)
    bgmaker = OffDataBackgroundMaker(data_store, outdir2, run_list=None,
                                     obs_table=obs_table_with_group_id,
                                     ntot_group=obs_groups.n_groups,
                                     excluded_sources=exclusion_table)
    # Build, smooth, and save both the raw and smoothed 2D background models.
    bgmaker.make_model("2D")
    bgmaker.smooth_models("2D")
    bgmaker.save_models("2D")
    bgmaker.save_models(modeltype="2D", smooth=True)
    # Move the background model dir under the data dir so one index table
    # can reference both.
    shutil.move(str(outdir2), str(outdir))
    fn = outdir + '/background/group-def.fits'
    hdu_index_table = bgmaker.make_total_index_table(
        data_store=data_store,
        modeltype='2D',
        out_dir_background_model="background",
        filename_obs_group_table=fn,
        smooth=True)
    fn = outdir + '/hdu-index.fits.gz'
    hdu_index_table.write(fn, overwrite=True)
    offset_band = Angle([0, 2.49], 'deg')
    # Empty reference cubes: counts-like images in reco energy, exposure in
    # true energy.
    ref_cube_images = make_empty_cube(image_size=50, energy=ereco,
                                      center=center)
    ref_cube_exposure = make_empty_cube(image_size=50, energy=etrue,
                                        center=center, data_unit="m2 s")
    # Re-open the data store so it picks up the hdu-index table written above.
    data_store = DataStore.from_dir(tmpdir)
    refheader = ref_cube_images.sky_image_ref.to_image_hdu().header
    exclusion_mask = SkyMask.read(
        '$GAMMAPY_EXTRA/datasets/exclusion_masks/tevcat_exclusion.fits')
    exclusion_mask = exclusion_mask.reproject(reference=refheader)
    # Problem loading the psf table for one of the runs (not implemented
    # yet), so drop that HDU row.
    data_store.hdu_table.remove_row(14)
    cube_maker = StackedObsCubeMaker(empty_cube_images=ref_cube_images,
                                     empty_exposure_cube=ref_cube_exposure,
                                     offset_band=offset_band,
                                     data_store=data_store,
                                     obs_table=data_store.obs_table,
                                     exclusion_mask=exclusion_mask,
                                     save_bkg_scale=True)
    cube_maker.make_cubes(make_background_image=True, radius=10.)
    # NOTE(review): the loop variable `id` shadows the builtin of the same
    # name — harmless here but worth renaming.
    obslist = [data_store.obs(id) for id in data_store.obs_table["OBS_ID"]]
    ObsList = ObservationList(obslist)
    mean_psf_cube = make_mean_psf_cube(image_size=50, energy_cube=etrue,
                                       center_maps=center, center=center,
                                       ObsList=ObsList, spectral_index=2.3)
    if use_etrue:
        mean_rmf = make_mean_rmf(energy_true=etrue, energy_reco=ereco,
                                 center=center, ObsList=ObsList)
    # Output file names; the exposure/PSF names depend on the energy axis used.
    filename_mask = 'exclusion_mask.fits'
    filename_counts = 'counts_cube.fits'
    filename_bkg = 'bkg_cube.fits'
    filename_significance = 'significance_cube.fits'
    filename_excess = 'excess_cube.fits'
    if use_etrue:
        filename_exposure = 'exposure_cube_etrue.fits'
        filename_psf = 'psf_cube_etrue.fits'
        filename_rmf = 'rmf.fits'
        # NOTE(review): `clobber=` is deprecated in astropy in favor of
        # `overwrite=` (removed in astropy 2.0) — confirm target version.
        mean_rmf.write(filename_rmf, clobber=True)
    else:
        filename_exposure = 'exposure_cube.fits'
        filename_psf = 'psf_cube.fits'
    exclusion_mask.write(filename_mask, clobber=True)
    cube_maker.counts_cube.write(filename_counts, format="fermi-counts",
                                 clobber=True)
    cube_maker.bkg_cube.write(filename_bkg, format="fermi-counts",
                              clobber=True)
    cube_maker.significance_cube.write(filename_significance,
                                       format="fermi-counts", clobber=True)
    cube_maker.excess_cube.write(filename_excess, format="fermi-counts",
                                 clobber=True)
    cube_maker.exposure_cube.write(filename_exposure, format="fermi-counts",
                                   clobber=True)
    mean_psf_cube.write(filename_psf, format="fermi-counts", clobber=True)
# Fresh working directory for the background model products.
scratch_dir = make_fresh_dir('background')
# (bare expression: notebook cell output showing the path)
scratch_dir

# ### Make an observation table defining the run grouping
#
# Prepare a scheme to group observations with similar observing conditions
# and create a new ObservationTable with the grouping ID for each run

# In[6]:

# Create a background model from the 4 Crab runs for the counts outside the
# exclusion region, so here outside the Crab
data_store = DataStore.from_dir(
    "$GAMMAPY_EXTRA/datasets/hess-crab4-hd-hap-prod2")

# Define the grouping you want to use to group the observations to make the
# acceptance curves.
# Here we use 2 zenith angle bins only; you can also add efficiency bins
# for example etc...
axes = [ObservationGroupAxis('ZEN_PNT', [0, 49, 90], fmt='edges')]

# Create the ObservationGroups object
obs_groups = ObservationGroups(axes)
# write it to file
filename = str(scratch_dir / 'group-def.fits')
obs_groups.obs_groups_table.write(filename, overwrite=True)

# Create a new ObservationTable with the column group_id.
# You give the runs list you want to use to produce the background model
# that are in your obs table.
# Here very simple: only the 4 Crab runs...
list_ids = [23523, 23526, 23559, 23592]
obs_table_with_group_id = obs_groups.apply(
    data_store.obs_table.select_obs_id(list_ids))

# ### Make table of known gamma-ray sources to exclude
d1 d1.obs_table['OBS_ID'] d1 print d1 crab_pos = SkyCoord.from_name('crab') crab_pos datastore datastore.table datastore.Table datastore.obs_table datastore.obs_table["ZEN_PNT"]<20.0 from gammapy.data import ObservationGroups, ObservationGroupAxis zenith = Angle([0, 30, 40, 50], 'deg') ntels = [3, 4] obs_groups = ObservationGroups([ ObservationGroupAxis('ZENITH', zenith, fmt='edges'), ObservationGroupAxis('N_TELS', ntels, fmt='values'), ]) print(obs_groups.info) obs1=obs_groups.apply(datastore.obs_table) zenith = Angle([0, 30, 40, 50], 'deg') ntels = [3, 4] obs_groups = ObservationGroups([ ObservationGroupAxis('ZEN_PNT', zenith, fmt='edges'), ObservationGroupAxis('N_TELS', ntels, fmt='values'), ]) obs_table zenith = Angle([0, 30, 40, 50], 'deg') ntels = [3, 4] obs_groups = ObservationGroups([