def make_fresh_dir(path):
    """Create an empty directory at ``path``, removing any existing one first.

    Parameters
    ----------
    path : str or Path
        Location of the directory to (re)create.

    Returns
    -------
    Path
        The freshly created directory.
    """
    target = Path(path)
    if target.is_dir():
        # Wipe the old tree so the caller always starts from an empty folder
        shutil.rmtree(str(target))
    target.mkdir()
    return target
def write_ogip(self, phafile=None, bkgfile=None, rmffile=None, arffile=None,
               outdir=None, clobber=True):
    """Write OGIP files.

    Only the objects that have been created with the appropriate
    functions beforehand are written (``pha``, ``bkg``, ``arf``, ``rmf``;
    each is skipped when it is still None).

    Parameters
    ----------
    phafile : str
        PHA filename
    bkgfile : str
        BKG filename
    rmffile : str
        RMF filename
    arffile : str
        ARF filename
    outdir : str
        Directory to write the files to (default: ``"ogip_data"``)
    clobber : bool
        Overwrite existing files
    """
    if outdir is None:
        outdir = "ogip_data"
    basedir = Path(outdir)
    basedir.mkdir(exist_ok=True)

    # Fill in default file names derived from the observation id
    if arffile is None:
        arffile = basedir / "arf_run{}.fits".format(self.obs)
    if rmffile is None:
        rmffile = basedir / "rmf_run{}.fits".format(self.obs)
    if phafile is None:
        phafile = basedir / "pha_run{}.pha".format(self.obs)
    if bkgfile is None:
        bkgfile = basedir / "bkg_run{}.pha".format(self.obs)

    # Remember where the PHA file goes (even if nothing is written yet)
    self.phafile = phafile

    if self.pha is not None:
        # The PHA header references the companion BKG/ARF/RMF files
        self.pha.write(str(phafile), bkg=str(bkgfile), arf=str(arffile),
                       rmf=str(rmffile), clobber=clobber)
    if self.bkg is not None:
        self.bkg.write(str(bkgfile), clobber=clobber)
    if self.arf is not None:
        self.arf.write(str(arffile), energy_unit='keV', effarea_unit='cm2',
                       clobber=clobber)
    if self.rmf is not None:
        self.rmf.write(str(rmffile), energy_unit='keV', clobber=clobber)
def build_notebooks(args):
    """Execute, test and publish the tutorial notebooks into the docs tree.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments; ``args.src`` is either the whole
        ``tutorials`` folder or a single notebook file.
    """
    if "GAMMAPY_DATA" not in os.environ:
        logging.info("GAMMAPY_DATA environment variable not set.")
        logging.info(
            "Running notebook tests requires this environment variable.")
        logging.info("Exiting now.")
        sys.exit()

    # Lay out the working folders
    src_path = Path(args.src)
    tmp_dir = Path("temp")
    empty_nbs_dir = Path("tutorials")
    filled_nbs_dir = Path("docs") / "notebooks"
    static_nbs_dir = Path("docs") / "_static" / "notebooks"

    rmtree(str(tmp_dir), ignore_errors=True)
    for folder in (tmp_dir, filled_nbs_dir, static_nbs_dir):
        folder.mkdir(parents=True, exist_ok=True)

    if src_path == empty_nbs_dir:
        # Full build: start from clean output folders and stage every notebook
        rmtree(str(tmp_dir), ignore_errors=True)
        rmtree(str(static_nbs_dir), ignore_errors=True)
        rmtree(str(filled_nbs_dir), ignore_errors=True)
        copytree(str(empty_nbs_dir), str(tmp_dir), ignore=ignorefiles)
    elif src_path.exists():
        # Single-notebook build: stage just that file
        nb_name = src_path.name
        copyfile(str(src_path), str(tmp_dir / nb_name))
    else:
        logging.info("Notebook file does not exist.")
        sys.exit()

    # Black-format and strip output cells in place
    subprocess.call("gammapy jupyter --src temp black", shell=True)
    subprocess.call("gammapy jupyter --src temp strip", shell=True)

    # Execute every staged notebook, remembering failures
    passed = True
    for nb_path in tmp_dir.glob("*.ipynb"):
        if not notebook_test(nb_path):
            passed = False

    # Convert into scripts and copy the generated, filled notebooks to docs.
    # NOTE: publication is currently unconditional; the "if passed:" guard
    # below is kept disabled on purpose.
    # if passed:
    if src_path == empty_nbs_dir:
        # copytree is needed to copy subfolder images
        copytree(str(empty_nbs_dir), str(static_nbs_dir), ignore=ignoreall)
        for nb_path in static_nbs_dir.glob("*.ipynb"):
            subprocess.call("jupyter nbconvert --to script '{}'".format(
                str(nb_path)), shell=True)
        copytree(str(tmp_dir), str(filled_nbs_dir), ignore=ignorefiles)
    else:
        src_path = tmp_dir / nb_name
        static_dest = static_nbs_dir / nb_name
        copyfile(str(src_path), str(static_dest))
        subprocess.call("jupyter nbconvert --to script '{}'".format(
            str(static_dest)), shell=True)
        copyfile(str(src_path), str(filled_nbs_dir / nb_name))
    # else:
    #     logging.info("Tests have not passed.")
    #     logging.info("Tutorials not ready for documentation building process.")
    #     rmtree(str(static_nbs_dir), ignore_errors=True)

    # Tear down the scratch folder
    rmtree(str(tmp_dir), ignore_errors=True)
# In[ ]:


def hgps_data_download():
    """Fetch each HGPS data file, skipping those already present locally."""
    base_url = "https://www.mpi-hd.mpg.de/hfm/HESS/hgps/data/"
    for filename in hgps_filenames:
        url = base_url + filename
        path = hgps_data_path / filename
        if path.exists():
            print("Already downloaded: {}".format(path))
        else:
            print("Downloading {} to {}".format(url, path))
            urlretrieve(url, str(path))


# Make sure the target folder exists before downloading into it
hgps_data_path.mkdir(parents=True, exist_ok=True)
hgps_data_download()

# Show what we ended up with on disk
print("\n\nFiles at {} :\n".format(hgps_data_path.absolute()))
for path in hgps_data_path.iterdir():
    print(path)


# ## Catalog with Astropy
#
# ### FITS file content
#
# Let's start by just opening up `hgps_catalog_v1.fits.gz` and looking at the content.
#
# Note that ``astropy.io.fits.open`` doesn't work with `Path` objects yet,
# so you have to call `str(path)` and pass a string.
# define energy grid
energy = energy_axis.edges * energy_axis.unit

# mean edisp over the observation list, evaluated at the source position
edisp = obs_list.make_mean_edisp(position=src_pos, e_true=energy, e_reco=energy)


# ### Save maps and IRFs to disk
#
# It is common to run the preparation step independent of the likelihood fit,
# because often the preparation of maps, PSF and energy dispersion is slow if
# you have a lot of data. We first create a folder:

# In[ ]:


path = Path("analysis_3d")
path.mkdir(exist_ok=True)


# And then write the maps and IRFs to disk by calling the dedicated
# `.write()` methods:

# In[ ]:


# write maps
maps["counts"].write(str(path / "counts.fits"), overwrite=True)
maps["background"].write(str(path / "background.fits"), overwrite=True)
maps["exposure"].write(str(path / "exposure.fits"), overwrite=True)

# write IRFs
psf_kernel.write(str(path / "psf.fits"), overwrite=True)
edisp.write(str(path / "edisp.fits"), overwrite=True)


# ## Likelihood fit