def from_eventlist(cls, event_list, bins):
    """Create CountsSpectrum from fits 'EVENTS' extension (`CountsSpectrum`).

    Subsets of the event list should be chosen via the appropriate methods in
    `~gammapy.data.EventList`.

    Parameters
    ----------
    event_list : `~astropy.io.fits.BinTableHDU`, `~gammapy.data.EventListDataset`, `~gammapy.data.EventList`, str
        Event list, or filename to read one from.
    bins : `~gammapy.spectrum.energy.EnergyBounds`
        Energy bin edges
    """
    # Normalize the accepted input flavors down to an EventList instance.
    # NOTE(review): the BinTableHDU branch passes the HDU itself to
    # EventList.read, which normally expects a filename — confirm this is
    # supported by the EventList.read implementation in use.
    if isinstance(event_list, fits.BinTableHDU):
        event_list = EventList.read(event_list)
    elif isinstance(event_list, EventListDataset):
        event_list = event_list.event_list
    elif isinstance(event_list, str):
        event_list = EventList.read(event_list, hdu='EVENTS')

    # Convert event energies to the unit of the bin edges before histogramming,
    # so the comparison against `bins.value` is unit-consistent.
    energy = Energy(event_list.energy).to(bins.unit)
    val, dummy = np.histogram(energy, bins.value)
    livetime = event_list.observation_live_time_duration
    return cls(val, bins, livetime)
def sample_sources(self, dataset):
    """Sample source model components.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list
    """
    sampled = []

    for index, model in enumerate(dataset.models):
        # The background is sampled separately; skip it here.
        if isinstance(model, BackgroundModel):
            continue

        # Work on a deep copy so the dataset's evaluator keeps its IRF
        # settings; PSF and energy dispersion are applied to the sampled
        # events later, so disable them for the npred computation.
        evaluator = copy.deepcopy(dataset.evaluators.get(model.name))
        evaluator.model.apply_irf["psf"] = False
        evaluator.model.apply_irf["edisp"] = False
        npred = evaluator.compute_npred()

        table = self._sample_coord_time(npred, ConstantTemporalModel(), dataset.gti)
        # MC_ID 0 is reserved for background; sources are numbered from 1.
        table["MC_ID"] = index + 1
        sampled.append(EventList(table))

    return EventList.stack(sampled)
def sample_sources(self, dataset):
    """Sample source model components.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list
    """
    sampled = []

    for index, evaluator in enumerate(dataset._evaluators):
        # Deep-copy so the dataset's evaluator keeps its IRFs; PSF and
        # energy dispersion are applied to the sampled events afterwards.
        evaluator = copy.deepcopy(evaluator)
        evaluator.edisp = None
        evaluator.psf = None

        npred = evaluator.compute_npred()
        table = self._sample_coord_time(npred, ConstantTemporalModel(), dataset.gti)
        # MC_ID 0 is reserved for background; sources are numbered from 1.
        table["MC_ID"] = index + 1
        sampled.append(EventList(table))

    return EventList.stack(sampled)
def process(self, obs):
    """Estimate background for one observation."""
    all_events = obs.events.select_circular_region(self.on_region)

    self.on_phase = self._check_intervals(self.on_phase)
    self.off_phase = self._check_intervals(self.off_phase)

    # Select the events falling inside each ON / OFF phase interval.
    list_on_events = [
        self.filter_events(all_events, interval) for interval in self.on_phase
    ]
    list_off_events = [
        self.filter_events(all_events, interval) for interval in self.off_phase
    ]

    # Normalization factors a_on / a_off: total phase fraction covered
    # by the ON and OFF intervals respectively.
    a_on = np.sum([interval[1] - interval[0] for interval in self.on_phase])
    a_off = np.sum([interval[1] - interval[0] for interval in self.off_phase])

    return BackgroundEstimate(
        on_region=self.on_region,
        on_events=EventList.stack(list_on_events),
        off_region=None,
        off_events=EventList.stack(list_off_events),
        a_on=a_on,
        a_off=a_off,
        method="Phase Bkg Estimator",
    )
class TestEventSelection:
    # Tests for spatial / energy event selection on a small synthetic list.

    def setup_class(self):
        # Four dummy events: two inside a 1 deg circle at (0, 0),
        # one at (0, 10) and one at (10, 10).
        table = Table()
        table["RA"] = [0.0, 0.0, 0.0, 10.0] * u.deg
        table["DEC"] = [0.0, 0.9, 10.0, 10.0] * u.deg
        table["ENERGY"] = [1.0, 1.5, 1.5, 10.0] * u.TeV
        table["OFFSET"] = [0.1, 0.5, 1.0, 1.5] * u.deg
        self.events = EventList(table)

        # Two ON regions: a circle around (0, 0) and a rectangle at (0, 10).
        center1 = SkyCoord(0.0, 0.0, frame="icrs", unit="deg")
        on_region1 = CircleSkyRegion(center1, radius=1.0 * u.deg)
        center2 = SkyCoord(0.0, 10.0, frame="icrs", unit="deg")
        on_region2 = RectangleSkyRegion(center2, width=0.5 * u.deg, height=0.3 * u.deg)
        self.on_regions = [on_region1, on_region2]

    def test_region_select(self):
        geom = WcsGeom.create(skydir=(0, 0), binsz=0.2, width=4.0 * u.deg, proj="TAN")
        # Circle around (0, 0) contains the first two events.
        new_list = self.events.select_region(self.on_regions[0], geom.wcs)
        assert len(new_list.table) == 2

        # Union of circle + rectangle also picks up the (0, 10) event.
        union_region = self.on_regions[0].union(self.on_regions[1])
        new_list = self.events.select_region(union_region, geom.wcs)
        assert len(new_list.table) == 3

        # DS9 region-string input: box at (0, 10) selects one event.
        region_string = "fk5;box(0,10, 0.25, 0.15)"
        new_list = self.events.select_region(region_string, geom.wcs)
        assert len(new_list.table) == 1

    def test_map_select(self):
        # Mask combines a spatial circle with a 0.5-2 TeV energy axis,
        # which keeps only the first two events.
        axis = MapAxis.from_edges((0.5, 2.0), unit="TeV", name="ENERGY")
        geom = WcsGeom.create(skydir=(0, 0), binsz=0.2, width=4.0 * u.deg, proj="TAN", axes=[axis])
        mask_data = geom.region_mask(regions=[self.on_regions[0]], inside=True)
        mask = Map.from_geom(geom, data=mask_data)
        new_list = self.events.select_map_mask(mask)
        assert len(new_list.table) == 2

    def test_select_energy(self):
        # 1-10 TeV keeps three of the four events (boundaries inclusive).
        energy_range = u.Quantity([1, 10], "TeV")
        new_list = self.events.select_energy(energy_range)
        assert len(new_list.table) == 3
def run(self, dataset, observation=None):
    """Run the event sampler, applying IRF corrections.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Map dataset
    observation : `~gammapy.data.Observation`
        In memory observation.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list.

    Raises
    ------
    ValueError
        If neither source nor background events can be sampled.
    """
    # Fix: the docstring documented an `edisp` parameter that does not
    # exist in the signature; edisp handling is driven by `dataset.edisp`.
    events = None

    if len(dataset.models) > 1:
        events_src = self.sample_sources(dataset)

        if len(events_src.table) > 0:
            if dataset.psf:
                events_src = self.sample_psf(dataset.psf, events_src)
            else:
                # No PSF: reconstructed positions equal true positions.
                events_src.table["RA"] = events_src.table["RA_TRUE"]
                events_src.table["DEC"] = events_src.table["DEC_TRUE"]

            if dataset.edisp:
                events_src = self.sample_edisp(dataset.edisp, events_src)
            else:
                # No energy dispersion: reconstructed energy equals true energy.
                events_src.table["ENERGY"] = events_src.table["ENERGY_TRUE"]

        if dataset.background:
            events_bkg = self.sample_background(dataset)
            events = EventList.from_stack([events_bkg, events_src])
        else:
            events = events_src

    if len(dataset.models) == 1 and dataset.background_model is not None:
        events_bkg = self.sample_background(dataset)
        events = EventList.from_stack([events_bkg])

    # Fix: previously `events` was unbound (NameError) when the dataset had
    # a single model and no background model; fail with a clear message.
    if events is None:
        raise ValueError(
            "No events sampled: dataset has a single model and no background model."
        )

    events = self.event_det_coords(observation, events)
    events.table["EVENT_ID"] = np.arange(len(events.table))
    events.table.meta = self.event_list_meta(dataset, observation)

    # Keep only events falling inside the dataset geometry.
    geom = dataset._geom
    selection = geom.contains(events.map_coord(geom))
    return events.select_row_subset(selection)
def setup_class(self):
    """Build a four-event dummy list and two ON regions (circle + rectangle)."""
    table = Table()
    table["RA"] = [0.0, 0.0, 0.0, 10.0] * u.deg
    table["DEC"] = [0.0, 0.9, 10.0, 10.0] * u.deg
    table["ENERGY"] = [1.0, 1.5, 1.5, 10.0] * u.TeV
    table["OFFSET"] = [0.1, 0.5, 1.0, 1.5] * u.deg
    self.events = EventList(table)

    # Circle around (0, 0) and rectangle around (0, 10).
    circle_center = SkyCoord(0.0, 0.0, frame="icrs", unit="deg")
    rect_center = SkyCoord(0.0, 10.0, frame="icrs", unit="deg")
    self.on_regions = [
        CircleSkyRegion(circle_center, radius=1.0 * u.deg),
        RectangleSkyRegion(rect_center, width=0.5 * u.deg, height=0.3 * u.deg),
    ]
def main(input_files, verbose, output):
    """Read event lists, stack them, and plot diagnostic figures."""
    setup_logging(verbose=verbose)

    event_lists = [EventList.read(path) for path in input_files]
    # Stack everything onto the first list in place.
    events = event_lists[0]
    for other in event_lists[1:]:
        events.stack(other)

    # One figure per diagnostic plot.
    figures = []
    for plot in (
        events.plot_energy,
        events.plot_energy_offset,
        events.plot_offset2_distribution,
        events.plot_time,
    ):
        fig = plt.figure()
        plot(ax=fig.add_subplot(1, 1, 1))
        figures.append(fig)

    if output is None:
        plt.show()
    else:
        # Write all figures into a single PDF.
        with PdfPages(output) as pdf:
            for fig in figures:
                fig.tight_layout(pad=0, h_pad=1.08, w_pad=1.08)
                pdf.savefig(fig)
def test_region_nd_map_fill_events(region_map):
    """Filling a region map from a DL3 event file gives the expected counts."""
    filename = "$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_023523.fits.gz"
    filled = Map.from_geom(region_map.geom)
    filled.fill_events(EventList.read(filename))
    assert_allclose(filled.data.sum(), 665)
def extract_spectra_fermi(target_position, on_radius):
    """Extract 1d spectra for Fermi-LAT"""
    log.info("Extracting 1d spectra for Fermi-LAT")

    events = EventList.read("data/fermi/events.fits.gz")
    exposure = HpxNDMap.read("data/fermi/exposure_cube.fits.gz")
    psf = EnergyDependentTablePSF.read("data/fermi/psf.fits.gz")

    # Restrict the configured binning to the 30 GeV - 2 TeV range.
    in_range = (config.energy_bins >= 30 * u.GeV) * (config.energy_bins <= 2 * u.TeV)
    energy = config.energy_bins[in_range]

    bkg_estimate = ring_background_estimate(
        pos=target_position,
        on_radius=on_radius,
        inner_radius=1 * u.deg,
        outer_radius=2 * u.deg,
        events=events,
    )

    extraction = SpectrumExtractionFermi1D(
        events=events,
        exposure=exposure,
        psf=psf,
        bkg_estimate=bkg_estimate,
        target_position=target_position,
        on_radius=on_radius,
        energy=energy,
    )
    obs = extraction.run()

    path = "results/spectra/fermi"
    log.info(f"Writing to {path}")
    obs.write(path, use_sherpa=True, overwrite=True)
def sample_background(self, dataset):
    """Sample background

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset

    Returns
    -------
    events : `gammapy.data.EventList`
        Background events
    """
    npred_bkg = dataset.background_model.evaluate()
    table = self._sample_coord_time(npred_bkg, ConstantTemporalModel(), dataset.gti)

    # MC_ID 0 marks background events; sources are numbered from 1.
    table["MC_ID"] = 0

    # No IRFs applied to background: the sampled "true" columns are
    # directly the reconstructed quantities, so rename them.
    for true_name, reco_name in (
        ("ENERGY_TRUE", "ENERGY"),
        ("RA_TRUE", "RA"),
        ("DEC_TRUE", "DEC"),
    ):
        table.rename_column(true_name, reco_name)

    return EventList(table)
def sample_background(self, dataset):
    """Sample background

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Map dataset

    Returns
    -------
    events : `gammapy.data.EventList`
        Background events
    """
    npred = dataset.npred_background()
    table = self._sample_coord_time(npred, ConstantTemporalModel(), dataset.gti)

    # MC_ID 0 marks background events; sources are numbered from 1.
    table["MC_ID"] = 0

    # No IRFs applied to background: reconstructed quantities are copies
    # of the sampled true quantities.
    for true_name, reco_name in (
        ("ENERGY_TRUE", "ENERGY"),
        ("RA_TRUE", "RA"),
        ("DEC_TRUE", "DEC"),
    ):
        table[reco_name] = table[true_name]

    return EventList(table)
def extract_spectra_fermi(target_position, on_radius):
    """Extract 1d spectra for Fermi-LAT"""
    log.info("Extracting 1d spectra for Fermi-LAT")

    events = EventList.read("data/fermi/events.fits.gz")
    exposure = HpxNDMap.read("data/fermi/exposure_cube.fits.gz")
    psf = EnergyDependentTablePSF.read("data/fermi/psf.fits.gz")

    # Log-spaced energy grid from 30 GeV to 2 TeV, ~0.1 dex spacing.
    emin, emax, dex = 0.03, 2, 0.1
    num = int(np.log10(emax / emin) / dex)
    energy = np.logspace(start=np.log10(emin), stop=np.log10(emax), num=num) * u.TeV

    bkg_estimate = fermi_ring_background_extract(events, target_position, on_radius)

    extraction = SpectrumExtractionFermi1D(
        events=events,
        exposure=exposure,
        psf=psf,
        bkg_estimate=bkg_estimate,
        target_position=target_position,
        on_radius=on_radius,
        energy=energy,
        containment_correction=True,
    )
    obs = extraction.run()

    path = f"{config.repo_path}/results/spectra/fermi"
    log.info(f"Writing to {path}")
    obs.write(path, use_sherpa=True, overwrite=True)
def load(self):
    """Load HDU as appropriate class.

    TODO: this should probably go via an extensible registry.
    """
    from gammapy.irf import IRF_REGISTRY

    filename = self.path()
    hdu = self.hdu_name
    hdu_class = self.hdu_class

    # Imports are kept local (lazy) to avoid import cycles.
    if hdu_class == "events":
        from gammapy.data import EventList

        cls = EventList
    elif hdu_class == "gti":
        from gammapy.data import GTI

        cls = GTI
    elif hdu_class == "map":
        from gammapy.maps import Map

        # Maps need the extra `format` argument, so return directly.
        return Map.read(filename, hdu=hdu, format=self.format)
    else:
        # Everything else is an IRF resolved via the registry.
        cls = IRF_REGISTRY.get_cls(hdu_class)

    return cls.read(filename, hdu=hdu)
def events():
    """Two-row dummy event list fixture."""
    columns = {
        "EVENT_ID": np.array([1, 5], dtype=np.uint16),
        "RA": [5, 11] * u.deg,
        "DEC": [0, 0] * u.deg,
        "ENERGY": [10, 12] * u.TeV,
        "TIME": [3, 4] * u.s,
    }
    return EventList(Table(columns))
def __init__(self, evt_file="$JOINT_CRAB/data/fermi/events.fits.gz", exp_file="$JOINT_CRAB/data/fermi/exposure_cube.fits.gz", psf_file="$JOINT_CRAB/data/fermi/psf.fits.gz", max_psf_radius='0.5 deg'):
    """Read Fermi-LAT events, exposure cube and PSF from disk."""
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    # The FITS file stores no unit on the exposure map; set it explicitly.
    self.exposure.unit = u.Unit('cm2s')
    self.psf = EnergyDependentTablePSF.read(psf_file)
def sample_sources(self, dataset):
    """Sample source model components.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list
    """
    sampled = []

    for index, model in enumerate(dataset.models):
        # Background is sampled separately.
        if isinstance(model, BackgroundModel):
            continue

        # Deep-copy and disable PSF/edisp: IRFs are applied to the
        # sampled events afterwards.
        evaluator = copy.deepcopy(dataset.evaluators.get(model.name))
        evaluator.model.apply_irf["psf"] = False
        evaluator.model.apply_irf["edisp"] = False
        npred = evaluator.compute_npred()

        # Fall back to a constant light curve when the model carries no
        # temporal component.
        temporal_model = getattr(model, "temporal_model", None)
        if temporal_model is None:
            temporal_model = ConstantTemporalModel()

        table = self._sample_coord_time(npred, temporal_model, dataset.gti)

        if len(table) > 0:
            table["MC_ID"] = index + 1
        else:
            # Keep the MC_ID column present even for an empty table.
            table.add_column(table.Column(name="MC_ID", length=0, dtype=int))

        sampled.append(EventList(table))

    return EventList.stack(sampled)
def sample_sources(self, dataset):
    """Sample source model components.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Map dataset.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list
    """
    sampled = []

    for index, evaluator in enumerate(dataset.evaluators.values()):
        # Refresh cached IRFs / geometry if the evaluator is stale.
        if evaluator.needs_update:
            evaluator.update(
                dataset.exposure,
                dataset.psf,
                dataset.edisp,
                dataset._geom,
                dataset.mask,
            )

        npred = evaluator.apply_exposure(evaluator.compute_flux())

        # Fall back to a constant light curve when no temporal model is set.
        temporal_model = evaluator.model.temporal_model
        if temporal_model is None:
            temporal_model = ConstantTemporalModel()

        table = self._sample_coord_time(npred, temporal_model, dataset.gti)

        if len(table) > 0:
            table["MC_ID"] = index + 1
        else:
            # Keep the MC_ID column present even when no events were drawn.
            table.add_column(table.Column(name="MC_ID", length=0, dtype=int))

        sampled.append(EventList(table))

    return EventList.from_stack(sampled)
def __init__(
    self,
    evt_file="../data/joint-crab/fermi/events.fits.gz",
    exp_file="../data/joint-crab/fermi/exposure_cube.fits.gz",
    psf_file="../data/joint-crab/fermi/psf.fits.gz",
):
    """Read Fermi-LAT events, exposure cube and PSF map from disk."""
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    # The FITS file stores no unit on the exposure map; set it explicitly.
    self.exposure.unit = u.Unit("cm2s")
    self.psf = PSFMap.read(psf_file, format="gtpsf")
def _make_counts(dataset, observation, phases):
    """Fill a counts map from events selected in the given phase intervals."""
    # Select from the full observation event list for every interval.
    selected = [
        observation.events.select_parameter(parameter="PHASE", band=interval)
        for interval in phases
    ]
    stacked = EventList.stack(selected)

    counts = RegionNDMap.from_geom(dataset.counts.geom)
    counts.fill_events(stacked)
    return counts
def read_dataset(filename_dataset, filename_model, obs_id):
    """Read a MapDataset and rebuild its counts map from the matching event file."""
    log.info(f"Reading {filename_dataset}")
    dataset = MapDataset.read(filename_dataset)

    filename_events = get_filename_events(filename_dataset, filename_model, obs_id)
    log.info(f"Reading {filename_events}")

    # Replace the counts map with one filled from the event list.
    counts = Map.from_geom(WCS_GEOM)
    counts.fill_events(EventList.read(filename_events))
    dataset.counts = counts
    return dataset
def __init__(
    self,
    evt_file="../data/joint-crab/fermi/events.fits.gz",
    exp_file="../data/joint-crab/fermi/exposure_cube.fits.gz",
    psf_file="../data/joint-crab/fermi/psf.fits.gz",
    max_psf_radius="0.5 deg",
):
    """Read Fermi-LAT events, exposure cube and PSF from disk."""
    self.events = EventList.read(evt_file)
    self.exposure = HpxNDMap.read(exp_file)
    # The FITS file stores no unit on the exposure map; set it explicitly.
    self.exposure.unit = u.Unit("cm2s")
    self.psf = EnergyDependentTablePSF.read(psf_file)
def _make_counts(dataset, observation, phases):
    """Fill a counts spectrum from events selected in the given phase intervals.

    Parameters are the dataset (provides the ON region and energy binning),
    the observation (provides the event list), and an iterable of
    (start, stop) phase intervals.
    """
    events = observation.events.select_region(dataset.counts.region)

    event_lists = []
    for interval in phases:
        # BUG FIX: the original reassigned `events` here, so each interval
        # filtered the PREVIOUS interval's output instead of the base
        # region-selected list, yielding (almost always) empty selections.
        selected = events.select_parameter(parameter="PHASE", band=interval)
        event_lists.append(selected)

    events_off = EventList.stack(event_lists)

    edges = dataset.counts.energy.edges
    counts = CountsSpectrum(energy_hi=edges[1:], energy_lo=edges[:-1])
    counts.fill_events(events_off)
    return counts
def image_bin(event_file, reference_file, out_file, overwrite):
    """Bin events into an image.

    Parameters: event FITS filename, reference image FITS filename,
    output filename, and whether to overwrite an existing output file.
    """
    log.info('Reading {}'.format(event_file))
    events = EventList.read(event_file)
    reference_image = fits.open(reference_file)[0]
    out_image = bin_events_in_image(events, reference_image)
    log.info('Writing {}'.format(out_file))
    # `clobber` was deprecated (astropy 1.3) and later removed in favor
    # of `overwrite`.
    out_image.writeto(out_file, overwrite=overwrite)
def sample_sources(self, dataset):
    """Sample source model components.

    Parameters
    ----------
    dataset : `~gammapy.datasets.MapDataset`
        Map dataset.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list
    """
    sampled = []

    for index, model in enumerate(dataset.models):
        # Background is sampled separately.
        if isinstance(model, BackgroundModel):
            continue

        evaluator = dataset.evaluators.get(model)
        npred = evaluator.apply_exposure(evaluator.compute_flux())

        # Fall back to a constant light curve when no temporal model is set.
        temporal_model = model.temporal_model
        if temporal_model is None:
            temporal_model = ConstantTemporalModel()

        table = self._sample_coord_time(npred, temporal_model, dataset.gti)

        if len(table) > 0:
            table["MC_ID"] = index + 1
        else:
            # Keep the MC_ID column present even when no events were drawn.
            table.add_column(table.Column(name="MC_ID", length=0, dtype=int))

        sampled.append(EventList(table))

    return EventList.from_stack(sampled)
def load(self):
    """Load HDU as appropriate class.

    TODO: this should probably go via an extensible registry.
    """
    from importlib import import_module

    # hdu_class -> (module path, class name); imports stay lazy because
    # the module is only imported for the selected class.
    dispatch = {
        "events": ("gammapy.data", "EventList"),
        "gti": ("gammapy.data", "GTI"),
        "aeff_2d": ("gammapy.irf", "EffectiveAreaTable2D"),
        "edisp_2d": ("gammapy.irf", "EnergyDispersion2D"),
        "psf_table": ("gammapy.irf", "PSF3D"),
        "psf_3gauss": ("gammapy.irf", "EnergyDependentMultiGaussPSF"),
        "psf_king": ("gammapy.irf", "PSFKing"),
        "bkg_2d": ("gammapy.irf", "Background2D"),
        "bkg_3d": ("gammapy.irf", "Background3D"),
    }

    hdu_class = self.hdu_class
    if hdu_class not in dispatch:
        raise ValueError(f"Invalid hdu_class: {hdu_class}")

    module_name, class_name = dispatch[hdu_class]
    cls = getattr(import_module(module_name), class_name)
    return cls.read(self.path(), hdu=self.hdu_name)
def make_counts_image(energy_band):
    """Apply event selections and bin event positions into a counts image.

    Reads TOTAL_EVENTS_FILE, keeps events in `energy_band`, bins them
    using the WCS header of REF_IMAGE, and writes COUNTS_IMAGE.
    """
    event_list = EventList.read(TOTAL_EVENTS_FILE)
    n_events = len(event_list)
    print('Number of events: {}'.format(n_events))

    print('Applying energy band selection: {}'.format(energy_band))
    event_list = event_list.select_energy(energy_band)
    n_events_selected = len(event_list)
    fraction = 100 * n_events_selected / n_events
    print('Number of events: {}. Fraction: {:.1f}%'.format(n_events_selected, fraction))

    print('Filling counts image ...')
    header = fits.getheader(REF_IMAGE)
    image = event_list.fill_counts_header(header)

    print('Writing {}'.format(COUNTS_IMAGE))
    # `clobber` was deprecated (astropy 1.3) and later removed in favor
    # of `overwrite`.
    image.writeto(COUNTS_IMAGE, overwrite=True)
def cli_image_bin(event_file, reference_file, out_file, overwrite):
    """Bin events into an image.

    You have to give the event, reference and out FITS filename.
    """
    log.info("Executing cli_image_bin")

    log.info("Reading {}".format(event_file))
    events = EventList.read(event_file)

    log.info("Reading {}".format(reference_file))
    reference_map = Map.read(reference_file)

    # Create an empty map with the reference geometry and fill it.
    counts_map = Map.from_geom(reference_map.geom)
    fill_map_counts(counts_map, events)

    log.info("Writing {}".format(out_file))
    counts_map.write(out_file, overwrite=overwrite)
def run(self, dataset, observation=None):
    """Run the event sampler, applying IRF corrections.

    Parameters
    ----------
    dataset : `~gammapy.cube.MapDataset`
        Map dataset
    observation : `~gammapy.data.Observation`
        In memory observation.

    Returns
    -------
    events : `~gammapy.data.EventList`
        Event list.
    """
    # Fix: the docstring documented an `edisp` parameter that does not
    # exist in the signature; edisp handling is driven by `dataset.edisp`.
    events_src = self.sample_sources(dataset)

    if dataset.psf:
        events_src = self.sample_psf(dataset.psf, events_src)
    else:
        # No PSF: reconstructed positions equal true positions.
        events_src.table["RA"] = events_src.table["RA_TRUE"]
        events_src.table["DEC"] = events_src.table["DEC_TRUE"]

    if dataset.edisp:
        events_src = self.sample_edisp(dataset.edisp, events_src)
    else:
        # No energy dispersion: reconstructed energy equals true energy.
        events_src.table["ENERGY"] = events_src.table["ENERGY_TRUE"]

    if dataset.background_model:
        events_bkg = self.sample_background(dataset)
        events = EventList.stack([events_bkg, events_src])
    else:
        events = events_src

    events.table["EVENT_ID"] = np.arange(len(events.table))
    events.table.meta = self.event_list_meta(dataset, observation)
    return events
def setup_class(self):
    """Create a five-event observation with pointing, dead-time and GTI info."""
    table = Table()
    table["RA"] = [0.0, 0.0, 0.0, 0.0, 10.0] * u.deg
    table["DEC"] = [0.0, 0.05, 0.9, 10.0, 10.0] * u.deg
    table["ENERGY"] = [1.0, 1.0, 1.5, 1.5, 10.0] * u.TeV
    table["OFFSET"] = [0.1, 0.1, 0.5, 1.0, 1.5] * u.deg
    table.meta["RA_PNT"] = 0 * u.deg
    table.meta["DEC_PNT"] = 0.5 * u.deg

    # Observation info: pointing position and dead-time correction.
    meta_obs = {"RA_PNT": 0 * u.deg, "DEC_PNT": 0.5 * u.deg, "DEADC": 1}

    # Single good-time interval [1, 3] with a 2010-01-01 reference time.
    gti_meta = time_ref_to_dict("2010-01-01")
    gti = GTI(Table({"START": [1], "STOP": [3]}, meta=gti_meta))

    self.observation = Observation(
        events=EventList(table), obs_info=meta_obs, gti=gti
    )
def setup_class(self):
    """Create two mirrored observations (DEC sign flipped) with GTIs."""
    self.observations = []

    for sign in (-1, 1):
        events = Table()
        events["RA"] = [0.0, 0.0, 0.0, 0.0, 10.0] * u.deg
        events["DEC"] = sign * ([0.0, 0.05, 0.9, 10.0, 10.0] * u.deg)
        events["ENERGY"] = [1.0, 1.0, 1.5, 1.5, 10.0] * u.TeV
        events["OFFSET"] = [0.1, 0.1, 0.5, 1.0, 1.5] * u.deg

        # Pointing position mirrors the event declinations.
        obs_info = {"RA_PNT": 0 * u.deg, "DEC_PNT": sign * 0.5 * u.deg, "DEADC": 1}
        events.meta.update(obs_info)

        # Single good-time interval [1, 3] with a 2010-01-01 reference time.
        gti_table = Table({"START": [1], "STOP": [3]}, meta=time_ref_to_dict("2010-01-01"))
        gti = GTI(gti_table)

        self.observations.append(
            Observation(events=EventList(events), obs_info=obs_info, gti=gti)
        )
def test_stack(self):
    """Stacking three copies of the 49-event list triples the row count."""
    stacked = EventList.stack([self.events, self.events, self.events])
    assert len(stacked.table) == 3 * 49
def make_healpix_image(events, nside, sigma):
    """Bin events into a smoothed HEALPix counts map.

    Parameters
    ----------
    events : table-like with 'L' and 'B' columns (galactic coords, degrees)
    nside : int
        HEALPix nside parameter.
    sigma : float
        Gaussian smoothing width passed to ``hp.smoothing`` (radians).

    Returns
    -------
    counts : array of smoothed per-pixel counts.
    """
    # Convert galactic (L, B) in degrees to HEALPix (theta, phi) in radians.
    theta = np.deg2rad(90 - events['B'])
    phi = np.deg2rad(events['L'])

    # Histogram pixel indices; bins are centered on integer pixel numbers.
    bins = np.arange(hp.nside2npix(nside) + 1) - 0.5
    data = hp.ang2pix(nside, theta, phi)
    counts, _ = np.histogram(data, bins)

    counts = hp.smoothing(counts, sigma=sigma)
    return counts


if __name__ == '__main__':
    # NOTE(review): hard-coded absolute user path — parameterize if reused.
    filename = '/Users/deil/code/fhee/data/2fhl_events.fits.gz'
    events = EventList.read(filename)
    events.meta['EUNIT'] = 'GeV'
    counts = make_healpix_image(events, nside=256, sigma=0.001)

    filename = '2fhl_counts_healpix.fits.gz'
    print('Writing {}'.format(filename))
    hp.write_map(filename, m=counts)
    # Removed a block of commented-out plotting/debug code (dead code).
def counts_skyimage_2fhl(**kwargs):
    """Fill the 2FHL event list into an empty 'Counts' sky map."""
    log.info('Computing counts map.')
    event_list = EventList.read('2fhl_events.fits.gz')
    counts = SkyMap.empty('Counts', **kwargs)
    counts.fill(event_list)
    return counts
def counts_skyimage_2fhl(**kwargs):
    """Fill the 2FHL event list into an empty 'Counts' sky map."""
    log.info("Computing counts map.")
    event_list = EventList.read("2fhl_events.fits.gz")
    counts = SkyMap.empty("Counts", **kwargs)
    counts.fill(event_list)
    return counts