"""Runs commands to produce convolved predicted counts map in current directory.""" import numpy as np import matplotlib.pyplot as plt from astropy.io import fits from gammapy.stats import significance from gammapy.image.utils import disk_correlate from aplpy import FITSFigure from npred_general import prepare_images model, gtmodel, ratio, counts, header = prepare_images() # Top hat correlation correlation_radius = 3 correlated_gtmodel = disk_correlate(gtmodel, correlation_radius) correlated_counts = disk_correlate(counts, correlation_radius) correlated_model = disk_correlate(model, correlation_radius) # Fermi significance fermi_significance = np.nan_to_num(significance(correlated_counts, gtmodel, method='lima')) # Gammapy significance significance = np.nan_to_num(significance(correlated_counts, correlated_model, method='lima')) titles = ['Gammapy Significance', 'Fermi Tools Significance'] # Plot fig = plt.figure(figsize=(10, 5)) hdu1 = fits.ImageHDU(significance, header)
def make_model_image(psf='Fermi', resolution=0.1, center=[0, 0], lat_range=[0, 180],
                     lon_range=[0, 360], catalog='1FHL', total_flux='False',
                     filename='1fhl_fermi_psf.fits'):
    """Simulate a model flux image from a point-source catalog and write it to FITS.

    `total_flux` is either the string 'True' (renormalize to the catalog's summed
    flux) or a numeric target total used directly; it is only applied in the
    'None', 'Gaussian' and 'Disk' PSF branches.

    NOTE: make_reference_survey_image, correlate_gauss, correlate_fermi_psf and
    disk_correlate are expected to be defined or imported elsewhere in this module.
    """
    from gammapy.image import coordinates
    from astropy.convolution import convolve
    from astropy.modeling.models import Gaussian2D, Disk2D
    from gammapy.image import images_to_cube
    from gammapy.image import paste_cutout_into_image
    from gammapy.image.utils import solid_angle

    reference = make_reference_survey_image(resolution, lat_range, lon_range,
                                            center, units='ph/cm2/s/sr')  # Check these units!
    lons, lats = coordinates(reference)
    source_table = fits.open('simulated_galaxy_1.fits')[1].data
    sources = np.arange(len(source_table['flux'])).astype(int)

    if psf == 'None':
        # If there is no PSF defined, sources will be modelled as Gaussians
        source_kernel = Gaussian2D(0, 0, 0, 0.1, 0.1)
        new_image = source_kernel(lats, lons)
    else:
        # Otherwise, all of the flux is placed into the pixel nearest each source,
        # to be convolved later with the chosen PSF.
        # Hence the original empty reference image is used as the starting point.
        new_image = reference.data

    from gammapy.image import wcs_histogram2d
    header = reference.header
    lon = source_table['GLON']
    lat = source_table['GLAT']
    flux = source_table['flux']
    total_point_image = wcs_histogram2d(header, lon, lat, weights=flux)
    new_image = total_point_image.data

    # Disabled per-source loop kept for reference; it deposits each source's flux
    # into the nearest pixel by hand instead of using wcs_histogram2d.
    """
    print(len(sources))
    total_point_image = fits.ImageHDU(header=reference.header, data=new_image)
    from astropy.wcs import WCS
    wcs = WCS(total_point_image.header)
    new_image = np.zeros_like(total_point_image.data, dtype=np.float64)

    for source in sources:
        source_type = 'PointSource'  # source_table['Source_Type'][source]
        print(source)
        if source_type == 'ExtendedSource':
            raise NotImplementedError
            # This needs more work...
            #image = source_table['Image'][source]
            #image.data = (image.data * solid_angle(image).value.mean()) / 1000000000000  # TODO: fix this hack... units???
            #resample_factor1 = np.round(reference.header['CDELT1'] / image.header['CDELT1'])
            #resample_factor2 = np.round(reference.header['CDELT2'] / image.header['CDELT2'])
            #block_factors = np.array([resample_factor1, resample_factor2])
            # TODO: fix this approximation... kapteyn image utils reprojectto?
            #resampled_image = block_reduce_hdu(image, block_factors, np.sum)
            #paste_cutout_into_image(total_point_image, resampled_image)
        elif source_type == 'PointSource':
            lon = source_table['GLON'][source]
            lat = source_table['GLAT'][source]
            flux = source_table['flux'][source]
            precise = False
            if precise:
                raise NotImplementedError
            else:
                print(lon, lat)
                x, y = wcs.wcs_world2pix(lon, lat, 0)
                print(x, y)
                # TODO: check if this is 0.5 or 1 pix off
                xi, yi = x.astype(int), y.astype(int)
                new_image[yi, xi] += flux

    total_point_image = fits.ImageHDU(header=reference.header, data=new_image)
    """

    # Ensure the total flux (or counts) is preserved
    if total_flux == 'True':
        factor = source_table['Flux'].sum()
    else:
        factor = total_flux

    if psf == 'None':
        new_image = (new_image / new_image.sum()) * factor
    elif psf == 'Gaussian':
        new_image = correlate_gauss((new_image / new_image.sum()), 2)
    elif psf == 'Disk':
        new_image = disk_correlate((new_image / new_image.sum()), 2)
    elif psf == 'Fermi':
        print("Original Flux")
        print(new_image.sum())
        new_image = correlate_fermi_psf(new_image, 5)
        print("PSF Convolved Flux")
        print(new_image.sum())

    header = reference.header
    image = fits.ImageHDU(data=new_image, header=header)
    image.writeto(filename, clobber=True)
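# ---------------------------------------------------------------------------
# Illustrative sketch: the disabled block inside make_model_image deposits each
# point source's flux into the pixel containing its (GLON, GLAT) position via
# `wcs_world2pix`, which is also what `wcs_histogram2d(header, lon, lat,
# weights=flux)` produces in the active code path. A self-contained version of
# that technique with plain astropy (the function name is hypothetical) could
# look like this:
import numpy as np
from astropy.io import fits
from astropy.wcs import WCS


def deposit_point_sources(reference_hdu, lon, lat, flux):
    """Bin point-source fluxes onto the pixel grid of a reference image HDU."""
    wcs = WCS(reference_hdu.header)
    image = np.zeros_like(reference_hdu.data, dtype=np.float64)
    # 0-based pixel convention; truncation to the pixel index may be 0.5 pixel
    # off depending on the header, as the original TODO notes.
    x, y = wcs.wcs_world2pix(lon, lat, 0)
    xi, yi = x.astype(int), y.astype(int)
    np.add.at(image, (yi, xi), flux)  # accumulates sources that share a pixel
    return fits.ImageHDU(data=image, header=reference_hdu.header)
# ---------------------------------------------------------------------------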
"""Runs commands to produce convolved predicted counts map in current directory.""" import numpy as np import matplotlib.pyplot as plt from astropy.io import fits from gammapy.stats import significance from gammapy.image.utils import disk_correlate from npred_general import prepare_images from aplpy import FITSFigure model, gtmodel, ratio, counts, header = prepare_images() # Top hat correlation correlation_radius = 3 correlated_gtmodel = disk_correlate(gtmodel, correlation_radius) correlated_counts = disk_correlate(counts, correlation_radius) correlated_model = disk_correlate(model, correlation_radius) # Fermi significance fermi_significance = np.nan_to_num(significance(correlated_counts, gtmodel, method='lima')) # Gammapy significance significance = np.nan_to_num(significance(correlated_counts, correlated_model, method='lima')) titles = ['Gammapy Significance', 'Fermi Tools Significance'] # Plot fig = plt.figure(figsize=(10, 5)) hdu1 = fits.ImageHDU(significance, header)
def make_model_image(psf='Fermi', resolution=0.1, center=[0, 0], lat_range=[0, 180], lon_range=[0, 360], catalog='1FHL', total_flux='False', filename='1fhl_fermi_psf.fits'): from gammapy.image import coordinates from astropy.convolution import convolve from astropy.modeling.models import Gaussian2D, Disk2D from gammapy.image import images_to_cube from gammapy.image import paste_cutout_into_image from gammapy.image.utils import solid_angle reference = make_reference_survey_image( resolution, lat_range, lon_range, center, units='ph/cm2/s/sr') #Check these units! lons, lats = coordinates(reference) source_table = fits.open('simulated_galaxy_1.fits')[1].data sources = np.arange(len(source_table['flux'])).astype(int) if psf == 'None': # If there is no PSF defined, sources will be modelled as Gaussians source_kernel = Gaussian2D(0, 0, 0, 0.1, 0.1) new_image = im_1 = im_2 = im_3 = source_kernel(lats, lons) else: # Otherwise, all of the flux will be placed into the reference pixel to be later PSF convolved with the defined PSF # Hence original reference empty image is called new_image = im_1 = im_2 = im_3 = reference.data from gammapy.image import wcs_histogram2d header = reference.header lon = source_table['GLON'] lat = source_table['GLAT'] flux = source_table['flux'] total_point_image = wcs_histogram2d(header, lon, lat, weights=flux) new_image = total_point_image.data #import IPython; IPython.embed(); 1 /0 """ print len(sources) total_point_image = fits.ImageHDU(header=reference.header, data=new_image) from astropy.wcs import WCS wcs = WCS(total_point_image.header) new_image = np.zeros_like(total_point_image.data, dtype=np.float64) for source in sources: source_type = 'PointSource'#source_table['Source_Type'][source] print source if source_type == 'ExtendedSource': raise NotImplementedError # This needs more work... #image = source_table['Image'][source] #image.data = (image.data * solid_angle(image).value.mean()) / 1000000000000 # TODO: fix this hack... units??? #resample_factor1 = np.round(reference.header['CDELT1'] / image.header['CDELT1']) #resample_factor2 = np.round(reference.header['CDELT2'] / image.header['CDELT2']) #block_factors = np.array([resample_factor1, resample_factor2]) # TODO: fix this approximation... kapteyn image utils reprojectto? #resampled_image = block_reduce_hdu(image, block_factors, np.sum) #paste_cutout_into_image(total_point_image, resampled_image) elif source_type == 'PointSource': lon = source_table['GLON'][source] lat = source_table['GLAT'][source] flux = source_table['flux'][source] precise = False if precise: raise NotImplementedError else: #print(lon, lat) #import IPython; IPython.embed() print lon, lat x, y = wcs.wcs_world2pix(lon, lat, 0) print x, y # TODO: check if this is 0.5 or 1 pix off xi, yi = x.astype(int), y.astype(int) new_image[yi, xi] += flux total_point_image = fits.ImageHDU(header=reference.header, data=new_image) """ # Ensure flux or counts remain the same if total_flux == 'True': factor = source_table['Flux'].sum() else: factor = total_flux if psf == 'None': new_image = (new_image / new_image.sum()) * factor elif psf == 'Gaussian': new_image = correlate_gauss((new_image / new_image.sum()), 2) elif psf == 'Disk': new_image = disk_correlate((new_image / new_image.sum()), 2) elif psf == 'Fermi': print "Original Flux" print new_image.sum() new_image = correlate_fermi_psf(new_image, 5) print "PSF Convolved Flux" print new_image.sum() header = reference.header image = fits.ImageHDU(data=new_image, header=header) image.writeto(filename, clobber=True)
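# ---------------------------------------------------------------------------
# `make_reference_survey_image`, `correlate_gauss` and `correlate_fermi_psf`
# are defined elsewhere in the original codebase. As a hedged stand-in for the
# Gaussian PSF branch only, a flux-preserving smoothing with astropy's
# convolution machinery might look like this (the function name and the width
# parameter are illustrative):
import numpy as np
from astropy.convolution import Gaussian2DKernel, convolve


def gaussian_smooth(image, sigma_pix):
    """Smooth an image with a Gaussian kernel of width `sigma_pix` pixels.

    With a normalized kernel the total flux is preserved up to edge effects,
    which is the property the `psf == 'Gaussian'` branch relies on.
    """
    kernel = Gaussian2DKernel(sigma_pix)
    return convolve(image, kernel, boundary='extend', normalize_kernel=True)


# Quick check: total flux before and after smoothing should agree closely.
if __name__ == '__main__':
    test = np.zeros((101, 101))
    test[50, 50] = 1.0
    print(test.sum(), gaussian_smooth(test, sigma_pix=2).sum())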