Example #1
def moc2array(data, uniq, nside=128, reduceFunc=np.sum, density=True, fillVal=0.):
    """Convert a Multi-Order Coverage Map to a single nside HEALPix array. Useful
    for converting maps output by LIGO alerts. Expect that future versions of
    healpy or astropy will be able to replace this functionality. Note that this is
    a convenience function that will probably degrade portions of the MOC that are
    sampled at high resolution.

    Details of the HEALPix Multi-Order Coverage map: http://ivoa.net/documents/MOC/20190404/PR-MOC-1.1-20190404.pdf

    Parameters
    ----------
    data : np.array
        Data values for the MOC map
    uniq : np.array
        The UNIQ values for the MOC map
    nside : int (128)
        The output map nside
    reduceFunc : function (np.sum)
        The function to use to combine data into single healpixels.
    density : bool (True)
        If True, multiplies data values by pixel area before applying reduceFunc, and divides
        the final array by the output pixel area. Should be True if working on a probability density MOC.
    fillVal : float (0.)
        Value to fill empty HEALPixels with. Good choices include 0 (default), hp.UNSEEN, and np.nan.

    Returns
    -------
    np.array : HEALPix array at the given nside. Units should be the same as the input map as processed by reduceFunc.
    """

    # NUNIQ packing, from page 12 of http://ivoa.net/documents/MOC/20190404/PR-MOC-1.1-20190404.pdf
    orders = np.floor(np.log2(uniq / 4) / 2).astype(int)
    npixs = (uniq - 4 * 4**orders).astype(int)

    nsides = 2**orders
    names = ['ra', 'dec', 'area']
    types = [float]*len(names)
    data_points = np.zeros(data.size, dtype=list(zip(names, types)))
    for order in np.unique(orders):
        good = np.where(orders == order)
        ra, dec = _hpid2RaDec(nsides[good][0], npixs[good], nest=True)
        data_points['ra'][good] = ra
        data_points['dec'][good] = dec
        data_points['area'][good] = hp.nside2pixarea(nsides[good][0])

    if density:
        tobin_data = data*data_points['area']
    else:
        tobin_data = data

    result = _healbin(data_points['ra'], data_points['dec'], tobin_data, nside=nside,
                      reduceFunc=reduceFunc, fillVal=fillVal)

    if density:
        good = np.where(result != fillVal)
        result[good] = result[good] / hp.nside2pixarea(nside)

    return result
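A minimal usage sketch for moc2array, assuming it and its private helpers (_hpid2RaDec, _healbin) are importable from the surrounding module; the toy input is a uniform density packed in NUNIQ form (uniq = 4*4**order + ipix):

import numpy as np
import healpy as hp

# Toy MOC: uniform probability density over the full sky at order 3 (nside=8).
order = 3
npix = hp.nside2npix(2**order)
uniq = 4 * 4**order + np.arange(npix)
data = np.full(npix, 1.0 / (4.0 * np.pi))  # density per steradian

hpx = moc2array(data, uniq, nside=128)
print(hpx.sum() * hp.nside2pixarea(128))  # ~1.0 for a normalised input density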
Example #2
def search_map_searched_area_pt(smap, ra, dec, nest=True):
    """Returns the area on the sky required to be imaged in a greedy
    search according to the map ``smap`` before imaging the point
    ``(ra, dec)``.

    :param smap: The map used for the search.

    :param ra: The RA of the point of interest (radians).

    :param dec: The DEC of the point of interest (radians).

    :param nest: ``True`` if the map is in nested order (default).

    :return: The area (in square degrees) that must be searched when
      following a greedy algorithm before the point at ``(ra, dec)``
      is imaged.

    """

    smap = np.atleast_1d(smap)
    
    nside = hp.npix2nside(smap.shape[0])

    theta = np.pi/2.0 - dec
    
    ptind = hp.ang2pix(nside, theta, ra, nest=nest)

    ptlevel = smap[ptind]

    nabove = np.sum(smap >= ptlevel)

    return nabove*hp.nside2pixarea(nside, degrees=True)
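A usage sketch with a synthetic map; the random smap below is only a stand-in for a real posterior search map:

import numpy as np
import healpy as hp

nside = 64
smap = np.random.rand(hp.nside2npix(nside))  # toy search map, ring ordered
# Area (deg^2) searched greedily before imaging RA=1.0, DEC=0.5 (radians)
print(search_map_searched_area_pt(smap, 1.0, 0.5, nest=False))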
Example #3
def search_map_searched_area_cl(smap, cl):
    """Returns the area that must be searched greedily using ``smap`` to
    reach the credible level ``cl`` (fraction between 0 and 1 of the
    probability in the map).

    Note that the resulting credible area will be biased because the
    posterior pixel counts used to construct the search map are
    subject to Poisson fluctuations and we search greedily, resulting
    in the upward-fluctuations being searched first.  This bias tends
    to reduce the searched area compared to the true area that would
    be computed with perfect knowledge of the distribution on the sky.
    See https://dcc.ligo.org/LIGO-P1400054 .

    :param smap: The search map (need not be normalised).

    :param cl: Fraction (between 0 and 1) of the probability to be
      covered in the search.

    :return: The area (in square degrees) that must be searched to
      reach the desired coverage of the distribution.

    """

    smap = np.atleast_1d(smap)
    nside = hp.npix2nside(smap.shape[0])
    
    # Normalise the map to sum to 1:
    smap = smap / np.sum(smap)

    cum_probs = np.cumsum(np.sort(smap)[::-1])

    nsearched = np.sum(cum_probs <= cl)

    return nsearched*hp.nside2pixarea(nside, degrees=True)
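The same toy map works for the credible-level variant; a sketch:

import numpy as np
import healpy as hp

nside = 64
smap = np.random.rand(hp.nside2npix(nside))  # toy search map
print(search_map_searched_area_cl(smap, 0.9))  # deg^2 to reach 90% coverage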
Example #4
  def footprint_area(cat,ngal=1,mask=None,nside=4096,nest=True,label=''):
    import healpy as hp
    import matplotlib
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    # plt.style.use('/home/troxel/SVA1/SVA1StyleSheet.mplstyle')
    from matplotlib.colors import LogNorm
    import pylab

    mask=CatalogMethods.check_mask(cat.coadd,mask)

    if not hasattr(cat, 'pix'):
      cat.pix=CatalogMethods.radec_to_hpix(cat.ra,cat.dec,nside=nside,nest=True)
    area=hp.nside2pixarea(nside)*(180./np.pi)**2
    print('pixel area (arcmin^2)', area*60**2)
    mask1=np.bincount(cat.pix[mask])>ngal
    print('footprint area (deg^2)', np.sum(mask1)*area)

    pix=np.arange(len(mask1))[mask1]
    print(pix)
    tmp=np.zeros((12*nside**2), dtype=[('hpix','int')])
    tmp['hpix'][pix.astype(int)]=1
    print(tmp['hpix'][pix.astype(int)])
    fio.write('footprint_hpix'+label+'.fits.gz',tmp,clobber=True)

    tmp2=np.zeros(12*nside**2)
    tmp2[pix.astype(int)]=1
    hp.cartview(tmp2,nest=True)
    plt.savefig('footprint_hpix'+label+'.png')
    plt.close()

    return 
Example #5
    def ConfidenceArea(self, adLevels):
        
        # create a normalized cumulative distribution
        self.log_skymap_sorted = np.sort(self.log_skymap.flatten())[::-1]
        self.log_skymap_cum = cumulative.fast_log_cumulative(self.log_skymap_sorted)
        # find the indices corresponding to the given CLs
        adLevels = np.ravel([adLevels])
        args = [(self.log_skymap_sorted,self.log_skymap_cum,level) for level in adLevels]
        adHeights = self.pool.map(FindHeights,args)

        dA = hp.nside2pixarea(self.nside, degrees=True)
        areas = []
        for height in adHeights:
            (index_hp,) = np.where(self.log_skymap>height)
            areas.append(len(index_hp)*dA)
        self.area_confidence = np.array(areas)
        
        if self.injection is not None:
            ra,dec = self.injection.get_ra_dec()
            theta,phi = eq2ang(ra,dec)
            ipix = hp.pixelfunc.ang2pix(self.nside,theta,phi, nest=True)
            logPval = self.log_skymap[ipix]
            confidence_level = np.exp(self.log_skymap_cum[np.abs(self.log_skymap_sorted-logPval).argmin()])
            height = FindHeights((self.log_skymap_sorted,self.log_skymap_cum,confidence_level))
            (index_hp,) = np.where(self.log_skymap >= height)
            searched_area = len(index_hp)*dA
            return self.area_confidence,(confidence_level,searched_area)

        del self.log_skymap_sorted
        del self.log_skymap_cum
        return self.area_confidence,None
Example #6
def GetArea(cat=None, ra=None, dec=None, nside=4096):
    """
    Compute the area covered by the dataset, in square degrees.
    The function checks if any objects are each in HEALPixel, then adds up how many pixels were found, and multiplies by the pixel area.

    Parameters
    ----------
    cat (None/structured array)
        If not None, the structured data array (e.g. numpy recarray)
    ra (float array/str)
        If `cat` is None, an array of the RA values. Otherwise, the column name for the RA column in `cat`.
    dec (float array/str)
        if `cat` is None, an array of the DEC values. Otherwise, the column name for the DEC column in `cat`.
    nside (int)
        HEALPix nside, which determines the pixel size used in the computation

    Returns
    -------
    area (float)
        The area covered by the dataset (in square degrees).

    """
    hps = RaDec2Healpix(cat=cat, ra=ra, dec=dec, nside=nside, nest=False)
    uhps = _np.unique(hps)
    area = len(uhps) * _hp.nside2pixarea(nside, degrees=True)
    return area
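The same computation written inline with plain healpy calls (RaDec2Healpix above is a module-local wrapper); a sketch with synthetic points over roughly a 10 x 10 deg patch:

import numpy as np
import healpy as hp

nside = 256  # coarse enough that 1e4 points occupy most pixels in the patch
ra = np.random.uniform(0., 10., 10000)
dec = np.random.uniform(-5., 5., 10000)
hps = hp.ang2pix(nside, np.radians(90. - dec), np.radians(ra), nest=False)
area = len(np.unique(hps)) * hp.nside2pixarea(nside, degrees=True)
print(area)  # ~100 deg^2, up to pixelisation and unoccupied pixels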
Example #7
 def __call__(self):
     #load module
     mod = importlib.import_module(self.ctx.params.beam_profile_provider)
     
     #delegate loading of profile
     
     params = self.ctx.params
     #setting up angular grid
     beam_area = np.radians(params.beam_elevation) * np.radians(params.beam_azimut) # [rad]
     pixel_area = hp.nside2pixarea(params.beam_nside, degrees=False)
     pixels = int(np.floor(np.sqrt(beam_area / pixel_area)))
     pixels = pixels if pixels % 2 == 1 else pixels + 1
     
     pixel_size = hp.max_pixrad(params.beam_nside)
     theta = (np.linspace(0, pixels, pixels*2+1)-pixels/2)*(pixel_size)
     phi = theta
     
     beam_spec = BeamSpec(phi, theta, pixels**2)
     frequencies = np.arange(params.beam_frequency_min, params.beam_frequency_max, params.beam_frequency_pixscale)
     
     beam_profiles, beam_norms = mod.load_beam_profile(beam_spec, frequencies, self.ctx.params)
     
     self.ctx.beam_spec = beam_spec
     self.ctx.frequencies = frequencies
     self.ctx.beam_profiles = beam_profiles
     self.ctx.beam_norms = beam_norms
Example #8
def calc_omega(beam, nside):
    """Calculate the beam area and beam^2 area."""
    pixarea = hp.nside2pixarea(nside)  # pixel area in steradians
    Omega_p = np.sum(beam) * pixarea
    Omega_pp = np.sum(beam**2) * pixarea
    # return Omega_p**2/Omega_pp
    return Omega_p, Omega_pp
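A usage sketch with a toy Gaussian beam centred on the pole; the commented-out ratio Omega_p**2/Omega_pp is the effective beam solid angle:

import numpy as np
import healpy as hp

nside = 64
theta, _ = hp.pix2ang(nside, np.arange(hp.nside2npix(nside)))
beam = np.exp(-0.5 * (theta / np.radians(5.0))**2)  # toy beam, 5 deg width
Omega_p, Omega_pp = calc_omega(beam, nside)
print(Omega_p, Omega_pp, Omega_p**2 / Omega_pp)  # solid angles in steradians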
Example #9
    def ConfidenceVolume(self, adLevels):
        # create a normalized cumulative distribution
        self.log_volume_map_sorted = np.sort(self.log_volume_map.flatten())[::-1]
        self.log_volume_map_cum = cumulative.fast_log_cumulative(self.log_volume_map_sorted)
        
        # find the indices corresponding to the given CLs
        adLevels = np.ravel([adLevels])
        args = [(self.log_volume_map_sorted,self.log_volume_map_cum,level) for level in adLevels]
        adHeights = self.pool.map(FindHeights,args)
        self.heights = {str(lev):hei for lev,hei in zip(adLevels,adHeights)}
        dd = np.diff(self.d_grid)[0]
        dA = hp.nside2pixarea(self.nside)
        volumes = []
        for height in adHeights:
            (index_d, index_hp) = np.where(self.log_volume_map>height)
            volumes.append(np.sum([self.d_grid[i_d]**2. * dd * dA for i_d in index_d]))
        self.volume_confidence = np.array(volumes)

        if self.injection is not None:
            ra,dec = self.injection.get_ra_dec()
            distance = self.injection.distance
            logPval = self.logPosterior(distance,ra,dec)
            confidence_level = np.exp(self.log_volume_map_cum[np.abs(self.log_volume_map_sorted-logPval).argmin()])
            height = FindHeights((self.log_volume_map_sorted,self.log_volume_map_cum,confidence_level))
            (index_d, index_hp) = np.where(self.log_volume_map >= height)
            searched_volume = np.sum([self.d_grid[i_d]**2. * dd * dA for i_d in index_d])
            self.injection_volume_confidence = confidence_level
            self.injection_volume_height = height
            return self.volume_confidence,(confidence_level,searched_volume)

        del self.log_volume_map_sorted
        del self.log_volume_map_cum
        return self.volume_confidence,None
Example #10
    def __init__(self,fake_data_key,exposure_maps,*args,**kwargs):
        mfa.mask_for_analysis.__init__(self,*args,**kwargs)

        self.area_mask = np.sum(1-self.mask_total)*hp.nside2pixarea(self.nside)*(360/(2.*np.pi))**2
        self.fake_data_key = fake_data_key

        self.exposure_maps = exposure_maps
Example #11
def ScaleCovariance(cat1, cat1_cov, cat2, njack=24, nside=4096, nest=False, cat1_ra='alphawin_j2000_i', cat1_dec='deltaawin_j2000_i', cat2_ra='ra', cat2_dec='dec'):
    pix = Utils.RaDec2Healpix(cat1[cat1_ra], cat1[cat1_dec], nside, nest=False)
    upix = np.unique(pix)
    npix = len(upix)
    area = hp.nside2pixarea(nside, degrees=True) * npix
    jarea = area / njack
    print(area, jarea)
Example #12
def smooth_variance_map(var_m, fwhm):
    """Smooth a variance map

    Algorithm from 'Pixel errors in convolved maps'
    J.P. Leahy, version 0.2

    Parameters
    ----------
    var_m : array
        input variance map
    fwhm : float (radians)
        target fwhm

    Returns
    -------
    smoothed_var_m : array
        smoothed variance map
    """

    # smooth map
    fwhm_variance = fwhm / np.sqrt(2)
    smoothed_var_m = hp.smoothing(var_m, fwhm=fwhm_variance, regression=False)

    # normalization factor
    pix_area = hp.nside2pixarea(hp.npix2nside(len(var_m)))
    orig_beam_width = fwhm/np.sqrt(8*np.log(2))
    A_vb = pix_area / (4. * np.pi * orig_beam_width**2)
    smoothed_var_m *= A_vb

    return smoothed_var_m
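A usage sketch (note that hp.smoothing is called above with the regression keyword, which only exists in older healpy releases):

import numpy as np
import healpy as hp

nside = 128
var_m = np.ones(hp.nside2npix(nside))  # white-noise variance map
smoothed = smooth_variance_map(var_m, fwhm=np.radians(1.0))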
Example #13
def MakeMap(fitsfile,output=None,nside=256,nest=True,norm=True,masked=False):
    #
    # Save map as healpy format from fits input
    #
    import numpy as np
    import pyfits as pf
    import healpy as hp
    
    hdulist = pf.open(fitsfile)
    Cat = hdulist[1].data
    hdulist.close()

    pixarea = hp.nside2pixarea(nside,degrees=True)
    print('nside = ', nside, ' --> Pixel area (deg2) = ', pixarea)

    tiles  = hp.ang2pix(nside,-Cat['dec']*np.pi/180.+np.pi/2.,Cat['ra']*np.pi/180.,nest)
    npix = hp.nside2npix(nside)
    n_hit_selec = np.zeros(npix)

    for itile in tiles:
        if norm:
            n_hit_selec[itile]+=1./pixarea
        else:
            n_hit_selec[itile]+=1
    if masked:
        n_hit_selec = MaskBorders(n_hit_selec)
    if output is None:
        return n_hit_selec
    else:
        hp.write_map(output,n_hit_selec,nest)   
        return
Example #14
    def setup_grid(self, wavelength=656*u.nm):
        # use HEALPIX to get evenly sized tiles
        NSIDE = hp.npix2nside(self.ntiles)

        colat, lon = hp.pix2ang(NSIDE, np.arange(0, self.ntiles))
        # co-latitude
        theta_values = u.Quantity(colat, unit=u.rad)
        # longitude
        phi_values = u.Quantity(lon, unit=u.rad)

        # the following formulae use the Roche approximation and assume
        # solid body rotation
        # solve for radius of rotating star at these co-latitudes
        if self.distortion:
            radii = self.radius*np.array([newton(surface, 1.01, args=(self.omega, x)) for x in theta_values])
        else:
            radii = self.radius*np.ones(self.ntiles)

        # and effective gravities
        geff = np.sqrt((-const.G*self.mass/radii**2 + self.Omega**2 * radii * np.sin(theta_values)**2)**2 +
                       self.Omega**4 * radii**2 * np.sin(theta_values)**2 * np.cos(theta_values)**2)

        # now make a ntiles sized CartesianRepresentation of positions
        self.tile_locs = SphericalRepresentation(phi_values,
                                                 90*u.deg-theta_values,
                                                 radii).to_cartesian()

        # normal to tile is the direction of the derivate of the potential
        # this is the vector form of geff above
        # the easiest way to express it is that it differs from (r, theta, phi)
        # by a small amount in the theta direction epsilon
        x = radii/self.radius
        a = 1./x**2 - (8./27.)*self.omega**2 * x * np.sin(theta_values)**2
        b = np.sqrt(
                (-1./x**2 + (8./27)*self.omega**2 * x * np.sin(theta_values)**2)**2 +
                ((8./27)*self.omega**2 * x * np.sin(theta_values) * np.cos(theta_values))**2
            )
        epsilon = np.arccos(a/b)
        self.tile_dirs = UnitSphericalRepresentation(phi_values,
                                                     90*u.deg - theta_values - epsilon)
        self.tile_dirs = self.tile_dirs.to_cartesian()

        # and ntiles sized arrays of tile properties
        tile_temperatures = 2000.0 * u.K * (geff / geff.max())**self.beta

        # fluxes, not accounting for limb darkening
        self.tile_scales = np.ones(self.ntiles)
        self.tile_fluxes = blackbody_nu(wavelength, tile_temperatures)

        # tile areas
        spher = self.tile_locs.represent_as(SphericalRepresentation)
        self.tile_areas = spher.distance**2 * hp.nside2pixarea(NSIDE) * u.rad * u.rad

        omega_vec = CartesianRepresentation(
            u.Quantity([0.0, 0.0, self.Omega.value],
                       unit=self.Omega.unit)
        )
        # get velocities of tiles
        self.tile_velocities = cross(omega_vec, self.tile_locs)
Example #15
 def export_fits_prisim(self,fitsfile,pol_list,freq_list,scheme='RING',nside_out=None):
     '''
     Export a FITS file for the given polarizations and frequencies.
     Args:
     fitsfile, str, name of file to save .fits to
     pol_list, list of labels of polarizations to write
     freq_list, list of frequencies to write
     '''
     if nside_out is None:
         nside_out=self.nside
     pol_list=n.array(pol_list)
     freq_list=n.array(freq_list)
     pol_inds=[]
     freq_inds=[]
     for pol in pol_list:
         assert pol in self.pols
         pol_inds.append(n.where(n.array(self.pols)==pol)[0][0])
     for freq in freq_list:
         assert freq in self.fAxis
         freq_inds.append(n.where(n.array(self.fAxis)==freq)[0][0])
     data=self.data[:,freq_inds,:].reshape(-1,1)
     theta_out,phi_out=hp.pix2ang(nside_out,n.arange(hp.nside2npix(nside_out)))
     #freq_col=[fits.Column(name='Frequency [MHz]',format='D',array=n.array(freq_list))]
     #freq_columns=fits.ColDefs(freq_col,ascii=False)
     #freq_tbhdu = fits.BinTableHDU.from_columns(freq_col)
     #freq_tbhdu = fits.BinTableHDU.from_columns(n.array(freq_list))
     
     hduprimary=fits.PrimaryHDU()
     hduprimary.header.set('EXTNAME','PRIMARY')
     hduprimary.header.set('NEXTEN',2)
     hduprimary.header.set('FITSTYPE','IMAGE')
     hduprimary.header['NSIDE']=(nside_out,'NSIDE')
     hduprimary.header['PIXAREA']=(hp.nside2pixarea(nside_out),'pixel solid angle (steradians)')
     hduprimary.header['NEXTEN']=(2,'Number of extensions')
     hduprimary.header['NPOL'] = (len(pol_inds), 'Number of polarizations')
     hduprimary.header['SOURCE'] = ('HERA-CST', 'Source of data')
     hdulist=[hduprimary]
     fits.HDUList(hdulist).writeto(fitsfile,clobber=True)
     for pol in pol_list:
         #freq_tbhdu.header.set('EXTNAME','FREQS_{0}'.format(pol))
         freq_tbhdu=fits.ImageHDU(freq_list,name='FREQS_{0}'.format(pol))
         fits.append(fitsfile,freq_tbhdu.data,freq_tbhdu.header,verify=False)
     data_interp=n.zeros((hp.nside2npix(nside_out),len(freq_inds)))
     for polind,pol in zip(pol_inds,pol_list):
         for fi,freqind in enumerate(freq_inds):
             data=self.data[polind,freqind,:].flatten()
             data_interp[:,fi]=hp.get_interp_val(data,theta_out,phi_out)
             #if DEBUG:
             #    hp.mollview(data_interp[:,fi])
             #    plt.show()
         imghdu = fits.ImageHDU(data_interp, name='BEAM_{0}'.format(pol))
         imghdu.header['PIXTYPE'] = ('HEALPIX', 'Type of pixelization')
         imghdu.header['ORDERING'] = (scheme, 'Pixel ordering scheme, either RING or NESTED')
         imghdu.header['NSIDE'] = (nside_out, 'NSIDE parameter of HEALPIX')
         imghdu.header['NPIX'] = (hp.nside2npix(nside_out), 'Number of HEALPIX pixels')
         imghdu.header['FIRSTPIX'] = (0, 'First pixel # (0 based)')
         imghdu.header['LASTPIX'] = (len(data_interp)-1, 'Last pixel # (0 based)')
         fits.append(fitsfile,imghdu.data,imghdu.header,verify=False)
Example #16
def calculate_nside_resolution():
    NSIDE = [2**i for i in range(11)]
    print('given nside | number of pixels | resolution (pixel size in degree) | Maximum angular distance (degree) | pixel area (in square degrees)')
    for nside in NSIDE:
        npix = hp.nside2npix(nside)
        resol = np.rad2deg(hp.nside2resol(nside))
        maxrad = np.rad2deg(hp.max_pixrad(nside))
        pixarea = hp.nside2pixarea(nside, degrees=True)
        print('{0:^11} | {1:^16} | {2:^33.4f} | {3:^33.4f} | {4:^30.6f}'.format(nside, npix, resol, maxrad, pixarea))
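The pixel-area column follows from HEALPix dividing the sphere into 12*nside**2 equal-area pixels; a quick consistency check:

import numpy as np
import healpy as hp

nside = 64
assert np.isclose(hp.nside2pixarea(nside), 4 * np.pi / (12 * nside**2))
assert np.isclose(hp.nside2pixarea(nside, degrees=True),
                  4 * np.pi / (12 * nside**2) * (180. / np.pi)**2)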
Example #17
 def evaluate_volume_map(self):
     sys.stderr.write("computing log posterior for %d grid points\n"%(self.npix*self.dist_bins))
     sample_args = ((self.density,d,ipix,self.nside) for d in self.d_grid for ipix in np.arange(self.npix))
     results = self.pool.imap(sample_volume, sample_args, chunksize = int(self.npix*self.dist_bins/ (self.nthreads * 16)))
     self.log_volume_map = np.array([r for r in results]).reshape(len(self.d_grid),self.npix)
     self.volume_map = np.exp(self.log_volume_map)
     dA = hp.nside2pixarea(self.nside)
     dd = np.diff(self.d_grid)[0]
     self.volume_map/=(self.volume_map*dd*dA).sum()
Example #18
 def _initialise_dpgmm(self):
     self.model = DPGMM(self.dims)
     for point in self.posterior_samples:
         self.model.add(point)
     dd = np.diff(self.d_grid)[0]
     dA = hp.nside2pixarea(self.nside)
     self.model.setPrior(scale = dd*dA)
     self.model.setThreshold(1e-4)
     self.model.setConcGamma(1,1)
Example #19
 def _flat_bitmap(self, order, full_order, ipix, m):
     if self.children is None:
         nside = 1 << order
         ipix0 = ipix << 2 * (full_order - order)
         ipix1 = (ipix + 1) << 2 * (full_order - order)
         m[ipix0:ipix1] = len(self.samples) / hp.nside2pixarea(nside)
     else:
         for i, child in enumerate(self.children):
             child._flat_bitmap(order + 1, full_order, (ipix << 2) + i, m)
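The bit shifts rely on the nested scheme: a pixel at a coarse order maps onto a contiguous block of 4**(full_order - order) sub-pixels at the finer order. A standalone illustration:

order, full_order, ipix = 3, 6, 5
ipix0 = ipix << 2 * (full_order - order)        # 320: first sub-pixel at order 6
ipix1 = (ipix + 1) << 2 * (full_order - order)  # 384: one past the last sub-pixel
assert ipix1 - ipix0 == 4**(full_order - order)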
Example #20
 def __init__(self, col='metricdata', Nvisit=825, Asky = 18000.0, nside=128,
               norm=False, metricName='fOArea',  **kwargs):
     """Asky = square degrees """
     super().__init__(col=col, metricName=metricName, **kwargs)
     self.Nvisit = Nvisit
     self.nside = nside
     self.Asky = Asky
     self.scale = hp.nside2pixarea(self.nside, degrees=True)
     self.norm = norm
Example #21
def randomPositions(input, nside_pix, n=1):
    """
    Generate n random positions within a full HEALPix mask of booleans, or a set of (lon, lat) coordinates.

    Parameters:
    -----------
    input :     (1) full HEALPix mask of booleans, or (2) a set of (lon, lat) coordinates for catalog objects that define the occupied pixels.
    nside_pix : coarser resolution than the input mask or the catalog object positions,
                so that gaps from star holes, bleed trails, cosmic rays, etc. are filled in.

    Returns:
    --------
    lon,lat,area : Return the longitude and latitude of the random positions (deg) and the total area (deg^2).

    """
    input = np.array(input)
    if len(input.shape) == 1:
        if hp.npix2nside(len(input)) < nside_pix:
            logger.warning('Expected coarser resolution nside_pix in skymap.randomPositions')
        subpix = np.nonzero(input)[0] # All the valid pixels in the mask at the NSIDE for the input mask
        lon, lat = pix2ang(hp.npix2nside(len(input)), subpix)
    elif len(input.shape) == 2:
        lon, lat = input[0], input[1] # All catalog object positions
    else:
        logger.warning('Unexpected input dimensions for skymap.randomPositions')
    pix = surveyPixel(lon, lat, nside_pix)

    # Area with which the random points are thrown
    area = len(pix) * hp.nside2pixarea(nside_pix, degrees=True)

    # Create mask at the coarser resolution
    mask = np.tile(False, hp.nside2npix(nside_pix))
    mask[pix] = True

    # Estimate the number of points that need to be thrown based off
    # coverage fraction of the HEALPix mask
    coverage_fraction = float(np.sum(mask)) / len(mask) 
    n_throw = int(n / coverage_fraction)
        
    lon, lat = [], []
    count = 0
    while len(lon) < n:
        lon_throw = np.random.uniform(0., 360., n_throw)
        lat_throw = np.degrees(np.arcsin(np.random.uniform(-1., 1., n_throw)))

        pix_throw = ugali.utils.healpix.angToPix(nside_pix, lon_throw, lat_throw)
        cut = mask[pix_throw].astype(bool)

        lon = np.append(lon, lon_throw[cut])
        lat = np.append(lat, lat_throw[cut])

        count += 1
        if count > 10:
            raise RuntimeError('Too many loops...')

    return lon[0:n], lat[0:n], area
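The rejection loop above can be sketched standalone with plain healpy calls (surveyPixel and angToPix are ugali wrappers in the original):

import numpy as np
import healpy as hp

nside_pix = 64
mask = np.zeros(hp.nside2npix(nside_pix), dtype=bool)
mask[:1000] = True  # toy coverage: some occupied coarse pixels

n = 100
coverage_fraction = mask.sum() / float(mask.size)
n_throw = int(n / coverage_fraction)
lon, lat = [], []
while len(lon) < n:
    lon_throw = np.random.uniform(0., 360., n_throw)
    lat_throw = np.degrees(np.arcsin(np.random.uniform(-1., 1., n_throw)))
    pix_throw = hp.ang2pix(nside_pix, np.radians(90. - lat_throw), np.radians(lon_throw))
    cut = mask[pix_throw]
    lon = np.append(lon, lon_throw[cut])
    lat = np.append(lat, lat_throw[cut])
lon, lat = lon[:n], lat[:n]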
Example #22
 def simulate_PoissonLognormalSky(self, NSIDE, NGal_per_sqdeg):
     if self.verbose:
         t0 = time.time()
         utils.PrtMsg("Generating Lognormal field :", self.verbose)
     rho = self.simulate_LognormalSky(NSIDE)
     rate = hp.nside2pixarea(NSIDE, degrees=True) * NGal_per_sqdeg
     if self.verbose:
         t0 = utils.PrtAndRstTime(self.verbose, t0)
         utils.PrtMsg("Poisson sampling and returning : ", self.verbose)
     return np.random.poisson(rho * rate, size=rho.size)
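A standalone sketch of the Poisson step; simulate_LognormalSky is class-internal, so a toy lognormal field stands in for rho:

import numpy as np
import healpy as hp

NSIDE, NGal_per_sqdeg = 128, 1000.0
rho = np.random.lognormal(sigma=0.5, size=hp.nside2npix(NSIDE))  # toy density field
rate = hp.nside2pixarea(NSIDE, degrees=True) * NGal_per_sqdeg    # mean counts per pixel
counts = np.random.poisson(rho * rate, size=rho.size)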
Example #23
 def simulate_MultinomialLognormalSky(self, NSIDE, Npts_per_sqdeg):
     if self.verbose:
         t0 = time.time()
         utils.PrtMsg("Generating Lognormal field :", self.verbose)
     rho = self.simulate_LognormalSky(NSIDE)
     if self.verbose:
         t0 = utils.PrtAndRstTime(self.verbose, t0)
         utils.PrtMsg("Multinomial sampling and returning: ", self.verbose)
     Ntot = hp.nside2pixarea(NSIDE, degrees=True) * Npts_per_sqdeg * hp.nside2npix(NSIDE)
     return np.random.multinomial(Ntot, rho / np.sum(rho))
Example #24
    def map(self, mapunit):

        if self.jarea is None:
            self.jarea = np.zeros(self.njack)

        pix = hp.ang2pix(self.nside, mapunit['polar_ang'], mapunit['azim_ang'],
                         nest=True)
        upix = np.unique(pix)
        area = hp.nside2pixarea(self.nside,degrees=True) * len(upix)
        self.jarea[self.jcount] += area
Example #25
    def calc_areas(self, mags):
        """
        Calculate total area from the depth map as a function of magnitude.

        Parameters
        ----------
        mags: `np.array`
           Float array of magnitudes at which to compute area

        Returns
        -------
        areas: `np.array`
           Float array of total areas for each of the mags
        """

        pixsize = hp.nside2pixarea(self.nside, degrees=True)

        if (self.w < 0.0):
            # This is just constant area
            areas = np.zeros(mags.size) + self.config_area
            return areas

        if self.subpix_hpix > 0:
            # for the subregion, we need the area covered in the main pixel
            # I'm not sure what to do about border...but you shouldn't
            # be running this with a subregion with a border
            if self.subpix_border > 0.0:
                raise RuntimeError("Cannot run calc_areas() with a subregion with a border")

            bitShift = 2 * int(np.round(np.log(self.nside / self.subpix_nside) / np.log(2)))
            nFinePerSub = 2**bitShift
            ipnest = np.left_shift(hp.ring2nest(self.subpix_nside, self.subpix_hpix), bitShift) + np.arange(nFinePerSub)
        else:
            ipnest = self.sparse_depthmap.validPixels

        areas = np.zeros(mags.size)

        values = self.sparse_depthmap.getValuePixel(ipnest)

        gd, = np.where(values['m50'] > 0.0)

        depths = values['m50'][gd]
        st = np.argsort(depths)
        depths = depths[st]

        fracgoods = values['fracgood'][gd[st]]

        inds = np.clip(np.searchsorted(depths, mags) - 1, 1, depths.size - 1)

        lo = (inds < 0)
        areas[lo] = np.sum(fracgoods) * pixsize
        carea = pixsize * np.cumsum(fracgoods)
        areas[~lo] = carea[carea.size - inds[~lo]]

        return areas
Example #26
 def __init__(self, col='metricdata', Asky=18000., nside=128, Nvisit=825,
              norm=False, metricName='fONv',  **kwargs):
     """Asky = square degrees """
     super().__init__(col=col, metricName=metricName, **kwargs)
     self.Nvisit = Nvisit
     self.nside = nside
     # Determine how many healpixels are included in Asky sq deg.
     self.Asky = Asky
     self.scale = hp.nside2pixarea(self.nside, degrees=True)
     self.npix_Asky = int(np.ceil(self.Asky / self.scale))
     self.norm = norm
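The last two lines convert a sky area into a pixel budget; for the default values the numbers work out to roughly:

import numpy as np
import healpy as hp

scale = hp.nside2pixarea(128, degrees=True)  # ~0.21 deg^2 per pixel at nside=128
print(int(np.ceil(18000.0 / scale)))         # ~8.6e4 healpixels in 18000 deg^2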
Example #27
def search_map(ras, decs, beam, nest=True, pix_per_beam=10):
    """Returns a healpix map optimised for searching on the sky.  It
    represents the Gaussian-beam convolved posterior.

    :param ras: RA posterior samples.

    :param decs: Corresponding DEC samples.

    :param beam: The beam FWHM in radians.

    :param nest: Whether to output the map in nested (default) or ring
      pixel ordering.

    :param pix_per_beam: The number of pixels in the output map per
      beam (default 10).

    :return: An array representing the posterior convolved with a
      Gaussian beam of the given size.  The array is normalised as a
      probability density per square degree.

    """

    nside = _find_nside(beam, pix_per_beam)

    thetas = np.pi/2.0 - decs

    # Create the map in ring coordinates first.
    hmap = np.bincount(hp.ang2pix(nside, thetas, ras))
    if hmap.shape[0] < hp.nside2npix(nside):
        hmap = np.concatenate((hmap, np.zeros(hp.nside2npix(nside)-hmap.shape[0])))

    hmap = hmap / float(thetas.shape[0]) / hp.nside2pixarea(nside)

    chmap = hps.smoothing(hmap, fwhm=beam, pol=False)

    if nest:
        chmap = hp.reorder(chmap, r2n=True)

    norm = np.sum(chmap) * hp.nside2pixarea(nside, degrees=True)
    
    return chmap / norm
Example #28
    def _as_healpix_fast(self, nside, nest=True):
        """Returns a healpix map of the posterior density, by default in
        nested order.

        """
        grid = self._adaptive_grid()

        pcentres, nsides = grid.pixel_centers_nsides()
        pcentres = np.array(pcentres)
        pposts = self.posterior(pcentres)
        
        map = np.zeros(hp.nside2npix(nside))

        for pc, pp, ns in zip(pcentres, pposts, nsides):
            if ns > nside:
                # Then we are extirpolating the posterior to the map
                i = hp.ang2pix(ns, np.pi/2.0-pc[1], pc[0], nest=True)
                n = ns
                while n > nside:
                    n = n // 2
                    i = i // 4
                map[i] += pp*hp.nside2pixarea(ns)/hp.nside2pixarea(nside)
            else:
                # We are interpolating the posterior to the map
                i = hp.ang2pix(ns, np.pi/2.0-pc[1], pc[0], nest=True)
                ilow = i
                ihigh = i+1
                n = ns
                while n < nside:
                    n *= 2
                    ilow *= 4
                    ihigh = 4*ihigh

                map[ilow:ihigh] = pp

        if nest:
            pass  # Map is already in nested order
        else:
            map = hp.pixelfunc.reorder(map, n2r=True)

        return map / np.sum(map)
Example #29
    def evaluate_sky_map(self):
        dd = np.diff(self.d_grid)[0]
        dA = hp.nside2pixarea(self.nside)
        # implementing a trapezoidal rule
        N = self.dist_bins
        left_log_sum = logsumexp(self.log_volume_map[1:N-1,:], axis=0)
        right_log_sum = logsumexp(self.log_volume_map[0:N-2,:], axis=0)
        self.log_skymap = np.logaddexp(left_log_sum,right_log_sum)+np.log(dd/2.)
        # trapezoidal rule applied in the line above
        self.log_skymap -= logsumexp(self.log_skymap) + np.log(dA)
        self.skymap = np.exp(self.log_skymap)
        self.skymap/=(self.skymap*dA).sum()
Example #30
def stellarDensity(infile, nside=256, lon_field='RA', lat_field='DEC'): 
    area = hp.nside2pixarea(nside,degrees=True)
    logger.debug("Reading %s"%infile)
    data = fitsio.read(infile,columns=[lon_field,lat_field])

    lon,lat = data[lon_field],data[lat_field]
    pix = ang2pix(nside,lon,lat)
    counts = collections.Counter(pix)
    pixels, number = np.array(sorted(counts.items())).T
    density = number/area

    return pixels, density
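The same density calculation written with plain healpy (ang2pix above is a module-level wrapper whose lon/lat convention is assumed), on a toy catalog:

import collections
import numpy as np
import healpy as hp

nside = 256
area = hp.nside2pixarea(nside, degrees=True)
lon = np.random.uniform(0., 360., 100000)                       # toy catalog (deg)
lat = np.degrees(np.arcsin(np.random.uniform(-1., 1., 100000)))
pix = hp.ang2pix(nside, np.radians(90. - lat), np.radians(lon))
counts = collections.Counter(pix)
pixels, number = np.array(sorted(counts.items())).T
density = number / area  # objects per deg^2 in each occupied pixel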
Example #31
    def convert_point_sources_to_healpix(point_source_pos,
                                         point_source_flux,
                                         nside=2**5):
        """
        Convert point sources to an approximate diffuse HEALPix model.

        The healpix map returned is in RING scheme.

        Parameters
        ----------
        point_source_pos : array_like
            An array of point sources. For each source, the entries are
            (ra, dec) [rad] (assumed to be in J2000).
            Shape=(N_SOURCES, 2).
        point_source_flux : array_like
            An array of fluxes of the given point sources, per
            frequency. Fluxes in [Jy]. Shape=(NFREQS, N_SOURCES).
        nside : int, optional
            HEALPix nside parameter (must be a power of 2).

        Returns
        -------
        array_like
            The HEALPix diffuse model. Shape=(NFREQ, NPIX).
        """

        hmap = np.zeros((len(point_source_flux), healpy.nside2npix(nside)))

        # Get which pixel every point source lies in.
        pix = healpy.ang2pix(nside, np.pi / 2 - point_source_pos[:, 1],
                             point_source_pos[:, 0])

        hmap[:, pix] += point_source_flux / healpy.nside2pixarea(nside)

        return hmap
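A usage sketch with two toy sources; integrating the returned map against the pixel area recovers the input flux per frequency:

import numpy as np
import healpy

pos = np.array([[0.0, 0.5], [1.0, -0.3]])  # (ra, dec) in radians, J2000
flux = np.array([[1.0, 2.0],               # shape (NFREQS, N_SOURCES), Jy
                 [1.5, 2.5]])
hmap = convert_point_sources_to_healpix(pos, flux, nside=32)
print(hmap.sum(axis=1) * healpy.nside2pixarea(32))  # ~[3.0, 4.0] Jy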
Example #32
def vmaps2vmap_I(pix_vmaps, weights, nside):
    """From individual freq pixel variance maps and weights create expected pixel variance map


       Args:
            pix_vmaps: list of pixel variance maps
            weights: weights for intensity freq. weighting (as applied onto the noise maps)
            nside: desired output map resolution

       See Planck 2018 gravitational lensing paper Eqs 16-17

    """
    assert len(pix_vmaps) == len(weights), (len(pix_vmaps), len(weights))
    nf, lmaxp1 = weights.shape
    lmax_out = min(2 * lmaxp1 - 2, 3 * nside - 1)
    ret_lm = np.zeros(hp.Alm.getsize(lmax_out), dtype=complex)
    for i, (pix_vmap, wl) in enumerate_progress(list(zip(pix_vmaps, weights))):
        m = read_map(pix_vmap)
        vpix = hp.nside2pixarea(hp.npix2nside(m.size), degrees=False)
        this_s2lm = hp.map2alm(m, iter=0, lmax=lmax_out)
        wl2 = _w2wsq(wl, 0, 0, lmax_out) * vpix
        hp.almxfl(this_s2lm, wl2, inplace=True)
        ret_lm += this_s2lm
    return hp.alm2map(ret_lm, nside, verbose=False)
Example #33
    def get_area(self):

        if hasattr(self, 'area'):
            return

        if self.params['area'] == 'None':

            import healpy as hp

            pix = hp.ang2pix(4096,
                             np.pi / 2. - np.radians(self.shape['dec']),
                             np.radians(self.shape['ra']),
                             nest=True)
            area = hp.nside2pixarea(4096) * (180. / np.pi)**2
            mask = np.bincount(pix) > 0
            self.area = np.sum(mask) * area
            self.area = float(self.area)
            print(self.area)

        else:

            self.area = self.params['area']

        return
Example #34
    def ConfidenceArea(self, adLevels):

        # create a normalized cumulative distribution
        self.log_skymap_sorted = np.sort(self.log_skymap.flatten())[::-1]
        self.log_skymap_cum = cumulative.fast_log_cumulative(
            self.log_skymap_sorted)
        # find the indices corresponding to the given CLs
        adLevels = np.ravel([adLevels])
        args = [(self.log_skymap_sorted, self.log_skymap_cum, level)
                for level in adLevels]
        adHeights = self.pool.map(FindHeights, args)

        dA = hp.nside2pixarea(self.nside, degrees=True)
        areas = []
        for height in adHeights:
            (index_hp, ) = np.where(self.log_skymap > height)
            areas.append(len(index_hp) * dA)
        self.area_confidence = np.array(areas)

        if self.injection is not None:
            ra, dec = self.injection.get_ra_dec()
            theta, phi = eq2ang(ra, dec)
            ipix = hp.pixelfunc.ang2pix(self.nside, theta, phi, nest=True)
            logPval = self.log_skymap[ipix]
            confidence_level = np.exp(
                self.log_skymap_cum[np.abs(self.log_skymap_sorted -
                                           logPval).argmin()])
            height = FindHeights((self.log_skymap_sorted, self.log_skymap_cum,
                                  confidence_level))
            (index_hp, ) = np.where(self.log_skymap >= height)
            searched_area = len(index_hp) * dA
            return self.area_confidence, (confidence_level, searched_area)

        del self.log_skymap_sorted
        del self.log_skymap_cum
        return self.area_confidence, None
Example #35
    def get_areas(self):
        """
        Retrieve the area structure (area as a function of redshift) associated
        with the volume-limit mask.

        Returns
        -------
        astr: `redmapper.Catalog`
           Area structure catalog, with .z and .area
        """

        zbinsize = self.config.area_finebin
        zbins = np.arange(self.config.zrange[0], self.config.zrange[1], zbinsize)

        astr = Catalog(np.zeros(zbins.size, dtype=[('z', 'f4'),
                                                   ('area', 'f4')]))
        astr.z = zbins

        pixsize = hp.nside2pixarea(self.nside, degrees=True)

        validPixels = self.sparse_vlimmap.validPixels
        zmax = self.sparse_vlimmap.getValuePixel(validPixels)['zmax']
        st = np.argsort(zmax)

        fracgoods = self.sparse_vlimmap.getValuePixel(validPixels)['fracgood'][st]

        inds = np.searchsorted(zmax[st], zbins, side='right')

        lo = (inds <= 0)
        astr.area[lo] = np.sum(fracgoods.astype(np.float64)) * pixsize

        if np.sum(~lo) > 0:
            carea = pixsize * np.cumsum(fracgoods.astype(np.float64))
            astr.area[~lo] = carea[carea.size - inds[~lo]]

        return astr
Example #36
 def _fillPeriphery(self, filter):
     '''
     Fill in peripheral cells of shiftmap with average of nearest neighbors.
     '''
     all_neighbor_pix = numpy.unique(
         healpy.get_all_neighbours(
             self.nside,
             numpy.nonzero(
                 self.zeropoint_shiftmap.data.field(filter) != healpy.UNSEEN
             )[0]))
     filled_pix = numpy.nonzero(
         self.zeropoint_shiftmap.data.field(filter) != healpy.UNSEEN)[0]
     periphery_pix = numpy.setdiff1d(all_neighbor_pix, filled_pix)
     shiftmap_filled = numpy.ma.masked_array(
         self.zeropoint_shiftmap.data.field(filter),
         self.zeropoint_shiftmap.data.field(filter) == healpy.UNSEEN)
     self.zeropoint_shiftmap.data.field(
         filter)[periphery_pix] = numpy.array(numpy.mean(
             shiftmap_filled[healpy.get_all_neighbours(
                 self.nside, periphery_pix)],
             axis=0),
                                              dtype=shiftmap_filled.dtype)
     self.peripheral_area = healpy.nside2pixarea(
         self.nside, degrees=True) * len(periphery_pix)
Example #37
def rand_point_density(axis,
                       ra,
                       dec,
                       nbins,
                       hp_file_path,
                       ext=1,
                       header=True,
                       xmin=0.,
                       xmax=360.,
                       ymin=-1.,
                       ymax=1.,
                       vmax=None):
    # Import modules
    import numpy as np
    import matplotlib.pyplot as plt
    import healpy as hp

    nside, nest, mask = hp_mask_fracgood(hp_file_path, ext=ext, header=header)

    # Find pixel densities using pixel_density function
    pix_area = hp.nside2pixarea(nside, degrees=True)
    pix_count, all_ra, all_dec = pixel_count(nside,
                                             nest,
                                             ra=ra,
                                             dec=dec,
                                             mask=mask)
    if xmin < 0:
        all_ra[np.where(all_ra > xmax)] -= 360

    # Find widths of each self defined pixel
    nedges = nbins + 1
    deltax = (float(xmax) - float(xmin)) / float(nbins)
    deltay = (float(ymax) - float(ymin)) / float(nbins)

    x = all_ra.copy()
    y = np.sin(np.deg2rad(all_dec.copy()))
    # Now find the densities on each of these larger pixels
    counts = np.zeros((nbins, nbins))
    area = np.zeros_like(counts)
    pixi = np.floor((x - xmin) / deltax).astype(int)
    pixi[np.where(pixi == nbins)] -= 1
    pixj = np.floor((y - ymin) / deltay).astype(int)
    pixj[np.where(pixj == nbins)] -= 1
    for i, j, pc_i in zip(pixi, pixj, pix_count):
        counts[i, j] += pc_i
        area[i, j] += pix_area
    del pix_count, all_ra, all_dec, x, y

    # Now find the densities
    pix_density = np.empty_like(counts)
    for i in range(nbins):
        for j in range(nbins):
            try:
                pix_density[i, j] = counts[i, j] / area[i, j]
            except ZeroDivisionError:
                pix_density[i, j] = 0.0
    del counts, area
    pix_density = np.nan_to_num(pix_density)

    # Create arrays of the locations of the edges of pixels
    xedges = np.linspace(xmin, xmax, nedges)
    yedges = np.linspace(ymin, ymax, nedges)
    yedges = np.rad2deg(np.arcsin(yedges))
    xedges, yedges = np.meshgrid(xedges, yedges, indexing='ij')
    if vmax is None:
        vmax = pix_density.max()

    pc = axis.pcolormesh(xedges,
                         yedges,
                         pix_density.T,
                         vmin=0,
                         vmax=vmax,
                         cmap='Reds')

    # Return objects
    return (pc, vmax)
Example #38
def calc_prob_coverage(graceid, mappathinfo, inst_cov, band_cov, depth, depth_unit, approx_cov):
	cache_key = f'prob_{graceid}_{mappathinfo}_{inst_cov}_{band_cov}_{depth}_{depth_unit}_{approx_cov}'
	ztfid = 47
	ztf_approx_id = 76
	decamid = 38
	decam_approx_id = 77
	approx_dict = {
		ztfid: ztf_approx_id,
		decamid: decam_approx_id
	}
	areas = []
	times = []
	probs = []

	s3 = boto3.client('s3')
	try:
		with tempfile.NamedTemporaryFile() as f:
			# this HP module does not appear to be able to read files from memory
			# so we use a temporary file here which deletes itself as soon as the
			# context manager is exited.
			s3.download_fileobj(config.AWS_BUCKET, mappathinfo, f)
			GWmap = hp.read_map(f.name)
			#bestpixel = np.argmax(GWmap)
			nside = hp.npix2nside(len(GWmap))
	except ClientError:
		raise HTTPException('<b>Calculator ERROR: Map not found. Please contact the administrator.</b>')
	except Exception:
		raise HTTPException('<b> Map ERROR. Please contact the administrator. </b>')

	pointing_filter = []
	pointing_filter.append(models.pointing_event.graceid == graceid)
	pointing_filter.append(models.pointing.status == 'completed')
	pointing_filter.append(models.pointing_event.pointingid == models.pointing.id)
	pointing_filter.append(models.pointing.instrumentid != 49)

	if inst_cov != '':
		insts_cov = [int(x) for x in inst_cov.split(',')]
		pointing_filter.append(models.pointing.instrumentid.in_(insts_cov))
	if band_cov != '':
		bands_cov = [x for x in band_cov.split(',')]
		pointing_filter.append(models.pointing.band.in_(bands_cov))
	if depth_unit != 'None' and depth_unit != '':
		pointing_filter.append(models.pointing.depth_unit == depth_unit)
	if depth is not None and function.isFloat(depth):
		if 'mag' in depth_unit:
			pointing_filter.append(models.pointing.depth >= float(depth))
		elif 'flux' in depth_unit:
			pointing_filter.append(models.pointing.depth <= float(depth))
		else:
			raise HTTPException('Unknown depth unit.')

	pointings_sorted = db.session.query(
		models.pointing.instrumentid,
		models.pointing.pos_angle,
		func.ST_AsText(models.pointing.position).label('position'),
		models.pointing.band,
		models.pointing.depth,
		models.pointing.time
	).filter(
		*pointing_filter
	).order_by(
		models.pointing.time.asc()
	).all()

	instrumentids = [x.instrumentid for x in pointings_sorted]

	for apid in approx_dict.keys():
		if apid in instrumentids:
			instrumentids.append(approx_dict[apid])

	#filter and query the relevant instrument footprints
	footprintinfo = db.session.query(
		func.ST_AsText(models.footprint_ccd.footprint).label('footprint'),
		models.footprint_ccd.instrumentid
	).filter(
		models.footprint_ccd.instrumentid.in_(instrumentids)
	).all()

	#get GW T0 time
	time_of_signal = db.session.query(
		models.gw_alert.time_of_signal
	).filter(
		models.gw_alert.graceid == graceid
	).filter(
		models.gw_alert.time_of_signal != None
	).order_by(
		models.gw_alert.datecreated.desc()
	).first()[0]

	if time_of_signal is None:
		raise HTTPException("<i><font color='red'>ERROR: Please contact administrator</font></i>")

	qps = []
	qpsarea=[]

	NSIDE4area = 512 #this gives pixarea of 0.013 deg^2 per pixel
	pixarea = hp.nside2pixarea(NSIDE4area, degrees=True)

	for p in pointings_sorted:
		ra, dec = function.sanatize_pointing(p.position)

		if approx_cov:
			if p.instrumentid in approx_dict.keys():
				footprint_ccds = [x.footprint for x in footprintinfo if x.instrumentid == approx_dict[p.instrumentid]]
			else:
				footprint_ccds = [x.footprint for x in footprintinfo if x.instrumentid ==  p.instrumentid]
		else:
			footprint_ccds = [x.footprint for x in footprintinfo if x.instrumentid ==  p.instrumentid]

		sanatized_ccds = function.sanatize_footprint_ccds(footprint_ccds)

		for ccd in sanatized_ccds:
			pointing_footprint = function.project_footprint(ccd, ra, dec, p.pos_angle)


			ras_poly = [x[0] for x in pointing_footprint][:-1]
			decs_poly = [x[1] for x in pointing_footprint][:-1]
			xyzpoly = astropy.coordinates.spherical_to_cartesian(1, np.deg2rad(decs_poly), np.deg2rad(ras_poly))
			qp = hp.query_polygon(nside,np.array(xyzpoly).T)
			qps.extend(qp)

			#do a separate calc just for area coverage. hardcode NSIDE to be high enough so sampling error low
			qparea = hp.query_polygon(NSIDE4area, np.array(xyzpoly).T)
			qpsarea.extend(qparea)

			#deduplicate indices, so that pixels already covered are not double counted
			deduped_indices=list(dict.fromkeys(qps))
			deduped_indices_area = list(dict.fromkeys(qpsarea))

			area = pixarea * len(deduped_indices_area)

			prob = 0
			for ind in deduped_indices:
				prob += GWmap[ind]
			elapsed = p.time - time_of_signal
			elapsed = elapsed.total_seconds()/3600
			times.append(elapsed)
			probs.append(prob)
			areas.append(area)

	cache.set(f'{cache_key}_times', times)
	cache.set(f'{cache_key}_probs', probs)
	cache.set(f'{cache_key}_areas', areas)

	return cache_key
Example #39
    ra_hp = {}
    dec_hp = {}

    prihdr = fits.Header()
    prihdr['author'] = 'COMPARAT'
    prihdu = fits.PrimaryHDU(header=prihdr)

    hdu_list = [prihdu]
    for nside in nside_values:
        HEALPIX_VAL[nside] = healpy.ang2pix(nside,
                                            f0[1].data['dec'] * np.pi / 180. +
                                            np.pi / 2.,
                                            f0[1].data['ra'] * np.pi / 180.,
                                            nest=True)
        set_HP[nside] = np.array(list(set(HEALPIX_VAL[nside])))
        area_estimated[nside] = len(set_HP[nside]) * healpy.nside2pixarea(
            nside, degrees=True)
        number_per_pixel[nside] = np.array(
            [nl((HEALPIX_VAL[nside] == el)) for el in set_HP[nside]])
        density_estimated[nside] = N_targets_total / area_estimated[nside]
        density_per_pixel[nside] = number_per_pixel[
            nside] / healpy.nside2pixarea(nside, degrees=True)
        median_dfluxlbs_per_pixel[nside] = np.array([
            np.median(f0[1].data['dfluxlbs'][(HEALPIX_VAL[nside] == el)])
            for el in set_HP[nside]
        ])
        median_fwhm_per_pixel[nside] = np.array([
            np.median(f0[1].data['fwhm'][(HEALPIX_VAL[nside] == el)])
            for el in set_HP[nside]
        ])
        ra_hp[nside] = np.array([
            healpy.pix2ang(nside, pix_id, nest=True)[1] * 180. / np.pi
Example #40
 def _cache(self, name=None):
     pixel_area = healpy.nside2pixarea(self.nside, degrees=True)
     vec = ang2vec(self.lon, self.lat)
     self.pix = query_disc(self.nside, vec, self.extension)
     self._norm = 1. / (len(self.pix) * pixel_area)
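A standalone sketch of the same normalisation, one over the pixelised disc area (ang2vec and query_disc in the original are module-level wrappers; plain healpy calls stand in here):

import numpy as np
import healpy as hp

nside, radius = 4096, np.radians(0.5)
vec = hp.ang2vec(np.radians(90. - 30.), np.radians(45.))  # dec=30, ra=45 (deg)
pix = hp.query_disc(nside, vec, radius)
norm = 1.0 / (len(pix) * hp.nside2pixarea(nside, degrees=True))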
Example #41
def catsimPopulation(tag, mc_source_id_start=1, n=5000, n_chunk=100, config='simulate_population.yaml'):
    """
    n = Number of satellites to simulate
    n_chunk = Number of satellites in a file chunk
    """

    assert mc_source_id_start >= 1, "Starting mc_source_id must be >= 1" 
    assert n % n_chunk == 0, "Total number of satellites must be divisible by the chunk size"
    nside_pix = 256 # NSIDE = 128 -> 27.5 arcmin, NSIDE = 256 -> 13.7 arcmin 
    
    if not os.path.exists(tag): os.makedirs(tag)

    if isinstance(config,str): config = yaml.safe_load(open(config))
    assert config['survey'] in ['des', 'ps1']

    infile_ebv = config['ebv']
    infile_fracdet = config['fracdet']
    infile_maglim_g = config['maglim_g']
    infile_maglim_r = config['maglim_r']
    infile_density = config['stellar_density']

    range_distance = config.get('range_distance',[5., 500.])
    range_stellar_mass = config.get('range_stellar_mass',[1.e1, 1.e6])
    range_r_physical = config.get('range_r_physical',[1.e-3, 2.0])
    
    m_density = np.load(infile_density)
    nside_density = healpy.npix2nside(len(m_density))
    m_fracdet = read_map(infile_fracdet, nest=False) #.astype(np.float16)
    nside_fracdet = healpy.npix2nside(len(m_fracdet))

    m_maglim_g = read_map(infile_maglim_g, nest=False) #.astype(np.float16)
    m_maglim_r = read_map(infile_maglim_r, nest=False) #.astype(np.float16)

    m_ebv = read_map(infile_ebv, nest=False) #.astype(np.float16)
    
    #m_foreground = healpy.read_map(infile_foreground)

    mask = (m_fracdet > 0.5)

    kwargs = dict(range_distance = range_distance,
                  range_stellar_mass = range_stellar_mass,
                  range_r_physical = range_r_physical)
    print(kwargs)
    # r_physical is azimuthally-averaged half-light radius, kpc
    simulation_area, lon_population, lat_population, distance_population, stellar_mass_population, r_physical_population = ugali.simulation.population.satellitePopulation(mask, nside_pix, n, **kwargs)
    n_g22_population = np.tile(np.nan, n)
    n_g24_population = np.tile(np.nan, n)
    abs_mag_population = np.tile(np.nan, n)
    surface_brightness_population = np.tile(np.nan, n)
    ellipticity_population = np.tile(np.nan, n)
    position_angle_population = np.tile(np.nan, n)
    age_population = np.tile(np.nan, n)
    metal_z_population = np.tile(np.nan, n)
    mc_source_id_population = np.arange(mc_source_id_start, mc_source_id_start + n)
    #cut_difficulty_population = np.tile(False, n)
    difficulty_population = np.tile(0, n)

    lon_array = []
    lat_array = []
    mag_1_array = []
    mag_2_array = []
    mag_1_error_array = []
    mag_2_error_array = []
    mag_extinction_1_array = []
    mag_extinction_2_array = []
    mc_source_id_array = []
    for ii, mc_source_id in enumerate(mc_source_id_population):
        print('  Simulating satellite (%i/%i) ... MC_SOURCE_ID = %i'%(ii + 1, n, mc_source_id))
        print('  distance=%.2e, stellar_mass=%.2e, rhalf=%.2e'%(distance_population[ii],stellar_mass_population[ii],r_physical_population[ii]))
        lon, lat, mag_1, mag_2, mag_1_error, mag_2_error, mag_extinction_1, mag_extinction_2, n_g22, n_g24, abs_mag, surface_brightness, ellipticity, position_angle, age, metal_z, flag_too_extended = catsimSatellite(config,
                                                                                                                                                                             lon_population[ii], 
                                                                                                                                                                             lat_population[ii], 
                                                                                                                                                                             distance_population[ii], 
                                                                                                                                                                             stellar_mass_population[ii], 
                                                                                                                                                                             r_physical_population[ii],
                                                                                                                                                                             m_maglim_g,
                                                                                                                                                                             m_maglim_r,
                                                                                                                                                                             m_ebv)
        print('  ', len(lon))
        
        n_g22_population[ii] = n_g22
        n_g24_population[ii] = n_g24
        abs_mag_population[ii] = abs_mag
        surface_brightness_population[ii] = surface_brightness
        ellipticity_population[ii] = ellipticity
        position_angle_population[ii] = position_angle
        age_population[ii] = age
        metal_z_population[ii] = metal_z

        #print "Difficulty masking..."  

        # These objects are too extended and are not simulated
        if (flag_too_extended):
            difficulty_population[ii] |= 0b0001

        # We assume that these objects would be easily detected and
        # remove them to reduce data volume
        if (surface_brightness_population[ii]<25.)&(n_g22_population[ii]>1e2):
            difficulty_population[ii] |= 0b0010
        if (surface_brightness_population[ii]<28.)&(n_g22_population[ii]>1e4):
            difficulty_population[ii] |= 0b0100
        if (surface_brightness_population[ii]<30.)&(n_g22_population[ii]>1e5):
            difficulty_population[ii] |= 0b1000
        
        #cut_easy = (surface_brightness_population[ii]<25.)&(n_g22_population[ii]>1.e2) \
        #           | ((surface_brightness_population[ii] < 30.) & (n_g24_population[ii] > 1.e4)) \
        #           | ((surface_brightness_population[ii] < 31.) & (n_g24_population[ii] > 1.e5))
        #cut_hard = (surface_brightness_population[ii] > 35.) | (n_g24_population[ii] < 1.)
        #cut_difficulty_population[ii] = ~cut_easy & ~cut_hard
        #if cut_easy:
        #    difficulty_population[ii] += 1 # TOO EASY
        #if cut_hard:
        #    difficulty_population[ii] += 2 # TOO HARD
        #if flag_too_extended:
        #    difficulty_population[ii] += 3 # TOO EXTENDED

        if difficulty_population[ii] == 0:
            lon_array.append(lon)
            lat_array.append(lat)
            mag_1_array.append(mag_1)
            mag_2_array.append(mag_2)
            mag_1_error_array.append(mag_1_error)
            mag_2_error_array.append(mag_2_error)
            mag_extinction_1_array.append(mag_extinction_1)
            mag_extinction_2_array.append(mag_extinction_2)
            mc_source_id_array.append(np.tile(mc_source_id, len(lon)))

    # Concatenate all the arrays

    print("Concatenating arrays...")
    lon_array = np.concatenate(lon_array)
    lat_array = np.concatenate(lat_array)
    mag_1_array = np.concatenate(mag_1_array)
    mag_2_array = np.concatenate(mag_2_array)
    mag_1_error_array = np.concatenate(mag_1_error_array)
    mag_2_error_array = np.concatenate(mag_2_error_array)
    mag_extinction_1_array = np.concatenate(mag_extinction_1_array)
    mag_extinction_2_array = np.concatenate(mag_extinction_2_array)
    mc_source_id_array = np.concatenate(mc_source_id_array)

    # Now do the masking all at once

    print("Fracdet masking...")
    pix_array = ugali.utils.healpix.angToPix(nside_fracdet, lon_array, lat_array)
    cut_fracdet = (np.random.uniform(size=len(lon_array)) < m_fracdet[pix_array])

    lon_array = lon_array[cut_fracdet]
    lat_array = lat_array[cut_fracdet]
    mag_1_array = mag_1_array[cut_fracdet]
    mag_2_array = mag_2_array[cut_fracdet]
    mag_1_error_array = mag_1_error_array[cut_fracdet]
    mag_2_error_array = mag_2_error_array[cut_fracdet]
    mag_extinction_1_array = mag_extinction_1_array[cut_fracdet]
    mag_extinction_2_array = mag_extinction_2_array[cut_fracdet]
    mc_source_id_array = mc_source_id_array[cut_fracdet]

    # Cut out the entries that are easily detectable
    """
    lon_population = lon_population[cut_difficulty_population]
    lat_population = lat_population[cut_difficulty_population]
    distance_population = distance_population[cut_difficulty_population]
    stellar_mass_population = stellar_mass_population[cut_difficulty_population]
    r_physical_population = r_physical_population[cut_difficulty_population]
    n_g24_population = n_g24_population[cut_difficulty_population]
    abs_mag_population = abs_mag_population[cut_difficulty_population]
    surface_brightness_population = surface_brightness_population[cut_difficulty_population]
    ellipticity_population = ellipticity_population[cut_difficulty_population]
    position_angle_population = position_angle_population[cut_difficulty_population]
    age_population = age_population[cut_difficulty_population]
    metal_z_population = metal_z_population[cut_difficulty_population]
    mc_source_id_population = mc_source_id_population[cut_difficulty_population]
    """
    
    # Create bonus columns
    
    print "Creating bonus columns..."
    distance_modulus_population = ugali.utils.projector.distanceToDistanceModulus(distance_population)
    hpix_32_population = ugali.utils.healpix.angToPix(32, lon_population, lat_population) # Make sure this matches the dataset

    # Local stellar density
    pixarea = healpy.nside2pixarea(nside_density, degrees=True) * 60.**2 # arcmin^2
    density_population = m_density[ugali.utils.healpix.angToPix(nside_density, lon_population, lat_population)] / pixarea # arcmin^-2

    # Average fracdet within the azimuthally averaged half-light radius
    #m_fracdet_zero = np.where(m_fracdet >= 0., m_fracdet, 0.)
    #m_fracdet_zero = m_fracdet
    r_half = np.degrees(np.arctan2(r_physical_population, distance_population)) # Azimuthally averaged half-light radius in degrees
    fracdet_half_population = meanFracdet(m_fracdet, lon_population, lat_population, r_half)
    fracdet_core_population = meanFracdet(m_fracdet, lon_population, lat_population, 0.1)
    fracdet_wide_population = meanFracdet(m_fracdet, lon_population, lat_population, 0.5)

    # Magnitude limits
    nside_maglim = healpy.npix2nside(len(m_maglim_g))
    pix_population = ugali.utils.healpix.angToPix(nside_maglim, lon_population, lat_population)
    maglim_g_population = m_maglim_g[pix_population]
    maglim_r_population = m_maglim_r[pix_population]
    
    # E(B-V)
    nside_ebv = healpy.npix2nside(len(m_ebv))
    pix_population = ugali.utils.healpix.angToPix(nside_ebv, lon_population, lat_population)
    ebv_population = m_ebv[pix_population]

    # Survey
    survey_population = np.tile(config['survey'], len(lon_population))

    # Number of surviving catalog stars
    n_catalog_population = np.histogram(mc_source_id_array, bins=np.arange(mc_source_id_population[0] - 0.5, mc_source_id_population[-1] + 0.51))[0]

    # Faked-up coadd_object_ids
    coadd_object_id_array = []
    for mc_source_id in mc_source_id_population:
        coadd_object_id_array.append((1000000 * mc_source_id) + 1 + np.arange(np.sum(mc_source_id == mc_source_id_array)))
    coadd_object_id_array = -1 * np.concatenate(coadd_object_id_array) # Assign negative numbers to distinguish from real objects

    # Catalog output file

    # for ii in range(0, len(d.formats)): print '\'%s\': [ , \'%s\'],'%(d.names[ii], d.formats[ii])
    
    # See: 
    # https://github.com/sidneymau/simple/blob/master/search_algorithm.py 
    # https://github.com/sidneymau/simple/blob/master/config.yaml
    # /home/s1/kadrlica/projects/y3a2/dsphs/v2/skim/ , e.g., /home/s1/kadrlica/projects/y3a2/dsphs/v2/skim/y3a2_ngmix_cm_11755.fits

    #default_array = np.tile(np.nan, len(mc_source_id_array)) # To recognize that those values are synthetic filler
    default_array = np.tile(-9999., len(mc_source_id_array))

    """
    # Column name, data, fits format
    # Y3A2 pre-Gold
    key_map = {'CM_MAG_ERR_G': [mag_1_error_array, 'D'],
               'CM_MAG_ERR_R': [mag_2_error_array, 'D'],
               'CM_MAG_G': [mag_1_array, 'D'],
               'CM_MAG_R': [mag_2_array, 'D'],
               'CM_T': [default_array, 'D'],
               'CM_T_ERR': [default_array, 'D'],
               'COADD_OBJECT_ID': [coadd_object_id_array, 'K'],
               'DEC': [lat_array, 'D'],
               'FLAGS': [default_array, 'K'],
               'PSF_MAG_ERR_G': [mag_1_error_array, 'D'],
               'PSF_MAG_ERR_R': [mag_2_error_array, 'D'],
               'PSF_MAG_G': [mag_1_array, 'D'],
               'PSF_MAG_R': [mag_2_array, 'D'],
               'RA': [lon_array, 'D'],
               'SEXTRACTOR_FLAGS_G': [np.tile(0, len(mc_source_id_array)), 'I'],
               'SEXTRACTOR_FLAGS_R': [np.tile(0, len(mc_source_id_array)), 'I'],
               'WAVG_MAG_PSF_G': [mag_1_array, 'E'],
               'WAVG_MAG_PSF_R': [mag_2_array, 'E'],
               'WAVG_MAGERR_PSF_G': [mag_1_error_array, 'E'],
               'WAVG_MAGERR_PSF_R': [mag_2_error_array, 'E'],
               'WAVG_SPREAD_MODEL_I': [default_array, 'E'],
               'WAVG_SPREADERR_MODEL_I': [default_array, 'E'],
               'EXT_SFD98_G': [default_array, 'E'],
               'EXT_SFD98_R': [default_array, 'E'],
               'CM_MAG_SFD_G': [mag_1_array, 'D'],
               'CM_MAG_SFD_R': [mag_2_array, 'D'],
               'FLAG_FOOTPRINT': [np.tile(1, len(mc_source_id_array)), 'J'],
               'FLAG_FOREGROUND': [np.tile(0, len(mc_source_id_array)), 'J'],
               'EXTENDED_CLASS_MASH': [np.tile(0, len(mc_source_id_array)), 'K'],
               'PSF_MAG_SFD_G': [mag_1_array, 'D'],
               'PSF_MAG_SFD_R': [mag_2_array, 'D'],
               'WAVG_MAG_PSF_SFD_G': [mag_1_array, 'E'],
               'WAVG_MAG_PSF_SFD_R': [mag_2_array, 'E']}
    """
    
    if config['survey'] == 'des':
        # Y3 Gold v2.0
        key_map = odict([
                ('COADD_OBJECT_ID', [coadd_object_id_array, 'K']),
                ('RA', [lon_array, 'D']),
                ('DEC', [lat_array, 'D']),
                ('SOF_PSF_MAG_CORRECTED_G', [mag_1_array, 'D']),
                ('SOF_PSF_MAG_CORRECTED_R', [mag_2_array, 'D']),
                ('SOF_PSF_MAG_ERR_G', [mag_1_error_array, 'D']),
                ('SOF_PSF_MAG_ERR_R', [mag_2_error_array, 'D']),
                ('A_SED_SFD98_G', [mag_extinction_1_array, 'E']),
                ('A_SED_SFD98_R', [mag_extinction_2_array, 'E']),
                ('WAVG_MAG_PSF_G', [mag_1_array+mag_extinction_1_array, 'E']),
                ('WAVG_MAG_PSF_R', [mag_2_array+mag_extinction_2_array, 'E']),
                ('WAVG_MAGERR_PSF_G', [mag_1_error_array, 'E']),
                ('WAVG_MAGERR_PSF_R', [mag_2_error_array, 'E']),
                ('WAVG_SPREAD_MODEL_I', [default_array, 'E']),
                ('WAVG_SPREADERR_MODEL_I', [default_array, 'E']),
                ('SOF_CM_T', [default_array, 'D']),
                ('SOF_CM_T_ERR', [default_array, 'D']),
                ('FLAGS_GOLD', [np.tile(0, len(mc_source_id_array)), 'J']),
                ('EXTENDED_CLASS_MASH_SOF', [np.tile(0, len(mc_source_id_array)), 'I']),
                ])
    elif config['survey'] == 'ps1':
        # PS1
        key_map = odict([
                ('OBJID', [coadd_object_id_array, 'K']),
                ('RA', [lon_array, 'D']),
                ('DEC', [lat_array, 'D']),
                #('UNIQUEPSPSOBID', [coadd_object_id_array, 'K']),
                #('OBJINFOFLAG', [default_array, 'E']),
                #('QUALITYFLAG', [np.tile(16, len(mc_source_id_array)), 'I']),
                #('NSTACKDETECTIONS', [np.tile(99, len(mc_source_id_array)), 'I']),
                #('NDETECTIONS', [np.tile(99, len(mc_source_id_array)), 'I']),
                #('NG', [default_array, 'E']),
                #('NR', [default_array, 'E']),
                #('NI', [default_array, 'E']),
                ('GFPSFMAG', [mag_1_array+mag_extinction_1_array, 'E']),
                ('RFPSFMAG', [mag_2_array+mag_extinction_2_array, 'E']),
                #('IFPSFMAG', [np.tile(0., len(mc_source_id_array)), 'E']), # To pass star selection
                ('GFPSFMAGERR', [mag_1_error_array, 'E']),
                ('RFPSFMAGERR', [mag_2_error_array, 'E']),
                #('IFPSFMAGERR', [default_array, 'E']),
                #('GFKRONMAG', [mag_1_array, 'E']),
                #('RFKRONMAG', [mag_2_array, 'E']),
                #('IFKRONMAG', [np.tile(0., len(mc_source_id_array)), 'E']), # To pass star selection
                #('GFKRONMAGERR', [mag_1_error_array, 'E']),
                #('RFKRONMAGERR', [mag_2_error_array, 'E']),
                #('IFKRONMAGERR', [default_array, 'E']),
                #('GFLAGS', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('RFLAGS', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('IFLAGS', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('GINFOFLAG', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('RINFOFLAG', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('IINFOFLAG', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('GINFOFLAG2', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('RINFOFLAG2', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('IINFOFLAG2', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('GINFOFLAG3', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('RINFOFLAG3', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('IINFOFLAG3', [np.tile(0, len(mc_source_id_array)), 'I']),
                #('PRIMARYDETECTION', [default_array, 'E']),
                #('BESTDETECTION', [default_array, 'E']),
                #('EBV', [default_array, 'E']),
                #('EXTSFD_G', [mag_extinction_1_array, 'E']),
                #('EXTSFD_R', [mag_extinction_2_array, 'E']),
                #('EXTSFD_I', [default_array, 'E']),
                ('GFPSFMAG_SFD', [mag_1_array, 'E']),
                ('RFPSFMAG_SFD', [mag_2_array, 'E']),
                ('EXTENDED_CLASS', [np.tile(0, len(mc_source_id_array)), 'I']),
                ])
    key_map['MC_SOURCE_ID'] = [mc_source_id_array, 'K']

    print "Writing catalog files..."
    columns = []
    for key in key_map:
        columns.append(pyfits.Column(name=key, format=key_map[key][1], array=key_map[key][0]))
    tbhdu = pyfits.BinTableHDU.from_columns(columns)
    tbhdu.header.set('AREA', simulation_area, 'Simulation area (deg^2)')

    for mc_source_id_chunk in np.split(np.arange(mc_source_id_start, mc_source_id_start + n), n // n_chunk):
        print('  writing MC_SOURCE_ID values from %i to %i' % (mc_source_id_chunk[0], mc_source_id_chunk[-1]))
        cut_chunk = np.in1d(mc_source_id_array, mc_source_id_chunk)
        outfile = '%s/sim_catalog_%s_mc_source_id_%07i-%07i.fits'%(tag, tag, mc_source_id_chunk[0], mc_source_id_chunk[-1])
        header = copy.deepcopy(tbhdu.header)
        header.set('IDMIN',mc_source_id_chunk[0], 'Minimum MC_SOURCE_ID')
        header.set('IDMAX',mc_source_id_chunk[-1], 'Maximum MC_SOURCE_ID')
        pyfits.writeto(outfile, tbhdu.data[cut_chunk], header, clobber=True)

    # Population metadata output file
    
    print "Writing population metadata file..."
    tbhdu = pyfits.BinTableHDU.from_columns([
        pyfits.Column(name='RA', format='E', array=lon_population, unit='deg'),
        pyfits.Column(name='DEC', format='E', array=lat_population, unit='deg'),
        pyfits.Column(name='DISTANCE', format='E', array=distance_population, unit='kpc'),
        pyfits.Column(name='DISTANCE_MODULUS', format='E', array=distance_modulus_population, unit='mag'),
        pyfits.Column(name='STELLAR_MASS', format='E', array=stellar_mass_population, unit='m_solar'),
        pyfits.Column(name='R_PHYSICAL', format='E', array=r_physical_population, unit='kpc'),
        pyfits.Column(name='N_G22', format='J', array=n_g22_population, unit=''),
        pyfits.Column(name='N_G24', format='J', array=n_g24_population, unit=''),
        pyfits.Column(name='N_CATALOG', format='J', array=n_catalog_population, unit=''),
        pyfits.Column(name='DIFFICULTY', format='J', array=difficulty_population, unit=''),
        pyfits.Column(name='ABS_MAG', format='E', array=abs_mag_population, unit='mag'),
        pyfits.Column(name='SURFACE_BRIGHTNESS', format='E', array=surface_brightness_population, unit='mag arcsec^-2'),
        pyfits.Column(name='ELLIPTICITY', format='E', array=ellipticity_population, unit=''),
        pyfits.Column(name='POSITION_ANGLE', format='E', array=position_angle_population, unit='deg'),
        pyfits.Column(name='AGE', format='E', array=age_population, unit='Gyr'),
        pyfits.Column(name='METAL_Z', format='E', array=metal_z_population, unit=''),
        pyfits.Column(name='MC_SOURCE_ID', format='K', array=mc_source_id_population, unit=''),
        pyfits.Column(name='HPIX_32', format='E', array=hpix_32_population, unit=''),
        pyfits.Column(name='DENSITY', format='E', array=density_population, unit='arcmin^-2'),
        pyfits.Column(name='FRACDET_HALF', format='E', array=fracdet_half_population, unit=''),
        pyfits.Column(name='FRACDET_CORE', format='E', array=fracdet_core_population, unit=''),
        pyfits.Column(name='FRACDET_WIDE', format='E', array=fracdet_wide_population, unit=''),
        pyfits.Column(name='MAGLIM_G', format='E', array=maglim_g_population, unit='mag'),
        pyfits.Column(name='MAGLIM_R', format='E', array=maglim_r_population, unit='mag'),
        pyfits.Column(name='EBV', format='E', array=ebv_population, unit='mag'),
        pyfits.Column(name='SURVEY', format='A12', array=survey_population, unit=''),
    ])
    tbhdu.header.set('AREA', simulation_area, 'Simulation area (deg^2)')
    tbhdu.writeto('%s/sim_population_%s_mc_source_id_%07i-%07i.fits'%(tag, tag, mc_source_id_start, mc_source_id_start + n - 1), clobber=True)

    # 5284.2452461023322

    # Mask output file

    print "Writing population mask file..."
    outfile_mask = '%s/sim_mask_%s_cel_nside_%i.fits'%(tag, tag, healpy.npix2nside(len(mask)))
    if not os.path.exists(outfile_mask):
        healpy.write_map(outfile_mask, mask.astype(int), nest=True, coord='C', overwrite=True)
        os.system('gzip -f %s'%(outfile_mask))
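# A minimal sketch (not part of the original pipeline) showing how the
# DIFFICULTY bit flags written above could be decoded when the population
# metadata file is read back in. The bit meanings follow the masking block
# above; the file name is a hypothetical placeholder.
import astropy.io.fits as pyfits

pop = pyfits.getdata('sim_population_example.fits')  # hypothetical path
too_extended = (pop['DIFFICULTY'] & 0b0001) != 0  # not simulated: too extended
too_easy = (pop['DIFFICULTY'] & 0b1110) != 0      # dropped: bright and richly populated
simulated = pop['DIFFICULTY'] == 0                # only these reach the catalog files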
Example #42
0
def plot_healpix_map(data,
                     nest=False,
                     cmap='viridis',
                     colorbar=True,
                     label=None,
                     basemap=None,
                     vlimits=None):
    """Plot a healpix map using an all-sky projection.
    Pass the data array through :func:`prepare_data` to select a subset to plot
    and clip the color map to specified values or percentiles.
    This function is similar to :func:`plot_grid_map` but is generally slower
    at high resolution and has less elegant handling of pixels that wrap around
    in RA, which are not drawn.
    Requires that matplotlib, basemap, and healpy are installed.
    Parameters
    ----------
    data : array or masked array
        1D array of data associated with each healpix.  Must have a size that
        exactly matches the number of pixels for some NSIDE value. Use the
        output of :func:`prepare_data` as a convenient way to specify
        data cuts and color map clipping.
    nest : bool
        If True, assume NESTED pixel ordering.  Otherwise, assume RING pixel
        ordering.
    cmap : colormap name or object
        Matplotlib colormap to use for mapping data values to colors.
    colorbar : bool
        Draw a colorbar below the map when True.
    label : str or None
        Label to display under the colorbar.  Ignored unless colorbar is True.
    basemap : Basemap object or None
        Use the specified basemap or create a default basemap using
        :func:`init_sky` when None.
    vlimits : tuple of float or None
        Optional (vmin, vmax) values used to normalize the color map,
        overriding any vmin/vmax attributes attached to the data array.
    Returns
    -------
    basemap
        The basemap used for the plot, which will match the input basemap
        provided, or be a newly created basemap if None was provided.
    """
    import healpy as hp
    import matplotlib.pyplot as plt
    import matplotlib.colors
    from matplotlib.collections import PolyCollection

    data = prepare_data(data)
    if len(data.shape) != 1:
        raise ValueError('Invalid data array, should be 1D.')
    nside = hp.npix2nside(len(data))

    if basemap is None:
        basemap = init_sky()

    # Get pixel boundaries as quadrilaterals.
    corners = hp.boundaries(nside, np.arange(len(data)), step=1, nest=nest)
    corner_theta, corner_phi = hp.vec2ang(corners.transpose(0, 2, 1))
    corner_ra, corner_dec = (np.degrees(corner_phi),
                             np.degrees(np.pi / 2 - corner_theta))
    # Convert sky coords to map coords.
    x, y = basemap(corner_ra, corner_dec)
    # Regroup into pixel corners.
    verts = np.array([x.reshape(-1, 4), y.reshape(-1, 4)]).transpose(1, 2, 0)

    # Find and mask any pixels that wrap around in RA.
    uv_verts = np.array(
        [corner_phi.reshape(-1, 4),
         corner_theta.reshape(-1, 4)]).transpose(1, 2, 0)
    theta_edge = np.unique(uv_verts[:, :, 1])
    phi_edge = np.radians(basemap.lonmax)
    eps = 0.1 * np.sqrt(hp.nside2pixarea(nside))
    wrapped1 = hp.ang2pix(nside, theta_edge, phi_edge - eps, nest=nest)
    wrapped2 = hp.ang2pix(nside, theta_edge, phi_edge + eps, nest=nest)
    wrapped = np.unique(np.hstack((wrapped1, wrapped2)))
    data.mask[wrapped] = True

    # Normalize the data using its vmin, vmax attributes, if present.
    try:
        if vlimits is None:
            norm = matplotlib.colors.Normalize(vmin=data.vmin, vmax=data.vmax)
        else:
            norm = matplotlib.colors.Normalize(vmin=vlimits[0],
                                               vmax=vlimits[1])
    except AttributeError:
        norm = None

    # Make the collection and add it to the plot.
    collection = PolyCollection(verts,
                                array=data,
                                cmap=cmap,
                                norm=norm,
                                edgecolors='none')

    axes = plt.gca() if basemap.ax is None else basemap.ax
    axes.add_collection(collection)
    axes.autoscale_view()

    if colorbar:
        bar = plt.colorbar(collection,
                           ax=basemap.ax,
                           orientation='horizontal',
                           spacing='proportional',
                           pad=0.01,
                           aspect=50)
        if label:
            bar.set_label(label)

    return basemap
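# A short usage sketch for plot_healpix_map, assuming the companion helpers
# init_sky and prepare_data from the same module are importable and basemap
# is installed. The random map below is purely illustrative.
import healpy as hp
import numpy as np
import matplotlib.pyplot as plt

nside = 16
demo_map = np.random.uniform(size=hp.nside2npix(nside))
plot_healpix_map(demo_map, nest=False, label='demo value')
plt.show()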
Example #43
0
def main(cat_path,
         mask_path,
         theory_dir,
         nside,
         z_bin_edges,
         out_dir,
         chain_version=None,
         data_result_dir=None,
         nside_fit=None,
         zbins=None,
         k0=None,
         nmocks=1,
         min_mock=0,
         lmax=3500,
         n_contam=0,
         gen_catalogs=False,
         test=False,
         force_truth=None,
         force=None):
    """
    Create mock density field(s) and/or catalog(s)

    Note that the theory directory should be the upper-most directory for
    CosmoSIS output, and should contain a directory 'galaxy_cl'

    :param cat_path: The path to the catalog file
    :type cat_path: ``str`` or :class:`os.PathLike`
    :param mask_path: The path to the mask file
    :type mask_path: ``str`` or :class:`os.PathLike`
    :param theory_dir: The top level directory containing the CosmoSIS theory
        output. Must contain a directory 'galaxy_cl' with files 'ell.txt' and
        'bin_{i}_{j}.txt' where 'i' and 'j' are redshift bin indices
    :type theory_dir: ``str`` or :class:`os.PathLike`
    :param nside: The resolution at which to create the mock density field(s)
    :type nside: ``int``
    :param z_bin_edges: The edges of the redshift bins for all redshift bins.
        Only the edges for the selected bins will be used, but all must be given
    :type z_bin_edges: (``Nbins+1``,) array-like of ``float``
    :param out_dir: The parent directory in which to save the mock density
        map(s) and catalog(s). Make sure write permissions are available. The
        actual results will be stored under here in a directory
        'gaussian_mock_output' if ``k0`` is ``None`` or 'lognormal_mock_output'
        otherwise: counts maps will be under that in 'ngal_maps' and
        catalogs in 'catalogs'
    :type out_dir: ``str`` or :class:`os.PathLike`
    :param chain_version: The run version of the data chains that are used as 
        the "truth" for the contamination. Ignored if no contamination is added, 
        otherwise it **must** be specified. Default ``None``
    :type chain_version: ``int`` or ``NoneType``, optional
    :param data_result_dir: The directory within which the results from the data 
        systematics fits can be found. This is needed to ensure that the proper 
        coefficients are used to contaminate the mocks. The mean fit parameters 
        as well as the map importance order for each redshift bin should be 
        stored in subdirectories 'zbin#' under this directory, where '#' is 
        replaced by the bin numbers included from `zbins`. The names of these 
        files are assumed to be 'mean_parameters_nside{res}.pkl' and 
        'map_importance_order_const_cov_fit{res}_v{v}.npy', where 'res' is the 
        fitting resolution for the redshift bin and 'v' is the version of the 
        data fit specified by `chain_version`. Ignored if no contamination is 
        added, otherwise it **must** be specified. Default ``None``
    :type data_result_dir: ``str`` or :class:`os.PathLike` or ``NoneType``, 
        optional
    :param nside_fit: The resolutions used when fitting for the redshift bins.
        Ignored if no contamination is being applied, otherwise it **must** be 
        specified. Default ``None``
    :type nside_fit: ``int`` or (``Nbins``,) array-like of ``int`` or 
        ``NoneType``, optional
    :param zbins: The redshift bin(s) for which to create mocks. Assumes there
        are ``Nbins`` total redshift bins, and the indexing starts from 1
        (not 0!). If given, this must have length of at least 1 and no larger
        than ``Nbins``. If ``None`` (default), makes a mock for all redshift
        bins
    :type zbins: ``NoneType`` or 1D array-like of ``int``, optional
    :param k0: Parameter for the skewness of the lognormal field, which can be
        different for each redshift bin, in which case it should have length
        equal to ``zbins``. If ``None`` (default), creates a Gaussian density
        field rather than a lognormal one
    :type k0: ``NoneType`` or ``int`` or 1D array-like of ``int``, optional
    :param nmocks: The number of mocks to create. Default 1
    :type nmocks: ``int``, optional
    :param min_mock: The number of the first mock, used to edit the random seed
        and file names in the case where some mocks have already been created.
        If some mocks have already been created and this is not used, an error
        will be raised when trying to write the catalog. Default 0
    :type min_mock: ``int``, optional
    :param lmax: The maximum scale beyond which to truncate C(l). Default 3500
    :type lmax: ``float``, optional
    :param n_contam: The number of systematics maps by which to contaminate.
        Use an array-like for multiple levels of contamination. Default 0.
    :type n_contam: ``int`` or array-like of ``int``, optional
    :param gen_catalogs: If ``True``, also generate mock galaxy catalog(s) from
        the density field(s). Default ``False``
    :type gen_catalogs: ``bool``, optional
    :param test: If ``True``, set a random seed for reproducibility. Default
        ``False``
    :type test: ``bool``, optional
    :param force_truth: If ``True`` and true mock overdensity fields with the 
        specified numbers already exist, create new ones and overwrite the 
        files. This should be done with caution if adding contamination levels 
        to existing mocks, as it will mean the true overdensity field for 
        existing catalogs is overwritten. But random seed setting when `test` is 
        ``True`` will be off when adding contamination levels unless everything 
        that is already stored is redone. If ``None`` (default), this is set to 
        ``False`` if `test` is ``False`` or ``True`` if `test` is ``True``
    :type force_truth: ``bool`` or ``NoneType``, optional
    :param force: If ``True`` and files exist for mocks with the specified 
        numbers (other than the true overdensities, see `force_truth`), they 
        will be overwritten with the newly generated mocks rather than being 
        skipped. If ``None`` (default), set to the same as `force_truth`
    :type force: ``bool`` or ``NoneType``, optional
    """
    mask = lsssys.Mask(
        pathlib.Path(mask_path).expanduser().resolve().as_posix(),
        ZMAXcol=None)
    cat = lsssys.Redmagic(
        pathlib.Path(cat_path).expanduser().resolve().as_posix())
    sorted_z = np.sort(z_bin_edges)
    if zbins is not None:
        if (len(zbins) >= len(sorted_z) or np.min(zbins) < 1
                or np.max(zbins) > len(sorted_z)):
            raise ValueError("Invalid size or value(s) for zbins")
        sorted_zbins = np.sort(zbins)
        zedges = np.dstack(
            (sorted_z[sorted_zbins - 1], sorted_z[sorted_zbins])).squeeze()
        number_density = calculate_number_density(cat, mask, zedges)[1]
    else:
        ngal_tot, number_density = calculate_number_density(
            cat, mask, sorted_z)
        sorted_zbins = np.arange(len(z_bin_edges) - 1) + 1
        zedges = np.dstack((sorted_z[:-1], sorted_z[1:])).squeeze()
    ngal_mean = np.atleast_1d(number_density) * hp.nside2pixarea(
        nside, degrees=True) * 60.**2
    output_dir = pathlib.Path(out_dir).expanduser().resolve()
    del cat
    if k0 is not None:
        if hasattr(k0, "__len__"):
            if len(k0) > 1:
                if zbins is not None and len(k0) != len(zbins):
                    raise ValueError("Mismatch between zbins and k0")
                elif zbins is None and len(k0) != (len(z_bin_edges) - 1):
                    raise ValueError("Mismatch between z_bin_edges and k0")
            else:
                k0 = k0[0]
        output_dir = output_dir.joinpath("lognormal_mock_output")
        ngal_fill = None
    else:
        output_dir = output_dir.joinpath("gaussian_mock_output")
        ngal_fill = 0
    output_dir.mkdir(parents=True, exist_ok=True)
    np.save(output_dir.joinpath("mean_ngals.npy"), ngal_mean)
    theory = initialize_theory(theory_dir, lmax, zbins, k0)
    delta_output_dir = output_dir.joinpath("delta_maps")
    delta_output_dir.mkdir(exist_ok=True)
    ngals_output_dir = output_dir.joinpath("ngal_maps")
    ngals_output_dir.mkdir(exist_ok=True)
    for n in np.unique(np.append(np.atleast_1d(n_contam), 0)):
        delta_output_dir.joinpath(f"n_contaminate_{n}").mkdir(exist_ok=True)
        ngals_output_dir.joinpath(f"n_contaminate_{n}").mkdir(exist_ok=True)
    if gen_catalogs:
        noisy_ngals_output_dir = output_dir.joinpath("poisson_ngal_maps")
        noisy_ngals_output_dir.mkdir(exist_ok=True)
        cats_output_dir = output_dir.joinpath("catalogs")
        cats_output_dir.mkdir(exist_ok=True)
        for n in np.atleast_1d(n_contam):
            noisy_ngals_output_dir.joinpath(f"n_contaminate_{n}").mkdir(
                exist_ok=True)
            cats_output_dir.joinpath(f"n_contaminate_{n}").mkdir(exist_ok=True)
    if hasattr(n_contam, "__len__") or n_contam != 0:
        sys_dir = pathlib.Path("/spiff/wagoner47/des/y3/systematics")
        if chain_version is None:
            raise ValueError(
                "Must specify chain_version if contamination is required")
        if data_result_dir is None:
            raise ValueError(
                "Must specify data_result_dir if contamination is required")
        coeff_dir = pathlib.Path(data_result_dir)
        if nside_fit is None:
            raise ValueError(
                "Must specify nside_fit if contamination is required")
        if not hasattr(nside_fit, "__len__"):
            nside_fit = np.full(sorted_zbins.size, nside_fit)
        elif len(nside_fit) != sorted_zbins.size:
            if len(nside_fit) == 1:
                nside_fit = np.full(sorted_zbins.size, nside_fit[0])
            else:
                raise ValueError(
                    "Different number of specified nside_fit and redshift bins"
                )
    mock = lsssys.Mock(theory, nside)
    mock.mask = mask.mask
    if force_truth is None:
        force_truth = test
    if force is None:
        force = force_truth
    if test:
        rand_states = np.arange(min_mock, nmocks + min_mock)
    else:
        rand_states = [None] * nmocks
    for i, r_state in enumerate(
            tqdm(rand_states, desc="Mock", dynamic_ncols=True), min_mock):
        try:
            assert not force_truth
            mock = _load_deltas(mock, i, delta_output_dir / "n_contaminate_0",
                                sorted_zbins.size)
        except (AssertionError, IOError):
            mock.gen_maps(rs=np.random.RandomState(r_state))
            if k0 is not None:
                mock.lognormalise(
                    k0,
                    [deltai.data[~mask.mask].std() for deltai in mock.delta])
            for z, deltai in enumerate(mock.delta):
                deltai.save(delta_output_dir.joinpath(
                    "n_contaminate_0", f"mock_{i}_bin{z}.fits").as_posix(),
                            clobber=True)
        try:
            assert not force
            mock = _load_ngals(mock, i, ngals_output_dir / "n_contaminate_0")
        except (AssertionError, IOError):
            mock.gen_ngal(ngal_mean, ngal_fill)
            for z in range(len(mock.ngal)):
                mock.ngal[z].data[~mask.mask] *= mask.fracdet[~mask.mask]
            mock.save(ngals_output_dir.joinpath("n_contaminate_0",
                                                f"mock_{i}.fits").as_posix(),
                      clobber=True)
        if gen_catalogs:
            for n in np.sort(np.atleast_1d(n_contam)):
                if n == 0:
                    mock = _load_deltas(mock, i,
                                        delta_output_dir / "n_contaminate_0",
                                        sorted_zbins.size)
                    mock = _load_ngals(mock, i,
                                       ngals_output_dir / "n_contaminate_0")
                else:
                    try:
                        assert not force
                        mock = _load_deltas(
                            mock, i, delta_output_dir / f"n_contaminate_{n}",
                            sorted_zbins.size)
                    except (AssertionError, IOError):
                        mock = _load_deltas(
                            mock, i, delta_output_dir / "n_contaminate_0",
                            sorted_zbins.size)
                        mock = contaminate_mock(mock, n, sorted_zbins,
                                                nside_fit, coeff_dir, sys_dir,
                                                chain_version)
                        for z, deltai in enumerate(mock.delta):
                            deltai.save(delta_output_dir.joinpath(
                                f"n_contaminate_{n}",
                                f"mock_{i}_bin{z}.fits").as_posix(),
                                        clobber=True)
                    try:
                        assert not force
                        mock = _load_ngals(
                            mock, i, ngals_output_dir / f"n_contaminate_{n}")
                    except (AssertionError, IOError):
                        mock.gen_ngal(ngal_mean, ngal_fill)
                        for z in range(len(mock.ngal)):
                            mock.ngal[z].data[~mask.mask] *= mask.fracdet[~mask.mask]
                        mock.save(ngals_output_dir.joinpath(
                            f"n_contaminate_{n}", f"mock_{i}.fits").as_posix(),
                                  clobber=True)
                try:
                    assert not force
                    mock = _load_ngals(
                        mock, i, noisy_ngals_output_dir / f"n_contaminate_{n}")
                except (AssertionError, IOError):
                    mock.poisson_sample()
                    mock.save(noisy_ngals_output_dir.joinpath(
                        f"n_contaminate_{n}", f"mock_{i}.fits").as_posix(),
                              clobber=True)
                ngal_tot = np.array([
                    ng.data[~mask.mask].astype(int).sum() for ng in mock.ngal
                ])
                gen_mock_catalogs(mock, cats_output_dir / f"n_contaminate_{n}",
                                  i, zedges, force)
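# A hedged example of invoking main() to build two Gaussian mocks for the
# first two redshift bins. Every path below is a hypothetical placeholder,
# and the call assumes the lsssys environment used above is available.
main(cat_path='~/data/redmagic_catalog.fits',      # hypothetical
     mask_path='~/data/redmagic_mask.fits',        # hypothetical
     theory_dir='~/cosmosis_output',               # must contain 'galaxy_cl'
     nside=512,
     z_bin_edges=[0.15, 0.35, 0.5, 0.65, 0.8, 0.9],
     out_dir='~/mock_output',
     zbins=[1, 2],
     nmocks=2,
     test=True)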
Example #44
0
    print(f"Reading {fname_in}", flush=True)
    w = hp.read_map(fname_in, None, verbose=False)
    bad = np.logical_or(w[0] == hp.UNSEEN, w[0] == 0)
    # Discard worst pixels. This limit is arbitrary.
    not_bad = np.logical_not(bad)
    good = not_bad.copy()
    wsorted = np.sort(w[0, good])
    lim = wsorted[int(0.99 * wsorted.size)]
    not_bad[w[0] > lim] = False
    
    lim = 4 * np.median(w[0, good])
    good[w[0] > lim] = False
    fsky = np.sum(good) / good.size
    fsky_raw = 1 - np.sum(bad) / good.size
    nside = hp.get_nside(w)
    pix_area = hp.nside2pixarea(nside, degrees=True)
    pix_side = np.sqrt(pix_area) * 60  # in arc min

    print(f"el = {el}, fsky(99%) = {fsky_raw:8.3} (fsky(good) = {fsky:8.3})")

    for band in "LFL1", "LFL2", "MFL1", "MFL2", "HFL1", "HFL2":
        # Observing efficiency from
        #   https://docs.google.com/spreadsheets/d/1jR9gSsJ0w1dEO5Jb_URlD3SWYtgFtwBgB3W88p6puo0/edit?usp=sharing
        obs_eff = {
            "LFL1" : 0.25,
            "LFL2" : 0.25,
            "MFL1" : 0.25,
            "MFL2" : 0.25,
            "HFL1" : 0.22,
            "HFL2" : 0.22,
        }[band]
Example #45
0
    hdulist = fits.open(dsm_file)
    pixres = hdulist[0].header['PIXAREA']
    dsm_table = hdulist[1].data
    ra_deg_DSM = dsm_table['RA']
    dec_deg_DSM = dsm_table['DEC']
    temperatures = dsm_table['T_{0:.0f}'.format(freq / 1e6)]
    fluxes_DSM = temperatures * (2.0 * FCNST.k * freq**2 /
                                 FCNST.c**2) * pixres / CNST.Jy
    spindex = dsm_table['spindex'] + 2.0
    freq_DSM = 0.150  # in GHz
    freq_catalog = freq_DSM * 1e9 + NP.zeros(fluxes_DSM.size)
    catlabel = NP.repeat('DSM', fluxes_DSM.size)
    ra_deg = ra_deg_DSM
    dec_deg = dec_deg_DSM
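    # Added note: sqrt(pixarea * 4 / pi) below is the diameter of a circle
    # whose area equals an nside=64 HEALPix pixel, so each DSM pixel is
    # modeled as a circular source of that angular size.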
    majax = NP.degrees(
        NP.sqrt(HP.nside2pixarea(64) * 4 / NP.pi) * NP.ones(fluxes_DSM.size))
    minax = NP.degrees(
        NP.sqrt(HP.nside2pixarea(64) * 4 / NP.pi) * NP.ones(fluxes_DSM.size))
    fluxes = fluxes_DSM

    freq_SUMSS = 0.843  # in GHz
    SUMSS_file = args['SUMSS_file']
    catalog = NP.loadtxt(SUMSS_file,
                         usecols=(0, 1, 2, 3, 4, 5, 10, 12, 13, 14, 15, 16))
    ra_deg_SUMSS = 15.0 * (catalog[:, 0] + catalog[:, 1] / 60.0 +
                           catalog[:, 2] / 3.6e3)
    dec_dd = NP.loadtxt(SUMSS_file, usecols=(3, ), dtype="|S3")
    sgn_dec_str = NP.asarray([dec_dd[i][0] for i in range(dec_dd.size)])
    sgn_dec = 1.0 * NP.ones(dec_dd.size)
    sgn_dec[sgn_dec_str == '-'] = -1.0
    dec_deg_SUMSS = sgn_dec * (NP.abs(catalog[:, 3]) + catalog[:, 4] / 60.0 +
Example #46
0
    def run(self, dataSlice, slicePoint=None):
        """
        Runs the metric for each dataSlice

        Parameters
        ----------
        dataSlice : np.ndarray
            Simulation data
        slicePoint : dict or None
            Slice point information (default None)

        Returns
        -------
        float
            Number of strongly lensed time-delay supernovae
        """
        dataSlice.sort(order=self.mjdCol)

        # Crop it down so things are coadded per night at the median MJD time
        dataSlice = collapse_night(dataSlice, nightCol=self.nightCol, filterCol=self.filterCol,
                                   m5Col=self.m5Col, mjdCol=self.mjdCol)

        # get the pixel area
        area = hp.nside2pixarea(slicePoint['nside'], degrees=True)

        if len(dataSlice) == 0:
            return self.badVal

        season_id = np.floor(calcSeason(np.degrees(slicePoint['ra']), dataSlice[self.mjdCol]))

        seasons = self.season

        if self.season == [-1]:
            seasons = np.unique(season_id)

        season_lengths = []
        median_gaps = []
        for season in seasons:
            idx = np.where(season_id == season)[0]
            bright_enough = np.zeros(idx.size, dtype=bool)
            for key in self.m5mins:
                in_filt = np.where(dataSlice[idx][self.filterCol] == key)[0]
                A_x = self.phot_properties.Ax1[key] * slicePoint['ebv']
                bright_enough[in_filt[np.where((dataSlice[idx[in_filt]][self.m5Col] - A_x) > self.m5mins[key])[0]]] = True
            idx = idx[bright_enough]
            u_filters = np.unique(dataSlice[idx][self.filterCol])
            if (len(idx) < self.min_season_obs) | (np.size(u_filters) < self.nfilters_min):
                continue
            if self.night_collapse:
                u_nights, unight_indx = np.unique(dataSlice[idx][self.nightCol], return_index=True)
                idx = idx[unight_indx]
                order = np.argsort(dataSlice[self.mjdCol][idx])
                idx = idx[order]
            mjds_season = dataSlice[self.mjdCol][idx]
            cadence = mjds_season[1:]-mjds_season[:-1]
            season_lengths.append(mjds_season[-1]-mjds_season[0])
            median_gaps.append(np.median(cadence))

        # get the cumulative season length

        cumul_season_length = np.sum(season_lengths)

        if cumul_season_length == 0:
            return self.badVal
        # Mean of the per-season median inter-night gaps
        gap_median = np.mean(median_gaps)

        # estimate the number of lensed supernovae
        cumul_season = cumul_season_length/(12.*30.)

        N_lensed_SNe_Ia = 45.7 * area / 20000. * cumul_season /\
            2.5 / (2.15 * np.exp(0.37 * gap_median))

        return N_lensed_SNe_Ia
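    # Worked example (added): for an nside=16 pixel (~13.4 deg^2), ten
    # 180-day seasons (cumul_season = 1800/360 = 5), and gap_median = 4 days,
    #   45.7 * 13.4/20000 * 5 / 2.5 / (2.15 * exp(0.37*4)) ~ 0.0065
    # lensed SNe Ia are expected in that pixel.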
Example #47
0
 def flat_bitmap(self):
     """Return flattened HEALPix representation."""
     m = np.empty(hp.nside2npix(hp.order2nside(self.order)))
     for nside, full_nside, ipix, ipix0, ipix1, samples in self.visit():
         m[ipix0:ipix1] = len(samples) / hp.nside2pixarea(nside)
     return m
Example #48
0
    dsm_file = '/data3/t_nithyanandan/project_MWA/foregrounds/gsmdata{0:0d}.fits'.format(nside)
    hdulist = fits.open(dsm_file)
    pixres = hdulist[0].header['PIXAREA']
    dsm_table = hdulist[1].data
    ra_deg_DSM = dsm_table['RA']
    dec_deg_DSM = dsm_table['DEC']
    temperatures = dsm_table['T_{0:.0f}'.format(freq/1e6)]
    fluxes_DSM = temperatures * (2.0* FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy
    spindex = dsm_table['spindex'] + 2.0
    freq_DSM = 0.150 # in GHz
    freq_catalog = freq_DSM * 1e9 + NP.zeros(fluxes_DSM.size)
    catlabel = NP.repeat('DSM', fluxes_DSM.size)
    ra_deg = ra_deg_DSM
    dec_deg = dec_deg_DSM
    majax = NP.degrees(NP.sqrt(HP.nside2pixarea(64)*4/NP.pi) * NP.ones(fluxes_DSM.size))
    minax = NP.degrees(NP.sqrt(HP.nside2pixarea(64)*4/NP.pi) * NP.ones(fluxes_DSM.size))
    fluxes = fluxes_DSM

    freq_SUMSS = 0.843 # in GHz
    SUMSS_file = '/data3/t_nithyanandan/project_MWA/foregrounds/sumsscat.Mar-11-2008.txt'
    catalog = NP.loadtxt(SUMSS_file, usecols=(0,1,2,3,4,5,10,12,13,14,15,16))
    ra_deg_SUMSS = 15.0 * (catalog[:,0] + catalog[:,1]/60.0 + catalog[:,2]/3.6e3)
    dec_dd = NP.loadtxt(SUMSS_file, usecols=(3,), dtype="|S3")
    sgn_dec_str = NP.asarray([dec_dd[i][0] for i in range(dec_dd.size)])
    sgn_dec = 1.0*NP.ones(dec_dd.size)
    sgn_dec[sgn_dec_str == '-'] = -1.0
    dec_deg_SUMSS = sgn_dec * (NP.abs(catalog[:,3]) + catalog[:,4]/60.0 + catalog[:,5]/3.6e3)
    fmajax = catalog[:,7]
    fminax = catalog[:,8]
    fpa = catalog[:,9]
Example #49
0
def config(alert_ind,
           seed=1,
           scramble=True,
           e_range=(0, np.inf),
           g_range=[1., 5.],
           gamma=2.0,
           E0=1 * TeV,
           remove=False,
           ncpu=20,
           nside=256,
           poisson=False,
           injector=True,
           verbose=True,
           smear=True):
    r""" Configure point source likelihood and injector. 

    Parameters
    ----------
    alert_ind: int
    index of IceCube alert event

    seed : int
    Seed for random number generator

    Returns
    -------
    llh : PointSourceLLH
    Point source likelihood object
    inj : PriorInjector
     Point source injector object
    """
    seasons = [("GFUOnline_v001p02", "IC86, 2011-2018"),
               ("GFUOnline_v001p02", "IC86, 2019")]
    #skymaps_path = '/data/user/steinrob/millipede_scan_archive/fits_v3_prob_map/'
    #files = glob(skymaps_path + '*.fits')
    #skymap_fits = fits.open(files[alert_ind])[0].data

    #Turn this into a function read_alert_event()
    #skymap_files = glob('/data/ana/realtime/alert_catalog_v2/2yr_prelim/fits_files/Run13*.fits.gz')
    skymap_files = glob(
        '/data/ana/realtime/alert_catalog_v2/fits_files/Run1*.fits.gz')
    #skymap_f = fits.open(skymap_files[alert_ind])
    #skymap_fits = skymap_f[1].data
    #skymap_header = skymap_f[1].header
    skymap_fits, skymap_header = hp.read_map(skymap_files[alert_ind],
                                             h=True,
                                             verbose=False)
    skymap_header = {name: val for name, val in skymap_header}
    run_id, ev_id = skymap_header['RUNID'], skymap_header['EVENTID']
    ev_mjd = skymap_header['EVENTMJD']
    ev_iso = skymap_header['START']
    signalness = skymap_header['SIGNAL']
    ev_en = skymap_header['ENERGY']
    ev_ra, ev_dec = np.radians(skymap_header['RA']), np.radians(
        skymap_header['DEC'])
    ev_stream = skymap_header['I3TYPE']
    skymap_llh = skymap_fits.copy()
    skymap_fits = np.exp(-1. * skymap_fits /
                         2.)  #Convert from 2LLH to unnormalized probability
    skymap_fits = np.where(skymap_fits > 1e-12, skymap_fits, 0.0)
    skymap_fits = skymap_fits / np.sum(skymap_fits)
    if smear:
        ninety_msk = skymap_llh < 64.2
        init_nside = hp.get_nside(skymap_llh)
        cdf = np.cumsum(np.sort(skymap_fits[ninety_msk][::-1]))
        pixs_above_ninety = np.count_nonzero(cdf > 0.1)
        original_ninety_area = hp.nside2pixarea(init_nside) * pixs_above_ninety
        new_ninety_area = hp.nside2pixarea(init_nside) * np.count_nonzero(
            skymap_fits[ninety_msk])
        original_ninety_radius = np.sqrt(original_ninety_area / np.pi)
        new_ninety_radius = np.sqrt(new_ninety_area / np.pi)
        scaled_probs = scale_2d_gauss(skymap_fits, original_ninety_radius,
                                      new_ninety_radius)
        skymap_fits = scaled_probs

    if hp.pixelfunc.get_nside(skymap_fits) != nside:
        skymap_fits = hp.pixelfunc.ud_grade(skymap_fits, nside)
    skymap_fits = skymap_fits / skymap_fits.sum()
    #print(hp.pixelfunc.get_nside(skymap_fits))
    spatial_prior = SpatialPrior(skymap_fits, containment=0.99)

    llh = []  # store individual llh as lists to prevent pointer over-writing
    multillh = MultiPointSourceLLH(ncpu=1)

    if verbose:
        print("\n seasons:")
    for season in np.atleast_1d(seasons):
        sample = season[0]
        name = season[1]

        exp, mc, livetime = Datasets[sample].season(name,
                                                    floor=np.radians(0.2))
        sinDec_bins = Datasets[sample].sinDec_bins(name)
        energy_bins = Datasets[sample].energy_bins(name)

        msg = "   - % 15s (" % season
        msg += "livetime %7.2f days, %6d events" % (livetime, exp.size)
        msg += ", mjd0 %.2f" % min(exp['time'])
        msg += ", mjd1 %.2f)" % max(exp['time'])
        if verbose:
            print(msg)

        llh_model = EnergyLLH(twodim_bins=[energy_bins, sinDec_bins],
                              allow_empty=True,
                              bounds=g_range,
                              seed=gamma,
                              kernel=1,
                              ncpu=ncpu)

        llh.append(
            PointSourceLLH(exp,
                           mc,
                           livetime,
                           mode="box",
                           scramble=scramble,
                           llh_model=llh_model,
                           nsource_bounds=(0., 1e3),
                           nsource=1.))

        multillh.add_sample(sample + " : " + name, llh[-1])

        # save a little RAM by removing items copied into LLHs
        del exp, mc

        # END for (season)

    ######################### REMOVE EVENT

    if injector is False:
        return multillh, spatial_prior
    else:
        inj = PriorInjector(spatial_prior,
                            seed=seed,
                            gamma=gamma,
                            E0=1 * TeV,
                            bunchsize=10)
        inj.fill(multillh.exp, multillh.mc, multillh.livetime)

        if verbose:
            print("\n injected spectrum:")
            print("   - %s" % str(inj.spectrum))

    return multillh, spatial_prior, inj
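# Hypothetical invocation (added sketch): the skymap and dataset paths above
# are IceCube-internal, so this only runs in that environment.
# multillh, spatial_prior, inj = config(alert_ind=0, seed=1, smear=True)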
Example #50
0
 def percentile_area(self, level):
     """
     area=percentile_area(level)
      determines the sky area of the map pixels enclosing `level` of the total probability
     """
     return (self.hpmap>=self.percentile(level)).sum()*healpy.nside2pixarea(self.nside,degrees=True)*u.deg**2
Example #51
0
 def area(self):
     nside = Localization.nside
     return hp.nside2pixarea(nside, degrees=True) * len(self.ipix)
Example #52
0
    def _get_expectation(self, data_analysis_bin, energy_bin_id, n_point_sources, n_ext_sources):

        # Compute the expectation from the model

        this_model_map = None

        for pts_id in range(n_point_sources):

            this_conv_src = self._convolved_point_sources[pts_id]

            expectation_per_transit = this_conv_src.get_source_map(energy_bin_id,
                                                                   tag=None,
                                                                   psf_integration_method=self._psf_integration_method)

            expectation_from_this_source = expectation_per_transit * data_analysis_bin.n_transits

            if this_model_map is None:

                # First addition

                this_model_map = expectation_from_this_source

            else:

                this_model_map += expectation_from_this_source

        # Now process extended sources
        if n_ext_sources > 0:

            this_ext_model_map = None

            for ext_id in range(n_ext_sources):

                this_conv_src = self._convolved_ext_sources[ext_id]

                expectation_per_transit = this_conv_src.get_source_map(energy_bin_id)

                if this_ext_model_map is None:

                    # First addition

                    this_ext_model_map = expectation_per_transit

                else:

                    this_ext_model_map += expectation_per_transit

            # Now convolve with the PSF
            if this_model_map is None:
                
                # Only extended sources
            
                this_model_map = (self._psf_convolutors[energy_bin_id].extended_source_image(this_ext_model_map) *
                                  data_analysis_bin.n_transits)
            
            else:

                this_model_map += (self._psf_convolutors[energy_bin_id].extended_source_image(this_ext_model_map) *
                                   data_analysis_bin.n_transits)


        # Now transform from the flat sky projection to HEALPiX

        if this_model_map is not None:

            # First divide for the pixel area because we need to interpolate brightness
            this_model_map = old_div(this_model_map, self._flat_sky_projection.project_plane_pixel_area)

            this_model_map_hpx = self._flat_sky_to_healpix_transform[energy_bin_id](this_model_map, fill_value=0.0)

            # Now multiply by the pixel area of the new map to go back to flux
            this_model_map_hpx *= hp.nside2pixarea(data_analysis_bin.nside, degrees=True)

        else:

            # No sources

            this_model_map_hpx = 0.0

        return this_model_map_hpx
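# A toy illustration (added, not HAWC-specific) of the unit bookkeeping used
# above: divide integrated flux by the source pixel area to get brightness,
# regrid the brightness, then multiply by the destination pixel area.
import healpy as hp
import numpy as np

flux = np.full(hp.nside2npix(64), 1.0)            # flux integrated per pixel
brightness = flux / hp.nside2pixarea(64)          # flux per steradian
brightness_hi = hp.ud_grade(brightness, 128)      # interpolate brightness, not flux
flux_hi = brightness_hi * hp.nside2pixarea(128)   # flux per (smaller) pixel
print(flux.sum(), flux_hi.sum())                  # totals agree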
Example #53
0
import gc
import time
from scipy.interpolate import interp1d
#from classy import Class

from util import histogram_hp, add_kappa_shell

# cosmological parameters
h = 0.6736
H0 = h * 100.  # km/s/Mpc
Om_m = 0.315192
c = 299792.458  # km/s

# healpix parameters
nside = 16384
delta_omega = hp.nside2pixarea(nside)
npix = (hp.nside2npix(nside))

# if downsampling
nside_out = 4096
npix_out = (hp.nside2npix(nside_out))
delta_omega_out = hp.nside2pixarea(nside_out)

# simulation name
sim_name = "AbacusSummit_base_c000_ph006"
simname = "/global/project/projectdirs/desi/cosmosim/Abacus/" + sim_name + "/lightcones/heal/"  #"/mnt/store/lgarrison/AbacusSummit_base_c000_ph006/lightcones/heal/"
Lbox = 2000.  # Mpc/h
PPD = 6912
NP = PPD**3

# particle density in 1/Mpc^3
Example #54
0
    'theta': theta,
    'phi': phi,
    'ipix': ipix
})

with open(data_dir + 'RA_DEC_FRAME.pickle', 'wb') as handle:
    pickle.dump(RA_DEC_Frame, handle, protocol=pickle.HIGHEST_PROTOCOL)
'''
Continue creating ngal map
'''
print('These two numbers should match:')
print('Total number of objects in ngal_new.fits: {}'.format(np.sum(bc)))
print('Total number of objects in catalogue: {}'.format(len(data_RA)))
bc = bc.astype('f16')  #Convert int counts to floats

pixarea = hp.nside2pixarea(nside, degrees=True)  #Get the pixel area in deg^2
pixarea_arcmin = pixarea * 60**2  #Convert pixel area in deg^2 to arcmin^2
pixresolution = np.sqrt(
    pixarea_arcmin)  #The pixel resolution is the square root of the pixel area

print('These two numbers should match:')
print('Calculated pixel resolution: {}'.format(pixresolution))
print('Pixel resolution given by healpy: {}'.format(
    hp.nside2resol(nside, arcmin=True))
      )  #This should give the same number as the calculation above

bc = bc / pixarea_arcmin  #We want the number of objects (galaxies) per arcmin^2 for each pixel

#hp.visufunc.mollview(bc)
#plt.show()
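# Added sanity check: integrating the surface-density map over pixel area
# should recover the total object count reported above.
print('Recovered total: {:.0f}'.format(np.sum(bc) * pixarea_arcmin))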
Example #55
0
def plot_sky_binned(ra,
                    dec,
                    weights=None,
                    data=None,
                    plot_type='grid',
                    max_bin_area=5,
                    clip_lo=None,
                    clip_hi=None,
                    verbose=False,
                    cmap='viridis',
                    colorbar=True,
                    label=None,
                    basemap=None):
    """Show objects on the sky using a binned plot.
    Bin values either show object counts per unit sky area or, if an array
    of associated data values is provided, mean data values within each bin.
    Objects can have associated weights.
    Requires that matplotlib and basemap are installed. When plot_type is
    "healpix", healpy must also be installed.
    Parameters
    ----------
    ra : array
        Array of object RA values in degrees. Must have the same shape as
        dec and will be flattened if necessary.
    dec : array
        Array of object DEC values in degrees. Must have the same shape as
        ra and will be flattened if necessary.
    weights : array or None
        Optional array of weights associated with each object.  All objects
        are assumed to have equal weight when this is None.
    data : array or None
        Optional array of scalar values associated with each object. The
        resulting plot shows the mean data value per bin when data is
        specified.  Otherwise, the plot shows counts per unit sky area.
    plot_type : str
        Must be either 'grid' or 'healpix', and selects whether data is
        binned in healpix pixels or in (sin(DEC), RA).
    max_bin_area : float
        The bin size will be chosen automatically to be as close as
        possible to this value but not exceeding it.
    clip_lo : float or str
        Clipping is applied to the plot data calculated as counts / area
        or the mean data value per bin. See :func:`prepare_data` for
        details.
    clip_hi : float or str
        Clipping is applied to the plot data calculated as counts / area
        or the mean data value per bin. See :func:`prepare_data` for
        details.
    verbose : bool
        Print information about the automatic bin size calculation.
    cmap : colormap name or object
        Matplotlib colormap to use for mapping data values to colors.
    colorbar : bool
        Draw a colorbar below the map when True.
    label : str or None
        Label to display under the colorbar.  Ignored unless colorbar is True.
    basemap : Basemap object or None
        Use the specified basemap or create a default basemap using
        :func:`init_sky` when None.
    Returns
    -------
    basemap
        The basemap used for the plot, which will match the input basemap
        provided, or be a newly created basemap if None was provided.
    """
    ra = np.asarray(ra).reshape(-1)
    dec = np.asarray(dec).reshape(-1)
    if len(ra) != len(dec):
        raise ValueError('Arrays ra,dec must have same size.')

    plot_types = (
        'grid',
        'healpix',
    )
    if plot_type not in plot_types:
        raise ValueError('Invalid plot_type, should be one of {0}.'.format(
            ', '.join(plot_types)))

    if data is not None and weights is None:
        weights = np.ones_like(data)

    if plot_type == 'grid':
        # Convert the maximum pixel area to steradians.
        max_bin_area = max_bin_area * (np.pi / 180.)**2

        # Pick the number of bins in cos(DEC) and RA to use.
        n_cos_dec = int(np.ceil(2 / np.sqrt(max_bin_area)))
        n_ra = int(np.ceil(4 * np.pi / max_bin_area / n_cos_dec))
        # Calculate the actual pixel area in sq. degrees.
        bin_area = 360**2 / np.pi / (n_cos_dec * n_ra)
        if verbose:
            print(
                'Using {0} x {1} grid in cos(DEC) x RA'.format(
                    n_cos_dec, n_ra),
                'with pixel area {:.3f} sq.deg.'.format(bin_area))

        # Calculate the bin edges in degrees.
        ra_edges = np.linspace(-180., +180., n_ra + 1)
        dec_edges = np.degrees(np.arcsin(np.linspace(-1., +1., n_cos_dec + 1)))

        # Put RA values in the range [-180, 180).
        ra = np.fmod(ra, 360.)
        ra[ra >= 180.] -= 360.

        # Histogram the input coordinates.
        counts, _, _ = np.histogram2d(dec,
                                      ra, [dec_edges, ra_edges],
                                      weights=weights)

        if data is None:
            grid_data = counts / bin_area
        else:
            sums, _, _ = np.histogram2d(dec,
                                        ra, [dec_edges, ra_edges],
                                        weights=weights * data)
            # This ratio might result in some nan (0/0) or inf (1/0) values,
            # but these will be masked by prepare_data().
            settings = np.seterr(all='ignore')
            grid_data = sums / counts
            np.seterr(**settings)

        grid_data = prepare_data(grid_data, clip_lo=clip_lo, clip_hi=clip_hi)

        basemap = plot_grid_map(grid_data, ra_edges, dec_edges, cmap, colorbar,
                                label, basemap)

    elif plot_type == 'healpix':

        import healpy as hp

        for n in range(1, 25):
            nside = 2**n
            bin_area = hp.nside2pixarea(nside, degrees=True)
            if bin_area <= max_bin_area:
                break
        npix = hp.nside2npix(nside)
        nest = False
        if verbose:
            print('Using healpix map with NSIDE={0}'.format(nside),
                  'and pixel area {:.3f} sq.deg.'.format(bin_area))

        pixels = hp.ang2pix(nside, np.radians(90 - dec), np.radians(ra), nest)
        counts = np.bincount(pixels, weights=weights, minlength=npix)
        if data is None:
            grid_data = counts / bin_area
        else:
            sums = np.bincount(pixels, weights=weights * data, minlength=npix)
            grid_data = np.zeros_like(sums, dtype=float)
            nonzero = counts > 0
            grid_data[nonzero] = sums[nonzero] / counts[nonzero]

        grid_data = prepare_data(grid_data, clip_lo=clip_lo, clip_hi=clip_hi)

        basemap = plot_healpix_map(grid_data, nest, cmap, colorbar, label,
                                   basemap)

    return basemap
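# A hedged usage sketch: bin uniformly distributed random points into
# HEALPix cells of at most ~5 sq. deg. Assumes the plotting helpers in this
# module (plot_healpix_map, prepare_data, init_sky) are importable.
import numpy as np
import matplotlib.pyplot as plt

ra = 360. * np.random.uniform(size=5000)
dec = np.degrees(np.arcsin(np.random.uniform(-1., 1., size=5000)))
plot_sky_binned(ra, dec, plot_type='healpix', max_bin_area=5.,
                label='objects / sq. deg.')
plt.show()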
Example #56
0
def makeBundleList(dbFile, runName=None, nside=64, benchmark='design',
                   lonCol='fieldRA', latCol='fieldDec', seeingCol='seeingFwhmGeom'):
    """
    make a list of metricBundle objects to look at the scientific performance
    of an opsim run.
    """

    # List to hold everything we're going to make
    bundleList = []

    # List to hold metrics that shouldn't be saved
    noSaveBundleList = []

    # Connect to the database
    opsimdb = db.OpsimDatabaseV4(dbFile)
    if runName is None:
        runName = os.path.basename(dbFile).replace('_sqlite.db', '')

    # Fetch the proposal ID values from the database
    propids, propTags = opsimdb.fetchPropInfo()

    # Fetch the telescope location from config
    lat, lon, height = opsimdb.fetchLatLonHeight()

    # Add metadata regarding dithering/non-dithered.
    commonname = ''.join([a for a in lonCol if a in latCol])
    if commonname == 'field':
        slicermetadata = ' (non-dithered)'
    else:
        slicermetadata = ' (%s)' % (commonname)

    # Construct a WFD SQL where clause so multiple propIDs can query by WFD:
    wfdWhere = opsimdb.createSQLWhere('WFD', propTags)
    print('#FYI: WFD "where" clause: %s' % (wfdWhere))
    ddWhere = opsimdb.createSQLWhere('DD', propTags)
    print('#FYI: DD "where" clause: %s' % (ddWhere))

    # Set up benchmark values, scaled to length of opsim run.
    runLength = opsimdb.fetchRunLength()
    if benchmark == 'requested':
        # Fetch design values for seeing/skybrightness/single visit depth.
        benchmarkVals = utils.scaleBenchmarks(runLength, benchmark='design')
        # Update nvisits with requested visits from config files.
        benchmarkVals['nvisits'] = opsimdb.fetchRequestedNvisits(propId=propTags['WFD'])
        # Calculate expected coadded depth.
        benchmarkVals['coaddedDepth'] = utils.calcCoaddedDepth(benchmarkVals['nvisits'],
                                                               benchmarkVals['singleVisitDepth'])
    elif (benchmark == 'stretch') or (benchmark == 'design'):
        # Calculate benchmarks for stretch or design.
        benchmarkVals = utils.scaleBenchmarks(runLength, benchmark=benchmark)
        benchmarkVals['coaddedDepth'] = utils.calcCoaddedDepth(benchmarkVals['nvisits'],
                                                               benchmarkVals['singleVisitDepth'])
    else:
        raise ValueError('Could not recognize benchmark value %s, use design, stretch or requested.'
                         % (benchmark))
    # Check that nvisits is not set to zero (for very short run length).
    for f in benchmarkVals['nvisits']:
        if benchmarkVals['nvisits'][f] == 0:
            print('Updating benchmark nvisits value in %s to be nonzero' % (f))
            benchmarkVals['nvisits'][f] = 1

    # Set values for min/max range of nvisits for All/WFD and DD plots. These are somewhat arbitrary.
    nvisitsRange = {}
    nvisitsRange['all'] = {'u': [20, 80], 'g': [50, 150], 'r': [100, 250],
                           'i': [100, 250], 'z': [100, 300], 'y': [100, 300]}
    nvisitsRange['DD'] = {'u': [6000, 10000], 'g': [2500, 5000], 'r': [5000, 8000],
                          'i': [5000, 8000], 'z': [7000, 10000], 'y': [5000, 8000]}
    # Scale these ranges for the runLength.
    scale = runLength / 10.0
    for prop in nvisitsRange:
        for f in nvisitsRange[prop]:
            for i in [0, 1]:
                nvisitsRange[prop][f][i] = int(np.floor(nvisitsRange[prop][f][i] * scale))

    # Filter list, and map of colors (for plots) to filters.
    filters = ['u', 'g', 'r', 'i', 'z', 'y']
    colors = {'u': 'cyan', 'g': 'g', 'r': 'y', 'i': 'r', 'z': 'm', 'y': 'k'}
    filtorder = {'u': 1, 'g': 2, 'r': 3, 'i': 4, 'z': 5, 'y': 6}

    # Easy way to run through all filters.

    # Set up a list of common summary stats
    commonSummary = [metrics.MeanMetric(), metrics.RobustRmsMetric(), metrics.MedianMetric(),
                     metrics.PercentileMetric(metricName='25th%ile', percentile=25),
                     metrics.PercentileMetric(metricName='75th%ile', percentile=75),
                     metrics.MinMetric(), metrics.MaxMetric()]
    allStats = commonSummary

    # Set up some 'group' labels
    reqgroup = 'A: Required SRD metrics'
    depthgroup = 'B: Depth per filter'
    uniformitygroup = 'C: Uniformity'
    airmassgroup = 'D: Airmass distribution'
    seeinggroup = 'E: Seeing distribution'
    transgroup = 'F: Transients'
    sngroup = 'G: SN Ia'
    altAzGroup = 'H: Alt Az'
    rangeGroup = 'I: Range of Dates'
    intergroup = 'J: Inter-Night'
    phaseGroup = 'K: Max Phase Gap'
    NEOGroup = 'L: NEO Detection'

    # Set up an object to track the metricBundles that we want to combine into merged plots.
    mergedHistDict = {}

    # Set the histogram merge function.
    mergeFunc = plots.HealpixHistogram()

    keys = ['NVisits', 'coaddm5', 'NormEffTime', 'Minseeing', 'seeingAboveLimit', 'minAirmass',
            'fracAboveAirmass']

    for key in keys:
        mergedHistDict[key] = plots.PlotBundle(plotFunc=mergeFunc)

    ##
    # Calculate the fO metrics for all proposals and WFD only.
    order = 0
    for prop in ('All prop', 'WFD only'):
        if prop == 'All prop':
            metadata = 'All Visits' + slicermetadata
            sqlconstraint = ''
        if prop == 'WFD only':
            metadata = 'WFD only' + slicermetadata
            sqlconstraint = '%s' % (wfdWhere)
        # Configure the count metric which is what is used for f0 slicer.
        m1 = metrics.CountMetric(col='observationStartMJD', metricName='fO')
        plotDict = {'xlabel': 'Number of Visits', 'Asky': benchmarkVals['Area'],
                    'Nvisit': benchmarkVals['nvisitsTotal'], 'xMin': 0, 'xMax': 1500}
        summaryMetrics = [metrics.fOArea(nside=nside, norm=False, metricName='fOArea: Nvisits (#)',
                                         Asky=benchmarkVals['Area'], Nvisit=benchmarkVals['nvisitsTotal']),
                          metrics.fOArea(nside=nside, norm=True, metricName='fOArea: Nvisits/benchmark',
                                         Asky=benchmarkVals['Area'], Nvisit=benchmarkVals['nvisitsTotal']),
                          metrics.fONv(nside=nside, norm=False, metricName='fONv: Area (sqdeg)',
                                       Asky=benchmarkVals['Area'], Nvisit=benchmarkVals['nvisitsTotal']),
                          metrics.fONv(nside=nside, norm=True, metricName='fONv: Area/benchmark',
                                       Asky=benchmarkVals['Area'], Nvisit=benchmarkVals['nvisitsTotal'])]
        caption = 'The FO metric evaluates the overall efficiency of observing. '
        caption += ('fOArea: Nvisits = %.1f sq degrees receive at least this many visits out of %d. '
                    % (benchmarkVals['Area'], benchmarkVals['nvisitsTotal']))
        caption += ('fONv: Area = this many square degrees out of %.1f receive at least %d visits.'
                    % (benchmarkVals['Area'], benchmarkVals['nvisitsTotal']))
        displayDict = {'group': reqgroup, 'subgroup': 'F0', 'displayOrder': order, 'caption': caption}
        order += 1
        slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)

        bundle = metricBundles.MetricBundle(m1, slicer, sqlconstraint, plotDict=plotDict,
                                            displayDict=displayDict, summaryMetrics=summaryMetrics,
                                            plotFuncs=[plots.FOPlot()],
                                            runName=runName, metadata=metadata)
        bundleList.append(bundle)

    ###
    # Calculate the Rapid Revisit Metrics.
    order = 0
    metadata = 'All Visits' + slicermetadata
    sqlconstraint = ''
    dTmin = 40.0  # seconds
    dTmax = 30.0*60. # seconds
    minNvisit = 100
    pixArea = float(hp.nside2pixarea(nside, degrees=True))  # area of one healpixel (sq deg)
    scale = pixArea * hp.nside2npix(nside)  # total sky area, ~41253 sq deg
    cutoff1 = 0.15
    extraStats1 = [metrics.FracBelowMetric(cutoff=cutoff1, scale=scale, metricName='Area (sq deg)')]
    extraStats1.extend(commonSummary)
    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    m1 = metrics.RapidRevisitMetric(metricName='RapidRevisitUniformity',
                                    dTmin=dTmin / 60.0 / 60.0 / 24.0, dTmax=dTmax / 60.0 / 60.0 / 24.0,
                                    minNvisits=minNvisit)

    plotDict = {'xMin': 0, 'xMax': 1}
    summaryStats = extraStats1
    caption = 'Deviation from uniformity for short revisit timescales, between %s and %s seconds, ' % (
        dTmin, dTmax)
    caption += 'for pointings with at least %d visits in this time range. ' % (minNvisit)
    caption += 'Summary statistic "Area" below indicates the area on the sky which has a '
    caption += 'deviation from uniformity of < %.2f.' % (cutoff1)
    displayDict = {'group': reqgroup, 'subgroup': 'Rapid Revisit', 'displayOrder': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(m1, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1

    dTmax = dTmax/60.0 # need time in minutes for Nrevisits metric
    m2 = metrics.NRevisitsMetric(dT=dTmax)
    plotDict = {'xMin': 0.1, 'xMax': 2000, 'logScale': True}
    cutoff2 = 800
    extraStats2 = [metrics.FracAboveMetric(cutoff=cutoff2, scale=scale, metricName='Area (sq deg)')]
    extraStats2.extend(commonSummary)
    caption = 'Number of consecutive visits with return times faster than %.1f minutes, ' % (dTmax)
    caption += 'in any filter, all proposals. '
    caption += 'Summary statistic "Area" below indicates the area on the sky which has more than '
    caption += '%d revisits within this time window.' % (cutoff2)
    summaryStats = extraStats2
    displayDict = {'group': reqgroup, 'subgroup': 'Rapid Revisit', 'displayOrder': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(m2, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    m3 = metrics.NRevisitsMetric(dT=dTmax, normed=True)
    plotDict = {'xMin': 0, 'xMax': 1, 'cbarFormat': '%.1f'}
    cutoff3 = 0.6
    extraStats3 = [metrics.FracAboveMetric(cutoff=cutoff3, scale=scale, metricName='Area (sq deg)')]
    extraStats3.extend(commonSummary)
    summaryStats = extraStats3
    caption = 'Fraction of total visits where consecutive visits have return times faster '
    caption += 'than %.1f minutes, in any filter, all proposals. ' % (dTmax)
    caption += 'Summary statistic "Area" below indicates the area on the sky which has more '
    caption += 'than %d revisits within this time window.' % (cutoff3)
    displayDict = {'group': reqgroup, 'subgroup': 'Rapid Revisit', 'displayOrder': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(m3, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1

    # And add a histogram of the time between quick revisits.
    binMin = 0
    binMax = 120.
    binsize = 3.
    bins_metric = np.arange(binMin / 60.0 / 24.0, (binMax + binsize) / 60. / 24., binsize / 60. / 24.)
    bins_plot = bins_metric * 24.0 * 60.0
    m1 = metrics.TgapsMetric(bins=bins_metric, metricName='dT visits')
    plotDict = {'bins': bins_plot, 'xlabel': 'dT (minutes)'}
    caption = ('Histogram of the time between consecutive revisits (<%.1f minutes), over entire sky.'
               % (binMax))
    displayDict = {'group': reqgroup, 'subgroup': 'Rapid Revisit', 'order': order,
                   'caption': caption}
    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    plotFunc = plots.SummaryHistogram()
    bundle = metricBundles.MetricBundle(m1, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName,
                                        metadata=metadata, plotFuncs=[plotFunc])
    bundleList.append(bundle)
    order += 1

    ##
    # Trigonometric parallax and proper motion @ r=20 and r=24
    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    sqlconstraint = ''
    order = 0
    metric = metrics.ParallaxMetric(metricName='Parallax 20', rmag=20, seeingCol=seeingCol)
    summaryStats = allStats
    plotDict = {'cbarFormat': '%.1f', 'xMin': 0, 'xMax': 3}
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': 'Parallax precision at r=20 (without refraction).'}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxMetric(metricName='Parallax 24', rmag=24, seeingCol=seeingCol)
    plotDict = {'cbarFormat': '%.1f', 'xMin': 0, 'xMax': 10}
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': 'Parallax precision at r=24 (without refraction).'}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxMetric(metricName='Parallax Normed', rmag=24, normalize=True,
                                    seeingCol=seeingCol)
    plotDict = {'xMin': 0.5, 'xMax': 1.0}
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption':
                   'Normalized parallax (normalized to optimum observation cadence, 1=optimal).'}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxCoverageMetric(metricName='Parallax Coverage 20', rmag=20, seeingCol=seeingCol)
    plotDict = {}
    caption = "Parallax factor coverage for an r=20 star (0 is bad, 0.5-1 is good). "
    caption += "One expects the parallax factor coverage to vary because stars on the ecliptic "
    caption += "can be observed when they have no parallax offset while stars at the pole are always "
    caption += "offset by the full parallax offset."""
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxCoverageMetric(metricName='Parallax Coverage 24', rmag=24, seeingCol=seeingCol)
    plotDict = {}
    caption = "Parallax factor coverage for an r=24 star (0 is bad, 0.5-1 is good). "
    caption += "One expects the parallax factor coverage to vary because stars on the ecliptic "
    caption += "can be observed when they have no parallax offset while stars at the pole are always "
    caption += "offset by the full parallax offset."""
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxDcrDegenMetric(metricName='Parallax-DCR degeneracy 20', rmag=20,
                                            seeingCol=seeingCol)
    plotDict = {}
    caption = 'Correlation between parallax offset magnitude and hour angle for an r=20 star'
    caption += ' (0 is good, near -1 or 1 is bad).'
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ParallaxDcrDegenMetric(metricName='Parallax-DCR degeneracy 24', rmag=24,
                                            seeingCol=seeingCol)
    plotDict = {}
    caption = 'Correlation between parallax offset magnitude and hour angle for an r=24 star'
    caption += ' (0 is good, near -1 or 1 is bad).'
    displayDict = {'group': reqgroup, 'subgroup': 'Parallax', 'order': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1

    metric = metrics.ProperMotionMetric(metricName='Proper Motion 20', rmag=20, seeingCol=seeingCol)

    summaryStats = allStats
    plotDict = {'xMin': 0, 'xMax': 3}
    displayDict = {'group': reqgroup, 'subgroup': 'Proper Motion', 'order': order,
                   'caption': 'Proper Motion precision at r=20.'}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ProperMotionMetric(rmag=24, metricName='Proper Motion 24', seeingCol=seeingCol)
    summaryStats = allStats
    plotDict = {'xMin': 0, 'xMax': 10}
    displayDict = {'group': reqgroup, 'subgroup': 'Proper Motion', 'order': order,
                   'caption': 'Proper Motion precision at r=24.'}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1
    metric = metrics.ProperMotionMetric(rmag=24, normalize=True, metricName='Proper Motion Normed',
                                        seeingCol=seeingCol)
    plotDict = {'xMin': 0.2, 'xMax': 0.7}
    caption = 'Normalized proper motion at r=24 '
    caption += '(normalized to the optimum observation cadence, i.e. visits at the survey start/end; 1=optimal).'
    displayDict = {'group': reqgroup, 'subgroup': 'Proper Motion', 'order': order,
                   'caption': caption}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, summaryMetrics=summaryStats,
                                        runName=runName, metadata=metadata)
    bundleList.append(bundle)
    order += 1

    ##
    # Calculate the time uniformity in each filter, for each year.
    order = 0

    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    plotFuncs = [plots.TwoDMap()]
    step = 0.5
    bins = np.arange(0, 365.25 * 10 + 40, 40) - step
    metric = metrics.AccumulateUniformityMetric(bins=bins)
    plotDict = {'xlabel': 'Night (days)',
                'xextent': [bins.min() + step, bins.max() + step],
                'cbarTitle': 'Uniformity'}
    for f in filters:
        sqlconstraint = 'filter = "%s"' % (f)
        caption = 'Deviation from uniformity in %s band ' % f
        caption += '(0=perfectly uniform, 1=perfectly nonuniform). '
        caption += 'Northern healpixels are at the top of the image.'
        displayDict = {'group': uniformitygroup, 'subgroup': 'per night',
                       'order': filtorder[f], 'caption': caption}
        metadata = '%s band' % (f) + slicermetadata
        bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                            displayDict=displayDict, runName=runName, metadata=metadata,
                                            plotFuncs=plotFuncs)
        noSaveBundleList.append(bundle)

    ##
    # Depth metrics.
    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    for f in filters:
        propCaption = '%s band, all proposals %s' % (f, slicermetadata)
        sqlconstraint = 'filter = "%s"' % (f)
        metadata = '%s band' % (f) + slicermetadata
        # Number of visits.
        metric = metrics.CountMetric(col='observationStartMJD', metricName='NVisits')
        plotDict = {'xlabel': 'Number of visits',
                    'xMin': nvisitsRange['all'][f][0],
                    'xMax': nvisitsRange['all'][f][1],
                    'colorMin': 1,
                    'colorMax': nvisitsRange['all'][f][1],
                    'binsize': 5,
                    'logScale': True, 'nTicks': 4}
        summaryStats = allStats
        displayDict = {'group': depthgroup, 'subgroup': 'Nvisits', 'order': filtorder[f],
                       'caption': 'Number of visits in filter %s, %s.' % (f, propCaption)}
        histMerge = {'color': colors[f], 'label': '%s' % (f),
                     'binsize': 5,
                     'xMin': nvisitsRange['all'][f][0], 'xMax': nvisitsRange['all'][f][1],
                     'legendloc': 'upper right'}
        bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                            displayDict=displayDict, runName=runName, metadata=metadata,
                                            summaryMetrics=summaryStats)
        mergedHistDict['NVisits'].addBundle(bundle, plotDict=histMerge)
        bundleList.append(bundle)
        # Coadded depth.
        metric = metrics.Coaddm5Metric()
        plotDict = {'zp': benchmarkVals['coaddedDepth'][f], 'xMin': -0.8, 'xMax': 0.8,
                    'xlabel': 'coadded m5 - %.1f' % benchmarkVals['coaddedDepth'][f]}
        summaryStats = allStats
        histMerge = {'legendloc': 'upper right', 'color': colors[f], 'label': '%s' % f, 'binsize': .02,
                     'xlabel': 'coadded m5 - benchmark value'}
        caption = ('Coadded depth in filter %s, with %s value subtracted (%.1f), %s. '
                   % (f, benchmark, benchmarkVals['coaddedDepth'][f], propCaption))
        caption += 'More positive numbers indicate fainter limiting magnitudes.'
        displayDict = {'group': depthgroup, 'subgroup': 'Coadded Depth',
                       'order': filtorder[f], 'caption': caption}
        bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                            displayDict=displayDict, runName=runName, metadata=metadata,
                                            summaryMetrics=summaryStats)
        mergedHistDict['coaddm5'].addBundle(bundle, plotDict=histMerge)
        bundleList.append(bundle)
        # Effective time.
        metric = metrics.TeffMetric(metricName='Normalized Effective Time', normed=True,
                                    fiducialDepth=benchmarkVals['singleVisitDepth'])
        plotDict = {'xMin': 0.1, 'xMax': 1.1}
        summaryStats = allStats
        histMerge = {'legendloc': 'upper right', 'color': colors[f], 'label': '%s' % f, 'binsize': 0.02}
        caption = ('"Time Effective" in filter %s, calculated with fiducial single-visit depth of %s mag. '
                   % (f, benchmarkVals['singleVisitDepth'][f]))
        caption += 'Normalized by the fiducial time effective, if every observation was at '
        caption += 'the fiducial depth.'
        displayDict = {'group': depthgroup, 'subgroup': 'Time Eff.',
                       'order': filtorder[f], 'caption': caption}
        bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                            displayDict=displayDict, runName=runName, metadata=metadata,
                                            summaryMetrics=summaryStats)
        mergedHistDict['NormEffTime'].addBundle(bundle, plotDict=histMerge)
        bundleList.append(bundle)

    # Put in a z=0.5 Type Ia SN, based on Cambridge 2015 workshop notebook.
    # Check for 1) detection in any band, 2) detection on the rise in any band,
    # 3) good characterization
    peaks = {'uPeak': 25.9, 'gPeak': 23.6, 'rPeak': 22.6, 'iPeak': 22.7, 'zPeak': 22.7, 'yPeak': 22.8}
    peakTime = 15.
    transDuration = peakTime + 30.  # Days
    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.0,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength,
                                     metricName='SNDetection', **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are detected in any filter'
    displayDict = {'group': transgroup, 'subgroup': 'Detected', 'caption': caption}
    sqlconstraint = ''
    metadata = '' + slicermetadata
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName, metadata=metadata)
    bundleList.append(bundle)

    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.0,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength,
                                     nPrePeak=1, metricName='SNAlert', **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are detected pre-peak in any filter'
    displayDict = {'group': transgroup, 'subgroup': 'Detected on the rise', 'caption': caption}
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName, metadata=metadata)
    bundleList.append(bundle)

    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength, metricName='SNLots',
                                     nFilters=3, nPrePeak=3, nPerLC=2, **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are observed 6 times, 3 pre-peak, '
    caption += '3 post-peak, with observations in 3 filters'
    displayDict = {'group': transgroup, 'subgroup': 'Well observed', 'caption': caption}
    sqlconstraint = 'filter="r" or filter="g" or filter="i" or filter="z" '
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName, metadata=metadata)
    bundleList.append(bundle)

    # Good seeing in r/i band metrics, including in first/second years.
    order = 0
    for tcolor, tlabel, timespan in zip(['k', 'g', 'r'], ['10 years', '1 year', '2 years'],
                                        ['', ' and night<=365', ' and night<=730']):
        order += 1
        for f in (['r', 'i']):
            sqlconstraint = 'filter = "%s" %s' % (f, timespan)
            propCaption = '%s band, all proposals %s, over %s.' % (f, slicermetadata, tlabel)
            metadata = '%s band, %s' % (f, tlabel) + slicermetadata
            seeing_limit = 0.7
            airmass_limit = 1.2
            metric = metrics.MinMetric(col=seeingCol)
            summaryStats = allStats
            plotDict = {'xMin': 0.35, 'xMax': 1.5, 'color': tcolor}
            displayDict = {'group': seeinggroup, 'subgroup': 'Best Seeing',
                           'order': filtorder[f] * 100 + order,
                           'caption': 'Minimum FWHMgeom values in %s.' % (propCaption)}
            histMerge = {'label': '%s %s' % (f, tlabel), 'color': tcolor,
                         'binsize': 0.03, 'xMin': 0.35, 'xMax': 1.5, 'legendloc': 'upper right'}
            bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                                displayDict=displayDict, runName=runName, metadata=metadata,
                                                summaryMetrics=summaryStats)
            mergedHistDict['Minseeing'].addBundle(bundle, plotDict=histMerge)
            bundleList.append(bundle)

            metric = metrics.FracAboveMetric(col=seeingCol, cutoff=seeing_limit)
            summaryStats = allStats
            plotDict = {'xMin': 0, 'xMax': 1.1, 'color': tcolor}
            displayDict = {'group': seeinggroup, 'subgroup': 'Good seeing fraction',
                           'order': filtorder[f] * 100 + order,
                           'caption': 'Fraction of total images with FWHMgeom worse than %.1f, in %s'
                           % (seeing_limit, propCaption)}
            histMerge = {'color': tcolor, 'label': '%s %s' % (f, tlabel),
                         'binsize': 0.05, 'legendloc': 'upper right'}
            bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                                displayDict=displayDict, runName=runName, metadata=metadata,
                                                summaryMetrics=summaryStats)
            mergedHistDict['seeingAboveLimit'].addBundle(bundle, plotDict=histMerge)
            bundleList.append(bundle)

            metric = metrics.MinMetric(col='airmass')
            plotDict = {'xMin': 1, 'xMax': 1.5, 'color': tcolor}
            summaryStats = allStats
            displayDict = {'group': airmassgroup, 'subgroup': 'Best Airmass',
                           'order': filtorder[f] * 100 + order, 'caption':
                           'Minimum airmass in %s.' % (propCaption)}
            histMerge = {'color': tcolor, 'label': '%s %s' % (f, tlabel),
                         'binsize': 0.03, 'legendloc': 'upper right'}
            bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                                displayDict=displayDict, runName=runName, metadata=metadata,
                                                summaryMetrics=summaryStats)
            mergedHistDict['minAirmass'].addBundle(bundle, plotDict=histMerge)
            bundleList.append(bundle)

            metric = metrics.FracAboveMetric(col='airmass', cutoff=airmass_limit)
            plotDict = {'xMin': 0, 'xMax': 1, 'color': tcolor}
            summaryStats = allStats
            displayDict = {'group': airmassgroup, 'subgroup': 'Low airmass fraction',
                           'order': filtorder[f] * 100 + order, 'caption':
                           'Fraction of total images with airmass higher than %.2f, in %s'
                           % (airmass_limit, propCaption)}
            histMerge = {'color': tcolor, 'label': '%s %s' % (
                f, tlabel), 'binsize': 0.05, 'legendloc': 'upper right'}

            bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                                displayDict=displayDict, runName=runName, metadata=metadata,
                                                summaryMetrics=summaryStats)
            mergedHistDict['fracAboveAirmass'].addBundle(bundle, plotDict=histMerge)
            bundleList.append(bundle)


    # SNe metrics from UK workshop.
    peaks = {'uPeak': 25.9, 'gPeak': 23.6, 'rPeak': 22.6, 'iPeak': 22.7, 'zPeak': 22.7, 'yPeak': 22.8}
    peakTime = 15.
    transDuration = peakTime + 30.  # Days
    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.0,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength,
                                     metricName='SNDetection', **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are detected at any point in their light curve in any filter'
    displayDict = {'group': sngroup, 'subgroup': 'Detected', 'caption': caption}
    sqlconstraint = ''
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName)
    bundleList.append(bundle)

    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.0,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength,
                                     nPrePeak=1, metricName='SNAlert', **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are detected pre-peak in any filter'
    displayDict = {'group': sngroup, 'subgroup': 'Detected on the rise', 'caption': caption}
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName)
    bundleList.append(bundle)

    metric = metrics.TransientMetric(riseSlope=-2. / peakTime, declineSlope=1.4 / 30.,
                                     transDuration=transDuration, peakTime=peakTime,
                                     surveyDuration=runLength, metricName='SNLots',
                                     nFilters=3, nPrePeak=3, nPerLC=2, **peaks)
    caption = 'Fraction of z=0.5 type Ia SN that are observed 6 times, 3 pre-peak, '
    caption += '3 post-peak, with observations in 3 filters'
    displayDict = {'group': sngroup, 'subgroup': 'Well observed', 'caption': caption}
    sqlconstraint = 'filter="r" or filter="g" or filter="i" or filter="z" '
    plotDict = {}
    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName)
    bundleList.append(bundle)

    propIDOrderDict = {}
    orderVal = 100
    for propID in propids:
        propIDOrderDict[propID] = orderVal
        orderVal += 100

    # Full range of dates:
    metric = metrics.FullRangeMetric(col='observationStartMJD')
    plotFuncs = [plots.HealpixSkyMap(), plots.HealpixHistogram()]
    caption = 'Time span of survey.'
    sqlconstraint = ''
    plotDict = {}
    displayDict = {'group': rangeGroup, 'caption': caption}

    bundle = metricBundles.MetricBundle(metric, slicer, sqlconstraint, plotDict=plotDict,
                                        displayDict=displayDict, runName=runName)
    bundleList.append(bundle)
    for f in filters:
        for propid in propids:
            displayDict = {'group': rangeGroup, 'subgroup': propids[propid], 'caption': caption,
                           'order': filtorder[f]}
            md = '%s, %s' % (f, propids[propid])
            sql = 'filter="%s" and proposalId=%i' % (f, propid)
            bundle = metricBundles.MetricBundle(metric, slicer, sql, plotDict=plotDict,
                                                metadata=md, plotFuncs=plotFuncs,
                                                displayDict=displayDict, runName=runName)
            bundleList.append(bundle)

    # Alt az plots
    slicer = slicers.HealpixSlicer(nside=64, latCol='zenithDistance', lonCol='azimuth', useCache=False)
    metric = metrics.CountMetric('observationStartMJD', metricName='Nvisits as function of Alt/Az')
    plotDict = {}
    plotFuncs = [plots.LambertSkyMap()]
    displayDict = {'group': altAzGroup, 'caption': 'Alt Az pointing distribution'}
    for f in filters:
        for propid in propids:
            displayDict = {'group': altAzGroup, 'subgroup': propids[propid],
                           'caption': 'Alt Az pointing distribution',
                           'order': filtorder[f]}
            md = '%s, %s' % (f, propids[propid])
            sql = 'filter="%s" and proposalId=%i' % (f, propid)
            bundle = metricBundles.MetricBundle(metric, slicer, sql, plotDict=plotDict,
                                                plotFuncs=plotFuncs, metadata=md,
                                                displayDict=displayDict, runName=runName)
            bundleList.append(bundle)

    sql = ''
    md = 'all observations'
    displayDict = {'group': altAzGroup, 'subgroup': 'All Observations',
                   'caption': 'Alt Az pointing distribution'}
    bundle = metricBundles.MetricBundle(metric, slicer, sql, plotDict=plotDict,
                                        plotFuncs=plotFuncs, metadata=md,
                                        displayDict=displayDict, runName=runName)
    bundleList.append(bundle)

    # Median inter-night gap (each and all filters)
    slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
    metric = metrics.InterNightGapsMetric(metricName='Median Inter-Night Gap')
    sqls = ['filter = "%s"' % f for f in filters]
    orders = [filtorder[f] for f in filters]
    orders.append(0)
    sqls.append('')
    for sql, order in zip(sqls, orders):
        displayDict = {'group': intergroup, 'subgroup': 'Median Gap', 'caption': 'Median gap between nights',
                       'order': order}
        bundle = metricBundles.MetricBundle(metric, slicer, sql, displayDict=displayDict, runName=runName)
        bundleList.append(bundle)

    # Max inter-night gap in r and all bands
    dslicer = slicers.HealpixSlicer(nside=nside, lonCol='ditheredRA', latCol='ditheredDec')
    metric = metrics.InterNightGapsMetric(metricName='Max Inter-Night Gap', reduceFunc=np.max)

    plotDict = {'percentileClip': 95.}
    for sql, order in zip(sqls, orders):
        displayDict = {'group': intergroup, 'subgroup': 'Max Gap', 'caption': 'Max gap between nights',
                       'order': order}
        bundle = metricBundles.MetricBundle(metric, dslicer, sql, displayDict=displayDict,
                                            plotDict=plotDict, runName=runName)
        bundleList.append(bundle)

    # largest phase gap for periods
    periods = [0.1, 1.0, 10., 100.]
    sqls = {'u': 'filter = "u"', 'r': 'filter="r"',
            'g,r,i,z': 'filter="g" or filter="r" or filter="i" or filter="z"',
            'all': ''}

    for sql in sqls:
        for period in periods:
            displayDict = {'group': phaseGroup,
                           'subgroup': 'period=%.2f days, filter=%s' % (period, sql),
                           'caption': 'Maximum phase gaps'}
            metric = metrics.PhaseGapMetric(nPeriods=1, periodMin=period, periodMax=period,
                                            metricName='PhaseGap, %.1f' % period)
            bundle = metricBundles.MetricBundle(metric, slicer, sqls[sql],
                                                displayDict=displayDict, runName=runName)
            bundleList.append(bundle)

    # NEO XY plots
    slicer = slicers.UniSlicer()
    metric = metrics.PassMetric(metricName='NEODistances')
    stacker = stackers.NEODistStacker()
    stacker2 = stackers.EclipticStacker()
    for f in filters:
        plotFunc = plots.NeoDistancePlotter(eclipMax=10., eclipMin=-10.)
        caption = 'Observations within 10 degrees of the ecliptic. Distance at which an H=22 NEO would be detected.'
        displayDict = {'group': NEOGroup, 'subgroup': 'xy', 'order': filtorder[f],
                       'caption': caption}
        plotDict = {}
        sqlconstraint = 'filter = "%s"' % (f)
        bundle = metricBundles.MetricBundle(metric, slicer,
                                            sqlconstraint, displayDict=displayDict,
                                            stackerList=[stacker, stacker2],
                                            plotDict=plotDict,
                                            plotFuncs=[plotFunc])
        noSaveBundleList.append(bundle)

    # Solar elongation
    sqls = ['filter = "%s"' % f for f in filters]
    orders = [filtorder[f] for f in filters]
    sqls.append('')
    orders.append(0)
    for sql, order in zip(sqls, orders):
        plotFuncs = [plots.HealpixSkyMap(), plots.HealpixHistogram()]
        displayDict = {'group': NEOGroup, 'subgroup': 'Solar Elongation',
                       'caption': 'Median solar elongation in degrees', 'order': order}
        metric = metrics.MedianMetric('solarElong')
        slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
        bundle = metricBundles.MetricBundle(metric, slicer, sql, displayDict=displayDict,
                                            plotFuncs=plotFuncs)
        bundleList.append(bundle)

        plotFuncs = [plots.HealpixSkyMap(), plots.HealpixHistogram()]
        displayDict = {'group': NEOGroup, 'subgroup': 'Solar Elongation',
                       'caption': 'Minimum solar elongation in degrees', 'order': order}
        metric = metrics.MinMetric('solarElong')
        slicer = slicers.HealpixSlicer(nside=nside, lonCol=lonCol, latCol=latCol)
        bundle = metricBundles.MetricBundle(metric, slicer, sql, displayDict=displayDict,
                                            plotFuncs=plotFuncs)
        bundleList.append(bundle)

    return (metricBundles.makeBundlesDictFromList(bundleList), mergedHistDict,
            metricBundles.makeBundlesDictFromList(noSaveBundleList))
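A hedged usage sketch for makeBundleList (the database path below is a
placeholder, not from the original source):

# Hypothetical call; 'my_run_sqlite.db' stands in for a real opsim database.
bundleDict, mergedHistDict, noSaveDict = makeBundleList('my_run_sqlite.db',
                                                        nside=64, benchmark='design')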
Example #57
fig = plt.figure(figsize=(opts.figure_width, opts.figure_height), frameon=False)
ax = plt.subplot(111,
    projection='mollweide' if opts.geo else 'astro mollweide')
ax.cla()
ax.grid()

skymap, metadata = fits.read_sky_map(infilename, nest=None)
nside = hp.npix2nside(len(skymap))

if opts.geo:
    dlon = -lal.GreenwichMeanSiderealTime(lal.LIGOTimeGPS(metadata['gps_time'])) % (2*np.pi)
else:
    dlon = 0

# Convert sky map from probability to probability per square degree.
probperdeg2 = skymap / hp.nside2pixarea(nside, degrees=True)

# Plot sky map.
vmax = probperdeg2.max()
plot.healpix_heatmap(
    probperdeg2, dlon=dlon, nest=metadata['nest'],
    vmin=0., vmax=vmax, cmap=plt.get_cmap(opts.colormap))

if opts.colorbar:
    # Plot colorbar.
    cb = plot.colorbar()

    # Set colorbar label.
    cb.set_label(r'prob. per deg$^2$')

# Add contours.
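The conversion above turns per-pixel probability into probability per square
degree by dividing by the pixel area. A quick sanity check of the
normalization, sketched for a flat map (all names below are illustrative):

import numpy as np
import healpy as hp

nside = 64
npix = hp.nside2npix(nside)
flat_map = np.full(npix, 1.0 / npix)  # probabilities sum to 1
probperdeg2 = flat_map / hp.nside2pixarea(nside, degrees=True)
# Multiplying back by the pixel area and summing recovers ~1.0:
total = probperdeg2.sum() * hp.nside2pixarea(nside, degrees=True)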
Example #58
 def characteristic_density_local(self, iso_sel, x_peak, y_peak, angsep_peak):
     """
     Compute the local characteristic density of a region
     """
 
     #characteristic_density = self.characteristic_density(iso_sel)
     characteristic_density = self.density
 
     x, y = self.proj.sphereToImage(self.data[self.survey.catalog['basis_1']][iso_sel], self.data[self.survey.catalog['basis_2']][iso_sel]) # Trimmed magnitude range for hotspot finding
     #x_full, y_full = proj.sphereToImage(data[basis_1], data[basis_2]) # If we want to use full magnitude range for significance evaluation
 
     # If fracdet map is available, use that information to either compute local density,
     # or in regions of spotty coverage, use the typical density of the region
     if self.fracdet is not None:
         # The following is copied from how it's used in compute_char_density
         fracdet_zero = np.tile(0., len(self.fracdet))
         cut = (self.fracdet != hp.UNSEEN)
         fracdet_zero[cut] = self.fracdet[cut]
 
         nside_fracdet = hp.npix2nside(len(self.fracdet))
         
         subpix_region_array = []
         for pix in np.unique(hp.ang2pix(self.nside,
                                         self.data[self.survey.catalog['basis_1']][iso_sel],
                                         self.data[self.survey.catalog['basis_2']][iso_sel],
                                         lonlat=True)):
             subpix_region_array.append(subpixel(pix, self.nside, nside_fracdet))
         subpix_region_array = np.concatenate(subpix_region_array)
 
         # Compute mean fracdet in the region so that this is available as a correction factor
         cut = (self.fracdet[subpix_region_array] != hp.UNSEEN)
         mean_fracdet = np.mean(self.fracdet[subpix_region_array[cut]])
 
         subpix_region_array = subpix_region_array[self.fracdet[subpix_region_array] > 0.99]
         subpix = hp.ang2pix(nside_fracdet, 
                             self.data[self.survey.catalog['basis_1']][cut_magnitude_threshold][iso_sel], 
                             self.data[self.survey.catalog['basis_2']][cut_magnitude_threshold][iso_sel],
                             lonlat=True)
 
         # This is where the local computation begins
         ra_peak, dec_peak = self.proj.imageToSphere(x_peak, y_peak)
         subpix_all = hp.query_disc(nside_fracdet, hp.ang2vec(ra_peak, dec_peak, lonlat=True), np.radians(0.5))
         subpix_inner = hp.query_disc(nside_fracdet, hp.ang2vec(ra_peak, dec_peak, lonlat=True), np.radians(0.3))
         subpix_annulus = subpix_all[~np.in1d(subpix_all, subpix_inner)]
         mean_fracdet = np.mean(fracdet_zero[subpix_annulus])
         print('mean_fracdet {}'.format(mean_fracdet))
         if mean_fracdet < 0.5:
             characteristic_density_local = characteristic_density
             print('characteristic_density_local baseline {}'.format(characteristic_density_local))
         else:
             # Check pixels in annulus with complete coverage
             subpix_annulus_region = np.intersect1d(subpix_region_array, subpix_annulus)
             print('fraction of pixels with complete coverage: {}'.format(float(len(subpix_annulus_region)) / len(subpix_annulus)))
             if (float(len(subpix_annulus_region)) / len(subpix_annulus)) < 0.25:
                 characteristic_density_local = characteristic_density
                 print('characteristic_density_local spotty {}'.format(characteristic_density_local))
             else:
                 characteristic_density_local = float(np.sum(np.in1d(subpix, subpix_annulus_region))) \
                                                / (hp.nside2pixarea(nside_fracdet, degrees=True) * len(subpix_annulus_region)) # deg^-2
                 print('characteristic_density_local cleaned up {}'.format(characteristic_density_local))
     else:
         # Compute the local characteristic density
         area_field = np.pi * (0.5**2 - 0.3**2)
         n_field = np.sum((angsep_peak > 0.3) & (angsep_peak < 0.5))
         characteristic_density_local = n_field / area_field
 
         # If not good azimuthal coverage, revert
         cut_annulus = (angsep_peak > 0.3) & (angsep_peak < 0.5) 
         #phi = np.degrees(np.arctan2(y_full[cut_annulus] - y_peak, x_full[cut_annulus] - x_peak)) # Use full magnitude range, NOT TESTED!!!
         phi = np.degrees(np.arctan2(y[cut_annulus] - y_peak, x[cut_annulus] - x_peak)) # Impose magnitude threshold
         h = np.histogram(phi, bins=np.linspace(-180., 180., 13))[0]
         if np.sum(h > 0) < 10 or np.sum(h > 0.5 * np.median(h)) < 10:
             #angsep_peak = np.sqrt((x - x_peak)**2 + (y - y_peak)**2)
             characteristic_density_local = characteristic_density
 
     print('Characteristic density local = {:0.1f} deg^-2 = {:0.3f} arcmin^-2'.format(characteristic_density_local, characteristic_density_local / 60.**2))
 
     return(characteristic_density_local)
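The fallback branch above estimates the local characteristic density from
star counts in a 0.3-0.5 deg annulus around the peak. A standalone sketch of
that calculation with toy data (the separation values are illustrative):

import numpy as np

angsep_peak = np.array([0.12, 0.35, 0.41, 0.47, 0.62])  # separations (deg)
area_field = np.pi * (0.5**2 - 0.3**2)                  # annulus area (deg^2)
n_field = np.sum((angsep_peak > 0.3) & (angsep_peak < 0.5))
density_local = n_field / area_field                    # stars per deg^2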
Example #59
    def plot_skymap(self,
                    maxpts=None,
                    trials=5,
                    jobs=1,
                    enable_multiresolution=True,
                    objid=None,
                    instruments=None,
                    geo=False,
                    dpi=600,
                    transparent=False,
                    colorbar=False,
                    contour=[50, 90],
                    annotate=True,
                    cmap='cylon',
                    load_pickle=False):
        """ Generate a fits file and sky map from a result

        Code adapted from ligo.skymap.tool.ligo_skymap_from_samples and
        ligo.skymap.tool.plot_skymap. Note that using this additionally
        requires the installation of ligo.skymap.

        Parameters
        ----------
        maxpts: int
            Maximum number of samples to use, if None all samples are used
        trials: int
            Number of trials at each clustering number
        jobs: int
            Number of multiple threads
        enable_multiresolution: bool
            Generate a multiresolution HEALPix map (default: True)
        objid: str
            Event ID to store in FITS header
        instruments: str
            Name of detectors
        geo: bool
            Plot in geographic coordinates (lat, lon) instead of RA, Dec
        dpi: int
            Resolution of figure in dots per inch
        transparent: bool
            Save image with transparent background
        colorbar: bool
            Show colorbar
        contour: list
            List of contour levels to use
        annotate: bool
            Annotate image with details
        cmap: str
            Name of the colormap to use
        load_pickle: bool, str
            If True, load the cached pickle file (default name), or the
            pickle file given as a path.
        """

        try:
            from astropy.time import Time
            from ligo.skymap import io, version, plot, postprocess, bayestar, kde
            import healpy as hp
        except ImportError as e:
            logger.info("Unable to generate skymap: error {}".format(e))
            return

        check_directory_exists_and_if_not_mkdir(self.outdir)

        logger.info('Reading samples for skymap')
        data = self.posterior

        if maxpts is not None and maxpts < len(data):
            logger.info('Taking random subsample of chain')
            data = data.sample(maxpts)

        default_obj_filename = os.path.join(
            self.outdir, '{}_skypost.obj'.format(self.label))

        if load_pickle is False:
            try:
                pts = data[['ra', 'dec', 'luminosity_distance']].values
                confidence_levels = kde.Clustered2Plus1DSkyKDE
                distance = True
            except KeyError:
                logger.warning(
                    "The results file does not contain luminosity_distance")
                pts = data[['ra', 'dec']].values
                confidence_levels = kde.Clustered2DSkyKDE
                distance = False

            logger.info('Initialising skymap class')
            skypost = confidence_levels(pts, trials=trials, jobs=jobs)
            logger.info('Pickling skymap to {}'.format(default_obj_filename))
            with open(default_obj_filename, 'wb') as out:
                pickle.dump(skypost, out)

        else:
            if isinstance(load_pickle, str):
                obj_filename = load_pickle
            else:
                obj_filename = default_obj_filename
            logger.info('Reading from pickle {}'.format(obj_filename))
            with open(obj_filename, 'rb') as file:
                skypost = pickle.load(file)
            skypost.jobs = jobs
            distance = isinstance(skypost, kde.Clustered2Plus1DSkyKDE)

        logger.info('Making skymap')
        hpmap = skypost.as_healpix()
        if not enable_multiresolution:
            hpmap = bayestar.rasterize(hpmap)

        hpmap.meta.update(io.fits.metadata_for_version_module(version))
        hpmap.meta['creator'] = "bilby"
        hpmap.meta['origin'] = 'LIGO/Virgo'
        hpmap.meta['gps_creation_time'] = Time.now().gps
        hpmap.meta['history'] = ""
        if objid is not None:
            hpmap.meta['objid'] = objid
        if instruments:
            hpmap.meta['instruments'] = instruments
        if distance:
            hpmap.meta['distmean'] = np.mean(data['luminosity_distance'])
            hpmap.meta['diststd'] = np.std(data['luminosity_distance'])

        try:
            time = data['geocent_time']
            hpmap.meta['gps_time'] = time.mean()
        except KeyError:
            logger.warning('Cannot determine the event time from geocent_time')

        fits_filename = os.path.join(self.outdir,
                                     "{}_skymap.fits".format(self.label))
        logger.info('Saving skymap fits-file to {}'.format(fits_filename))
        io.write_sky_map(fits_filename, hpmap, nest=True)

        skymap, metadata = io.fits.read_sky_map(fits_filename, nest=None)
        nside = hp.npix2nside(len(skymap))

        # Convert sky map from probability to probability per square degree.
        deg2perpix = hp.nside2pixarea(nside, degrees=True)
        probperdeg2 = skymap / deg2perpix

        if geo:
            obstime = Time(metadata['gps_time'], format='gps').utc.isot
            ax = plt.axes(projection='geo degrees mollweide', obstime=obstime)
        else:
            ax = plt.axes(projection='astro hours mollweide')
        ax.grid()

        # Plot sky map.
        vmax = probperdeg2.max()
        img = ax.imshow_hpx((probperdeg2, 'ICRS'),
                            nested=metadata['nest'],
                            vmin=0.,
                            vmax=vmax,
                            cmap=cmap)

        # Add colorbar.
        if colorbar:
            cb = plot.colorbar(img)
            cb.set_label(r'prob. per deg$^2$')

        if contour is not None:
            confidence_levels = 100 * postprocess.find_greedy_credible_levels(
                skymap)
            contours = ax.contour_hpx((confidence_levels, 'ICRS'),
                                      nested=metadata['nest'],
                                      colors='k',
                                      linewidths=0.5,
                                      levels=contour)
            fmt = r'%g\%%' if rcParams['text.usetex'] else '%g%%'
            plt.clabel(contours, fmt=fmt, fontsize=6, inline=True)

        # Add continents.
        if geo:
            geojson_filename = os.path.join(os.path.dirname(plot.__file__),
                                            'ne_simplified_coastline.json')
            with open(geojson_filename, 'r') as geojson_file:
                geoms = json.load(geojson_file)['geometries']
            verts = [
                coord for geom in geoms for coord in zip(*geom['coordinates'])
            ]
            plt.plot(*verts,
                     color='0.5',
                     linewidth=0.5,
                     transform=ax.get_transform('world'))

        # Add a white outline to all text to make it stand out from the background.
        plot.outline_text(ax)

        if annotate:
            text = []
            try:
                objid = metadata['objid']
            except KeyError:
                pass
            else:
                text.append('event ID: {}'.format(objid))
            if contour:
                pp = np.round(contour).astype(int)
                ii = np.round(
                    np.searchsorted(np.sort(confidence_levels), contour) *
                    deg2perpix).astype(int)
                for i, p in zip(ii, pp):
                    text.append(u'{:d}% area: {:d} deg$^2$'.format(p, i))
            ax.text(1, 1, '\n'.join(text), transform=ax.transAxes, ha='right')

        filename = os.path.join(self.outdir,
                                "{}_skymap.png".format(self.label))
        logger.info("Generating 2D projected skymap to {}".format(filename))
        safe_save_figure(fig=plt.gcf(), filename=filename, dpi=dpi)
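The annotation above converts greedy credible levels into areas by counting
the pixels inside each contour and multiplying by deg2perpix. A sketch of the
same calculation on a normalized map (credible_area is a hypothetical helper,
not part of the example above):

import numpy as np
import healpy as hp

def credible_area(skymap, cl):
    """Area (sq. deg.) searched greedily to enclose probability fraction cl."""
    nside = hp.npix2nside(len(skymap))
    p = np.sort(skymap)[::-1]  # most probable pixels first
    npix_needed = np.searchsorted(np.cumsum(p), cl) + 1
    return npix_needed * hp.nside2pixarea(nside, degrees=True)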
Example #60
def read_skymap(params, is3D=False, map_struct=None):

    if map_struct is None:
        map_struct = {}

        if params["doDatabase"]:
            models = params["models"]
            localizations_all = models.Localization.query.all()
            localizations = models.Localization.query.filter_by(
                dateobs=params["dateobs"],
                localization_name=params["localization_name"]).all()
            if not localizations:
                raise ValueError("No localization with dateobs=%s" %
                                 params["dateobs"])
            else:
                prob_data = localizations[0].healpix
                prob_data = prob_data / np.sum(prob_data)
                map_struct["prob"] = prob_data

                distmu = localizations[0].distmu
                distsigma = localizations[0].distsigma
                distnorm = localizations[0].distnorm

                if distmu is None:
                    map_struct["distmu"] = None
                    map_struct["distsigma"] = None
                    map_struct["distnorm"] = None
                else:
                    map_struct["distmu"] = np.array(distmu)
                    map_struct["distsigma"] = np.array(distsigma)
                    map_struct["distnorm"] = np.array(distnorm)
                    is3D = True
        else:
            filename = params["skymap"]

            if is3D:
                # LIGO/Virgo 3D localizations ship four HEALPix columns:
                # prob, distmu, distsigma, distnorm.
                healpix_data = hp.read_map(filename,
                                           field=(0, 1, 2, 3),
                                           verbose=False)

                prob_data = healpix_data[0]
                distmu_data = healpix_data[1]
                distsigma_data = healpix_data[2]
                norm_data = healpix_data[3]

                map_struct["distmu"] = distmu_data / params["DScale"]
                map_struct["distsigma"] = distsigma_data / params["DScale"]
                map_struct["prob"] = prob_data
                map_struct["distnorm"] = norm_data

            else:
                prob_data = hp.read_map(filename, field=0, verbose=False)
                prob_data = prob_data / np.sum(prob_data)

                map_struct["prob"] = prob_data

    natural_nside = hp.pixelfunc.get_nside(map_struct["prob"])
    nside = params["nside"]

    print("natural_nside =", natural_nside)
    print("nside =", nside)

    if not is3D:
        # power=-2 rescales by pixel area, so the total probability is kept.
        map_struct["prob"] = hp.ud_grade(map_struct["prob"], nside, power=-2)

    if is3D:
        if natural_nside != nside:
            # Resample the probability and distance layers together so the
            # distance ansatz stays self-consistent across resolutions.
            (map_struct["prob"], map_struct["distmu"],
             map_struct["distsigma"], map_struct["distnorm"]) = ligodist.ud_grade(
                 map_struct["prob"], map_struct["distmu"],
                 map_struct["distsigma"], nside)

        # Downsample to a coarse grid so the per-pixel median-distance loop
        # below stays cheap.
        nside_down = 32
        _, distmu_down, distsigma_down, distnorm_down = ligodist.ud_grade(
            map_struct["prob"], map_struct["distmu"],
            map_struct["distsigma"], nside_down)

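        # Per-pixel conditional distance distribution from the LIGO ansatz,
        # dp/dr ~ r**2 * distnorm * Normal(r; distmu, distsigma); the median
        # is where the cumulative distribution crosses 0.5.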
        r = np.linspace(0, 2000)  # distance grid in Mpc (50 samples)
        map_struct["distmed"] = np.zeros(distmu_down.shape)
        for ipix in range(len(map_struct["distmed"])):
            dp_dr = r**2 * distnorm_down[ipix] * norm(
                distmu_down[ipix], distsigma_down[ipix]).pdf(r)
            dp_dr_norm = np.cumsum(dp_dr / np.sum(dp_dr))
            idx = np.argmin(np.abs(dp_dr_norm - 0.5))
            map_struct["distmed"][ipix] = r[idx]
        map_struct["distmed"] = hp.ud_grade(map_struct["distmu"],
                                            nside,
                                            power=-2)

    # Pixel centers in equatorial coordinates (degrees).
    npix = hp.nside2npix(nside)
    theta, phi = hp.pix2ang(nside, np.arange(npix))
    ra = np.rad2deg(phi)
    dec = np.rad2deg(0.5 * np.pi - theta)

    map_struct["ra"] = ra
    map_struct["dec"] = dec

    sort_idx = np.argsort(map_struct["prob"])[::-1]
    csm = np.empty(len(map_struct["prob"]))
    csm[sort_idx] = np.cumsum(map_struct["prob"][sort_idx])

    map_struct["cumprob"] = csm

    pixarea = hp.nside2pixarea(nside)
    pixarea_deg2 = hp.nside2pixarea(nside, degrees=True)

    map_struct["nside"] = nside
    map_struct["npix"] = npix
    map_struct["pixarea"] = pixarea
    map_struct["pixarea_deg2"] = pixarea_deg2

    return map_struct
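
A hedged usage sketch for read_skymap: the params keys mirror the ones the function reads (doDatabase, skymap, nside, DScale), but the file path and values below are placeholders, not defaults from the source.

params = {
    "doDatabase": False,
    "skymap": "bayestar.fits.gz",  # placeholder path to a LIGO/Virgo FITS map
    "nside": 256,                  # output resolution
    "DScale": 1.0,                 # distance rescaling applied to distmu/distsigma
}
map_struct = read_skymap(params, is3D=True)
print(map_struct["pixarea_deg2"], map_struct["cumprob"].min())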