Example #1
def get_hpmask_subpix_indices(submask_nside, submask_hpix, submask_border, nside_mask, hpix):
    """
    """

    nside_cutref = np.clip(submask_nside * 4, 256, nside_mask)

    # Find out which cutref pixels are inside the main pixel
    theta, phi = hp.pix2ang(nside_cutref, np.arange(hp.nside2npix(nside_cutref)))
    ipring_coarse = hp.ang2pix(submask_nside, theta, phi)
    inhpix, = np.where(ipring_coarse == submask_hpix)

    # If there is a border, we need to find the boundary pixels
    if submask_border > 0.0:
        boundaries = hp.boundaries(submask_nside, submask_hpix, step=nside_cutref // submask_nside)
        # These are all the pixels that touch the boundary
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(nside_cutref, boundaries[:, i],
                                   np.radians(submask_border), inclusive=True, fact=8)
            inhpix = np.append(inhpix, pixint)
            # Need to uniqify here because of overlapping pixels
            inhpix = np.unique(inhpix)

    # And now choose just those depthmap pixels that are in the inhpix region
    theta, phi = hp.pix2ang(nside_mask, hpix)
    ipring = hp.ang2pix(nside_cutref, theta, phi)

    _, use = esutil.numpy_util.match(inhpix, ipring)

    return use
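A minimal usage sketch for the function above, assuming numpy, healpy, and esutil are importable as in the snippet; the nside values, pixel number, border radius, and hpix array are illustrative.

import numpy as np
import healpy as hp

nside_mask = 4096
hpix = np.arange(hp.nside2npix(nside_mask))[::1000]  # toy set of ring-ordered mask pixels
# Indices into hpix falling inside submask pixel 100 (nside=8), padded by a 0.5 deg border
use = get_hpmask_subpix_indices(submask_nside=8, submask_hpix=100, submask_border=0.5,
                                nside_mask=nside_mask, hpix=hpix)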
Example #2
def scrambler(data, nSide, nBGResample, method):

    # Setup maps
    temp_maps = {}
    nPixels = hp.nside2npix(nSide)
    for key in ['Local', 'Data', 'BG']:
        temp_maps[key] = zeros(nPixels)

    # Begin looping through data
    mjd = data['mjd']
    theta = data['ShowerPlane_zenith']
    phi = data['ShowerPlane_azimuth']

    # Fill local and data maps
    local_pix = hp.ang2pix(nSide, theta, phi)
    dec, ra = array(map(l2e, mjd, theta, phi)).T
    data_pix = hp.ang2pix(nSide, dec, ra)
    for i in range(len(local_pix)):
        temp_maps['Local'][local_pix[i]] += 1.0
        temp_maps['Data'][data_pix[i]] += 1.0

    # Function for getting nBGResample background counts with random times
    def getBG(th, ph):
        rndMJD = random.choice(mjd, nBGResample)
        dec, ra = array([l2e(mjd_one, th, ph) for mjd_one in rndMJD]).T
        pixelID = hp.ang2pix(nSide, dec, ra)
        return pixelID

    # Fill background map
    bg_pix = array(map(getBG, theta, phi)).flatten()
    for pix in bg_pix:
        temp_maps['BG'][pix] += 1.0

    return temp_maps
Example #3
    def __init__(self, pts, ipix=None, nside=None):
        self._ipix = ipix
        self._nside = nside
        self._pts = pts
        
        if len(pts) <= 1 or (nside is not None and nside >= 1 << 29):
            # Stop here.  Either there is only one point left, or we
            # are in danger of exceeding the healpy limit on nside
            self._sub_grids = None
        elif ipix is None or nside is None:
            nside = 1
            sub_ipts = [hp.ang2pix(1, np.pi/2.0-pt[1], pt[0], nest=True) for pt in pts]
            sub_grids = []
            for i in range(12):
                subp = [pt for pt, ipt in zip(pts, sub_ipts) if ipt == i]
                sub_grids.append(_Hp_adaptive_grid_pixel(subp, i, 1))
                
            self._sub_grids = sub_grids
        else:
            sub_ipix = [4*ipix + i for i in range(4)]
            sub_nside = 2*nside
            sub_ipts = [hp.ang2pix(sub_nside, np.pi/2.0 - pt[1], pt[0], nest=True) for pt in pts]

            sub_grids = []
            for sip in sub_ipix:
                subp = [pt for pt, ipt in zip(pts, sub_ipts) if ipt == sip]
                sub_grids.append(_Hp_adaptive_grid_pixel(subp, sip, sub_nside))

            self._sub_grids = sub_grids
Example #4
def Pixel_separator(Halo_data,Input_Para,General_Prop):
    import healpy as hp
    from XCat_Objects import DtoR, Halo_Object
    from XCat_Utilities import Read_Integer_Input

    nside = 1
    while ( Input_Para.SkySqrDeg < 64800.0/float(hp.nside2npix(2*nside)) ):
         nside = 2*nside

    if (General_Prop.Hala_data_existence):
       n = Halo_data.number_of_halos

       kp= Read_Integer_Input("We have %i halos in this slice which one you want to exctract ? "%n)
       kp= kp - 1
       while(kp > (n-1) or kp < 0 ):
          print "Invalid Number."
          kp= Read_Integer_Input("Please choose an integer between 1 and %i : "%n)
          kp= kp - 1
       k = hp.ang2pix(nside,DtoR*(90.0-Halo_data.DEC[kp]),DtoR*(Halo_data.RA[kp]))
       Pix_Halo_data = Halo_Object()
       for i in range(n):
           if ( k == hp.ang2pix(nside,DtoR*(90.0-Halo_data.DEC[i]),DtoR*(Halo_data.RA[i])) ):
              Pix_Halo_data.add_single_data(Halo_data,i)
       Pix_Halo_data.update_halo_data   
       General_Prop.update
       General_Prop.Pix_separator = True
       General_Prop.Pix_separator_update(nside,k)
    else:
       "First you need to add a halo catalog ..."
       Pix_Halo_data = None
    raw_input("Press enter to continue ... ")
    return Pix_Halo_data
Example #5
def make_random(nside,data,Nf,outfile=None,viewmap=True,fmt='ascii',rsd=True):
    import healpy as hp
    import numpy as np
    import astropy.table as tab
    import matplotlib.pyplot as plt
    data_tab = tab.Table.read(data)
    ran=np.hstack((data_tab,)*Nf)
    Nr=len(ran)
    ran['ra']=np.random.uniform(0,360,Nr)
    ran['dec']=np.degrees(np.arcsin(np.random.uniform(-1,1,Nr)))
    map_gal = make_hp_map(nside,data)
    pix_nums = hp.ang2pix(nside,np.pi/2-ran['dec']*np.pi/180,ran['ra']*np.pi/180)
    mask = map_gal[pix_nums]>0
    if(rsd):
        new_random = tab.Table([ran['ra'][mask],ran['dec'][mask],ran['z'][mask]+ran['dz'][mask],np.ones(len(ran['ra'][mask]))],names=('ra','dec','z','w'))
    else:
        new_random = tab.Table([ran['ra'][mask],ran['dec'][mask],ran['z'][mask],np.ones(len(ran['ra'][mask]))],names=('ra','dec','z','w'))
    if(outfile!=None):
        new_random.write(outfile,format=fmt)
    if(viewmap):
        pix_nums = hp.ang2pix(nside,np.pi/2-new_random['dec']*np.pi/180,new_random['ra']*np.pi/180)
        bin_count = np.bincount(pix_nums)
        map_ran = np.append(bin_count,np.zeros(12*nside**2-len(bin_count)))
        plt.figure()
        hp.mollview(map_ran)
    return ran
Example #6
    def fillLivetime(self):
        
        for i, cth in enumerate(self._ctheta_axis.center):

            dcostheta = self._ctheta_axis.width[i]
            
            if self._src_type == 'iso':
                self._tau[i] = dcostheta
            elif self._src_type == 'isodec':
                sinlat = np.linspace(-1,1,48)

                m = self._ltmap[:,i]

                self._tau[i] = 0
                for s in sinlat:                    
                    lat = np.arcsin(s)
                    th = np.pi/2. - lat                    
                    ipix = healpy.ang2pix(64,th,0,nest=True)
                    self._tau[i] += m[ipix]
                                       
            else:
                th = np.pi/2. - self._lonlat[1]*np.pi/180.
                phi = self._lonlat[0]*np.pi/180.
                m = self._ltmap[:,i]
                ipix = healpy.ang2pix(64,th,phi,nest=True)
#            tau = healpy.get_interp_val(m,th,phi,nest=True)
                self._tau[i] = m[ipix]
Example #7
 def is_in(self, ra, decl):
     """
     Check if position is in the MOC map.
     ra, decl - sky coordinates (or lists of coordinates).
     """
     theta = 0.5*np.pi - np.radians(decl)
     phi = np.radians(ra)
     keys = sorted(self.healpix.keys())
     if isinstance(ra, float):
         for level in keys:
             healpix_cell = healpy.ang2pix(2**level, theta, phi, nest=True)
             if healpix_cell in self.healpix[level]:
                 return True
         return False
     else:
         result = np.zeros(len(ra), dtype=bool)
         for level in keys:
             healpix_cell = healpy.ang2pix(2**level, theta, phi, nest=True)
             cellcheck = np.array([cell in self.healpix[level] for cell in healpix_cell])
             if cellcheck.any():
                 result = result + cellcheck
             if result.all():
                 return result
         return result
Example #8
def cscrambler(data, nSide, nBGResample, method):

    # Setup maps
    test = ShowerLLH.TimeScramble()
    temp_maps = {}
    nPixels = hp.nside2npix(nSide)
    #for key in ['Local', 'Data', 'BG']:
    for key in ['Data', 'BG']:
        temp_maps[key] = zeros(nPixels)

    # Begin looping through data
    mjd = data['mjd']
    theta = data['ShowerPlane_zenith']
    phi = data['ShowerPlane_azimuth']

    # Fill local and data maps
    local_pix = hp.ang2pix(nSide, theta, phi)
    dec, ra = array(map(l2e, mjd, theta, phi)).T
    data_pix = hp.ang2pix(nSide, dec, ra)
    for i in range(len(local_pix)):
        #temp_maps['Local'][local_pix[i]] += 1.0
        temp_maps['Data'][data_pix[i]] += 1.0

    # Fill background map
    bg = test.cscramble(mjd, theta, phi, nSide, nBGResample, method)
    temp_maps['BG'] = asarray(bg)

    return temp_maps
Example #9
    def view_observed_gsm(self, logged=False, show=False, **kwargs):
        """ View the GSM (Mollweide), with below-horizon area masked. """
        sky = self.observed_sky
        if logged:
            sky = np.log2(sky)

        # Get RA and DEC of zenith
        ra_rad, dec_rad = self.radec_of(0, np.pi / 2)
        ra_deg  = ra_rad / np.pi * 180
        dec_deg = dec_rad / np.pi * 180

        # Apply rotation
        derotate = hp.Rotator(rot=[ra_deg, dec_deg])
        g0, g1 = derotate(self._theta, self._phi)
        pix0 = hp.ang2pix(self._n_side, g0, g1)
        sky = sky[pix0]

        coordrotate = hp.Rotator(coord=['C', 'G'], inv=True)
        g0, g1 = coordrotate(self._theta, self._phi)
        pix0 = hp.ang2pix(self._n_side, g0, g1)
        sky = sky[pix0]

        hp.mollview(sky, coord='G', **kwargs)

        if show:
            plt.show()

        return sky
Example #10
    def setUp(self):
        fixture_name = os.path.splitext(os.path.basename(__file__))[0]
        self.outdir = create_outdir(self.comm, fixture_name)

        # Create one observation per group, and each observation will have
        # one detector per process and a single chunk.
        self.data = create_distdata(self.comm, obs_per_group=1)

        self.ndet = self.data.comm.group_size
        self.rate = 20.0

        # Create detectors with default properties
        dnames, dquat, depsilon, drate, dnet, dfmin, dfknee, dalpha = \
            boresight_focalplane(self.ndet, samplerate=self.rate)

        # Pixelization
        self.nside = 64
        self.npix = 12 * self.nside**2
        self.subnside = 16
        if self.subnside > self.nside:
            self.subnside = self.nside
        self.subnpix = 12 * self.subnside * self.subnside

        # Samples per observation
        self.totsamp = self.npix

        # Dipole parameters
        self.solar_speed = 369.0
        gal_theta = np.deg2rad(90.0 - 48.05)
        gal_phi = np.deg2rad(264.31)
        z = self.solar_speed * np.cos(gal_theta)
        x = self.solar_speed * np.sin(gal_theta) * np.cos(gal_phi)
        y = self.solar_speed * np.sin(gal_theta) * np.sin(gal_phi)
        self.solar_vel = np.array([x, y, z])
        self.solar_quat = qa.from_vectors(np.array([0.0, 0.0, 1.0]), self.solar_vel)

        self.dip_check = 0.00335673

        self.dip_max_pix = hp.ang2pix(self.nside, gal_theta, gal_phi, nest=False)
        self.dip_min_pix = hp.ang2pix(self.nside, (np.pi - gal_theta), (np.pi + gal_phi), nest=False)

        # Populate the observations

        tod = TODHpixSpiral(
            self.data.comm.comm_group,
            dquat,
            self.totsamp,
            detranks=self.data.comm.comm_group.size,
            firsttime=0.0,
            rate=self.rate,
            nside=self.nside)

        self.data.obs[0]["tod"] = tod
Example #11
def get_healsparse_subpix_indices(subpix_nside, subpix_hpix, subpix_border, coverage_nside):
    """
    Retrieve the coverage pixels that intersect the region, with a border.

    Parameters
    ----------
    subpix_nside: `int`
       Nside for the subregion
    subpix_hpix: `int`
       Pixel number for the subregion (ring format)
    subpix_border: `float`
       Border radius to cover outside subpix_hpix
    coverage_nside: `int`
       Nside of the healsparse coverage map
    """

    # First, we need to know which pixel(s) from nside_coverage are covered by
    # subpix_hpix

    if subpix_nside == coverage_nside:
        # simply convert to nest
        covpix = hp.ring2nest(subpix_nside, subpix_hpix)
    elif subpix_nside > coverage_nside:
        # what pixel is this contained in?
        theta, phi = hp.pix2ang(subpix_nside, subpix_hpix, nest=False)
        covpix = hp.ang2pix(coverage_nside, theta, phi, nest=True)
    else:
        # This is subpix_nside < coverage_nside
        # what coverage pixels are contained in subpix_hpix?
        subpix_hpix_nest = hp.ring2nest(subpix_nside, subpix_hpix)
        bit_shift = 2 * int(np.round(np.log(coverage_nside / subpix_nside) / np.log(2)))
        n_pix = 2**bit_shift
        covpix = np.left_shift(subpix_hpix_nest, bit_shift) + np.arange(n_pix)

    # And now if we have a border...
    if subpix_border > 0.0:
        nside_testing = max([coverage_nside * 4, subpix_nside * 4])
        boundaries = hp.boundaries(subpix_nside, subpix_hpix, step=nside_testing // subpix_nside)

        extrapix = np.zeros(0, dtype=np.int64)

        # These are pixels that touch the boundary
        for i in range(boundaries.shape[1]):
            pixint = hp.query_disc(nside_testing, boundaries[:, i],
                                   np.radians(subpix_border), inclusive=True, fact=8)
            extrapix = np.append(extrapix, pixint)

        extrapix = np.unique(extrapix)
        theta, phi = hp.pix2ang(nside_testing, extrapix)
        covpix = np.unique(np.append(covpix, hp.ang2pix(coverage_nside, theta, phi, nest=True)))

    return covpix
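A minimal usage sketch, assuming numpy and healpy are imported as np and hp as in the snippet; the nside values, pixel number, and border radius are illustrative.

import numpy as np
import healpy as hp

# Nest-ordered coverage pixels (nside=32) intersecting ring pixel 100 at nside=8,
# padded by a 0.5 degree border
covpix = get_healsparse_subpix_indices(subpix_nside=8, subpix_hpix=100,
                                       subpix_border=0.5, coverage_nside=32)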
Example #12
 def is_in(self, ra, decl):
     """
     Check if position is in the MOC map.
     ra, decl - sky coordinates.
     TODO: implement vectorized ra/decl support.
     """
     ra = np.atleast_1d(ra)
     decl = np.atleast_1d(decl)
     theta = 0.5*np.pi - np.radians(decl)
     phi = np.radians(ra)
     keys = list(self.healpix.keys())
     keys.sort()
     todo = np.ones(len(ra), dtype=bool)
     fullmask = np.arange(len(ra), dtype=int)
     for level in keys:
         healpix_cell = healpy.ang2pix(2**level, theta[todo], phi[todo],
                                       nest=True)
         mask = np.copy(fullmask[todo])
         for icell, cell in enumerate(healpix_cell):
             if cell in self.healpix[level]:
                 try:
                     todo[mask[icell]] = False
                 except IndexError:
                     import ipdb; ipdb.set_trace()
     return ~todo
Example #13
    def get_src_lthist(self,ra,dec,cth_axis=None):

        if cth_axis is None:
            cth_axis = copy.deepcopy(self._cth_axis)
            new_axis = False
        else:
            tmp_cth_axis = Axis.create(cth_axis.edges[0],
                                       cth_axis.edges[-1],
                                       cth_axis.nbins*4)
            new_axis = True

        

        ipix = healpy.ang2pix(64,np.pi/2. - np.radians(dec),
                              np.radians(ra),nest=True)


        if new_axis:
            lt = interpolate(self._cth_axis.center,
                             self._ltmap[ipix,::-1]/self._cth_axis.width,
                             tmp_cth_axis.center)*tmp_cth_axis.width
            lt = np.sum(lt.reshape(-1,4),axis=1)  
        else:
            lt = self._ltmap[ipix,::-1]

        return Histogram(cth_axis,counts=lt,var=0)
Example #14
def taylor_interpol_iter(m, pos, order=3, verbose=False, lmax=None):
        """Given a healpix map m[npix], and a set of positions                                       
        pos[{theta,phi},...], evaluate the values at those positions                                 
        using harmonic Taylor interpolation to the given order (3 by                                 
        default). Successively yields values for each cumulative order                               
        up to the specified one. If verbose is specified, it will print                              
        progress information to stderr."""
        nside = hp.npix2nside(m.size)
        if lmax is None: lmax = 3*nside
        # Find the healpix pixel centers closest to pos,                                             
        # and our deviation from these pixel centers.                                                
        ipos = hp.ang2pix(nside, pos[0], pos[1])
        pos0 = np.array(hp.pix2ang(nside, ipos))
        dpos = pos[:2]-pos0
        # Take wrapping into account                                                                 
        bad = dpos[1]>np.pi
        dpos[1,bad] = dpos[1,bad]-2*np.pi
        bad = dpos[1]<-np.pi
        dpos[1,bad] = dpos[1,bad]+2*np.pi

        # Since healpix' dphi actually returns dphi/sintheta, we choose                              
        # to expand in terms of dphi*sintheta instead.                                               
        dpos[1] *= np.sin(pos0[0])
        del pos0

        # We will now Taylor expand our healpix field to                                             
        # get approximations for the values at our chosen                                            
        # locations. The structure of this section is                                                
        # somewhat complicated by the fact that alm2map_der1 returns                                 
        # two different derivatives at the same time.                                                
        derivs = [[m]]
        res = m[ipos]
        yield res
        for o in range(1,order+1):
                # Compute our derivatives                                                            
                derivs2 = [None for i in range(o+1)]
                used    = [False for i in range(o+1)]
                # Loop through previous level in steps of two (except last)                          
                if verbose: tprint("order %d" % o)
                for i in range(o):
                        # Each alm2map_der1 provides two derivatives, so avoid                       
                        # doing double work.                                                         
                        if i < o-1 and i % 2 == 1:
                                continue
                        a = hp.map2alm(derivs[i], use_weights=True, lmax=lmax, iter=0)
                        derivs[i] = None
                        dtheta, dphi = hp.alm2map_der1(a, nside, lmax=lmax)[-2:]
                        derivs2[i:i+2] = [dtheta,dphi]
                        del a, dtheta, dphi
                        # Use these to compute the next level                                        
                        for j in range(i,min(i+2,o+1)):
                                if used[j]: continue
                                N = comb(o,j)/factorial(o)
                                res += N * derivs2[j][ipos] * dpos[0]**(o-j) * dpos[1]**j
                                used[j] = True
                                # If we are at the last order, we don't need to waste memory         
                                # storing the derivatives any more                                   
                                if o == order: derivs2[j] = None
                derivs = derivs2
                yield res
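A minimal usage sketch of the generator above, assuming comb and factorial (e.g. from scipy.special) are available in the snippet's namespace; the map and positions are synthetic.

import numpy as np
import healpy as hp

nside = 32
m = np.random.randn(hp.nside2npix(nside))      # toy map
pos = np.array([[0.3, 1.2, 2.0],               # theta values (radians)
                [0.0, 1.5, 4.0]])              # phi values (radians)
for order, vals in enumerate(taylor_interpol_iter(m, pos, order=3)):
    print(order, vals)                         # successive cumulative orders 0..3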
Example #15
def cat2hpx_cg(ra, dec, nside):
    """
    def cat2hpx_cg(ra, dec, nside):
        R=hp.Rotator(coord='cg')
        phi, theta=R(ra, dec, lonlat=True)
        theta=90-theta
        pixs=hp.ang2pix(nside, theta*nm.pi/180, phi*nm.pi/180, nest=True)
        countmap1=nm.zeros((12*nside**2))
        for idx in pixs:
            countmap1[idx]+=1
        
        return countmap1
    """
    
    R=hp.Rotator(coord='cg')
    phi, theta=R(ra, dec, lonlat=True)
    theta=90-theta

    pixs=hp.ang2pix(nside, theta*nm.pi/180, phi*nm.pi/180, nest=True)

    countmap1=nm.zeros((12*nside**2))
    for idx in pixs:
        countmap1[idx]+=1


    return countmap1
Example #16
def histSpectrum(config):

    files = glob.glob('%s/%s_data/files/*.hdf5' % (my.llh_data, config))
    files.sort()

    nside = 64
    npix = hp.nside2npix(nside)
    sbins = np.arange(npix, dtype=int)
    ebins = np.arange(5, 9.501, 0.05)

    for file in files[:10]:

        print 'Working on %s...' % file

        d = hdf5extractor(config, file)
        c0 = d['cuts']['llh']
        r = np.log10(d['ML_energy'])[c0]
        fit = zfix(d['zenith'], bintype='logdist')[c0]
        w = d['weights'][c0]

        zen = d['zenith'][c0]
        azi = d['azimuth'][c0]
        pix = hp.ang2pix(nside, zen, azi)

        x = r - fit
        y = pix
        h0 = np.histogram2d(x, y, bins=(ebins,sbins), weights=w)[0]
Example #17
def angToPix(nside, lon, lat, nest=False):
    """
    Input (lon, lat) in degrees instead of (theta, phi) in radians
    """
    theta = np.radians(90. - lat)
    phi = np.radians(lon)
    return hp.ang2pix(nside, theta, phi, nest=nest)
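A minimal usage sketch for the wrapper above; the nside and coordinates are illustrative.

import numpy as np
import healpy as hp

# Pixel containing (lon, lat) = (266.4, -28.9) degrees at nside=64
pix = angToPix(64, 266.4, -28.9)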
Example #18
def zoncaview(m):
    """
    m is a healpix sky map, such as provided by WMAP or Planck.
    """

    nside = hp.npix2nside(len(m))
    vmin = -1e3; vmax = 1e3

    # Set up some grids:
    xsize = ysize = 1000
    theta = np.linspace(np.pi, 0, ysize)
    phi   = np.linspace(-np.pi, np.pi, xsize)
    longitude = np.radians(np.linspace(-180, 180, xsize))
    latitude = np.radians(np.linspace(-90, 90, ysize))

    # Project the map to a rectangular matrix xsize x ysize:
    PHI, THETA = np.meshgrid(phi, theta)
    grid_pix = hp.ang2pix(nside, THETA, PHI)
    grid_map = m[grid_pix]

    # Create a sphere:
    r = 0.3
    x = r*np.sin(THETA)*np.cos(PHI)
    y = r*np.sin(THETA)*np.sin(PHI)
    z = r*np.cos(THETA)

    # The figure:
    mlab.figure(1, bgcolor=(1, 1, 1), fgcolor=(0, 0, 0), size=(400, 300))
    mlab.clf()

    mlab.mesh(x, y, z, scalars=grid_map, colormap="jet", vmin=vmin, vmax=vmax)

    mlab.draw()

    return
Example #19
def computeHPXpix_sequ_new_simp(nside, propertyArray): 
    #return 'ERROR'
    #Hack by AJR, just return all of the pixel centers within the ra,dec range
    img_ras, img_decs = [propertyArray[v] for v in ['ra0', 'ra1', 'ra2','ra3']],[propertyArray[v] for v in ['dec0', 'dec1', 'dec2','dec3']]
    #print min(img_ras),max(img_ras)
    #more efficient version below failed for some reason
    for i in range(0,len(img_ras)):
    	if img_ras[i] > 360.:
    		img_ras[i] -= 360.
    	if img_ras[i] < 0.:
    		img_ras[i] += 360.
    #if max(img_ras) - min(img_ras) > 1.:
    #	print img_ras,img_decs	
    #if np.any(img_ras > 360.0):
    #    img_ras[img_ras > 360.0] -= 360.0
    #if np.any(img_ras < 0.0):
    #    img_ras[img_ras < 0.0] += 360.0
    # Coordinates of image corners
    #print img_ras
    img_phis = np.multiply(img_ras , np.pi/180.)
    img_thetas =  np.pi/2.  - np.multiply(img_decs , np.pi/180.)
    img_pix = hp.ang2pix(nside, img_thetas, img_phis, nest=False)
    pix_thetas, pix_phis = hp.pix2ang(nside, img_pix, nest=False)
    ipix_list = np.zeros(0, dtype=long)
    # loop over rings until reached bottom
    iring_U = ring_num(nside, np.cos(img_thetas.min()), shift=0)
    iring_B = ring_num(nside, np.cos(img_thetas.max()), shift=0)
    ipixs_ring = []
    pmax = np.max(img_phis)
    pmin = np.min(img_phis)
    if pmax-pmin == 0:
    	return []
    p1 = pmin
    p2 = pmax

    if pmin < .1 and pmax > 1.9*np.pi:
		#straddling line
		#img_phis.sort()
		for i in range(0,len(img_phis)):
			if img_phis[i] > p1 and img_phis[i] < np.pi:
				p1 = img_phis[i]
			if img_phis[i] < p2 and img_phis[i] > np.pi:
				p2 = img_phis[i]
		
		#ipixs_ring1 = np.int64(np.concatenate([in_ring(nside, iring, 0, p1, conservative=False) for iring in range(iring_U, iring_B+1)]))
		#ipixs_ring2 = np.int64(np.concatenate([in_ring(nside, iring, p2, 2.*np.pi, conservative=False) for iring in range(iring_U, iring_B+1)]))
# 		ipixs_ring1 = np.int64(np.concatenate([in_ring_simp(nside, iring, 0, p1, conservative=False) for iring in range(iring_U, iring_B+1)]))
# 		ipixs_ring2 = np.int64(np.concatenate([in_ring_simp(nside, iring, p2, 2.*np.pi, conservative=False) for iring in range(iring_U, iring_B+1)]))
# 		ipixs_ring = np.concatenate((ipixs_ring1,ipixs_ring2))
# 		print len(ipixs_ring),len(ipixs_ring1),len(ipixs_ring2),iring_B-iring_U,pmin,pmax,p1,p2
#     	
#     else:		
    ipixs_ring = np.int64(np.concatenate([in_ring(nside, iring, p1, p2, conservative=False) for iring in range(iring_U, iring_B+1)]))
	#ipixs_ring = np.int64(np.concatenate([in_ring_simp(nside, iring, p1, p2, conservative=False) for iring in range(iring_U, iring_B+1)]))
    if len(ipixs_ring) > 1000:
    	print len(ipixs_ring),iring_B-iring_U,pmin,pmax,p1,p2
    	return [] #temporary fix
    #	print len(ipixs_ring),iring_B-iring_U,pmin,pmax,min(img_ras),max(img_ras)  
    #print len(ipixs_ring),iring_B-iring_U,pmin,pmax,min(img_ras),max(img_ras)
    return ipixs_ring
Example #20
def eqmap2azelmap(map,longi=-104.245,lat=34.4717,ut=12.0,year=2014,month=5,day=29):
    """
    function to rotate celestial coord map to az el with brute force using tools below from Victor Roytman
    """
    julian_date=jdcnv(year,month,day,ut)
    obs=ephem.Observer()
    obs.lon=str(longi)
    obs.lat=str(lat)
    obs.date=ephem.date((year,month,day,ut))
    nside=np.int(np.sqrt(len(map)/12))
    outmap=np.zeros(len(map))
    pixlist=range(len(map))
    htheta,hphi=hp.pix2ang(nside,pixlist)
    elevation=np.pi/2. -htheta
    azimuth=hphi
    ctheta=[]
    cphi=[]
    for az,el in zip(azimuth,elevation):
        ra,dec=obs.radec_of(az,el)
        ctheta.append(np.pi/2. -dec)
        cphi.append(ra)
    #ra,dec=azel2radec(julian_date, azimuth, elevation, lat*dtr, longi)
    #ctheta=np.pi/2.-dec
    #cphi=ra
    ctheta=np.array(ctheta)
    cphi=np.array(cphi)
    rpixlist=hp.ang2pix(nside,ctheta,cphi)
    outmap[pixlist]=map[rpixlist]
    return outmap
Example #21
	def smooth_the_pixel_by_angles(self,angle_vec,counts_vec): #angle vec of the form [ [theta,phi], [theta,phi], ... ]
		smoothed_pixel_map = np.zeros(self.npix)


		#theta_center,phi_center = hp.pix2ang(self.nside,pix_num_center)
		# theta_center=angle_vec[::,0]
		# phi_center=angle_vec[::,1]
		for theta_center,phi_center, count in map(None,angle_vec[0],angle_vec[1],counts_vec):
			#ta = time.time()
			#theta_center,phi_center = angle
			lat = theta_center*360/(2*np.pi)
			lng = phi_center*360/(2*np.pi)

			mask_where = masks.mask_ring(0, self.mult_sigma_for_smooth*self.sigma*360/(2*np.pi),lat, lng, self.nside)
			#mask_where=np.ones(self.npix)
			#print 'start: ', time.time() - ta
			mask_pixel_vals = np.where(mask_where == 1)[0]
			pix_num_center=hp.ang2pix(self.nside,theta_center,phi_center)
			mask_pixel_vals = np.array(list(set(list(mask_pixel_vals) + [pix_num_center])))
			#print 'The mask_pixel_vals are ',mask_pixel_vals

			#t0 = time.time()
			vals = np.vectorize(self.gaussian_func_ang)(theta_center,phi_center,mask_pixel_vals)
			#print 'step vals: ', time.time() - t0

			#print 'The vals are ', vals
			#print 'Sum of vals: ', np.sum(vals)
			if np.sum(vals)==0:
				smoothed_pixel_map[pix_num_center] += count
			else:
				smoothed_pixel_map[mask_pixel_vals] += count*vals/np.sum(vals)

			#print 'total: ', time.time() - ta

		return smoothed_pixel_map #normalize to one
Example #22
def RaDec2Healpix(ra=None, dec=None, nside=None, nest=False, cat=None):
    """
    Compute the HEALPix index for each RA/DEC pair. Numpy array is returned. 
  
    Parameters
    ----------
    cat (None/structured array)
        If not None, the structured data array (e.g. numpy recarray)
    ra (float array/str) 
        if `cat` is None, an array of the RA values. Otherwise, the column name for the RA column in `cat`.
    dec (float array/str)
        if `cat` is None, an array of the DEC values. Otherwise, the column name for the DEC column in `cat`.
    nside (int)
        HEALPix nside
    nest (bool)
        Whether or not to use nested format

    Returns
    -------
    index (int)
        Array of the HEALPix indexes for each RA/DEC pair.

    """
    _nsideExcept(nside)
    r, d = _CatOrArrays(cat, ra, dec)

    phi = _np.radians(r)
    theta = _np.radians(90.0 - d)
    hpInd = _hp.ang2pix(nside, theta, phi, nest=nest)
    return hpInd
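A minimal usage sketch, assuming the module-level helpers (_nsideExcept, _CatOrArrays) and the _np/_hp aliases exist as the snippet implies; the coordinates are illustrative.

import numpy as np

ra = np.array([10.0, 150.5, 300.25])   # degrees
dec = np.array([-45.0, 0.0, 60.0])     # degrees
pix = RaDec2Healpix(ra=ra, dec=dec, nside=128, nest=False)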
Example #23
def mkgalmapY3ac(res,zr,gz='.gz',md='',fore='',wm='',syscut=''):
	gl = []
	for i in range(0,12*res*res):
		gl.append(0)
	#f = fitsio.read(dir+'dr1_lss_red_'+zr+'_v0_redux.fits.gz',ext=1)
	f = fitsio.read(dir+'test'+zr+mask+'.fits'+gz,ext=1)
	ngt = 0
	w = 1.
	zem = 0
	fw = ''
	if fore == 'fore':
		fw = '_fore'
	#if fore == 'auto':
	#	fw = '_auto'
	if md == 'nodepth':
		md = '_none'
	#else:
	#	md = '_'+md	
	for i in range(0,len(f)):
		ra,dec = f[i]['RA'],f[i]['DEC']
		
		#if f[i]['v0'+md+fw] == 1.:
		
		#if wm != '':
		#	w = float(ln[4])
		#if z > zmin and z < zmax:
		th,phi = radec2thphi(ra,dec)
		p = hp.ang2pix(res,th,phi,nest=True)
		gl[p] += w
		ngt += w
	print len(gl),ngt
	return gl
Example #24
    def __expand_valid(self, min_p=1e-7):
        #
        # Determine what the 'quanta' of probabilty is
        #
        if self._massp == 1.0:
            # This is to ensure we don't blow away everything because the map
            # is very spread out
            min_p = min(min_p, max(self.skymap))
        else:
            # NOTE: Only valid if CDF descending order is kept
            min_p = self.pseudo_pdf(*self.valid_points_decra[-1])

        self.valid_points_hist = []
        ns = healpy.npix2nside(len(self.skymap))

        # Renormalize first so that the vector histogram is properly normalized
        self._renorm = 0
        # Account for probability lost due to cut off
        for i, v in enumerate(self.skymap >= min_p):
            self._renorm += self.skymap[i] if v else 0

        for pt in self.valid_points_decra:
            th, ph = HealPixSampler.decra2thph(pt[0], pt[1])
            pix = healpy.ang2pix(ns, th, ph)
            if self.skymap[pix] < min_p:
                continue
            self.valid_points_hist.extend([pt]*int(round(self.pseudo_pdf(*pt)/min_p)))
        self.valid_points_hist = numpy.array(self.valid_points_hist).T
Example #25
def make_counts_maps(ra,dec,z,z_tics,mask):
    """
    Make counts maps from ra,dec and z data.

    @param ra Right ascension of the object in degrees
    @param dec Declination of the object in degrees
    @param z Redshift of the object
    @param z_tics an array of points that define the z-slices
    @param mask The mask to be applied. This should be a vector of zero or one
           entries and it should have the same number of pixels as the output map.

    Returns counts map as a numpy array of dimensions [npix,len(z_tics)-1]
    """

    print "\ncreating the counts map"

    # sanity checks
    if len(ra) != len(dec) : raise RuntimeError("ra and dec have different sizes")
    if len(ra) != len(z) : raise RuntimeError("ra and z have different sizes")
    if len(z_tics) <2 : raise RuntimeError("at least 2 tics required to define a bin")

    nside=0
    try:
        nside = hp.npix2nside(np.shape(mask)[0])
    except:
        raise RuntimeError("length of mask not a healpix npix")

    # define the maps
    nmaps = len(z_tics)-1
    npix = np.shape(mask)[0]
    counts = np.zeros([npix,nmaps])

    # go through each object and add them into the correct bins

    for i in range(len(ra)):
        # convert (ra,dec) to (theta,phi)
        theta=-degr2rad*dec[i] + np.pi/2.
        phi=degr2rad*ra[i]

        # find the pixel id
        try:
            pixid=hp.ang2pix(nside,theta,phi)
        except:
            print "wrong theta and phi: theta= ",theta,"phi= ",phi,"ra= ",ra[i],"dec= ",dec[i]
            #raise RuntimeError("wrong theta and phi")

        # is the pixel in the mask?
        if mask[pixid]>0 :
            # find the bin to which the current object belong to
            binid=np.searchsorted(z_tics,z[i])-1

            # is the bind in the range?
            if binid >=0 and binid < len(z_tics)-1:
                try:
                    counts[pixid,binid] += 1
                except:
                    print "index error binid= ",binid,"len(z_tics)= ",len(z_tics),"z[i]= ",z[i]

    # return the maps
    return counts
Example #26
def generate_map(t, p, nside, plot=False):
    """Generate map and bands for the given galaxy locations"""
    npix = 12*nside**2
    ix = hp.ang2pix(nside, t, p, nest=True)
    M = np.zeros(npix)
    for i in range(len(ix)):
        M[ix[i]] += 1.
    ave = 1.*len(t)/npix
    M = M/ave-1.0 #Calculate overdensity

    cl = hp.anafast(M)
    ell = np.arange(len(cl))+1
    ell2 = (ell[::3]+ell[1::3]+ell[2::3])/3.
    cl2 = (cl[::3]+cl[1::3]+cl[2::3])/3.

    if plot:
        plot_galaxies_and_density(t, p, M)
        plot_anafast_band_powers(ell2, cl2)
        plt.show()

    ell3 = ell2[1:]
    cl3 = cl2[1:] #Alex to Matias: Why are these removed?
    cl_err = np.ones(len(ell3))*0.00002
    index = np.arange(len(ell3))
    ell_min = ell3-1
    ell_max = ell3+1

    cl3 = cl3*1000 #Bigger c_l values for convergence

    bands = zip(index, ell3, ell_min, ell_max, cl3, cl_err, cl3)

    return M, bands
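A minimal usage sketch with a synthetic, roughly uniform catalogue; the nside and number of points are illustrative.

import numpy as np

npts = 10000
t = np.arccos(np.random.uniform(-1, 1, npts))   # colatitude theta (radians)
p = np.random.uniform(0, 2 * np.pi, npts)       # longitude phi (radians)
M, bands = generate_map(t, p, nside=32, plot=False)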
Example #27
def get_mask_gal(percentage_keep=40, nside_out=512, coordinates='eq', quick=True):
    import pyfits
    from astropy.coordinates import FK5
    from astropy import units as u    
    savename = datadir+'mask_GAL0%i_%i_'%(percentage_keep,nside_out)
    savename += coordinates+'_.pkl'
    if quick: return pickle.load(open(savename,'r'))

    pp = pyfits.open(datadir+'HFI_Mask_GalPlane_2048_R1.10.fits')
    mask_gal = pp[1].data['GAL0%i'%percentage_keep]
    mask_gal = hp.reorder(mask_gal, out='RING', inp='NESTED')
    if coordinates=='gal':
        mask_out = hp.ud_grade(mask_gal, nside_out)
    if coordinates=='eq':
        nside_up = nside_out*2
        mask_gal = hp.ud_grade(mask_gal, nside_up)
        # Find the indices in an up-sampled *galactic* map that belong to these 
        # *equatorial* coordinates.
        theta, phi = hp.pix2ang(nside_up, np.arange(hp.nside2npix(nside_up)))
        ra = phi
        dec = np.pi/2.-theta
        coord = FK5(ra=ra, dec=dec, unit=(u.rad, u.rad))
        l_gal = coord.galactic.l.rad
        b_gal = coord.galactic.b.rad
        phi = l_gal
        theta = np.pi/2.-b_gal
        ind_up = hp.ang2pix(nside_up, theta, phi)
        mask_up_eq = mask_gal[ind_up]
        mask_out = hp.ud_grade(mask_up_eq, nside_out)

    pickle.dump(mask_out, open(savename,'w'))
    return mask_out
Example #28
 def pseudo_pdf(self, dec_in, ra_in):
     """
     Return pixel probability for a given dec_in and ra_in. Note, uses healpy functions to identify correct pixel.
     """
     th, ph = HealPixSampler.decra2thph(dec_in, ra_in)
     res = healpy.npix2nside(len(self.skymap))
     return self.skymap[healpy.ang2pix(res, th, ph)]
Example #29
def get_haslam(nside_out=512, coordinates='eq', quick=True):
    savename = datadir+'haslam_'+coordinates+'_%i.pkl'%nside_out
    if quick: return pickle.load(open(savename,'r'))
    radio = hp.read_map(datadir+'lambda_haslam408_dsds.fits')
    if coordinates=='gal':
        radio_out = hp.ud_grade(radio, nside_out)
    if coordinates=='eq':
        from astropy.coordinates import FK5
        from astropy import units as u
        # Up-sample and convert from galactic to equatorial.
        nside_up = nside_out*2
        radio_up_gal = hp.ud_grade(radio, nside_up)        
        # Find the indices in an up-sampled *galactic* map that belong to these 
        # *equatorial* coordinates.
        theta, phi = hp.pix2ang(nside_up, np.arange(hp.nside2npix(nside_up)))
        ra = phi
        dec = np.pi/2.-theta
        coord = FK5(ra=ra, dec=dec, unit=(u.rad, u.rad))
        l_gal = coord.galactic.l.rad
        b_gal = coord.galactic.b.rad
        phi = l_gal
        theta = np.pi/2.-b_gal
        ind_up = hp.ang2pix(nside_up, theta, phi)
        radio_up_eq = radio_up_gal[ind_up]
        radio_out = hp.ud_grade(radio_up_eq, nside_out)
    mask_out = np.ones_like(radio_out)
    pickle.dump((radio_out, mask_out), open(savename,'w'))
    return radio_out, mask_out
Example #30
	def BeamMapComplex( self, RAnow, Bmap, freq ) : 
		''' return shape = (1, Nv, 12*nside**2) '''
		freq = jp.Num(freq, float)
		RAnow = jp.Num(RAnow, float)
		Bmap = jp.npfmt(Bmap)
		coordtrans = jp.CoordTrans()
		nside = hp.get_nside(Bmap)
		theta, phi = hp.pix2ang(nside, np.arange(12*nside**2))
		#--------------------------------------------------
		# For Bmap, rotate points in CeleXYZ from RA=0(+x) to RA=RAnow. Rotate points is negetive angle, so az=-RAnow
		az = -RAnow
		if (az != 0) : 
			pixnowB = hp.ang2pix(nside, theta, phi+az*np.pi/180)
			Bmap = Bmap[pixnowB]
			pixnowB = 0 #@
		# Now Bmapnow is still in CeleXYZ
		#--------------------------------------------------
		# For Tmap, don't need to rotate its points
		# Then for Tmap and Bmap(now), convert from CeleXYZ to AntXYZ. Note that here rotate XYZ, NOT points, so az=RAnow+90 from lon=0 to +AntX
		az = 90 + RAnow
		if (az != 0) : hpixXYZ = coordtrans.xyzRotation(self.hpixCeleXYZ.T, az=az).T
		else : hpixXYZ = self.hpixCeleXYZ.copy()
		#--------------------------------------------------
		# phase = 2pi/c * \vec(B) * \vec(s)
		phase = (2*np.pi/300*freq * self.blXYZ[:,None,:] * hpixXYZ[None,:,:]).sum(-1)  # (Nv, 12*nside**2) for 1 freq
		hpixXYZ = 0 #@
		#--------------------------------------------------
		Bmap = Bmap[None,:] * np.exp(1j*phase)
		return Bmap[None,:]  # None for Nfreq=1
Example #31
    def fe_jump(self, x, iter, beta):

        q = x.copy()
        lqxy = 0
        
        fe_limit = np.max(self.fe)
        
        #draw skylocation and frequency from f-stat map
        accepted = False
        while accepted==False:
            log_f_new = self.params[self.pimap['log10_fgw']].sample()
            f_idx = (np.abs(np.log10(self.fe_freqs) - log_f_new)).argmin()

            gw_theta = np.arccos(self.params[self.pimap['cos_gwtheta']].sample())
            gw_phi = self.params[self.pimap['gwphi']].sample()
            hp_idx = hp.ang2pix(hp.get_nside(self.fe), gw_theta, gw_phi)

            fe_new_point = self.fe[f_idx, hp_idx]
            if np.random.uniform()<(fe_new_point/fe_limit):
                accepted = True

        #draw other parameters from prior
        cos_inc = self.params[self.pimap['cos_inc']].sample()
        psi = self.params[self.pimap['psi']].sample()
        phase0 = self.params[self.pimap['phase0']].sample()
        log10_h = self.params[self.pimap['log10_h']].sample()
        

        #put new parameters into q
        signal_name = 'cw'
        for param_name, new_param in zip(['log10_fgw','gwphi','cos_gwtheta','cos_inc','psi','phase0','log10_h'],
                                           [log_f_new, gw_phi, np.cos(gw_theta), cos_inc, psi, phase0, log10_h]):
            q[self.pimap[param_name]] = new_param
        
        #calculate Hastings ratio
        log_f_old = x[self.pimap['log10_fgw']]
        f_idx_old = (np.abs(np.log10(self.fe_freqs) - log_f_old)).argmin()
        
        gw_theta_old = np.arccos(x[self.pimap['cos_gwtheta']])
        gw_phi_old = x[self.pimap['gwphi']]
        hp_idx_old = hp.ang2pix(hp.get_nside(self.fe), gw_theta_old, gw_phi_old)
        
        fe_old_point = self.fe[f_idx_old, hp_idx_old]
        if fe_old_point>fe_limit:
            fe_old_point = fe_limit
            
        log10_h_old = x[self.pimap['log10_h']]
        phase0_old = x[self.pimap['phase0']]
        psi_old = x[self.pimap['psi']]
        cos_inc_old = x[self.pimap['cos_inc']]
        
        hastings_extra_factor = self.params[self.pimap['log10_h']].get_pdf(log10_h_old)
        hastings_extra_factor *= 1/self.params[self.pimap['log10_h']].get_pdf(log10_h)
        hastings_extra_factor *= self.params[self.pimap['phase0']].get_pdf(phase0_old)
        hastings_extra_factor *= 1/self.params[self.pimap['phase0']].get_pdf(phase0)
        hastings_extra_factor *= self.params[self.pimap['psi']].get_pdf(psi_old)
        hastings_extra_factor *= 1/self.params[self.pimap['psi']].get_pdf(psi)
        hastings_extra_factor *= self.params[self.pimap['cos_inc']].get_pdf(cos_inc_old)
        hastings_extra_factor *= 1/self.params[self.pimap['cos_inc']].get_pdf(cos_inc)
        
        lqxy = np.log(fe_old_point/fe_new_point * hastings_extra_factor)

        return q, float(lqxy)
Example #32
    def __init__(self,
                 ras_deg=None,
                 decs_deg=None,
                 shape=None,
                 wcs=None,
                 nside=None,
                 verbose=True,
                 hp_coords="equatorial",
                 mask=None,
                 weights=None,
                 pixs=None):

        self.verbose = verbose
        if nside is not None:
            self.nside = nside
            self.shape = hp.nside2npix(nside)
            self.curved = True
        else:
            self.shape = shape
            self.wcs = wcs
            self.curved = False

        if pixs is None:
            if nside is not None:

                eq_coords = ['fk5', 'j2000', 'equatorial']
                gal_coords = ['galactic']

                if verbose: print("Calculating pixels...")
                if hp_coords in gal_coords:
                    if verbose: print("Transforming coords...")
                    from astropy.coordinates import SkyCoord
                    import astropy.units as u
                    gc = SkyCoord(ra=ras_deg * u.degree,
                                  dec=decs_deg * u.degree,
                                  frame='fk5')
                    gc = gc.transform_to('galactic')
                    phOut = gc.l.deg * np.pi / 180.
                    thOut = gc.b.deg * np.pi / 180.
                    thOut = np.pi / 2. - thOut  #polar angle is 0 at north pole

                    self.pixs = hp.ang2pix(nside, thOut, phOut)
                elif hp_coords in eq_coords:
                    ras_out = ras_deg
                    decs_out = decs_deg
                    self.pixs = hp.ang2pix(nside,
                                           ras_out,
                                           decs_out,
                                           lonlat=True)

                else:
                    raise ValueError

                if verbose: print("Done with pixels...")
            else:
                coords = np.vstack((decs_deg, ras_deg)) * np.pi / 180.
                if verbose: print("Calculating pixels...")
                self.pixs = enmap.sky2pix(shape, wcs, coords,
                                          corner=True)  # should corner=True?!
                if verbose: print("Done with pixels...")
        else:
            self.pixs = pixs

        self.counts = self.get_map(weights=weights)
        if weights is None:
            self.rcounts = self.get_map(weights=None)
        else:
            self.rcounts = self.counts
        if not self.curved:
            self.counts = enmap.enmap(self.counts, self.wcs)

        self.mask = np.ones(shape) if mask is None else mask
        self._counts()
Example #33
def main(arguments=None):
    """
    *The main function used when ``find_atlas_exposure_containing_ssobject.py`` is run as a single script from the cl*
    """

    # SETUP VARIABLES
    # MAKE SURE HEALPIX SMALL ENOUGH TO MATCH FOOTPRINTS CORRECTLY
    nside = 1024
    pi = (4 * math.atan(1.0))
    DEG_TO_RAD_FACTOR = pi / 180.0
    RAD_TO_DEG_FACTOR = 180.0 / pi
    tileSide = 5.46

    i = 0
    outputList = []
    rsyncContent = []
    obscodes = {"02": "T05", "01": "T08"}

    # SETUP THE COMMAND-LINE UTIL SETTINGS
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName=False)
    arguments, settings, log, dbConn = su.setup()

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    dbSettings = {
        'host': '127.0.0.1',
        'user': '******',
        'tunnel': {
            'remote ip': 'starbase.mp.qub.ac.uk',
            'remote datbase host': 'dormammu',
            'remote user': '******',
            'port': 5003
        },
        'password': '******',
        'db': 'atlas_moving_objects'
    }

    # SETUP DATABASE CONNECTIONS
    dbConn = database(log=log, dbSettings=dbSettings).connect()

    # GRAB THE EXPOSURE LISTING
    for expPrefix, obscode in obscodes.iteritems():
        exposureList = []
        mjds = []
        sqlQuery = "select * from atlas_exposures where expname like '%(expPrefix)s%%'" % locals(
        )
        connected = 0
        while connected == 0:
            try:
                rows = readquery(log=log,
                                 sqlQuery=sqlQuery,
                                 dbConn=dbConn,
                                 quiet=False)
                connected = 1
            except:
                # SETUP DATABASE CONNECTIONS
                dbConn = database(log=log, dbSettings=dbSettings).connect()
                print "Can't connect to DB - try again"
                time.sleep(2)

        t = len(rows)

        print "There are %(t)s '%(expPrefix)s' exposures to check - hang tight" % locals(
        )

        for row in rows:
            row["mjd"] = row["mjd"] + row["exp_time"] / (2. * 60 * 60 * 24)
            exposureList.append(row)
            mjds.append(row["mjd"])

        results = []

        batchSize = 500
        total = len(mjds[1:])
        batches = int(total / batchSize)

        start = 0
        end = 0
        theseBatches = []
        for i in range(batches + 1):
            end = end + batchSize
            start = i * batchSize
            thisBatch = mjds[start:end]
            theseBatches.append(thisBatch)

        i = 0
        totalLen = len(theseBatches)
        index = 0
        for batch in theseBatches:
            i += 1

            if index > 1:
                # Cursor up one line and clear line
                sys.stdout.write("\x1b[1A\x1b[2K")
            print "Requesting batch %(i)04d/%(totalLen)s from JPL" % locals()
            index += 1

            eph = jpl_horizons_ephemeris(log=log,
                                         objectId=[ssobject],
                                         mjd=batch,
                                         obscode=obscode,
                                         verbose=False)

            for b in batch:
                match = 0
                # print b
                for row in eph:
                    if math.floor(row["mjd"] * 10000 +
                                  0.01) == math.floor(b * 10000 + 0.01):
                        match = 1
                        results.append(row)
                if match == 0:
                    for row in eph:
                        if math.floor(row["mjd"] * 10000) == math.floor(b *
                                                                        10000):
                            match = 1
                            results.append(row)
                if match == 0:
                    results.append(None)
                    this = math.floor(b * 10000 + 0.01)
                    print "MJD %(b)s (%(this)s) is missing" % locals()
                    for row in eph:
                        print math.floor(row["mjd"] * 10000 + 0.00001)
                    print ""

        print "Finding the exopsures containing the SS object"

        for e, r in zip(exposureList, results):
            # CALCULATE SEPARATION IN ARCSEC
            if not r:
                continue

            calculator = separations(
                log=log,
                ra1=r["ra_deg"],
                dec1=r["dec_deg"],
                ra2=e["raDeg"],
                dec2=e["decDeg"],
            )
            angularSeparation, north, east = calculator.get()
            sep = float(angularSeparation) / 3600.
            if sep < 5.:

                # THE SKY-LOCATION AS A HEALPIXEL ID
                pinpoint = hp.ang2pix(nside,
                                      theta=r["ra_deg"],
                                      phi=r["dec_deg"],
                                      lonlat=True)

                decCorners = (e["decDeg"] - tileSide / 2,
                              e["decDeg"] + tileSide / 2)
                corners = []
                for d in decCorners:
                    if d > 90.:
                        d = 180. - d
                    elif d < -90.:
                        d = -180 - d
                    raCorners = (
                        e["raDeg"] -
                        (tileSide / 2) / np.cos(d * DEG_TO_RAD_FACTOR),
                        e["raDeg"] +
                        (tileSide / 2) / np.cos(d * DEG_TO_RAD_FACTOR))
                    for rc in raCorners:
                        if rc > 360.:
                            rc = 720. - rc
                        elif rc < 0.:
                            rc = 360. + rc
                        corners.append(hp.ang2vec(rc, d, lonlat=True))

                # NEAR THE POLES THE SQUARE FOOTPRINT DEGENERATES INTO A TRIANGLE
                pole = False
                for d in decCorners:
                    if d > 87.0 or d < -87.0:
                        pole = True

                if pole == True:
                    corners = corners[1:]
                else:
                    # FLIP CORNERS 3 & 4 SO HEALPY UNDERSTANDS POLYGON SHAPE
                    corners = [corners[0], corners[1], corners[3], corners[2]]

                # RETURN HEALPIXELS IN EXPOSURE AREA
                expPixels = hp.query_polygon(nside, np.array(corners))
                if pinpoint in expPixels:
                    outputList.append({
                        "obs": e["expname"],
                        "mjd": e["mjd"],
                        "raDeg": r["ra_deg"],
                        "decDeg": r["dec_deg"],
                        "mag": r["apparent_mag"],
                        "sep": sep
                    })
                    thisMjd = int(math.floor(e["mjd"]))
                    expname = e["expname"]
                    ssobject_ = ssobject.replace(" ", "_")
                    raStr = r["ra_deg"]
                    decStr = r["dec_deg"]
                    rsyncContent.append(
                        "rsync -av [email protected]:/atlas/red/%(expPrefix)sa/%(thisMjd)s/%(expname)s.fits.fz %(ssobject_)s_atlas_exposures/"
                        % locals())
                    rsyncContent.append(
                        "touch %(ssobject_)s_atlas_exposures/%(expname)s.location"
                        % locals())
                    rsyncContent.append(
                        'echo "_RAJ2000,_DEJ2000,OBJECT\n%(raStr)s,%(decStr)s,%(ssobject)s" > %(ssobject_)s_atlas_exposures/%(expname)s.location'
                        % locals())

    dataSet = list_of_dictionaries(
        log=log,
        listOfDictionaries=outputList,
        # use re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T') for mysql
        reDatetime=False)

    ssobject = ssobject.replace(" ", "_")
    csvData = dataSet.csv(
        filepath="./%(ssobject)s_atlas_exposure_matches.csv" % locals())

    rsyncContent = ("\n").join(rsyncContent)
    pathToWriteFile = "./%(ssobject)s_atlas_exposure_rsync.sh" % locals()
    try:
        log.debug("attempting to open the file %s" % (pathToWriteFile, ))
        writeFile = codecs.open(pathToWriteFile, encoding='utf-8', mode='w')
    except IOError, e:
        message = 'could not open the file %s' % (pathToWriteFile, )
        log.critical(message)
        raise IOError(message)
Example #34
def get_data(data_location='./',
             data_type=None,
             longitude_range=(0, 360),
             latitude_range=(-90, 90),
             field=0,
             resolution=0.1,
             cut_last_pixel=False,
             verbose=True,
             return_header=True,
             reverse_xaxis=True,
             dr_version=1):
    ''' Extracts region from Planck data set. Region will be in galactic
    coordinates.

    Parameters
    ----------
    data_location : str
        Filepath to location of Planck data. Default is current directory.
    data_type : str
        Data type to choose from. Options are:
            Narrow-band: ['CO-Type1', 'CO-Type2', 'CO-Type3']
                'CO-Type1' fields:
                    0:  12CO J 1-->0 Intensity
                    1:  12CO J 1-->0 Intensity Error
                    2:  12CO J 1-->0 Null Test
                    3:  12CO J 1-->0 Mask
                    4:  12CO J 2-->1 Intensity
                    5:  12CO J 2-->1 Intensity Error
                    6:  12CO J 2-->1 Null Test
                    7:  12CO J 2-->1 Mask
                    8:  12CO J 3-->2 Intensity
                    9:  12CO J 3-->2 Intensity Error
                    10: 12CO J 3-->2 Null Test
                    11: 12CO J 3-->2 Mask
                'CO-Type2' fields:
                    0:  12CO J 1-->0 Intensity
                    1:  12CO J 1-->0 Intensity Error
                    2:  12CO J 1-->0 Null Test
                    3:  12CO J 1-->0 Mask
                    4:  12CO J 2-->1 Intensity
                    5:  12CO J 2-->1 Intensity Error
                    6:  12CO J 2-->1 Null Test
                    7:  12CO J 2-->1 Mask
                'CO-Type3' fields:
                    0:  12CO Intensity
                    1:  12CO Intensity Error
                    2:  12CO Null Test
                    3:  12CO Mask

            Broad-band (GHz): ['030', '044', '070', '100', '143', '217', '353',
                               '545', '857']
                Broad-band fields:
                    0: I stokes
                    1: Hits
                    2: II_cov

            Processed data products: ['Dust Opacity', 'Thermal']
                'Dust Opacity' fields:
                    0: Opacity 353GHz
                    1: Error on opacity
                    2: E(B-V)
                    3: Error on E(B-V)
                    4: T for high freq correction
                    5: Error on T
                    6: Beta for high freq correction
                    7: Error on Beta
                'Thermal' dust model fields:
                    0: Intensity
                    1: Intensity standard deviation
                    2: Intensity ??
                    3: Intensity ??

    longitude : array-like
        Lower and upper longitude. Default is whole sky.
    latitude : array-like
        Lower and upper latitude. Default is whole sky.
    field : int
        Field in data type.
    resolution : float
        Pixel resolution in arcseconds.
    cut_last_pixel : bool
        Cuts off one pixel
    return_header : bool
        Return the header?
    verbose : bool
        Verbose?
    reverse_xaxis : bool
        The highest x-axis value begins at the origin.
    dr_version : int
        Data release version of data.

    Returns
    -------
    map : array-like
        Map of extracted region from Planck data.
    header : dict, optional
        FITS format header.

    Examples
    --------
    >>> import planckpy as pl
    >>> import pyfits as pf
    >>> (data, header) = pl.get_data(data_type = '857', longitude_range =
            (155,165), latitude_range = (-30, -15))
    >>> data.shape
    (151, 101)
    >>> header['TYPE']
    'I_STOKES'
    >>> pf.writeto('planck_region_857GHz.fits', data, header = header)

    '''

    if data_type is None:
        print('WARNING (get_data): No data type chosen. Returning None type.')
        return None

    # Get the filename
    filename = get_planck_filename(data_type=data_type,
                                   data_location=data_location,
                                   dr_version=dr_version)

    if verbose:
        print('Reading file:\n%s' % (filename))

    # Read the map using healpy library
    map_data = healpy.read_map(filename, field=field, h=True)
    map_raw, header_raw = map_data[0], map_data[1]

    # Change format of healpy header to pyfits header
    #   ...an actually useful format
    header_pf = pf.Header()
    for item in header_raw:
        header_pf.append(item)

    # Get nside from HEALPix format, acts as resolution of HEALPix image
    nside = header_pf['NSIDE']

    # Set up longitude / latitude grid for extracting the region
    longitude_res, latitude_res = resolution, resolution
    pixel_count_long = int(round((longitude_range[1] - longitude_range[0]) /
        longitude_res)) + 1
    pixel_count_lat = int(round((latitude_range[1] - latitude_range[0]) /
        latitude_res)) + 1
    if cut_last_pixel:
        pixel_count_long -= 1
        pixel_count_lat -= 1

    # Write axes of l/b positions
    longitude_axis = longitude_range[0] + longitude_res * \
            np.arange(pixel_count_long)
    latitude_axis = latitude_range[0] + latitude_res * \
            np.arange(pixel_count_lat)

    # Create map of l/b positions
    longitude_grid = np.zeros(shape=(pixel_count_long, pixel_count_lat))
    latitude_grid = np.zeros(shape=(pixel_count_long, pixel_count_lat))
    for b in range(len(latitude_axis)):
        longitude_grid[:, b] = longitude_axis
    for l in range(len(longitude_axis)):
        latitude_grid[l, :] = latitude_axis

    # Convert from l / b in degrees to spherical theta / phi in radians
    phi_grid = longitude_grid / 180. * np.pi
    theta_grid = (90. - latitude_grid) / 180. * np.pi

    # Convert from angle to pixel
    pixel_indices = healpy.ang2pix(
        nside=nside,
        theta=theta_grid,
        phi=phi_grid,
    )

    # Map the column data to a 2d array
    map_region = map_raw[pixel_indices]

    # Omit the degenerate axes
    map_region = np.squeeze(map_region)

    # Transpose, and reverse the x-axis if requested
    if reverse_xaxis:
        map_region = map_region.T[::, ::-1]
    else:
        map_region = map_region.T

    # Build a header
    if return_header:
        header_region = build_header(header=header_pf,
                                     axes=(longitude_axis, latitude_axis),
                                     reverse_xaxis=reverse_xaxis,
                                     field=field)
        return map_region, header_region
    else:
        return map_region
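
The extraction above reduces to building a longitude/latitude grid, converting it to HEALPix angles, and fancy-indexing the RING-ordered map. A minimal, self-contained sketch of that step on a synthetic map (the nside and coordinate ranges here are arbitrary illustration values, not Planck defaults):

import numpy as np
import healpy

nside = 64
test_map = np.arange(healpy.nside2npix(nside), dtype=float)  # synthetic RING map

# 0.1 degree grid over an arbitrary region
longitude_axis = np.linspace(155., 165., 101)
latitude_axis = np.linspace(-30., -15., 151)
lon_grid, lat_grid = np.meshgrid(longitude_axis, latitude_axis, indexing='ij')

theta = np.radians(90. - lat_grid)   # colatitude in radians
phi = np.radians(lon_grid)
region = test_map[healpy.ang2pix(nside, theta, phi)]
print(region.shape)   # (101, 151)
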
Example #35
0
    def inject_scan(self, ra, dec, ns, poisson=True):
        r''' Run an all-sky scan using the event localization as a
        spatial prior, while also injecting signal events according
        to that localization.

        Parameters:
        -----------
        ra: float
            Right ascension at which to inject the signal events
        dec: float
            Declination at which to inject the signal events
        ns: float
            Number of signal events to inject
        poisson: bool
            If True, Poisson-fluctuate the number of signal events
            to be injected
        Returns:
        --------
        results: dict
            Best-fit ts, ns, gamma, ra and dec from the scan
        events: array
            Injected events within the on-time window, annotated with
            per-pixel fit values and 90% contour membership
        '''
        ### Set up spatial prior to be used in scan
        spatial_prior = SpatialPrior(self.skymap,
                                     allow_neg=self._allow_neg,
                                     containment=self._containment)
        pixels = np.arange(len(self.skymap))

        ## Perform all sky scan
        inj = PointSourceInjector(gamma=2, E0=1000.)
        inj.fill(dec,
                 self.llh.exp,
                 self.llh.mc,
                 self.llh.livetime,
                 temporal_model=self.llh.temporal_model)
        ni, sample = inj.sample(ra, ns, poisson=poisson)
        print('injected neutrino at:')
        print(np.rad2deg(sample['ra']), np.rad2deg(sample['dec']))

        val = self.llh.scan(0.0,
                            0.0,
                            scramble=False,
                            spatial_prior=spatial_prior,
                            time_mask=[self.duration / 2., self.centertime],
                            pixel_scan=[self.nside, self._pixel_scan_nsigma],
                            inject=sample)

        exp = self.llh.inject_events(self.llh.exp, sample)
        exp_theta = 0.5 * np.pi - exp['dec']
        exp_phi = exp['ra']
        exp_pix = hp.ang2pix(self.nside, exp_theta, exp_phi)
        overlap = np.isin(exp_pix, self.ipix_90)

        t_mask = (exp['time'] <= self.stop) & (exp['time'] >= self.start)
        events = exp[t_mask]

        # add field to see if neutrino is within 90% GW contour
        events = append_fields(events,
                               names=['in_contour', 'ts', 'ns', 'gamma', 'B'],
                               data=np.empty((5, events['ra'].size)),
                               usemask=False)

        for i in range(events['ra'].size):
            events['in_contour'][i] = overlap[i]

        for i in range(events['ra'].size):
            events['B'][i] = self.llh.llh_model.background(events[i])

        if val['TS'].size == 0:
            return (0, 0, 2.0, None)
        else:
            ts = val['TS_spatial_prior_0'].max()
            maxLoc = np.argmax(val['TS_spatial_prior_0'])
            ns = val['nsignal'][maxLoc]
            gamma = val['gamma'][maxLoc]
            ra = val['ra'][maxLoc]
            dec = val['dec'][maxLoc]

        val_pix = hp.ang2pix(self.nside, np.pi / 2. - val['dec'], val['ra'])
        for i in range(events['ra'].size):
            idx, = np.where(val_pix == exp_pix[t_mask][i])
            events['ts'][i] = val['TS_spatial_prior_0'][idx[0]]
            events['ns'][i] = val['nsignal'][idx[0]]
            events['gamma'][i] = val['gamma'][idx[0]]

        results = dict([('ts', ts), ('ns', ns), ('gamma', gamma), ('ra', ra),
                        ('dec', dec)])
        return (results, events)
Example #36
0
def _healpix_lookup(map, lon, lat):
    """Look up the value of a HEALPix map in the pixel containing the point
    with the specified longitude and latitude."""
    nside = hp.npix2nside(len(map))
    return map[hp.ang2pix(nside, 0.5 * np.pi - lat, lon)]
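
A hypothetical usage sketch mirroring the lookup above; the map is synthetic, and lon/lat are assumed to be in radians, matching the colatitude conversion inside the function:

import numpy as np
import healpy as hp

nside = 32
sky = np.random.rand(hp.nside2npix(nside))   # synthetic RING-ordered map

lon = np.radians(45.0)   # longitude in radians
lat = np.radians(30.0)   # latitude in radians
value = sky[hp.ang2pix(nside, 0.5 * np.pi - lat, lon)]
print(value)
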
Example #37
0
def healpix_to_image(healpix_data,
                     coord_system_in,
                     wcs_out,
                     shape_out,
                     order='bilinear',
                     nested=False):
    """
    Convert image in HEALPIX format to a normal FITS projection image (e.g.
    CAR or AIT).

    .. note:: This function uses healpy, which is licensed
              under the GPLv2, so any package using this function has to (for
              now) abide by the GPLv2 rather than the BSD license.

    Parameters
    ----------
    healpix_data : `numpy.ndarray`
        HEALPIX data array
    coord_system_in : str or `~astropy.coordinates.BaseCoordinateFrame`
        The coordinate system for the input HEALPIX data, as an Astropy
        coordinate frame or corresponding string alias (e.g. ``'icrs'`` or
        ``'galactic'``)
    wcs_out : `~astropy.wcs.WCS`
        The WCS of the output array
    shape_out : tuple
        The shape of the output array
    order : int or str, optional
        The order of the interpolation (if ``mode`` is set to
        ``'interpolation'``). This can be either one of the following strings:

            * 'nearest-neighbor'
            * 'bilinear'

        or an integer. A value of ``0`` indicates nearest neighbor
        interpolation.
    nested : bool
        If True, the input ``healpix_data`` uses NESTED pixel ordering;
        otherwise RING.  This ordering is stored in FITS headers in the
        ORDERING keyword.

    Returns
    -------
    reprojected_data : `numpy.ndarray`
        HEALPIX image resampled onto the reference image
    footprint : `~numpy.ndarray`
        Footprint of the input array in the output array. Values of 0 indicate
        no coverage from the input image, while values of 1 indicate valid
        values.
    """
    import healpy as hp

    healpix_data = np.asarray(healpix_data, dtype=float)

    # Look up lon, lat of pixels in reference system
    yinds, xinds = np.indices(shape_out)
    lon_out, lat_out = wcs_out.wcs_pix2world(xinds, yinds, 0)

    # Convert between celestial coordinates
    coord_system_in = parse_coord_system(coord_system_in)
    with np.errstate(invalid='ignore'):
        lon_in, lat_in = convert_world_coordinates(
            lon_out, lat_out, wcs_out, (coord_system_in, u.deg, u.deg))

    # Convert from lon, lat in degrees to colatitude theta, longitude phi,
    # in radians
    theta = np.radians(90. - lat_in)
    phi = np.radians(lon_in)

    # hp.ang2pix() raises an exception for invalid values of theta, so only
    # process values for which WCS projection gives non-nan value
    good = np.isfinite(theta)
    data = np.empty(theta.shape, healpix_data.dtype)
    data[~good] = np.nan

    if isinstance(order, six.string_types):
        order = ORDER[order]

    if order == 1:
        data[good] = hp.get_interp_val(healpix_data, theta[good], phi[good],
                                       nested)
    elif order == 0:
        npix = len(healpix_data)
        nside = hp.npix2nside(npix)
        ipix = hp.ang2pix(nside, theta[good], phi[good], nested)
        data[good] = healpix_data[ipix]
    else:
        raise ValueError(
            "Only nearest-neighbor and bilinear interpolation are supported")

    footprint = good.astype(int)

    return data, footprint
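
For illustration, a standalone sketch of what the order == 0 branch above does: look up each output pixel's sky position in the HEALPix map with nearest-neighbour sampling. The output grid here is faked in place of wcs_out.wcs_pix2world:

import numpy as np
import healpy as hp

nside = 16
healpix_data = np.arange(hp.nside2npix(nside), dtype=float)

# Fake lon/lat of the output pixels, in degrees
lon, lat = np.meshgrid(np.linspace(0., 359., 360), np.linspace(-89., 89., 179))
theta = np.radians(90. - lat)
phi = np.radians(lon)

reprojected = healpix_data[hp.ang2pix(nside, theta, phi)]
print(reprojected.shape)   # (179, 360)
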
Example #38
0
def plot_sky_binned(ra,
                    dec,
                    weights=None,
                    data=None,
                    plot_type='grid',
                    max_bin_area=5,
                    clip_lo=None,
                    clip_hi=None,
                    verbose=False,
                    cmap='viridis',
                    colorbar=True,
                    label=None,
                    basemap=None):
    """Show objects on the sky using a binned plot.
    Bin values either show object counts per unit sky area or, if an array
    of associated data values is provided, mean data values within each bin.
    Objects can have associated weights.
    Requires that matplotlib and basemap are installed. When plot_type is
    "healpix", healpy must also be installed.
    Parameters
    ----------
    ra : array
        Array of object RA values in degrees. Must have the same shape as
        dec and will be flattened if necessary.
    dec : array
        Array of object DEC values in degrees. Must have the same shape as
        ra and will be flattened if necessary.
    weights : array or None
        Optional array of weights associated with each object.  All objects
        are assumed to have equal weight when this is None.
    data : array or None
        Optional array of scalar values associated with each object. The
        resulting plot shows the mean data value per bin when data is
        specified.  Otherwise, the plot shows counts per unit sky area.
    plot_type : str
        Must be either 'grid' or 'healpix', and selects whether data is
        binned on a HEALPix map or on a (sin(DEC), RA) grid.
    max_bin_area : float
        The bin size will be chosen automatically to be as close as
        possible to this value but not exceeding it.
    clip_lo : float or str
        Clipping is applied to the plot data calculated as counts / area
        or the mean data value per bin. See :func:`prepare_data` for
        details.
    clip_hi : float or str
        Clipping is applied to the plot data calculated as counts / area
        or the mean data value per bin. See :func:`prepare_data` for
        details.
    verbose : bool
        Print information about the automatic bin size calculation.
    cmap : colormap name or object
        Matplotlib colormap to use for mapping data values to colors.
    colorbar : bool
        Draw a colorbar below the map when True.
    label : str or None
        Label to display under the colorbar.  Ignored unless colorbar is True.
    basemap : Basemap object or None
        Use the specified basemap or create a default basemap using
        :func:`init_sky` when None.
    Returns
    -------
    basemap
        The basemap used for the plot, which will match the input basemap
        provided, or be a newly created basemap if None was provided.
    """
    ra = np.asarray(ra).reshape(-1)
    dec = np.asarray(dec).reshape(-1)
    if len(ra) != len(dec):
        raise ValueError('Arrays ra,dec must have same size.')

    plot_types = (
        'grid',
        'healpix',
    )
    if plot_type not in plot_types:
        raise ValueError('Invalid plot_type, should be one of {0}.'.format(
            ', '.join(plot_types)))

    if data is not None and weights is None:
        weights = np.ones_like(data)

    if plot_type == 'grid':
        # Convert the maximum pixel area to steradians.
        max_bin_area = max_bin_area * (np.pi / 180.)**2

        # Pick the number of bins in cos(DEC) and RA to use.
        n_cos_dec = int(np.ceil(2 / np.sqrt(max_bin_area)))
        n_ra = int(np.ceil(4 * np.pi / max_bin_area / n_cos_dec))
        # Calculate the actual pixel area in sq. degrees.
        bin_area = 360**2 / np.pi / (n_cos_dec * n_ra)
        if verbose:
            print(
                'Using {0} x {1} grid in cos(DEC) x RA'.format(
                    n_cos_dec, n_ra),
                'with pixel area {:.3f} sq.deg.'.format(bin_area))

        # Calculate the bin edges in degrees.
        ra_edges = np.linspace(-180., +180., n_ra + 1)
        dec_edges = np.degrees(np.arcsin(np.linspace(-1., +1., n_cos_dec + 1)))

        # Put RA values in the range [-180, 180).
        ra = np.fmod(ra, 360.)
        ra[ra >= 180.] -= 360.

        # Histogram the input coordinates.
        counts, _, _ = np.histogram2d(dec,
                                      ra, [dec_edges, ra_edges],
                                      weights=weights)

        if data is None:
            grid_data = counts / bin_area
        else:
            sums, _, _ = np.histogram2d(dec,
                                        ra, [dec_edges, ra_edges],
                                        weights=weights * data)
            # This ratio might result in some nan (0/0) or inf (1/0) values,
            # but these will be masked by prepare_data().
            settings = np.seterr(all='ignore')
            grid_data = sums / counts
            np.seterr(**settings)

        grid_data = prepare_data(grid_data, clip_lo=clip_lo, clip_hi=clip_hi)

        basemap = plot_grid_map(grid_data, ra_edges, dec_edges, cmap, colorbar,
                                label, basemap)

    elif plot_type == 'healpix':

        import healpy as hp

        for n in range(1, 25):
            nside = 2**n
            bin_area = hp.nside2pixarea(nside, degrees=True)
            if bin_area <= max_bin_area:
                break
        npix = hp.nside2npix(nside)
        nest = False
        if verbose:
            print('Using healpix map with NSIDE={0}'.format(nside),
                  'and pixel area {:.3f} sq.deg.'.format(bin_area))

        pixels = hp.ang2pix(nside, np.radians(90 - dec), np.radians(ra), nest)
        counts = np.bincount(pixels, weights=weights, minlength=npix)
        if data is None:
            grid_data = counts / bin_area
        else:
            sums = np.bincount(pixels, weights=weights * data, minlength=npix)
            grid_data = np.zeros_like(sums, dtype=float)
            nonzero = counts > 0
            grid_data[nonzero] = sums[nonzero] / counts[nonzero]

        grid_data = prepare_data(grid_data, clip_lo=clip_lo, clip_hi=clip_hi)

        basemap = plot_healpix_map(grid_data, nest, cmap, colorbar, label,
                                   basemap)

    return basemap
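
The NSIDE selection and per-pixel counting used by the 'healpix' branch above can be exercised on their own; a small sketch with uniformly scattered fake points (no plotting):

import numpy as np
import healpy as hp

max_bin_area = 5.0   # sq. deg.
for n in range(1, 25):
    nside = 2 ** n
    bin_area = hp.nside2pixarea(nside, degrees=True)
    if bin_area <= max_bin_area:
        break

ra = 360.0 * np.random.rand(10000)                              # degrees
dec = np.degrees(np.arcsin(2.0 * np.random.rand(10000) - 1.0))  # degrees
pixels = hp.ang2pix(nside, np.radians(90.0 - dec), np.radians(ra))
counts = np.bincount(pixels, minlength=hp.nside2npix(nside))
density = counts / bin_area                                      # objects per sq. deg.
print(nside, density.mean())
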
Example #39
0
def plot_healpix_map(data,
                     nest=False,
                     cmap='viridis',
                     colorbar=True,
                     label=None,
                     basemap=None,
                     vlimits=None):
    """Plot a healpix map using an all-sky projection.
    Pass the data array through :func:`prepare_data` to select a subset to plot
    and clip the color map to specified values or percentiles.
    This function is similar to :func:`plot_grid_map` but is generally slower
    at high resolution and has less elegant handling of pixels that wrap around
    in RA, which are not drawn.
    Requires that matplotlib, basemap, and healpy are installed.
    Parameters
    ----------
    data : array or masked array
        1D array of data associated with each healpix.  Must have a size that
        exactly matches the number of pixels for some NSIDE value. Use the
        output of :func:`prepare_data` as a convenient way to specify
        data cuts and color map clipping.
    nest : bool
        If True, assume NESTED pixel ordering.  Otherwise, assume RING pixel
        ordering.
    cmap : colormap name or object
        Matplotlib colormap to use for mapping data values to colors.
    colorbar : bool
        Draw a colorbar below the map when True.
    label : str or None
        Label to display under the colorbar.  Ignored unless colorbar is True.
    basemap : Basemap object or None
        Use the specified basemap or create a default basemap using
        :func:`init_sky` when None.
    Returns
    -------
    basemap
        The basemap used for the plot, which will match the input basemap
        provided, or be a newly created basemap if None was provided.
    """
    import healpy as hp
    import matplotlib.pyplot as plt
    import matplotlib.colors
    from matplotlib.collections import PolyCollection

    data = prepare_data(data)
    if len(data.shape) != 1:
        raise ValueError('Invalid data array, should be 1D.')
    nside = hp.npix2nside(len(data))

    if basemap is None:
        basemap = init_sky()

    # Get pixel boundaries as quadrilaterals.
    corners = hp.boundaries(nside, np.arange(len(data)), step=1, nest=nest)
    corner_theta, corner_phi = hp.vec2ang(corners.transpose(0, 2, 1))
    corner_ra, corner_dec = (np.degrees(corner_phi),
                             np.degrees(np.pi / 2 - corner_theta))
    # Convert sky coords to map coords.
    x, y = basemap(corner_ra, corner_dec)
    # Regroup into pixel corners.
    verts = np.array([x.reshape(-1, 4), y.reshape(-1, 4)]).transpose(1, 2, 0)

    # Find and mask any pixels that wrap around in RA.
    uv_verts = np.array(
        [corner_phi.reshape(-1, 4),
         corner_theta.reshape(-1, 4)]).transpose(1, 2, 0)
    theta_edge = np.unique(uv_verts[:, :, 1])
    phi_edge = np.radians(basemap.lonmax)
    eps = 0.1 * np.sqrt(hp.nside2pixarea(nside))
    wrapped1 = hp.ang2pix(nside, theta_edge, phi_edge - eps, nest=nest)
    wrapped2 = hp.ang2pix(nside, theta_edge, phi_edge + eps, nest=nest)
    wrapped = np.unique(np.hstack((wrapped1, wrapped2)))
    data.mask[wrapped] = True

    # Normalize the data using its vmin, vmax attributes, if present.
    try:
        if vlimits is None:
            norm = matplotlib.colors.Normalize(vmin=data.vmin, vmax=data.vmax)
        else:
            norm = matplotlib.colors.Normalize(vmin=vlimits[0],
                                               vmax=vlimits[1])
    except AttributeError:
        norm = None

    # Make the collection and add it to the plot.
    collection = PolyCollection(verts,
                                array=data,
                                cmap=cmap,
                                norm=norm,
                                edgecolors='none')

    axes = plt.gca() if basemap.ax is None else basemap.ax
    axes.add_collection(collection)
    axes.autoscale_view()

    if colorbar:
        bar = plt.colorbar(collection,
                           ax=basemap.ax,
                           orientation='horizontal',
                           spacing='proportional',
                           pad=0.01,
                           aspect=50)
        if label:
            bar.set_label(label)

    return basemap
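
The corner-extraction step above can be checked in isolation, without basemap; this sketch just converts the pixel boundary vectors into RA/Dec quadrilaterals:

import numpy as np
import healpy as hp

nside = 8
corners = hp.boundaries(nside, np.arange(hp.nside2npix(nside)), step=1)
corner_theta, corner_phi = hp.vec2ang(corners.transpose(0, 2, 1))
corner_ra = np.degrees(corner_phi).reshape(-1, 4)
corner_dec = np.degrees(np.pi / 2 - corner_theta).reshape(-1, 4)
print(corner_ra.shape)   # (768, 4): one quadrilateral per pixel
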
Example #40
0
    truemap = h.alm2map(almtrue, nside, lmax=lmax)
    truemap *= strength / max(truemap)

    print "========="
    print "Dipole %d" % dipole
    maxi, maxang, mini, minang = GetMaxMin(truemap)
    D[0, dipole] = 90. - maxang[0] / degree

    # Strength for RA average subtracted
    # Loop over possible detector latitudes
    for id1, d1 in enumerate(d1s):
        for id2, d2 in enumerate(d2s):

            print(np.pi / 2. - d2 * degree)
            print(np.pi / 2. - d1 * degree)
            pixLo = h.ang2pix(nside, np.pi / 2. - d2 * degree, 0.)
            pixHi = h.ang2pix(nside, np.pi / 2. - d1 * degree, 0.)

            out = h.anafast(truemap, alm=True, lmax=lmax)
            for i in range(0, lmax + 1):
                index = h.sphtfunc.Alm.getidx(lmax, i, 0)
                out[1][index] = 0.0
            reducedmap = h.alm2map(out[1], nside, lmax=lmax)
            reducedmap[0:pixLo] = 0.
            reducedmap[pixHi:npix] = 0.
            fovPix = pixHi - pixLo

            # 'Fit' the dipole
            almreducedfit = h.map2alm(reducedmap, lmax=1)
            reducedfitmap = h.alm2map(almreducedfit, nside, lmax=1)
            maxi, maxang, mini, minang = GetMaxMin(reducedfitmap)
Example #41
0
mat_contents = sio.loadmat('data/DES.mat')

Nside = 512
L_hp = 4 * Nside
L_mw = 2160  # 2160
Iterate = True

save_figs = True
show_figs = True

Npix = hp.nside2npix(Nside)
ra = np.ascontiguousarray(mat_contents['RA'])
ra_flip = (-1) * (ra - 71.) + 71.
dec = np.ascontiguousarray(mat_contents['dec'])
theta, phi = ssht.ra_dec_to_theta_phi(ra_flip, dec, Degrees=True)
pixnum = hp.ang2pix(Nside, theta, phi)

e1 = np.ascontiguousarray(mat_contents['e1'])
e2 = np.ascontiguousarray(mat_contents['e2'])
e2_flip = e2.copy() * (-1)

mcorr = np.ascontiguousarray(mat_contents['mcorr'])
c1 = np.ascontiguousarray(mat_contents['c1'])
c2 = np.ascontiguousarray(mat_contents['c2'])
c2_flip = c2.copy() * (-1)
weight = np.ascontiguousarray(mat_contents['weight'])

alpha = -21 + 180
beta = -37
g = 90
Example #42
0
if opts.mapnpz is None:
	#define sky -- completely arbitrary choice of temp
	uniform_sky = np.ones(npix)*100.#*u.K

	#completely arbitrary choice of noise level XXX UN-USED RIGHT NOW
	noise = np.zeros(npix)
	for i in range(npix): noise[i] = random.uniform(-100,100)#* u.K

	####uniform sky tests and point source tests
	if opts.map is None: sky=uniform_sky
	elif opts.map == 'point':
		sky = np.zeros(npix)
		#define a point source
		theta0 = np.pi/2.
		phi0 = 0.
		pix0 = hp.ang2pix(nside,theta0,phi0)
		#make it slightly less point-like
		nbs = hp.get_all_neighbours(nside,theta0,phi=phi0)
		sky[pix0]=500#*u.K
		for nb in nbs: sky[nb]=500#*u.K
		sky = rotate_hmap(sky,[180,0])
	else: sky = hp.read_map(opts.map)

	#promote sky to matrix for frequency axis
	sky = np.outer(np.ones(nfreq),sky)*pow(nu/150e6,-0.7)

	#decompose sky into alm's
	n_alm = len(m)
	alm = np.zeros((nfreq,n_alm),dtype='complex128')
	print('Calculating sky a_lm values:')
	for i in range(nfreq):
Example #43
0
if nMaps != len(mapRas):
    raise ValueError
print(nMaps)
goodMap = aveTools.onedl(nMaps)
if p['cutoutMask']:

    roundMask = np.round(mask)
else:  #just set to ones
    roundMask = np.ones(len(healpixMapT))

mapsT = aveTools.onedl(nMaps)
mapsQ = aveTools.onedl(nMaps)
mapsU = aveTools.onedl(nMaps)

centerPixes = healpy.ang2pix(p['mapNside'],
                             np.pi * (90 - mapDecs) / 180.,
                             np.pi * mapRas / 180.)

for i in np.arange(nMaps):

    if roundMask[centerPixes[i]] == 1:
        goodMap[i] = True

        print('good data at ra = %3.1f, dec = %3.1f, pixel %i' %
              (mapRas[i], mapDecs[i], i))

        mapsT[i] = liteMapNickHand.getEmptyMapAtLocation(tempfilename,
                                                         mapRas[i],
                                                         mapDecs[i])
        mapsT[i].loadDataFromHealpixMap(healpixMapT)

        mapsQ[i] = mapsT[i].copy()
Example #44
0
if not os.path.isfile('{}.csv'.format(args.survey)):

    fbias_rel_err = np.zeros((n_source_bins, n_lens_bins))

    for source_bin in range(n_source_bins):

        print('Working on source bin {}...'.format(source_bin))

        table_s = zebu.read_raw_data(stage,
                                     'source',
                                     source_bin,
                                     survey=args.survey)

        table_s['w_sys'] = 1
        table_s['pix'] = healpy.ang2pix(nside,
                                        table_s['ra'],
                                        table_s['dec'],
                                        lonlat=True)
        all_pixs = np.unique(table_s['pix'])
        use_pixs = np.zeros(0, dtype=int)

        for pix in all_pixs:
            near_pixs = healpy.pixelfunc.get_all_neighbours(nside, pix)
            if np.all(np.isin(near_pixs, all_pixs)):
                use_pixs = np.append(use_pixs, pix)

        table_s = table_s[np.isin(table_s['pix'], use_pixs)]
        table_s = add_maximum_lens_redshift(table_s, dz_min=0.15)

        if 'd_com' not in table_s.colnames:
            table_s['d_com'] = zebu.cosmo.comoving_transverse_distance(
                table_s['z']).to(u.Mpc).value
Example #45
0
def worker_smallsky(snapshot, z1, z2, batch_index, batches, omega_m, omega_l, boxsize, nside, randomize, seed):
    """
    Loads partial batch from snapshot, replicates (along one axis), randomizes if necessary and returns the projected
    pixel positions of the particles within the given shell.

    Args:
        snapshot: Snapshot instance
        z1: inner shell redshift
        z2: outer shell redshift
        batch_index: batch index
        batches: total number of batches
        omega_m: dark matter density parameter
        omega_l: dark energy density parameter
        boxsize: snapshot boxsize
        nside: HEALPix NSIDE
        randomize: randomization flag
        seed: randomization seed

    Returns: list of particle positions within the given shell on a HEALPix map

    """
    particles = snapshot.batch_load(batch_index, batches)

    shell_min = d_c(0, z1, omega_m, omega_l) / boxsize
    shell_max = d_c(0, z2, omega_m, omega_l) / boxsize

    replications = int(np.ceil(shell_max))
    replications_boxes = replications

    # randomization parameters
    if randomize:
        np.random.seed(seed)
        rand_f = [np.random.randint(2, size=3) for _ in range(0, replications_boxes)]
        rand_r = [np.random.randint(4, size=2) * np.pi / 2 for _ in range(0, replications_boxes)]
        rand_t = [np.random.rand(3) for _ in range(0, replications_boxes)]

    # replicated particle array
    _particles = np.zeros((replications_boxes * len(particles), 3), dtype=np.float32)

    box_index = 0
    for i in range(0, replications):
            box_slice = slice(box_index * len(particles), (box_index + 1) * len(particles))

            _particles[box_slice] = particles

            # randomize
            if randomize:
                # transformation matrices for flipping/rotating
                mat_f = np.matrix([
                    [1 - 2 * rand_f[box_index][0], 0, 0],
                    [0, 1 - 2 * rand_f[box_index][1], 0],
                    [0, 0, 1 - 2 * rand_f[box_index][2]]
                ])

                mat_rx = np.matrix([
                    [1, 0, 0],
                    [0, np.cos(rand_r[box_index][0]), -np.sin(rand_r[box_index][0])],
                    [0, np.sin(rand_r[box_index][0]), np.cos(rand_r[box_index][0])]
                ])

                mat_ry = np.matrix([
                    [np.cos(rand_r[box_index][1]), 0, np.sin(rand_r[box_index][1])],
                    [0, 1, 0],
                    [-np.sin(rand_r[box_index][1]), 0, np.cos(rand_r[box_index][1])]
                ])

                transform = np.dot(np.dot(mat_f, mat_rx), mat_ry)

                _particles[box_slice] = np.dot(_particles[box_slice], transform)

                _particles[box_slice] += rand_t[box_index]

                # wrap particles into their boundaries
                for v in range(0, 3):
                    _particles[box_slice][:, v][np.where(_particles[box_slice][:, v] < -0.5)] += 1
                    _particles[box_slice][:, v][np.where(_particles[box_slice][:, v] > 0.5)] -= 1

            # offset box position
            _particles[box_slice][:, 0] += i

            box_index += 1

    # select particles within shell
    observer = np.array([-0.5, 0, 0])
    particles = _particles - observer
    dist = np.sqrt(np.sum(particles**2, axis=1))
    shell = np.array(dist > shell_min) & np.array(dist < shell_max)

    # calculate and return the HEALPix positions for all particles within the shell
    theta = np.arccos(particles[shell][:, 2] / np.sqrt(np.sum((particles[shell]) ** 2, axis=1)), dtype=np.float32)
    phi = np.arctan2(particles[shell][:, 1], particles[shell][:, 0], dtype=np.float32)
    pixels = hp.ang2pix(nside, theta, phi, nest=False)

    return pixels
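
The final projection step of the worker (Cartesian particle positions relative to the observer, converted to colatitude/longitude and then to a HEALPix pixel) in isolation, using fake particle positions:

import numpy as np
import healpy as hp

nside = 64
particles = np.random.uniform(-0.5, 0.5, size=(1000, 3)) + np.array([1.0, 0.0, 0.0])

r = np.sqrt(np.sum(particles ** 2, axis=1))
theta = np.arccos(particles[:, 2] / r)
phi = np.arctan2(particles[:, 1], particles[:, 0])
pixels = hp.ang2pix(nside, theta, phi, nest=False)

hit_map = np.bincount(pixels, minlength=hp.nside2npix(nside))
print(hit_map.sum())   # 1000
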
Example #46
0
# Pixel Count for Healpy
nside = 32
npix = hp.nside2npix(nside)

# Initial Sky Array
n = np.zeros(npix)

# Ranges for Spherical Polar Plotting
thetas = np.linspace(0, np.pi, npix)
phis = np.linspace(0, 2 * np.pi, npix)

thetalim = 5 * np.pi / 8  # 'Aperture Width' of Beam
func = 1 - (((thetas - np.pi) / (thetalim - np.pi))**2)  # Beam Function
for j in phis:
    pixels = hp.ang2pix(nside, thetas, j)
    n[pixels] = func[pixels]
    n[n < 0] = 0
filter_array = n  # This is the filter to apply to GSM

(latitude, longitude, elevation) = ('-32.998370', '148.263659', 100)  # Near EDGES site
delta_t = 60  # EDGES antenna takes a number of measurements in 24 hours; this is the time between measurements in minutes
sky_array = []
timer = datetime(2018, 1, 1, 0, 0)
while timer.hour < 23:
    gsm = GSMObserver()
    gsm.lon = longitude
    gsm.lat = latitude
    gsm.elev = elevation
    gsm.date = timer
Example #47
0
        import sys, time

        import fitsio, healpy as hp
        import numpy as np
        from numpy.lib.recfunctions import append_fields


        #job_server_address = ("dbwebdev.fnal.gov", 8765) #development
        job_server_address = ("ifdb01.fnal.gov", 8765) #production

        session = Session(job_server_address)

        input_file = sys.argv[1]
        input_filename = input_file.rsplit("/",1)[-1].rsplit(".",1)[-1]

        with T["fits/read"]:
                input_data = fitsio.read(input_file, ext=2, columns=["ALPHAWIN_J2000","DELTAWIN_J2000"])
        with T["hpix"]:
                hpix = hp.ang2pix(nside=16384,theta=input_data['ALPHAWIN_J2000'],phi=input_data['DELTAWIN_J2000'],
                        lonlat=True, nest=True)

        hpix = np.asarray(hpix, np.float64)
        input_data = append_fields(input_data, "HPIX", hpix)
        input_data = np.sort(input_data, order="HPIX")

        input_data = np.array(list(zip(input_data['ALPHAWIN_J2000'], input_data['DELTAWIN_J2000'], input_data['HPIX'])))
        matches = []

        class Callback:

            def on_streams_update(self, nevents, data):
                        if "matches" in data:
                            for m in data["matches"]:
                                    matches.append(m)
                                    for obs_i, cat_id, obs_ra, obs_dec, cat_ra, cat_dec in m:
Example #48
0
    def get_map_values(self, index, ra, dec):

        pix = hp.ang2pix(self.nside, (-dec + 90.) * N.pi / 180,
                         ra * N.pi / 180,
                         nest=1)
        return self.syst_maps['values'][index, pix]
Example #49
0
def generate_randoms(mask,
                     max_ndraw,
                     ra_min=0,
                     ra_max=360,
                     dec_min=-90,
                     dec_max=90,
                     factor=2):
    """
    Generate random points within the footprint

    This can be used to generate points distributed uniformly over the entire
    footprint for generating a CDF from which to draw mock galaxy positions. It
    does not generate the CDF, it merely draws positions from a uniform density
    field with the mask applied. Unlike :func:`~.generate_random_points`, this
    function generates the points itself rather than calling :mod:`healpix_util`

    You should make sure that ``factor`` is large enough that the number of
    points generated here, ``int(max_ndraw * factor)``, is enough to sample the
    CDF well when ``max_ndraw`` is the maximum final number of mock galaxies to
    be drawn in any redshift bin. It needs to be larger than 1, but there is no
    guarantee that the default (2) is enough.

    :param mask: The pixel coverage mask
    :type mask: :class:`lsssys.Mask` or :class:`lsssys.HealMask`
    :param max_ndraw: The maximum number of mock galaxies that will be drawn
        in any redshift bin. This way, the set of points generated here can be
        used to generate the CDF for all of the redshift bins
    :type max_ndraw: ``int``
    :param ra_min: The minimum right ascension (in degrees) in which the data
        will be, to prevent drawing points far from the region of interest.
        Default 0.0
    :type ra_min: ``float``, optional
    :param ra_max: The maximum right ascension (in degrees) in which the data
        will be, to prevent drawing points far from the region of interest.
        Default 360.0
    :type ra_max: ``float``, optional
    :param dec_min: The minimum declination (in degrees) in which the data
        will be, to prevent drawing points far from the region of interest.
        Default -90.0
    :type dec_min: ``float``, optional
    :param dec_max: The maximum declination (in degrees) in which the data
        will be, to prevent drawing points far from the region of interest.
        Default 90.0
    :type dec_max: ``float``, optional
    :param factor: The multiplicative factor on ``max_ndraw`` that sets how many
        points should be generated here. This should probably be at least 2, but
        that is not checked. Default 2
    :type factor: ``int`` or ``float``, optional
    :return: An array of right ascension and an array of declination that can be
        used with weights to draw mock galaxy positions from the non-uniform
        density field with or without systematics
    :rtype: 2-``tuple`` of (``int(factor * max_ndraw)``,) :class:`numpy.ndarray`
        of ``float``
    """
    ra = np.zeros(int(factor * max_ndraw))
    dec = np.zeros_like(ra)
    nleft = ra.size
    nside = mask.nside
    if 0 < ra_max < ra_min:
        shift_ra = True
        ra_range = [ra_min - 360, ra_max]
    else:
        shift_ra = False
        ra_range = [ra_min, ra_max]
    while nleft > 0:
        # print("Number of points still needed:", nleft, flush=True)
        # print("Sphere point pick", flush=True)
        new_ra, new_dec = random_points_on_sphere(10 * ra.size, ra_range,
                                                  [dec_min, dec_max])
        if shift_ra:
            new_ra[new_ra < 0] += 360.0
        # print("Figure out which points are on good pixels", flush=True)
        keep = np.where(
            ~mask.mask[hp.ang2pix(nside, new_ra, new_dec, lonlat=True)])[0]
        if keep.size > 0:
            # print(
            #     "Number of points on good pixels:", keep.size, flush=True)
            i_start = ra.size - nleft
            nkeep = min(nleft, keep.size)
            i_stop = i_start + nkeep
            if nkeep < keep.size:
                keep = np.random.choice(keep, size=nkeep, replace=False)
            # print("Add new points", flush=True)
            ra[i_start:i_stop] = new_ra[keep]
            dec[i_start:i_stop] = new_dec[keep]
            # print("Adjust nleft", flush=True)
            nleft -= nkeep
    return ra, dec
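
The rejection-sampling idea above can be shown with a plain boolean HEALPix mask instead of an lsssys mask object (a toy half-sky mask is assumed here):

import numpy as np
import healpy as hp

nside = 64
npix = hp.nside2npix(nside)
bad = np.zeros(npix, dtype=bool)   # toy mask: True means the pixel is masked
bad[:npix // 2] = True

ndraw = 10000
ra = 360.0 * np.random.rand(ndraw)                              # degrees
dec = np.degrees(np.arcsin(2.0 * np.random.rand(ndraw) - 1.0))  # degrees
keep = ~bad[hp.ang2pix(nside, ra, dec, lonlat=True)]
ra, dec = ra[keep], dec[keep]
print(ra.size, 'points kept inside the footprint')
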
Example #50
0
    def characteristic_density(self, iso_sel):
        """
        Compute the characteristic density of a region
        Convolve the field and find overdensity peaks
        """

        x, y = self.proj.sphereToImage(self.data[self.survey.catalog['basis_1']][iso_sel], self.data[self.survey.catalog['basis_2']][iso_sel]) # Trimmed magnitude range for hotspot finding
        #x_full, y_full = proj.sphereToImage(data[basis_1], data[basis_2]) # If we want to use full magnitude range for significance evaluation
        delta_x = 0.01
        area = delta_x**2
        smoothing = 2. / 60. # Was 3 arcmin
        bins = np.arange(-8., 8. + 1.e-10, delta_x)
        centers = 0.5 * (bins[0: -1] + bins[1:])
        yy, xx = np.meshgrid(centers, centers)
    
        h = np.histogram2d(x, y, bins=[bins, bins])[0]
    
        h_g = scipy.ndimage.filters.gaussian_filter(h, smoothing / delta_x)
    
        #cut_goodcoverage = (data['NEPOCHS_G'][cut_magnitude_threshold] >= 2) & (data['NEPOCHS_R'][cut_magnitude_threshold] >= 2)
        # expect NEPOCHS to be good in DES data
    
        delta_x_coverage = 0.1
        area_coverage = (delta_x_coverage)**2
        bins_coverage = np.arange(-5., 5. + 1.e-10, delta_x_coverage)
        h_coverage = np.histogram2d(x, y, bins=[bins_coverage, bins_coverage])[0]
        #h_goodcoverage = np.histogram2d(x[cut_goodcoverage], y[cut_goodcoverage], bins=[bins_coverage, bins_coverage])[0]
        h_goodcoverage = np.histogram2d(x, y, bins=[bins_coverage, bins_coverage])[0]
    
        n_goodcoverage = h_coverage[h_goodcoverage > 0].flatten()
    
        #characteristic_density = np.mean(n_goodcoverage) / area_coverage # per square degree
        characteristic_density = np.median(n_goodcoverage) / area_coverage # per square degree
        print('Characteristic density = {:0.1f} deg^-2'.format(characteristic_density))
    
        # Use pixels with fracdet ~1.0 to estimate the characteristic density
        if self.fracdet is not None:
            fracdet_zero = np.tile(0., len(self.fracdet))
            cut = (self.fracdet != hp.UNSEEN)
            fracdet_zero[cut] = self.fracdet[cut]
    
            nside_fracdet = hp.npix2nside(len(self.fracdet))
            
            subpix_region_array = []
            for pix in np.unique(hp.ang2pix(self.nside,
                                            self.data[self.survey.catalog['basis_1']][iso_sel],
                                            self.data[self.survey.catalog['basis_2']][iso_sel],
                                            lonlat=True)):
                subpix_region_array.append(subpixel(pix, self.nside, nside_fracdet))
            subpix_region_array = np.concatenate(subpix_region_array)
    
            # Compute mean fracdet in the region so that this is available as a correction factor
            cut = (self.fracdet[subpix_region_array] != hp.UNSEEN)
            mean_fracdet = np.mean(self.fracdet[subpix_region_array[cut]])
    
            # Correct the characteristic density by the mean fracdet value
            characteristic_density_raw = 1. * characteristic_density
            characteristic_density /= mean_fracdet 
            print('Characteristic density (fracdet corrected) = {:0.1f} deg^-2'.format(characteristic_density))
    
        return(characteristic_density)
Example #51
0
start = time.time()
NSIDE = 256
NPIX = hp.nside2npix(NSIDE)
NPIX
day = 60 * 60 * 24  # number of seconds in one day
year = day * 365
times = year + 1
time_array = np.arange(0, times, 1)

print("Calcurate orbit...")
#orbit = spin_prec(time_array)
#orbit = hp.vec2ang(orbit)
orbit_file = "/Users/yusuke/program/py_program/CMB/skymap/regenerate_s/orbit_angle.npz"
orbit = np.load(orbit_file)
orbit = np.array([orbit["theta"][:times], orbit["phi"][:times]])
pix = hp.ang2pix(NSIDE, orbit[0], orbit[1])

# The histogram step takes a while
print("Calculate orbit histogram...")
hit_pix, bins = np.histogram(pix, bins=NPIX)
"""Read the Planck map and analyze it"""
print("Reading Planck data...")
file_path = "/Users/yusuke/program/py_program/CMB/skymap/data/LFI_SkyMap_030-BPassCorrected_0256_R2.01_full.fits"
I_planck = hp.read_map(file_path, field=(0, 1, 2),
                       dtype=np.float32)  # Planck data in RING ordering
I_obs = [I_planck[0][pix], I_planck[1][pix],
         I_planck[2][pix]]  # time-ordered data: Planck values reordered by observed pixel
I_obs
I_map = np.zeros((3, NPIX))
I_map[0][pix[:]] = I_planck[0][pix]
I_map[1][pix[:]] = I_planck[1][pix]
Example #52
0
def written_as_a_function_to_save_memory(z_bins, randoms, result):
    print('loading SuperCosmos catalog and mask...')

    catalog = np.load('datalog/wisecatalog_z.npy')  #ra,deg,z

    #print(catalog.shape)

    scosmask = healpy.read_map('WISExSCOSmask.fits')

    num = 30000000
    #coord=SkyCoord(catalog[0],catalog[1],frame='galactic',unit='deg').icrs

    #catalog[0],catalog[1]=coord.ra.deg,coord.dec.deg
    coord = SkyCoord(catalog[0], catalog[1], frame='icrs', unit='deg').galactic
    l, b = coord.l.deg, coord.b.deg

    catalog = catalog[:, scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]
    '''
    print(catalog.shape)
    print('z_min:',np.min(catalog[2]),'z_max:',np.max(catalog[2]))
    plt.hist(catalog[2],100)
    plt.xlabel('z')
    plt.ylabel('# of galaxies')
    plt.title('SuperCosmos z histogram(masked)')
    plt.show()

    plt.scatter(catalog[0],catalog[1],c=catalog[2],s=0.005,cmap='rainbow',edgecolors='none')
    plt.xlabel('RA(deg)')
    plt.ylabel('DEC(deg)')
    plt.title('SuperCosmos redshift scatter plot(masked)')
    plt.colorbar()
    plt.show()

    raw_input=()
    '''
    catalog = catalog[:, catalog[2].argsort()]

    if z_bins == 3:
        catalog = catalog[:, catalog[0].size // 4 * z_bins:]
    else:
        catalog = catalog[:, catalog[0].size // 4 * z_bins:catalog[0].size //
                          4 * (z_bins + 1)]

    if z_bins == 0:
        catalog = catalog[:, catalog[2] >= 0.01]

    #print('bin',z_bins,'size',catalog[2].shape,'z range',catalog[2,0],'~',catalog[2,-1])
    #catalog=catalog[:,catalog[0].size]

    #cat_galaxy=treecorr.Catalog(ra=catalog[0],dec=catalog[1],ra_units='deg',dec_units='deg',k=np.ones(catalog[0].size))
    cat_galaxy = treecorr.Catalog(ra=catalog[0],
                                  dec=catalog[1],
                                  ra_units='deg',
                                  dec_units='deg')

    print('Done!\n')

    print('generating random galaxy catalog')
    #plt.scatter(catalog[0],catalog[1],s=0.01)
    #plt.xlabel('RA(deg)')
    #plt.ylabel('DEC(deg)')
    #plt.show()
    ra_min = np.min(cat_galaxy.ra)
    ra_max = np.max(cat_galaxy.ra)
    dec_min = np.min(cat_galaxy.dec)
    dec_max = np.max(cat_galaxy.dec)
    print('ra range = %f .. %f' % (ra_min, ra_max))
    print('dec range = %f .. %f' % (dec_min, dec_max))

    rand_ra = np.random.uniform(ra_min, ra_max, num)
    rand_sindec = np.random.uniform(np.sin(dec_min), np.sin(dec_max), num)
    rand_dec = np.arcsin(rand_sindec)

    coord = SkyCoord(rand_ra, rand_dec, frame='icrs', unit='rad').galactic
    l, b = coord.l.deg, coord.b.deg

    rand_ra = rand_ra[scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]
    rand_dec = rand_dec[scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]

    #plt.scatter(np.rad2deg(rand_ra),np.rad2deg(rand_dec),s=0.01)
    #plt.xlabel('RA(deg)')
    #plt.ylabel('DEC(deg)')
    #plt.show()
    print('Done!\n')

    cat_rand = treecorr.Catalog(ra=rand_ra,
                                dec=rand_dec,
                                ra_units='radians',
                                dec_units='radians')

    #load planck data
    print('loading Planck catalog and mask...')

    planckdata = fits.open('COM_CMB_IQU-smica-nosz_2048_R3.00_full.fits')

    planckmask = fits.open('HFI_Mask_GalPlane-apo0_2048_R2.00.fits')

    planckImap = planckdata[1].data['I_STOKES']

    planckImask = planckmask[1].data['GAL080']

    planckdata.close()
    planckmask.close()

    planckpix = np.arange(0, planckImap.size)

    planckImap = planckImap[planckImask == 1]
    planckpix = planckpix[planckImask == 1]

    planck_ra, planck_dec = healpy.pix2ang(nside=2048,
                                           ipix=planckpix,
                                           nest=True,
                                           lonlat=True)

    coord = SkyCoord(planck_ra, planck_dec, frame='galactic', unit='deg').icrs
    planck_ra, planck_dec = coord.ra.deg, coord.dec.deg

    cat_planck = treecorr.Catalog(ra=planck_ra,
                                  dec=planck_dec,
                                  ra_units='deg',
                                  dec_units='deg',
                                  k=planckImap)

    print('Done!\n')

    print('calculating cross-relation...')
    nk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    rk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    nk.process(cat_galaxy, cat_planck)
    rk.process(cat_rand, cat_planck)

    xi, varxi = nk.calculateXi(rk)
    sig = np.sqrt(varxi)
    r = np.exp(nk.meanlogr)

    #print xi

    print('Done!\n')

    #print('Plotting')

    #plt.plot(r, xi, color='blue')
    #plt.errorbar(r[xi>0], xi[xi>0], yerr=sig[xi>0], lw=1, ls='',ecolor='g')
    #leg = plt.errorbar(-r, xi, yerr=sig, color='blue')

    #plt.xscale('log')
    #plt.xlabel(r'$\theta$ (degrees)')
    #plt.ylabel(r'$w(\theta)$')
    #plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
    #plt.legend([leg], [r'$w(\theta)$'], loc='lower left')
    #plt.title('SuperCosmos x Planck at {} z bin'.format(str(z_bins)))

    #plt.show()

    #np.save('datalog/SuperCosmos_z_{}.npy'.format(str(z_bins)),np.array([xi,r,sig]))
    result[z_bins, randoms] = np.array([xi, r, sig])
    print(randoms, 'runs')
    print('{} bins datalog saved!'.format(str(z_bins)))

    nk.clear()
    rk.clear()
    cat_galaxy.clear_cache()
    cat_rand.clear_cache()
    cat_planck.clear_cache()

    catalog = None
    scosmask = None
    coord = None
    l = None
    b = None
    ra_min, ra_max, dec_min, dec_max = None, None, None, None
    rand_ra, rand_sindec, rand_dec = None, None, None
    planck_ra, planck_dec = None, None
    planckImap, planckpix = None, None
    xi, r, sig, varxi = None, None, None, None

    return result
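
The catalog-masking step used twice above (ICRS coordinates rotated to Galactic, then tested against the WISExSCOS mask) can be sketched on its own; a toy mask and a random catalog stand in for the real inputs:

import numpy as np
import healpy
from astropy.coordinates import SkyCoord

nside = 256
scosmask = np.ones(healpy.nside2npix(nside))   # toy mask: 1 = unmasked

ra = 360.0 * np.random.rand(1000)
dec = np.degrees(np.arcsin(2.0 * np.random.rand(1000) - 1.0))
coord = SkyCoord(ra, dec, frame='icrs', unit='deg').galactic
keep = scosmask[healpy.ang2pix(nside, coord.l.deg, coord.b.deg, lonlat=True)] == 1
print(keep.sum(), 'objects survive the mask')
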
Example #53
0
 def characteristic_density_local(self, iso_sel, x_peak, y_peak, angsep_peak):
     """
     Compute the local characteristic density of a region
     """
 
     #characteristic_density = self.characteristic_density(iso_sel)
     characteristic_density = self.density
 
     x, y = self.proj.sphereToImage(self.data[self.survey.catalog['basis_1']][iso_sel], self.data[self.survey.catalog['basis_2']][iso_sel]) # Trimmed magnitude range for hotspot finding
     #x_full, y_full = proj.sphereToImage(data[basis_1], data[basis_2]) # If we want to use full magnitude range for significance evaluation
 
     # If fracdet map is available, use that information to either compute local density,
     # or in regions of spotty coverage, use the typical density of the region
     if self.fracdet is not None:
         # The following is copied from how it's used in compute_char_density
         fracdet_zero = np.tile(0., len(self.fracdet))
         cut = (self.fracdet != hp.UNSEEN)
         fracdet_zero[cut] = self.fracdet[cut]
 
         nside_fracdet = hp.npix2nside(len(self.fracdet))
         
         subpix_region_array = []
         for pix in np.unique(hp.ang2pix(self.nside,
                                         self.data[self.survey.catalog['basis_1']][iso_sel],
                                         self.data[self.survey.catalog['basis_2']][iso_sel],
                                         lonlat=True)):
             subpix_region_array.append(subpixel(pix, self.nside, nside_fracdet))
         subpix_region_array = np.concatenate(subpix_region_array)
 
         # Compute mean fracdet in the region so that this is available as a correction factor
         cut = (self.fracdet[subpix_region_array] != hp.UNSEEN)
         mean_fracdet = np.mean(self.fracdet[subpix_region_array[cut]])
 
         subpix_region_array = subpix_region_array[self.fracdet[subpix_region_array] > 0.99]
         subpix = hp.ang2pix(nside_fracdet, 
                             self.data[self.survey.catalog['basis_1']][cut_magnitude_threshold][iso_sel], 
                             self.data[self.survey.catalog['basis_2']][cut_magnitude_threshold][iso_sel],
                             lonlat=True)
 
         # This is where the local computation begins
         ra_peak, dec_peak = self.proj.imageToSphere(x_peak, y_peak)
         subpix_all = hp.query_disc(nside_fracdet, hp.ang2vec(ra_peak, dec_peak, lonlat=True), np.radians(0.5))
         subpix_inner = hp.query_disc(nside_fracdet, hp.ang2vec(ra_peak, dec_peak, lonlat=True), np.radians(0.3))
         subpix_annulus = subpix_all[~np.in1d(subpix_all, subpix_inner)]
         mean_fracdet = np.mean(fracdet_zero[subpix_annulus])
         print('mean_fracdet {}'.format(mean_fracdet))
         if mean_fracdet < 0.5:
             characteristic_density_local = characteristic_density
             print('characteristic_density_local baseline {}'.format(characteristic_density_local))
         else:
             # Check pixels in annulus with complete coverage
             subpix_annulus_region = np.intersect1d(subpix_region_array, subpix_annulus)
             print('{} percent pixels with complete coverage'.format(float(len(subpix_annulus_region)) / len(subpix_annulus)))
             if (float(len(subpix_annulus_region)) / len(subpix_annulus)) < 0.25:
                 characteristic_density_local = characteristic_density
                 print('characteristic_density_local spotty {}'.format(characteristic_density_local))
             else:
                 characteristic_density_local = float(np.sum(np.in1d(subpix, subpix_annulus_region))) \
                                                / (hp.nside2pixarea(nside_fracdet, degrees=True) * len(subpix_annulus_region)) # deg^-2
                 print('characteristic_density_local cleaned up {}'.format(characteristic_density_local))
     else:
         # Compute the local characteristic density
         area_field = np.pi * (0.5**2 - 0.3**2)
         n_field = np.sum((angsep_peak > 0.3) & (angsep_peak < 0.5))
         characteristic_density_local = n_field / area_field
 
         # If not good azimuthal coverage, revert
         cut_annulus = (angsep_peak > 0.3) & (angsep_peak < 0.5) 
         #phi = np.degrees(np.arctan2(y_full[cut_annulus] - y_peak, x_full[cut_annulus] - x_peak)) # Use full magnitude range, NOT TESTED!!!
         phi = np.degrees(np.arctan2(y[cut_annulus] - y_peak, x[cut_annulus] - x_peak)) # Impose magnitude threshold
         h = np.histogram(phi, bins=np.linspace(-180., 180., 13))[0]
         if np.sum(h > 0) < 10 or np.sum(h > 0.5 * np.median(h)) < 10:
             #angsep_peak = np.sqrt((x - x_peak)**2 + (y - y_peak)**2)
             characteristic_density_local = characteristic_density
 
     print('Characteristic density local = {:0.1f} deg^-2 = {:0.3f} arcmin^-2'.format(characteristic_density_local, characteristic_density_local / 60.**2))
 
     return(characteristic_density_local)
Example #54
0
        ra, dec = pixelname.radec_of(az_scan, alt)
        ra_arr[z] = (ra)
        dec_arr[z] = (dec)
    ra_arrs.append(ra_arr)
    dec_arrs.append(dec_arr)

#convert to healpix and plot...
#wait... healpy functions need DEC in the range 0,pi, so convert DEC
hp_decs = [np.pi / 2. - dec_arrs[j] for j in range(len(dec_arrs))]
print(hp_decs)

NSIDE = 128  #resolution of the map
npix = hp.nside2npix(NSIDE)

nhits_tot = np.zeros(npix)

for k in range(len(hp_decs)):
    healIndex = hp.ang2pix(NSIDE, hp_decs[k],
                           ra_arrs[k])  #index in healpy - ring format
    nhits = np.bincount(healIndex, minlength=npix)
    nhits_tot += nhits

print(nhits_tot)
#show the map
hp.mollview(nhits_tot, title='Hit map')
'''
plt.figure()
plt.plot(azimuths,elevations,'r.')
plt.show()
'''
Example #55
0
    if (len(args.in_dir) > 8) and (args.in_dir[-8:] == ".fits.gz"):
        fi = glob.glob(args.in_dir)
    else:
        fi = glob.glob(args.in_dir + "/*.fits.gz")
    fi = sorted(fi)
    data = {}
    ndata = 0
    for i, f in enumerate(fi):
        if i % 10 == 0:
            print("\rread {} of {} {}".format(i, len(fi), ndata), end="")
        hdus = fitsio.FITS(f)
        dels = [delta.from_fitsio(h) for h in hdus[1:]]
        ndata += len(dels)
        phi = [d.ra for d in dels]
        th = [sp.pi / 2 - d.dec for d in dels]
        pix = healpy.ang2pix(cf.nside, th, phi)
        for d, p in zip(dels, pix):
            if not p in data:
                data[p] = []
            data[p].append(d)

            z = 10**d.ll / args.lambda_abs - 1.
            z_min_pix = sp.amin(sp.append([z_min_pix], z))
            d.r_comov = cosmo.r_comoving(z)
            if not args.old_deltas:
                d.we *= ((1. + z) / (1. + args.z_ref))**(cf.alpha - 1.)
            if not args.no_project:
                d.project()
        if not args.nspec is None:
            if ndata > args.nspec: break
    print("")
Example #56
0
    def find_peaks(self, iso_sel):
        """
        Convolve field to find characteristic density and peaks within the selected pixel
        """

        #characteristic_density = self.characteristic_density(iso_sel)
        characteristic_density = self.density
    
        x, y = self.proj.sphereToImage(self.data[self.survey.catalog['basis_1']][iso_sel], self.data[self.survey.catalog['basis_2']][iso_sel]) # Trimmed magnitude range for hotspot finding
        #x_full, y_full = proj.sphereToImage(data[basis_1], data[basis_2]) # If we want to use full magnitude range for significance evaluation
        delta_x = 0.01
        area = delta_x**2
        smoothing = 2. / 60. # Was 3 arcmin
        bins = np.arange(-8., 8. + 1.e-10, delta_x)
        #bins = np.arange(-4., 4. + 1.e-10, delta_x) # SM: not sure what to prefer here...
        centers = 0.5 * (bins[0: -1] + bins[1:])
        yy, xx = np.meshgrid(centers, centers)
    
        h = np.histogram2d(x, y, bins=[bins, bins])[0]
        
        h_g = scipy.ndimage.filters.gaussian_filter(h, smoothing / delta_x)
    
        # SM: If we can speed up this block that would be great
        factor_array = np.arange(1., 5., 0.05)
        rara, decdec = self.proj.imageToSphere(xx.flatten(), yy.flatten())
        cutcut = (hp.ang2pix(self.nside, rara, decdec, lonlat=True) == self.pix_center).reshape(xx.shape)
        threshold_density = 5 * characteristic_density * area
        for factor in factor_array:
            # This is reducing the contrast against the background through the arbitrary measurement 'factor'
            # until there are fewer than 10 disconnected peaks
            h_region, n_region = scipy.ndimage.measurements.label((h_g * cutcut) > (area * characteristic_density * factor))
            #print 'factor', factor, n_region, n_region < 10
            if n_region < 10:
                threshold_density = area * characteristic_density * factor
                break
    
        h_region, n_region = scipy.ndimage.measurements.label((h_g * cutcut) > threshold_density)
        #h_region = np.ma.array(h_region, mask=(h_region < 1))
    
        x_peak_array = []
        y_peak_array = []
        angsep_peak_array = []
    
        for index in range(1, n_region + 1): # loop over peaks
            #index_peak = np.argmax(h_g * (h_region == index))
            index_peak = np.ravel_multi_index(scipy.ndimage.maximum_position(input=h_g, labels=h_region, index=index), h_g.shape)
            x_peak, y_peak = xx.flatten()[index_peak], yy.flatten()[index_peak]
            #print index, np.max(h_g * (h_region == index))

            # SM: Could these numbers be useful?
            #index_max = scipy.ndimage.maximum(input=h_g, labels=h_region, index=index)
            #index_stddev = scipy.ndimage.standard_deviation(input=h_g, labels=h_region, index=index)
            #print('max: {}'.format(index_max))
            #print('stddev: {}'.format(index_stddev))
            
            #angsep_peak = np.sqrt((x_full - x_peak)**2 + (y_full - y_peak)**2) # Use full magnitude range, NOT TESTED!!!
            angsep_peak = np.sqrt((x-x_peak)**2 + (y-y_peak)**2)
    
            x_peak_array.append(x_peak)
            y_peak_array.append(y_peak)
            angsep_peak_array.append(angsep_peak)
        
        return x_peak_array, y_peak_array, angsep_peak_array
Example #57
0
moonSunSep = np.array(moonSunSep)
moonTargetSep = np.array(moonTargetSep)
moonAzDiff = moonTargetSep * 0
targetAlt = np.pi / 2. - np.arccos(1. / moonAM)
# Compute the azimuth difference given the moon-target separation
# Let's just do a stupid loop:
for i in np.arange(targetAlt.size):
    possibleDistances = haversine(0., np.radians(moonAlt[i]), az,
                                  az * 0 + targetAlt[i])
    diff = np.abs(possibleDistances - np.radians(moonTargetSep[i]))
    good = np.where(diff == diff.min())
    try:
        moonAzDiff[i] = az[good][0]
        # ok, now I have an alt and az, I can convert that back to a healpix id.

        hpid.append(hp.ang2pix(nside, np.pi / 2. - targetAlt[i],
                               moonAzDiff[i]))
    except:
        # Debugging hook: drop into pdb if the azimuth lookup fails
        import pdb
        pdb.set_trace()
    if diff.min() > 1e-5:
        # Debugging hook: stop if the best match is far from the requested separation
        import pdb
        pdb.set_trace()

nrec = moonAM.size
nwave = moonWave.size

dtype = [('hpid', 'int'), ('moonAltitude', 'float'), ('moonSunSep', 'float'),
         ('spectra', 'float', (nwave)), ('mags', 'float', (6))]
moonSpectra = np.zeros(nrec, dtype=dtype)
moonSpectra['hpid'] = hpid
moonSpectra['moonAltitude'] = moonAlt
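
The loop above relies on a haversine helper that is not shown in the snippet. A rough sketch of what such a helper typically looks like, with the argument order and radian units inferred from how it is called above (an assumption, not the package's actual implementation):

import numpy as np

def haversine(long1, lat1, long2, lat2):
    """Great-circle separation (radians) between two (lon, lat) points given in radians."""
    dlon = long2 - long1
    dlat = lat2 - lat1
    a = np.sin(dlat / 2.)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.)**2
    return 2. * np.arcsin(np.sqrt(a))

Once an (alt, az) pair is recovered, healpy expects colatitude, so the pixel id follows from hp.ang2pix(nside, np.pi / 2. - alt, az), exactly as in the loop above.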
Example #58
0
    xsize = 1000
    ysize = xsize // 2  # np.linspace below requires an integer sample count
    linthresh = 0.1

    # this is the mollview min and max
    vmin = 0
    vmax = nbins[-1]

    theta = np.linspace(np.pi, 0, ysize)
    phi = np.linspace(-np.pi, np.pi, xsize)
    longitude = np.radians(np.linspace(-180, 180, xsize))
    latitude = np.radians(np.linspace(-90, 90, ysize))

    # project the map to a rectangular matrix xsize x ysize
    PHI, THETA = np.meshgrid(phi, theta)
    grid_pix = hp.ang2pix(nside, THETA, PHI)

    width = 24
    cmap = plt.cm.RdYlBu
    colormaptag = "colombi1_"

    data = [mask_func(nside) for mask_func in mask_funcs]

    fig = plt.figure(figsize=(cm2inch(width), cm2inch(width) * .8))

    figure_rows, figure_columns = 2, 2
    for i, (submap, nbin) in enumerate(zip(data, nbins)):
        # matplotlib is doing the mollveide projection
        submap = pebbles.plotting.apply_so_mask(submap)
        ax = plt.subplot(figure_rows,
                         figure_columns,
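
The snippet above is cut off mid-call, but the key projection step is already visible: grid_pix holds, for every (THETA, PHI) grid point, the HEALPix pixel it falls in, so any map can be resampled onto the rectangular grid by fancy indexing. A minimal standalone sketch with an assumed nside and a toy map:

import numpy as np
import healpy as hp

nside = 32
toy_map = np.arange(hp.nside2npix(nside), dtype=float)  # toy map: value = pixel index

xsize = 400
ysize = xsize // 2
theta = np.linspace(np.pi, 0, ysize)      # colatitude, bottom to top
phi = np.linspace(-np.pi, np.pi, xsize)   # healpy wraps phi modulo 2*pi

PHI, THETA = np.meshgrid(phi, theta)
grid_pix = hp.ang2pix(nside, THETA, PHI)  # pixel index for every grid point
grid_map = toy_map[grid_pix]              # resampled map, shape (ysize, xsize)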
Example #59
0
    def findTract(self, coord):
        """Find the tract whose inner region includes the coord."""
        theta, phi = coordToAng(coord)
        index = healpy.ang2pix(self._nside, theta, phi, nest=self.config.nest)
        return self[index]
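
coordToAng comes from elsewhere in the package; all it has to do is map RA/Dec to healpy's (colatitude, longitude) convention in radians. A sketch of an equivalent helper (the names and the nside value below are illustrative, not the actual stack API):

import numpy as np
import healpy

def coord_to_ang(ra_rad, dec_rad):
    """Convert RA/Dec in radians to healpy's (theta, phi) = (colatitude, longitude)."""
    return np.pi / 2. - dec_rad, ra_rad

theta, phi = coord_to_ang(np.radians(53.1), np.radians(-27.8))
index = healpy.ang2pix(16, theta, phi, nest=True)  # nside=16 and nest ordering assumed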
Example #60
0
    def produce_forests(self):
        """
            randomly creates Lya forests for testing
        """
        userprint("\n")
        nside = 8

        ### Load DRQ
        vac = fitsio.FITS(self._branchFiles + "/Products/cat.fits")
        ra = vac[1]["RA"][:] * np.pi / 180.
        dec = vac[1]["DEC"][:] * np.pi / 180.
        thid = vac[1]["THING_ID"][:]
        plate = vac[1]["PLATE"][:]
        mjd = vac[1]["MJD"][:]
        fiberid = vac[1]["FIBERID"][:]
        vac.close()

        ### Get Healpy pixels
        pixs = healpy.ang2pix(nside, np.pi / 2. - dec, ra)

        ### Save master file
        path = self._branchFiles + "/Products/Spectra/master.fits"
        head = {}
        head['NSIDE'] = nside
        cols = [thid, pixs, plate, mjd, fiberid]
        names = ['THING_ID', 'PIX', 'PLATE', 'MJD', 'FIBER']
        out = fitsio.FITS(path, 'rw', clobber=True)
        out.write(cols, names=names, header=head, extname="MASTER TABLE")
        out.close()

        ### Log lambda grid
        logl_min = 3.550
        logl_max = 4.025
        logl_step = 1.e-4
        log_lambda = np.arange(logl_min, logl_max, logl_step)

        ### Loop over healpix
        for p in np.unique(pixs):

            ### Retrieve objects from catalog and produce fake spectra
            p_thid = thid[(pixs == p)]
            p_fl = np.random.normal(loc=1.,
                                    scale=1.,
                                    size=(log_lambda.size, p_thid.size))
            p_iv = np.random.lognormal(mean=0.1,
                                       sigma=0.1,
                                       size=(log_lambda.size, p_thid.size))
            p_am = np.zeros((log_lambda.size, p_thid.size)).astype(int)
            p_am[np.random.random_sample(size=(log_lambda.size,
                                               p_thid.size)) > 0.90] = 1
            p_om = np.zeros((log_lambda.size, p_thid.size)).astype(int)

            ### Save to file
            p_path = self._branchFiles + "/Products/Spectra/pix_" + str(
                p) + ".fits"
            out = fitsio.FITS(p_path, 'rw', clobber=True)
            out.write(p_thid, header={}, extname="THING_ID_MAP")
            out.write(log_lambda, header={}, extname="LOGLAM_MAP")
            out.write(p_fl, header={}, extname="FLUX")
            out.write(p_iv, header={}, extname="IVAR")
            out.write(p_am, header={}, extname="ANDMASK")
            out.write(p_om, header={}, extname="ORMASK")
            out.close()

        return
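
The per-pixel grouping above is the standard pattern for splitting a catalog across HEALPix files. A toy standalone sketch of just that step (the coordinates and IDs below are made up):

import numpy as np
import healpy

# Toy catalog in degrees; in the snippet these come from the DRQ FITS file
ra_deg = np.array([10.0, 10.2, 250.0])
dec_deg = np.array([-5.0, -5.1, 30.0])
thing_id = np.array([101, 102, 103])

nside = 8
pixs = healpy.ang2pix(nside, np.pi / 2. - np.radians(dec_deg), np.radians(ra_deg))

# One group of object IDs per HEALPix pixel, mirroring the pix_*.fits files above
for p in np.unique(pixs):
    print(p, thing_id[pixs == p])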