Example #1
File: img.py Project: jdbowman/sim
def freq_interp_hpm(inmap, infreq, outfreq, spectral_index=(), curvature=()):
    import numpy as np
    import healpy as hp
    basemap_file = inmap
    basefreq = infreq
    inmap = hp.read_map(basemap_file)
    print('Read base map from {0:s}'.format(basemap_file))
    print('Frequency of inmap: {0:f} MHz'.format(infreq))
    if isinstance(outfreq, float):
        nu = np.array([outfreq])
    elif isinstance(outfreq, np.ndarray):
        nu = outfreq
    else:
        raise Exception("Check outfreq format")
    print('Output frequency(ies):')
    print(nu)
    if not spectral_index:
        beta = np.ones(inmap.size) * -2.5
        print('beta is assumed to be -2.5')
    elif isinstance(spectral_index, str):
        if spectral_index.rsplit('.')[-1] == 'fits':
            beta = hp.read_map(spectral_index)
            print('Read beta map from {0:s}'.format(spectral_index))
    if not curvature:
        gamma = np.zeros(inmap.size)
        print('gamma is assumed to be 0')
    elif isinstance(curvature, str):
        if curvature.rsplit('.')[-1] == 'fits':
            gamma = hp.read_map(curvature)
            print('Read gamma map from {0:s}'.format(curvature))
    outname = ['{0:s}_{1:.3f}MHz.fits'.format(basemap_file.rsplit('.', 1)[0], nu[i])
               for i in range(nu.size)]
    for f, name in zip(nu, outname):
        print('Scaling base map to {0:.3f}MHz and saving output to {1:s}'
              .format(f, name))
        # Curved power law: T(f) = T(basefreq) * (f / basefreq)**(beta + gamma * ln(f / basefreq))
        T = np.exp(np.log(inmap) + beta * np.log(f / basefreq)
                   + gamma * (np.log(f / basefreq)) ** 2)
        hp.write_map(name, T, coord='G')
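The scaling applied in the loop above is the curved power-law relation T(nu) = T(nu0) * (nu/nu0)**(beta + gamma*ln(nu/nu0)), evaluated pixel by pixel in log space. A minimal sketch of the same relation on a toy array, assuming only numpy is available; the frequencies and temperatures below are purely illustrative:

import numpy as np

t0 = np.array([20.0, 35.0, 50.0])   # toy brightness temperatures at the base frequency [K]
beta = np.full(t0.size, -2.5)       # spectral index per pixel
gamma = np.zeros(t0.size)           # spectral curvature per pixel
nu0, nu = 408.0, 150.0              # base and target frequencies [MHz], illustrative only

x = np.log(nu / nu0)
t = np.exp(np.log(t0) + beta * x + gamma * x ** 2)   # same formula as in freq_interp_hpm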
Example #2
def MakeMap(fitsfile, output=None, nside=256, nest=True, norm=True, masked=False):
    #
    # Save a map in healpy format built from a FITS catalogue input
    #
    import numpy as np
    import pyfits as pf
    import healpy as hp

    hdulist = pf.open(fitsfile)
    Cat = hdulist[1].data
    hdulist.close()

    pixarea = hp.nside2pixarea(nside, degrees=True)
    print('nside = ', nside, ' --> Pixel area (deg2) = ', pixarea)

    # Convert (ra, dec) in degrees to (colatitude, longitude) in radians and pixelize
    tiles = hp.ang2pix(nside, -Cat['dec'] * np.pi / 180. + np.pi / 2.,
                       Cat['ra'] * np.pi / 180., nest=nest)
    npix = hp.nside2npix(nside)
    n_hit_selec = np.zeros(npix)

    for itile in tiles:
        if norm:
            n_hit_selec[itile] += 1. / pixarea
        else:
            n_hit_selec[itile] += 1
    if masked:
        n_hit_selec = MaskBorders(n_hit_selec)
    if output is None:
        return n_hit_selec
    else:
        hp.write_map(output, n_hit_selec, nest=nest)
        return
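The coordinate conversion used by MakeMap (colatitude theta = pi/2 - dec, longitude phi = ra, both in radians) and the per-square-degree normalisation can be checked on their own; a small sketch with made-up coordinates, assuming healpy and numpy are installed:

import numpy as np
import healpy as hp

nside = 256
ra = np.array([10.0, 120.0, 250.0])    # degrees, illustrative
dec = np.array([-30.0, 0.0, 45.0])     # degrees, illustrative

theta = np.pi / 2. - np.deg2rad(dec)   # colatitude in radians
phi = np.deg2rad(ra)
pix = hp.ang2pix(nside, theta, phi, nest=True)

hits = np.bincount(pix, minlength=hp.nside2npix(nside)).astype(float)
hits /= hp.nside2pixarea(nside, degrees=True)   # objects per deg^2, as the norm=True branch does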
Example #3
def ComputeSystCorrectionM1(syst_maps, syst_params, syst_limits, syst_i, output=None, nside=256, nest=True):
    #
    # Create a healpy map of the expected density fluctuation per pixel, excluding systematic syst_i,
    # given the value of each systematic in the pixels and the linear parameter for each systematic.
    #
    import healpy as hp
    import numpy as np

    maps = []
    for i, isyst in enumerate(syst_maps):
        m = hp.read_map(isyst, nest=nest)
        m[m < syst_limits[i][0]] = 0
        m[m > syst_limits[i][1]] = 0
        m /= m[m > 0].mean()
        maps.append(m)

    predicted_fluc = np.zeros(len(maps[0]))
    for i, ipar in enumerate(syst_params):
        if i == syst_i:
            continue
        predicted_fluc += ipar * maps[i]

    # Zero out pixels that fall outside the limits of any of the other systematics maps
    for i, imap in enumerate(maps):
        if i == syst_i:
            continue
        predicted_fluc[imap == 0] = 0

    if output is None:
        return predicted_fluc
    else:
        hp.write_map(output, predicted_fluc, nest=nest)
        return
Example #4
def ComputeFullSystCorrection(syst_maps, syst_params, syst_limits=None, output=None, nside=256, nest=True):
    #
    # Create a healpy map of the expected density fluctuation per pixel given the value of each
    # systematic in the pixels and the linear parameter for each systematic.
    #
    import healpy as hp
    import numpy as np
    import copy

    cop_maps = copy.deepcopy(syst_maps)
    maps = []
    for i, m in enumerate(cop_maps):
        if syst_limits is not None:
            m[m < syst_limits[i][0]] = 0
            m[m > syst_limits[i][1]] = 0
        m /= m[m > 0].mean()
        maps.append(m)

    predicted_fluc = np.zeros(len(maps[0]))
    for i, ipar in enumerate(syst_params):
        predicted_fluc += ipar * (maps[i] - 1)

    # Zero out pixels that fall outside the limits of any systematics map
    for imap in maps:
        predicted_fluc[imap == 0] = 0

    if output is None:
        return predicted_fluc
    else:
        hp.write_map(output, predicted_fluc, nest=nest)
        return
Example #5
def ascii2fits(file_1, nside_1, file_2, nside_2):
    """
    A function for converting an ascii pixel list to a fits mask map.
    @param file_1 input ascii file with the useful pixel numbers
    @param nside_1 nside of the input ascii file
    @param file_2 output file path
    @param nside_2 nside of the output map
    """
    import logging
    import numpy as np
    import healpy as hp

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    if nside_2 > nside_1:
        raise RuntimeError("nside of the input should be greater than or equal to that of the output")

    logger.info("Reading data from " + file_1)
    pixs = np.loadtxt(file_1, skiprows=1)

    logger.info("Creating healpix mask with nside " + str(nside_2))
    mask = np.zeros(hp.nside2npix(nside_1))
    for pix in pixs:
        mask[int(pix)] = 1

    ud_mask = hp.ud_grade(mask, nside_2)

    logger.info("Writing output to " + file_2)
    hp.write_map(file_2, ud_mask)
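The degrade step above relies on hp.ud_grade, which by default averages the child pixels of each low-resolution pixel, so the output mask takes fractional values at the mask edges. A quick sketch, assuming healpy is installed; the nsides and marked pixels are arbitrary:

import numpy as np
import healpy as hp

nside_in, nside_out = 64, 32
mask = np.zeros(hp.nside2npix(nside_in))
mask[:4] = 1                              # mark a few pixels, illustrative only

ud_mask = hp.ud_grade(mask, nside_out)    # each output pixel is the mean of its children
print(ud_mask[ud_mask > 0])               # fractional values where only some children were set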
Example #6
 def write_CMB_T_map(self, from_this=None, to_this='my_map'):
     if from_this is None:
         print("No CMB T map supplied")
     else:
         self.Tmapfile = to_this + ".fits"
         hp.write_map(self.Tmapfile, from_this)
     return
Example #7
def main(nsim=0):

    nl = h._nl

    # Load map, mask, mll
    print ""
    print "Loading map, mask, mll, and calculating mll_inv..."

    map_data = hp.read_map(h._fn_map)
    mask = hp.read_map(h._fn_mask)
    mll = np.load(h._fn_mll)
    mll_inv = np.linalg.inv(mll)

    # Read in Planck map: normalize, remove mono-/dipole, mask
    print "Normalizing, removing mono-/dipole, and masking map..."
    map_masked = map_data * mask
    # Create cltt (cltt_data_masked) and correct it (cltt_data_corrected)
    print "Calculating cltt_data_masked, cltt_data_corrected..."
    cltt_data_masked = hp.anafast(map_masked)
    cltt_data_masked = cltt_data_masked[:nl]
    cltt_data_corrected = np.dot(mll_inv, cltt_data_masked)
    # Create simulation of map (map_sim) from cltt_data_corrected
    print "Creating and saving map_sim_%i..." % nsim
    map_sim = hp.synfast(cltt_data_corrected, h._nside)
    hp.write_map('output/map_sim_%i.fits' % nsim, map_sim)
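In the routine above the mask-corrected spectrum is fed to hp.synfast to draw a Gaussian realization of the map. A minimal, self-contained sketch of that last step with a toy power spectrum (the spectrum shape, nside and filename are illustrative, not the values held by the module-level `h` object):

import numpy as np
import healpy as hp

nside, lmax = 64, 128
cl_toy = 1e-3 / (np.arange(lmax + 1) + 1.0) ** 2   # toy angular power spectrum
map_sim = hp.synfast(cl_toy, nside)                # Gaussian realization with this spectrum
hp.write_map('map_sim_toy.fits', map_sim, overwrite=True)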
Example #8
def MakePredictedDensityMap(syst_maps, syst_params, syst_limits, mean_dens, output, nside=256, nest=True):
    #
    # Create a healpy map of the expected density per pixel given the value of each systematic
    # in the pixels and the linear parameter for each systematic. The mean density must also be given.
    #
    import healpy as hp
    import numpy as np
    import copy

    cop_maps = copy.deepcopy(syst_maps)
    maps = []
    for i, m in enumerate(cop_maps):
        m[m < syst_limits[i][0]] = 0
        m[m > syst_limits[i][1]] = 0
        m /= m[m > 0].mean()
        maps.append(m)

    predicted_dens = np.zeros(len(maps[0]))
    predicted_dens += mean_dens
    for i, ipar in enumerate(syst_params):
        predicted_dens += ipar * (maps[i] - 1)

    # Zero out pixels that fall outside the limits of any systematics map
    for imap in maps:
        predicted_dens[imap == 0] = 0

    hp.write_map(output, predicted_dens, nest=nest)
Example #9
def main(fname_config):

    # Read configuration into classes
    Config = ConfigParser.ConfigParser()
    Config.read(fname_config)
    out = output(Config._sections['GlobalParameters'])

    Config.read('./ConfigFiles/' + Config.get('FreeFree', 'model') + '_config.ini')
    freefree = component(Config._sections['FreeFree'], out.nside)
    with open(out.output_dir + out.output_prefix + 'freefree_config.ini', 'w') as configfile:
        Config.write(configfile)

    print('Computing free-free maps.')
    print('----------------------------------------------------- \n')
    if out.debug:
        print(''.join("%s: %s \n" % item for item in vars(freefree).items()))
        print('----------------------------------------------------- \n')

    conv_I = convert_units(freefree.template_units, out.output_units, out.output_frequency)

    scaled_map_ff = scale_freqs(freefree, out) * conv_I[..., np.newaxis] * freefree.em_template
    scaled_map_ff_pol = np.zeros((2, np.asarray(out.output_frequency).size, hp.nside2npix(out.nside)))

    if out.debug:
        ff = np.concatenate([scaled_map_ff[np.newaxis, ...], scaled_map_ff_pol])
        for i in range(0, len(out.output_frequency)):
            hp.write_map(out.output_dir + out.output_prefix + 'ff_%d' % (out.output_frequency[i]) + '_' + str(out.nside) + '.fits',
                         ff[:, i, :], coord='G', column_units=out.output_units)

    return np.concatenate([scaled_map_ff[np.newaxis, ...], scaled_map_ff_pol])
Example #10
def mapper1(catalog, nside, out_map):
    # create empty map
    npix = hp.nside2npix(nside)

    # read catalog
    c = pandas.read_csv(catalog, sep=',', header=0, dtype={'ra' : np.float64, 'dec' : np.float64}, engine=None, usecols=['ra', 'dec'])

    ra = c["ra"]
    dec = c["dec"]

    # galaxy count
    num_gal = len(ra)

    # generate theta/phi vectors
    theta = np.deg2rad(90.0 - dec)
    phi = np.deg2rad(ra)

    # generate corresponding pixel_IDs
    pix_IDs = hp.ang2pix(nside, theta, phi, nest=False)

    # distribute galaxies according to pixel_ID
    cmap = np.bincount(pix_IDs, minlength=npix)
    assert len(cmap) == npix, ("pixel numbers mismatched")

    # write to file
    hp.write_map(out_map, cmap)

    print("num_gal =", num_gal)
    return None
Example #11
def main(nsim=1, fnl=0.0):

    nl = 1024
    nside_fnl = 512

    # Load map, mll
    print ""
    print "Loading alm_g, alm_ng and creating map..."

    fn_almg = ('data/fnl_sims/alm_l_%04d_v3.fits' % (nsim,))
    #fn_almg = ('data/fnl_sims/alm_l_%i.fits' % (nsim,))
    almg = hp.read_alm(fn_almg)
    #almg = almg[:hp.Alm.getsize(nl)]
    fn_almng = ('data/fnl_sims/alm_nl_%04d_v3.fits' % (nsim,))
    #fn_almng = ('data/fnl_sims/alm_nl_%i.fits' % (nsim,))
    almng = hp.read_alm(fn_almng)
    #almng = almng[:hp.Alm.getsize(nl)]
    alm = almg * (2.7e6) + fnl * almng * (2.7e6) # convert to units of uK to be consistent with other maps

    map_sim_fnl = hp.alm2map(alm, nside=nside_fnl)
    
    #print "Normalizing map..."
    #map_sim_fnl *= (1e6 * 2.7) # convert to units of uK to be consistent with other maps

    fn_map = 'data/fnl_sims/map_fnl_%i_sim_%i.fits' % (int(fnl), nsim)
    print "Writing map: %s" % fn_map
    hp.write_map(fn_map, map_sim_fnl)
Example #12
def merger(config, params, avoids=False):

    # Collect all fits files
    prefix = '/net/user/fmcnally/ShowerLLH/maps/'
    masterList = glob.glob(prefix + 'raw/'+config+'*.fits')
    masterList.sort()

    # Refine to files containing every parameter in params
    fList = []
    for f in masterList:
        f_params = basename(f).split('_')
        if all(x in f_params for x in params):
            fList.append(f)

    # Refine to files containing no parameter in avoids
    if avoids:
        fList = [f for f in fList if not any(x in basename(f) for x in avoids)]

    # Merge files
    if len(fList) == 0:
        print('No files found')
        return
    for i in range(len(fList)):
        temp = hp.read_map(fList[i])
        if i == 0:
            combined_map = np.zeros(temp.shape)
        combined_map += temp

    # Write to file
    outBase = '_'.join(params)
    outFile = prefix + 'merged/'+config+'_'+outBase
    hp.write_map(outFile, combined_map)
Example #13
def compute_distance(msk,saveto=None,process_mask="./process_mask"):

  if isinstance(msk,str):
    mskfile = msk
    delmskfile=""
  else:
    mskfile = tempfile.mktemp(prefix="input_mask_",suffix=".fits")
    delmskfile = mskfile
    hp.write_map(mskfile,msk)

  delsaveto = ""
  if not saveto:
    saveto = tempfile.mktemp(prefix="output_dist_",suffix=".fits")
    delsaveto = saveto

  parfile = tempfile.mktemp(prefix="mask_",suffix=".par")
  f=open(parfile,"w")
  print("mask_file = ",mskfile,file=f)
  print("distance_file = ",saveto,file=f)
  f.close()
  sbp.call([process_mask,parfile])

  dist = hp.read_map(saveto)

  os.remove(parfile)
  if delmskfile:
    os.remove(delmskfile)
  if delsaveto:
    os.remove(delsaveto)

  return dist
Example #14
def simulate_cmb(nside=2048, lmax=3000,
                 frequency=100,smear=False,
                 nomap = False, beam=None, beamP=None,
                 save=False, filename='testcmb.fits',
                 cl_file='bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl'):
        
    ls, cltt, clte, clee, clbb = get_theory_cls(lmax=lmax, cl_file=cl_file)
 
    Tlm, Elm, Blm = hp.synalm( (cltt, clee, clbb, clte), new=True, lmax=lmax)

    
    if smear:
        if (beam is None) or (beamP is None) :
            hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
            beam = hdulist[beam_index['{}'.format(frequency)]].data.NOMINAL[0][:lmax+1]
            beamP = hdulist[beam_index['{}P'.format(frequency)]].data.NOMINAL[0][:lmax+1]
        hp.sphtfunc.almxfl(Tlm, beam, inplace=True)
        hp.sphtfunc.almxfl(Elm, beamP, inplace=True)
        hp.sphtfunc.almxfl(Blm, beamP, inplace=True)

    if nomap:
        return Tlm,Elm,Blm
    
    Tmap = hp.alm2map( Tlm, nside )
    Qmap, Umap = hp.alm2map_spin( (Elm, Blm), nside, 2, lmax=lmax)

    if save:
        hp.write_map(data_path + filename, [Tmap, Qmap, Umap])
    return Tmap, Qmap, Umap
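The smear branch above multiplies the a_lm by the instrument beam with hp.almxfl before transforming back to maps. A compact sketch of the same harmonic-space smoothing using a toy Gaussian beam instead of the Planck RIMO beams (spectrum, FWHM and nside are illustrative):

import numpy as np
import healpy as hp

lmax = 512
cl_toy = 1.0 / (np.arange(lmax + 1) + 10.0) ** 2            # toy TT spectrum
tlm = hp.synalm(cl_toy, lmax=lmax)
beam = hp.gauss_beam(np.radians(10.0 / 60.0), lmax=lmax)    # 10 arcmin FWHM beam window
hp.almxfl(tlm, beam, inplace=True)                          # multiply a_lm by the beam transfer function
tmap = hp.alm2map(tlm, nside=256)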
Example #15
def make_healpix_map_for_energy_band(energy_band, order):
    log.info(f'Making HEALPix map for energy band: {energy_band} and order: {order}')

    # Select events in energy band
    table = Table.read('input_data/fermi_hgps_events_selected.fits.gz', hdu=1)
    energy = table['ENERGY'].quantity.to('GeV').value
    mask = (energy_band['min'] <= energy) & (energy < energy_band['max'])
    table = table[mask]
    log.info(f'Number of events: {len(table)}')

    # Bin the events into a HEALPix counts map
    nside = hp.order2nside(order + shift_order)
    ipix = hp.ang2pix(
        nside=nside, nest=True, lonlat=True,
        theta=table['L'], phi=table['B'],
    )
    npix = hp.nside2npix(nside)
    log.debug(f'Number of pixels: {npix}')
    resolution = np.rad2deg(hp.nside2resol(nside))
    log.debug(f'Pixel resolution: {resolution} deg')
    image = np.bincount(ipix, minlength=npix)
    image = image.astype('float32')

    # TODO: smoothing the HEALPix map with default setting is very slow.
    # Maybe chunk the data into local WCS maps and then stitch back together?
    # For now: no smoothing
    # image = hp.smoothing(image, sigma=np.deg2rad(0.1))

    path = DATA_DIR / 'maps' / 'Fermi10GeV_healpix_maps' / 'energy_{min}_{max}.fits.gz'.format_map(energy_band)
    path.parent.mkdir(exist_ok=True, parents=True)
    log.info(f'Writing {path}')
    hp.write_map(str(path), image, coord='G', nest=True)
Example #16
File: ps.py Project: zonca/planck
def anafast(m, m2=None, gal_cut = 30, lmax = None):
    '''Utility to run anafast by Healpix'''
    healpy.write_map('tempmap.fits', m, nest = False)
    config_filename = 'anafastconfig.txt'
    config = ConfigObj()
    config.filename = config_filename 
    config['simul_type'] = 1
    if gal_cut:
        config['theta_cut_deg'] = gal_cut
    if lmax:
        config['nlmax'] =  lmax
    config['infile'] = 'tempmap.fits' 
    if m2 is not None:
        healpy.write_map('tempmap2.fits', m2, nest = False)
        config['infile2'] = 'tempmap2.fits' 
    config['outfile'] = 'tempcl.fits'
    config['won'] = 0
    config.write()
    if os.path.exists('tempcl.fits'):
        os.remove('tempcl.fits')
    callstring = 'anafast --double %s' % config_filename
    subprocess.call(callstring, shell=True)
    cl = pyfits.open('tempcl.fits')[1].data.field('TEMPERATURE')
    os.remove('tempcl.fits')
    return cl
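The wrapper above shells out to the Fortran Healpix anafast binary through a ConfigObj parameter file. Recent healpy versions expose the same operation directly, including a symmetric galactic cut, which can serve as a quick cross-check; a sketch with a toy map (map, nside and cut are illustrative):

import numpy as np
import healpy as hp

nside = 64
m = np.random.standard_normal(hp.nside2npix(nside))    # toy RING-ordered map, illustrative
cl = hp.anafast(m, lmax=2 * nside, gal_cut=30)          # pseudo-C_l with |b| < 30 deg masked out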
Example #17
def obspix2mask(obspix, nside, fname=None):

    """
    From the observed pixels to a binary mask, (``mask[obspix]=1 , 0 elsewhere``)

    **Parameters**

    - ``obspix``:{array}
        pixels observed during the scanning of the telescope and considered
        as not pathological (ordering in the  HEALPIX pixelization).
    - ``nside``: {int}
        Healpix parameter to define the pixelization grid of the map
    - ``fname``:{str}
        path to the fits file for the output map; if set, the map is also written to this file

    **Returns**

    - mask :{array}


    """
    mask = np.zeros(hp.nside2npix(nside))
    mask[obspix] = 1
    if fname is not None:
        hp.write_map(fname, mask)

    return mask
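A minimal usage sketch for obspix2mask with an invented list of observed pixels and a small nside (values are illustrative; numpy and healpy must already be imported for the function itself):

import numpy as np

obspix = np.array([0, 1, 2, 100, 101])    # hypothetical observed pixels
nside = 16
mask = obspix2mask(obspix, nside)         # binary mask: 1 on observed pixels, 0 elsewhere
assert mask.sum() == obspix.size
# obspix2mask(obspix, nside, fname='mask.fits')   # optionally also writes the mask to disk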
Example #18
def see_mapper(catalog_dir, nside, out_map):
    # create empty accumulators (pixels with zero counts will yield NaN average seeing)
    npix = hp.nside2npix(nside)
    pix_seeing = np.zeros(npix)
    pix_totalcounts = np.zeros(npix)
    
    for cat in listdir(catalog_dir):
        if cat.endswith(".csv") and cat.startswith("with"):
            # read catalog, cols [ra,dec,psffwhm_i]
            c = pandas.read_csv(join(catalog_dir, cat), sep=',', header=0, dtype={'ra' : np.float64, 'dec' : np.float64, "psffwhm_i" : np.float64}, engine=None, usecols=[1,2,292])
            ra = c["ra"]
            dec = c["dec"]
            seeing = c["psffwhm_i"]

            # generate object pixel_IDs & pixel_counts
            theta = np.deg2rad(90.0 - dec)
            phi = np.deg2rad(ra)
            pix_IDs = np.array(hp.ang2pix(nside, theta, phi, nest=False))
            pix_counts = np.bincount(pix_IDs, minlength=npix)
            assert len(pix_counts) == npix, ("pixel numbers mismatched")

            # calculate "total" seeing in each pixel
            for (i, pix) in enumerate(pix_IDs):
                pix_seeing[pix] += seeing[i]
                pix_totalcounts[pix] += 1

            del c, ra, dec, seeing, pix_counts, pix_IDs
            gc.collect()

    pix_avg_seeing = pix_seeing/pix_totalcounts

    # map seeing
    hp.write_map(out_map, pix_avg_seeing) # CHANGE FILENAMES????
Example #19
def main(catalog_dir, nside, out_dir):
    
    # define map resolution, create map of zeros
    assert hp.isnsideok(nside), ("nside must be a power of 2")
    npix = hp.nside2npix(nside)
    hmap = np.zeros(npix)
    
    # create destination directory
    if not isdir(out_dir):
        mkdir(out_dir)
    else:
        assert listdir(out_dir) == [], ("out_dir already exists/has content, choose a new destination directory")
    
    # create count maps
    mapper1(catalog_dir, nside, out_dir)

    # merge count maps
    for cmap in listdir(out_dir):
        if cmap.endswith(".fits"):
            m = hp.read_map(join(out_dir, cmap))
            hmap += m

    # assign filename & write final map
    if not all(x == 0 for x in hmap):
        out_filename = basename(normpath(catalog_dir)) + "_" + str(nside) + "cmap.fits"
        hp.write_map(join(out_dir, out_filename), hmap)
    else:
        print("empty map")

    return None
Example #20
def generate_gsm2016_map(args):
    filename, freq = args
    gsm_2016 = GlobalSkyModel2016(freq_unit='MHz', unit='MJysr')
    gsm_2016.generate(freq)
    generated_map_data_k = jysr2k(gsm_2016.generated_map_data * 1e6, 150)
    hp.write_map(filename, generated_map_data_k, coord='G',
                 column_units='K', fits_IDL=False, overwrite=True)
Example #21
def plot_coldgas(z):
    #firstfile = 0
    #lastfile = 127
    config = {}

    try:
        gal
    except NameError:
        gal = {}
        nTrees = {}
        nGals = {}
        nTreeGals = {}

    r = {}
    theta = {}
    phi = {}
    for i in range(len(model_names)):
        index = model_names[i]
        if index[:4] == "lgal":
            zz = "%10.2f"%(z)
        elif index[:4] == "sage":
            zz = "%10.3f"%(z)
        file_prefix = "model_z"+zz.strip()
        if not index in gal:
            (nTrees[index],nGals[index],nTreeGals[index],gal[index]) = read_lgal.readsnap_lgal_advance(model_paths[i],file_prefix,0,511,filter[i],dt[i],1)
        
    
        R = numpy.empty(nGals[index]*8,dtype=c_float)
        pix = numpy.empty(nGals[index]*8,dtype=numpy.int64)
        pixmap = numpy.zeros(healpy.nside2npix(NSIDE),dtype=numpy.float64)
        # I want the array to be Fortran-like array
        # to be used as ([x],[y],[z]) in Fortran code
        pos = numpy.ascontiguousarray(gal[index]['Pos'])
        pos_sphere = numpy.empty((nGals[index]*8,3),dtype=numpy.float32,order='C')
        print(pos.flags)
        print(pos_sphere.flags)
        print(pos)
        index_out = 0
        N = nGals[index]
        print(N)
        mymodule.make_sphere(c_int(nGals[index]), c_float(500.0), pos, pos_sphere)
        print(pos_sphere)
        # NOTE: this early return leaves the map-building loop below unreachable
        return
        for i in range(2):
            for j in range(2):
                for k in range(2):
                    index_in = 0
                    
                    while (index_in < N):
                        #print pos_tmp[index][0:3] , pos[index_in][0:3]
                        pos_tmp = pos[index_in,0:3]-500.*numpy.array([i,j,k])
                        R[index_out] = numpy.sqrt(pos_tmp[0]*pos_tmp[0]+pos_tmp[1]*pos_tmp[1]+pos_tmp[2]*pos_tmp[2])
                        pix[index_out] = healpy.pixelfunc.vec2pix(NSIDE,pos_tmp[0],pos_tmp[1],pos_tmp[2])
                        if ((R[index_out] >1) & (R[index_out] < 500.0)):
                            pixmap[pix[index_out]] += 1.0
                        index_in += 1
                        index_out += 1
        healpy.write_map("my_map_full_500.fits", pixmap/numpy.sum(pixmap))
Example #22
def main():

    """
    This is the main routine.
    """

    h_nside=512       # The same value a bayesstar.fits
    smoothing=0.01    # Smooth the resulting map to 0.01 radians with a Gaussian
    hubbleconstant=72
    speedoflight=3E5
    

    filenameCat = 'XSC_Completed.tbl.gz'

    RA,DEC,JMAG,HMAG,KMAG = np.loadtxt(filenameCat,skiprows= 50,
                                       usecols = (0,1,3,4,5),
                                       dtype=[('f0',float),
                                              ('f1',float),
                                              ('f2',float),
                                              ('f3',float),
                                              ('f4',float)], unpack = True)
    
    MK_Star = -24.0
    DIST = np.power(10,(KMAG-MK_Star)/5)*1E-05

    REDSHIFT = (DIST*hubbleconstant)/speedoflight
    
    print("Interval of K MAG = ",min(KMAG)," - ",max(KMAG))
    print("Interval of distances [Mpc] = ",min(DIST)," - ",max(DIST))
    print("Interval of redshift = ",min(REDSHIFT)," - ",max(REDSHIFT))
    
    # Select Galaxies by redshift under the assumption of MK=MK_Star

    for rmin in np.arange(4)*0.01:
        rmax=rmin+0.01
        if rmin == 0:
            kmin=0
        else:
            kmin=5*np.log10(rmin/hubbleconstant*speedoflight*1E5)+MK_Star
            
        kmax=5*np.log10(rmax/hubbleconstant*speedoflight*1E5)+MK_Star
        galpixels_Range= np.zeros(hp.nside2npix(h_nside))
        include_me = np.logical_and((REDSHIFT > rmin),
                                    np.logical_and((REDSHIFT<rmax),
                                                   (KMAG != 0.0)))
        ra_Range         = RA[include_me]
        dec_Range        = DEC[include_me]
        pix_num_Range    = (DeclRaToIndex(dec_Range,ra_Range,h_nside))
        # np.add.at accumulates repeated pixel indices correctly (a fancy-indexed += counts each pixel only once)
        np.add.at(galpixels_Range, pix_num_Range, 1)
    
        print("Number of objects with %g < z < %g : %d" % (rmin,rmax,len(ra_Range)))
        map = hp.sphtfunc.smoothing(galpixels_Range,sigma = smoothing)
        hp.write_map("%g-%g_raw.fits" % (rmin,rmax),galpixels_Range)
        hp.write_map("%g-%g.fits" % (rmin,rmax),map)
        hp.mollview(map,coord='C',rot = [0,0.3], title='Relative Surface Density of Galaxies: %0.1f < K < %0.1f (%g < z < %g)' % (kmin,kmax,rmin,rmax), unit='prob',xsize = 2048)
        hp.graticule()
        plt.savefig("%g-%g.png" % (rmin,rmax))
        plt.show()
Example #23
 def func(nwrite, nread):
     with tempfile.TemporaryFile('a+b') as tmpfile:
         write_map(tmpfile, np.arange(12*16), nest=nwrite)
         tmpfile.seek(0)
         actual = read_map(tmpfile, nest=nread)
     with tempfile.NamedTemporaryFile('a+b') as tmpfile:
         hp.write_map(tmpfile.name, np.arange(12*16), nest=nwrite)
         expected = hp.read_map(tmpfile.name, nest=nread)
     assert_equal(expected, actual)
Example #24
    def write_fits(self, filename):
        """ Write out map data as FITS file.

        Parameters
        ----------
        filename: str
            file name for output FITS file
        """
        hp.write_map(filename, self.generated_map_data, column_units=self.unit)
Example #25
def get_fore_integral_flux_map(fore_files_list, e_min, e_max):
    """Returns the foreground map integrated between e_min and e_max
       A power law is assumed for the foreground energy spectrum, hence
       the interpolation between 2 given maps at given energies (given
       by the model) is done in logarithmic scale.
    
       fore_files_list: list of str
           Ordered list of the foreground files (one for each energy)
       e_min: float
           the min of the energy bin
       e_max: float 
           the max of the energy bin
    """
    input_file = os.path.join(FT_DATA_FOLDER, 'models/gll_iem_v06.fits')
    if not os.path.exists(input_file):
        abort("Map %s not found!"%input_file)
    frmaps = pf.open(input_file)
    fore_en = []#np.array([x[0] for x in frmaps['ENERGIES'].data])
    for ff in fore_files_list:
        m = re.search(FORE_EN, ff)
        en = int(m.group(0).replace('_', '').replace('.', ''))
        fore_en.append(en)
    fore_en = np.array(fore_en)
    out_name = fore_files_list[0].replace('_%i.fits'%fore_en[0], 
                                          '_%d-%d.fits'%(e_min, e_max))
    if os.path.exists(out_name):
        logger.info('ATT: file %s already exists and returned...'%out_name)
        fore_map = hp.read_map(out_name)
        return fore_map
    else: 
        logger.info('Computing the integral flux of the foreground model...')
        logger.info('...between %.2f - %.2f'%(e_min, e_max))
        fore_emin_sx, fore_emin_dx = find_outer_energies(e_min, fore_en)
        fore_emax_sx, fore_emax_dx = find_outer_energies(e_max, fore_en)
        fore_emin_sx_ind = np.where(fore_en == fore_emin_sx)[0][0]
        fore_emin_dx_ind = np.where(fore_en == fore_emin_dx)[0][0]
        fore_emax_sx_ind = np.where(fore_en == fore_emax_sx)[0][0]
        fore_emax_dx_ind = np.where(fore_en == fore_emax_dx)[0][0]
        fore_fmin_sx = hp.read_map(fore_files_list[fore_emin_sx_ind])
        fore_fmin_dx = hp.read_map(fore_files_list[fore_emin_dx_ind])
        fore_fmax_sx = hp.read_map(fore_files_list[fore_emax_sx_ind])
        fore_fmax_dx = hp.read_map(fore_files_list[fore_emax_dx_ind])
        m1 = (np.log10(fore_fmin_sx)-np.log10(fore_fmin_dx))/ \
            (np.log10(fore_emin_sx)-np.log10(fore_emin_dx))
        m2 = (np.log10(fore_fmax_sx)-np.log10(fore_fmax_dx))/ \
            (np.log10(fore_emax_sx)-np.log10(fore_emax_dx))
        logfore1 = m1*(np.log10(e_min)-np.log10(fore_emin_sx))+ \
            np.log10(fore_fmin_sx)
        logfore2 = m2*(np.log10(e_max)-np.log10(fore_emax_sx))+ \
            np.log10(fore_fmax_sx)
        fore1 = 10**(logfore1)
        fore2 = 10**(logfore2)
        fore_integ = np.sqrt(fore1*fore2)*(e_max - e_min)
        hp.write_map(out_name, fore_integ)
        logger.info('Created file %s'%out_name)
        return fore_integ
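The correction above interpolates the foreground model in log-log space, i.e. it fits a power law through the two model maps bracketing each bin edge and evaluates it at e_min (and likewise at e_max). A per-pixel sketch of that interpolation with illustrative numbers:

import numpy as np

# flux of one pixel at the two bracketing model energies (illustrative values)
e_sx, e_dx = 1000.0, 2000.0     # MeV
f_sx, f_dx = 3.0e-6, 1.1e-6     # arbitrary flux units

# slope in log-log space, then evaluate the power law at the bin edge
m = (np.log10(f_sx) - np.log10(f_dx)) / (np.log10(e_sx) - np.log10(e_dx))
e_min = 1500.0
f_emin = 10 ** (m * (np.log10(e_min) - np.log10(e_sx)) + np.log10(f_sx))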
Example #26
def reorganize_map(mapin, obspix, npix, nside, pol, fname=None):
    """
    From the solution map of the preconditioner to a Healpix map.
    In particular, it splits the input array ``mapin``, which is an interleaved IQU array
    for a polarization analysis, into 3 arrays ``i, q, u``.

    **Parameters**

    - ``mapin``:{array}
        solution array map (``size=npix*pol``);
    - ``obspix``:{array}
        array containing the observed pixels in the Healpix ordering;
    - ``npix``:{int}
    - ``nside``: {int}
        the same as in ``obspix2mask``;
    - ``pol``:{int}
    - ``fname``:{str}

    **Returns**

    - healpix_map:{list of arrays}
         pixelized map  with Healpix.

    """

    healpix_npix = hp.nside2npix(nside)

    if pol == 3:
        healpix_map = np.zeros(healpix_npix * pol).reshape((healpix_npix, pol))
        i = mapin[np.arange(0, npix * 3, 3)]
        q, u = mapin[np.arange(1, npix * 3, 3)], mapin[np.arange(2, npix * 3, 3)]

        m = np.where(q != 0.0)[0]
        healpix_map[obspix, 0] = i
        healpix_map[obspix, 1] = q
        healpix_map[obspix, 2] = u
        hp_list = [healpix_map[:, 0], healpix_map[:, 1], healpix_map[:, 2]]
    elif pol == 2:
        healpix_map = np.zeros(healpix_npix * pol).reshape((healpix_npix, pol))

        q, u = mapin[np.arange(0, npix * pol, 2)], mapin[np.arange(1, npix * pol, pol)]

        healpix_map[obspix, 0] = q
        healpix_map[obspix, 1] = u
        hp_list = [healpix_map[:, 0], healpix_map[:, 1]]

    elif pol == 1:
        healpix_map = np.zeros(healpix_npix)

        healpix_map[obspix] = mapin
        hp_list = healpix_map
    if fname is not None:
        hp.write_map(fname, hp_list)

    return hp_list
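A minimal sketch of the interleaved-IQU unpacking that reorganize_map performs for pol=3, using a toy observed-pixel set (all values are invented; the function defined above must be in scope, with numpy and healpy imported at module level as in the original source):

import numpy as np

nside = 8
obspix = np.array([10, 11, 12, 13])          # hypothetical observed pixels
npix = obspix.size
mapin = np.arange(npix * 3, dtype=float)     # interleaved I, Q, U values per observed pixel

i_map, q_map, u_map = reorganize_map(mapin, obspix, npix, nside, pol=3)
assert i_map[obspix[0]] == mapin[0] and q_map[obspix[0]] == mapin[1] and u_map[obspix[0]] == mapin[2]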
Example #27
File: ps.py Project: zonca/planck
def remove_dipole(m, gal_cut = 30):
    #module abs path
    abspath = os.path.dirname(__file__)
    if os.path.exists('tempmap.fits'):
        os.remove('tempmap.fits')
    healpy.write_map('tempmap.fits',m,nest = False)
    callstring = 'idl %s/fixmap.pro  -IDL_QUIET 1 -quiet  -args tempmap.fits %d' % (abspath,gal_cut)
    subprocess.call(callstring, shell=True)
    out = healpy.read_map('no_dipole_tempmap.fits', nest=True)
    os.remove('no_dipole_tempmap.fits')
    return out
Example #28
def map_generator():

	#Constructing maps...............................
	#if not os.path.exists('./map'):
	#	os.makedirs('./map')

	#Star map
	if(map_list['star'] == True): map['star'] = hp.read_map(star_map_file)[mpix2hpix]

	#Galaxy map
	if((map_list['gal'] == True) or (map_list['odds'] == True)):
		for bin in zbin:
			map['gal'][zbintag(bin)] = {}
			i_eff = 0
			for od in od_cut:
				gal_map = np.zeros(N_mpix)	
				mask = cut(bin, od, cat['p']['val']['zp'], cat['p']['val']['od'])		
				mpix = cat['p']['val']['mpix'][mask]
				for i in mpix: 
					if(i >= 0): gal_map[i] += 1
				map['gal'][zbintag(bin)][odtag(eff_cut[i_eff])] = gal_map 
				
				if(map_list['gal'] == True): hp.write_map(folder_out + 'map/map_gal' + nside_tag + '_' + zbintag(bin) + '_' + odtag(eff_cut[i_eff]) + '.fits', np.append(gal_map, 0)[hpix2mpix])
				i_eff += 1

	#Odds map
	if(map_list['odds'] == True):
		for bin in zbin:
			mask = cut(bin, 0., cat['p']['val']['zp'], cat['p']['val']['od'])	
			od_map = np.zeros(N_mpix)	
			n_map = map['gal'][zbintag(bin)][odtag(0.0)]
			mpix = cat['p']['val']['mpix'][mask]
			odds = cat['p']['val']['od'][mask]
			for i in range(len(mpix)): 
				if(mpix[i] >= 0): od_map[mpix[i]] += odds[i] 
			for i in range(N_mpix):
				if(n_map[i] != 0): od_map[i] /= n_map[i]

			od_map_corr = np.copy(od_map)
			for i in range(N_mpix):
				if(n_map[i] == 0):
					i_vec = hp.pix2vec(nside, mpix2hpix[i])
					nest = hpix2mpix[hp.query_disc(nside, i_vec, 1 * (np.pi/ 180))]
					nest = nest[nest >= 0]
					o = 0
					n = 0
					for j in nest:
						o += od_map[j]
						if (od_map[j] > 0.00000001): n += 1
					o /= float(n)
					od_map_corr[i] = o
			
			map['odds'][zbintag(bin)] = od_map_corr
			hp.write_map(folder_out + 'Map/od_map' + nside_tag + zbintag(bin) + '.fits', np.append(od_map_corr,0)[hpix2mpix])
Example #29
def simulate_noise(npix=50331648, frequency=100,
                      save=True, filename='test_noise100.fits'):

    I = np.random.standard_normal(npix)
    Q = np.random.standard_normal(npix)
    U = np.random.standard_normal(npix)
    L = np.load(data_path + 'HFI_SkyMap_{}_2048_R2.02_full_cholesky.npy'.format(frequency), 'r')
    I, Q, U = L_dot_rand_map(L,I,Q,U,npix)
    if save:
        hp.write_map(filename,[I,Q,U])
    return I, Q, U
Example #30
def mapper1(catalog_dir, nside, out_dir):
    # create empty map
    hmap = np.zeros(hp.nside2npix(nside))
    npix = hp.nside2npix(nside)
    num_s = 0
    
    for cat in listdir(catalog_dir):
        if cat.endswith(".csv") and cat.startswith("with"):
            # read catalog
            c = pandas.read_csv(join(catalog_dir, cat), sep=',', header=0, dtype={'ra' : np.float64, 'dec' : np.float64, 'modelMag_i' : np.float64, 'modelMag_r' : np.float64, 'extinction_i' : np.float64, 'extinction_r' : np.float64}, engine=None, usecols=['ra', 'dec', 'clean', 'type', 'modelMag_i', 'modelMag_r', 'extinction_i', 'extinction_r'])
            ra = c["ra"]
            dec = c["dec"]
            i = np.array(c["modelMag_i"] - c["extinction_i"])
            r = np.array(c["modelMag_r"] - c["extinction_r"])

            r_cut = np.where((r > 18.0) & (r < 18.5), True, False)
            i_cut = i < 21.3
            cleancut = c["clean"] == True
            typecut = c["type"] == 6
            totalcut = cleancut & typecut & r_cut & i_cut

            ra = ra[totalcut]
            dec = dec[totalcut]
            # ifib2 = np.array(ifib2[totalcut])

            # star count
            num_s += len(ra)

            # generate theta/phi vectors
            theta = np.deg2rad(90.0 - dec)
            phi = np.deg2rad(ra)
        
            # generate corresponding pixel_IDs
            pix_IDs = hp.ang2pix(nside, theta, phi, nest=False)

            # distribute stars according to pixel_ID
            cmap = np.bincount(pix_IDs, minlength=npix)
            assert len(cmap) == npix, ("pixel numbers mismatched")
        
            # sum to hmap
            hmap = cmap
        
            # assign filenames & write to file
            out_filename = "countmap_" + cat[:-4] + ".fits"
            hp.write_map(join(out_dir, out_filename), hmap)

            del c, ra, dec, cleancut, typecut, r_cut, i_cut
            gc.collect()
    # print("num_s =", num_s)
    # count = open(join(out_dir, "count.txt"), "w")
    # count.write(str(num_s))
    # count.close() 
    return None
Example #31
        outbase += '_gaia_%(version)s'%kwargs
    elif opts.type == 'rms':
        func = rms_photometry
        outbase += '_rms'
    else:
        msg = "Unrecognized type: %s"%args.type
        raise Exception(msg)

    results = utils.multiproc(func,args,kwargs)
    #results = [func(*a,**kwargs) for a in args]
     
    hpxmap = blank(nside)
     
    if None in results:
        print("WARNING: %i processes failed..."%results.count(None))
    for pix,stat in [r for r in results if r is not None]:
        hpxmap[pix] = stat
     
    hpxmap = np.ma.MaskedArray(hpxmap,np.isnan(hpxmap),fill_value=np.nan)
    hpxmaps[band] = hpxmap

    outfile = join(outdir,outbase+'_%s_n%i.fits'%(band,nside))
    print("Writing %s..."%outfile)
    hp.write_map(outfile,hpxmap,overwrite=True)

    q = [5,50,95]
    p = np.percentile(hpxmap.compressed(),q)
    print("Global Median Photometry:")
    print('%s (%s%%)'%(p,q))
    plt.ion()
Example #32
def clean_maps(k, fn, sdir):
    testmap = hp.read_map(f'{fn}maps_sky_signal.fits', field=np.arange(12), verbose=False)
    Qs = testmap[::2, sat_mask>0]
    Us = testmap[1::2, sat_mask>0]
    skymaps = np.array([np.transpose(Qs), np.transpose(Us)])

    nu_ref_sync_p=23.
    beta_sync_fid=-3.
    curv_sync_fid=0.

    nu_ref_dust_p=353.
    beta_dust_fid=1.5
    temp_dust_fid=19.6

    spec_i = np.zeros([2, npix])
    spec_o = np.zeros([2, npix])
    amps_o = np.zeros([3, 2, npix])
    cova_o = np.zeros([6, 2, npix])

    bs = beta_sync_fid
    bd = beta_dust_fid
    td = temp_dust_fid
    cs = curv_sync_fid
    sbs = 3.0
    sbd = 3.0
    spec_i[0] = bs
    spec_i[1] = bd

    fixed_pars={'nu_ref_d':nu_ref_dust_p,'nu_ref_s':nu_ref_sync_p,'T_d':td}
    var_pars=['beta_s','beta_d']
    var_prior_mean=[bs,bd]
    var_prior_width=[sbs,sbd]

    sky_true=sky.SkyModel(['syncpl', 'dustmbb', 'unit_response'])
    nus = [27., 39., 93., 145., 225., 280.]
    bps=np.array([{'nu':np.array([n-0.5,n+0.5]),'bps':np.array([1])} for n in nus])
    instrument=ins.InstrumentModel(bps)
    ml=mpl.MapLike({'data': skymaps, 
                    'noisevar':np.ones_like(skymaps),
                    'fixed_pars':fixed_pars,
                    'var_pars':var_pars,
                    'var_prior_mean':var_prior_mean,
                    'var_prior_width':var_prior_width,
                    'var_prior_type':['tophat' for b in var_pars]}, 
                   sky_true, 
                   instrument)
    sampler_args = {
        "method" : 'Powell',
        "tol" : None,
        "callback" : None,
        "options" : {'xtol':1E-4,'ftol':1E-4,'maxiter':None,'maxfev':None,'direc':None}
        }
    rdict = clean_pixels(ml, run_minimize, **sampler_args)

    Sbar = ml.f_matrix(rdict['params_ML']).T
    Sninv = np.linalg.inv(np.dot(Sbar.T, Sbar))
    P = np.diag([1., 1., 0.])
    Q = np.identity(6) - Sbar.dot(P).dot(Sninv).dot(Sbar.T)
    reducedmaps = np.einsum('ab, cdb', Q, skymaps).reshape((12, -1))
    
    filled_maps = np.zeros((12, hp.nside2npix(nside))) 
    filled_maps[:, sat_mask>0] = reducedmaps
    
    np.savez(f'{sdir}{sname}_hybrid_params_{k}', params=rdict['params_ML'], Sbar=Sbar, Q=Q)
    hp.write_map(f'{sdir}{sname}_residualmaps_{k}.fits', filled_maps, overwrite=True)
    return
Example #33
    args = p.parse_args()

    if args.infiles is None:
        raise ValueError('Input files must be specified')

    if args.outfile is None:
        raise ValueError('Outfile must be specified')
    else:
        comp.check_output_dir(args.outfile)

    # Read in all the input maps
    data_maps = []
    ref_maps = []
    local_maps = []
    for f in args.infiles:
        data_map, ref_map, local_map = hp.read_map(f, range(3), verbose=False)
        data_maps.append(data_map)
        ref_maps.append(ref_map)
        local_maps.append(local_map)

    # Merge maps
    merged_data = np.sum(data_maps, axis=0)
    merged_ref = np.sum(ref_maps, axis=0)
    merged_local = np.sum(local_maps, axis=0)
    hp.write_map(args.outfile, (merged_data, merged_ref, merged_local),
                 coord='C')

    print('Merged maps successfully saved, deleting unmerged maps')
    for f in args.infiles:
        os.remove(f)
Example #34
cmbFiles = [
    'simCMB01.fits', 'simCMB02.fits', 'simCMB03.fits', 'simCMB04.fits',
    'simCMB05.fits', 'simCMB06.fits', 'simCMB07.fits', 'simCMB08.fits',
    'simCMB09.fits', 'simCMB10.fits'
]  #all NSIDE=1024
#iswFile='/shared/Data/PSG/hundred_point/ISWmap1024_RING_din1_R010.fits' #NSIDE=1024, DeltaT/T
iswFile = '/shared/Data/PSG/hundred_point/ISWmap_RING_R010.fits'  #NSIDE=64, DeltaT

nAmps = 10
amplitudes = np.logspace(-1, 2, nAmps)
ampTags = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
nested = False

print('reading map ', iswFile)
isw = hp.read_map(iswFile, nest=nested)
for cmbFile in cmbFiles:
    print('reading map ', cmbFile)
    cmb = hp.read_map(mapDirectory + cmbFile, nest=nested)
    # rebin to NSIDE=64
    if nested:
        cmb = hp.ud_grade(cmb, 64, order_in='NESTED', order_out='NESTED')
    else:
        cmb = hp.ud_grade(cmb, 64, order_in='RING', order_out='RING')
    for ampNum in range(nAmps):
        myCMB = cmb + isw * amplitudes[ampNum]
        hp.write_map(mapDirectory + cmbFile[:-5] + ampTags[ampNum] + '.fits',
                     myCMB,
                     nest=nested,
                     coord='G')

print('done')
Example #35
data = comm.bcast(data, root=0)
n2zeros = data * 0  #0,0,nan,nan......
index = np.arange(len(data))

nIQU = np.zeros((3, len(data)))

#a = np.random.rand()
for i in index[n2zeros == 0]:  #full_ali map, without mask out the edge.
    nIQU[0][i] = np.random.normal(0, data[i])
    nIQU[1][i] = np.random.normal(0, data[i] * np.sqrt(2))
    nIQU[2][i] = np.random.normal(0, data[i] * np.sqrt(2))

#print(nIQU)
hp.write_map(
    '/smc/jianyao/Ali_maps/mpitest/noise_realizations_95GHz_%s.fits' % (rank),
    nIQU)
'''
### calculate the mean value and standard deviation of the noise power spectrum from different noise realizations.

lmax = 2000

noise_map = hp.read_map('/smc/jianyao/Ali_maps/Noise_realizations/95GHz/noise_realizations_95GHz_%s.fits'%(rank), field = None)

ali_ma = hp.read_map('/smc/jianyao/Ali_maps/ali_mask_wo_edge.fits')

noise_mask = Mask(noise_map)

nl = hp.anafast(noise_mask, nspec = 3, lmax = lmax)
if rank == 0:
Example #36
def catsimPopulation(config,
                     tag,
                     mc_source_id_start=1,
                     n=5000,
                     n_chunk=100,
                     known_dwarfs=False):
    """ Create a population of satellites and then simulate the stellar distributions for each.

    Parameters
    ----------
    config : configuration file or dictionary
    tag    : output name
    mc_source_id_start : starting value of source id
    n       : number of satellites to simulate [5000]
    n_chunk : number of satellites written in a file chunk

    Returns
    -------
    None
    """

    assert mc_source_id_start >= 1, "Starting mc_source_id must be >= 1"
    assert n % n_chunk == 0, "Total number of satellites must be divisible by the chunk size"
    nside_pix = 256  # NSIDE = 128 -> 27.5 arcmin, NSIDE = 256 -> 13.7 arcmin

    if not os.path.exists(tag): os.makedirs(tag)

    if isinstance(config, str): config = yaml.load(open(config))
    assert config['survey'] in ['des', 'ps1', 'lsst']

    infile_ebv = config['ebv']
    infile_fracdet = config['fracdet']
    infile_maglim_g = config['maglim_g']
    infile_maglim_r = config['maglim_r']
    infile_density = config['stellar_density']

    range_distance = config.get('range_distance', [5., 500.])
    range_stellar_mass = config.get('range_stellar_mass', [1.e1, 1.e6])
    range_r_physical = config.get('range_r_physical', [1.e-3, 2.0])
    range_ellipticity = config.get('range_ellipticity', [0.1, 0.8])
    range_position_angle = config.get('range_position_angle', [0.0, 180.0])
    choice_age = config.get('choice_age', [10., 12.0, 13.5])
    choice_metal = config.get('choice_metal', [0.00010, 0.00020])
    dwarf_file = config.get('known_dwarfs', None)

    m_density = np.load(infile_density)
    nside_density = hp.npix2nside(len(m_density))
    m_fracdet = read_map(infile_fracdet, nest=False)  #.astype(np.float16)
    nside_fracdet = hp.npix2nside(len(m_fracdet))

    m_maglim_g = read_map(infile_maglim_g, nest=False)  #.astype(np.float16)
    m_maglim_r = read_map(infile_maglim_r, nest=False)  #.astype(np.float16)

    m_ebv = read_map(infile_ebv, nest=False)  #.astype(np.float16)

    #m_foreground = healpy.read_map(infile_foreground)

    mask = (m_fracdet > 0.5)

    if known_dwarfs:
        # Simulate from known dwarfs
        if dwarf_file is None: raise Exception("Must provide known_dwarf file")
        print("Simulating dwarfs from: %s" % dwarf_file)
        area, population = ugali.simulation.population.knownPopulation(
            dwarf_file, mask, nside_pix, n)
    else:
        # r_physical is azimuthally-averaged half-light radius, kpc
        kwargs = dict(range_distance=range_distance,
                      range_stellar_mass=range_stellar_mass,
                      range_r_physical=range_r_physical,
                      range_ellipticity=[0.1, 0.8],
                      range_position_angle=[0.0, 180.0],
                      choice_age=[10., 12.0, 13.5],
                      choice_metal=[0.00010, 0.00020],
                      plot=False)
        area, population = ugali.simulation.population.satellitePopulation(
            mask, nside_pix, n, **kwargs)

    population['id'] += mc_source_id_start
    simulation_area = area

    n_g22_population = np.tile(np.nan, n)
    n_g24_population = np.tile(np.nan, n)
    abs_mag_population = np.tile(np.nan, n)
    surface_brightness_population = np.tile(np.nan, n)
    extension_population = np.tile(np.nan, n)
    difficulty_population = np.tile(0, n)

    lon_array = []
    lat_array = []
    mag_1_array = []
    mag_2_array = []
    mag_1_error_array = []
    mag_2_error_array = []
    mag_extinction_1_array = []
    mag_extinction_2_array = []
    mc_source_id_array = []
    for ii, mc_source_id in enumerate(population['id']):
        print('Simulating satellite (%i/%i) ... mc_source_id = %i' % (
            ii + 1, n, mc_source_id))
        print('  distance=%(distance).2e, stellar_mass=%(stellar_mass).2e, r_physical=%(r_physical).2e' % (
            population[ii]))
        satellite = catsimSatellite(
            config, population[ii]['lon'], population[ii]['lat'],
            population[ii]['distance'], population[ii]['stellar_mass'],
            population[ii]['r_physical'], population[ii]['ellipticity'],
            population[ii]['position_angle'], population[ii]['age'],
            population[ii]['metallicity'], m_maglim_g, m_maglim_r, m_ebv)

        n_g22_population[ii] = satellite['n_g22']
        n_g24_population[ii] = satellite['n_g24']
        abs_mag_population[ii] = satellite['abs_mag']
        extension_population[ii] = satellite['extension']
        surface_brightness_population[ii] = satellite['surface_brightness']

        # These objects are too extended and are not simulated
        if (satellite['flag_too_extended']):
            difficulty_population[ii] |= 0b0001

        # We assume that these objects would be easily detected and
        # remove them to reduce data volume
        if (surface_brightness_population[ii] < 23.5) & (n_g22_population[ii] >
                                                         1e3):
            difficulty_population[ii] |= 0b0010

        # ADW 2019-08-31: I don't think these were implemented
        #if (surface_brightness_population[ii]<25.)&(n_g22_population[ii]>1e2):
        #    difficulty_population[ii] |= 0b0010
        #if (surface_brightness_population[ii]<28.)&(n_g22_population[ii]>1e4):
        #    difficulty_population[ii] |= 0b0100
        #if (surface_brightness_population[ii]<30.)&(n_g22_population[ii]>1e5):
        #    difficulty_population[ii] |= 0b1000

        # ADW: 2019-08-31: These were Keith's original cuts, which were too aggressive
        #cut_easy = (surface_brightness_population[ii]<25.)&(n_g22_population[ii]>1.e2) \
        #           | ((surface_brightness_population[ii] < 30.) & (n_g24_population[ii] > 1.e4)) \
        #           | ((surface_brightness_population[ii] < 31.) & (n_g24_population[ii] > 1.e5))
        #cut_hard = (surface_brightness_population[ii] > 35.) | (n_g24_population[ii] < 1.)
        #cut_difficulty_population[ii] = ~cut_easy & ~cut_hard
        #if cut_easy:
        #    difficulty_population[ii] += 1 # TOO EASY
        #if cut_hard:
        #    difficulty_population[ii] += 2 # TOO HARD
        #if flag_too_extended:
        #    difficulty_population[ii] += 3 # TOO EXTENDED

        # Only write satellites that aren't flagged
        if difficulty_population[ii] == 0:
            lon_array.append(satellite['lon'])
            lat_array.append(satellite['lat'])
            mag_1_array.append(satellite['mag_1'])
            mag_2_array.append(satellite['mag_2'])
            mag_1_error_array.append(satellite['mag_1_error'])
            mag_2_error_array.append(satellite['mag_2_error'])
            mag_extinction_1_array.append(satellite['mag_extinction_1'])
            mag_extinction_2_array.append(satellite['mag_extinction_2'])
            mc_source_id_array.append(
                np.tile(mc_source_id, len(satellite['lon'])))
        else:
            print('  difficulty=%i; satellite not simulated...' % difficulty_population[ii])

    # Concatenate the arrays
    print("Concatenating arrays...")
    lon_array = np.concatenate(lon_array)
    lat_array = np.concatenate(lat_array)
    mag_1_array = np.concatenate(mag_1_array)
    mag_2_array = np.concatenate(mag_2_array)
    mag_1_error_array = np.concatenate(mag_1_error_array)
    mag_2_error_array = np.concatenate(mag_2_error_array)
    mag_extinction_1_array = np.concatenate(mag_extinction_1_array)
    mag_extinction_2_array = np.concatenate(mag_extinction_2_array)
    mc_source_id_array = np.concatenate(mc_source_id_array)

    # Now do the masking all at once
    print("Fracdet masking...")
    pix_array = ugali.utils.healpix.ang2pix(nside_fracdet, lon_array,
                                            lat_array)
    cut_fracdet = (np.random.uniform(size=len(lon_array)) <
                   m_fracdet[pix_array])

    lon_array = lon_array[cut_fracdet]
    lat_array = lat_array[cut_fracdet]
    mag_1_array = mag_1_array[cut_fracdet]
    mag_2_array = mag_2_array[cut_fracdet]
    mag_1_error_array = mag_1_error_array[cut_fracdet]
    mag_2_error_array = mag_2_error_array[cut_fracdet]
    mag_extinction_1_array = mag_extinction_1_array[cut_fracdet]
    mag_extinction_2_array = mag_extinction_2_array[cut_fracdet]
    mc_source_id_array = mc_source_id_array[cut_fracdet]

    # Create bonus columns
    print("Creating bonus columns...")
    distance_modulus_population = ugali.utils.projector.dist2mod(
        population['distance'])
    hpix_32_population = ugali.utils.healpix.ang2pix(
        32, population['lon'],
        population['lat'])  # Make sure this matches the dataset

    # Local stellar density
    pixarea = hp.nside2pixarea(nside_density,
                               degrees=True) * 60.**2  # arcmin^2
    density_population = m_density[ugali.utils.healpix.ang2pix(
        nside_density, population['lon'],
        population['lat'])] / pixarea  # arcmin^-2

    # Average fracdet within the azimuthally averaged half-light radius
    #m_fracdet_zero = np.where(m_fracdet >= 0., m_fracdet, 0.)
    #m_fracdet_zero = m_fracdet

    # Azimuthally averaged half-light radius in degrees
    r_half = np.degrees(
        np.arctan2(population['r_physical'], population['distance']))
    fracdet_half_population = meanFracdet(m_fracdet, population['lon'],
                                          population['lat'], r_half)
    fracdet_core_population = meanFracdet(m_fracdet, population['lon'],
                                          population['lat'], 0.1)
    fracdet_wide_population = meanFracdet(m_fracdet, population['lon'],
                                          population['lat'], 0.5)

    # Magnitude limits
    nside_maglim = hp.npix2nside(len(m_maglim_g))
    pix_population = ugali.utils.healpix.ang2pix(nside_maglim,
                                                 population['lon'],
                                                 population['lat'])
    maglim_g_population = m_maglim_g[pix_population]
    maglim_r_population = m_maglim_r[pix_population]

    # E(B-V)
    nside_ebv = hp.npix2nside(len(m_ebv))
    pix_population = ugali.utils.healpix.ang2pix(nside_ebv, population['lon'],
                                                 population['lat'])
    ebv_population = m_ebv[pix_population]

    # Survey
    survey_population = np.tile(config['survey'], len(population))

    # Number of surviving catalog stars
    n_catalog_population = np.histogram(mc_source_id_array,
                                        bins=np.arange(
                                            population['id'][0] - 0.5,
                                            population['id'][-1] + 0.51))[0]

    # Faked-up coadd_object_ids
    coadd_object_id_array = []
    for mc_source_id in population['id']:
        coadd_object_id_array.append(
            (1000000 * mc_source_id) + 1 +
            np.arange(np.sum(mc_source_id == mc_source_id_array)))
    # Assign negative numbers to distinguish from real objects
    coadd_object_id_array = -1 * np.concatenate(coadd_object_id_array)

    # Object ID assignment can get messed up if there are duplicate population ids
    assert len(coadd_object_id_array) == len(mc_source_id_array)

    # Population metadata output file
    tbhdu = pyfits.BinTableHDU.from_columns([
        pyfits.Column(name='RA',
                      format='E',
                      array=population['lon'],
                      unit='deg'),
        pyfits.Column(name='DEC',
                      format='E',
                      array=population['lat'],
                      unit='deg'),
        pyfits.Column(name='DISTANCE',
                      format='E',
                      array=population['distance'],
                      unit='kpc'),
        pyfits.Column(name='DISTANCE_MODULUS',
                      format='E',
                      array=distance_modulus_population,
                      unit='kpc'),
        pyfits.Column(name='STELLAR_MASS',
                      format='E',
                      array=population['stellar_mass'],
                      unit='Msun'),
        pyfits.Column(name='R_PHYSICAL',
                      format='E',
                      array=population['r_physical'],
                      unit='kpc'),
        pyfits.Column(name='N_G22',
                      format='J',
                      array=n_g22_population,
                      unit=''),
        pyfits.Column(name='N_G24',
                      format='J',
                      array=n_g24_population,
                      unit=''),
        pyfits.Column(name='N_CATALOG',
                      format='J',
                      array=n_catalog_population,
                      unit=''),
        pyfits.Column(name='DIFFICULTY',
                      format='J',
                      array=difficulty_population,
                      unit=''),
        pyfits.Column(name='ABS_MAG',
                      format='E',
                      array=abs_mag_population,
                      unit='mag'),
        pyfits.Column(name='SURFACE_BRIGHTNESS',
                      format='E',
                      array=surface_brightness_population,
                      unit='mag arcsec^-2'),
        pyfits.Column(name='EXTENSION',
                      format='E',
                      array=extension_population,
                      unit='deg'),
        pyfits.Column(name='ELLIPTICITY',
                      format='E',
                      array=population['ellipticity'],
                      unit=''),
        pyfits.Column(name='POSITION_ANGLE',
                      format='E',
                      array=population['position_angle'],
                      unit='deg'),
        pyfits.Column(name='AGE',
                      format='E',
                      array=population['age'],
                      unit='Gyr'),
        pyfits.Column(name='METAL_Z',
                      format='E',
                      array=population['metallicity'],
                      unit=''),
        pyfits.Column(name='MC_SOURCE_ID',
                      format='K',
                      array=population['id'],
                      unit=''),
        pyfits.Column(name='HPIX_32',
                      format='E',
                      array=hpix_32_population,
                      unit=''),
        pyfits.Column(name='DENSITY',
                      format='E',
                      array=density_population,
                      unit='arcmin^-2'),
        pyfits.Column(name='FRACDET_HALF',
                      format='E',
                      array=fracdet_half_population,
                      unit=''),
        pyfits.Column(name='FRACDET_CORE',
                      format='E',
                      array=fracdet_core_population,
                      unit=''),
        pyfits.Column(name='FRACDET_WIDE',
                      format='E',
                      array=fracdet_wide_population,
                      unit=''),
        pyfits.Column(name='MAGLIM_G',
                      format='E',
                      array=maglim_g_population,
                      unit='mag'),
        pyfits.Column(name='MAGLIM_R',
                      format='E',
                      array=maglim_r_population,
                      unit='mag'),
        pyfits.Column(name='EBV', format='E', array=ebv_population,
                      unit='mag'),
        pyfits.Column(name='SURVEY',
                      format='A12',
                      array=survey_population,
                      unit=''),
    ])
    tbhdu.header.set('AREA', simulation_area, 'Simulation area (deg^2)')
    print("Writing population metadata file...")
    filename = '%s/sim_population_%s_mc_source_id_%07i-%07i.fits' % (
        tag, tag, mc_source_id_start, mc_source_id_start + n - 1)
    tbhdu.writeto(filename, overwrite=True)

    # Write simulated catalogs

    # Simulated catalog output needs to match the real data
    #   https://github.com/sidneymau/simple/blob/master/search_algorithm.py
    #   https://github.com/sidneymau/simple/blob/master/config.yaml
    #   /home/s1/kadrlica/projects/y3a2/dsphs/v2/skim/
    #   e.g., /home/s1/kadrlica/projects/y3a2/dsphs/v2/skim/y3a2_ngmix_cm_11755.fits
    # for ii in range(0, len(d.formats)): print '\'%s\': [ , \'%s\'],'%(d.names[ii], d.formats[ii])
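    # A minimal (commented-out) sketch, using the same pyfits conventions as above,
    # for checking that the simulated columns line up with a real skim file; the
    # filename below is only an illustrative placeholder for one of the skims listed:
    #   d = pyfits.open('y3a2_ngmix_cm_11755.fits')[1].data
    #   for name, fmt in zip(d.names, d.formats):
    #       print(name, fmt)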

    default_array = np.tile(-9999., len(mc_source_id_array))
    if config['survey'] == 'des':
        # Y3 Gold v2.0
        key_map = odict([
            ('COADD_OBJECT_ID', [coadd_object_id_array, 'K']),
            ('RA', [lon_array, 'D']),
            ('DEC', [lat_array, 'D']),
            ('SOF_PSF_MAG_CORRECTED_G', [mag_1_array, 'D']),
            ('SOF_PSF_MAG_CORRECTED_R', [mag_2_array, 'D']),
            ('SOF_PSF_MAG_ERR_G', [mag_1_error_array, 'D']),
            ('SOF_PSF_MAG_ERR_R', [mag_2_error_array, 'D']),
            ('A_SED_SFD98_G', [mag_extinction_1_array, 'E']),
            ('A_SED_SFD98_R', [mag_extinction_2_array, 'E']),
            ('WAVG_MAG_PSF_G', [mag_1_array + mag_extinction_1_array, 'E']),
            ('WAVG_MAG_PSF_R', [mag_2_array + mag_extinction_2_array, 'E']),
            ('WAVG_MAGERR_PSF_G', [mag_1_error_array, 'E']),
            ('WAVG_MAGERR_PSF_R', [mag_2_error_array, 'E']),
            ('WAVG_SPREAD_MODEL_I', [default_array, 'E']),
            ('WAVG_SPREADERR_MODEL_I', [default_array, 'E']),
            ('SOF_CM_T', [default_array, 'D']),
            ('SOF_CM_T_ERR', [default_array, 'D']),
            ('FLAGS_GOLD', [np.tile(0, len(mc_source_id_array)), 'J']),
            ('EXTENDED_CLASS_MASH_SOF',
             [np.tile(0, len(mc_source_id_array)), 'I']),
        ])
    elif config['survey'] == 'ps1':
        # PS1
        key_map = odict([
            ('OBJID', [coadd_object_id_array, 'K']),
            ('RA', [lon_array, 'D']),
            ('DEC', [lat_array, 'D']),
            #('UNIQUEPSPSOBID',          [coadd_object_id_array, 'K']),
            #('OBJINFOFLAG',             [default_array, 'E']),
            #('QUALITYFLAG',             [np.tile(16, len(mc_source_id_array)), 'I']),
            #('NSTACKDETECTIONS',        [np.tile(99, len(mc_source_id_array)), 'I']),
            #('NDETECTIONS',             [np.tile(99, len(mc_source_id_array)), 'I']),
            #('NG',                      [default_array, 'E']),
            #('NR',                      [default_array, 'E']),
            #('NI',                      [default_array, 'E']),
            ('GFPSFMAG', [mag_1_array + mag_extinction_1_array, 'E']),
            ('RFPSFMAG', [mag_2_array + mag_extinction_2_array, 'E']),
            #('IFPSFMAG',                 [np.tile(0., len(mc_source_id_array)), 'E'], # Pass star selection
            ('GFPSFMAGERR', [mag_1_error_array, 'E']),
            ('RFPSFMAGERR', [mag_2_error_array, 'E']),
            #('IFPSFMAGERR',              [default_array, 'E']),
            #('GFKRONMAG',                [mag_1_array, 'E']),
            #('RFKRONMAG',                [mag_2_array, 'E']),
            #('IFKRONMAG',                [np.tile(0., len(mc_source_id_array)), 'E'], # Pass star selection
            #('GFKRONMAGERR',             [mag_1_error_array, 'E']),
            #('RFKRONMAGERR',             [mag_2_error_array, 'E']),
            #('IFKRONMAGERR',             [default_array, 'E']),
            #('GFLAGS',                   [np.tile(0, len(mc_source_id_array)), 'I']),
            #('RFLAGS',                   [np.tile(0, len(mc_source_id_array)), 'I']),
            #('IFLAGS',                   [np.tile(0, len(mc_source_id_array)), 'I']),
            #('GINFOFLAG',                [np.tile(0, len(mc_source_id_array)), 'I']),
            #('RINFOFLAG',                [np.tile(0, len(mc_source_id_array)), 'I']),
            #('IINFOFLAG',                [np.tile(0, len(mc_source_id_array)), 'I']),
            #('GINFOFLAG2',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('RINFOFLAG2',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('IINFOFLAG2',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('GINFOFLAG3',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('RINFOFLAG3',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('IINFOFLAG3',               [np.tile(0, len(mc_source_id_array)), 'I']),
            #('PRIMARYDETECTION',         [default_array, 'E']),
            #('BESTDETECTION',            [default_array, 'E']),
            #('EBV',                      [default_array, 'E']),
            #('EXTSFD_G',                 [mag_extinction_1_array 'E']),
            #('EXTSFD_R',                 [mag_extinction_2_array, 'E']),
            #('EXTSFD_I',                 [default_array, 'E']),
            ('GFPSFMAG_SFD', [mag_1_array, 'E']),
            ('RFPSFMAG_SFD', [mag_2_array, 'E']),
            ('EXTENDED_CLASS', [np.tile(0, len(mc_source_id_array)), 'I']),
        ])
    elif config['survey'] == 'lsst':  # Keys match those in the GCRCatalog native_quantities
        key_map = odict([
            ('objectId', [coadd_object_id_array, 'K']),
            ('coord_ra', [lon_array, 'D']),
            ('coord_dec', [lat_array, 'D']),
            ('mag_g', [mag_1_array + mag_extinction_1_array, 'E']),
            ('mag_r', [mag_2_array + mag_extinction_2_array, 'E']),
            ('magerr_g', [mag_1_error_array, 'D']),
            ('magerr_r', [mag_2_error_array, 'D']),
            ('mag_corrected_g', [mag_1_array, 'D']),
            ('mag_corrected_r', [mag_2_array, 'D']),
            ('extended_class', [np.tile(0, len(mc_source_id_array)), 'I']),
        ])
    key_map['MC_SOURCE_ID'] = [mc_source_id_array, 'K']

    print("Writing catalog files...")
    for mc_source_id_chunk in np.split(
            np.arange(mc_source_id_start, mc_source_id_start + n),
            n // n_chunk):
        outfile = '%s/sim_catalog_%s_mc_source_id_%07i-%07i.fits' % (
            tag, tag, mc_source_id_chunk[0], mc_source_id_chunk[-1])
        print('  ' + outfile)
        sel = np.in1d(mc_source_id_array, mc_source_id_chunk)
        columns = [
            pyfits.Column(name=k, format=v[1], array=v[0][sel])
            for k, v in key_map.items()
        ]
        tbhdu = pyfits.BinTableHDU.from_columns(columns)
        tbhdu.header.set('AREA', simulation_area, 'Simulation area (deg^2)')
        tbhdu.header.set('IDMIN', mc_source_id_chunk[0],
                         'Minimum MC_SOURCE_ID')
        tbhdu.header.set('IDMAX', mc_source_id_chunk[-1],
                         'Maximum MC_SOURCE_ID')
        tbhdu.writeto(outfile, overwrite=True)

    # Mask output file
    print("Writing population mask file...")
    outfile_mask = '%s/sim_mask_%s_cel_nside_%i.fits' % (
        tag, tag, hp.npix2nside(len(mask)))
    if not os.path.exists(outfile_mask):
        hp.write_map(outfile_mask,
                     mask.astype(int),
                     nest=True,
                     coord='C',
                     overwrite=True)
        os.system('gzip -f %s' % (outfile_mask))
Esempio n. 37
0
def make_synch_diff_maps():
    scale_to_30 = (30./28.4)**(-3.1)
    scale_to_spass = (2.305/28.4)**(-3.1)
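    # These factors rescale the synchrotron amplitude as a power law in frequency,
    # (nu / 28.4 GHz)**beta with beta = -3.1, taking the joint solution (apparently
    # referenced to 28.4 GHz) to 30 GHz and to 2.305 GHz (S-PASS).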

    joint_synch_Q = hp.read_map('../'+dir+'/synch_Q_030_mean.fits')
    joint_synch_U = hp.read_map('../'+dir+'/synch_U_030_mean.fits')

    joint_Q_30    = scale_to_30*joint_synch_Q
    joint_U_30    = scale_to_30*joint_synch_U
    joint_Q_spass = scale_to_spass*joint_synch_Q
    joint_U_spass = scale_to_spass*joint_synch_U

    bp_030_Q      = hp.read_map('../data/BP_synch_Q_n0064.fits')
    bp_030_U      = hp.read_map('../data/BP_synch_U_n0064.fits')

    npipe_30_Q    = hp.read_map('../data/npipe6v20_comm_synch_n0064_60arc_Q_rc1.fits')
    npipe_30_U    = hp.read_map('../data/npipe6v20_comm_synch_n0064_60arc_U_rc1.fits')

    spass_Q       = hp.read_map('../data/spass_rmrot_n0064_ring_masked.fits',field=1)
    spass_U       = hp.read_map('../data/spass_rmrot_n0064_ring_masked.fits',field=2)

    bp_min_joint_Q    = bp_030_Q - joint_Q_30
    bp_min_joint_U    = bp_030_U - joint_U_30

    np_min_joint_Q    = npipe_30_Q - joint_Q_30
    np_min_joint_U    = npipe_30_U - joint_U_30

    spass_min_joint_Q = spass_Q - joint_Q_spass
    spass_min_joint_U = spass_U - joint_U_spass

    hp.write_map('../'+dir+'/npipe_minus_joint_Q_60arcmin_n0064.fits',np_min_joint_Q)
    hp.write_map('../'+dir+'/npipe_minus_joint_U_60arcmin_n0064.fits',np_min_joint_U)
    hp.write_map('../'+dir+'/BP_minus_joint_Q_60arcmin_n0064.fits',bp_min_joint_Q)
    hp.write_map('../'+dir+'/BP_minus_joint_U_60arcmin_n0064.fits',bp_min_joint_U)
    hp.write_map('../'+dir+'/spass_minus_joint_Q_60arcmin_n0064.fits',spass_min_joint_Q)
    hp.write_map('../'+dir+'/spass_minus_joint_U_60arcmin_n0064.fits',spass_min_joint_U)
Esempio n. 38
0
#!/usr/bin/env python
#
#  hpm_np_to_fits.py
#
#
#  Created by Danny Jacobs on 3/19/10.
#  PAPER Project
#
"""
convert a txt file with a list of healpix values into a healpix fits format

"""
import aipy as a, numpy as n, pylab as p, math as m
import sys, optparse, healpy as hp

o = optparse.OptionParser()
#a.scripting.add_standard_options(o, cal=True)
o.add_option('--nest',
             dest='nest',
             action='store_true',
             help="Switch from default ring mode to nest.")
opts, args = o.parse_args(sys.argv[1:])

for file in args:
    outfile = '.'.join(file.split('.')[:-1]) + '.fits'
    hpm = n.loadtxt(file, comments='#')
    print file + ' > ' + outfile
    hp.write_map(outfile, hpm, nest=opts.nest)
Esempio n. 39
0
def test(nested=False, doPlot=False, nside=64, doTangent=False):
    """
  Note that this is not a rigorous testing function
  Purpose:
      reads overmass files from overmass directory and creates ISW maps and masks
        using overmass profiles

  Args:
      nested:
      doPlot:
      NSIDE: must be 64 or 1024
      doTangent: set this to approximate arc lengths using tangent lines.
        Default: false.  Arc lengths are properly accounted for.
  Returns:
      writes ISW maps and masks to disk
  """

    CMBtemp = 2.7260  # +-0.0013 K (WMAP) Fixsen, 2009

    # create comoving distance function
    zVals, comDists = getComDist(zMax=1.0, nSteps=5000)
    comovInterp = interp1d(zVals, comDists)

    # load HEALpix coordinates file
    print 'NSIDE=', nside, ' NESTED=', nested
    longitudes, latitudes = getMapCoords(nside, nested)

    # load GNS catalog coordinates
    cgl, cgb, vgl, vgb = getGNScoords()

    # collect filenames
    overmassDirectory = '/Data/PSG/'
    ISWDirectory = '/Data/PSG/hundred_point/'
    directoryFiles = listdir(overmassDirectory)
    overmassFiles = [file for file in directoryFiles if 'overmass' in file]

    # just do one file for now
    #overmassFiles = [file for file in overmassFiles if 'PSGplot060' in file]
    #overmassFiles = [file for file in overmassFiles if 'PSGplot100' in file]
    #overmassFiles = [file for file in overmassFiles if 'R010' in file]
    #overmassFiles = [file for file in overmassFiles if 'R020' in file]
    #overmassFiles = [file for file in overmassFiles if 'R030' in file]
    #overmassFiles = [file for file in overmassFiles if 'R040' in file]
    #overmassFiles = [file for file in overmassFiles if 'R050' in file]
    #overmassFiles = [file for file in overmassFiles if 'R060' in file]
    #overmassFiles = [file for file in overmassFiles if 'R070' in file]
    #overmassFiles = [file for file in overmassFiles if 'R080' in file]
    #overmassFiles = [file for file in overmassFiles if 'R090' in file]
    #overmassFiles = [file for file in overmassFiles if 'R100' in file]
    #overmassFiles = [file for file in overmassFiles if 'R110' in file]
    #overmassFiles = [file for file in overmassFiles if 'R120' in file]
    #overmassFiles = [file for file in overmassFiles if 'R130' in file]
    #overmassFiles = [file for file in overmassFiles if 'R140' in file]
    #overmassFiles = [file for file in overmassFiles if 'R150' in file]
    #overmassFiles = [file for file in overmassFiles if 'R160' in file]
    newProfile = False  #True
    newMap = True  #False

    # create healpix maps

    #zList = [0.4,0.45,0.5,0.55,0.6,0.65,0.7,0.75]
    zList = [0.52]  # used by PSG for median of GNS catalog
    rmax = 800  # Mpc, 2x PSGplot max
    #rmax = 1200    # Mpc, twice the rmax of overmass profiles
    npoints = 101  # number of points in the ISW profile
    delta_z = 0.3  # for limits of integration when making ISW profile
    cutoff = 0.02  # Maps extend to radius where amplitude = maxAmp*cutoff

    # loop over zList to create maps at each redshift
    for zCent in zList:
        zStr = str(zCent)  # sloppy formatting: gives e.g. "0.4" where "0.40" is wanted
        print 'starting with z_cent = ' + zStr
        D_comov = comovInterp(zCent)
        print 'redshift: ', zCent, ', comoving dist: ', D_comov, ' Mpc'
        for omFile in overmassFiles:
            # get ISW profile
            ISWProfileFile = 'ISWprofile_z' + zStr + omFile[
                8:]  # 'overmass' is at start of omFile and has 8 characters
            if newProfile:
                print 'reading file ', omFile
                impactDomain, ISWRange = makeISWProfile(
                    overmassDirectory + omFile,
                    zCent,
                    ISWDirectory + ISWProfileFile,
                    rmax=rmax,
                    npoints=npoints,
                    delta_z=delta_z)
            else:
                print 'loading file ', ISWProfileFile
                impactDomain, ISWRange = np.loadtxt(ISWDirectory +
                                                    ISWProfileFile,
                                                    unpack=True)
            print 'impactDomain: ', impactDomain, ' Mpc'
            print 'ISWRange: ', ISWRange, ' DeltaT/T'

            if newMap:
                # add point in outer region for kludgy extrapolation, just as in ISWprofile.clusterVoid.__init__
                impactDomain = np.concatenate(
                    [impactDomain, [2 * impactDomain[-1]]])
                ISWRange = np.concatenate(
                    [ISWRange,
                     [0]])  # ramps down to zero at r=2*impactDomain[-1]

                # find cutoff radius at cutoff*100% of maximum amplitude
                maxAmp = ISWRange[0]
                impactLookup = interp1d(ISWRange,
                                        impactDomain)  #,kind='cubic')
                print 'maxAmp, cutoff, product: ', maxAmp, cutoff, maxAmp * cutoff
                maxRadius = impactLookup(maxAmp * cutoff)
                print 'max radius: ', maxRadius, ' Mpc'

                #doTangent = False
                if doTangent:  # for easier calculations, use maxRadius as arc length instead of tangential length
                    maxAngle = maxRadius / D_comov  #radians
                else:  # use maxRadius as a chord length
                    maxAngle = 2 * np.arcsin(maxRadius / (2 * D_comov))
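                # chord geometry: a feature of physical radius r at comoving
                # distance D subtends theta = 2*arcsin(r/(2*D)); the tangent/arc
                # approximation theta ~ r/D agrees when r << D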
                print 'radius for disc: ', maxAngle * 180 / np.pi, ' degrees'

                # visually check accuracy of interpolation function
                if doPlot:
                    # create ISW signal interpolation function
                    ISWinterp = interp1d(impactDomain,
                                         ISWRange)  # same line as in getCVmap
                    impactTest = np.linspace(0, maxRadius, 100)
                    ISWTest = ISWinterp(impactTest)

                    plt.plot(
                        impactDomain,
                        ISWRange)  # data points used to make interpolation
                    plt.plot(impactTest,
                             ISWTest)  # from interpolation function
                    plt.xlabel('r [Mpc]')
                    plt.ylabel('ISW: DeltaT / T')
                    plt.show()

                numCV = 50
                #numCV = 2
                mapArray = np.zeros(hp.nside2npix(nside))
                mask = np.zeros(hp.nside2npix(nside))
                cCentralVec = glgb2vec(cgl,
                                       cgb)  #returns array of unit vectors
                vCentralVec = glgb2vec(vgl,
                                       vgb)  #returns array of unit vectors
                for cvNum in np.arange(numCV):
                    print 'starting cv number ', cvNum + 1
                    cIndices, cISW = getCVmap(nside,
                                              cCentralVec[cvNum],
                                              maxAngle,
                                              latitudes,
                                              longitudes,
                                              impactDomain,
                                              ISWRange,
                                              D_comov,
                                              nest=nested,
                                              isVoid=False,
                                              doTangent=doTangent)
                    mapArray[cIndices] += cISW
                    mask[cIndices] = 1
                    vIndices, vISW = getCVmap(nside,
                                              vCentralVec[cvNum],
                                              maxAngle,
                                              latitudes,
                                              longitudes,
                                              impactDomain,
                                              ISWRange,
                                              D_comov,
                                              nest=nested,
                                              isVoid=True,
                                              doTangent=doTangent)
                    mapArray[vIndices] += vISW
                    mask[vIndices] = 1

                # 'overmass' is at start of omFile and has 8 characters; 'txt' is at end
                if nside == 64:
                    ISWMap = 'ISWmap_RING_z' + zStr + omFile[8:-3] + 'fits'
                    hp.write_map(ISWDirectory + ISWMap,
                                 mapArray * CMBtemp,
                                 nest=nested,
                                 coord='GALACTIC')
                    ISWMask = 'ISWmask_RING_z' + zStr + omFile[8:-3] + 'fits'
                    hp.write_map(ISWDirectory + ISWMask,
                                 mask,
                                 nest=nested,
                                 coord='GALACTIC')
                elif nside == 1024:
                    ISWMap = 'ISWmap_RING_1024_z' + zStr + omFile[8:-3] + 'fits'
                    hp.write_map(ISWDirectory + ISWMap,
                                 mapArray * CMBtemp,
                                 nest=nested,
                                 coord='GALACTIC')
                    ISWMask = 'ISWmask_RING_1024_z' + zStr + omFile[
                        8:-3] + 'fits'
                    hp.write_map(ISWDirectory + ISWMask,
                                 mask,
                                 nest=nested,
                                 coord='GALACTIC')
Esempio n. 40
0
# cls are given multiplied by l(l+1)/(2pi), so we need to remove this constant [units are muK^2]
cl *= 2. * np.pi / (l * (l + 1.))

# need to add cls for l = 0, 1 (i.e. l=0 => constant background and l=1 => dipole)
l = np.concatenate([np.array([0., 1.]), l])
cl = np.concatenate([np.zeros(2), cl])

# see https://healpy.readthedocs.io/en/latest/generated/healpy.sphtfunc.synfast.html#healpy.sphtfunc.synfast
# for additional parameters (such as adding a Gaussian smoothing, etc)
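# For instance (illustrative only), a ~10 arcmin Gaussian beam could be applied at
# generation time through synfast's optional fwhm argument (given in radians):
#   map_sim = hp.synfast(cl, nside, lmax=lmax, fwhm=np.radians(10. / 60.))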

nside, lmax = 2048, 1500
map_sim = hp.synfast(cl, nside, lmax=lmax)

# saving the map as a FITS file using a healpy function; of course you could save this in any way you want. File size ~192 MB
filename = 'maps/test_map.fits'
hp.write_map(filename, map_sim)

# opening maps (checking this works)
map_sim = hp.read_map(filename)

# plot simulated map
hp.mollview(map_sim)
plt.show()

# measure cls from the simulated map
cl_sim = hp.anafast(map_sim, lmax=lmax)
cl_sim *= (l[:len(cl_sim)] * (l[:len(cl_sim)] + 1.)) / (2. * np.pi)
cl *= (l * (l + 1.)) / (2. * np.pi)

# plot input cls vs cls from simulated map
plt.figure(figsize=(8, 6))
Esempio n. 41
0
def save_jk_map(jk_map, fn):
    '''Save jackknife map to fits file.'''
    hp.write_map(fn, jk_map, overwrite=True)
    print(':: Jackknife map saved to file: {}'.format(fn))
Esempio n. 42
0
print "writing", outfile
#load the input CST txt file
#find the healpix indices corresponding to the theta/phi
#make a healpix map

D = n.loadtxt(args[0], skiprows=3)
theta = D[:, 0] * n.pi / 180
#CST will output either elev or phi. phi is zero at beam x=y=0, elev is zero at x=0,phi=0
if open(args[0]).readlines()[0].startswith('Elev'): theta += n.pi / 2
phi = D[:, 1] * n.pi / 180
#account for stupid CST full circle cuts
phi[theta < 0] += n.pi
theta[theta < 0] = n.abs(theta[theta < 0])
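#(a sample listed at negative theta points in the same direction as (|theta|, phi + pi))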
beam = D[:, 2]  #beam amplitude in dB
if opts.rot90:
    phi += n.pi / 2
if opts.flup:
    theta = n.pi - theta
healpix_indexes = hpy.ang2pix(opts.nside, theta, phi)

hp_map = n.zeros(hpy.nside2npix(opts.nside))

hp_map[healpix_indexes] = beam
hp_map -= hp_map[0]

if opts.voltage:
    hp_map = 10**(hp_map / 20.)
print "max/min", hp_map.max(), hp_map.min()
hpy.write_map(outfile, hp_map, fits_IDL=False)
print "Write successfull"
Esempio n. 43
0
def make_cmb_sims(params):
    """ Write cmb maps on disk

    Parameters
    ----------
    params: module contating all the simulation parameters

    """
    nmc_cmb = params.nmc_cmb
    nside = params.nside
    smooth = params.gaussian_smooth
    ch_name = [
        'SO_SAT_27', 'SO_SAT_39', 'SO_SAT_93', 'SO_SAT_145', 'SO_SAT_225',
        'SO_SAT_280'
    ]
    freqs = sonc.Simons_Observatory_V3_SA_bands()
    beams = sonc.Simons_Observatory_V3_SA_beams()
    band_int = params.band_int
    parallel = params.parallel
    root_dir = params.out_dir
    out_dir = f'{root_dir}/cmb/'
    file_str = params.file_string
    seed_cmb = params.seed_cmb
    cmb_ps_file = params.cmb_ps_file
    rank = 0
    size = 1
    if params.parallel:
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        size = comm.Get_size()
    if not os.path.exists(out_dir) and rank == 0:
        os.makedirs(out_dir)
    if cmb_ps_file:
        print(cmb_ps_file)
        cl_cmb = hp.read_cl(cmb_ps_file)
    else:
        cmb_ps_scalar_file = os.path.join(os.path.dirname(__file__),
                                          'datautils/Cls_Planck2018_r0.fits')
        cl_cmb_scalar = hp.read_cl(cmb_ps_scalar_file)
        cmb_ps_tensor_r1_file = os.path.join(
            os.path.dirname(__file__),
            'datautils/Cls_Planck2018_tensor_r1.fits')
        cmb_r = params.cmb_r
        cl_cmb_tensor = hp.read_cl(cmb_ps_tensor_r1_file) * cmb_r
        cl_cmb = cl_cmb_scalar + cl_cmb_tensor
    nmc_cmb = math.ceil(nmc_cmb / size) * size
    if nmc_cmb != params.nmc_cmb:
        print_rnk0(f'WARNING: setting nmc_cmb = {nmc_cmb}', rank)
    perrank = nmc_cmb // size
    for nmc in range(rank * perrank, (rank + 1) * perrank):
        if seed_cmb:
            np.random.seed(seed_cmb + nmc)
        nmc_str = str(nmc).zfill(4)
        if not os.path.exists(out_dir + nmc_str):
            os.makedirs(out_dir + nmc_str)
        cmb_temp = hp.synfast(cl_cmb, nside, new=True, verbose=False)
        file_name = f'cmb_{nmc_str}_{file_str}.fits'
        file_tot_path = f'{out_dir}{nmc_str}/{file_name}'
        hp.write_map(file_tot_path, cmb_temp, overwrite=True, dtype=np.float32)
        os.environ["PYSM_LOCAL_DATA"] = f'{out_dir}'
        sky = pysm3.Sky(nside=nside,
                        component_objects=[
                            pysm3.CMBMap(nside,
                                         map_IQU=f'{nmc_str}/{file_name}')
                        ])
        for nch, chnl in enumerate(ch_name):
            freq = freqs[nch]
            fwhm = beams[nch]
            cmb_map = sky.get_emission(freq * u.GHz)
            cmb_map = cmb_map.to(u.uK_CMB,
                                 equivalencies=u.cmb_equivalencies(freq *
                                                                   u.GHz))
            if smooth:
                cmb_map_smt = hp.smoothing(cmb_map,
                                           fwhm=np.radians(fwhm / 60.),
                                           verbose=False)
            else:
                cmb_map_smt = cmb_map
            file_name = f'{chnl}_cmb_{nmc_str}_{file_str}.fits'
            file_tot_path = f'{out_dir}{nmc_str}/{file_name}'
            hp.write_map(file_tot_path,
                         cmb_map_smt,
                         overwrite=True,
                         dtype=np.float32)
Esempio n. 44
0
    m[goodpix] /= np.sqrt(nh_LA[goodpix])

np.random.seed(seed)
t_SA, q_SA, u_SA = hp.synfast(
    [
        Nl_SA_T[channel], Nl_SA_P[channel], Nl_SA_P[channel], zeros, zeros,
        zeros
    ],
    nside=nside,
    pol=True,
    new=True,
    verbose=False,
)
goodpix = np.where(nh_SA > 0)
badpix = np.where(nh_SA <= 0)
for m in [t_SA, q_SA, u_SA]:
    m[badpix] = 0
    m[goodpix] /= np.sqrt(nh_SA[goodpix])

# Write maps
hp.write_map(
    "noise_SA_uKCMB_classical_nside%d_channel%d_seed%d.fits" %
    (nside, channel, seed),
    [t_SA, q_SA, u_SA],
)
hp.write_map(
    "noise_LA_uKCMB_classical_nside%d_channel%d_seed%d.fits" %
    (nside, channel, seed),
    [t_LA, q_LA, u_LA],
)
Esempio n. 45
0
# a low Nside.

from __future__ import division
import healpy as hp
import numpy as np

Ns = 1024  # Nside of original map

mapsample = np.arange(hp.nside2npix(Ns))
masksample = np.ones_like(mapsample)

Ns_out = 32  # mask out pixels with Nside=32 in turn to create jackknife samples

mask_dg = hp.ud_grade(masksample, Ns_out)
jid = 0

for j in range(hp.nside2npix(Ns_out)):
    if mask_dg[j] == 0:
        continue
    else:
        print('Jack: ' + str(jid))
        jackmask = np.ones(hp.nside2npix(Ns_out))
        jackmask[j] = 0
        jackmask_ud = hp.ud_grade(jackmask, Ns)
        mask_jack = jackmask_ud * masksample
        print(jid)
        hp.write_map('mask_jack' + str(jid) + '_' + str(Ns) + '.fits',
                     mask_jack,
                     overwrite=True)
        jid += 1
Esempio n. 46
0
    )
    e_hp_hdu = fits.ImageHDU(DL07_paras, fits.Header(hp_header))
    e_hp_hdu.header['UNIT'] = r"$M_{sun}/kpc^2$"
    #-------------------------------------------------
    # Draw the map
    '''
    hp.mollview(
        DL07_paras/e_DL07_paras, 
        unit="S/N",
        #norm = 'log',
        nest=True,
        min = 0,
        max = 10,
    )
    hp.graticule()
    plt.show()
    '''
    #-------------------------------------------------
    # Save the SNR all-sky map
    hp.write_map(
        "SNR.fits",
        DL07_paras / e_DL07_paras,
        nest=True,
        coord='G',
        overwrite=True,
    )
    #-----------------------------------
    # Measure time
    elapsed_time = time.time() - start_time
    print("Exiting Main Program, spending ", elapsed_time, "seconds.")
Esempio n. 47
0
mps_no2 = np.transpose(mps_no2[:, 1:, :], axes=[1, 2, 0])
mps_nv = np.transpose(mps_nv[:, 1:, :], axes=[1, 2, 0])
if W_FWHM:
    mps_d = np.zeros([2, 2, hp.nside2npix(NSIDE_DEFAULT), nnu])
    for inu in np.arange(nnu):
        for ip in [0, 1]:
            mp_sky = hp.smoothing((mpfg + mpc)[ip, :, inu],
                                  fwhm=beam_max,
                                  verbose=False)
            mps_d[0, ip, :, inu] = mp_sky + mps_no1[ip, :, inu]
            mps_d[1, ip, :, inu] = mp_sky + mps_no2[ip, :, inu]
    mps_d_rw = np.array([mps_no1, mps_no2]) + (mpfg + mpc)[None, :, :, :]
else:
    mps_d = np.array([mps_no1, mps_no2]) + (mpfg + mpc)[None, :, :, :]
if OUTPUT_LEVEL > 0:
    hp.write_map(predir + 'cmb_true.fits', amc, overwrite=True)

#Domain decomposition
ipnest_sub = hp.ring2nest(NSIDE_SPEC_DEFAULT,
                          np.arange(hp.nside2npix(NSIDE_SPEC_DEFAULT)))
ipring = hp.nest2ring(NSIDE_DEFAULT, np.arange(hp.nside2npix(NSIDE_DEFAULT)))
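# Each low-resolution NSIDE_SPEC_DEFAULT pixel is expanded into the RING indices of
# the NSIDE_RATIO**2 high-resolution pixels it contains; only patches containing at
# least one unmasked (msk > ZER0) pixel are kept in ip_patches_good.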
ip_patches_good = []
for ip_sub_ring in np.arange(hp.nside2npix(NSIDE_SPEC_DEFAULT)):
    ips_ring = ipring[ipnest_sub[ip_sub_ring] * NSIDE_RATIO**2 +
                      np.arange(NSIDE_RATIO**2)]
    if np.sum(msk[ips_ring] > ZER0) > 0:
        ip_patches_good.append(ips_ring[msk[ips_ring] > ZER0])

plotmap = -1. * np.ones(hp.nside2npix(NSIDE_DEFAULT))
for i, ips in enumerate(ip_patches_good):
    plotmap[ips] = i
Esempio n. 48
0
    cl_extravar *= norm
    #This yields <10% variations
    depth_factor = 1 + hp.synfast(
        cl_extravar, o.nside_out, new=True, verbose=False)

    #This total map corresponds to the relative variation in the shot-noise variance
    snvmap = shotnoise_factor * depth_factor * msk

    if o.plot_stuff:
        hp.mollview(snvmap,
                    min=0.9,
                    title='Relative local shot noise variance')
        plt.show()

    hp.write_map("cont_lss_nvar_ns%d.fits" % o.nside_out,
                 snvmap,
                 overwrite=True)

if not os.path.isfile("cont_lss_star_ns%d.fits" % o.nside_out):
    starmap = hp.ud_grade(hp.read_map("star_template.fits", verbose=False),
                          nside_out=o.nside_out)
    starmap[starmap <= 0] = 1E-15
    starmap = -np.log(
        starmap)  #Contaminant will be proportional to log(n_star)
    mean = np.sum(starmap * msk) / np.sum(msk)

    clcont = hp.anafast((starmap - mean) * msk) / fsky
    nflat = np.mean(clcont[max(2, o.nside_out // 3 - 50):o.nside_out // 3 +
                           50]) * np.ones_like(clcont)
    #Add extra fluctuations beyond ell~nside/3 with flat power spectrum
    dstar = hp.synfast(nflat, o.nside_out, new=True, verbose=False)
Esempio n. 49
0
        data = self._get_galaxy_data(zbin)
        mask = np.bincount(data['ipix'],
                           weights=data['weight'],
                           minlength=self.npix)
        return mask

    def _read_nzdz(self, zbin):
        return np.loadtxt(self.dndz_list[zbin], unpack=True)


if __name__ == "__main__":
    from glob import glob
    from matplotlib import pyplot as plt
    lsfiles = glob(
        '/mnt/extraspace/damonge/S8z_data/KiDS_data/shear_KV450_catalog/*')
    kv450 = KV450(lsfiles, 1024)
    outdir = '/mnt/extraspace/gravityls_3/S8z/data/KV450/maps_1024/'
    for i in range(5):
        we1, we2, w2s2 = kv450.get_shear_map(i)
        w = kv450.get_mask(i)
        hp.write_map(outdir + 'kv450_we1_bin{}.fits'.format(i), we1)
        hp.write_map(outdir + 'kv450_we2_bin{}.fits'.format(i), we2)
        hp.write_map(outdir + 'kv450_w2s2_bin{}.fits'.format(i), w2s2)
        hp.write_map(outdir + 'kv450_w_bin{}.fits'.format(i), w)
        np.savez_compressed(outdir + 'kv450_sums_bin{}.npz'.format(i),
                            w2s2=np.sum(w2s2))
        # hp.mollview(we1 / w, title='KV450 - 1st zbin - e1', min=-0.5, max=0.5)
        # plt.savefig('kv450_e1_bin{}.png'.format(i))
        # hp.mollview(we2 / w, title='KV450 - 1st zbin - e2', min=-0.5, max=0.5)
        # plt.savefig('kv450_e2_bin{}.png'.format(i))
Esempio n. 50
0
    #-------------------------------------------------
    # For debugging
    # Draw the map
    '''
    hp.mollview(
        mdust_DL07_paras,
        unit="Msun/pc2",
        #norm = 'log',
        nest=True,
        min = 0,
        max = 10,
    )
    hp.graticule()
    plt.show()
    '''
    #-------------------------------------------------
    # Save the SIGMA_MDUST as a all-sky map
    hp.write_map(
        "mdust.fits",
        [mdust_DL07_paras, e_mdust_DL07_paras],
        nest=True,
        coord='G',
        overwrite=True,
        fits_IDL=False,
        column_names=['NORM', 'UNC'],
    )
    #-----------------------------------
    # Measure time
    elapsed_time = time.time() - start_time
    print("Exiting Main Program, spending ", elapsed_time, "seconds.")
Esempio n. 51
0
    def write_healpix_fits(self, path, comm_bytes=None):
        """
        Write data to a HEALPix format FITS table.

        The data across all processes is assumed to be synchronized (the
        data for a given submap shared between processes is identical).  The
        lowest rank process sharing each submap sends their copy to the root
        process for writing.

        Args:
            path (str): The path to the FITS file.
            comm_bytes (int): The approximate message size to use.
        """
        if comm_bytes is None:
            comm_bytes = self._commsize

        autotimer = timing.auto_timer(type(self).__name__)
        # We will reduce some number of whole submaps at a time.
        # Find the number of submaps that fit into the requested
        # communication size.
        dbytes = self._dtype(1).itemsize
        comm_submap = int(comm_bytes / (dbytes * self._submap * self._nnz))
        if comm_submap == 0:
            comm_submap = 1

        nsubmap = int(self._size / self._submap)
        if nsubmap * self._submap < self._size:
            nsubmap += 1

        # Determine which processes "own" each submap.

        owners = np.zeros(nsubmap, dtype=np.int32)
        owners.fill(self._comm.size)
        for m in self._local:
            owners[m] = self._comm.rank
        allowners = np.zeros_like(owners)
        self._comm.Allreduce(owners, allowners, op=MPI.MIN)
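        # After the MIN reduction, allowners[m] is the lowest rank that holds
        # submap m locally (or comm.size if no process has it), so exactly one
        # process contributes non-zero data for each submap when reducing to root.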

        # this function requires lots of RAM, since it accumulates the
        # full map on one process before writing.

        # use a cache to store the local map, so that we can be sure to
        # free the memory afterwards

        fdata = None
        temp = None
        if self._comm.rank == 0:
            fdata = []
            temp = Cache()
            for col in range(self._nnz):
                name = "col{}".format(col)
                temp.create(name, self._dtype, (self._size,))
                fdata.append(temp.reference(name))

        sendbuf = np.zeros(comm_submap * self._submap * self._nnz,
                           dtype=self._dtype)
        sendview = sendbuf.reshape(comm_submap, self._submap, self._nnz)

        recvbuf = None
        recvview = None
        if self._comm.rank == 0:
            recvbuf = np.zeros(comm_submap * self._submap * self._nnz,
                               dtype=self._dtype)
            recvview = recvbuf.reshape(comm_submap, self._submap, self._nnz)

        submap_off = 0
        ncomm = comm_submap
        while submap_off < nsubmap:
            if submap_off + ncomm > nsubmap:
                ncomm = nsubmap - submap_off
            if np.sum(allowners[submap_off:submap_off+ncomm]) \
               != ncomm * self._comm.size:
                # at least one submap has some hits.  reduce.
                for c in range(ncomm):
                    if allowners[submap_off + c] == self._comm.rank:
                        sendview[c,:,:] \
                            = self.data[self._glob2loc[submap_off + c],:,:]
                self._comm.Reduce(sendbuf, recvbuf, op=MPI.SUM, root=0)
                if self._comm.rank == 0:
                    # copy into FITS buffers
                    for c in range(ncomm):
                        sampoff = (submap_off + c) * self._submap
                        for col in range(self._nnz):
                            fdata[col][sampoff:sampoff+self._submap] \
                                = recvview[c,:,col]
                sendbuf.fill(0)
                if self._comm.rank == 0:
                    recvbuf.fill(0)

            submap_off += ncomm

        if self._comm.rank == 0:
            if os.path.isfile(path):
                os.remove(path)
            hp.write_map(path, fdata, dtype=self._dtype, fits_IDL=False,
                nest=self._nest)

        return
Esempio n. 52
0
#     #print(n.split('/')[-1])
#     mapi = hp.read_map(n, verbose=False).astype('bool')
#     mask_neg |= mapi

# noneg = (mask & (~mask_neg))
# print('original mask: ', mask.sum(), 'after removing negative pixels :', noneg.sum())
# hp.write_map('/Volumes/TimeMachine/data/mocks/mask.cut.hp.256.fits', noneg, dtype=np.float64, overwrite=True)

filename = '/Volumes/TimeMachine/data/mocks/mask.cut.hp.256.fits'
filename2 = '/Volumes/TimeMachine/data/mocks/mask.cut.w.hp.256.fits'

weights = glob(
    '/Volumes/TimeMachine/data/mocks/3dbox/*/results/regression/*/*weights.hp256.fits'
)
weights += glob(
    '/Volumes/TimeMachine/data/mocks/3dbox/*/cp2p/results/regression/*/*weights.hp256.fits'
)
print('total number of weights : %d' % len(weights))

mask = hp.read_map(filename, verbose=False).astype('bool')
for n in weights:
    wi = hp.read_map(n, verbose=False)
    maski = (wi > 0.5) & (wi < 2.0)
    mask &= maski

hp.write_map(filename2, mask, overwrite=True, fits_IDL=False)

# test
#mask2 = hp.read_map(filename2, verbose=False).astype('bool')
#print(mask.sum(), mask2.sum(), np.array_equal(mask, mask2))
Esempio n. 53
0
import copy as cp

if __name__ == "__main__":
    niter = 20000
    old_map = hp.read_map(
        '/Users/kwame/Documents/s2let_ilc_data/s2let_ilc_covar15_planck_diffuse_deconv_tapered_thresh_lmax3600_3600_hybridC_6_1_recon_inpaint800.fits'
    )
    mask = np.load(
        '/Users/kwame/Documents/s2let_ilc_data/nilc_pr1_builtmask_holes_ring_800.npy'
    )  #0 where holes
    new_map = cp.deepcopy(old_map)
    #nside = hp.get_nside(mask)
    #hole_pixs = np.where(mask == 0)[0]
    nside = 2048
    hole_pixs = mask[0]
    '''theta = mh.pi / 2.
    phi = 0.
    rad_arcmin = 21
    hole_pixs = hp.query_disc(nside,hp.ang2vec(theta,phi),np.radians(rad_arcmin/60.))'''
    hole_neighbours = hp.get_all_neighbours(nside, hole_pixs)  #8 x N_pix_holes

    new_map[hole_pixs] = np.mean(old_map)  #Start with mean map value
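    # Iteratively relax: each pass replaces every hole pixel with the mean of its
    # 8 HEALPix neighbours, so values diffuse inward from the hole boundary
    # (a simple diffusion-style inpainting of the masked regions).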
    for i in xrange(niter):
        print "Iteration no.", i + 1, "/", niter
        new_map[hole_pixs] = np.mean(new_map[hole_neighbours], axis=0)

    resid_map = new_map - old_map

    hp.write_map(
        '/Users/kwame/Documents/s2let_ilc_data/s2let_ilc_covar15_planck_diffuse_deconv_tapered_thresh_lmax3600_3600_hybridC_6_1_recon_inpaint800GD20000.fits',
        new_map)
Esempio n. 54
0
def release(ctx, chain, burnin, procver, resamp, copy_, freqmaps, ame, ff, cmb,
            synch, dust, br, diff, diffcmb, goodness, chisq, res, all_, plot):
    """
    Creates a release file-set on the BeyondPlanck format.
    https://gitlab.com/BeyondPlanck/repo/-/wikis/BeyondPlanck-Release-Candidate-2

    ex. c3pp release chains_v1_c{1,2}/chain_c000{1,2}.h5 30 BP_r1
    Will output formatted files using all chains specified,
    with a burnin of 30 to a directory called BP_r1

    This function outputs the following files to the {procver} directory:
    BP_chain01_full_{procver}.h5
    BP_resamp_chain01_full_Cl_{procver}.h5
    BP_resamp_chain01_full_noCl_{procver}.h5
    BP_param_full_v1.txt
    BP_param_resamp_Cl_v1.txt
    BP_param_resamp_noCl_v1.txt

    BP_030_IQU_full_n0512_{procver}.fits
    BP_044_IQU_full_n0512_{procver}.fits
    BP_070_IQU_full_n1024_{procver}.fits

    BP_cmb_IQU_full_n1024_{procver}.fits
    BP_synch_IQU_full_n1024_{procver}.fits
    BP_freefree_I_full_n1024_{procver}.fits
    BP_ame_I_full_n1024_{procver}.fits

    BP_cmb_GBRlike_{procver}.fits
    """
    # TODO
    # Use proper masks for output of CMB component
    # Use inpainted data as well in CMB component

    from src.fitsformatter import format_fits, get_data, get_header
    from pathlib import Path
    import shutil

    if all_:  # toggles all other flags (with the default False options this enables everything)
        copy_ = not copy_
        freqmaps = not freqmaps
        ame = not ame
        ff = not ff
        cmb = not cmb
        synch = not synch
        dust = not dust
        br = not br
        diff = not diff
        diffcmb = not diffcmb
        goodness = not goodness
        res = not res
        chisq = not chisq

    if goodness:
        chisq = res = True
    elif chisq or res:
        goodness = True

    # Make procver directory if not exists
    click.echo("{:#^80}".format(""))
    click.echo(f"Creating directory {procver}")
    Path(procver).mkdir(parents=True, exist_ok=True)
    chains = chain
    maxchain = len(chains)
    """
    Copying chains files
    """
    if copy_:
        # Commander3 parameter file for main chain
        for i, chainfile in enumerate(chains, 1):
            path = os.path.split(chainfile)[0]
            for file in os.listdir(path):
                if file.startswith("param") and i == 1:  # Copy only first
                    click.echo(
                        f"Copying {path}/{file} to {procver}/BP_param_full_c" +
                        str(i).zfill(4) + ".txt")
                    if resamp:
                        shutil.copyfile(
                            f"{path}/{file}",
                            f"{procver}/BP_param_resamp_Cl_c" +
                            str(i).zfill(4) + ".txt",
                        )
                    else:
                        shutil.copyfile(
                            f"{path}/{file}",
                            f"{procver}/BP_param_full_c" + str(i).zfill(4) +
                            ".txt",
                        )

            if resamp:
                # Resampled CMB-only full-mission Gibbs chain file with Cls (for BR estimator)
                click.echo(f"Copying {chainfile} to {procver}/BP_resamp_c" +
                           str(i).zfill(4) + f"_full_Cl_{procver}.h5")
                shutil.copyfile(
                    chainfile,
                    f"{procver}/BP_resamp_c" + str(i).zfill(4) +
                    f"_full_Cl_{procver}.h5",
                )
            else:
                # Full-mission Gibbs chain file
                click.echo(f"Copying {chainfile} to {procver}/BP_c" +
                           str(i).zfill(4) + f"_full_{procver}.h5")
                shutil.copyfile(
                    chainfile,
                    f"{procver}/BP_c" + str(i).zfill(4) +
                    f"_full_{procver}.h5",
                )

    #if halfring:
    #   # Copy halfring files
    #   for i, chainfile in enumerate([halfring], 1):
    #       # Copy halfring files
    #       click.echo(f"Copying {resamp} to {procver}/BP_halfring_c" + str(i).zfill(4) + f"_full_Cl_{procver}.h5")
    #       shutil.copyfile(halfring, f"{procver}/BP_halfring_c" + str(i).zfill(4) + f"_full_Cl_{procver}.h5",)
    """
    IQU mean, IQU stddev, (masks for CMB)
    Run mean and stddev from min to max sample (choose min manually or start at 1?)
    """
    if resamp:
        chain = f"{procver}/BP_resamp_c0001_full_Cl_{procver}.h5"
    else:
        chain = f"{procver}/BP_c0001_full_{procver}.h5"
    if freqmaps:
        try:
            # Full-mission 30 GHz IQU frequency map
            # BP_030_IQU_full_n0512_{procver}.fits
            format_fits(
                chain=chain,
                extname="FREQMAP",
                types=[
                    "I_MEAN",
                    "Q_MEAN",
                    "U_MEAN",
                    "I_RMS",
                    "Q_RMS",
                    "U_RMS",
                    "I_STDDEV",
                    "Q_STDDEV",
                    "U_STDDEV",
                ],
                units=[
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                ],
                nside=512,
                burnin=burnin,
                maxchain=maxchain,
                polar=True,
                component="030",
                fwhm=0.0,
                nu_ref_t="30.0 GHz",
                nu_ref_p="30.0 GHz",
                procver=procver,
                filename=f"BP_030_IQU_full_n0512_{procver}.fits",
                bndctr=30,
                restfreq=28.456,
                bndwid=9.899,
            )
            # Full-mission 44 GHz IQU frequency map
            format_fits(
                chain=chain,
                extname="FREQMAP",
                types=[
                    "I_MEAN",
                    "Q_MEAN",
                    "U_MEAN",
                    "I_RMS",
                    "Q_RMS",
                    "U_RMS",
                    "I_STDDEV",
                    "Q_STDDEV",
                    "U_STDDEV",
                ],
                units=[
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                ],
                nside=512,
                burnin=burnin,
                maxchain=maxchain,
                polar=True,
                component="044",
                fwhm=0.0,
                nu_ref_t="44.0 GHz",
                nu_ref_p="44.0 GHz",
                procver=procver,
                filename=f"BP_044_IQU_full_n0512_{procver}.fits",
                bndctr=44,
                restfreq=44.121,
                bndwid=10.719,
            )
            # Full-mission 70 GHz IQU frequency map
            format_fits(
                chain=chain,
                extname="FREQMAP",
                types=[
                    "I_MEAN",
                    "Q_MEAN",
                    "U_MEAN",
                    "I_RMS",
                    "Q_RMS",
                    "U_RMS",
                    "I_STDDEV",
                    "Q_STDDEV",
                    "U_STDDEV",
                ],
                units=[
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                    "uK",
                ],
                nside=1024,
                burnin=burnin,
                maxchain=maxchain,
                polar=True,
                component="070",
                fwhm=0.0,
                nu_ref_t="70.0 GHz",
                nu_ref_p="70.0 GHz",
                procver=procver,
                filename=f"BP_070_IQU_full_n1024_{procver}.fits",
                bndctr=70,
                restfreq=70.467,
                bndwid=14.909,
            )

        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")
    """
    FOREGROUND MAPS
    """
    # Full-mission CMB IQU map
    if cmb:
        if resamp:
            try:
                format_fits(
                    chain,
                    extname="COMP-MAP-CMB-RESAMP",
                    types=[
                        "I_MEAN",
                        "I_STDDEV",
                    ],
                    units=[
                        "uK_cmb",
                        "uK_cmb",
                    ],
                    nside=1024,
                    burnin=burnin,
                    maxchain=maxchain,
                    polar=True,
                    component="CMB",
                    fwhm=14.0,
                    nu_ref_t="NONE",
                    nu_ref_p="NONE",
                    procver=procver,
                    filename=f"BP_cmb_resamp_IQU_full_n1024_{procver}.fits",
                    bndctr=None,
                    restfreq=None,
                    bndwid=None,
                )
            except Exception as e:
                print(e)
                click.secho("Continuing...", fg="yellow")

        else:
            try:
                format_fits(
                    chain,
                    extname="COMP-MAP-CMB",
                    types=[
                        "I_MEAN",
                        "Q_MEAN",
                        "U_MEAN",
                        "I_STDDEV",
                        "Q_STDDEV",
                        "U_STDDEV",
                        "mask1",
                        "mask2",
                    ],
                    units=[
                        "uK_cmb",
                        "uK_cmb",
                        "uK",
                        "uK",
                        "NONE",
                        "NONE",
                    ],
                    nside=1024,
                    burnin=burnin,
                    maxchain=maxchain,
                    polar=True,
                    component="CMB",
                    fwhm=14.0,
                    nu_ref_t="NONE",
                    nu_ref_p="NONE",
                    procver=procver,
                    filename=f"BP_cmb_IQU_full_n1024_{procver}.fits",
                    bndctr=None,
                    restfreq=None,
                    bndwid=None,
                )
            except Exception as e:
                print(e)
                click.secho("Continuing...", fg="yellow")

    if ff:
        try:
            # Full-mission free-free I map
            format_fits(
                chain,
                extname="COMP-MAP-FREE-FREE",
                types=[
                    "I_MEAN",
                    "I_TE_MEAN",
                    "I_STDDEV",
                    "I_TE_STDDEV",
                ],
                units=[
                    "uK_RJ",
                    "K",
                    "uK_RJ",
                    "K",
                ],
                nside=1024,
                burnin=burnin,
                maxchain=maxchain,
                polar=False,
                component="FREE-FREE",
                fwhm=30.0,
                nu_ref_t="40.0 GHz",
                nu_ref_p="40.0 GHz",
                procver=procver,
                filename=f"BP_freefree_I_full_n1024_{procver}.fits",
                bndctr=None,
                restfreq=None,
                bndwid=None,
            )
        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if ame:
        try:
            # Full-mission AME I map
            format_fits(
                chain,
                extname="COMP-MAP-AME",
                types=[
                    "I_MEAN",
                    "I_NU_P_MEAN",
                    "I_STDDEV",
                    "I_NU_P_STDDEV",
                ],
                units=[
                    "uK_RJ",
                    "GHz",
                    "uK_RJ",
                    "GHz",
                ],
                nside=1024,
                burnin=burnin,
                maxchain=maxchain,
                polar=False,
                component="AME",
                fwhm=120.0,
                nu_ref_t="22.0 GHz",
                nu_ref_p="22.0 GHz",
                procver=procver,
                filename=f"BP_ame_I_full_n1024_{procver}.fits",
                bndctr=None,
                restfreq=None,
                bndwid=None,
            )
        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if synch:
        try:
            # Full-mission synchrotron IQU map
            format_fits(
                chain,
                extname="COMP-MAP-SYNCHROTRON",
                types=[
                    "I_MEAN",
                    "Q_MEAN",
                    "U_MEAN",
                    "P_MEAN",
                    "I_BETA_MEAN",
                    "QU_BETA_MEAN",
                    "I_STDDEV",
                    "Q_STDDEV",
                    "U_STDDEV",
                    "P_STDDEV",
                    "I_BETA_STDDEV",
                    "QU_BETA_STDDEV",
                ],
                units=[
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "NONE",
                    "NONE",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "NONE",
                    "NONE",
                ],
                nside=1024,
                burnin=burnin,
                maxchain=maxchain,
                polar=True,
                component="SYNCHROTRON",
                fwhm=60.0,  # 60.0,
                nu_ref_t="30.0 GHz",
                nu_ref_p="30.0 GHz",
                procver=procver,
                filename=f"BP_synch_IQU_full_n1024_{procver}.fits",
                bndctr=None,
                restfreq=None,
                bndwid=None,
            )
        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if dust:
        try:
            # Full-mission thermal dust IQU map
            format_fits(
                chain,
                extname="COMP-MAP-DUST",
                types=[
                    "I_MEAN",
                    "Q_MEAN",
                    "U_MEAN",
                    "P_MEAN",
                    "I_BETA_MEAN",
                    "QU_BETA_MEAN",
                    "I_T_MEAN",
                    "QU_T_MEAN",
                    "I_STDDEV",
                    "Q_STDDEV",
                    "U_STDDEV",
                    "P_STDDEV",
                    "I_BETA_STDDEV",
                    "QU_BETA_STDDEV",
                    "I_T_STDDEV",
                    "QU_T_STDDEV",
                ],
                units=[
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "NONE",
                    "NONE",
                    "K",
                    "K",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "uK_RJ",
                    "NONE",
                    "NONE",
                    "K",
                    "K",
                ],
                nside=1024,
                burnin=burnin,
                maxchain=maxchain,
                polar=True,
                component="DUST",
                fwhm=10.0,  # 60.0,
                nu_ref_t="545 GHz",
                nu_ref_p="353 GHz",
                procver=procver,
                filename=f"BP_dust_IQU_full_n1024_{procver}.fits",
                bndctr=None,
                restfreq=None,
                bndwid=None,
            )
        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if diff:
        import healpy as hp
        try:
            click.echo("Creating frequency difference maps")
            path_dx12 = "/mn/stornext/u3/trygvels/compsep/cdata/like/BP_releases/dx12"
            path_npipe = "/mn/stornext/u3/trygvels/compsep/cdata/like/BP_releases/npipe"
            maps_dx12 = [
                "30ghz_2018_n1024_beamscaled_dip.fits",
                "44ghz_2018_n1024_beamscaled_dip.fits",
                "70ghz_2018_n1024_beamscaled_dip.fits"
            ]
            maps_npipe = [
                "npipe6v20_030_map_uK.fits",
                "npipe6v20_044_map_uK.fits",
                "npipe6v20_070_map_uK.fits",
            ]
            maps_BP = [
                f"BP_030_IQU_full_n0512_{procver}.fits",
                f"BP_044_IQU_full_n0512_{procver}.fits",
                f"BP_070_IQU_full_n1024_{procver}.fits",
            ]
            beamscaling = [9.8961854E-01, 9.9757886E-01, 9.9113965E-01]
            for i, freq in enumerate([
                    "030",
                    "044",
                    "070",
            ]):
                map_BP = hp.read_map(f"{procver}/{maps_BP[i]}",
                                     field=(0, 1, 2),
                                     verbose=False,
                                     dtype=None)
                map_npipe = hp.read_map(f"{path_npipe}/{maps_npipe[i]}",
                                        field=(0, 1, 2),
                                        verbose=False,
                                        dtype=None)
                map_dx12 = hp.read_map(f"{path_dx12}/{maps_dx12[i]}",
                                       field=(0, 1, 2),
                                       verbose=False,
                                       dtype=None)

                #dx12 dipole values:
                # 3362.08 ± 0.99, 264.021 ± 0.011, 48.253 ± 0.005
                # 233.18308357  2226.43833645 -2508.42179665
                #dipole_dx12 = -3362.08*hp.dir2vec(264.021, 48.253, lonlat=True)

                #map_dx12  = map_dx12/beamscaling[i]
                # Smooth to 60 arcmin
                map_BP = hp.smoothing(map_BP,
                                      fwhm=arcmin2rad(60.0),
                                      verbose=False)
                map_npipe = hp.smoothing(map_npipe,
                                         fwhm=arcmin2rad(60.0),
                                         verbose=False)
                map_dx12 = hp.smoothing(map_dx12,
                                        fwhm=arcmin2rad(60.0),
                                        verbose=False)

                #ud_grade 30 and 44ghz
                if i < 2:
                    map_npipe = hp.ud_grade(
                        map_npipe,
                        nside_out=512,
                    )
                    map_dx12 = hp.ud_grade(
                        map_dx12,
                        nside_out=512,
                    )

                # Remove monopoles
                map_BP -= np.mean(map_BP, axis=1).reshape(-1, 1)
                map_npipe -= np.mean(map_npipe, axis=1).reshape(-1, 1)
                map_dx12 -= np.mean(map_dx12, axis=1).reshape(-1, 1)

                hp.write_map(f"{procver}/BP_{freq}_diff_npipe_{procver}.fits",
                             np.array(map_BP - map_npipe),
                             overwrite=True,
                             column_names=["I_DIFF", "Q_DIFF", "U_DIFF"],
                             dtype=None)
                hp.write_map(f"{procver}/BP_{freq}_diff_dx12_{procver}.fits",
                             np.array(map_BP - map_dx12),
                             overwrite=True,
                             column_names=["I_DIFF", "Q_DIFF", "U_DIFF"],
                             dtype=None)

        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if diffcmb:
        import healpy as hp
        try:
            click.echo("Creating cmb difference maps")
            path_cmblegacy = "/mn/stornext/u3/trygvels/compsep/cdata/like/BP_releases/cmb-legacy"
            mask_ = hp.read_map(
                "/mn/stornext/u3/trygvels/compsep/cdata/like/BP_releases/masks/dx12_v3_common_mask_int_005a_1024_TQU.fits",
                verbose=False,
                dtype=bool,  # np.bool was removed from recent NumPy; the builtin behaves the same here
            )
            map_BP = hp.read_map(
                f"{procver}/BP_cmb_IQU_full_n1024_{procver}.fits",
                field=(0, 1, 2),
                verbose=False,
                dtype=None,
            )
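            # Fit the residual monopole and dipole on the masked temperature map
            # and subtract them before smoothing and differencing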
            map_BP_masked = hp.ma(map_BP[0])
            map_BP_masked.mask = np.logical_not(mask_)
            mono, dip = hp.fit_dipole(map_BP_masked)
            nside = 1024
            ray = range(hp.nside2npix(nside))
            vecs = hp.pix2vec(nside, ray)
            dipole = np.dot(dip, vecs)
            map_BP[0] = map_BP[0] - dipole - mono
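            # Smooth to a common 60 arcmin resolution; the BP CMB map is assumed to
            # carry a 14 arcmin beam already, hence the quadrature difference sqrt(60^2 - 14^2)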
            map_BP = hp.smoothing(map_BP,
                                  fwhm=arcmin2rad(np.sqrt(60.0**2 - 14**2)),
                                  verbose=False)
            #map_BP -= np.mean(map_BP,axis=1).reshape(-1,1)
            for i, method in enumerate([
                    "commander",
                    "sevem",
                    "nilc",
                    "smica",
            ]):

                data = f"COM_CMB_IQU-{method}_2048_R3.00_full.fits"
                click.echo(f"making difference map with {data}")
                map_cmblegacy = hp.read_map(f"{path_cmblegacy}/{data}",
                                            field=(0, 1, 2),
                                            verbose=False,
                                            dtype=None)
                map_cmblegacy = hp.smoothing(map_cmblegacy,
                                             fwhm=arcmin2rad(60.0),
                                             verbose=False)
                map_cmblegacy = hp.ud_grade(
                    map_cmblegacy,
                    nside_out=1024,
                )
                map_cmblegacy = map_cmblegacy * 1e6

                # Remove monopoles
                map_cmblegacy_masked = hp.ma(map_cmblegacy[0])
                map_cmblegacy_masked.mask = np.logical_not(mask_)
                mono = hp.fit_monopole(map_cmblegacy_masked)
                click.echo(f"{method} subtracting monopole {mono}")
                map_cmblegacy[0] = map_cmblegacy[0] - mono  # np.mean(map_cmblegacy,axis=1).reshape(-1,1)

                hp.write_map(f"{procver}/BP_cmb_diff_{method}_{procver}.fits",
                             np.array(map_BP - map_cmblegacy),
                             overwrite=True,
                             column_names=["I_DIFF", "Q_DIFF", "U_DIFF"],
                             dtype=None)

        except Exception as e:
            print(e)
            click.secho("Continuing...", fg="yellow")

    if goodness:
        import healpy as hp
        path_goodness = procver + "/goodness"
        Path(path_goodness).mkdir(parents=True, exist_ok=True)
        print("PATH", path_goodness)

        cmin = int(os.path.split(chains[0])[0].rsplit("_c")[-1])
        cmax = int(os.path.split(chains[-1])[0].rsplit("_c")[-1])
        chdir = os.path.split(chains[0])[0].rsplit("_", 1)[0]

        if chisq:
            try:
                format_fits(
                    chains,
                    extname="CHISQ",
                    types=[
                        "I_MEAN",
                        "P_MEAN",
                    ],
                    units=[
                        "NONE",
                        "NONE",
                    ],
                    nside=16,
                    burnin=burnin,
                    maxchain=maxchain,
                    polar=True,
                    component="CHISQ",
                    fwhm=0.0,
                    nu_ref_t="NONE",
                    nu_ref_p="NONE",
                    procver=procver,
                    filename=f'goodness/BP_chisq_n16_{procver}.fits',
                    bndctr=None,
                    restfreq=None,
                    bndwid=None,
                    cmin=cmin,
                    cmax=cmax,
                    chdir=chdir,
                )
            except Exception as e:
                print(e)
                click.secho("Continuing...", fg="yellow")

        if res:
            click.echo("Save and format chisq map and residual maps")
            bands = {
                "030": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "IQU",
                    "fields": (0, 1, 2),
                    "unit": "uK",
                    "scale": 1.,
                },
                "044": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "IQU",
                    "fields": (0, 1, 2),
                    "unit": "uK",
                    "scale": 1.,
                },
                "070": {
                    "nside": 1024,
                    "fwhm": 120,
                    "sig": "IQU",
                    "fields": (0, 1, 2),
                    "unit": "uK",
                    "scale": 1.,
                },
                "030-WMAP_Ka": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1e3,
                },
                "040-WMAP_Q1": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "040-WMAP_Q2": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "060-WMAP_V1": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "060-WMAP_V2": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "0.4-Haslam": {
                    "nside": 512,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "857": {
                    "nside": 1024,
                    "fwhm": 120,
                    "sig": "I",
                    "fields": (0, ),
                    "unit": "uK",
                    "scale": 1.,
                },
                "033-WMAP_Ka_P": {
                    "nside": 16,
                    "fwhm": 0,
                    "sig": "QU",
                    "fields": (1, 2),
                    "unit": "uK",
                    "scale": 1e3,
                },
                "041-WMAP_Q_P": {
                    "nside": 16,
                    "fwhm": 0,
                    "sig": "QU",
                    "fields": (1, 2),
                    "unit": "uK",
                    "scale": 1e3,
                },
                "061-WMAP_V_P": {
                    "nside": 16,
                    "fwhm": 0,
                    "sig": "QU",
                    "fields": (1, 2),
                    "unit": "uK",
                    "scale": 1e3,
                },
                "353": {
                    "nside": 1024,
                    "fwhm": 120,
                    "sig": "QU",
                    "fields": (1, 2),
                    "unit": "uK",
                    "scale": 1.,
                },
            }

            for label, b in bands.items():

                types = []
                units = []
                for l in b["sig"]:
                    types.append(f'{l}_MEAN')
                    units.append(b["unit"])
                for l in b["sig"]:
                    types.append(f'{l}_STDDEV')
                    units.append(b["unit"])
                try:
                    format_fits(
                        chains,
                        extname="FREQBAND_RES",
                        types=types,
                        units=units,
                        nside=b["nside"],
                        burnin=burnin,
                        maxchain=maxchain,
                        polar=True,
                        component=label,
                        fwhm=b["fwhm"],
                        nu_ref_t="NONE",
                        nu_ref_p="NONE",
                        procver=procver,
                        filename=
                        f'goodness/BP_res_{label}_{b["sig"]}_full_n{b["nside"]}_{b["fwhm"]}arcmin_{b["unit"]}_{procver}.fits',
                        bndctr=None,
                        restfreq=None,
                        bndwid=None,
                        cmin=cmin,
                        cmax=cmax,
                        chdir=chdir,
                        fields=b["fields"],
                        scale=b["scale"],
                    )
                except Exception as e:
                    print(e)
                    click.secho("Continuing...", fg="yellow")
    """ As implemented by Simone
    """
    if br and resamp:
        # Gaussianized TT Blackwell-Rao input file
        click.echo("{:-^50}".format("CMB GBR"))
        ctx.invoke(
            sigma_l2fits,
            filename=resamp,
            nchains=1,
            burnin=burnin,
            path="cmb/sigma_l",
            outname=f"{procver}/BP_cmb_GBRlike_{procver}.fits",
            save=True,
        )
    """
    TODO Generalize this so that they can be generated by Elina and Anna-Stiina
    """
    # Full-mission 30 GHz IQU beam symmetrized frequency map
    # BP_030_IQUdeconv_full_n0512_{procver}.fits
    # Full-mission 44 GHz IQU beam symmetrized frequency map
    # BP_044_IQUdeconv_full_n0512_{procver}.fits
    # Full-mission 70 GHz IQU beam symmetrized frequency map
    # BP_070_IQUdeconv_full_n1024_{procver}.fits
    """ Both sigma_l's and Dl's re in the h5. (Which one do we use?)
    """
    # CMB TT, TE, EE power spectrum
    # BP_cmb_Cl_{procver}.txt
    """ Just get this from somewhere
    """
    # Best-fit LCDM CMB TT, TE, EE power spectrum
    # BP_cmb_bfLCDM_{procver}.txt

    if plot:
        os.chdir(procver)
        ctx.invoke(plotrelease, procver=procver, all_=True)
Example n. 55
0
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', help='CATCH configuration file.')
    parser.add_argument('--source',
                        default='observation',
                        help=('limit analysis to this data source '
                              '(default: show all data)'))
    parser.add_argument('--nside',
                        type=int,
                        default=2048,
                        help=('Healpix nside parameter, default is 2048'
                              ' for 1.7 arcmin resolution'))
    parser.add_argument(
        '-o',
        default=None,
        help='output file name prefix, default based on --source')
    parser.add_argument('--format', default='png', help='plot file format')
    parser.add_argument('--dpi', type=int, default=200)
    args = parser.parse_args()

    prefix = args.source if args.o is None else args.o

    config = Config.from_file(args.config)
    with Catch.with_config(config) as catch:
        catch.source = args.source
        source_name = catch.source.__data_source_name__
        cov = make_sky_coverage_map(catch, args.nside)

    hp.write_map('.'.join((prefix, 'fits')), cov, overwrite=True)
    plot(cov, source_name)
    plt.savefig('.'.join((prefix, args.format)), dpi=args.dpi)
Example n. 56
0
        fig, ax = SWHT.display.disp3D(img, phi, theta, dmode='abs', cmap='jet')

        # save complex image to pickle file
        print 'Writing image to file %s ...' % outFn,
        SWHT.fileio.writeSWHTImgPkl(outFn, [img, phi, theta], fDict, mode='3D')
        print 'done'

    elif opts.imageMode.startswith(
            'heal'
    ):  # plot healpix and save healpix file using the opts.pkl name
        print 'Generating HEALPix Image with %i NSIDE' % (opts.pixels)
        # use the healpy.alm2map function as it is much faster, there is a ~1% difference between the 2 functions, this is probably due to the inner workings of healpy
        #m = SWHT.swht.makeHEALPix(imgCoeffs, nside=opts.pixels) # TODO: a rotation issue
        m = hp.alm2map(SWHT.util.array2almVec(imgCoeffs),
                       opts.pixels)  # TODO: a rotation issue

        # save complex image to HEALPix file
        print 'Writing image to file %s ...' % outFn,
        hp.write_map(
            outFn, m.real, coord='C'
        )  # only writing the real component, this should be fine, maybe missing some details, but you know, the sky should be real.
        print 'done'

    elif opts.imageMode.startswith('coeff'):  # plot the complex coefficients
        fig, ax = SWHT.display.dispCoeffs(imgCoeffs, zeroDC=True, vis=False)

    if not (opts.savefig is None): plt.savefig(opts.savefig)
    if not opts.nodisplay:
        if opts.imageMode.startswith('heal'): hp.mollview(m.real, coord='CG')
        plt.show()
Example n. 57
0
print("  Construct Sky...", flush=True)
sky = pysm.Sky(nside=args.nside, preset_strings=["d1", "s1", "f1", "a1"])

freqs = (np.array([
    args.bandcenter_ghz - 0.5 * args.bandwidth_ghz,
    args.bandcenter_ghz,
    args.bandcenter_ghz + 0.5 * args.bandwidth_ghz,
]) * u.GHz)
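# Three bandpass samples (lower edge, center, upper edge); passing an array of
# frequencies lets get_emission integrate over them as a simple bandpass
# (uniform weights when none are given)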

print("  Get Emission...", flush=True)
map_data = sky.get_emission(freqs)

if args.coord == "G":
    print("  Smoothing...", flush=True)
    smoothed = pysm.apply_smoothing_and_coord_transform(map_data,
                                                        fwhm=args.beam_arcmin *
                                                        u.arcmin)
else:
    to_coord = "G{}".format(args.coord)
    print("  Smoothing and rotating...", flush=True)
    smoothed = pysm.apply_smoothing_and_coord_transform(
        map_data,
        fwhm=args.beam_arcmin * u.arcmin,
        rot=hp.Rotator(coord=to_coord))

print("  Converting to NEST")
nested = hp.ud_grade(smoothed, args.nside, order_in="RING", order_out="NEST")
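# nside is unchanged here, so ud_grade only reorders pixels from RING to NEST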

print("  Writing output...", flush=True)
hp.write_map(outfile, nested, nest=True)
Example n. 58
0
nPixels = hp.nside2npix(nsides)
hz = n.zeros(nPixels)
hcounts = n.zeros(nPixels)
hrms = n.zeros(nPixels)
# Find pixel # for a given theta and phi
pixInd = hp.ang2pix(nsides, thetas, phis, nest=False)
# Set pixel values at pixInd to power values
hz[pixInd] = z
hcounts[pixInd] = gcounts
hrms[pixInd] = grms
# Grey out pixels with no measurements
hz[hz == 0] = n.nan
hcounts[hcounts == 0] = n.nan
hrms[hrms == 0] = n.nan
# Write healpix maps to fits files
hp.write_map(str(nsides) + '_power_' + outfilename + '.fits', hz)
hp.write_map(str(nsides) + '_rms_' + outfilename + '.fits', hrms)
hp.write_map(str(nsides) + '_counts_' + outfilename + '.fits', hcounts)

######################## Plotting ###########################

# Obtain bounds for plotting window
extent = (x.min(), x.max(), y.min(), y.max())
# Compute quartiles for scaling of colorbar
fq = n.percentile(z, 10)
uq = n.percentile(z, 80)
# Change colormap for Healpix colorbars
bonemap = cm.bone_r
bonemap.set_under('0.75')
gnuplotmap = cm.gnuplot
gnuplotmap.set_under('0.75')
Example n. 59
0
        f'/fs/ess/PHS0336/data/tanveer/dr9/v4/elg_dnnp/{region}_clean_1024/windows/window_model_*fits'
    )
    print(region)

nwindows = 1000
nside = 1024
for i in range(nwindows):

    count_i = np.zeros(12 * nside * nside)
    wind_i = np.zeros(12 * nside * nside)
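    # Sum the i-th window's weights from every region and count how many
    # regions contribute to each pixel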

    for region in windows:
        d_ = ft.read(windows[region][i])
        wind_i[d_['hpix']] += d_['weight']
        count_i[d_['hpix']] += 1.0

    output_path = f'/fs/ess/PHS0336/data/tanveer/dr9/v4/elg_dnnp/windows_clean/nnwindow_{i}.hp{nside}.fits'
    output_dir = os.path.dirname(output_path)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
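    # Average the summed weights over the contributing regions; pixels with no
    # coverage are flagged as UNSEEN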

    is_good = count_i > 0.0
    wind_i[is_good] = wind_i[is_good] / count_i[is_good]
    wind_i[~is_good] = hp.UNSEEN
    hp.write_map(output_path,
                 wind_i,
                 dtype=np.float64,
                 fits_IDL=False,
                 overwrite=True)
    print(f'wrote {output_path}')
Example n. 60
0
        fac) / np.square(np.radians(
            catalog[:, 4][catalog[:, -1] == 1]))  #Using GAUFLUX if EXTENDED
    #catalog[:,2:] * fluxfacs[:,None].T * 0.001 #mK

    sigmas = np.radians(catalog[:, 4] / 60.) / (
        2. * mh.sqrt(2. * mh.log(2.))
    )  #np.radians(fwhms) / (2.*mh.sqrt(2.*mh.log(2.))) #sigma in radians
    sigmas[catalog[:, -1] == 0] = fwhms_rad[j] / (
        2. * mh.sqrt(2. * mh.log(2.)))  #Use beamFWHM if not EXTENDED

    for i in xrange(len(coords)):  #Loop over point sources
        print 'Subtracting Gaussian profile for point source', i + 1, '/', len(
            coords), 'map', j
        samppixs = hp.query_disc(hp.get_nside(maps[j]),
                                 hp.ang2vec(coords[i, 1], coords[i, 0]),
                                 5. * sigmas[i])
        maps[j] = gauss_source(maps[j], temps[i], sigmas[i],
                               (coords[i, 1], coords[i, 0]), samppixs)
        #samppixsall = np.concatenate((samppixsall,samppixs)) #Slows program down

    #Save point source residuals
    resid[j] = origmaps[j] - maps[j]
'''testmap = cp.deepcopy(maps[j])
testmap[:] = 0.
testmap[samppixsall[1:]] = 1.'''

#Write new maps to FITS files
for i in xrange(len(maps)):
    hp.write_map(outfits[i], maps[i])
    hp.write_map(residfits[i], resid[i])