Example #1
def load_signal(ctx):
    """
    Returns an interpolated global sky model (GSM) map dependent on the frequency.
    
    :param params: The ctx instance with the paramterization
    :returns signal: The astro signal
    """
    if gsm_maps is None:
        _load_files()
        
    gsm_frequencies, gsm_file_paths = gsm_maps[ctx.params.beam_nside]
    
    assert ctx.frequency >= gsm_frequencies[0], "Frequency (%s) outside available frequencies (%s - %s)"%(ctx.frequency, 
                                                                                                          gsm_frequencies[0], 
                                                                                                          gsm_frequencies[-1])
    assert ctx.frequency <= gsm_frequencies[-1], "Frequency (%s) outside available frequencies (%s - %s)"%(ctx.frequency, 
                                                                                                           gsm_frequencies[0], 
                                                                                                           gsm_frequencies[-1])
    
    
    for i, frequency in enumerate(gsm_frequencies):
        if ctx.frequency < frequency:
            break
    
    lf_file = gsm_file_paths[i-1]
    uf_file = gsm_file_paths[i]
    diff = (frequency - ctx.frequency) / (frequency - gsm_frequencies[i-1])
    
    lf_map = hp.read_map(lf_file, verbose=False)
    uf_map = hp.read_map(uf_file, verbose=False)
    
    gsm_map = diff * lf_map + (1-diff) * uf_map
    
    return gsm_map
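
The interpolation step above can be exercised on its own. A minimal sketch with two synthetic maps standing in for the bracketing GSM files (the frequencies and map values below are made up for illustration):

import healpy as hp
import numpy as np

nside = 64
npix = hp.nside2npix(nside)
lf_map = np.random.rand(npix)          # map at the lower bracketing frequency
uf_map = np.random.rand(npix)          # map at the upper bracketing frequency
lf_freq, uf_freq = 980.0, 1000.0       # bracketing frequencies (MHz)
target = 990.0                         # requested frequency (MHz)

# weight of the lower-frequency map, as in load_signal above
diff = (uf_freq - target) / (uf_freq - lf_freq)
gsm_map = diff * lf_map + (1 - diff) * uf_map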
Example #2
 def setUp(self):
     self.path = os.path.dirname( os.path.realpath( __file__ ) )
     try:
         self.map1 = [hp.ma(m) for m in hp.read_map(os.path.join(self.path, 'data', 'wmap_band_iqumap_r9_7yr_W_v4.fits'), (0,1,2))]
         self.map2 = [hp.ma(m) for m in hp.read_map(os.path.join(self.path, 'data', 'wmap_band_iqumap_r9_7yr_V_v4.fits'), (0,1,2))]
         self.mask = hp.read_map(os.path.join(self.path, 'data', 'wmap_temperature_analysis_mask_r9_7yr_v4.fits')).astype(np.bool)
     except exceptions.IOError:
         warnings.warn("""Missing Wmap test maps from the data folder, please download them from Lambda and copy them in the test/data folder:
         http://lambda.gsfc.nasa.gov/data/map/dr4/skymaps/7yr/raw/wmap_band_iqumap_r9_7yr_W_v4.fits
         http://lambda.gsfc.nasa.gov/data/map/dr4/skymaps/7yr/raw/wmap_band_iqumap_r9_7yr_V_v4.fits
         http://lambda.gsfc.nasa.gov/data/map/dr4/ancillary/masks/wmap_temperature_analysis_mask_r9_7yr_v4.fits
         on Mac or Linux you can run the bash script get_wmap_maps.sh from the same folder
         """)
     for m in chain(self.map1, self.map2):
         m.mask = np.logical_not(self.mask)
     self.cla = hp.read_cl(os.path.join(self.path, 'data', 'cl_wmap_fortran.fits'))
     cls = pyfits.open(os.path.join(self.path, 'data',
                                    'cl_iqu_wmap_fortran.fits'))[1].data
     # order of HEALPIX is TB, EB while in healpy is EB, TB
     self.cliqu = [cls.field(i) for i in (0,1,2,3,5,4)]
     nside = 32
     lmax = 64
     fwhm_deg = 7.
     seed = 12345
     np.random.seed(seed)
     self.mapiqu = hp.synfast(self.cliqu, nside, lmax=lmax, pixwin=False,
                              fwhm=np.radians(fwhm_deg), new=False)
Example #3
def doBasicSDSSCuts(sdss):
    # (Reid 2016 Section 2.2)
    # photometric quality flags
    import healpy as hp
    # bad region mask (DES footprint)
    path = '/n/des/lee.5922/data/balrog_cat/'
    goodmask = path+'y1a1_gold_1.0.2_wide_footprint_4096.fit'
    badmask = path+'y1a1_gold_1.0.2_wide_badmask_4096.fit'
    # Note that the masks here are in equatorial, ring format.
    gdmask = hp.read_map(goodmask)
    bdmask = hp.read_map(badmask)

    ind_good_ring = np.where(( gdmask >= 1) & ((bdmask.astype('int64') & (64+32+8)) == 0) )
    # healpixify the catalog.
    nside=4096
    # Convert silly ra/dec to silly HP angular coordinates.
    phi = sdss['RA'] * np.pi / 180.0
    theta = ( 90.0 - sdss['DEC'] ) * np.pi/180.0


    hpInd = hp.ang2pix(nside,theta,phi,nest=False)
    keep = np.in1d(hpInd,ind_good_ring)
    sdss  = sdss[keep]


    # quality cut ( Reid et al. 2016 Section 2.2 )
    exclude = 2**1 + 2**5 + 2**7 + 2**11 + 2**19 # BRIGHT, PEAK CENTER, NO PROFILE, DEBLENDED_TOO_MANY_PEAKS, NOT_CHECKED
    # blended object
    blended = 2**3
    nodeblend = 2**6
    # object must not be saturated
    saturated = 2**18
    saturated_center = 2**(32+11)
    
    use =  ( 
            (sdss['CLEAN'] == 1 ) &
            #(sdss['FIBER2MAG_I'] < 22.5) &
            (sdss['TYPE'] == 3) &
           ( ( sdss['FLAGS'] & exclude) == 0) &
           ( ((sdss['FLAGS'] & saturated) == 0) | (((sdss['FLAGS'] & saturated) > 0) & ((sdss['FLAGS'] & saturated_center) == 0) ) )&
           ( ((sdss['FLAGS'] & blended) == 0 ) | ((sdss['FLAGS'] & nodeblend) ==0) ) )
    
    """
    
    # Cuts Ashley used
    binned = 1879048192
    blending = 8
    bright =2
    edge = 4
    saturated = 2**18
    
    use = (
              #  ((sdss['FLAGS'] & binned) > 0) |
           ((sdss['FLAGS'] & blending) < 8) &
           ((sdss['FLAGS'] & bright) == 0) &
           ((sdss['FLAGS'] & edge) == 0) &
           ((sdss['FLAGS'] & saturated) == 0)
              )
    """
    return sdss[use] # & clear] # & completness95]
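
For reference, the RA/Dec-to-pixel conversion used above follows the standard healpy convention (colatitude theta and longitude phi in radians). A small self-contained sketch with made-up coordinates:

import healpy as hp
import numpy as np

nside = 4096
ra = np.array([10.0, 150.0])            # degrees (illustrative values)
dec = np.array([-30.0, 2.5])            # degrees
theta = np.radians(90.0 - dec)          # colatitude
phi = np.radians(ra)                    # longitude
pix = hp.ang2pix(nside, theta, phi, nest=False)   # RING-ordered pixel indices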
Example #4
def check_EBlm2d(nu1=100,nu2=143, lmax=300,
                maskfield=2, source_maskfield=0,
                label_loc='lower right', xmax=None):
    
    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu1)
    Q1,U1 =hp.read_map(data_path + map_name, field=(1,2))
    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu2)
    Q2,U2 =hp.read_map(data_path + map_name, field=(1,2))
    mask=hp.read_map(data_path + 'HFI_Mask_GalPlane-apo0_2048_R2.00.fits',
                     field=maskfield)
    smask=hp.read_map(data_path + 'HFI_Mask_PointSrc_2048_R2.00.fits',
                     field=source_maskfield)
    mask *= smask

    hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
    beam1 = hdulist[beam_index['{}P'.format(nu1)]].data.NOMINAL[0][:lmax+1]
    beam2 = hdulist[beam_index['{}P'.format(nu2)]].data.NOMINAL[0][:lmax+1]
    
    elm1,blm1 = get_ElmBlm(lmax=lmax, Qmap=Q1, Umap=U1, mask=mask,
                  healpy_format=False, recalc=True, div_beam=beam1)
    elm_hp1,blm_hp1 = get_ElmBlm(lmax=lmax, Qmap=Q1, Umap=U1, mask=mask,
                  healpy_format=True, recalc=True, div_beam=beam1)
    elm2,blm2 = get_ElmBlm(lmax=lmax, Qmap=Q2, Umap=U2, mask=mask,
                  healpy_format=False, recalc=True, div_beam=beam2)
    elm_hp2,blm_hp2 = get_ElmBlm(lmax=lmax, Qmap=Q2, Umap=U2, mask=mask,
                  healpy_format=True, recalc=True, div_beam=beam2)

    clee = cl_alm2d(alm1=elm1, alm2=elm2, lmax=lmax)
    clbb = cl_alm2d(alm1=blm1,alm2=blm2, lmax=lmax)
    l = np.arange(len(clee))
    clee_hp = hp.alm2cl(elm_hp1,elm_hp2, lmax=lmax)
    clbb_hp = hp.alm2cl(blm_hp1,blm_hp2, lmax=lmax)
    l_hp = np.arange(len(clee_hp))

    clplanck = np.loadtxt(data_path + 'bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl')
    clee_planck = clplanck[:,3]
    clbb_planck = clplanck[:,4]
    l_planck = clplanck[:,0]

    pl.figure()
    pl.title('EE check')
    pl.plot(l, clee*l*(l+1)/2./np.pi*1e12, label='2d')
    pl.plot(l,clee_hp*l_hp*(l_hp+1)/2./np.pi*1e12, label='healpy')
    pl.plot(l_planck, clee_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)

    pl.figure()
    pl.title('BB check')
    pl.plot(l, clbb*l*(l+1)/2./np.pi*1e12, label='2d')
    pl.plot(l_hp,clbb_hp*l_hp*(l_hp+1)/2./np.pi*1e12, label='healpy')
    pl.plot(l_planck, clbb_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)
Example #5
 def setUp(self):
     self.lmax = 64
     self.path = os.path.dirname( os.path.realpath( __file__ ) )
     self.map1 = [hp.ma(m) for m in hp.read_map(os.path.join(self.path, 'data', 'wmap_band_iqumap_r9_7yr_W_v4_udgraded32.fits'), (0,1,2))]
     self.map2 = [hp.ma(m) for m in hp.read_map(os.path.join(self.path, 'data', 'wmap_band_iqumap_r9_7yr_V_v4_udgraded32.fits'), (0,1,2))]
     self.mask = hp.read_map(os.path.join(self.path, 'data', 'wmap_temperature_analysis_mask_r9_7yr_v4_udgraded32.fits')).astype(np.bool)
     for m in chain(self.map1, self.map2):
         m.mask = np.logical_not(self.mask)
     self.cla = hp.read_cl(os.path.join(self.path, 'data', 'cl_wmap_band_iqumap_r9_7yr_W_v4_udgraded32_II_lmax64_rmmono_3iter.fits'))
     self.cl_fortran_nomask = hp.read_cl(os.path.join(self.path, 'data', 'cl_wmap_band_iqumap_r9_7yr_W_v4_udgraded32_II_lmax64_rmmono_3iter_nomask.fits'))
     cls_file = pyfits.open(os.path.join(self.path, 'data',
                                    'cl_wmap_band_iqumap_r9_7yr_W_v4_udgraded32_IQU_lmax64_rmmono_3iter.fits'))
     # fix for pyfits to read the file with duplicate column names
     for i in range(2, 6):
         cls_file[1].header['TTYPE%d' % i] += '-%d' % i
     cls = cls_file[1].data
     # order of HEALPIX is TB, EB while in healpy is EB, TB
     self.cliqu = [np.array(cls.field(i)) for i in (0,1,2,3,5,4)]
     nside = 32
     lmax = 64
     fwhm_deg = 7.
     seed = 12345
     np.random.seed(seed)
     self.mapiqu = hp.synfast(self.cliqu, nside, lmax=lmax, pixwin=False,
                              fwhm=np.radians(fwhm_deg), new=False)
Example #6
def read_and_diff_files_fast(f1,f2,nside=256,tmask=None,return_map=False):
    #assume tmask input is already degraded
    
    mm1=hp.read_map(f1,[0,1,2],verbose=False)
    mm2=hp.read_map(f2,[0,1,2],verbose=False)

    mmm1=[]
    mmm2=[]
    for m1,m2 in zip(mm1,mm2):
        m1=hp.ud_grade(hp.ma(m1),nside_out=nside)
        m2=hp.ud_grade(hp.ma(m2),nside_out=nside)
        tmask=m1.mask | m2.mask | tmask
        mmm1.append(m1)
        mmm2.append(m2)
    
    diff=[]
    for m1,m2 in zip(mmm1,mmm2):
        d=m1-m2
        d.mask=tmask
        diff.append(d)
    
    skyfrac=1-float(tmask.sum())/len(tmask)
        
    cldata=hp.anafast(diff)
    cldata_out=[]
    for cl in cldata:
        cldata_out.append(cl/skyfrac)
        
    if return_map is False:
        return cldata_out
    if return_map is True:
        return cldata_out,diff
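
The sky-fraction rescaling of the pseudo-spectrum used above can be reproduced in isolation. A rough sketch with a toy map and mask (not the HDF5/FITS inputs the function expects):

import healpy as hp
import numpy as np

nside = 32
npix = hp.nside2npix(nside)
m = np.random.randn(npix)
bad = np.zeros(npix, dtype=bool)
bad[: npix // 4] = True                 # toy mask: first quarter of the sky removed
m_masked = np.where(bad, 0.0, m)        # zero out masked pixels before the transform

skyfrac = 1.0 - float(bad.sum()) / len(bad)
cl = hp.anafast(m_masked) / skyfrac     # crude fsky correction of the pseudo-Cl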
Example #7
def read_hpx_maps(fns):
    '''Read in one or more healpix maps and add them together. The input must
    be a list of filename strings, even if only a single map is read.

    Parameters
    ----------
    fns : list of strings
        The filenames for the healpix maps to read in.

    Returns
    -------
    hpx_map: array-like
        A healpix map that is the sum of the Healpix maps in the input files.

    Notes
    -----
    The nside of the output map will be the nside of the first map in the list.
    Every other map will be upgraded or downgraded to that nside value.
    '''

    hpx_map = H.read_map(fns[0], verbose=False)
    nside = H.npix2nside(len(hpx_map))
    for fn_tmp in fns[1:]:
        tmp_map = H.read_map(fn_tmp, verbose=False)
        hpx_map += H.ud_grade(tmp_map, nside)

    return hpx_map
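
A brief usage sketch of the same coaddition pattern, with in-memory toy maps in place of FITS files (the nside values are arbitrary):

import healpy as hp
import numpy as np

map_a = np.arange(hp.nside2npix(32), dtype=float)   # reference map, sets the output nside
map_b = np.ones(hp.nside2npix(64))                  # higher-resolution map to be added
nside = hp.npix2nside(len(map_a))
total = map_a + hp.ud_grade(map_b, nside)           # degrade map_b to nside, then sum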
Example #8
def freq_interp_hpm(inmap, infreq, outfreq, spectral_index=(), curverture=()):
    basemap = hp.read_map(inmap)  # keep the filename in inmap; the pixel data go in basemap
    basefreq = infreq
    print 'Read base map from {0:s}'.format(inmap)
    print 'Frequency of inmap: {0:f} MHz'.format(infreq)
    if isinstance(outfreq, float):
        nu = np.array([outfreq])
    elif isinstance(outfreq, np.ndarray):
        nu = outfreq
    else:
        raise Exception("Check outfreq format")
    print 'Output frequencies:'
    print nu
    if not spectral_index:
        beta = np.ones(basemap.size) * -2.5
        print 'beta is assumed to be -2.5'
    elif isinstance(spectral_index, str):
        if spectral_index.rsplit('.')[-1] == 'fits':
            beta = hp.read_map(spectral_index)
            print 'read beta map from {0:s}'.format(spectral_index)
    if not curverture:
        gamma = np.zeros(basemap.size)
        print 'gamma is assumed to be 0'
    elif isinstance(curverture, str):
        if curverture.rsplit('.')[-1] == 'fits':
            gamma = hp.read_map(curverture)
            print 'read gamma map from {0:s}'.format(curverture)
    outname = ['{0:s}_{1:.3f}MHz.fits'.format(inmap.rsplit('.', 1)[0], nu[i])
               for i in range(nu.size)]
    for f, name in zip(nu, outname):
        print 'Scaling base map to {0:.3f}MHz and save output to {1:s}'\
            .format(f, name)
        T = np.exp(np.log(basemap) + beta * np.log(f / basefreq)
                   + gamma * (np.log(f / basefreq)) ** 2)
        hp.write_map(name, T, coord='G')
Example #9
    def test_load_signal(self):
        params = Struct(beam_nside = GSM_NSIDE)
        ctx = Struct(params = params)
        
        ctx.frequency = 980.0
        astro_signal = gsm.load_signal(ctx)
        assert astro_signal is not None
        assert hp.get_nside(astro_signal) == ctx.params.beam_nside

        root_file_path = resource_filename(hide.__name__, gsm.GSM_FILE_PATH)        
        file_path = os.path.join(root_file_path, str(params.beam_nside), "gsm_%s.fits"%(ctx.frequency))
        gsm_map = hp.read_map(file_path)
        assert np.all(gsm_map == astro_signal)
        
        ctx.frequency = 1000.0
        astro_signal = gsm.load_signal(ctx)
        assert astro_signal is not None
        assert hp.get_nside(astro_signal) == ctx.params.beam_nside
        
        file_path = os.path.join(root_file_path, str(params.beam_nside), "gsm_%s.fits"%(ctx.frequency))
        gsm_map = hp.read_map(file_path)
        assert np.all(gsm_map == astro_signal)
        
        ctx.frequency = 1280.0
        astro_signal = gsm.load_signal(ctx)
        assert astro_signal is not None
        assert hp.get_nside(astro_signal) == ctx.params.beam_nside
        
        file_path = os.path.join(root_file_path, str(params.beam_nside), "gsm_%s.fits"%(ctx.frequency))
        gsm_map = hp.read_map(file_path)
        assert np.all(gsm_map == astro_signal)
Example #10
def main(argv):
    
    _path = '/home/tiago/Develop/SMAPs/'

    file15 = 'smpas_obstime_15.fits'
    file2 = 'smpas_obstime_2.fits'
    extMapFile = 'extintion_at3800.fits'
    
    map15 = H.read_map(os.path.join(_path,file15))
    map2 = H.read_map(os.path.join(_path,file2))    
    extMap = H.read_map(os.path.join(_path,extMapFile))

    fig = py.figure(1,figsize=(8,3))

    H.mollview(map2*10**(-extMap),fig=1,coord=['G','E'],title='secz < 2.0',sub=(1,2,1),max=1296,cbar=False,notext=True)#,unit='hours z < 1.3')

    H.graticule()

    H.mollview(map15*10**(-extMap),fig=1,coord=['G','E'],title='secz < 1.5',sub=(1,2,2),max=1296,cbar=False,notext=True)#,unit='hours z < 1.3')

    H.graticule()

    #H.write_map(os.path.join(_path,'dcorr_'+file15),map15*10**(-extMap))
    #H.write_map(os.path.join(_path,'dcorr_'+file2),map2*10**(-extMap))
    #H.write_map(os.path.join(_path,'dcorr_'+file3),map3*10**(-extMap))

    py.savefig(os.path.join(_path,'Figures/fig2.png'))

    py.show()
Example #11
def main(nsim=0):

    nl = h._nl

    # Load map, mask, mll
    print ""
    print "Loading map, mask, mll, and calculating mll_inv..."

    map_data = hp.read_map(h._fn_map)
    mask = hp.read_map(h._fn_mask)
    mll = np.load(h._fn_mll)
    mll_inv = np.linalg.inv(mll)

    # Read in Planck map: normalize, remove mono-/dipole, mask
    print "Normalizing, removing mono-/dipole, and masking map..."
    map_masked = map_data * mask
    # Create cltt (cltt_data_masked) and correct it (cltt_data_corrected)
    print "Calculating cltt_data_masked, cltt_data_corrected..."
    cltt_data_masked = hp.anafast(map_masked)
    cltt_data_masked = cltt_data_masked[:nl]
    cltt_data_corrected = np.dot(mll_inv, cltt_data_masked)
    # Create simulation of map (map_sim) from cltt_data_corrected
    print "Creating and saving map_sim_%i..." % nsim
    map_sim = hp.synfast(cltt_data_corrected, h._nside)
    hp.write_map('output/map_sim_%i.fits' % nsim, map_sim)
Example #12
def simple_split(config,dirname='split',force=False):
    config = Config(config)
    filenames = config.getFilenames()
    healpix = filenames['pix'].compressed()

    nside_catalog = config['coords']['nside_catalog']
    nside_pixel = config['coords']['nside_pixel']

    release = config['data']['release'].lower()
    band_1 = config['catalog']['mag_1_band']
    band_2 = config['catalog']['mag_2_band']

    mangledir = config['mangle']['dirname']

    mangle_file_1 = join(mangledir,config['mangle']['filename_1'])
    logger.info("Reading %s..."%mangle_file_1)
    mangle_1 = healpy.read_map(mangle_file_1)
    
    mangle_file_2 = join(mangledir,config['mangle']['filename_2'])
    logger.info("Reading %s..."%mangle_file_2)
    mangle_2 = healpy.read_map(mangle_file_2)

    basedir,basename = os.path.split(config['mask']['dirname'])
    if basename == dirname:
        msg = "Input and output directory are the same."
        raise Exception(msg)
    outdir = mkdir(os.path.join(basedir,dirname))

    mask_1 = os.path.basename(config['mask']['basename_1'])
    mask_2 = os.path.basename(config['mask']['basename_2'])

    for band,mangle,base in [(band_1,mangle_1,mask_1),(band_2,mangle_2,mask_2)]:
        maglim = MAGLIMS[release][band]

        nside_mangle = healpy.npix2nside(len(mangle))
        if nside_mangle != nside_pixel:
            msg = "Mangle nside different from pixel nside"
            logger.warning(msg)
            #raise Exception(msg)


        pixels = np.nonzero((mangle>0)&(mangle>maglim))[0]
        print len(pixels)
        superpix = superpixel(pixels,nside_mangle,nside_catalog)
        print healpix
        for hpx in healpix:
            outfile = join(outdir,base)%hpx
            if os.path.exists(outfile) and not force:
                logger.warning("Found %s; skipping..."%outfile)
                continue

            pix = pixels[superpix == hpx]
            print hpx, len(pix)

            maglims = maglim*np.ones(len(pix))
            data = dict(MAGLIM=maglims )
            logger.info('Writing %s...'%outfile)
            ugali.utils.skymap.writeSparseHealpixMap(pix,data,nside_pixel,outfile)
Example #13
def fetch_wmap_temperatures(masked=False, data_home=None,
                            download_if_missing=True):
    """Loader for WMAP temperature map data

    Parameters
    ----------
    masked : optional, default=False
        If True, then return the foreground-masked healpix array of data
        If False, then return the raw temperature array
    data_home : optional, default=None
        Specify another download and cache folder for the datasets. By default
        all astroML data is stored in '~/astroML_data' subfolders.

    download_if_missing : optional, default=True
        If False, raise an IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    -------
    data : np.ndarray or np.ma.MaskedArray
        record array containing (masked) temperature data
    """
    # because of a bug in healpy, pylab must be imported before healpy is
    # or else a segmentation fault can result.
    import pylab
    import healpy as hp

    data_home = get_data_home(data_home)
    if not os.path.exists(data_home):
        os.makedirs(data_home)

    data_file = os.path.join(data_home, os.path.basename(DATA_URL))
    mask_file = os.path.join(data_home, os.path.basename(MASK_URL))

    if not os.path.exists(data_file):
        if not download_if_missing:
            raise IOError('data not present on disk. '
                          'set download_if_missing=True to download')
        data_buffer = download_with_progress_bar(DATA_URL)
        open(data_file, 'wb').write(data_buffer)

    data = hp.read_map(data_file)

    if masked:
        if not os.path.exists(mask_file):
            if not download_if_missing:
                raise IOError('mask data not present on disk. '
                              'set download_if_missing=True to download')
            mask_buffer = download_with_progress_bar(MASK_URL)
            open(mask_file, 'w').write(mask_buffer)

        mask = hp.read_map(mask_file)

        data = hp.ma(data)
        data.mask = np.logical_not(mask)  # WMAP mask has 0=bad. We need 1=bad

    return data
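
The masking convention at the end (the WMAP mask stores 1 = good, while numpy masked arrays want True = bad) can be illustrated with a synthetic map; this is only a sketch, not the WMAP data:

import healpy as hp
import numpy as np

nside = 32
npix = hp.nside2npix(nside)
data = np.random.randn(npix)
good = np.zeros(npix, dtype=bool)
good[: npix // 2] = True                 # 1/True = good pixel, WMAP-style

masked = hp.ma(data)
masked.mask = np.logical_not(good)       # masked arrays want True = bad
mean_T = masked.mean()                   # statistics ignore the masked half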
Example #14
def get_fore_integral_flux_map(fore_files_list, e_min, e_max):
    """Returns the foreground map integrated between e_min and e_max
       A powerlaw is assumed fore the foregriunf energy spectrum, hence
       the interpolation between 2 given maps at given energies (given 
       by the model) is done in logarithmic scales. 
    
       fore_files_list: list of str
           Ordered list of the foreground files (one for each energy)
       e_min: float
           the min of the energy bin
       e_max: float 
           the max of the energy bin
    """
    input_file = os.path.join(FT_DATA_FOLDER, 'models/gll_iem_v06.fits')
    if not os.path.exists(input_file):
        abort("Map %s not found!"%input_file)
    frmaps = pf.open(input_file)
    fore_en = []#np.array([x[0] for x in frmaps['ENERGIES'].data])
    for ff in fore_files_list:
        m = re.search(FORE_EN, ff)
        en = int(m.group(0).replace('_', '').replace('.', ''))
        fore_en.append(en)
    fore_en = np.array(fore_en)
    out_name = fore_files_list[0].replace('_%i.fits'%fore_en[0], 
                                          '_%d-%d.fits'%(e_min, e_max))
    if os.path.exists(out_name):
        logger.info('ATT: file %s already exists and returned...'%out_name)
        fore_map = hp.read_map(out_name)
        return fore_map
    else: 
        logger.info('Computing the integral flux of the foreground model...')
        logger.info('...between %.2f - %.2f'%(e_min, e_max))
        fore_emin_sx, fore_emin_dx = find_outer_energies(e_min, fore_en)
        fore_emax_sx, fore_emax_dx = find_outer_energies(e_max, fore_en)
        fore_emin_sx_ind = np.where(fore_en == fore_emin_sx)[0]
        fore_emin_dx_ind = np.where(fore_en == fore_emin_dx)[0]
        fore_emax_sx_ind = np.where(fore_en == fore_emax_sx)[0]
        fore_emax_dx_ind = np.where(fore_en == fore_emax_dx)[0]
        fore_fmin_sx = hp.read_map(fore_files_list[fore_emin_sx_ind])
        fore_fmin_dx = hp.read_map(fore_files_list[fore_emin_dx_ind])
        fore_fmax_sx = hp.read_map(fore_files_list[fore_emax_sx_ind])
        fore_fmax_dx = hp.read_map(fore_files_list[fore_emax_dx_ind])
        m1 = (np.log10(fore_fmin_sx)-np.log10(fore_fmin_dx))/ \
            (np.log10(fore_emin_sx)-np.log10(fore_emin_dx))
        m2 = (np.log10(fore_fmax_sx)-np.log10(fore_fmax_dx))/ \
            (np.log10(fore_emax_sx)-np.log10(fore_emax_dx))
        logfore1 = m1*(np.log10(e_min)-np.log10(fore_emin_sx))+ \
            np.log10(fore_fmin_sx)
        logfore2 = m2*(np.log10(e_max)-np.log10(fore_emax_sx))+ \
            np.log10(fore_fmax_sx)
        fore1 = 10**(logfore1)
        fore2 = 10**(logfore2)
        fore_integ = np.sqrt(fore1*fore2)*(e_max - e_min)
        hp.write_map(out_name, fore_integ)
        logger.info('Created file %s'%out_name)
        return fore_integ
Example #15
def test_block_diagonal_precond_onto_real_data():
    """
    Test the action of the block diagonal preconditioner, defined as
    :math: `M_{BD}=(A^T A)^{-1}`
    with a realistic scanning strategy.
    """
    runcase={'IQU':3}
    runcase={'I':1,'QU':2,'IQU':3}
    for pol in runcase.values():
        d,t,phi,pixs,hp_pixs,ground,ces_size=read_from_data('data/20120718_093931.hdf5',pol=pol,npairs=4)
        nt,npix,nb=len(d),len(hp_pixs),len(t)
        print nt,npix,nb,len(hp_pixs[pixs])
        nside=128
        pr=profile_run()

        P=SparseLO(npix,nt,pixs,phi,pixel_schema=hp_pixs,pol=pol)
        npix=P.ncols

        A=P.T*P
        x0=np.zeros(npix*pol)
        Mbd=BlockDiagonalPreconditionerLO(P,npix,pol=pol)
        if pol==1:
            fname='data/map_BD_i_cmb_'+str(nside)+'.fits'
            inm=hp.read_map('data/cmb_r0.2_3.5arcmin_128.fits')
        elif pol==2:
            inm=hp.read_map('data/cmb_r0.2_3.5arcmin_128.fits',field=[1,2])
            fname='data/map_BD_qu_cmb_'+str(nside)+'.fits'
        elif pol==3:
            fname='data/map_BD_iqu_cmb_'+str(nside)+'.fits'
            inm=hp.read_map('data/cmb_r0.2_3.5arcmin_128.fits',field=[0,1,2])
        b=P.T*d
        x=Mbd*b
        #show_matrix_form(Mbd*P.T*P)
        globals()['c']=0
        def count_iterations(x):
            globals()['c']+=1

        pr.enable()
        #x=Mbd*b
        x,info=spla.cg(A,b,x0=x0,M=Mbd,tol=1.e-3,maxiter=10,callback=count_iterations)
        pr.disable()
        #output_profile(pr)
        #checking_output(info)
        print "After  %d iteration. "%(globals()['c'])
        #assert checking_output(info) and globals()['c']==1
        hp_pixs=P.obspix

        hp_map=reorganize_map(x,hp_pixs,npix,nside,pol)
        #mask=obspix2mask(hp_pixs,pixs,nside)
        mask=obspix2mask(hp_pixs,nside)

        #show_map(mask,1,'ra23')

        compare_maps(hp_map,inm,pol,'ra23',mask,remove_offset=False,norm=None)
Example #16
def compare_maps(component,i_check) :
    for nu in ['30p0','100p0','353p0'] :
        mp1=hp.read_map('test/benchmark/check%d'%i_check+component+'_'+nu+'_64.fits',field=[0,1,2],verbose=False)
        mp2=hp.read_map('test/Output/check%d'%i_check+component+'_'+nu+'_64.fits',field=[0,1,2],verbose=False)
        for i in [0,1,2] :
            norm=np.std(mp1[i])
            if norm<=0 : norm=1.
            diff=np.std((mp1[i]-mp2[i]))/norm
            if diff>1E-6 :
                return 0
    return 1
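
The comparison criterion above (standard deviation of the difference, normalized by the standard deviation of the reference field) as a small standalone helper; the tolerance mirrors the 1e-6 used in the example:

import numpy as np

def fields_agree(mp1, mp2, tol=1e-6):
    # mp1, mp2: sequences of equally sized arrays (e.g. I, Q, U fields)
    for a, b in zip(mp1, mp2):
        norm = np.std(a)
        if norm <= 0:
            norm = 1.0
        if np.std(a - b) / norm > tol:
            return False
    return True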
Example #17
def get_mask(mask_percentage=60,
             mask_sources=True,
             apodization='0'):

    field = MASK_FIELD[mask_percentage]
    mask = hp.read_map(data_path + 'HFI_Mask_GalPlane-apo{}_2048_R2.00.fits'.format(apodization),
                       field=field)
    if mask_sources:
        smask = hp.read_map(data_path + 'HFI_Mask_PointSrc_2048_R2.00.fits')
        mask *= smask

    return mask
Example #18
def plot_figure(metadata):
    try:
        allmap = hp.ma(hp.read_map(os.path.join(root_folder, metadata["file_name"]), (0,1,2)))
    except exceptions.IndexError:
        allmap = [hp.ma(hp.read_map(os.path.join(root_folder, metadata["file_name"])))]
    for comp, m in zip("IQU", allmap):
        if comp in "QU":
            plot_range = 20
        else:
            plot_range = 20
        if len(allmap) == 1: #only T, single ch
            plot_range = 20
        is_single_channel = isinstance(metadata["channel"], basestring) and len(metadata["channel"])==6
        if is_single_channel:
            if int(metadata["channel"][3:5]) < 24: # 70GHz
                plot_range = 20
        test_type = metadata["base_file_name"].split("/")[0]
        if isinstance(metadata["channel"], list):
            metadata["channel"] = "_".join(metadata["channel"])
        if metadata["channel"].find("_") < 0:
            try:
                if int(metadata["channel"]) > 70:
                    plot_range = 5
                    if int(metadata["channel"]) >= 353:
                        if comp in "QU":
                            plot_range = 100
                        else:
                            plot_range = 30
                    if int(metadata["channel"]) >= 545:
                        if comp in "QU":
                            plot_range = 1e6
                        else:
                            plot_range = 500
                    if int(metadata["channel"]) >= 857:
                        plot_range = 1e5
                        if comp in "QU":
                            plot_range = 1e6
                    if test_type == "surveydiff" and int(metadata["channel"]) >= 545 and comp == "Q":
                        plot_range *= 1e2
            except exceptions.ValueError:
                pass

        fig = plt.figure(figsize=(9, 6), dpi=100)
        matplotlib.rcParams.update({'font.size': 14})
        hp.mollview(m * 1e6, min=-plot_range, max=plot_range, unit="uK", title=metadata["title"] + " %s" % comp, xsize=900, hold=True)
        plt.savefig(os.path.join(out_folder, metadata["file_name"].replace(".fits", "_%s.jpg" % comp)), dpi=100)
        plt.close()
        fig = plt.figure(figsize=(9, 6), dpi=20)
        fig.add_axes([0.01, 0.01, 0.98, 0.98])
        matplotlib.rcParams.update({'font.size': 30})
        hp.mollview(m * 1e6, min=-plot_range, max=plot_range, cbar=True, title="", xsize=180, hold=True)
        plt.savefig(os.path.join(out_folder, metadata["file_name"].replace(".fits", "_%s_thumb.jpg" % comp)), dpi=20)
        plt.close()
Example #19
def merger(out_filename, *in_files):
	mask1 = hp.read_map(in_files[0])
	C = np.array([1]*len(mask1))
	
	for mask in in_files:
		A = np.array(hp.read_map(mask))
		assert len(A) == len(C), ("nsides mismatched")
		C = C*A

	hp.write_map(out_filename, C)

	return None
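
The same mask combination can be done without touching disk. A minimal sketch with random 0/1 masks (the FITS I/O from the example is left out):

import healpy as hp
import numpy as np

nside = 16
npix = hp.nside2npix(nside)
masks = [np.random.randint(0, 2, npix) for _ in range(3)]   # toy 0/1 masks

combined = np.ones(npix)
for m in masks:
    assert len(m) == len(combined), "nsides mismatched"
    combined = combined * m          # a pixel survives only if every mask keeps it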
Example #20
    def load_data(self):
        data = fits.getdata(self.datapath)
        c1 = fits.Column(name='RA', array=data['azim_ang'], format='E')
        c2 = fits.Column(name='DEC', array=data['polar_ang'], format='E')
        c3 = fits.Column(name='REDSHIFT', array=data['redshift'], format='E')
        t = fits.BinTableHDU.from_columns([c1, c2, c3])
        self.data = t.data

        self.mask = hp.read_map(self.maskpath, 0, partial=True)
        zmask = hp.read_map(self.maskpath, 1, partial=True)
        self.mask[self.mask < .95] = hp.UNSEEN
        self.mask[zmask < .6] = hp.UNSEEN
Example #21
def read_dpc_masks(freq):
    if freq > 70:
        nside = 2048
    else:
        nside = 1024
    ps_mask = np.logical_not(
        np.floor(
            hp.ud_grade(hp.read_map(glob(os.path.join(INPUT_PATH, "MASKs", "mask_ps_%dGHz_*.fits" % freq))[0]), nside)
        )
    ).astype(np.bool)
    gal_filename = glob(os.path.join(os.environ["DX9_LFI"], "MASKs", "destripingmask_%d.fits" % freq))[0]
    gal_mask = np.logical_not(hp.read_map(gal_filename)).astype(np.bool)
    return ps_mask, gal_mask
Example #22
 def setUp(self):
     try:
         self.map = hp.ma(hp.read_map(os.path.join('data', 'wmap_band_imap_r9_7yr_W_v4.fits')))
         self.mask = hp.read_map(os.path.join('data', 'wmap_temperature_analysis_mask_r9_7yr_v4.fits')).astype(np.bool)
     except exceptions.IOError:
         warnings.warn("""Missing Wmap test maps from the data folder, please download them from Lambda and copy them in the test/data folder:
         http://lambda.gsfc.nasa.gov/data/map/dr4/skymaps/7yr/raw/wmap_band_imap_r9_7yr_W_v4.fits
         http://lambda.gsfc.nasa.gov/data/map/dr4/ancillary/masks/wmap_temperature_analysis_mask_r9_7yr_v4.fits
         on Mac or Linux you can run the bash script get_wmap_maps.sh from the same folder
         """)
         raise
     self.map.mask = np.logical_not(self.mask)
     self.cla = hp.read_cl(os.path.join('data', 'cl_wmap_fortran.fits'))
Example #23
def compute_peebles_pcl_estimate(data_file,inv_noise_file,beam_file,num_samps):
    #write the data file
    d = hp.read_map(data_file)
    hp.write_map(spice_data,m=d)

    #create a mask file from inv_noise
    inv_n = hp.read_map(inv_noise_file)
    msk = get_mask_file(inv_n)
    hp.write_map(spice_mask,m=msk)

    if d.shape != inv_n.shape :
        raise RuntimeError("data and noise have different dimensions")

    nside=hp.npix2nside(np.shape(d)[0])


    #write the noise map
    n = np.zeros(np.shape(inv_n))
    n[inv_n>0]  = 1./np.sqrt(inv_n[inv_n>0])

    #write the beam file
    B_l_in = np.loadtxt(beam_file,delimiter=",")
    np.savetxt(spice_bl,np.asarray([B_l_in[:,0],B_l_in[:,1]]).T,fmt='%d   %0.2f')
    B_l = B_l_in[:,1]


    #compute the power spectrum of the data
    call([map_to_alm,'-I',spice_data,'-O',spice_data_alm,'-L',str(2*nside),'-m',spice_mask])
    call([alm_to_cl,'-I',spice_data_alm,'-O',spice_dl,'-P','-m',spice_mask,'-G','-C',spice_ilm_jlm,'-M',spice_data,'-N',str(nside),'-L',str(2*nside+1)])

    call(['rm',spice_data_alm])
    call(['rm',spice_data])

    #read the power spectrum
    D_l = np.loadtxt(spice_dl,skiprows=2)[:,1]

    #apply beam to the cls
    D_l /= B_l**2

    #compute the noise power spectrum using Monte Carlo
    N_l = np.zeros(np.shape(D_l))

    # subtract
    S_l = D_l - N_l

    #delete the mask
    call(['rm',spice_mask])
    call(['rm',spice_dl])
    call(['rm',spice_bl])

    return (D_l,N_l,S_l)
Example #24
def main(argv):

    _path = os.path.expanduser('~/Develop/SMAPs/')
    nna_file = os.path.expanduser('~/Documents/SMAPs/norpointT80.dat')
    nsa_file = os.path.expanduser('~/Documents/SMAPs/surpointT80.dat')

    sna_file = os.path.expanduser('~/Develop/SMAPs/coordinatesystemandtiling/smaps_pointT80norte.dat')
    ssa_file = os.path.expanduser('~/Develop/SMAPs/coordinatesystemandtiling/smaps_pointsulT80.dat')

    nna_pt = np.loadtxt(nna_file,unpack=True,usecols=(4,5))
    nsa_pt = np.loadtxt(nsa_file,unpack=True,usecols=(4,5))

    sna_pt = np.loadtxt(sna_file,unpack=True,usecols=(4,5))
    ssa_pt = np.loadtxt(ssa_file,unpack=True,usecols=(4,5))
		
    file1 = 'lambda_sfd_ebv.fits' 
    file2 = 'smpas_obstime_2.fits'
    file3 = 'smpas_obstime_15.fits'
    extMapFile = 'extintion_at3800.fits'
    
    map1 = H.read_map(os.path.join(_path,file1))
    map2 = H.read_map(os.path.join(_path,file2))
    map3 = H.read_map(os.path.join(_path,file3))    
    extMap = H.read_map(os.path.join(_path,extMapFile))
    
    H.mollview(map1,fig=1,coord=['G','E'],max=1.0,title='',sub=(2,2,3),cbar=False,notext=True)#,unit='hours z < 1.3')

    H.projplot((90-nna_pt[1])*np.pi/180.,nna_pt[0]*np.pi/180.,'r.')#,coord=['E','G'])
    H.projplot((90-nsa_pt[1])*np.pi/180.,nsa_pt[0]*np.pi/180.,'r.')#,coord=['E','G'])

    H.projplot((90-sna_pt[1])*np.pi/180.,sna_pt[0]*np.pi/180.,'w.')#,coord=['E','G'])
    H.projplot((90-ssa_pt[1])*np.pi/180.,ssa_pt[0]*np.pi/180.,'w.')#,coord=['E','G'])

    H.graticule()

    H.mollview(map2,fig=1,coord=['G','E'],title='',sub=(2,2,1),cbar=False,notext=True,max=1800)
	#,unit='hours z < 1.3')

    H.graticule()

    H.mollview(map3,fig=1,coord='G',title='',sub=(2,2,2),cbar=False,notext=True,max=1800)#,unit='hours z < 1.3')

    H.graticule()

    H.mollview(map2*10**(-extMap),fig=1,coord='G',title='',sub=(2,2,4),cbar=False,notext=True)#,unit='hours z < 1.3')

    H.graticule()

    py.savefig(os.path.join(_path,'Figures/fig1.png'))

    py.show()
Example #25
def test_block_diagonal_precond_plus_noise_onto_real_data():

    """
    Test the action of the block diagonal preconditioner, defined as
    :math: `M_{BD}=(A^T diag(N^{-1}) A)^{-1}`
    with a realistic scanning strategy.
    """
    pol=1
    d,t,phi,pixs,hp_pixs,ground,ces_size=read_from_data('data/20120718_093931.hdf5',pol=pol)
    nt,npix,nb=len(d),len(hp_pixs),len(t)
    print nt,npix,nb,len(hp_pixs[pixs])
    nside=128
    pr=profile_run()
    N=BlockLO(nt/nb,t)
    P=SparseLO(npix,nt,pixs,phi,pol=pol,w=N.diag )
    A=P.T*N*P
    if pol==1:
        Mbd=BlockDiagonalPreconditionerLO(P.counts,P.mask,npix,pol)
        fname='data/map_BD_i_cmb_'+str(nside)+'.fits'
        print "reading input map"
        inm=hp.read_map('data/cmb_r0.2_3.5arcmin_128.fits')
        solver=spla.cg
    elif pol==3:
        Mbd=BlockDiagonalPreconditionerLO(P.counts,P.mask,npix,pol,P.sin2,P.cos2,P.sincos)
        fname='data/map_BD_iqu_cmb_'+str(nside)+'.fits'
        inm=hp.read_map('data/cmb_r0.2_3.5arcmin_128.fits',field=[0,1,2])
        print "reading input map"
        solver=spla.gmres
    b=P.T*N*d
    x0=np.zeros(npix*pol)

    x=Mbd*b
    globals()['c']=0
    def count_iterations(x):
        globals()['c']+=1


    pr.enable()
    #x,info=solver(A,b,x0=x0,M=Mbd,maxiter=10,callback=count_iterations)
    pr.disable()
    #output_profile(pr)

    print "After %d iteration. "%(globals()['c'])
    #assert checking_output(info) and globals()['c']==1

    #GRAFIC TOOLS
    mask=obspix2mask(hp_pixs,pixs,nside,'data/mask_ra23.fits',write=False)
    hp_map=reorganize_map(x,hp_pixs,npix,nside,pol,fname,write=False)

    compare_maps(hp_map,inm,pol,'ra23',mask)
Example #26
def check_TE2d(nu=100, lmax=300,
                maskfield=2, source_maskfield=0,
                label_loc='lower right', xmax=None):
    
    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu)
    I,Q,U =hp.read_map(data_path + map_name, field=(0,1,2))
    mask=hp.read_map(data_path + 'HFI_Mask_GalPlane-apo0_2048_R2.00.fits',
                     field=maskfield)
    smask=hp.read_map(data_path + 'HFI_Mask_PointSrc_2048_R2.00.fits',
                     field=source_maskfield)
    mask *= smask

    hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
    beamP = hdulist[beam_index['{}P'.format(nu)]].data.NOMINAL[0][:lmax+1]
    beam = hdulist[beam_index['{}'.format(nu)]].data.NOMINAL[0][:lmax+1]

    #tlm = get_Tlm(lmax=lmax, Imap=I, mask=mask,
    #              healpy_format=False, recalc=True, div_beam=beam)
    #elm,blm = get_ElmBlm(lmax=lmax, Qmap=Q, Umap=U, mask=mask,
    #              healpy_format=False, recalc=True, div_beam=beamP)
    tlm_hp = get_Tlm(lmax=lmax, Imap=I, mask=mask,
                  healpy_format=True, recalc=True, div_beam=beam)
    elm_hp,blm_hp = get_ElmBlm(lmax=lmax, Qmap=Q, Umap=U, mask=mask,
                  healpy_format=True, recalc=True, div_beam=beamP)

    #cltt = cl_alm2d(tlm, lmax)
    #clee = cl_alm2d(elm, lmax)
    #clbb = cl_alm2d(blm, lmax)
    #l = np.arange(len(clee))
    clte_hp = hp.alm2cl(tlm_hp, elm_hp, lmax=lmax)
    #clee_hp = hp.alm2cl(elm_hp, lmax=lmax)
    #clbb_hp = hp.alm2cl(blm_hp, lmax=lmax)
    l_hp = np.arange(len(clte_hp))

    clplanck = np.loadtxt(data_path + 'bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl')
    clte_planck = clplanck[:,2]
    #clee_planck = clplanck[:,3]
    #clbb_planck = clplanck[:,4]
    l_planck = clplanck[:,0]

    pl.figure()
    pl.title('TE check')
    #pl.plot(l, clee*l*(l+1)/2./np.pi*1e12, label='2d')
    pl.plot(l_hp,clte_hp*l_hp*(l_hp+1)/2./np.pi*1e12, label='healpy')
    pl.plot(l_planck, clte_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)
Example #27
def b_cov_T353_E143_B143(cl_file=pf.PLANCK_DATA_PATH+'bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl',lmax=100):

    Imap = hp.read_map(pf.PLANCK_DATA_PATH + 'HFI_SkyMap_353_2048_R2.02_full.fits')
    Tlm = hp.map2alm(Imap,lmax=lmax)
    cltt = hp.alm2cl(Tlm,lmax=lmax)

    mask = pf.get_planck_mask(psky=70)
    Qmap, Umap = hp.read_map(pf.PLANCK_DATA_PATH + 'HFI_SkyMap_143_2048_R2.02_full.fits',field=(1,2))
    Elm, Blm = hp.map2alm_spin( (Qmap*mask,Umap*mask), 2, lmax=lmax )
    clee = hp.alm2cl(Elm,lmax=lmax)
    clbb = hp.alm2cl(Blm,lmax=lmax)

    cov = calc_b_cov_TEB(cltt, clee, clbb)
    return cov
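
For reference, the spin-2 transform used above can be run on synthetic Q/U maps; this is only a sketch of the healpy call pattern, with arbitrary nside and lmax:

import healpy as hp
import numpy as np

nside, lmax = 32, 64
Q = np.random.randn(hp.nside2npix(nside))
U = np.random.randn(hp.nside2npix(nside))
Elm, Blm = hp.map2alm_spin((Q, U), 2, lmax=lmax)   # spin-2 E/B decomposition of (Q, U)
clee = hp.alm2cl(Elm, lmax=lmax)
clbb = hp.alm2cl(Blm, lmax=lmax)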
Example #28
def prepare_map(mapname='HFI_SkyMap_143_2048_R2.02_full.fits',
                maskname='HFI_Mask_GalPlane-apo0_2048_R2.00.fits',
                field = (0,1,2),
                fwhm=0.0,
                nside_out=128,
                rewrite_map=False,
                masktype=None):

    newname = mapname[:-5] + '_fwhm_{:.3f}rad_nside_{}_mask_{}.fits'.format(fwhm, nside_out,masktype)
    if not os.path.exists(data_path + newname) or rewrite_map:
        
        print 'reading mask...'
        mask = hp.read_map(data_path + maskname, field=2)
        masknside = hp.get_nside(mask)
        
        if masknside != nside_out:
            print 'matching mask to map resolution...'
            mask = hp.pixelfunc.ud_grade(mask, nside_out=nside_out)
        print 'done'

            
        print 'processing map...'
        Imap, Qmap, Umap = hp.read_map( data_path + mapname,
                                        hdu=1, field=(0,1,2) )
        mapnside = hp.get_nside(Imap)


        if not np.isclose(fwhm, 0.):
            Imap = hp.sphtfunc.smoothing( Imap,fwhm=fwhm )
            Qmap = hp.sphtfunc.smoothing( Qmap,fwhm=fwhm )
            Umap = hp.sphtfunc.smoothing( Umap,fwhm=fwhm )

        if mapnside != nside_out:
            Imap = hp.pixelfunc.ud_grade( Imap,nside_out=nside_out )
            Qmap = hp.pixelfunc.ud_grade( Qmap,nside_out=nside_out )
            Umap = hp.pixelfunc.ud_grade( Umap,nside_out=nside_out )

        Imap *= mask
        Qmap *= mask
        Umap *= mask
        
        hp.fitsfunc.write_map( data_path + newname, [Imap, Qmap, Umap])

        print 'done'

    print 'reading map...'
    maps = hp.read_map( data_path + newname,
                        field=field )
    return maps
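
The smooth-then-degrade step in the middle of prepare_map, tried on a toy map; hp.smoothing takes the FWHM in radians, and the nside values here are arbitrary:

import healpy as hp
import numpy as np

nside_in, nside_out = 128, 64
m = np.random.randn(hp.nside2npix(nside_in))
m_smooth = hp.smoothing(m, fwhm=np.radians(1.0))        # 1 degree FWHM beam
m_low = hp.ud_grade(m_smooth, nside_out=nside_out)      # then match the target resolution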
Example #29
  def info_flag(cat):
    """
    Takes a properly constructed im3shape CatalogStore object and adds info_flag values.
    """

    import healpy as hp
    gdmask=hp.read_map(config.golddir+'y1a1_gold_1.0.1_wide_footprint_4096.fit')
    badmask=hp.read_map(config.golddir+'y1a1_gold_1.0.1_wide_badmask_4096.fit')

    pix=hp.ang2pix(4096, np.pi/2.-np.radians(i3.dec),np.radians(i3.ra), nest=False)
    i3.gold_mask=(gdmask[pix] >=1)
    i3.gold_flag=badmask[pix]

    info_cuts =[
        'i3.gold_mask==False',
        'i3.gold_flag>0',
        'i3.modest!=1',
        'i3.maskfrac>.75', #0.75
        'i3.evals>10000',
        'i3.flagr==1',
        'i3.flagr==2',
        'i3.fluxfrac<.75', #.7
        'i3.snr<10.', #10
        'i3.snr>10000.', #10000
        'i3.rgp<1.1', #1.1
        'i3.rgp>3.5', #3.5
        'i3.rad>5', #10
        'i3.rad<.1', 
        'np.sqrt(i3.ra_off**2.+i3.dec_off**2.)>1', #1
        'i3.chi2pix<.5', #.8
        'i3.chi2pix>1.5', #2
        'i3.resmin<-0.2',#-2
        'i3.resmax>0.2',#2
        'i3.psffwhm>7',
        'i3.psffwhm<0',
        'i3.error!=0'
        # model image?
    ]

    i3.info = np.zeros(len(i3.coadd), dtype=np.int64)
    for i,cut in enumerate(info_cuts):
      mask=eval(cut).astype(int)
      print i,cut,np.sum(mask)
      j=1<<i
      flags=mask*j
      i3.info|=flags


    return
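
The bit-packing loop at the end (one bit per cut, OR-ed into a single integer flag) in self-contained form, with two made-up cuts:

import numpy as np

cuts = [np.array([True, False, True, False]),    # hypothetical cut #0
        np.array([False, False, True, True])]    # hypothetical cut #1

info = np.zeros(4, dtype=np.int64)
for i, mask in enumerate(cuts):
    info |= mask.astype(np.int64) << i           # set bit i where the cut fires

# info is now [1, 0, 3, 2]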
Example #30
def compute_pcl_estimate(data_file,inv_noise_file,beam_file):
  d = hp.read_map(data_file)
  inv_n = hp.read_map(inv_noise_file)
  print "zero pixels= ",len(inv_n[inv_n<=0])
  if len(inv_n[inv_n<=0]) > 0:
    inv_n[inv_n<=0] = min(inv_n[inv_n>0])
  
  
  n  = 1./np.sqrt(inv_n)

  nside = hp.npix2nside(np.shape(d)[0])

  # compute the total power spectrum
  C_l = hp.anafast(d,lmax=2*nside)

  num_samps = int(1000)

  N_l = np.zeros(np.shape(C_l))

  B_l_in = np.loadtxt(beam_file,delimiter=",")

  B_l = B_l_in[:,1]

  #apply beam to the cls
  C_l /= B_l**2

  # Monte Carlo
  mu = np.zeros(np.shape(d))
  for samp in range(num_samps):
    if samp % 100 == 0 :
      print "samples =",samp
    # draw a realisation from noise
    n_i = np.random.normal(mu,n)

    # find the power spectrum of this realisation
    N_l_i = hp.anafast(n_i,lmax=2*nside)

    # accumulate
    N_l += N_l_i

  N_l /= float(num_samps)

  #apply beam to the nls
  N_l /= B_l**2

  # subtract
  S_l = C_l - N_l

  return (C_l,N_l,S_l)
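
The Monte Carlo noise-bias estimate from the loop above, stripped to its essentials (tiny nside and few realizations so it runs quickly; these are not the settings used in the example):

import healpy as hp
import numpy as np

nside = 16
npix = hp.nside2npix(nside)
sigma = np.full(npix, 1.0)               # per-pixel noise standard deviation
num_samps = 20

N_l = np.zeros(2 * nside + 1)            # anafast with lmax=2*nside returns 2*nside+1 values
for _ in range(num_samps):
    noise = np.random.normal(0.0, sigma)
    N_l += hp.anafast(noise, lmax=2 * nside)
N_l /= float(num_samps)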
Example #31
import healpy as hp
import matplotlib.pyplot as plt
import numpy as np
import os
import sys

plt.style.use('classic')

hfi_pipe = '/global/cscratch1/sd/keskital/hfi_pipe/'
dipo_hires = hp.read_map(hfi_pipe + 'dipole_nside1024.fits')
cmb_hires = hp.read_map(
    hfi_pipe + 'sky_model/PR2/COM_CMB_IQU-commander_1024_R2.02_full.fits')
cmb_hires += dipo_hires

ver = 'npipe5v50'

# First threshold is for destriping, second for bandpass

threshholds = {
    30: (1e-4, 1e-3),
    44: (8e-5, 4e-4),
    70: (5e-5, 2e-4),
    100: (5e-5, 3e-4),
    143: (5e-5, 3e-4),
    217: (2e-4, 8e-4),
    353: (1e-3, 6e-3),
    545: (6e-3, 4e-2),
    857: (8e-1, 4e0),
}

nrow, ncol = 3, 3
Example #32
def main():
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Read a toast covariance matrix and invert it.")

    parser.add_argument("--input",
                        required=True,
                        default=None,
                        help="The input covariance FITS file")

    parser.add_argument(
        "--output",
        required=False,
        default=None,
        help="The output inverse covariance FITS file.",
    )

    parser.add_argument(
        "--rcond",
        required=False,
        default=None,
        help="Optionally write the inverse condition number map to this file.",
    )

    parser.add_argument(
        "--single",
        required=False,
        default=False,
        action="store_true",
        help="Write the output in single precision.",
    )

    parser.add_argument(
        "--threshold",
        required=False,
        default=1e-3,
        type=np.float,
        help="Reciprocal condition number threshold",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        return

    # get options

    infile = args.input
    outfile = None
    if args.output is not None:
        outfile = args.output
    else:
        inmat = re.match(r"(.*)\.fits", infile)
        if inmat is None:
            log.error("input file should have .fits extension")
            return
        inroot = inmat.group(1)
        outfile = "{}_inv.fits".format(inroot)

    # Get the default communicator
    mpiworld, procs, rank = get_world()

    # We need to read the header to get the size of the matrix.
    # This would be a trivial function call in astropy.fits or
    # fitsio, but we don't want to bring in a whole new dependency
    # just for that.  Instead, we open the file with healpy in memmap
    # mode so that nothing is actually read except the header.

    nside = 0
    ncovnz = 0
    if rank == 0:
        fake, head = hp.read_map(infile, h=True, memmap=True)
        for key, val in head:
            if key == "NSIDE":
                nside = int(val)
            if key == "TFIELDS":
                ncovnz = int(val)
    if mpiworld is not None:
        nside = mpiworld.bcast(nside, root=0)
        ncovnz = mpiworld.bcast(ncovnz, root=0)

    nnz = int(((np.sqrt(8.0 * ncovnz) - 1.0) / 2.0) + 0.5)

    npix = 12 * nside**2
    subnside = int(nside / 16)
    if subnside == 0:
        subnside = 1
    subnpix = 12 * subnside**2
    nsubmap = int(npix / subnpix)

    # divide the submaps as evenly as possible among processes

    dist = distribute_uniform(nsubmap, procs)
    local = np.arange(dist[rank][0], dist[rank][0] + dist[rank][1])

    if rank == 0:
        if os.path.isfile(outfile):
            os.remove(outfile)

    if mpiworld is not None:
        mpiworld.barrier()

    # create the covariance and inverse condition number map

    cov = None
    invcov = None
    rcond = None

    cov = DistPixels(
        comm=mpiworld,
        dtype=np.float64,
        size=npix,
        nnz=ncovnz,
        submap=subnpix,
        local=local,
    )

    if args.single:
        invcov = DistPixels(
            comm=mpiworld,
            dtype=np.float32,
            size=npix,
            nnz=ncovnz,
            submap=subnpix,
            local=local,
        )
    else:
        invcov = cov

    if args.rcond is not None:
        rcond = DistPixels(
            comm=mpiworld,
            dtype=np.float64,
            size=npix,
            nnz=nnz,
            submap=subnpix,
            local=local,
        )

    # read the covariance
    if rank == 0:
        log.info("Reading covariance from {}".format(infile))
    cov.read_healpix_fits(infile)

    # every process computes its local piece
    if rank == 0:
        log.info("Inverting covariance")
    covariance_invert(cov, args.threshold, rcond=rcond)

    if args.single:
        invcov.data[:] = cov.data.astype(np.float32)

    # write the inverted covariance
    if rank == 0:
        log.info("Writing inverted covariance to {}".format(outfile))
    invcov.write_healpix_fits(outfile)

    # write the condition number

    if args.rcond is not None:
        if rank == 0:
            log.info("Writing condition number map")
        rcond.write_healpix_fits(args.rcond)

    return
Example #33
import sys
from os import path

import healpy as hp
import matplotlib.pyplot as plt
import numpy as np

outdir = '../Repo/'

# these are the parameters of the footprint

footprint_fname = outdir + 'Footprints/FullOctant.fits'
footprint_res = 2048
footprint_tag = None
footprint_zrange = [0.8, 2.0]

# reddening map for checking where the footprint has been placed
reddening = hp.ud_grade(
    hp.read_map(outdir +
                'ExtinctionMaps/HFI_CompMap_ThermalDustModel_2048_R1.20.fits',
                field=2), footprint_res)

# creating boolean healpy map
(theta, phi) = hp.pix2ang(footprint_res,
                          np.arange(hp.nside2npix(footprint_res)))
footprint = np.zeros(hp.nside2npix(footprint_res), dtype=bool)

# square of 10 deg of size on the equator
footprint[(theta < np.pi / 2.) & (phi < np.pi / 2.)] = True
sky_fraction = footprint.sum() / footprint.size

# plot of the location
hp.mollview(footprint.astype(int) + reddening, max=0.2)
plt.savefig(outdir + 'Footprints/FullOctant.png')
Example #34
if len(args) != 3:
    raise ValueError('please supply exactly 3 input arguments\n%s' % usage)
gps = float(args[0])
inFITS = args[1]
outFITS = args[2]

if opts.source_coord not in ['C', 'E']:
    raise ValueError('--source-coord must be either "C" or "E"')
if opts.target_coord not in ['C', 'E']:
    raise ValueError('--target-coord must be either "C" or "E"')

#-------------------------------------------------

if opts.verbose:
    print "reading in : %s" % inFITS
post = hp.read_map(inFITS, verbose=False)

#------------------------

if opts.source_coord == "C":
    if opts.verbose:
        print "  interpretting as Equatorial coordinates (C)"

    if opts.target_coord == "C":
        if opts.verbose:
            print "  no rotation necessary (--target-coord=C)"

    elif opts.target_coord == "E":
        if opts.verbose:
            print "  rotating to Geographic coordinates (E) at time=%.3f" % gps
        post = triangulate.rotateMapC2E(post, gps)
Example #35
    y = -np.sin(phi0) * xyz[:, 0] + np.cos(phi0) * xyz[:, 1]
    z = np.sin(theta0) * np.cos(phi0) * xyz[:, 0] + np.sin(phi0) * np.sin(
        theta0) * xyz[:, 1] + np.cos(theta0) * xyz[:, 2]

    ipix = h.vec2pix(nside, x, y, z)

    dipole_out = amp_unit * dipole[ipix]
    return dipole_out


#++++++++++++++++++++++++++++++++++++++++

fname_map = sys.argv[1]
print fname_map

mapT = h.read_map(fname_map + '.fits', field=0)
mapQ = h.read_map(fname_map + '.fits', field=1)
mapU = h.read_map(fname_map + '.fits', field=2)

npix = len(mapT)
nside = h.npix2nside(npix)

dipole = gen_dipole(nside, 'K')
#h.mollview(dipole)
#py.savefig('dipole.png')
#os.system('display dipole.png&')
#sys.exit()

mapTQU = [mapT * 1.e-6 + dipole, mapQ * 1.e-6, mapU * 1.e-6]
#mapTQU = [mapT,mapQ,mapU]
h.write_map(fname_map + '_dipole.fits', mapTQU)
Example #36
cos_r1 = np.zeros(npix)
sin_r1 = np.zeros(npix)
cos_r2 = np.zeros(npix)
sin_r2 = np.zeros(npix)
cos_r4 = np.zeros(npix)
sin_r4 = np.zeros(npix)

for i in range(0, ydays):
    print 'current date: ', i, '/', int(ydays)
    liblb.gen_scan_c(theta_antisun, theta_boresight, freq_antisun,
                     freq_boresight, total_time, today_julian, sample_rate,
                     dir_out, filename, title, nside, runtime_i,
                     option_gen_ptgtxt)
    today_julian += 1.

    nhits += h.read_map('dataout/' + filename + '/nhits_tmp.fits')
    cos_r1 += h.read_map('dataout/' + filename + '/cos_r1_tmp.fits')
    sin_r1 += h.read_map('dataout/' + filename + '/sin_r1_tmp.fits')
    cos_r2 += h.read_map('dataout/' + filename + '/cos_r2_tmp.fits')
    sin_r2 += h.read_map('dataout/' + filename + '/sin_r2_tmp.fits')
    cos_r4 += h.read_map('dataout/' + filename + '/cos_r4_tmp.fits')
    sin_r4 += h.read_map('dataout/' + filename + '/sin_r4_tmp.fits')

    if (i % 20 == 0):
        h.write_map(
            'dataout/' + filename + '/nhits_' + str(ydays) + '_tmp.fits',
            nhits)
    if (i % 20 == 0):
        h.write_map(
            'dataout/' + filename + '/cos_r1_' + str(ydays) + '_tmp.fits',
            cos_r1)
Example #37
def check_TE2d(nu=100,
               lmax=300,
               maskfield=2,
               source_maskfield=0,
               label_loc='lower right',
               xmax=None):

    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu)
    I, Q, U = hp.read_map(data_path + map_name, field=(0, 1, 2))
    mask = hp.read_map(data_path + 'HFI_Mask_GalPlane-apo0_2048_R2.00.fits',
                       field=maskfield)
    smask = hp.read_map(data_path + 'HFI_Mask_PointSrc_2048_R2.00.fits',
                        field=source_maskfield)
    mask *= smask

    hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
    beamP = hdulist[beam_index['{}P'.format(nu)]].data.NOMINAL[0][:lmax + 1]
    beam = hdulist[beam_index['{}'.format(nu)]].data.NOMINAL[0][:lmax + 1]

    #tlm = get_Tlm(lmax=lmax, Imap=I, mask=mask,
    #              healpy_format=False, recalc=True, div_beam=beam)
    #elm,blm = get_ElmBlm(lmax=lmax, Qmap=Q, Umap=U, mask=mask,
    #              healpy_format=False, recalc=True, div_beam=beamP)
    tlm_hp = get_Tlm(lmax=lmax,
                     Imap=I,
                     mask=mask,
                     healpy_format=True,
                     recalc=True,
                     div_beam=beam)
    elm_hp, blm_hp = get_ElmBlm(lmax=lmax,
                                Qmap=Q,
                                Umap=U,
                                mask=mask,
                                healpy_format=True,
                                recalc=True,
                                div_beam=beamP)

    #cltt = cl_alm2d(tlm, lmax)
    #clee = cl_alm2d(elm, lmax)
    #clbb = cl_alm2d(blm, lmax)
    #l = np.arange(len(clee))
    clte_hp = hp.alm2cl(tlm_hp, elm_hp, lmax=lmax)
    #clee_hp = hp.alm2cl(elm_hp, lmax=lmax)
    #clbb_hp = hp.alm2cl(blm_hp, lmax=lmax)
    l_hp = np.arange(len(clte_hp))

    clplanck = np.loadtxt(
        data_path +
        'bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl')
    clte_planck = clplanck[:, 2]
    #clee_planck = clplanck[:,3]
    #clbb_planck = clplanck[:,4]
    l_planck = clplanck[:, 0]

    pl.figure()
    pl.title('TE check')
    #pl.plot(l, clee*l*(l+1)/2./np.pi*1e12, label='2d')
    pl.plot(l_hp,
            clte_hp * l_hp * (l_hp + 1) / 2. / np.pi * 1e12,
            label='healpy')
    pl.plot(l_planck, clte_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)
Example #38
matplotlib.use('Agg')
import healpy as H, numpy as numpy, os, sys, glob, pickle, gzip, argparse
import modules.scl_cmb as scl_cmb
import modules.sky_local as sky_local

sims = scl_cmb.simulations()
from pylab import *
import scipy.ndimage as ndimage

########################################################################
### read  planck map
print '\n\t\t read  planck map\t'
fname = 'data/LGMCA/WPR2_CMB_muK.fits'
fname = 'data_sp/sptsz/data/LGMCA/WPR2_CMB_muK.fits'
planck_healpix_MAP = H.read_map(fname, verbose=0)
nside = 2048
########################################################################

parser = argparse.ArgumentParser(description='')
parser.add_argument('-which_rand',
                    dest='which_rand',
                    action='store',
                    help='which_rand',
                    type=str,
                    default=None)
parser.add_argument('-redmapper',
                    dest='redmapper',
                    action='store',
                    help='redmapper',
                    type=int,
Example #39
     exit()
 option_file_name = argv[1]
 map_name = argv[2]
 # Load detail options from option file.
 option_list = aa.load(option_file_name)
 frame = option_list[0]
 alpha = option_list[1]
 delta = option_list[2]
 ds9_width = option_list[3]
 ds9_height = option_list[4]
 #-------------------------------------------------
 # Load map
 DL07_paras, hp_header = hp.read_map(
     map_name,
     # field indicates which column you choose to load, starting from 0.
     field=0,
     h=True,
     nest=None,
 )
 hp_hdu = fits.ImageHDU(DL07_paras, fits.Header(hp_header))
 hp_hdu.header['UNIT'] = r"$M_{sun}/kpc^2$"
 # Show the header of this map.
 hdul = fits.open(map_name)
 hdul.info()
 hdr = hdul[1].header
 print("### HEADER for this map ###")
 print(repr(hdr))
 print("### END of HEADER ###")
 #--------------------------------------------------
 # Initialize the target coordinate
 if frame == 'icrs':
Example #40
                Contributors: Alex Krolewski, Eric Baxter, Simone Ferraro, [Colin Hill](http://user.astro.columbia.edu/~jch/)
                
                Check out the source code on [GitHub](
                https://github.com/syasini/cmb-x-galaxy-overlaps) and press the star button to 
                show your support! 👉⭐ 
                """)

    # --------------
    # CMB Experiment
    # --------------

    # add a checkbox to select the CMB experiment
    st.sidebar.markdown("# CMB")
    cmb = st.sidebar.selectbox("(blue)", cmb_list, index=cmb_list.index("Planck-Gal-70"))
    cmb_fname = os.path.join(".", "masks", cmb + ".fits")
    cmb_mask = hp.read_map(cmb_fname)

    # add button for adding foregrounds (optional)
    add_foregrounds = st.sidebar.checkbox("Add Planck Foregrounds")
    foregrounds_fname = os.path.join(".", "masks", "Planck-Gal-70.fits")

    if add_foregrounds:
        foregrounds = hp.read_map(foregrounds_fname)
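        # combine the masks by taking the pixel-wise minimum, i.e. keep the more
        # conservative (smaller) value of the two at every pixel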
        cmb_mask = np.minimum(cmb_mask, foregrounds)

    # calculate and print the CMB f_sky
    cmb_fsky = get_fsky(cmb_mask)
    st.sidebar.text(f"f_sky = {cmb_fsky:.2f}")

    st.sidebar.markdown("---")
Example #41
0
writing ...  /Volumes/TimeMachine/data/mocks/mock.hpmask.dr7.fits
"""

import numpy as np
import fitsio as ft
import sys
import healpy as hp
sys.path.append('/Users/rezaie/github/DESILSS')
#from tools import hpix2radec

# data
data, h = ft.read(sys.argv[1], header=True)
print('data size', data.size)

# mock fracmap
frac = hp.read_map(sys.argv[2])

mhpix = np.argwhere(frac > 0).flatten()
print('mock > 0.0 ', mhpix.size)

# find the overlap
mockondata = np.in1d(data['hpix'], mhpix)
datamock = data[mockondata]
datamock['fracgood'] = frac[
    datamock['hpix']]  # replace the fracgood with the mocks

print('mock with imaging attrs ', datamock.size)
h['Note'] = 'This is for the mocks'
ft.write(sys.argv[3], datamock, header=h, clobber=True)
print('saving ... ', sys.argv[3])
Example #42
0
def project_maps(npix,
                 input_file=None,
                 RA=None,
                 DEC=None,
                 pxsize=None,
                 same_units=False,
                 same_res=False):
    '''Project HEALPIX maps using gnomonic view given coordinates.
    
    Parameters:
    -----------
        npix: int
            number of pixels
        input_file: str, optional
            Name of the data file to be used containing RA,DEC and pixel size.
        RA: float array, optional
            Right ascension of objects, ICRS coordinates are required. Default: None
        DEC: float array, optional
            Declination of objects, ICRS coordinates are required. Default:None
        pxsize: float, optional
            pixel size in arcminutes. Recommended: 1.5
        same_units: bool, optional
            if changed to True all Planck maps will be provided in units of K_CMB.
            Default: False
        same_res: bool, optional
            if changed to True all Planck maps will be provided with the resolution 
            of the lowest-frequency channel. Default: False
        
    Returns:
    --------
        output: array
            single image or data cube containing the projected maps. 
            If out_path is set, one or several files will be written
    '''

    if input_file is not None:
        data_new = dt.ascii.read(input_file)
        ra = np.array(data_new[:]['RA'])
        dec = np.array(data_new[:]['DEC'])
        pixsize = np.array(data_new[:]['pixel_size'])
        nclusters = len(ra)
    else:
        ra = RA
        dec = DEC
        pixsize = pxsize
        nclusters = 1

    freq = [143, 217, 353, 545,
            857]  # 143-353 GHz maps are in K_CMB, while 545 and 857 GHz are in MJy/sr
    nf = len(freq)

    A = (2 * np.sqrt(2 * np.log(2)))

    output = np.zeros((nclusters, nf, npix, npix))

    for i in np.arange(nclusters):

        for f in np.arange(nf):
            all_sky = hp.read_map('HFI_{0}_layer.fits'.format(f))
            projected_map = hp.gnomview(all_sky,
                                        coord=('G', 'C'),
                                        rot=(ra[i], dec[i]),
                                        return_projected_map=True,
                                        xsize=npix,
                                        reso=pixsize[i],
                                        no_plot=True)

            if same_units is True:  #from https://wiki.cosmos.esa.int/planckpla2015/index.php/UC_CC_Tables
                if f == 0:
                    projected_map *= 371.7327
                if f == 1:
                    projected_map *= 483.6874
                if f == 2:
                    projected_map *= 287.4517

            if same_res is True and f != 0:
                kernel = np.sqrt(
                    sz.planck_beams(freq[0])**2 - sz.planck_beams(freq[f])**2)
                print(sz.planck_beams(freq[0]), sz.planck_beams(freq[f]),
                      kernel / A / pixsize[i])
                projected_map = ndimage.gaussian_filter(projected_map,
                                                        sigma=kernel / A /
                                                        pixsize[i],
                                                        order=0,
                                                        mode="reflect",
                                                        truncate=10)

            output[i, f, :, :] = projected_map

    print(output.shape)
    return (output)
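
# A minimal call sketch for project_maps (not from the original source). The RA/DEC
# values are placeholders, the 'HFI_{0}_layer.fits' files read inside the function must
# exist locally, and pxsize is passed as an array because the function indexes it per object.
import numpy as np

example_ra = np.array([150.5])     # ICRS right ascension [deg], placeholder value
example_dec = np.array([2.2])      # ICRS declination [deg], placeholder value
cutouts = project_maps(npix=128, RA=example_ra, DEC=example_dec, pxsize=np.array([1.5]))
print(cutouts.shape)               # -> (1, 5, 128, 128): (object, frequency, y, x)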
Example #43
0
def handler(payload, root):

    run_test = False

    if run_test:
        s3path = 'test'
    else:
        s3path = 'fit'

    role = root.attrib['role']

    print("ROLE is ", role)
    params = {elem.attrib['name']:
              elem.attrib['value']
              for elem in root.iterfind('.//Param')}

    for key, value in params.items():
        print(key, '=', value)
    keys = params.keys()

    notices = [150, 151, 152, 153, 164]

    if int(params['Packet_Type']) in notices:
        gwa = gw_alert(
                graceid = params['GraceID'] if 'GraceID' in keys else 'ERROR',
                packet_type = params['Packet_Type'] if 'Packet_Type' in keys else 0,
                alert_type = params['AlertType'] if 'AlertType' in keys else 'ERROR',
                detectors = params['Instruments'] if 'Instruments' in keys else '',
                far = params['FAR'] if 'FAR' in keys else 0.0,
                skymap_fits_url = params['skymap_fits'] if 'skymap_fits' in keys else '',
                prob_bns = params['BNS'] if 'BNS' in keys else 0.0,
                prob_nsbh =  params['NSBH'] if 'NSBH' in keys else 0.0,
                prob_gap = params['MassGap'] if 'MassGap' in keys else 0.0,
                prob_bbh = params['BBH'] if 'BBH' in keys else 0.0,
                prob_terrestrial = params['Terrestrial'] if 'Terrestrial' in keys else 0.0,
                prob_hasns = params['HasNS'] if 'HasNS' in keys else 0.0,
                prob_hasremenant = params['HasRemnant'] if 'HasRemnant' in keys else 0.0,
                datecreated = datetime.datetime.now(),
                role = role,
                description =  "Not sure what to put here",
            )

        path_info = gwa.graceid + '-' + gwa.alert_type
        filter = [
                gw_alert.graceid == gwa.graceid,
                gw_alert.alert_type == gwa.alert_type
                ]
        alertinfo = db.session.query(gw_alert).filter(*filter).all()

        if len(alertinfo) > 0:
            path_info = path_info + str(len(alertinfo))

        if 'skymap_fits' in params:

            print("downloading skymap_fits")
            s3 = boto3.client('s3')
            downloadpath = '{}/{}.fits.gz'.format(s3path, path_info)
            r = requests.get(params['skymap_fits'])
            with io.BytesIO() as f:
                f.write(r.content)

                f.seek(0)
                s3.upload_fileobj(f, Bucket=config.AWS_BUCKET, Key=downloadpath)

                print("download finished")
                
            skymap, header = hp.read_map(params['skymap_fits'], h=True, verbose=False)

            header = dict(header)
            hkeys = header.keys()

            gwa.time_of_signal = header['DATE-OBS'] if 'DATE-OBS' in hkeys else '1991-12-23T19:15:00'
            gwa.distance = header['DISTMEAN'] if 'DISTMEAN' in hkeys else "-999.9"
            gwa.distance_error = header['DISTSTD'] if 'DISTSTD' in hkeys else "-999.9"
            gwa.timesent = header['DATE'] if 'DATE' in hkeys else '1991-12-23T19:15:00'

            print('Creating 90/50 contours')
            
            prob, _ = ligo.skymap.io.fits.read_sky_map(gwa.skymap_fits_url, nest=None)
            prob = interpolate_nested(prob, nest=True)
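            # rank pixels from most to least probable and assign each one the cumulative
            # probability (in percent) of all pixels at least as probable; contouring this
            # field at 50 and 90 then traces the 50%/90% credible regions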
            i = np.flipud(np.argsort(prob))
            cumsum = np.cumsum(prob[i])
            cls = np.empty_like(prob)
            cls[i] = cumsum * 100
            paths = list(ligo.skymap.postprocess.contour(cls, [50, 90], nest=True, degrees=True, simplify=True))
        
            contour_download_path = '{}/{}-contours-smooth.json'.format(s3path, path_info)
            with io.BytesIO() as cc:
                tt = json.dumps({
                    'type': 'FeatureCollection',
                    'features': [
                        {
                            'type': 'Feature',
                            'properties': {
                                'credible_level': contour
                            },
                            'geometry': {
                                'type': 'MultiLineString',
                                'coordinates': path
                            }
                        }
                        for contour, path in zip([50,90], paths)
                    ]
                })
                cc.write(tt.encode())
                cc.seek(0)
                s3.upload_fileobj(cc, Bucket=config.AWS_BUCKET, Key=contour_download_path)

            ####################

            print('Creating Fermi and LAT MOC files')
            ####################
            tos = datetime.datetime.strptime(gwa.time_of_signal, "%Y-%m-%dT%H:%M:%S.%f")
            #upload_GRB_MOC_toS3(s3, tos, gwa.graceid, s3path)
            fermi_moc_upload_path = '{}/{}-Fermi.json'.format(s3path, gwa.graceid)
            try:
                s3.head_object(Bucket=config.AWS_BUCKET, Key=fermi_moc_upload_path)
                print('Fermi file already exists')
            except:
                #calculate
                try:
                    earth_ra,earth_dec,earth_rad=function.getearthsatpos(tos)
                    contour = function.makeEarthContour(earth_ra,earth_dec,earth_rad)
                    skycoord = SkyCoord(contour, unit="deg", frame="icrs")
                    inside = SkyCoord(ra=earth_ra+180, dec=earth_dec, unit="deg", frame="icrs")
                    moc = MOC.from_polygon_skycoord(skycoord, max_depth=9)
                    moc = moc.complement()
                    mocfootprint = moc.serialize(format='json')
            
                    #store on S3
                    with io.BytesIO() as mm:
                        moc_string = json.dumps(mocfootprint)
                        mm.write(moc_string.encode())
                        mm.seek(0)
                        s3.upload_fileobj(mm, Bucket=config.AWS_BUCKET, Key=fermi_moc_upload_path)
                    print('Successfully Created Fermi MOC File for {}'.format(gwa.graceid))
                except:
                    print('ERROR in Fermi MOC creation for {}'.format(gwa.graceid))
        
            ####LAT Creation#####
            lat_moc_upload_path = '{}/{}-LAT.json'.format(s3path, gwa.graceid)
            try:
                s3.head_object(Bucket=config.AWS_BUCKET, Key=lat_moc_upload_path)
                print('LAT file already exists')
            except:
                try:
                    ra, dec = function.getFermiPointing(tos)
                    pointing_footprint=function.makeLATFoV(ra,dec)
                    skycoord = SkyCoord(pointing_footprint, unit="deg", frame="icrs")
                    moc = MOC.from_polygon_skycoord(skycoord, max_depth=9)
                    mocfootprint = moc.serialize(format='json')
            
                    with io.BytesIO() as ll:
                        moc_string = json.dumps(mocfootprint)
                        ll.write(moc_string.encode())
                        ll.seek(0)
                        s3.upload_fileobj(ll, Bucket=config.AWS_BUCKET, Key=lat_moc_upload_path)
                    print('Successfully Created LAT MOC File for {}'.format(gwa.graceid))
                except:
                    print('ERROR in LAT MOC creation for {}'.format(gwa.graceid))

        ###################

        if not run_test:
            db.session.add(gwa)
            print("committing\n")
            db.session.commit()
        else:
            print('Sleeping, you should kill')
            time.sleep(20)
    else:
        print("\nNot Ligo, Don't Care\n")
Example #44
0
def smooth_and_degrade(filename, bl, nside):
    mymap = hp.read_map(filename, verbose=False, field=(0, 1, 2))
    mymap = hp.smoothing(mymap, beam_window=bl, lmax=3 * nside - 1, verbose=False)
    mymap = hp.ud_grade(mymap, nside_out=nside)
    return mymap
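
# A minimal call sketch (not from the original source): build a Gaussian beam window
# with hp.gauss_beam and pass it to smooth_and_degrade. The FITS file name and the
# 1-degree FWHM are placeholders; the window length matches the lmax used in the function.
import numpy as np
import healpy as hp

nside_out = 128
bl = hp.gauss_beam(np.radians(1.0), lmax=3 * nside_out - 1)  # beam window, length 3*nside_out
iqu_lowres = smooth_and_degrade('input_iqu_map.fits', bl, nside_out)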
Example #45
0
def make_random(catfile,
                maskfile=None,
                savemaskfile=None,
                savethrowfile=None,
                outfile=None,
                factor=8,
                thresh=0.1,
                objtype=None,
                truthfile=None,
                chop=False):
    print("Reading Input catalog: {}".format(catfile))
    #datacat=Table.read(catfile)
    cat = fits.open(catfile)
    datacat = cat[1].data

    ztypes = ['Z_COSMO', 'TRUEZ', 'Z']
    try:
        z = datacat['Z_COSMO']
        print("Using Z_COSMO for z")
    except:
        try:
            z = datacat['TRUEZ']
            print("Using TRUEZ for z")
        except:
            try:
                z = datacat['Z']
                print("Using Z for z")
            except:
                raise ValueError(
                    "None of the specified z-types match. Check fits header")
    if truthfile is not None:  #- required to match targetid for ra,dec
        tru = fits.open(truthfile)
        trucat = tru[1].data
        truid = trucat['TARGETID']
        dataid = datacat['TARGETID']
        #- map targetid sorted as in dataid
        tt = np.argsort(truid)
        ss = np.searchsorted(truid[tt], dataid)
        srt_idx = tt[ss]
        np.testing.assert_array_equal(truid[srt_idx], dataid)
        print("100% targets matched for data catalog")
        ra = trucat['RA'][srt_idx]
        dec = trucat['DEC'][srt_idx]
    else:
        ra = datacat['ra']
        dec = datacat['dec']

    #-select the specified object
    if objtype is not None:
        print("Selecting obj type {} for randoms".format(objtype))
        try:
            kk = np.where(datacat['SOURCETYPE'] == objtype)[0]
            print("Using sourcetype {}".format(objtype))
        except:
            try:
                kk = np.where(datacat['SPECTYPE'] == objtype)[0]
                print("Using spectype {}".format(objtype))
            except:
                raise ValueError(
                    "Objtype doesn't match header key. Check fits header")
        print("Total {} in the data: {}".format(objtype, len(kk)))
        ra = ra[kk]
        dec = dec[kk]
        z = z[kk]
    else:
        print("Working on full catalog")
        print("Total objects: {}".format(len(z)))

    #- mask first
    if maskfile is None:
        print("Creating mask")
        theta, phi = radec2thetaphi(ra, dec)
        mask, throwmask = make_mask(theta,
                                    phi,
                                    thresh=thresh,
                                    nside=32,
                                    outfile=savemaskfile,
                                    throwfile=savethrowfile)
    else:
        print("Reading maskfile: {}".format(maskfile))
        mask = hp.read_map(maskfile)
    print("Generating random: factor {}".format(factor))
    ra_rnd, dec_rnd, z_rnd = generate_rnd(z, mask, factor=factor)
    #wt_rnd=np.ones_like(ra_rnd)

    #- Now cut to data boundaries
    if chop:
        cut = np.logical_and(ra_rnd >= np.min(ra), ra_rnd <= np.max(ra))
        ra_rnd = ra_rnd[cut]
        dec_rnd = dec_rnd[cut]
        z_rnd = z_rnd[cut]
        cut = np.logical_and(dec_rnd >= np.min(dec), dec_rnd <= np.max(dec))
        ra_rnd = ra_rnd[cut]
        dec_rnd = dec_rnd[cut]
        z_rnd = z_rnd[cut]
    wt_rnd = np.ones_like(ra_rnd)
    print("Data size: {}".format(len(ra)))
    print("Random size: {}".format(len(ra_rnd)))
    if outfile is not None:
        randata = Table([ra_rnd, dec_rnd, z_rnd, wt_rnd],
                        names=('ra', 'dec', 'z', 'wt'))
        randata.write(outfile, overwrite=True)
        print("Written random catalog file {}".format(outfile))
Example #46
0
 for ifile in args.ifiles[comm.rank::comm.size]:
     name = os.path.basename(ifile)
     runit = unit
     #if "545" in name:
     #	runit *= factor_545
     #	npol = 1
     #elif "857" in name:
     #	runit *= factor_857
     #	npol = 1
     #else:
     #	npol = 3
     npol = args.npol
     fields = range(npol)
     omap = enmap.zeros((npol, ) + shape, wcs, dtype)
     progress("%s TQU read" % name)
     imap = np.array(healpy.read_map(ifile, fields)).astype(dtype)
     nside = healpy.npix2nside(imap.shape[-1])
     progress("%s TQU mask" % name)
     imap[healpy.mask_bad(imap)] = 0
     progress("%s TQU scale" % name)
     imap *= runit
     nside = healpy.npix2nside(imap.shape[-1])
     lmax = args.lmax or 3 * nside
     progress("%s TQU alm2map" % name)
     alm = curvedsky.map2alm_healpix(imap, lmax=lmax)
     del imap
     # work around healpix bug
     #progress("%s TQU rotate_alm" % name)
     #alm   = alm.astype(np.complex128,copy=False)
     #healpy.rotate_alm(alm, euler[0], euler[1], euler[2])
     #alm   = alm.astype(ctype,copy=False)
Example #47
0
	
	clat = np.cos(HAWClat)
	slat = np.sin(HAWClat)
	
	#initial CR distribution isotropic:
	for i in range(0,npix) :
		CRmap[i] = isovalue
	
	# choose the step from iteration method
	iteration  = 20
	
	namefits = foldername + 'CR_64_360_iteration' + str(iteration) + '.fits'
	nameEfits = foldername + 'exposure_64_360_iteration' + str(iteration) +'.dat'
	namenorm = foldername + 'norm_64_360_iteration' + str(iteration) + '.dat'
	
	diffCRmap = H.read_map(namefits) 
	norm = np.array(pickle.load(open(namenorm, "r" )))[0]
	Emap0 = np.array(pickle.load(open(nameEfits, "r" )))[0]
	
	Nmap = np.zeros((Ntimestep,npix), dtype=np.double)
	
	for timeidx in range(0,Ntimestep) :
		#namefile = '../Skymap/HAWCoriginal/hawclocal-v126-nhit30-run000630-001089_N256_' + '{:03d}'.format(timeidx) + '_of_360.fits.gz'
		
		namefile = './paper1_newfreq_nside64_360steps/local_timeindex' + '{:03d}'.format(timeidx) + '.fits'
		tempmap = H.read_map(namefile)
		Nmap[timeidx] = H.ud_grade(tempmap,nside)*(npixhigh/(1.*npix))
	
	
	tempmap = np.zeros(npix, dtype=np.double)
	for j in range(0,npix) :
Example #48
0
def read_lotss_noise_weight_map(nside, data_release, flux_min_cut, signal_to_noise):
    file_path = os.path.join(DATA_PATH, 'LoTSS/DR{}/weight_map__mean_minflux-{}_snr-{}.fits'.format(
        data_release, flux_min_cut, signal_to_noise))
    weight_map = hp.read_map(file_path)
    weight_map = hp.ud_grade(weight_map, nside)
    return weight_map
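
# Example call sketch (parameter values are placeholders, not from the original source);
# it assumes the matching LoTSS weight-map FITS file exists under DATA_PATH.
lotss_weights = read_lotss_noise_weight_map(nside=256, data_release=2,
                                            flux_min_cut=2, signal_to_noise=5)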
Example #49
0
print("...loading 2MPZ catalogue...")
cat = Load2MPZ(twoMPZ_cat)
print("...done...")

if which_map == 0:
	print("...loading PlanckLens alms...")
	kappa_lm = hp.read_alm(planck_alms)
	print("...done...")

	print("...converting PlanckLens alms to map...")
	kappa = hp.alm2map(kappa_lm, nside)
	print("...done...")

elif which_map ==1:
	print("...reading SMICA map now: %s..." %(smica_map_file))
	kappa = hp.read_map(smica_map_file, field=0, verbose=0) #not kappa but SMICA: just T now.
	print("...done...")
elif which_map ==2:
	print("...reading LGMCA map now: %s..." %(lgmca_map_file))
	kappa = hp.read_map(lgmca_map_file, field=0, verbose=0) #not kappa but LGMCA: just T now.
	print("...done...")

print("...loading PlanckLens mask...")
mask_planck = hp.read_map(planck_mask, verbose=0)
print("...done...")

print("...loading 2MPZ mask...")
mask_twompz = hp.read_map(twoMPZ_mask, verbose=0)
print("...done...")

mask = mask_twompz * mask_planck
Example #50
0
    def test_noise(self):
        # generate noise timestreams from the noise model
        nsig = OpSimNoise()
        nsig.exec(self.data)

        # make a simple pointing matrix
        pointing = OpPointingHpix(nside=self.map_nside, nest=True)
        pointing.exec(self.data)

        handle = None
        if self.comm.rank == 0:
            handle = open(
                os.path.join(self.outdir, "out_test_ground_noise_info"), "w")
        self.data.info(handle, common_flag_mask=self.common_flag_mask)
        if self.comm.rank == 0:
            handle.close()

        # For noise weighting in madam, we know we are using an analytic noise
        # and so we can use noise weights based on the NET.  This is instrument
        # specific.

        tod = self.data.obs[0]['tod']
        nse = self.data.obs[0]['noise']
        detweights = {}
        for d in tod.local_dets:
            detweights[d] = 1.0 / (self.rate * nse.NET(d)**2)

        # make a binned map with madam
        madam_out = os.path.join(self.outdir, "madam_noise")
        if self.comm.rank == 0:
            if os.path.isdir(madam_out):
                shutil.rmtree(madam_out)
            os.mkdir(madam_out)

        pars = {}
        pars['kfirst'] = 'F'
        pars['base_first'] = 1.0
        pars['fsample'] = self.rate
        pars['nside_map'] = self.map_nside
        pars['nside_cross'] = self.map_nside
        pars['nside_submap'] = min(8, self.map_nside)
        pars['write_map'] = 'F'
        pars['write_binmap'] = 'T'
        pars['write_matrix'] = 'F'
        pars['write_wcov'] = 'F'
        pars['write_hits'] = 'T'
        pars['kfilter'] = 'F'
        pars['path_output'] = madam_out
        pars['info'] = 0

        madam = OpMadam(params=pars,
                        detweights=detweights,
                        name='noise',
                        common_flag_mask=self.common_flag_mask)
        if madam.available:
            madam.exec(self.data)

            if self.comm.rank == 0:
                import matplotlib.pyplot as plt

                hitsfile = os.path.join(madam_out, 'madam_hmap.fits')
                hits = hp.read_map(hitsfile, nest=True)

                outfile = "{}.png".format(hitsfile)
                hp.mollview(hits, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                binfile = os.path.join(madam_out, 'madam_bmap.fits')
                bins = hp.read_map(binfile, nest=True)

                outfile = "{}.png".format(binfile)
                hp.mollview(bins, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                # check that pixel rms makes sense given the
                # number of hits and the timestream rms
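                # (for white noise the pixel rms scales as the per-sample rms divided by
                # sqrt(hits), so bins * sqrt(hits) should have an rms close to
                # NET * sqrt(sample rate))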

                tothits = np.sum(hits)
                nt.assert_equal(self.ndet * ((self.data.comm.ngroups * \
                    self.totsamp) - self.nflagged), tothits)

                mask = (bins > -1.0e20)
                #print("num good pix = ", len(mask))
                rthits = np.sqrt(hits[mask].astype(np.float64))
                #print("rthits = ", rthits)
                #print("bmap = ", bins[mask])
                weighted = bins[mask] * rthits
                #print("weighted = ", weighted)
                pixrms = np.sqrt(np.mean(weighted**2))
                todrms = self.NET * np.sqrt(self.rate)
                relerr = np.absolute(pixrms - todrms) / todrms
                #print("pixrms = ", pixrms)
                #print("todrms = ", todrms)
                #print("relerr = ", relerr)
                self.assertTrue(relerr < 0.03)
        else:
            print("libmadam not available, skipping tests")
        return
Example #51
0
def check_EBlm2d(nu1=100,
                 nu2=143,
                 lmax=300,
                 maskfield=2,
                 source_maskfield=0,
                 label_loc='lower right',
                 xmax=None):

    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu1)
    Q1, U1 = hp.read_map(data_path + map_name, field=(1, 2))
    map_name = 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(nu2)
    Q2, U2 = hp.read_map(data_path + map_name, field=(1, 2))
    mask = hp.read_map(data_path + 'HFI_Mask_GalPlane-apo0_2048_R2.00.fits',
                       field=maskfield)
    smask = hp.read_map(data_path + 'HFI_Mask_PointSrc_2048_R2.00.fits',
                        field=source_maskfield)
    mask *= smask

    hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
    beam1 = hdulist[beam_index['{}P'.format(nu1)]].data.NOMINAL[0][:lmax + 1]
    beam2 = hdulist[beam_index['{}P'.format(nu2)]].data.NOMINAL[0][:lmax + 1]

    elm1, blm1 = get_ElmBlm(lmax=lmax,
                            Qmap=Q1,
                            Umap=U1,
                            mask=mask,
                            healpy_format=False,
                            recalc=True,
                            div_beam=beam1)
    elm_hp1, blm_hp1 = get_ElmBlm(lmax=lmax,
                                  Qmap=Q1,
                                  Umap=U1,
                                  mask=mask,
                                  healpy_format=True,
                                  recalc=True,
                                  div_beam=beam1)
    elm2, blm2 = get_ElmBlm(lmax=lmax,
                            Qmap=Q2,
                            Umap=U2,
                            mask=mask,
                            healpy_format=False,
                            recalc=True,
                            div_beam=beam2)
    elm_hp2, blm_hp2 = get_ElmBlm(lmax=lmax,
                                  Qmap=Q2,
                                  Umap=U2,
                                  mask=mask,
                                  healpy_format=True,
                                  recalc=True,
                                  div_beam=beam2)

    clee = cl_alm2d(alm1=elm1, alm2=elm2, lmax=lmax)
    clbb = cl_alm2d(alm1=blm1, alm2=blm2, lmax=lmax)
    l = np.arange(len(clee))
    clee_hp = hp.alm2cl(elm_hp1, elm_hp2, lmax=lmax)
    clbb_hp = hp.alm2cl(blm_hp1, blm_hp2, lmax=lmax)
    l_hp = np.arange(len(clee_hp))

    clplanck = np.loadtxt(
        data_path +
        'bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl')
    clee_planck = clplanck[:, 3]
    clbb_planck = clplanck[:, 4]
    l_planck = clplanck[:, 0]

    pl.figure()
    pl.title('EE check')
    pl.plot(l, clee * l * (l + 1) / 2. / np.pi * 1e12, label='2d')
    pl.plot(l_hp, clee_hp * l_hp * (l_hp + 1) / 2. / np.pi * 1e12, label='healpy')
    pl.plot(l_planck, clee_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)

    pl.figure()
    pl.title('BB check')
    pl.plot(l, clbb * l * (l + 1) / 2. / np.pi * 1e12, label='2d')
    pl.plot(l_hp,
            clbb_hp * l_hp * (l_hp + 1) / 2. / np.pi * 1e12,
            label='healpy')
    pl.plot(l_planck, clbb_planck, label='planck best fit')
    pl.legend(loc=label_loc)
    if xmax is None:
        pl.xlim(xmax=lmax)
    else:
        pl.xlim(xmax=xmax)
Example #52
0
    def test_hwpconst(self):
        # make a pointing matrix with a HWP that is constant
        hwpstep = 2.0 * np.pi
        hwpsteptime = (self.totsamp / self.rate) / 60.0
        pointing = OpPointingHpix(nside=self.map_nside,
                                  nest=True,
                                  hwpstep=hwpstep,
                                  hwpsteptime=hwpsteptime)
        pointing.exec(self.data)

        # get locally hit pixels
        lc = OpLocalPixels()
        localpix = lc.exec(self.data)

        # construct a sky gradient operator, just to get the signal
        # map- we are not going to use the operator on the data.
        grad = OpSimGradient(nside=self.sim_nside, nest=True)
        sig = grad.sigmap()

        # pick a submap size and find the local submaps.
        submapsize = np.floor_divide(self.sim_nside, 16)
        localsm = np.unique(np.floor_divide(localpix, submapsize))

        # construct a distributed map which has the gradient
        npix = 12 * self.sim_nside * self.sim_nside
        distsig = DistPixels(comm=self.data.comm.comm_group,
                             size=npix,
                             nnz=1,
                             dtype=np.float64,
                             submap=submapsize,
                             local=localsm)
        lsub, lpix = distsig.global_to_local(localpix)
        distsig.data[lsub,
                     lpix, :] = np.array([sig[x]
                                          for x in localpix]).reshape(-1, 1)

        # create TOD from map
        scansim = OpSimScan(distmap=distsig)
        scansim.exec(self.data)

        handle = None
        if self.comm.rank == 0:
            handle = open(
                os.path.join(self.outdir, "out_test_ground_hwpconst_info"),
                "w")
        self.data.info(handle, common_flag_mask=self.common_flag_mask)
        if self.comm.rank == 0:
            handle.close()

        # make a binned map with madam
        madam_out = os.path.join(self.outdir, "madam_hwpconst")
        if self.comm.rank == 0:
            if os.path.isdir(madam_out):
                shutil.rmtree(madam_out)
            os.mkdir(madam_out)

        pars = {}
        pars['kfirst'] = 'F'
        pars['base_first'] = 1.0
        pars['fsample'] = self.rate
        pars['nside_map'] = self.map_nside
        pars['nside_cross'] = self.map_nside
        pars['nside_submap'] = min(8, self.map_nside)
        pars['write_map'] = 'F'
        pars['write_binmap'] = 'T'
        pars['write_matrix'] = 'F'
        pars['write_wcov'] = 'F'
        pars['write_hits'] = 'T'
        pars['kfilter'] = 'F'
        pars['path_output'] = madam_out
        pars['info'] = 0

        madam = OpMadam(params=pars,
                        name='scan',
                        common_flag_mask=self.common_flag_mask)
        if madam.available:
            madam.exec(self.data)

            if self.comm.rank == 0:
                import matplotlib.pyplot as plt

                hitsfile = os.path.join(madam_out, 'madam_hmap.fits')
                hits = hp.read_map(hitsfile, nest=True)

                outfile = "{}.png".format(hitsfile)
                hp.mollview(hits, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                binfile = os.path.join(madam_out, 'madam_bmap.fits')
                bins = hp.read_map(binfile, nest=True)

                outfile = "{}.png".format(binfile)
                hp.mollview(bins, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                # compare binned map to input signal

                tothits = np.sum(hits)
                nt.assert_equal(self.ndet * ((self.data.comm.ngroups * \
                    self.totsamp) - self.nflagged), tothits)
                mask = (bins > -1.0e20)
                nt.assert_almost_equal(bins[mask], sig[mask], decimal=4)
        else:
            print("libmadam not available, skipping tests")
        return
Example #53
0
def check_cl_sims(
        nmaps=1,
        lmax=1000,
        nside=2048,
        read_file=False,
        filename='testsky.fits',
        frequency=100,
        beam=None,
        beamP=None,
        smear=True,
        nonoise=False,
        cl_file='bf_base_cmbonly_plikHMv18_TT_lowTEB_lmax4000.minimum.theory_cl'
):

    if read_file and os.path.exists(data_path + filename):
        Tmap, Qmap, Umap = hp.read_map(data_path + filename, field=(0, 1, 2))
    else:
        if nonoise:
            Tmap, Qmap, Umap = simulate_cmb(nside=nside,
                                            lmax=lmax,
                                            save=False,
                                            smear=smear,
                                            beam=beam,
                                            beamP=beamP,
                                            cl_file=cl_file)
        else:
            Tmap, Qmap, Umap = observe_cmb_sky(save=False,
                                               nside=nside,
                                               npix=None,
                                               lmax=3000,
                                               frequency=frequency,
                                               beam=beam,
                                               beamP=beamP,
                                               cl_file=cl_file)

    Tlm = hp.map2alm(Tmap, lmax=lmax)
    Elm, Blm = hp.map2alm_spin((Qmap, Umap), 2, lmax=lmax)

    if smear:
        if (beam is None) or (beamP is None):
            hdulist = fits.open(data_path + 'HFI_RIMO_Beams-100pc_R2.00.fits')
            beam = hdulist[beam_index['{}'.format(
                frequency)]].data.NOMINAL[0][:lmax + 1]
            beamP = hdulist[beam_index['{}P'.format(
                frequency)]].data.NOMINAL[0][:lmax + 1]
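        # divide the alms by the beam window functions (beam deconvolution), so the
        # spectra computed below can be compared directly to the unsmoothed theory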
        hp.sphtfunc.almxfl(Tlm, 1. / beam, inplace=True)
        hp.sphtfunc.almxfl(Elm, 1. / beamP, inplace=True)
        hp.sphtfunc.almxfl(Blm, 1. / beamP, inplace=True)

    ls = np.arange(lmax + 1)
    factor = ls * (ls + 1.) / (2. * np.pi)

    cltt = hp.alm2cl(Tlm) * factor
    clee = hp.alm2cl(Elm) * factor
    clbb = hp.alm2cl(Blm) * factor
    clte = hp.alm2cl(Tlm, Elm) * factor

    cl = get_theory_cmb()
    ls_theory = cl[0][:lmax + 1]
    factor_theory = ls_theory * (ls_theory + 1.) / (2. * np.pi)

    cltt_theory = cl[1][:lmax + 1] * factor_theory
    clee_theory = cl[2][:lmax + 1] * factor_theory
    clbb_theory = cl[3][:lmax + 1] * factor_theory
    clte_theory = cl[4][:lmax + 1] * factor_theory

    plt.figure()
    plt.plot(ls, cltt, label='sims')
    plt.plot(ls_theory, cltt_theory, label='theory TT')
    plt.legend()

    plt.figure()
    plt.plot(ls, clte, label='sims')
    plt.plot(ls_theory, clte_theory, label='theory TE')
    plt.legend()

    plt.figure()
    plt.plot(ls, clee, label='sims')
    plt.plot(ls_theory, clee_theory, label='theory EE')
    plt.legend()

    plt.figure()
    plt.plot(ls, clbb, label='sims')
    plt.plot(ls_theory, clbb_theory, label='theory BB')
    plt.legend()
Example #54
0
    def test_binned(self):
        rank = 0
        if self.comm is not None:
            rank = self.comm.rank
        # flag data outside valid intervals
        gapflagger = OpFlagGaps()
        gapflagger.exec(self.data)

        # generate noise timestreams from the noise model
        nsig = OpSimNoise()
        nsig.exec(self.data)

        # make a simple pointing matrix
        pointing = OpPointingHpix(nside=self.map_nside, nest=True, mode="IQU")
        pointing.exec(self.data)

        handle = None
        if rank == 0:
            handle = open(os.path.join(self.outdir, "info.txt"), "w")
        self.data.info(handle=handle)
        if rank == 0:
            handle.close()

        # construct distributed maps to store the covariance,
        # noise weighted map, and hits

        invnpp = DistPixels(self.data,
                            comm=self.data.comm.comm_world,
                            nnz=6,
                            dtype=np.float64)
        invnpp.data.fill(0.0)

        zmap = DistPixels(self.data,
                          comm=self.data.comm.comm_world,
                          nnz=3,
                          dtype=np.float64)
        zmap.data.fill(0.0)

        hits = DistPixels(self.data,
                          comm=self.data.comm.comm_world,
                          nnz=1,
                          dtype=np.int64)
        hits.data.fill(0)

        # accumulate the inverse covariance and noise weighted map.
        # Use detector weights based on the analytic NET.

        tod = self.data.obs[0]["tod"]
        nse = self.data.obs[0]["noise"]
        detweights = {}
        for d in tod.local_dets:
            detweights[d] = 1.0 / (self.rate * nse.NET(d)**2)

        build_invnpp = OpAccumDiag(detweights=detweights,
                                   invnpp=invnpp,
                                   hits=hits,
                                   zmap=zmap,
                                   name="noise")
        build_invnpp.exec(self.data)

        invnpp.allreduce()
        hits.allreduce()
        zmap.allreduce()

        hits.write_healpix_fits(os.path.join(self.outdir, "hits.fits"))
        invnpp.write_healpix_fits(os.path.join(self.outdir, "invnpp.fits"))
        zmap.write_healpix_fits(os.path.join(self.outdir, "zmap.fits"))

        # invert it
        covariance_invert(invnpp, 1.0e-3)

        invnpp.write_healpix_fits(os.path.join(self.outdir, "npp.fits"))

        # compute the binned map, N_pp x Z

        covariance_apply(invnpp, zmap)
        zmap.write_healpix_fits(os.path.join(self.outdir, "binned.fits"))

        # compare with MADAM

        madam_out = os.path.join(self.outdir, "madam")
        if rank == 0:
            if os.path.isdir(madam_out):
                shutil.rmtree(madam_out)
            os.mkdir(madam_out)

        pars = {}
        pars["temperature_only"] = "F"
        pars["force_pol"] = "T"
        pars["kfirst"] = "F"
        pars["base_first"] = 1.0
        pars["fsample"] = self.rate
        pars["nside_map"] = self.map_nside
        pars["nside_cross"] = self.map_nside
        pars["nside_submap"] = self.map_nside
        pars["pixlim_cross"] = 1.0e-2
        pars["pixlim_map"] = 1.0e-3
        pars["write_map"] = "F"
        pars["write_binmap"] = "T"
        pars["write_matrix"] = "T"
        pars["write_wcov"] = "T"
        pars["write_hits"] = "T"
        pars["kfilter"] = "F"
        pars["path_output"] = madam_out
        pars["info"] = 0

        madam = OpMadam(params=pars, detweights=detweights, name="noise")

        if madam.available:
            madam.exec(self.data)

            if rank == 0:
                import matplotlib.pyplot as plt

                hitsfile = os.path.join(madam_out, "madam_hmap.fits")
                hits = hp.read_map(hitsfile, nest=True, verbose=False)

                outfile = "{}.png".format(hitsfile)
                hp.mollview(hits, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                toastfile = os.path.join(self.outdir, "hits.fits")
                toasthits = hp.read_map(toastfile, nest=True, verbose=False)

                nt.assert_equal(hits, toasthits)

                tothits = np.sum(hits)
                nt.assert_equal(self.validsamp, tothits)

                covfile = os.path.join(madam_out, "madam_wcov_inv.fits")
                cov = hp.read_map(covfile,
                                  nest=True,
                                  field=None,
                                  verbose=False)

                toastfile = os.path.join(self.outdir, "invnpp.fits")
                toastcov = hp.read_map(toastfile,
                                       nest=True,
                                       field=None,
                                       verbose=False)
                """
                for p in range(6):
                    print("elem {} madam min/max = ".format(p),
                          np.min(cov[p]), " / ", np.max(cov[p]))
                    print("elem {} toast min/max = ".format(p),
                          np.min(toastcov[p]), " / ", np.max(toastcov[p]))
                    print("elem {} invNpp max diff = ".format(p),
                          np.max(np.absolute(toastcov[p] - cov[p])))
                    nt.assert_almost_equal(cov[p], toastcov[p])
                """

                covfile = os.path.join(madam_out, "madam_wcov.fits")
                cov = hp.read_map(covfile,
                                  nest=True,
                                  field=None,
                                  verbose=False)

                toastfile = os.path.join(self.outdir, "npp.fits")
                toastcov = hp.read_map(toastfile,
                                       nest=True,
                                       field=None,
                                       verbose=False)
                """
                for p in range(6):
                    covdiff = toastcov[p] - cov[p]
                    print("elem {} madam min/max = ".format(p),
                          np.min(cov[p]), " / ", np.max(cov[p]))
                    print("elem {} toast min/max = ".format(p),
                          np.min(toastcov[p]), " / ", np.max(toastcov[p]))
                    print("elem {} Npp max diff = ".format(p),
                          np.max(np.absolute(covdiff[p])))
                    print("elem {} Npp mean / rms diff = ".format(p),
                          np.mean(covdiff[p]), " / ", np.std(covdiff[p]))
                    print("elem {} Npp relative diff mean / rms = ".format(p),
                          np.mean(np.absolute(covdiff[p]/cov[p])), " / ",
                          np.std(np.absolute(covdiff[p]/cov[p])))
                    nt.assert_almost_equal(cov[p], toastcov[p])
                """

                binfile = os.path.join(madam_out, "madam_bmap.fits")
                bins = hp.read_map(binfile,
                                   nest=True,
                                   field=None,
                                   verbose=False)
                mask = hp.mask_bad(bins[0])
                bins[0][mask] = 0.0
                mask = hp.mask_bad(bins[1])
                bins[1][mask] = 0.0
                mask = hp.mask_bad(bins[2])
                bins[2][mask] = 0.0

                outfile = "{}_I.png".format(binfile)
                hp.mollview(bins[0], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_Q.png".format(binfile)
                hp.mollview(bins[1], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_U.png".format(binfile)
                hp.mollview(bins[2], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                toastfile = os.path.join(self.outdir, "binned.fits")
                toastbins = hp.read_map(toastfile,
                                        nest=True,
                                        field=None,
                                        verbose=False)

                outfile = "{}_I.png".format(toastfile)
                hp.mollview(toastbins[0], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_Q.png".format(toastfile)
                hp.mollview(toastbins[1], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_U.png".format(toastfile)
                hp.mollview(toastbins[2], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                # compare binned map to madam output

                diffmap = toastbins[0] - bins[0]
                mask = bins[0] != 0
                """
                print("toast/madam I diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("toast/madam I diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[0][mask]), " / ",
                      np.max(diffmap[mask]/bins[0][mask]))
                """
                outfile = "{}_diff_madam_I.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                # nt.assert_almost_equal(bins[0][mask], binserial[0][mask],
                #                       decimal=6)

                diffmap = toastbins[1] - bins[1]
                mask = bins[1] != 0
                """
                print("toast/madam Q diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("toast/madam Q diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[1][mask]), " / ",
                      np.max(diffmap[mask]/bins[1][mask]))
                """
                outfile = "{}_diff_madam_Q.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                # nt.assert_almost_equal(bins[1][mask], binserial[1][mask],
                #                       decimal=6)

                diffmap = toastbins[2] - bins[2]
                mask = bins[2] != 0
                """
                print("toast/madam U diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("toast/madam U diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[2][mask]), " / ",
                      np.max(diffmap[mask]/bins[2][mask]))
                """
                outfile = "{}_diff_madam_U.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                # nt.assert_almost_equal(bins[2][mask], binserial[2][mask],
                #                       decimal=6)

                # compute the binned map serially as a check

                zfile = os.path.join(self.outdir, "zmap.fits")
                ztoast = hp.read_map(zfile,
                                     nest=True,
                                     field=None,
                                     verbose=False)

                binserial = np.copy(ztoast)
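                # apply the 3x3 symmetric pixel covariance (stored as its 6 upper-triangle
                # elements) to the noise-weighted map, pixel by pixel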
                for p in range(self.map_npix):
                    binserial[0][p] = (toastcov[0][p] * ztoast[0][p] +
                                       toastcov[1][p] * ztoast[1][p] +
                                       toastcov[2][p] * ztoast[2][p])
                    binserial[1][p] = (toastcov[1][p] * ztoast[0][p] +
                                       toastcov[3][p] * ztoast[1][p] +
                                       toastcov[4][p] * ztoast[2][p])
                    binserial[2][p] = (toastcov[2][p] * ztoast[0][p] +
                                       toastcov[4][p] * ztoast[1][p] +
                                       toastcov[5][p] * ztoast[2][p])

                toastfile = os.path.join(self.outdir, "binned_serial")
                outfile = "{}_I.png".format(toastfile)
                hp.mollview(binserial[0], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_Q.png".format(toastfile)
                hp.mollview(binserial[1], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                outfile = "{}_U.png".format(toastfile)
                hp.mollview(binserial[2], xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                # compare binned map to madam output

                diffmap = binserial[0] - bins[0]
                mask = bins[0] != 0
                """
                print("serial/madam I diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("serial/madam I diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[0][mask]), " / ",
                      np.max(diffmap[mask]/bins[0][mask]))
                """
                outfile = "{}_diff_madam_I.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                nt.assert_almost_equal(bins[0][mask],
                                       binserial[0][mask],
                                       decimal=3)

                diffmap = binserial[1] - bins[1]
                mask = bins[1] != 0
                """
                print("serial/madam Q diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("serial/madam Q diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[1][mask]), " / ",
                      np.max(diffmap[mask]/bins[1][mask]))
                """
                outfile = "{}_diff_madam_Q.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                nt.assert_almost_equal(bins[1][mask],
                                       binserial[1][mask],
                                       decimal=3)

                diffmap = binserial[2] - bins[2]
                mask = bins[2] != 0
                """
                print("serial/madam U diff mean / std = ",
                      np.mean(diffmap[mask]), np.std(diffmap[mask]))
                print("serial/madam U diff rel ratio min / max = ",
                      np.min(diffmap[mask]/bins[2][mask]), " / ",
                      np.max(diffmap[mask]/bins[2][mask]))
                """
                outfile = "{}_diff_madam_U.png".format(toastfile)
                hp.mollview(diffmap, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()
                nt.assert_almost_equal(bins[2][mask],
                                       binserial[2][mask],
                                       decimal=3)

        else:
            if rank == 0:
                print("libmadam not available, skipping tests", flush=True)

        return
Example #55
0
np.savetxt(fname, b.get_effective_ells())
##############################################################################
############################ DES Clustering ##################################
##############################################################################

des_folder_gcl = 'des_clustering'
# des_nside = 4096
des_mask = 'mask_ns{}.fits'.format(nside)

des_data_folder = os.path.join(data_folder, des_folder_gcl)

des_mask_path = os.path.join(des_data_folder, des_mask)

# Read mask
# mask_lss = hp.ud_grade(hp.read_map(des_mask_path, verbose=False), nside_out=2048)
des_mask = hp.read_map(des_mask_path, verbose=False)
des_mask_good = des_mask > gc_threshold  # Can be generalized to accept a different threshold
des_mask[~des_mask_good] = 0
# Read maps (gg)
nmaps = 5
des_maps = []
for i in range(nmaps):
    map_file = os.path.join(des_data_folder,
                            'map_counts_w_bin{}_ns{}.fits'.format(i, nside))
    des_maps.append(hp.read_map(map_file))
des_maps = np.array(des_maps)

des_N_mean = des_maps[:, des_mask_good].sum(
    axis=1) / des_mask[des_mask_good].sum()
des_maps_dg = np.zeros(des_maps.shape)
des_maps_dg[:, des_mask_good] = des_maps[:, des_mask_good] / (
Example #56
0
import numpy as np
import healpy as hp
import matplotlib.pyplot as plt
from rotate_map_alm import *
from astropy.io import fits

Nside=2048
Npix=12*Nside**2

# Planck 353 GHz TQU
TQUmap = np.zeros((3,Npix))
#TQUmap = hp.read_map('/scr/depot1/jch/Planckdata/HFI_SkyMap_353_2048_R2.02_full_RING.fits', field=(0,1,2))
TQUfn = '/disks/jansky/a/users/goldston/susan/Planck/SOSDPol_and_HI/353GHz_IQU_2048_dipole_model_subtracted.fits'
TQUmap = hp.read_map(TQUfn, field=(0,1,2))
TQUhdr = fits.getheader(TQUfn)

# check images
for i in range(3):
    plt.clf()
    hp.mollview(TQUmap[i], coord='G')
    plt.savefig('rotate_map_test_Gal_'+str(i)+'.png')

# rotate
TQUmap_Equ = np.zeros((3,Npix))
TQUmap_Equ = rotate_map(TQUmap,2000.0,2000.0,'G','C',Nside)
# check images
for i in range(3):
    plt.clf()
    hp.mollview(TQUmap_Equ[i], coord='C')
    plt.savefig('rotate_map_test_Equ_'+str(i)+'.png')
Example #57
0
                        default='180,0')
    parser.add_argument('--zoom',
                        help='zoom in around rot position',
                        required=False,
                        type=int,
                        default=None)
    parser.add_argument('--map_type',
                        help='type of mat to display, EFLUX, FLUX, TS or SIG',
                        required=False,
                        type=str,
                        default='EFLUX')

    args = parser.parse_args()
    # Read map

    hpx_ul = hp.read_map(args.map)

    # Get nside

    nside = hp.get_nside(hpx_ul)
    print(nside)

    # Get some meaningful limits for the color bar among the points which are larger than 0 and finite

    idx = (hpx_ul > 0) & np.isfinite(hpx_ul)
    if np.sum(idx) == 0: idx = (hpx_ul >= 0) & np.isfinite(hpx_ul)

    # Use the provided percentiles
    if args.min_percentile != 0:

        mmin = np.percentile(hpx_ul[idx], args.min_percentile)
Example #58
0
    def test_grad(self):
        # add simple sky gradient signal
        grad = OpSimGradient(nside=self.sim_nside,
                             nest=True,
                             common_flag_mask=self.common_flag_mask)
        grad.exec(self.data)

        # make a simple pointing matrix
        pointing = OpPointingHpix(nside=self.map_nside, nest=True)
        pointing.exec(self.data)

        handle = None
        if self.comm.rank == 0:
            handle = open(
                os.path.join(self.outdir, "out_test_ground_grad_info"), "w")
        self.data.info(handle, common_flag_mask=self.common_flag_mask)
        if self.comm.rank == 0:
            handle.close()

        # make a binned map with madam
        madam_out = os.path.join(self.outdir, "madam_grad")
        if self.comm.rank == 0:
            if os.path.isdir(madam_out):
                shutil.rmtree(madam_out)
            os.mkdir(madam_out)

        pars = {}
        pars['kfirst'] = 'F'
        pars['base_first'] = 1.0
        pars['fsample'] = self.rate
        pars['nside_map'] = self.map_nside
        pars['nside_cross'] = self.map_nside
        pars['nside_submap'] = min(8, self.map_nside)
        pars['write_map'] = 'F'
        pars['write_binmap'] = 'T'
        pars['write_matrix'] = 'F'
        pars['write_wcov'] = 'F'
        pars['write_hits'] = 'T'
        pars['kfilter'] = 'F'
        pars['path_output'] = madam_out
        pars['info'] = 0

        madam = OpMadam(params=pars,
                        name='grad',
                        purge=False,
                        common_flag_mask=self.common_flag_mask)
        if madam.available:
            madam.exec(self.data)

            if self.comm.rank == 0:
                import matplotlib.pyplot as plt

                hitsfile = os.path.join(madam_out, 'madam_hmap.fits')
                hits = hp.read_map(hitsfile, nest=True)

                outfile = "{}.png".format(hitsfile)
                hp.mollview(hits, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                binfile = os.path.join(madam_out, 'madam_bmap.fits')
                bins = hp.read_map(binfile, nest=True)

                outfile = "{}.png".format(binfile)
                hp.mollview(bins, xsize=1600, nest=True)
                plt.savefig(outfile)
                plt.close()

                # compare binned map to input signal

                tothits = np.sum(hits)
                nt.assert_equal(self.ndet * ((self.data.comm.ngroups * \
                    self.totsamp) - self.nflagged), tothits)

                sig = grad.sigmap()
                mask = (bins > -1.0e20)
                nt.assert_almost_equal(bins[mask], sig[mask], decimal=4)
        else:
            print("libmadam not available, skipping tests")
        return
Example #59
0
def getweights(tab_file, map):
    tab = Table.read(tab_file)

    hpmap = hp.read_map(map)
Example #60
0
def mkCl(**kwargs):
    """                                      
    """
    get_var_from_file(kwargs['config'])

    logger.info('Calculating PSF with gtpsf...')
    dict_gtpsf = data.DICT_GTPSF
    logger.info('Calculating Wbeam Function...')
    out_wb_label = data.OUT_W_LABEL
    mask_label = data.MASK_LABEL
    out_wb_txt = os.path.join(GRATOOLS_OUT, 'Wbeam_%s.txt' % out_wb_label)
    if not os.path.exists(out_wb_txt):
        from GRATools.utils.ScienceTools_ import gtpsf
        gtpsf(dict_gtpsf)
        from GRATools.utils.gWindowFunc import get_psf
        psf_file = data.PSF_FILE
        psf = get_psf(psf_file)
        _l = np.arange(0, 1000)
        from GRATools.utils.gWindowFunc import build_wbeam
        wb = build_wbeam(psf, _l, out_wb_txt)
    else:
        from GRATools.utils.gWindowFunc import get_wbeam
        wb = get_wbeam(out_wb_txt)
    save_current_figure('Wbeam_%s.png' % out_wb_label, clear=True)

    logger.info('Starting Cl analysis...')
    in_label = data.IN_LABEL
    in_label = in_label + '_' + mask_label
    out_label = data.OUT_LABEL
    binning_label = data.BINNING_LABEL
    mask_file = data.MASK_FILE
    mask = hp.read_map(mask_file)
    cl_param_file = os.path.join(GRATOOLS_OUT, '%s_%s_parameters.txt' \
                                     %(in_label, binning_label))
    from GRATools.utils.gFTools import get_cl_param
    _emin, _emax, _emean, _f, _ferr, _cn, _fsky = get_cl_param(cl_param_file)
    cl_txt = open(os.path.join(GRATOOLS_OUT, '%s_%s_cls.txt' \
                                   %(out_label, binning_label)), 'w')
    for i, (emin, emax) in enumerate(zip(_emin, _emax)):
        logger.info('Considering bin %.2f - %.2f ...' % (emin, emax))
        gamma = data.WEIGHT_SPEC_INDEX
        Im = (1 / (1 - gamma)) * (emax**(1 - gamma) -
                                  emin**(1 - gamma)) / (emax - emin)
        eweightedmean = np.power(1 / Im, 1 / gamma)
        cl_txt.write('ENERGY\t %.2f %.2f %.2f\n' % (emin, emax, eweightedmean))
        l_max = 1000
        _l = np.arange(l_max)
        wb_en = wb.hslice(eweightedmean)(_l)
        flux_map_name = in_label + '_flux_%i-%i.fits' % (emin, emax)
        flux_map = hp.read_map(os.path.join(GRATOOLS_OUT_FLUX, flux_map_name))
        flux_map_masked = hp.ma(flux_map)
        flux_map_masked.mask = np.logical_not(mask)
        fsky = 1.-(len(np.where(flux_map_masked.filled() == hp.UNSEEN)[0])/\
                       float(len(flux_map)))
        if kwargs['show'] == True:
            hp.mollview(flux_map_masked.filled(),
                        title='f$_{sky}$ = %.3f' % fsky,
                        min=1e-7,
                        max=1e-4,
                        norm='log')
            plt.show()
        print('fsky = ', fsky)
        nside = hp.npix2nside(len(flux_map))
        wpix = hp.sphtfunc.pixwin(nside)[:l_max]
        _cl = hp.sphtfunc.anafast(flux_map_masked.filled(), lmax=l_max-1, \
                                      iter=5)
        _cl_fit = hp.sphtfunc.anafast(flux_map_masked.filled(), iter=4)
        cn_fit = np.average(_cl_fit[-500:-100] / fsky) / len(
            _cl_fit[-500:-100])
        print('cn fit = ', cn_fit)
        print('cn poisson = ', _cn[i])
        cn = _cn[i]
        wl = wb_en * wpix
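        # correct the raw pseudo-Cl: divide by the sky fraction fsky, subtract the
        # noise term cn, and deconvolve the combined beam and pixel window function wl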
        _cl = (_cl / fsky - cn) / (wl**2)
        cl_txt.write('Cl\t%s\n'%str(list(_cl)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
        _cl_err = np.sqrt(2. / ((2 * _l + 1) * fsky)) * (_cl + (cn / wl**2))
        cl_txt.write('Cl_ERR\t%s\n\n'%str(list(_cl_err)).replace('[',''). \
                         replace(']','').replace(', ', ' '))
    cl_txt.close()
    logger.info('Created %s'%(os.path.join(GRATOOLS_OUT, '%s_%s_cls.txt' \
                                               %(out_label, binning_label))))