""" save the label features table as .fits file
    """
    path_dir = os.path.dirname(path)
    if not os.path.exists(path_dir):
        os.makedirs(path_dir)

    ft.write(path, table, clobber=True)


if __name__ == '__main__':

    from argparse import ArgumentParser

    ap = ArgumentParser(description='Prepare eBOSS data for NN regression')
    ap.add_argument('-d', '--data_path', type=str, required=True)
    ap.add_argument('-r', '--randoms_path', type=str, required=True)
    ap.add_argument('-s', '--templates_path', type=str, required=True)
    ap.add_argument('-o', '--output_path', type=str, required=True)
    ap.add_argument('--label', type=str, default='ngal')
    ap.add_argument('-n', '--nside', type=int, default=512)
    ap.add_argument('-sl',
                    '--slices',
                    type=str,
                    nargs='*',
                    default=['main', 'highz'])

    config = ap.parse_args()

    setup_logging('info')
    prepare_table(config)
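
    # a hypothetical invocation (script and file names are placeholders, not from the source):
    #   python prep_eboss.py -d data.fits -r randoms.fits -s templates.h5 \
    #       -o ngal_features.fits -n 512 --slices main highz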
Example #2
 
    
    # measure the angular power spectra of the galaxy map against the randoms,
    # given the selection function and systematics templates; njack=0 skips jackknife errors
    cls_list = get_cl(ngal, nran, mask, selection_fn=selection_fn,
                      systematics=sysm, njack=0)

    if comm.rank == 0:
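        # only the root rank creates the output directory and writes the spectra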
        output_dir = os.path.dirname(args.output_path)
        if not os.path.exists(output_dir):
            print(f'creating {output_dir}')
            os.makedirs(output_dir)
       
        np.save(args.output_path, cls_list)

if __name__ == '__main__':
    
    setup_logging("info") # turn on logging to screen
    comm = CurrentMPIComm.get()   
    
    if comm.rank == 0:
        print(f'hi from {comm.rank}')
        
        from argparse import ArgumentParser
        ap = ArgumentParser(description='Angular Clustering')
        ap.add_argument('-d', '--data_path', required=True)
        ap.add_argument('-o', '--output_path', required=True)
        ap.add_argument('-s', '--selection', default=None)
        ns = ap.parse_args()

        for (key, value) in ns.__dict__.items():
            print(f'{key:15s} : {value}')                
    else:
        ns = None

    # all ranks receive the parsed arguments from the root
    ns = comm.bcast(ns, root=0)
Example #3
import sys
sys.path.append('/Users/mehdi/github/lssutils')
from lssutils.lab import *
from lssutils import setup_logging

setup_logging("info")

biases = [1.0, 2.0, 3.0, 4.0]

# initialize the task manager to run the tasks
with TaskManager(cpus_per_task=1, use_all_cpus=True) as tm:

    # set up the linear power spectrum
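    # a minimal sketch of the missing step (not in the original snippet):
    # `cosmology.LinearPower` is nbodykit's linear power-spectrum class and is
    # assumed to be available here; 'EisensteinHu' avoids the CLASS dependency
    from nbodykit.lab import cosmology
    Plin = cosmology.LinearPower(cosmology.Planck15, redshift=0.0, transfer='EisensteinHu')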

    # iterate through the bias values
    for bias in tm.iterate(biases):
        print(2 * bias)
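
# run under MPI so the bias values are split across ranks,
# e.g. `mpiexec -n 4 python this_script.py` (command shown is illustrative)
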
def main(mockid, my_cols=cols_dr8_rand):
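    """ Prepare one EZmock ELG realization for NN regression: build HEALPix
        galaxy and random maps per redshift slice, attach the imaging templates,
        and write the k-fold table. `cols_dr8_rand` is assumed to be imported
        from lssutils along with the other helpers used below.
    """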
    setup_logging('info')

    logger = logging.getLogger('RegressionPrep')

    # --- input parameters
    nside = 256
    dataframe = '/home/mehdi/data/templates/pixweight-dr8-0.31.1.h5'
    random = f'/B/Shared/mehdi/mocksys/FA_EZmock_desi_ELG_v0_rand_00to2.hp{nside}.fits'
    output_dir = '/B/Shared/mehdi/mocksys/regression/'
    zcuts = {'low': [0.7, 1.0], 'high': [1.0, 1.5], 'all': [0.7, 1.5]}

    #---
    # start
    #---
    logger.info(f'output : {output_dir}')

    # --- templates
    df = pd.read_hdf(dataframe, key='templates')
    logger.info(f'read {dataframe}')

    # --- random
    hprandom = hp.read_map(random, verbose=False)
    logger.info(f'read {random}')

    # --- data
    data = ft.read(
        f'/B/Shared/Shadab/FA_LSS/FA_EZmock_desi_ELG_v0_{mockid}.fits')
    mask = ft.read(
        f'/B/Shared/Shadab/FA_LSS/EZmock_desi_v0.0_{mockid}/bool_index.fits'
    )['bool_index']
    data = data[mask]
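    # observed redshift: cosmological redshift plus the RSD displacement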
    z_rsd = data['Z_COSMO'] + data['DZ_RSD']
    logger.info(f'read mock-{mockid}')

    for i, key_i in enumerate(zcuts):

        logger.info('split based on {}'.format(zcuts[key_i]))

        # --- prepare the names for the output files
        hpcat = None  #output_dir + f'/galmap_{mockid}_{key_i}_{nside}.hp.fits'
        hpmask = None  #output_dir + f'/mask_{mockid}_{key_i}_{nside}.hp.fits'
        fracgood = None  #output_dir + f'/frac_{mockid}_{key_i}_{nside}.hp.fits'
        fitname = None  #output_dir + f'/ngal_features_{mockid}_{key_i}_{nside}.fits'
        fitkfold = output_dir + f'ngal_features_{mockid}_{key_i}_{nside}.5r.npy'

        good = (z_rsd >= zcuts[key_i][0]) & (z_rsd < zcuts[key_i][1])
        logger.info(f'total # : {good.sum()}')
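        # project the selected galaxies onto a HEALPix map of counts per pixel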
        hpdata = hpixsum(nside, data[good]['RA'], data[good]['DEC'])

        # --- append the galaxy and random density
        dataframe_i = df.copy()
        dataframe_i['ngal'] = hpdata
        dataframe_i['nran'] = hprandom
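        # pixels with no randoms lie outside the footprint; flag them as NaN so they are dropped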
        dataframe_i.loc[hprandom == 0, 'nran'] = np.nan

        dataframe_i.replace([np.inf, -np.inf], value=np.nan,
                            inplace=True)  # replace inf with NaN
        dataframe_i.dropna(inplace=True)
        logger.info('df shape : {}'.format(dataframe_i.shape))
        logger.info('columns  : {}'.format(my_cols))

        for column in dataframe_i.columns:
            logger.info(
                f'{column}: {np.percentile(dataframe_i[column], [0,1,99, 100])}'
            )

        # --- write
        hd5_2_fits(dataframe_i,
                   my_cols,
                   fitname,
                   hpmask,
                   fracgood,
                   fitkfold,
                   res=nside,
                   k=5)
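
# a minimal driver for `main`, assuming the mock id is passed on the command line
# (hypothetical; the original driver is not part of this snippet)
if __name__ == '__main__':
    import sys
    main(sys.argv[1])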
Example #5
                                   default=[i for i in range(18)])
    ap.add_argument('--nbin',      default=8, type=int)
    ap.add_argument('--njack',     default=20, type=int)
    ap.add_argument('--nside',     default=256, type=int)
    ap.add_argument('--lmax',      default=512, type=int)
    ap.add_argument('--smooth',    action='store_true')
    ap.add_argument('--verbose',   action='store_true')
    ns = ap.parse_args()
    
    if not os.path.exists(ns.oudir):
        os.makedirs(ns.oudir)

    logfile = ''.join([ns.oudir, ns.log]) if ns.log != 'none' else None

    if logfile is not None:
        print(f'log in {logfile}')

    setup_logging('info', logfile=logfile)
    
else:
    ns = None
    
ns = comm.bcast(ns, root=0)


#--- run
engine = PhotData(ns)
engine.read()

if ns.nnbar != 'none':
    engine.run_nnbar() # mean density

if ns.clfile != 'none':