Example #1
def parse_parameters(filename):
    """
    Take a parameter file, return a list of initialised objects (jobs) and the input parameters
    """

    # read in the parameters
    mainInput = ParserClass.Parser(filename)
    if 'LogFile' in mainInput['Inputs']:
        if mainInput['Inputs']['LogFileUsePID']:
            logger = Logging.Logger(mainInput['Inputs']['LogFile']+'_{}'.format(os.getpid()))
        else:
            logger = Logging.Logger(mainInput['Inputs']['LogFile'])
       
    else:
        logger = print

    # Generate a filelist to loop over
    filelist = np.loadtxt(mainInput['Inputs']['filelist'],dtype=str,ndmin=1)
    if mainInput['Inputs']['data_dir'] is None:
        filelist = [filename for filename in filelist]
    else:
        filelist = ['{}/{}'.format(mainInput['Inputs']['data_dir'],
                                   filename.split('/')[-1]) for filename in filelist]
                               
    # Some items should always be a list
    if not isinstance(mainInput['Inputs']['pipeline'], list):
        mainInput['Inputs']['pipeline'] = [mainInput['Inputs']['pipeline']]
    # Get the class names (modulename, classname)
    jobnames = [c for c in mainInput['Inputs']['pipeline']]

    logger('Running: '+' '.join(mainInput['Inputs']['pipeline']))


    # The preamble should also always be a list (a bare string would be iterated character by character)
    if not isinstance(mainInput['Inputs']['preamble'], list):
        mainInput['Inputs']['preamble'] = [mainInput['Inputs']['preamble']]
    prejobnames = [c for c in mainInput['Inputs']['preamble']]


    # Read the class parameter file
    classInput = ParserClass.Parser(mainInput['Inputs']['classParameters'])

    # Initialise the pipeline classes : classInput provides the kwargs for each class
    jobs = []
    for job in jobnames:
        jobs += [getClass(job)(logger=logger,**classInput[job])]

    # Initialise the preamble classes : classInput provides the kwargs for each class
    prejobs = []
    for prejob in prejobnames:
        prejobs += [getClass(prejob)(logger=logger,**classInput[prejob])]


    return jobs, prejobs, filelist, mainInput, classInput, logger
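
As a usage illustration only, a minimal sketch of how the objects returned by parse_parameters might be driven. The run_pipeline wrapper and the assumption that each job object is callable on a per-file data container are hypothetical; the real call signature depends on the classes named in the parameter file.

def run_pipeline(parameter_file):
    # Hypothetical driver: parse the parameter file, run the preamble jobs once,
    # then push every file through the pipeline stages in order.
    jobs, prejobs, filelist, mainInput, classInput, logger = parse_parameters(parameter_file)

    for prejob in prejobs:
        prejob()                      # assumption: preamble jobs take no per-file data

    for filename in filelist:
        data = filename               # assumption: each stage accepts and returns a data container
        for job in jobs:
            data = job(data)
        logger('Finished {}'.format(filename))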
Example #2
def level1_destripe(filename, options):
    """Destripe COMAP level-2 data and write out the resulting maps.

    Arguments:

    filename: the name of the parameter file read by ParserClass.Parser
    options: nested dict of {section: {key: value}} overrides applied to the parameters
    """
    # Get the inputs:
    parameters = ParserClass.Parser(filename)

    title = parameters['Inputs']['title']

    # Apply any overrides passed in via options on top of the parameter file
    for k1, v1 in options.items():
        for k2, v2 in v1.items():
            parameters[k1][k2] = v2

    # Read in all the data
    if not isinstance(parameters['Inputs']['feeds'], list):
        parameters['Inputs']['feeds'] = [parameters['Inputs']['feeds']]
    if not isinstance(parameters['Inputs']['frequencies'], list):
        parameters['Inputs']['frequencies'] = [parameters['Inputs']['frequencies']]
    if not isinstance(parameters['Inputs']['bands'], list):
        parameters['Inputs']['bands'] = [parameters['Inputs']['bands']]


    # loop over band and frequency
    for band in np.array(parameters['Inputs']['bands']).astype(int):
        for frequency in np.array(parameters['Inputs']['frequencies']).astype(int):

            # Data parsing object
            data = DataLevel2PCA(parameters,band=band,frequency=frequency,keeptod=True)
            data.naive.average()


            offsetMap, offsets = Destriper(parameters, data)

            offsets.average()

            # Write offsets back out to the level2 files
            toffs = offsets()
            nFeeds = len(parameters['Inputs']['feeds'])
            filelist = np.loadtxt(parameters['Inputs']['filelist'],dtype=str,ndmin=1)

            from astropy.io import fits
            hdu = fits.PrimaryHDU(data.naive()-offsetMap(),header=data.naive.wcs.to_header())
            weights = fits.ImageHDU(data.naive.weights(), header=data.hits.wcs.to_header())
            hits = fits.ImageHDU(data.hits(returnsum=True), header=data.hits.wcs.to_header())
            naive   = fits.ImageHDU(data.naive(), header=data.hits.wcs.to_header())
            offsets = fits.ImageHDU(offsetMap(), header=data.hits.wcs.to_header())
            
            hdu1 = fits.HDUList([hdu, weights,hits,naive,offsets])
            feedstrs = [str(v) for v in parameters['Inputs']['feeds']]
            hdu1.writeto('fitsfiles/gfield-allfeeds/{}_feeds{}_offset{}_band{}_freq{}.fits'.format(title,'-'.join(feedstrs),
                parameters['Destriper']['offset'],
                band,frequency),overwrite=True)
Example #3
def level1_destripe(filename, options):
    """Plot hit maps for feeds

    Arguments:

    filename: the name of the COMAP Level-1 file

    """
    # Get the inputs:
    parameters = ParserClass.Parser(filename)

    # Apply any overrides passed in via options on top of the parameter file
    for k1, v1 in options.items():
        for k2, v2 in v1.items():
            parameters[k1][k2] = v2

    upperFrequency = parameters['Inputs']['upperFrequency']
    lowerFrequency = parameters['Inputs']['lowerFrequency']
    title = parameters['Inputs']['title']

    # Read in all the data
    if not isinstance(parameters['Inputs']['feeds'], list):
        parameters['Inputs']['feeds'] = [parameters['Inputs']['feeds']]
    filelist = np.loadtxt(parameters['Inputs']['filelist'], dtype=str, ndmin=1)

    nside = int(parameters['Inputs']['nside'])
    data = DataLevel2AverageHPX_test(filelist,
                                     parameters,
                                     nside=nside,
                                     keeptod=False,
                                     subtract_sky=False)

    offsetMap, offsets = DestriperHPX(parameters, data)

    naive = data.naive()
    offmap = offsetMap()
    hits = data.hits.return_hpx_hits()
    des = naive - offmap
    des[des == 0] = hp.UNSEEN
    naive[naive == 0] = hp.UNSEEN
    offmap[offmap == 0] = hp.UNSEEN
    hits[hits == 0] = hp.UNSEEN
    # write the UNSEEN-masked destriped map rather than recomputing naive - offmap,
    # which would leave 0 (not UNSEEN) in unobserved pixels after the masking above
    hp.write_map('{}_{}-{}.fits'.format(title, upperFrequency, lowerFrequency),
                 [des, naive, offmap, hits],
                 overwrite=True,
                 partial=True)

    feedstrs = [str(v) for v in parameters['Inputs']['feeds']]
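
A short read-back sketch for the HEALPix maps written above: the column order (destriped, naive, offsets, hits) follows the list passed to hp.write_map, and the filename is only a placeholder built the same way as at write time.

import healpy as hp
import numpy as np

# Placeholder filename matching the '{title}_{upperFrequency}-{lowerFrequency}.fits'
# pattern used above; substitute the real values from the parameter file.
fname = 'examplefield_34.0-26.0.fits'

# Column order follows the list passed to hp.write_map: destriped, naive, offsets, hits
destriped, naive, offmap, hits = hp.read_map(fname, field=[0, 1, 2, 3])

# Unobserved pixels carry hp.UNSEEN; mask them before taking statistics
good = hits != hp.UNSEEN
print('Observed pixels:', np.sum(good))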
Example #4
def level1_destripe(filename, options):
    """Plot hit maps for feeds

    Arguments:

    filename: the name of the COMAP Level-1 file

    """
    # Get the inputs:
    p = ParserClass.Parser(filename)
    # Apply any overrides passed in via options on top of the parameter file
    for k1, v1 in options.items():
        for k2, v2 in v1.items():
            p[k1][k2] = v2
    title = p['Inputs']['title']
    # Read in all the data
    if not isinstance(p['Inputs']['feeds'], list):
        p['Inputs']['feeds'] = [p['Inputs']['feeds']]
    filelist = np.loadtxt(p['Inputs']['filelist'], dtype=str, ndmin=1)

    np.random.seed(1)
    data = DataReader.ReadDataLevel2(
        filelist,
        feeds=p['Inputs']['feeds'],
        flag_spikes=p['ReadData']['flag_spikes'],
        offset_length=p['Destriper']['offset'],
        ifeature=p['ReadData']['ifeature'],
        feed_weights=p['Inputs']['feed_weights'],
        iband=p['ReadData']['iband'],
        keeptod=p['ReadData']['keeptod'],
        subtract_sky=p['ReadData']['subtract_sky'],
        map_info=p['Destriper'])

    offsetMap, offsets = Destriper.Destriper(p, data)

    write_map(p, data, offsetMap, postfix='')
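
All of the examples above index the parameter object as parameters[section][key]. The exact file format ParserClass.Parser accepts is not shown here, so the following is only an illustration of that nested structure using Python's standard configparser, with section and key names taken from the lookups in Example #4.

import configparser

# Illustration only: an INI-style layout mirroring the [section][key] lookups in
# Example #4 (Inputs, ReadData, Destriper). This is not necessarily the format
# ParserClass.Parser expects, and the values are placeholders.
example_ini = """
[Inputs]
title = example_field
filelist = filelist.txt
feeds = 1
feed_weights = None

[ReadData]
flag_spikes = True
ifeature = 5
iband = 0
keeptod = True
subtract_sky = False

[Destriper]
offset = 50
"""

cfg = configparser.ConfigParser()
cfg.read_string(example_ini)
# configparser returns strings; ParserClass presumably handles type conversion itself
print(cfg['Destriper']['offset'], cfg['ReadData']['iband'])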
Example #5
from mpi4py import MPI
import sys
import numpy as np
from comancpipeline.Tools import ParserClass

comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()

from run_average import main

if __name__ == "__main__":

    parameter_fname = sys.argv[1]

    parameters = ParserClass.Parser(parameter_fname)

    filelist = np.loadtxt(parameters['Inputs']['filelist'], dtype=str)
    #if prefix in sources:
    #    parameters = f'ParameterFiles/inputs_fornax_{prefix}.ini'
    #    classinfo  = f'ParameterFiles/ClassParameters_{prefix}.ini'
    #else:
    #parameters = f'ParameterFiles/inputs_fornax_general.ini'
    classinfo = 'ParameterFiles/ClassParameters.ini'

    nfiles = len(filelist)
    step = nfiles // size
    start = rank * step
    end = (rank + 1) * step
    # with floor division, (rank + 1) * step never exceeds nfiles; instead the last
    # rank must pick up the remainder files or they would be skipped entirely
    if rank == size - 1:
        end = nfiles
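
A more compact way to assign files to ranks is np.array_split, which spreads the remainder over the first ranks instead of handing it all to the last one. A minimal, self-contained sketch; the call into run_average.main is not shown above, so the per-file work is only a placeholder here.

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()

# Placeholder filelist; in the example above it comes from np.loadtxt on the
# 'filelist' entry of the parameter file.
filelist = np.array(['obs_{:03d}.hd5'.format(i) for i in range(10)])

# np.array_split gives each rank a nearly equal, contiguous share of the files,
# remainder included.
my_files = np.array_split(filelist, size)[rank]

for fname in my_files:
    # placeholder for the per-file work; the signature of run_average.main is not
    # shown in the example above, so no call is assumed here
    pass

comm.Barrier()   # synchronise all ranks before exiting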
Example #6
def level1_destripe(filename, options):
    """Destripe COMAP level-2 data, write the offsets back to the level-2 files, and write out the resulting maps.

    Arguments:

    filename: the name of the parameter file read by ParserClass.Parser
    options: nested dict of {section: {key: value}} overrides applied to the parameters
    """
    # Get the inputs:
    parameters = ParserClass.Parser(filename)

    title = parameters['Inputs']['title']

    # Apply any overrides passed in via options on top of the parameter file
    for k1, v1 in options.items():
        for k2, v2 in v1.items():
            parameters[k1][k2] = v2

    # Read in all the data
    if not isinstance(parameters['Inputs']['feeds'], list):
        parameters['Inputs']['feeds'] = [parameters['Inputs']['feeds']]
    if not isinstance(parameters['Inputs']['frequencies'], list):
        parameters['Inputs']['frequencies'] = [parameters['Inputs']['frequencies']]
    if not isinstance(parameters['Inputs']['bands'], list):
        parameters['Inputs']['bands'] = [parameters['Inputs']['bands']]


    # loop over band and frequency
    for band in np.array(parameters['Inputs']['bands']).astype(int):
        for frequency in np.array(parameters['Inputs']['frequencies']).astype(int):

            # Data parsing object
            data = DataLevel2(parameters,band=band,frequency=frequency,keeptod=True)
            data.naive.average()


            offsetMap, offsets = Destriper(parameters, data)

            offsets.average()

            # Write offsets back out to the level2 files
            toffs = offsets()
            nFeeds = len(parameters['Inputs']['feeds'])
            
            filelist = np.loadtxt(parameters['Inputs']['filelist'],dtype=str,ndmin=1)

            # Will define Nsamples, datasizes[], and chunks[[]]

            if parameters['Inputs']['saveoffsets']:
                for (chunk,filename) in zip(data.chunks,filelist):
                    out = h5py.File(filename,'a')
                    try:
                        features = out['level1/spectrometer/features'][:]
                    except KeyError:
                        out.close()
                        continue
                    selectFeature = data.featureBits(features.astype(float), data.ifeature)
                    selIDs = np.where(selectFeature)[0]
                    N = len(features[selectFeature])
                    N = int((N//data.offsetLen) * data.offsetLen)
                    selectFeature[selIDs[N:]] = False
        
                    if not 'level2/offsets' in out:
                        out.create_dataset('level2/offsets',out['level2/averaged_tod'].shape)
                        
                    thisOffsets = toffs[chunk[0]:chunk[1]]
                    nSamples = thisOffsets.size//nFeeds
                    thisOffsets = np.reshape(thisOffsets,(nFeeds, nSamples))
                    try:
                        dset = out['level2/offsets']
                    except KeyError:
                        # dataset is missing: create it (deleting a key that just raised
                        # KeyError would itself fail) and fetch the new handle
                        print(filename)
                        out.create_dataset('level2/offsets',out['level2/averaged_tod'].shape)
                        dset = out['level2/offsets']

                    testtod = np.zeros(out['level2/averaged_tod'].shape[-1])
                    for (i,feedid) in enumerate(data.FeedIndex):
                        testtod*=0.
                        testtod[selectFeature] = thisOffsets[i,:]
                        dset[feedid,band,frequency,:] = testtod# thisOffsets[i,:]
                    out.close()

            from astropy.io import fits
            hdu = fits.PrimaryHDU(data.naive()-offsetMap(),header=data.naive.wcs.to_header())
            weights = fits.ImageHDU(data.naive.weights(), header=data.hits.wcs.to_header())
            hits = fits.ImageHDU(data.hits(returnsum=True), header=data.hits.wcs.to_header())
            naive   = fits.ImageHDU(data.naive(), header=data.hits.wcs.to_header())
            offsets = fits.ImageHDU(offsetMap(), header=data.hits.wcs.to_header())
            
            hdu1 = fits.HDUList([hdu, weights,hits,naive,offsets])
            feedstrs = [str(v) for v in parameters['Inputs']['feeds']]
            hdu1.writeto('fitsfiles/gfield-allfeeds/{}_feeds{}_offset{}_band{}_freq{}.fits'.format(title,'-'.join(feedstrs),
                parameters['Destriper']['offset'],
                band,frequency),overwrite=True)
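
Since the offsets are written back into the level-2 HDF5 files, here is a short read-back sketch with h5py. The dataset paths come from the writing loop above, the axis order (feed, band, frequency, sample) mirrors dset[feedid, band, frequency, :], and the filename is a placeholder.

import h5py
import numpy as np

# Placeholder level-2 filename; in practice it is one of the entries in filelist above.
with h5py.File('example_level2.hd5', 'r') as f:
    offsets = f['level2/offsets'][...]        # shape: (feed, band, frequency, sample)
    tod     = f['level2/averaged_tod'][...]

# Subtracting the stored offsets from the averaged TOD gives destriped timelines
destriped_tod = tod - offsets
print(offsets.shape, np.nanstd(destriped_tod))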
Example #7
def level1_destripe(filename, options):
    """Destripe COMAP level-2 data and write out the resulting maps.

    Arguments:

    filename: the name of the parameter file read by ParserClass.Parser
    options: nested dict of {section: {key: value}} overrides applied to the parameters
    """
    # Get the inputs:
    parameters = ParserClass.Parser(filename)
    # Apply any overrides passed in via options on top of the parameter file
    for k1, v1 in options.items():
        for k2, v2 in v1.items():
            parameters[k1][k2] = v2
    title = parameters['Inputs']['title']

    # Read in all the data
    if not isinstance(parameters['Inputs']['feeds'], list):
        parameters['Inputs']['feeds'] = [parameters['Inputs']['feeds']]
    filelist = np.loadtxt(parameters['Inputs']['filelist'],dtype=str,ndmin=1)

    # extract the cutoff
    cutoff_str = parameters['Inputs']['filelist'].split('SY',1)[1][:-5]
    cutoff_str = cutoff_str.replace('_','-')
    cutoff = float(cutoff_str)


    data = DataReader.ReadDataLevel2(filelist,parameters,**parameters['ReadData'])
    offsetMap, offsets = Destriper.Destriper(parameters, data)

    ### 
    # Write out the offsets
    ###

    # ????

    ###
    # Write out the maps
    ###
    naive = data.naive.get_map()
    offmap= offsetMap.get_map()
    hits = data.naive.get_hits()
    variance = data.naive.get_cov()

    des = naive - offmap
    des[hits == 0] = np.nan

    
    hdu = fits.PrimaryHDU(des,header=data.naive.wcs.to_header())
    cov = fits.ImageHDU(variance,name='Covariance',header=data.naive.wcs.to_header())
    hits = fits.ImageHDU(hits,name='Hits',header=data.naive.wcs.to_header())
    naive = fits.ImageHDU(naive,name='Naive',header=data.naive.wcs.to_header())
    ## add the parameters into the fits file
    c1 = fits.Column(name='cutoff', array=np.array([cutoff]), format='D', unit='K')
    c2 = fits.Column(name='feeds', array=np.array(parameters['Inputs']['feeds']), format='K')
    c3 = fits.Column(name='iband', array=np.array([int(parameters['ReadData']['iband'])]), format='K')
    c4 = fits.Column(name='threshold', array=np.array([parameters['Destriper']['threshold']]),format='D')
    para_table = fits.BinTableHDU.from_columns([c1, c2, c3, c4], name='Para')                    

    hdul = fits.HDUList([hdu,cov,hits,naive,para_table])
    if not os.path.exists(parameters['Inputs']['maps_directory']):
        os.makedirs(parameters['Inputs']['maps_directory'])
    fname = '{}/{}_Feeds{}_Band{}.fits'.format(parameters['Inputs']['maps_directory'],
                                               parameters['Inputs']['title'],
                                               '-'.join([str(int(f)) for f in parameters['Inputs']['feeds']]),
                                               int(parameters['ReadData']['iband']))
                                               
    hdul.writeto(fname,overwrite=True)
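
Finally, a read-back sketch for the multi-extension FITS files written by Example #7. The extension names (Covariance, Hits, Naive, Para) and the Para columns come from the HDUs built above, while the filename is a placeholder following the same naming pattern.

from astropy.io import fits
from astropy.wcs import WCS

# Placeholder filename following the '{title}_Feeds{...}_Band{...}.fits' pattern above
with fits.open('maps/example_field_Feeds1-2_Band0.fits') as hdul:
    destriped = hdul[0].data             # primary HDU: naive - offsets, NaN where unobserved
    cov       = hdul['Covariance'].data
    hits      = hdul['Hits'].data
    naive     = hdul['Naive'].data
    wcs       = WCS(hdul[0].header)      # map projection written from data.naive.wcs
    para      = hdul['Para'].data        # cutoff, feeds, iband, threshold
    print(para['cutoff'], para['iband'], para['threshold'])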