Example #1
def readreduce(sdmname, scan, nskip):
    def reducedata(data):
        return n.abs(data).max(axis=3).max(axis=1).max(axis=0)[None,:]    # (1, nchan) array: max over polarization, baseline and time
#        return n.abs(data).max(axis=3).max(axis=1)    # returns 2d array with all ints

    data = n.roll(sdmreader.read_bdf(sdmname, scan, nskip=nskip, readints=intchunk, writebdfpkl=True), roll, axis=2)   # roll needed to get increasing freq spw order
    return reducedata(data)
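
A minimal usage sketch for the helper above, assuming the module-level names the snippet implies (numpy imported as n, sdmreader, an intchunk read size, and a roll channel shift); the SDM name and scan number are hypothetical and a readable SDM must exist on disk:

import numpy as n
import sdmreader

intchunk = 128      # integrations read per call (assumed module-level setting)
roll = 0            # channel shift that puts spw in increasing-frequency order (assumed)

spec = readreduce('mySDM', scan=1, nskip=0)    # hypothetical SDM directory and scan
print(spec.shape)   # (1, nchan): max amplitude over time, baseline and polarization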
Example #2
def read_bdf_segment(d, segment=-1):
    """ Reads bdf (sdm) format data into numpy array for realtime pipeline.
    d defines pipeline state. assumes segmenttimes defined by RT.set_pipeline.
    d should have 'writebdfpkl' key to define boolean for writing to bdfpkls in ASDMBinary directory.
    """

    # define integration range
    if segment != -1:
        assert 'segmenttimes' in d, 'd must have segmenttimes defined'
        readints = d['readints']
        nskip = (24*3600*(d['segmenttimes'][segment,0] - d['starttime_mjd'])/d['inttime']).astype(int)
        logger.info('Reading segment %d/%d, times %s to %s' % (segment, len(d['segmenttimes'])-1, qa.time(qa.quantity(d['segmenttimes'][segment,0],'d'),form=['hms'], prec=9)[0], qa.time(qa.quantity(d['segmenttimes'][segment,1], 'd'), form=['hms'], prec=9)[0]))
    else:
        nskip = 0
        readints = 0

    # read (all) data
    data = sdmreader.read_bdf(d['filename'], d['scan'], nskip=nskip, readints=readints, writebdfpkl=d['writebdfpkl']).astype('complex64')

    # test that spw are in freq sorted order
    # only one use case supported: rolled spw
    dfreq = n.array([d['spw_reffreq'][i+1] - d['spw_reffreq'][i] for i in range(len(d['spw_reffreq'])-1)])
    dfreqneg = [df for df in dfreq if df < 0]
    if len(dfreqneg) <= 1:      # zero or one negative step: spw are sorted or simply rolled (not a perfect test of permutability!)
        if len(dfreqneg) == 1:
            logger.warning('Rolling spw frequencies to increasing order: %s' % str(d['spw_reffreq']))
            rollch = n.sum([d['spw_nchan'][ss] for ss in range(n.where(dfreq < 0)[0][0]+1)])
            data = n.roll(data, rollch, axis=2)
    else:
        raise ValueError('SPW out of order and cannot be permuted to increasing order: %s' % str(d['spw_reffreq']))

    # optionally integrate (downsample)
    if ((d['read_tdownsample'] > 1) or (d['read_fdownsample'] > 1)):
        sh = data.shape
        tsize = sh[0] // d['read_tdownsample']    # floor division keeps an integer output shape
        fsize = sh[2] // d['read_fdownsample']
        data2 = n.zeros( (tsize, sh[1], fsize, sh[3]), dtype='complex64')
        if d['read_tdownsample'] > 1:
            logger.info('Downsampling in time by %d' % d['read_tdownsample'])
            for i in range(tsize):
                data2[i] = data[i*d['read_tdownsample']:(i+1)*d['read_tdownsample']].mean(axis=0)
        if d['read_fdownsample'] > 1:
            logger.info('Downsampling in frequency by %d' % d['read_fdownsample'])
            for i in range(fsize):
                data2[:,:,i,:] = data[:,:,i*d['read_fdownsample']:(i+1)*d['read_fdownsample']].mean(axis=2)
        data = data2

    takepol = [d['pols_orig'].index(pol) for pol in d['pols']]
    logger.debug('Selecting pols %s' % d['pols'])

    return data.take(d['chans'], axis=2).take(takepol, axis=3)
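
A sketch of the pipeline-state dict this function expects, with hypothetical values for illustration only; in practice d is built by RT.set_pipeline, and numpy (as n), sdmreader, logger and the CASA quanta tool qa are assumed to be available at module level, along with a readable SDM on disk:

d = {'filename': 'mySDM',                        # hypothetical SDM directory
     'scan': 1,
     'segmenttimes': n.array([[55000.0, 55000.001]]),   # MJD (start, stop) per segment
     'starttime_mjd': 55000.0,
     'inttime': 1.0,                             # integration time in seconds
     'readints': 64,                             # integrations per segment
     'writebdfpkl': False,
     'spw_reffreq': [1.4e9, 1.5e9],              # already increasing, so no roll needed
     'spw_nchan': [64, 64],
     'read_tdownsample': 1,
     'read_fdownsample': 1,
     'pols_orig': ['RR', 'LL'],
     'pols': ['RR', 'LL'],
     'chans': list(range(128))}

data = read_bdf_segment(d, segment=0)            # complex64 array: (ints, baselines, chans, pols)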