Example 1
def searchone(filename, scan, paramfile, logfile, bdfdir):
    """ Searches one scan of filename

    filename is name of local sdm ('filename.GN' expected locally).
    scan is scan number to search. if none provided, script prints all.
    assumes filename is an sdm.
    """

    filename = os.path.abspath(filename)
    scaninfo = ps.read_scans(filename, bdfdir=bdfdir)
    fileroot = os.path.basename(filename)

    if scan == 0:
        # No scan requested: list what is available and show an example state.
        logger.info('Scans, Target names:')
        logger.info('%s' % str([(ss, scaninfo[ss]['source']) for ss in scaninfo]))
        logger.info('Example pipeline:')
        state = rt.set_pipeline(filename, scaninfo.popitem()[0], paramfile=paramfile,
                                fileroot=fileroot, logfile=logfile)
    else:
        # Build pipeline state and search every segment of the chosen scan.
        d = rt.set_pipeline(filename, scan, paramfile=paramfile,
                            fileroot=fileroot, logfile=logfile)
        rt.pipeline(d, range(d['nsegments']))

        # clean up and merge files
        pc.merge_segments(filename, scan)
        pc.merge_scans(os.path.dirname(filename), fileroot, scaninfo.keys())
Example 2
def mergeall(filename, snrmin, snrmax, bdfdir):
    """ Merge cands/noise files over all scans

    Tries to find scans from filename, but will fall back to finding relevant files if it does not exist.
    """

    filename = os.path.abspath(filename)
    bignumber = 500  # fallback upper bound on scan numbers

    if os.path.exists(filename):
        # Preferred path: read the scan list straight from the SDM.
        scanlist = sorted(ps.read_scans(filename, bdfdir=bdfdir).keys())
    else:
        logger.warn('Could not find file {0}. Estimating scans from available files.'.format(filename))
        pattern = '*{0}_sc*pkl'.format(os.path.basename(filename))
        filelist = glob.glob(os.path.join(os.path.dirname(filename), pattern))
        try:
            # Recover scan numbers from names like '..._sc<scan>seg...pkl'.
            found = set()
            for fn in filelist:
                found.add(int(fn.rstrip('.pkl').split('_sc')[1].split('seg')[0]))
            scanlist = sorted(found)
        except IndexError:
            logger.warn('Could not parse filenames for scans. Looking over big range.')
            scanlist = range(bignumber)

    logger.info('Merging over scans {0}'.format(scanlist))

    for scan in scanlist:
        pc.merge_segments(filename, scan)
    pc.merge_scans(os.path.dirname(filename), os.path.basename(filename), scanlist, snrmin=snrmin, snrmax=snrmax)
Example 3
    def __init__(self, sdmfile, fileroot='', workdir=''):
        """Collect calibration-related metadata from an SDM file.

        sdmfile: path to the SDM directory/file; made absolute before use.
        fileroot: root name for output files; defaults to the SDM basename.
        workdir: working directory; defaults to the SDM's directory.
        """
        self.sdmfile = os.path.abspath(sdmfile)

        if not fileroot:
            self.fileroot = os.path.split(os.path.abspath(sdmfile))[1]
        else:
            self.fileroot = fileroot

        if not workdir:
            self.workdir = os.path.split(self.sdmfile)[0]
        else:
            self.workdir = workdir

        # Scan and source metadata parsed from the SDM.
        self.scans = ps.read_scans(sdmfile)
        self.sources = ps.read_sources(sdmfile)
        # Scans whose intent marks them as phase (gain) calibrators.
        self.gainscans = [
            sc for sc in self.scans.keys()
            if 'PHASE' in self.scans[sc]['intent']
        ]  # get all cal fields
        # Scans whose intent marks them as bandpass calibrators.
        self.bpscans = [
            sc for sc in self.scans.keys()
            if 'BANDPASS' in self.scans[sc]['intent']
        ]  # get all cal fields
        self.sdm = sdmpy.SDM(self.sdmfile)

        # NOTE(review): self.gainstr / self.bpstr are never assigned in this
        # method — presumably they are class properties derived from
        # gainscans/bpscans elsewhere in the (unseen) class body. Verify;
        # otherwise this line raises AttributeError.
        if len(self.gainstr) or len(self.bpstr):
            print 'Found gaincal scans %s and bpcal scans %s.' % (self.gainstr,
                                                                  self.bpstr)

        self.set_fluxinfo()
Example 4
def mergeall(filename, snrmin, snrmax, bdfdir):
    """ Merge cands/noise files over all scans

    Tries to find scans from filename, but will fall back to finding relevant files if it does not exist.
    """

    filename = os.path.abspath(filename)
    bignumber = 500  # fallback scan range when filenames cannot be parsed
    directory = os.path.dirname(filename)
    basename = os.path.basename(filename)

    if not os.path.exists(filename):
        logger.warn('Could not find file {0}. Estimating scans from available files.'.format(filename))
        filelist = glob.glob(os.path.join(directory, '*{0}_sc*pkl'.format(basename)))
        try:
            # Parse scan numbers out of the per-scan pickle filenames.
            scanlist = sorted({int(fn.rstrip('.pkl').split('_sc')[1].split('seg')[0]) for fn in filelist})
        except IndexError:
            logger.warn('Could not parse filenames for scans. Looking over big range.')
            scanlist = range(bignumber)
    else:
        scanlist = sorted(ps.read_scans(filename, bdfdir=bdfdir).keys())

    logger.info('Merging over scans {0}'.format(scanlist))

    for scan in scanlist:
        pc.merge_segments(filename, scan)
    pc.merge_scans(directory, basename, scanlist, snrmin=snrmin, snrmax=snrmax)
Example 5
def searchone(filename, scan, paramfile, logfile, bdfdir):
    """ Searches one scan of filename

    filename is name of local sdm ('filename.GN' expected locally).
    scan is scan number to search. if none provided, script prints all.
    assumes filename is an sdm.
    """

    filename = os.path.abspath(filename)
    scandict = ps.read_scans(filename, bdfdir=bdfdir)

    if scan != 0:
        # Search every segment of the requested scan.
        state = rt.set_pipeline(filename, scan, paramfile=paramfile,
                                fileroot=os.path.basename(filename), logfile=logfile)
        rt.pipeline(state, range(state['nsegments']))

        # merge per-segment then per-scan products
        pc.merge_segments(filename, scan)
        pc.merge_scans(os.path.dirname(filename), os.path.basename(filename), scandict.keys())
    else:
        # No scan selected: report what is available plus an example state.
        logger.info('Scans, Target names:')
        logger.info('%s' % str([(ss, scandict[ss]['source']) for ss in scandict]))
        logger.info('Example pipeline:')
        state = rt.set_pipeline(filename, scandict.popitem()[0], paramfile=paramfile,
                                fileroot=os.path.basename(filename), logfile=logfile)
Example 6
def read(filename, paramfile, bdfdir, scan):
    """ Simple parse and return metadata for pipeline for first scan """

    filename = os.path.abspath(filename)
    scaninfo = ps.read_scans(filename, bdfdir=bdfdir)

    # Report available scans and their targets, then an example state.
    logger.info('Scans, Target names:')
    logger.info('%s' % str([(ss, scaninfo[ss]['source']) for ss in scaninfo]))
    logger.info('Example pipeline:')
    state = rt.set_pipeline(filename, scan, paramfile=paramfile, logfile=False)
Example 7
def testrtpipe(filename, paramfile):
    """ Function for a quick test of rtpipe and queue system
    filename should have full path.
    """

    scaninfo = ps.read_scans(filename, bdfdir=default_bdfdir)
    firstscan = list(scaninfo.keys())[0]
    telcalfile = rtutils.gettelcalfile(telcaldir, filename, timeout=60)
    return rtutils.search('default', filename, paramfile, '', [firstscan],
                          telcalfile=telcalfile, redishost=redishost,
                          bdfdir=default_bdfdir)
Example 8
def read(filename, paramfile, bdfdir, scan):
    """ Simple parse and return metadata for pipeline for first scan """

    filename = os.path.abspath(filename)

    # Summarize scans/targets before constructing the example pipeline state.
    sc = ps.read_scans(filename, bdfdir=bdfdir)
    logger.info('Scans, Target names:')
    logger.info('%s' % str([(ss, sc[ss]['source']) for ss in sc]))
    logger.info('Example pipeline:')
    state = rt.set_pipeline(filename, scan, paramfile=paramfile, logfile=False)
Example 9
def slowms(filename, slow, redishost, bdfdir):
    """ Take SDM filename and create MS with integration timescale of slow for all scans.
    Queues to 'slow' queue managed by redishost.
    """

    import rtpipe.parsesdm as ps

    scaninfo = ps.read_scans(filename, bdfdir=bdfdir)
    logger.info('Creating measurement set for %s, scans %s' % (filename, scaninfo.keys()))

    # Make the BDFs visible next to the SDM before integration.
    rtutils.linkbdfs(filename, scaninfo, bdfdir)

    # Submit slow-processing job to our alternate queue.
    scanstr = ','.join(map(str, scaninfo.keys()))
    rtutils.integrate(filename, scanstr, slow, redishost)
Example 10
    def __init__(self, sdmfile, fileroot='', workdir=''):
        self.sdmfile = os.path.abspath(sdmfile)

        if not fileroot:
            self.fileroot = os.path.split(os.path.abspath(sdmfile))[1]
        else:
            self.fileroot = fileroot

        if not workdir:
            self.workdir = os.path.split(self.sdmfile)[0]
        else:
            self.workdir = workdir

        self.scans = ps.read_scans(sdmfile)
        self.sources = ps.read_sources(sdmfile)
        self.gainscans = [sc for sc in self.scans.keys() if 'PHASE' in self.scans[sc]['intent']]   # get all cal fields
        self.bpscans = [sc for sc in self.scans.keys() if 'BANDPASS' in self.scans[sc]['intent']]   # get all cal fields
        self.sdm = sdmpy.SDM(self.sdmfile)

        if len(self.gainstr) or len(self.bpstr):
            print 'Found gaincal scans %s and bpcal scans %s.' % (self.gainstr, self.bpstr)

        self.set_fluxinfo()