Beispiel #1
0
def do_download(cname, basedir='.'):
    """Rsync the archive data for field *cname* into *basedir* and prepare it.

    If the field directory already exists and every file listed in its
    'big-mslist.txt' is present on disk, the download is skipped entirely
    (saves an rsync) and the existing directory is returned.

    Returns the working directory in which everything has been done.
    Raises RuntimeError if the MS list cannot be constructed afterwards.
    """
    # check whether download is needed -- saves rsync
    target = basedir + '/' + cname
    if os.path.isdir(target):
        os.chdir(target)
        if os.path.isfile('big-mslist.txt'):
            # context manager so the file handle is closed promptly
            # (the original open(...).readlines() leaked the handle)
            with open('big-mslist.txt') as mslist:
                files = [l.rstrip() for l in mslist]
            if all(os.path.exists(f) for f in files):
                # every MS is already here: nothing to download
                return os.getcwd()

    update_status(cname, 'Downloading')
    os.chdir(basedir)
    do_rsync_download(cname, '/disks/paradata/shimwell/LoTSS-DR2/archive/',
                      os.getcwd())
    os.chdir(cname)
    striparchivename()

    success = make_list(workdir=os.getcwd())
    if not success:
        update_status(cname, 'Download failed')
        raise RuntimeError('Failed to make mslist')

    #filechecker()
    fixsymlinks()
    update_status(cname, 'Downloaded')
    return os.getcwd()  # return directory where everything has been done
def do_download(cname, basedir='.'):
    """Fetch the archive data for *cname* via rsync and prepare the
    working directory: strip archive names, build the MS list, run the
    file checker, fix symlinks and drop a reprocessing config file.

    Returns the directory in which everything has been done.
    Raises RuntimeError when the MS list cannot be constructed.
    """
    update_status(cname, 'Downloading')
    os.chdir(basedir)
    do_rsync_download(cname,
                      '/disks/paradata/shimwell/LoTSS-DR2/archive/',
                      os.getcwd())
    os.chdir(cname)
    striparchivename()

    # guard clause: bail out early if the MS list could not be made
    if not make_list(workdir=os.getcwd()):
        update_status(cname, 'Download failed')
        raise RuntimeError('Failed to make mslist')

    filechecker()
    fixsymlinks()
    template = os.environ['DDF_DIR'] + '/ddf-pipeline/examples/tier1-jul2018.cfg'
    os.system('cp ' + template + ' reprocess-vlow.cfg')
    update_status(cname, 'Downloaded')
    # return directory where everything has been done
    return os.getcwd()
Beispiel #3
0
name = sys.argv[1]
# optional second argument overrides the default qsub submission script;
# catch only the specific exception a missing argv entry raises
try:
    qsubfile = sys.argv[2]
except IndexError:
    qsubfile = '/home/mjh/git/ddf-pipeline/pipeline.qsub'

try:
    os.mkdir(name)
except OSError:
    # the working directory already exists -- re-use it
    warn('Working directory already exists')
os.chdir(name)
report('Downloading data')
if not download_dataset('https://lofar-webdav.grid.sara.nl',
                        '/SKSP/' + name + '/'):
    die('Download failed to get the right number of files')

report('Unpacking data')
unpack()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
if make_list():
    report('Submit job')
    os.system('qsub -N ddfp-' + name + ' -v WD=' + rootdir + '/' + name + ' ' +
              qsubfile)
else:
    die('make_list could not construct the MS list')
Beispiel #4
0
def do_run_pipeline(name, basedir):
    """Download, unpack and (if necessary) average the data for *name*
    under *basedir*, build the MS list, create a custom config and
    submit the pipeline job.

    Field names start with 'P', observation names with 'L'; anything
    else is rejected via die().
    """
    if name[0] != 'P' and name[0] != 'L':
        die('This code should be used only with field or observation names',
            database=False)

    do_field = (name[0] == 'P')

    # optional command-line override of the qsub file; catch only the
    # specific exception a missing argv entry raises
    try:
        qsubfile = sys.argv[2]
    except IndexError:
        qsubfile = '/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir = basedir + '/' + name
    try:
        os.mkdir(workdir)
    except OSError:
        # re-use an existing working directory rather than failing
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success = download_field(name, basedir=basedir)
    else:
        success = download_dataset('https://lofar-webdav.grid.sara.nl',
                                   '/SKSP/' + name + '/',
                                   basedir=basedir)

    if not success:
        die('Download failed, see earlier errors', database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        if do_field:
            # it is the unpack step that failed here (the sibling
            # implementation records 'Unpack failed' too)
            update_status(name, 'Unpack failed', workdir=workdir)
        raise
    if do_field:
        update_status(name, 'Unpacked', workdir=workdir)

    report('Deleting tar files')
    os.system('rm ' + workdir + '/*.tar.gz')
    os.system('rm ' + workdir + '/*.tar')

    averaged = False
    report('Checking structure')
    g = glob.glob(workdir + '/*.ms')
    msl = MSList(None, mss=g)
    uobsids = set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m, ch, o in zip(msl.mss, msl.channels, msl.obsids):
            if o == thisobs:
                channels = len(ch)
                print('MS', m, 'has', channels, 'channels')
                if channels > 20:
                    # too many channels: average down before processing
                    update_status(name, 'Averaging', workdir=workdir)
                    print('Averaging needed for', thisobs, '!')
                    averaged = True
                    average(wildcard=workdir + '/*' + thisobs + '*')
                    os.system('rm -r ' + workdir + '/*' + thisobs +
                              '*pre-cal.ms')
                break

    report('Making ms lists')
    success = make_list(workdir=workdir)
    if do_field:
        list_db_update(success, workdir=workdir)
    if not success:
        die('make_list could not construct the MS list', database=False)

    report('Creating custom config file from template')
    make_custom_config(name, workdir, do_field, averaged)

    # now run the job; do_run_job is deliberately left to apply its own
    # default qsub file (qsubfile=None)
    do_run_job(name, basedir=basedir, qsubfile=None, do_field=do_field)
striparchivename()
uselowres = args['uselowres']
# select the image products to subtract depending on resolution mode
if not uselowres:
    fullmask = 'image_full_ampphase_di_m.NS.mask01.fits'
    indico = 'image_full_ampphase_di_m.NS.DicoModel'
    outdico = 'image_full_ampphase_di_m_SUB.NS.DicoModel'
else:
    fullmask = 'image_full_low_m.mask01.fits'
    indico = 'image_full_low_m.DicoModel'
    outdico = 'image_full_low_m_SUB.DicoModel'

if not os.path.isfile(args['mslist']):
    # try to make it
    from make_mslists import make_list
    success = make_list(workdir=os.getcwd())
    if not os.path.isfile(args['mslist']):
        # single formatted message: IOError('File', x, '...') would set
        # a 3-tuple of args instead of a readable message
        raise IOError('File %s does not exist and could not be created'
                      % args['mslist'])

boxfile = args['boxfile']
ncpu = args['ncpu']
timestepavg = args['timeavg']
freqstepavg = args['freqavg']
obsid = args['prefixname']

# stage switches for the subtraction workflow
dopredict = True
dosubtract = True
doconcat = True
dokmscal = False
dophaseshift = True
Beispiel #6
0
else:
    success = download_dataset('https://lofar-webdav.grid.sara.nl',
                               '/SKSP/' + name + '/')

if not success:
    die('Download failed, see earlier errors', database=False)

report('Unpacking data')
unpack()
if do_field:
    unpack_db_update()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
success = make_list()
if do_field:
    list_db_update(success)

if success:
    report('Submit job')
    os.system(
        'pipeline.py /disks/paradata/shimwell/LoTSS-DR2/ongoing-leiden-runs/tier1-DR2.cfg'
    )
    if do_field():
        update_status(name, 'Queued')

else:
    die('make_list could not construct the MS list', database=False)
striparchivename()
uselowres = args['uselowres']
# select the image products to subtract depending on resolution mode
if not uselowres:
    fullmask = 'image_full_ampphase_di_m.NS.mask01.fits'
    indico = 'image_full_ampphase_di_m.NS.DicoModel'
    outdico = 'image_full_ampphase_di_m_SUB.NS.DicoModel'
else:
    fullmask = 'image_full_low_m.mask01.fits'
    indico = 'image_full_low_m.DicoModel'
    outdico = 'image_full_low_m_SUB.DicoModel'


if not os.path.isfile(args['mslist']):
    # try to make it
    from make_mslists import make_list
    success = make_list(workdir=os.getcwd())
    if not os.path.isfile(args['mslist']):
        # single formatted message: IOError('File', x, '...') would set
        # a 3-tuple of args instead of a readable message
        raise IOError('File %s does not exist and could not be created'
                      % args['mslist'])

boxfile = args['boxfile']
ncpu = args['ncpu']
timestepavg = args['timeavg']
freqstepavg = args['freqavg']
obsid = args['prefixname']


# stage switches for the subtraction workflow
dopredict = True
dosubtract = True
doconcat = True
dokmscal = False
dophaseshift = True
if do_field:
    success = download_field(name)
else:
    success = download_dataset('https://lofar-webdav.grid.sara.nl',
                               '/SKSP/' + name + '/')

if not success:
    die('Download failed, see earlier errors', database=False)

report('Unpacking data')
unpack()
if do_field:
    unpack_db_update()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
success = make_list()
if do_field:
    list_db_update(success)

if success:
    report('Submit job')
    os.system('pipeline.py /disks/paradata/shimwell/LoTSS-DR2/ongoing-leiden-runs/tier1-DR2.cfg')
    # BUG FIX: do_field is a boolean flag, not a callable -- the original
    # `if do_field():` raised TypeError at runtime (cf. `if do_field:` above)
    if do_field:
        update_status(name, 'Queued')

else:
    die('make_list could not construct the MS list', database=False)
Beispiel #9
0
def do_run_pipeline(name,basedir,qsubfile=None,do_field=True):
    '''
    Download, unpack and (if necessary) average the data for *name*
    under *basedir*, build the MS list, create a custom config and
    submit the pipeline job via do_run_job.

    set do_field False for the now obsolete behaviour of downloading
    and imaging a particular observation

    '''
    if qsubfile is None:
        # default torque submission script
        qsubfile='/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir=basedir+'/'+name
    try:
        os.mkdir(workdir)
    except OSError:
        # re-use an existing working directory rather than failing
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success=download_field(name,basedir=basedir)
    else:
        success=download_dataset('https://lofar-webdav.grid.sara.nl','/SKSP/'+name+'/',basedir=basedir)

    if not success:
        die('Download failed, see earlier errors',database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        # record the failure in the field database before propagating
        if do_field:
            update_status(name,'Unpack failed',workdir=workdir)
        raise
    if do_field:
        update_status(name,'Unpacked',workdir=workdir)

    report('Deleting tar files')
    os.system('rm '+workdir+'/*.tar.gz')
    os.system('rm '+workdir+'/*.tar')

    averaged=False
    report('Checking structure')
    g=glob.glob(workdir+'/*.ms')
    msl=MSList(None,mss=g)
    # whether any MS uses dysco compression; forwarded to the job below
    dysco=np.any(msl.dysco)
    uobsids=set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m,ch,o,hc in zip(msl.mss,msl.channels,msl.obsids,msl.hascorrected):
            if o==thisobs:
                if not(hc):
                    # no CORRECTED_DATA column: treat like the averaged case
                    print('MS',m,'has no corrected_data column, force use of DATA')
                    averaged=True
                channels=len(ch)
                print('MS',m,'has',channels,'channels')
                if channels>20:
                    # too many channels: average this obsid down, then
                    # remove the pre-averaging copies
                    update_status(name,'Averaging',workdir=workdir)
                    print('Averaging needed for',thisobs,'!')
                    averaged=True
                    average(wildcard=workdir+'/*'+thisobs+'*')
                    os.system('rm -r '+workdir+'/*'+thisobs+'*pre-cal.ms')
                break

    report('Making ms lists')
    success=make_list(workdir=workdir)
    if do_field:
        list_db_update(success,workdir=workdir)
    if not success:
        die('make_list could not construct the MS list',database=False)

    report('Creating custom config file from template')
    make_custom_config(name,workdir,do_field,averaged)

    # now run the job
    # NOTE(review): the qsubfile computed above is not forwarded here --
    # do_run_job is called with qsubfile=None, so it presumably applies
    # its own default; confirm this is intended
    do_run_job(name,basedir=basedir,qsubfile=None,do_field=do_field,dysco=dysco)