Exemple #1
0
 def check(self):
     """Terminate gracefully if a user-requested stop is pending.

     When the stop flag is set: record a 'Stopped' status in the
     database (if one is in use), clean up shared memory, and raise
     RuntimeError to unwind the caller.
     """
     if not self.stop:
         return
     if use_database():
         update_status(None, 'Stopped')
     os.system('CleanSHM.py')
     raise RuntimeError(
         'Caught user-defined exception, terminating gracefully')
Exemple #2
0
def do_run_job(name, basedir, qsubfile=None, do_field=True, prefix='ddfp'):
    """Submit a pipeline run for *name* as a torque job via qsub.

    Parameters:
        name: field or observation name; the working directory is basedir/name.
        basedir: parent directory of the working directory.
        qsubfile: qsub script to submit; defaults to the master pipeline script.
        do_field: if True, mark the field as 'Queued' in the status database.
        prefix: prefix used for the torque job name.
    """
    config = ''
    workdir = basedir + '/' + name
    # A local tier1 config file in the working directory overrides the default.
    g = glob.glob(workdir + '/tier1*.cfg')
    if len(g) > 0:
        # BUG FIX: was a Python-2-only print statement; print() works in 2 and 3
        print('Local config file exists, using that')
        config = ',CONFIG=' + g[0]
    if qsubfile is None:
        qsubfile = '/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'
    report('Submit job')
    # Working directory (and optional config) are passed to the job environment.
    os.system('qsub -N ' + prefix + '-' + name + ' -v WD=' + workdir + config +
              ' ' + qsubfile)
    if do_field:
        update_status(name, 'Queued', workdir=workdir)
Exemple #3
0
def do_run_job(name, basedir, qsubfile=None, do_field=True, prefix='ddfp', dysco=False):
    """Submit a pipeline run for *name* as a torque job via qsub.

    Parameters:
        name: field or observation name; the working directory is basedir/name.
        basedir: parent directory of the working directory.
        qsubfile: qsub script to submit; defaults to the master pipeline script.
        do_field: if True, mark the field as 'Queued' in the status database.
        prefix: prefix used for the torque job name.
        dysco: if True, switch to the '-fdr14' (dysco) variant of the qsub script.
    """
    config = ''
    workdir = basedir + '/' + name
    # A local tier1 config file in the working directory overrides the default.
    g = glob.glob(workdir + '/tier1*.cfg')
    if len(g) > 0:
        # BUG FIX: was a Python-2-only print statement; print() works in 2 and 3
        print('Local config file exists, using that')
        config = ',CONFIG=' + g[0]
    if qsubfile is None:
        qsubfile = '/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'
    if dysco:
        # Dysco-compressed data need the fdr14 variant of the submission script.
        qsubfile = qsubfile.replace('.qsub', '-fdr14.qsub')
    report('Submit job')
    # Working directory (and optional config) are passed to the job environment.
    os.system('qsub -N ' + prefix + '-' + name + ' -v WD=' + workdir + config + ' ' + qsubfile)
    if do_field:
        update_status(name, 'Queued', workdir=workdir)
Exemple #4
0
def die(s, database=True):
    """Report a fatal error and abort.

    Prints *s* in failure colours, optionally records a 'Failed' status
    in the database, then raises Exception(s).
    """
    print(bcolors.FAIL + s + bcolors.ENDC)
    if database:
        if use_database():
            update_status(None, 'Failed')
    raise Exception(s)
def do_run_pipeline(name, basedir):
    """Download, unpack, structure-check/average and queue a pipeline run.

    ``name`` must be a field name (starting with 'P') or an observation
    name (starting with 'L'). Data are placed in ``basedir/name`` and the
    job is finally submitted with ``do_run_job``.
    """
    if name[0] != 'P' and name[0] != 'L':
        die('This code should be used only with field or observation names',
            database=False)

    # Field names start with 'P'; only fields get status-database updates.
    do_field = (name[0] == 'P')

    # Optional command-line override of the qsub submission script.
    try:
        qsubfile = sys.argv[2]
    except IndexError:
        # BUG FIX: was a bare 'except:'; only a missing argv entry is expected
        qsubfile = '/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir = basedir + '/' + name
    try:
        os.mkdir(workdir)
    except OSError:
        # Re-runs are allowed; an existing directory is only a warning.
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success = download_field(name, basedir=basedir)
    else:
        success = download_dataset('https://lofar-webdav.grid.sara.nl',
                                   '/SKSP/' + name + '/',
                                   basedir=basedir)

    if not success:
        die('Download failed, see earlier errors', database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        if do_field:
            # BUG FIX: status was 'List failed' (copy/paste); this is the
            # unpack stage — matches the corrected variant elsewhere in the file
            update_status(name, 'Unpack failed', workdir=workdir)
        raise
    if do_field:
        update_status(name, 'Unpacked', workdir=workdir)

    report('Deleting tar files')
    os.system('rm ' + workdir + '/*.tar.gz')
    os.system('rm ' + workdir + '/*.tar')

    averaged = False
    report('Checking structure')
    g = glob.glob(workdir + '/*.ms')
    msl = MSList(None, mss=g)
    uobsids = set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m, ch, o in zip(msl.mss, msl.channels, msl.obsids):
            if o == thisobs:
                channels = len(ch)
                # BUG FIX: Python-2-only print statements converted to print()
                print('MS', m, 'has', channels, 'channels')
                # More than 20 channels means the data still need averaging.
                if channels > 20:
                    update_status(name, 'Averaging', workdir=workdir)
                    print('Averaging needed for', thisobs, '!')
                    averaged = True
                    average(wildcard=workdir + '/*' + thisobs + '*')
                    # Remove the unaveraged originals to save disk space.
                    os.system('rm -r ' + workdir + '/*' + thisobs +
                              '*pre-cal.ms')
                break

    report('Making ms lists')
    success = make_list(workdir=workdir)
    if do_field:
        list_db_update(success, workdir=workdir)
    if not success:
        die('make_list could not construct the MS list', database=False)

    report('Creating custom config file from template')
    make_custom_config(name, workdir, do_field, averaged)

    # now run the job
    # BUG FIX: the argv-derived qsubfile was computed but never used
    # (qsubfile=None was passed); honour the command-line override
    do_run_job(name, basedir=basedir, qsubfile=qsubfile, do_field=do_field)
Exemple #6
0
def unpack_db_update():
    """Record in the status database that the current field is unpacked."""
    update_status(None, 'Unpacked')
Exemple #7
0
def die(s, database=True):
    """Report a fatal error and abort.

    Prints *s* in failure colours, optionally records a 'Failed' status
    in the database, then raises Exception(s).
    """
    # BUG FIX: was a Python-2-only print statement; print() works in 2 and 3
    # and matches the other die() variant in this file
    print(bcolors.FAIL + s + bcolors.ENDC)
    if database and use_database():
        update_status(None, 'Failed')
    raise Exception(s)
def list_db_update(success, workdir=None):
    """Record the outcome of MS-list construction in the status database."""
    status = 'Ready' if success else 'List failed'
    update_status(None, status, workdir=workdir)
Exemple #9
0
 def check(self):
     """Stop the run cleanly when the user has requested a stop.

     With the stop flag set, this marks the database entry 'Stopped'
     (when a database is in use), runs the shared-memory cleanup
     script, and raises RuntimeError to terminate.
     """
     if not self.stop:
         return
     if use_database():
         update_status(None, 'Stopped')
     os.system('CleanSHM.py')
     raise RuntimeError('Caught user-defined exception, terminating gracefully')
Exemple #10
0
else:
    success = download_dataset('https://lofar-webdav.grid.sara.nl',
                               '/SKSP/' + name + '/')

if not success:
    die('Download failed, see earlier errors', database=False)

report('Unpacking data')
unpack()
if do_field:
    unpack_db_update()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
success = make_list()
if do_field:
    list_db_update(success)

if success:
    report('Submit job')
    os.system(
        'pipeline.py /disks/paradata/shimwell/LoTSS-DR2/ongoing-leiden-runs/tier1-DR2.cfg'
    )
    # BUG FIX: do_field is a boolean flag, not a callable —
    # 'if do_field():' raised TypeError (cf. 'if do_field:' above)
    if do_field:
        update_status(name, 'Queued')

else:
    die('make_list could not construct the MS list', database=False)
def list_db_update(success, workdir=None):
    """Record the outcome of MS-list construction in the status database."""
    if not success:
        update_status(None, 'List failed', workdir=workdir)
    else:
        update_status(None, 'Ready', workdir=workdir)
# Fetch the data: a whole field ('P...') or a single observation.
if do_field:
    success = download_field(name)
else:
    success = download_dataset('https://lofar-webdav.grid.sara.nl',
                               '/SKSP/' + name + '/')

if not success:
    die('Download failed, see earlier errors', database=False)

report('Unpacking data')
unpack()
if do_field:
    unpack_db_update()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
success = make_list()
if do_field:
    list_db_update(success)

if success:
    report('Submit job')
    os.system('pipeline.py /disks/paradata/shimwell/LoTSS-DR2/ongoing-leiden-runs/tier1-DR2.cfg')
    # BUG FIX: do_field is a boolean flag, not a callable —
    # 'if do_field():' raised TypeError (cf. 'if do_field:' above)
    if do_field:
        update_status(name, 'Queued')

else:
    die('make_list could not construct the MS list', database=False)
Exemple #13
0
def do_run_pipeline(name,basedir,qsubfile=None,do_field=True):
    '''
    Download, unpack, structure-check/average and queue a pipeline run
    for a field (name starting 'P') or observation (name starting 'L');
    data end up in basedir/name and the job is submitted via do_run_job.

    set do_field False for the now obsolete behaviour of downloading
    and imaging a particular observation

    '''
    # Default torque submission script when none was supplied.
    if qsubfile is None:
        qsubfile='/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir=basedir+'/'+name
    try:
        os.mkdir(workdir)
    except OSError:
        # Re-runs are allowed; an existing directory is only a warning.
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success=download_field(name,basedir=basedir)
    else:
        success=download_dataset('https://lofar-webdav.grid.sara.nl','/SKSP/'+name+'/',basedir=basedir)

    if not success:
        die('Download failed, see earlier errors',database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        # Record the failure for fields before propagating the exception.
        if do_field:
            update_status(name,'Unpack failed',workdir=workdir)
        raise
    if do_field:
        update_status(name,'Unpacked',workdir=workdir)

    report('Deleting tar files')
    os.system('rm '+workdir+'/*.tar.gz')
    os.system('rm '+workdir+'/*.tar')

    averaged=False
    report('Checking structure')
    g=glob.glob(workdir+'/*.ms')
    msl=MSList(None,mss=g)
    # Any dysco-compressed MS forces the dysco variant of the qsub script.
    dysco=np.any(msl.dysco)
    uobsids=set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m,ch,o,hc in zip(msl.mss,msl.channels,msl.obsids,msl.hascorrected):
            if o==thisobs:
                if not(hc):
                    print('MS',m,'has no corrected_data column, force use of DATA')
                    averaged=True
                channels=len(ch)
                print('MS',m,'has',channels,'channels')
                # More than 20 channels means the data still need averaging.
                if channels>20:
                    update_status(name,'Averaging',workdir=workdir)
                    print('Averaging needed for',thisobs,'!')
                    averaged=True
                    average(wildcard=workdir+'/*'+thisobs+'*')
                    # Remove the unaveraged originals to save disk space.
                    os.system('rm -r '+workdir+'/*'+thisobs+'*pre-cal.ms')
                break

    report('Making ms lists')
    success=make_list(workdir=workdir)
    if do_field:
        list_db_update(success,workdir=workdir)
    if not success:
        die('make_list could not construct the MS list',database=False)

    report('Creating custom config file from template')
    # 'averaged' also flags MSs without CORRECTED_DATA (forces use of DATA).
    make_custom_config(name,workdir,do_field,averaged)

    # now run the job
    # NOTE(review): qsubfile=None here means do_run_job falls back to its own
    # default, ignoring this function's qsubfile argument — confirm intended.
    do_run_job(name,basedir=basedir,qsubfile=None,do_field=do_field,dysco=dysco)