job_name = '{}_{}'.format(job_root, idd)
    outdir = os.path.join(out_root, idd)
    out_fname = '{}_results.pkl'.format(idd)
    infiles = [obs_fname, args.coaddfile]

    if all(os.path.exists(fn) for fn in infiles):
        args_script = [os.path.basename(args.coaddfile),
                       os.path.basename(obs_fname),
                       '--results-filename', out_fname]

        cluster, f_submit, f_script = condor_submit(script, create_only=test, args=args_script,
                                                    log_root=condor_dir,
                                                    output_root=out_root,
                                                    jobname=job_name,
                                                    grid_proxy='/home/adama/.globus/grid_proxy',
                                                    input_files=infiles,
                                                    output_files=[out_fname],
                                                    aux_input_files=[],
                                                    request_disk=job_disk,
                                                    request_memory=job_ram,
                                                    clustertools_version='py3-v3',
                                                    spt3g_env=True)

    else:
        print('Not processing observation {}:'.format(idd))
        for fn in infiles:
            if not os.path.exists(fn):
                print('{} does not exist'.format(fn))

        print('')

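The loop bodies in these examples start partway through a submission script, so the surrounding setup (imports, paths, resource requests) is not shown. Below is a minimal, hypothetical sketch of the preamble such a loop assumes; the import path for condor_submit is an assumption, and every path, name, and size is a placeholder.

# Hypothetical preamble for the per-observation submission loops in these examples.
# All paths, argument names, and resource sizes below are illustrative placeholders.
import os
import argparse
from spt3g import core
from spt3g.cluster.condor_tools import condor_submit   # assumed import path

parser = argparse.ArgumentParser()
parser.add_argument('coaddfile', help='Coadded map shipped with each job.')
parser.add_argument('obsids', nargs='+', help='Observation IDs to process.')
parser.add_argument('--submit', action='store_true',
                    help='Actually submit jobs; default only creates job files.')
args = parser.parse_args()

test = not args.submit                     # create_only=True unless --submit is given
script = '/path/to/analysis_script.py'     # placeholder analysis script
job_root = 'coadd_analysis'                # placeholder job name prefix
condor_dir = '/scratch/username/condor_logs/{}'.format(job_root)
out_root = '/spt/user/username/{}'.format(job_root)
job_ram = 4 * core.G3Units.GB
job_disk = 4 * core.G3Units.GB

ids = args.obsids
filenames = ['/spt/data/obs/{}.g3'.format(idd) for idd in ids]
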
Example #2
    outdir = os.path.join(out_root, jobname)
    cls_outputfile = '{}.pkl'.format(jobname)
    outfiles = [cls_outputfile]
    infiles = [sim_skies_fnames[jsky]]

    log_dir = os.path.join(condor_dir, jobname)

    cluster, f_submit, f_script = condor_submit(
        pargs.script,
        create_only=test,
        args=[
            os.path.basename(sim_skies_fnames[jsky]), cls_outputfile,
            '--linear-bias-mag 0.0 1.0 2.0 3.0', '--ncalstares 4',
            '--norm-to-unbiased', '--fit-cosmology'
        ],
        log_root=log_dir,
        output_root=outdir,
        jobname=jobname,
        grid_proxy='/home/adama/.globus/grid_proxy',
        input_files=infiles,
        output_files=outfiles,
        request_disk=6 * core.G3Units.GB,
        request_memory=6 * core.G3Units.GB,
        clustertools_version='py3-v3',
        user_code='')

    # '--res 1.0',
    # '--ra-pixels 8000',
    # '--dec-pixels 3000'],
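
In this example a flag and its values are packed into single strings ('--linear-bias-mag 0.0 1.0 2.0 3.0'), while Example #1 passes flag and value as separate list entries; presumably both styles end up joined into the job's command line. A small sketch of building the same argument list from Python values rather than hard-coded strings; the filenames and values are placeholders.

# Build the same style of argument list from Python values; the filenames and
# flag values here are placeholders.
import os

bias_mags = [0.0, 1.0, 2.0, 3.0]
sim_sky_fname = '/sim/skies/sky_0000.g3'        # placeholder input sky
cls_outputfile = 'sky_0000_cls.pkl'

script_args = [
    os.path.basename(sim_sky_fname),
    cls_outputfile,
    '--linear-bias-mag ' + ' '.join(str(m) for m in bias_mags),
    '--ncalstares 4',
    '--norm-to-unbiased',
    '--fit-cosmology',
]
print(' '.join(script_args))   # roughly the command line the wrapped script sees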

Example #3
for idd, obs_fname in zip(ids, filenames):
    job_name = '{}_{}'.format(job_root, idd)
    outdir = os.path.join(out_root, idd)
    out_fname = '{}_results.pkl'.format(idd)
    infiles = [obs_fname, args.coaddfile]

    if all(os.path.exists(fn) for fn in infiles):
        args_script = [os.path.basename(args.coaddfile),
                       os.path.basename(obs_fname),
                       '--frequency', args.frequency,
                       '--results-filename', out_fname]

        cluster, f_submit, f_script = condor_submit(script, create_only=test, args=args_script,
                                                    log_root=condor_dir,
                                                    output_root=out_root,
                                                    jobname=job_name,
                                                    grid_proxy='/home/adama/.globus/grid_proxy',
                                                    extra_requirements='(GLIDEIN_ResourceName =!= "NPX")',
                                                    input_files=infiles,
                                                    output_files=[out_fname],
                                                    aux_input_files=[],
                                                    request_disk=job_disk,
                                                    request_memory=job_ram,
                                                    clustertools_version='py3-v3',
                                                    spt3g_env=True)

    else:
        print('Not processing observation {}:'.format(idd))
        for fn in infiles:
            if not os.path.exists(fn):
                print('{} does not exist'.format(fn))
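
extra_requirements is a raw HTCondor ClassAd expression; here it keeps jobs off the NPX resource. Several clauses can be combined into one expression before handing it to condor_submit, for example (a sketch; the second clause is purely illustrative):

# Join multiple HTCondor requirement clauses into one ClassAd expression.
requirement_clauses = [
    '(GLIDEIN_ResourceName =!= "NPX")',
    '(OpSysAndVer =?= "CentOS7")',     # illustrative extra clause
]
extra_requirements = ' && '.join(requirement_clauses)
# -> '(GLIDEIN_ResourceName =!= "NPX") && (OpSysAndVer =?= "CentOS7")'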
Example #4
    infiles = [cal_fname] + sorted(data_fnames)

    if all(os.path.exists(fn) for fn in infiles):
        args_in = [os.path.basename(dat) for dat in infiles]
        args = '{infiles} -o {outfile} -s {source} -r 2.0 -x 75 -y 50 -lr' \
              .format(infiles = ' '.join(args_in),
                      outfile = job_name+'.g3',
                      source = source)

        cluster, f_submit, f_script = condor_submit(
            script,
            create_only=test,
            args=[args],
            log_root=condor_dir,
            output_root=out_root,
            jobname=job_name,
            grid_proxy='/home/adama/.globus/grid_proxy',
            input_files=infiles,
            output_files=[job_name + '.g3'],
            request_disk=job_disk,
            request_memory=job_ram,
            clustertools_version='py3-v3')

    else:
        print('Not processing observation {}:'.format(obsid))
        for fn in infiles:
            if not os.path.exists(fn):
                print('{} does not exist'.format(fn))

        print('')
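Unlike Examples #1 and #2, this example pre-formats the whole command line into one string and passes it as a single-element args list. A quick sketch of building that string and checking how it tokenizes; the job name, source, and basenames are placeholders.

# Build the shell-style argument string used above and inspect its tokens.
import shlex

job_name = '87654321_focus'                            # placeholder job name
source = 'mat5a'                                       # placeholder source name
args_in = ['offline_calibration.g3', '87654321.g3']    # placeholder basenames

arg_string = '{infiles} -o {outfile} -s {source} -r 2.0 -x 75 -y 50 -lr' \
             .format(infiles=' '.join(args_in),
                     outfile=job_name + '.g3',
                     source=source)

print(shlex.split(arg_string))   # token list the wrapped script's parser would see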
Example #5
    infiles = caldatafiles + sorted(rawdatafiles)
    outputfile = '{}_output.g3'.format(obs)

    if all(os.path.exists(fn) for fn in infiles):
        args_in = [os.path.basename(dat) for dat in infiles]
        optional_args = '--output {} '.format(outputfile) + \
                        '--source {} '.format(pargs.source) + \
                        '--res 2.0 ' + \
                        '--xlen 100 --ylen 60 --lr'
        args = args_in + [optional_args]
        print(args)
        cluster, f_submit, f_script = condor_submit(pargs.script, create_only=test, args=args,
                                                    log_root=condor_dir,
                                                    output_root=out_root,
                                                    jobname=str(obs),
                                                    grid_proxy='/home/adama/.globus/grid_proxy',
                                                    input_files=infiles,
                                                    output_files=[outputfile],
                                                    request_disk=8 * core.G3Units.GB,
                                                    request_memory=6 * core.G3Units.GB,
                                                    clustertools_version='py3-v3')

    else:
        print('Not processing observation {}:'.format(obs))
        for fn in infiles:
            if not os.path.exists(fn):
                print('{} does not exist'.format(fn))

        print('')
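Every one of these loops repeats the same guard: submit only if all inputs exist, otherwise list the missing ones. That check can be factored into a small helper, sketched below; the helper name is made up.

import os

def missing_inputs(infiles):
    """Return the inputs that do not exist on disk (hypothetical helper)."""
    return [fn for fn in infiles if not os.path.exists(fn)]

# Usage inside a submission loop:
#     missing = missing_inputs(infiles)
#     if not missing:
#         cluster, f_submit, f_script = condor_submit(...)
#     else:
#         print('Not processing observation {}:'.format(obs))
#         for fn in missing:
#             print('{} does not exist'.format(fn))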

Example #6
                    action='store',
                    type=str,
                    help='Name of python script to run on the grid.')
parser.add_argument('--submit',
                    action='store_true',
                    help='Flag that submits jobs to the grid. Default is to '
                    'generate job files locally only.')
pargs = parser.parse_args()

test = True
if pargs.submit:
    test = False

condor_dir = '/scratch/adama/condor_logs/{}/'.format(pargs.jobname)
out_root = '/spt/user/adama/{}/'.format(pargs.jobname)

cluster, f_submit, f_script = condor_submit(
    pargs.script,
    create_only=test,
    args=[],
    log_root=condor_dir,
    output_root=out_root,
    jobname=pargs.jobname,
    grid_proxy='/home/adama/.globus/grid_proxy',
    input_files=[],  #infiles,
    output_files=[],  #outfiles,
    request_disk=2 * core.G3Units.GB,
    request_memory=2 * core.G3Units.GB,
    clustertools_version='py3-v3',
    user_code='')
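
Example #6 is cut off at the top of its argparse setup. A plausible reconstruction of the missing lines, assuming 'script' and 'jobname' are positional arguments (names inferred from pargs.script and pargs.jobname; the details are guesses):

# Hypothetical reconstruction of the truncated parser setup in Example #6.
import argparse

parser = argparse.ArgumentParser(description='Submit a single job to the grid.')
parser.add_argument('jobname',
                    action='store',
                    type=str,
                    help='Name used for the condor log and output directories.')
parser.add_argument('script',
                    action='store',
                    type=str,
                    help='Name of python script to run on the grid.')
# The --submit flag and parse_args() call then follow as shown above.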