Example No. 1
def write_1dpos_plot_sub(tag='1d_post_plot', exe=None, log_dir=None, output_dir="./"):
    """
    Write a submit file for plotting 1d posterior cumulants.
    """

    exe = exe or which("postprocess_1d_cumulative")
    plot_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)

    plot_sub_name = tag + '.sub'
    plot_job.set_sub_file(plot_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(cluster)-$(process)"
    plot_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    plot_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    plot_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    plot_job.add_opt("save-sampler-file", "ILE_$(macromassid).sqlite")
    plot_job.add_opt("disable-triplot", '')
    plot_job.add_opt("disable-1d-density", '')

    plot_job.add_condor_cmd('getenv', 'True')
    plot_job.add_condor_cmd('request_memory', '2048')
    
    return plot_job, plot_sub_name
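
A minimal usage sketch for the job returned above, assuming the glue.pipeline module used throughout these examples; the executable path, log directory, DAG name, and mass-point identifier are placeholders. The node-level macro is what fills in $(macromassid) in the generated submit file.

from glue import pipeline

dag = pipeline.CondorDAG("1d_post_plot.log")
dag.set_dag_file("1d_post_plot")

plot_job, plot_sub_name = write_1dpos_plot_sub(
    exe="/path/to/postprocess_1d_cumulative", log_dir="logs/")

node = pipeline.CondorDAGNode(plot_job)
node.add_macro("macromassid", "12")  # expands $(macromassid) in the .sub file
dag.add_node(node)

dag.write_sub_files()  # writes 1d_post_plot.sub
dag.write_dag()        # writes 1d_post_plot.dag with a VARS line for the macro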
Example No. 2
def write_tri_plot_sub(tag='plot_tri', injection_file=None, exe=None, log_dir=None, output_dir="./"):
    """
    Write a submit file for launching jobs to make triangle (corner) plots from ILE output
    """

    exe = exe or which("make_triplot")
    plot_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)

    plot_sub_name = tag + '.sub'
    plot_job.set_sub_file(plot_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(cluster)-$(process)"
    plot_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    plot_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    plot_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    plot_job.add_opt("output", "ILE_triplot_$(macromassid).png")
    if injection_file is not None:
        plot_job.add_opt("injection", injection_file)
    plot_job.add_arg("ILE_$(macromassid).sqlite")

    plot_job.add_condor_cmd('getenv', 'True')
    #plot_job.add_condor_cmd('request_memory', '2048')
    
    return plot_job, plot_sub_name
Example No. 3
def write_posterior_plot_sub(tag='plot_post', exe=None, log_dir=None, output_dir="./"):
    """
    Write a submit file for launching jobs to plot posterior likelihood contours from ILE output
    """

    exe = exe or which("plot_like_contours")
    plot_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)

    plot_sub_name = tag + '.sub'
    plot_job.set_sub_file(plot_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(cluster)-$(process)"
    plot_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    plot_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    plot_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    plot_job.add_opt("show-points", '')
    plot_job.add_opt("dimension1", "mchirp")
    plot_job.add_opt("dimension2", "eta")
    plot_job.add_opt("input-cache", "ILE_all.cache")
    plot_job.add_opt("log-evidence", '')

    plot_job.add_condor_cmd('getenv', 'True')
    plot_job.add_condor_cmd('request_memory', '1024')
    
    return plot_job, plot_sub_name
Example No. 4
def write_bayes_pe_postproc_sub(tag='bayespe_post_plot',
                                exe=None,
                                log_dir=None,
                                web_dir="./",
                                inj_xml=None):
    """
    Write a submit file for postprocessing output and pushing it through cbcBayesPostProc.py
    """

    exe = exe or which("cbcBayesPostProc.py")
    plot_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)

    plot_sub_name = tag + '.sub'
    plot_job.set_sub_file(plot_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(cluster)-$(process)"
    plot_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    plot_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    plot_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    #
    # Injection options
    #
    plot_job.add_opt("outpath", web_dir)
    if inj_xml:
        plot_job.add_opt("inj", inj_xml)
        # FIXME: Since we put individual sim entries into their own XML, this is
        # always zero. We might need to tweak this if we use a bigger one
        plot_job.add_opt("eventnum", 0)

    # Calculate evidence (just to compare)
    plot_job.add_opt("dievidence", '')

    plot_job.add_opt("header", "header.txt")
    plot_job.add_opt("data", "tmp")

    plot_job.add_condor_cmd('getenv', 'True')
    plot_job.add_condor_cmd('request_memory', '1024')

    return plot_job, plot_sub_name
Example No. 5
def write_result_coalescence_sub(tag='coalesce',
                                 exe=None,
                                 log_dir=None,
                                 output_dir="./",
                                 use_default_cache=True):
    """
    Write a submit file for launching jobs to coalesce ILE output
    """

    exe = exe or which("ligolw_sqlite")
    sql_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)

    sql_sub_name = tag + '.sub'
    sql_job.set_sub_file(sql_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(cluster)-$(process)"
    sql_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    sql_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    sql_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    if use_default_cache:
        sql_job.add_opt("input-cache", "ILE_$(macromassid).cache")
    else:
        sql_job.add_arg("$(macrofiles)")
    #sql_job.add_arg("*$(macromassid)*.xml.gz")
    sql_job.add_opt("database", "ILE_$(macromassid).sqlite")
    #if os.environ.has_key("TMPDIR"):
    #tmpdir = os.environ["TMPDIR"]
    #else:
    #print >>sys.stderr, "WARNING, TMPDIR environment variable not set. Will default to /tmp/, but this could be dangerous."
    #tmpdir = "/tmp/"
    tmpdir = "/dev/shm/"
    sql_job.add_opt("tmp-space", tmpdir)
    sql_job.add_opt("verbose", '')

    sql_job.add_condor_cmd('getenv', 'True')
    sql_job.add_condor_cmd('request_memory', '1024')

    return sql_job, sql_sub_name
Example No. 6
def init_job(exe, universe, tag, subdir, logdir, cp, memory=None):

    logtag = '$(cluster)-$(process)'

    subdir = os.path.abspath(subdir)
    logdir = os.path.abspath(logdir)
    nfslogdir = '%s/logs' % subdir
    if not os.path.isdir(nfslogdir):
        os.mkdir(nfslogdir)

    job = pipeline.CondorDAGJob(universe, exe)
    job.set_sub_file(os.path.join(subdir, '%s.sub' % (tag)))
    job.set_stderr_file(os.path.join(nfslogdir, '%s-%s.err' % (tag, logtag)))
    job.set_stdout_file(os.path.join(nfslogdir, '%s-%s.out' % (tag, logtag)))
    job.add_condor_cmd('getenv', 'True')

    if cp.has_section(tag):
        job.add_ini_opts(cp, tag)
    if memory:
        job.add_condor_cmd('requirements', 'memory > %s' % memory)

    return job
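
A minimal usage sketch for init_job(), assuming Python 3's configparser alongside the same glue.pipeline module; the executable path, tag, and option name are placeholders. Options placed in a config-file section named after the tag are forwarded to the job's command line via add_ini_opts().

import os
from configparser import ConfigParser

cp = ConfigParser()
cp.add_section('skyarea')                   # section name matches the job tag
cp.set('skyarea', 'samples-per-bin', '50')  # forwarded as "--samples-per-bin 50"

os.makedirs('run', exist_ok=True)           # init_job() creates run/logs itself
job = init_job(exe='/path/to/run_skyarea.py',  # hypothetical executable
               universe='vanilla',
               tag='skyarea',
               subdir='run',
               logdir='run/logs',
               cp=cp,
               memory=2048)                 # adds: requirements = memory > 2048

# init_job() sets stdout/stderr paths but not the Condor log, so set one before writing.
job.set_log_file('run/logs/skyarea-$(cluster)-$(process).log')
job.write_sub_file()                        # writes run/skyarea.sub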
Example No. 7
#Setup DAG
outputPath=os.path.abspath(os.path.normpath(outputPath))
dagLog=os.path.normpath(dagLocks+"/"+outputName+".LOG")
myDag=pipeline.CondorDAG(os.path.normpath(dagLog))
myDag.set_dag_file(os.path.normpath(str(outputName)))

#Setup SUB
tsHandler=os.path.expanduser(cp.get('condor','clustertool'))
tsUniverse=str(cp.get('condor','clustertool_universe')).lower()
if not os.path.exists(str(tsHandler)):
    print "ERROR: Can't find tracksearch handler executable."
    os.abort()
    

myJob=pipeline.CondorDAGJob(tsUniverse,tsHandler)
myJob.set_sub_file(str(outputName)+".sub")
logDir=os.path.normpath(outputPath+"/logs/")
buildDir(logDir)
myJob.set_stdout_file(os.path.abspath(os.path.normpath(logDir+"/log_$(cluster)_$(process).out")))
myJob.set_stderr_file(os.path.abspath(os.path.normpath(logDir+"/log_$(cluster)_$(process).err")))


#Add all the proper options and macro handles
if not cp.has_section('candidatethreshold'):
    print "NO [candidatethreshold] section!\n"
    os.abort()
else:
    if cp.has_option('candidatethreshold','expression-threshold'):
        newVal=cp.get('candidatethreshold','expression-threshold')
        if '"' in newVal and '\\' not in newVal:
Example No. 8
def write_CIP_sub(tag='integrate',
                  exe=None,
                  log_dir=None,
                  use_eos=False,
                  ncopies=1,
                  arg_str=None,
                  request_memory=8192,
                  arg_vals=None,
                  **kwargs):
    """
    Write a submit file for launching jobs to marginalize the likelihood over intrinsic parameters.

    Outputs:
        - An instance of the CondorDAGJob that was generated, along with the
          name of the submit file that was written
    """

    exe = exe or which(
        "util_ConstructIntrinsicPosterior_GenericCoordinates.py")
    ile_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)
    # This is a hack since CondorDAGJob hides the queue property
    ile_job._CondorJob__queue = ncopies

    ile_sub_name = tag + '.sub'
    ile_job.set_sub_file(ile_sub_name)

    #
    # Add options en mass, by brute force
    #
    # The option string is passed through verbatim, so strip the leading "--"
    # (the first two characters) before handing it to add_opt, which adds the
    # "--" prefix back when the submit file is written.
    ile_job.add_opt(arg_str[2:], '')

    #
    # Macro based options.
    #     - select EOS from list (done via macro)
    #     - pass spectral parameters
    #
    #    ile_job.add_var_opt("event")
    if use_eos:
        ile_job.add_var_opt("using-eos")

    #
    # Logging options
    #
    uniq_str = "$(macroevent)-$(cluster)-$(process)"
    ile_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    ile_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    ile_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    if kwargs.has_key("fname_output_samples"
                      ) and kwargs["fname_output_samples"] is not None:
        #
        # Need to modify the output file so it's unique
        #
        ofname = kwargs["fname_output_samples"].split(".")
        ofname, ext = ofname[0], ".".join(ofname[1:])
        ile_job.add_file_opt("output-file",
                             "%s-%s.%s" % (ofname, uniq_str, ext))
    if kwargs.has_key("fname_output_integral"
                      ) and kwargs["fname_output_integral"] is not None:
        #
        # Need to modify the output file so it's unique
        #
        ofname = kwargs["fname_output_integral"].split(".")
        ofname, ext = ofname[0], ".".join(ofname[1:])
        ile_job.add_file_opt("output-file",
                             "%s-%s.%s" % (ofname, uniq_str, ext))

    #
    # Add normal arguments
    # FIXME: Get valid options from a module
    #
    for opt, param in kwargs.items():
        if isinstance(param, list) or isinstance(param, tuple):
            # NOTE: Hack to get around multiple instances of the same option
            for p in param:
                ile_job.add_arg("--%s %s" % (opt.replace("_", "-"), str(p)))
        elif param is True:
            ile_job.add_opt(opt.replace("_", "-"), None)
        elif param is None or param is False:
            continue
        else:
            ile_job.add_opt(opt.replace("_", "-"), str(param))

    ile_job.add_condor_cmd('getenv', 'True')
    ile_job.add_condor_cmd('request_memory', str(request_memory))
    # To change interactively:
    #   condor_qedit
    # for example:
    #    for i in `condor_q -hold  | grep oshaughn | awk '{print $1}'`; do condor_qedit $i RequestMemory 30000; done; condor_release -all

    try:
        ile_job.add_condor_cmd('accounting_group',
                               os.environ['LIGO_ACCOUNTING'])
        ile_job.add_condor_cmd('accounting_group_user',
                               os.environ['LIGO_USER_NAME'])
    except KeyError:
        print(
            " LIGO accounting information not available.  You must add this manually to integrate.sub !"
        )

    ###
    ### SUGGESTION FROM STUART (for later)
    # request_memory = ifthenelse( (LastHoldReasonCode=!=34 && LastHoldReasonCode=!=26), InitialRequestMemory, int(1.5 * NumJobStarts * MemoryUsage) )
    # periodic_release = ((HoldReasonCode =?= 34) || (HoldReasonCode =?= 26))
    # This will automatically release a job that is put on hold for using too much memory with a 50% increased memory request each time.
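    # A hedged, untested sketch of the suggestion above: both values can be
    # ClassAd expressions, so they could be attached with add_condor_cmd.
    # Left commented out so they do not overwrite the fixed request_memory
    # already set for this job (condor commands are stored in a dict).
    #ile_job.add_condor_cmd('request_memory',
    #    'ifthenelse((LastHoldReasonCode =!= 34 && LastHoldReasonCode =!= 26), %d, int(1.5 * NumJobStarts * MemoryUsage))' % request_memory)
    #ile_job.add_condor_cmd('periodic_release',
    #    '(HoldReasonCode =?= 34) || (HoldReasonCode =?= 26)')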

    return ile_job, ile_sub_name
Example No. 9
priorfile = pagenode.get_pos_file()

# Convert prior samples to injections
convertsub = os.path.join(rundir, 'samples2injections.sub')
converterr = os.path.join(
    outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).err')
convertout = os.path.join(
    outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).out')

if opts.injections:
    injfile = os.path.abspath(opts.injections)
else:
    injfile = os.path.join(rundir, 'priorsamples.xml')
approx = prior_cp.get('engine', 'approx')
prior2injexe = prior_cp.get('condor', 'pos_to_sim_burst')
prior2injjob = pipeline.CondorDAGJob('vanilla', prior2injexe)
if main_cp.has_option('analysis', 'accounting_group'):
    prior2injjob.add_condor_cmd('accounting_group',
                                main_cp.get('analysis', 'accounting_group'))
prior2injjob.set_sub_file(convertsub)
prior2injjob.set_stderr_file(converterr)
prior2injjob.set_stdout_file(convertout)
prior2injjob.add_condor_cmd('getenv', 'True')
prior2injnode = pipeline.CondorDAGNode(prior2injjob)
prior2injnode.add_var_opt('output', injfile)
prior2injnode.add_var_opt('num-of-injs', str(opts.trials))
prior2injnode.add_var_opt('approx', approx)
prior2injnode.add_var_arg(priorfile)
prior2injnode.add_parent(priordagnode)

# Create the pipeline based on the injections
Example No. 10
if mode == "single":
    print "Run the following command:"
    print " ".join(
        [executable_name, interferometer,
         str(start_time),
         str(end_time)])
    sys.exit()

## Generate sub file and dag file

run_dir = run_dir + "/"
# Initialize dag
dag = pipeline.CondorDAG('s6publish.log', dax=False)
dag.set_dag_file(run_dir + 's6publish')

subFile = pipeline.CondorDAGJob(mode, executable_name)
subFile.set_stdout_file(run_dir + 's6publish-$(cluster)-$(process).out')
subFile.set_stderr_file(run_dir + 's6publish-$(cluster)-$(process).err')
subFile.set_sub_file(run_dir + 's6publish.sub')
#print "Subfile:"
#print subFile.get_sub_file()

#blah2=open('v1_run_commands.txt','r')
#cmds=blah2.readlines()
#cmds
#cmds=[i.strip() for i in cmds]
times = []
#print "Computing times:"
#print gps_range
#print gps_stride_per_job
for i in range(gps_range[0], gps_range[1], gps_stride_per_job):
Example No. 11
def write_integrate_likelihood_extrinsic_sub(tag='integrate', exe=None, log_dir=None, intr_prms=("mass1", "mass2"), ncopies=1, condor_commands=None, **kwargs):
    """
    Write a submit file for launching jobs to marginalize the likelihood over
    extrinsic parameters.

    Inputs:
        - 'tag' is a string to specify the base name of output files. The output
          submit file will be named tag.sub, and the jobs will write their
          output to tag-ID.out, tag-ID.err, tag.log, where 'ID' is a unique
          identifier for each instance of a job run from the sub file.
        - 'cache' is the path to a cache file which gives the location of the
          data to be analyzed.
        - 'coinc' is the path to a coincident XML file, from which masses and
          times will be drawn. FIXME: remove this once it's no longer needed.
        - 'channelH1/L1/V1' is the channel name to be read for each of the
          H1, L1 and V1 detectors.
        - 'psdH1/L1/V1' is the path to an XML file specifying the PSD of
          each of the H1, L1, V1 detectors.
        - 'ncopies' is the number of runs with identical input parameters to
          submit per condor 'cluster'

    Outputs:
        - An instance of the CondorDAGJob that was generated for ILE
    """

    assert len(kwargs["psd_file"]) == len(kwargs["channel_name"])

    exe = exe or which("rapidpe_integrate_extrinsic_likelihood")
    ile_job = pipeline.CondorDAGJob(universe="vanilla", executable=exe)
    # This is a hack since CondorDAGJob hides the queue property
    ile_job._CondorJob__queue = ncopies

    ile_sub_name = tag + '.sub'
    ile_job.set_sub_file(ile_sub_name)

    #
    # Logging options
    #
    uniq_str = "$(macromassid)-$(cluster)-$(process)"
    ile_job.set_log_file("%s%s-%s.log" % (log_dir, tag, uniq_str))
    ile_job.set_stderr_file("%s%s-%s.err" % (log_dir, tag, uniq_str))
    ile_job.set_stdout_file("%s%s-%s.out" % (log_dir, tag, uniq_str))

    if kwargs.has_key("output_file") and kwargs["output_file"] is not None:
        #
        # Need to modify the output file so it's unique
        #
        ofname = kwargs["output_file"].split(".")
        ofname, ext = ofname[0], ".".join(ofname[1:])
        ile_job.add_file_opt("output-file", "%s-%s.%s" % (ofname, uniq_str, ext))
        del kwargs["output_file"]
        if kwargs.has_key("save_samples") and kwargs["save_samples"] is True:
            ile_job.add_opt("save-samples", '')
            del kwargs["save_samples"]

    #
    # Add normal arguments
    # FIXME: Get valid options from a module
    #
    for opt, param in kwargs.items():
        if isinstance(param, list) or isinstance(param, tuple):
            # NOTE: Hack to get around multiple instances of the same option
            for p in param:
                ile_job.add_arg("--%s %s" % (opt.replace("_", "-"), str(p)))
        elif param is True:
            ile_job.add_opt(opt.replace("_", "-"), '')
        elif param is None:
            continue
        else:
            ile_job.add_opt(opt.replace("_", "-"), str(param))

    #
    # Macro based options
    #
    ile_job.add_var_opt("mass1")
    ile_job.add_var_opt("mass2")
    for p in intr_prms:
        ile_job.add_var_opt(p.replace("_", "-"))

    ile_job.add_condor_cmd('getenv', 'True')
    if condor_commands is not None:
        for cmd, value in condor_commands.items():
            ile_job.add_condor_cmd(cmd, value)
    ile_job.add_condor_cmd('request_memory', '2048')
    
    return ile_job, ile_sub_name
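
A hedged call sketch for the keyword-driven option loop above; the option names, channels, and file names are placeholders rather than the tool's documented interface. Lists are repeated once per element, True becomes a bare flag, and None values are skipped.

ile_job, ile_sub_name = write_integrate_likelihood_extrinsic_sub(
    tag='integrate',
    log_dir='logs/',
    cache_file='local.cache',                           # --cache-file local.cache
    channel_name=['H1=FAKE-STRAIN', 'L1=FAKE-STRAIN'],  # repeated --channel-name ...
    psd_file=['H1=psd_H1.xml.gz', 'L1=psd_L1.xml.gz'],  # repeated --psd-file ...
    output_file='ILE_output.xml.gz',                    # made unique per job above
    save_samples=True,                                  # becomes --save-samples
)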