# assumes tempfile, glue.pipeline and the lalapps inspiral module are
# imported, and that basename, usertag, cp and dax are defined above

# create a uniquely named log file for the Condor jobs to write to
tempfile.template = basename + '.dag.log.'
logfile = tempfile.mktemp()
fh = open(logfile, "w")
fh.close()

# create the DAG writing the log to the specified directory
dag = pipeline.CondorDAG(logfile, dax)
if usertag:
    dag.set_dag_file(basename + '.' + usertag)
else:
    dag.set_dag_file(basename)

# create the Condor jobs that will be used in the DAG
df_job = pipeline.LSCDataFindJob('cache', 'logs', cp, dax)
tmplt_job = inspiral.TmpltBankJob(cp, dax)
insp_job = inspiral.InspiralJob(cp, dax)
trig_job = inspiral.TrigbankJob(cp)
inca_job = inspiral.IncaJob(cp)

# set better submit file names than the default
if usertag:
    subsuffix = '.' + usertag + '.sub'
else:
    subsuffix = '.sub'
df_job.set_sub_file(basename + '.datafind' + subsuffix)
tmplt_job.set_sub_file(basename + '.tmpltbank' + subsuffix)
insp_job.set_sub_file(basename + '.inspiral' + subsuffix)
trig_job.set_sub_file(basename + '.trigbank' + subsuffix)
inca_job.set_sub_file(basename + '.inca' + subsuffix)

# set the usertag in the jobs
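# (the original snippet is truncated here; what follows is a minimal sketch,
# not part of the original, assuming glue.pipeline's CondorDAGJob.add_opt()
# interface for passing the usertag to each job)
if usertag:
    for job in [tmplt_job, insp_job, trig_job, inca_job]:
        job.add_opt('user-tag', usertag)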

# Example 2
    # clustering jobs (coarse and fine passes) plus a simple file-copy job
    si_job_coarse = SiClusterJobCoarse(cp)
    si_job_fine = SiClusterJobFine(cp)
    cp_job = FilesystemJob('cp')

    # Add ifo-specific template config
    if cp.has_section(ifo.lower() + '-tmpltbank'):
        tmplt_job.add_ini_opts(cp, ifo.lower() + '-tmpltbank')

    # Create a job to split the template into parallelization pieces
    split_job = inspiral.SplitBankJob(cp)

    # and one to run the inspirals
    if doCohPTF:
        insp_job = inspiral.PTFInspiralJob(cp)
    else:
        insp_job = inspiral.InspiralJob(cp)

    # template and inspiral jobs also need to know about the data
    if ifo == 'V1':
        channel_name = 'virgo-channel'
        data_name = 'virgo-data'
        type_name = 'virgo-type'
    else:
        channel_name = 'ligo-channel'
        data_name = 'ligo-data'
        type_name = 'ligo-type'

    # read the channel name and frame type for this detector from the config
    channel = cp.get('input', channel_name)
    type = cp.get('input', type_name)

    tmplt_job.set_channel(channel)
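    # (the original snippet is truncated here; a minimal sketch of the likely
    # continuation, not part of the original, assuming the inspiral job shares
    # glue.pipeline.AnalysisJob's set_channel() interface)
    insp_job.set_channel(channel)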

# Example 3
# tmpltbank: one template bank job per interferometer
tmplt_jobs = {}
for ifo in ifo_list:
  tmplt_jobs[ifo] = inspiral.TmpltBankJob(cp,opts.dax)
  tmplt_jobs[ifo].set_sub_file( basename + '.tmpltbank_' + ifo + subsuffix )

# inspinj: create the injection generation job
inspinj_job = inspiral.InspInjJob(cp)
inspinj_job.set_sub_file( basename + '.inspinj' + subsuffix )

if opts.noop_inspinj:
  # noop_job = true makes Condor mark the job as done without running it
  inspinj_job.add_condor_cmd("noop_job", "true")

# inspiral:
insp_jobs = {}

for ifo in ifo_list:
  insp_jobs[ifo] = inspiral.InspiralJob(cp,opts.dax)
  insp_jobs[ifo].set_sub_file( basename + '.inspiral_' + ifo + subsuffix )

# create inspiral checkpoint job
insp_ckpt_job = inspiral.InspiralCkptJob(cp,opts.dax)
if cp.has_option('pipeline','remote-site'):
  # when running on a remote site, the executable is staged to the site
  # rather than assumed to be pre-installed there
  insp_ckpt_job.set_executable_installed(False)

# ligolw_add:
lladd_job = pipeline.LigolwAddJob('logs', cp, opts.dax)
lladd_job.set_sub_file( basename + '.ligolw_add' + subsuffix )

# thinca:
thinca_job = inspiral.ThincaJob(cp,opts.dax)
thinca_job.set_universe('vanilla')
thinca_job.set_sub_file(basename + '.thinca' + subsuffix )
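
# (not part of the original snippets: a minimal sketch of how a script like
# this typically finishes, assuming the standard glue.pipeline/lalapps API --
# InspiralNode/ThincaNode, add_parent(), add_node(), write_sub_files(),
# write_dag() -- and a CondorDAG object `dag` as created in Example 1; in a
# real workflow each node would also be given its GPS times and input files)
insp_nodes = []
for ifo in ifo_list:
  node = inspiral.InspiralNode(insp_jobs[ifo])
  dag.add_node(node)
  insp_nodes.append(node)

# coincidence runs after all single-detector inspiral jobs
thinca_node = inspiral.ThincaNode(thinca_job)
for node in insp_nodes:
  thinca_node.add_parent(node)
dag.add_node(thinca_node)

# write the per-job submit files and the DAG itself
dag.write_sub_files()
dag.write_dag()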