Example #1
import os
import sys

# assumed imports: these helpers come from the GLUE workflow library
from glue import lal, pipeline


def make_ligolw_add(orig_cache, pattern, outfile, logdir, cp, ligolw_job=None):
  """
  Take a cache, sieve it for pattern, create a cache-file for ligolw_add,
  and create a LigolwAddNode that will create a new
  output cache with the matching files replaced by the single ligolw_added
  file.
  """
  # determine the files to ligolw_add
  sub_cache = orig_cache.sieve(description=pattern)
  if len(sub_cache) == 0:
    print("warning: no files on which to run ligolw_add", file=sys.stderr)
    return None

  # write the cache file, swapping outfile's ".xml" extension for "cache"
  cachefile = os.path.basename(outfile)[:-3] + 'cache'
  with open(cachefile, 'w') as f:
    sub_cache.tofile(f)

  if ligolw_job is None:
    ligolw_job = pipeline.LigolwAddJob(logdir, cp)
    ligolw_job.set_universe("local")

  node = pipeline.LigolwAddNode(ligolw_job)
  node.add_output_file(outfile)
  node.add_var_opt("input", cachefile)
  node.add_var_opt("output", outfile)

  # return the node and a copy of the input cache in which the entries
  # just extracted above are replaced by the single output file
  new_cache = lal.Cache([entry for entry in orig_cache if entry not in sub_cache])
  new_cache.extend(lal.Cache.from_urls([outfile]))
  return node, new_cache
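
A minimal usage sketch, assuming cp is an already-loaded ConfigParser and that pipeline and lal come from the GLUE workflow library; the file names and sieve pattern are hypothetical:

dag = pipeline.CondorDAG("ligolw_add.log")
cache = lal.Cache.fromfile(open("input.cache"))

result = make_ligolw_add(cache, "POWER", "combined.xml", "logs", cp)
if result is not None:
  # hypothetical wiring: keep the pruned cache, schedule the merge node
  node, cache = result
  dag.add_node(node)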
Example #2
def init_job_types(config_parser, job_types=("datafind", "rm", "binj", "power", "lladd", "binjfind", "bucluster", "bucut", "burca", "burca2", "sqlite", "burcatailor")):
	"""
	Construct definitions of the submit files.
	"""
	global datafindjob, rmjob, binjjob, powerjob, lladdjob, binjfindjob, buclusterjob, llb2mjob, bucutjob, burcajob, burca2job, sqlitejob, burcatailorjob

	# ligo_data_find
	if "datafind" in job_types:
		datafindjob = pipeline.LSCDataFindJob(os.path.join(os.getcwd(), get_cache_dir(config_parser)), os.path.join(os.getcwd(), get_out_dir(config_parser)), config_parser)

	# rm
	if "rm" in job_types:
		rmjob = RMJob(config_parser)

	# lalapps_binj
	if "binj" in job_types:
		binjjob = BurstInjJob(config_parser)

	# lalapps_power
	if "power" in job_types:
		powerjob = PowerJob(config_parser)

	# ligolw_add
	if "lladd" in job_types:
		lladdjob = pipeline.LigolwAddJob(get_out_dir(config_parser), config_parser)
		lladdjob.cache_dir = get_cache_dir(config_parser)

	# lalapps_binjfind
	if "binjfind" in job_types:
		binjfindjob = BinjfindJob(config_parser)

	# lalapps_bucut
	if "bucut" in job_types:
		bucutjob = BucutJob(config_parser)

	# lalapps_bucluster
	if "bucluster" in job_types:
		buclusterjob = BuclusterJob(config_parser)

	# lalapps_burca
	if "burca" in job_types:
		burcajob = BurcaJob(config_parser)

	# lalapps_burca2
	if "burca2" in job_types:
		burca2job = Burca2Job(config_parser)

	# ligolw_sqlite
	if "sqlite" in job_types:
		sqlitejob = SQLiteJob(config_parser)

	# lalapps_burca_tailor
	if "burcatailor" in job_types:
		burcatailorjob = BurcaTailorJob(config_parser)
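
A sketch of driving this initialiser; the ini file name is hypothetical, and only the requested job globals are populated:

import configparser

config_parser = configparser.ConfigParser()
config_parser.read("power_pipeline.ini")  # hypothetical config file

init_job_types(config_parser, job_types=("datafind", "power", "lladd"))
# datafindjob, powerjob and lladdjob are now usable module-level globals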
Example #3
                ifo.upper() + ':' + cp.get('input', 'virgo-channel'))
            insp_job.add_opt(
                ifo.lower() + '-frame-cache', 'cache/' + ifo[0] + '-' +
                cp.get('input', 'virgo-type') + '_CACHE-' + gps_times + '.lcf')
    else:
        if cp.has_section(ifo.lower() + '-inspiral'):
            insp_job.add_ini_opts(cp, ifo.lower() + '-inspiral')

        insp_job.set_channel(channel)
        insp_job.add_ini_opts(cp, data_name)

    # the first inspiral stage does not apply vetoes
        insp_job.add_ini_opts(cp, 'no-veto-inspiral')

    # create the ligolw_add job, writing its logs under the 'logs' directory
    lwadd_job = pipeline.LigolwAddJob('logs', cp)

    # raise the lwadd job's Condor priority so results get reported quickly
    lwadd_job.add_condor_cmd('priority', '20')
    lwadd_job.add_opt('add-lfn-table', '')

    # set the usertag on the template bank job
    if usertag:
        tmplt_job.add_opt('user-tag', usertag)

    # set better submit file names than the default
    subsuffix = '.sub'
    df_job.set_sub_file(basename + '_' + ifo + '.datafind' + subsuffix)
    tmplt_job.set_sub_file(basename + '_' + ifo + '.tmpltbank' + subsuffix)
    split_job.set_sub_file(basename + '_' + ifo + '.splitbank' + subsuffix)
    insp_job.set_sub_file(basename + '_' + ifo + '.inspiral' + subsuffix)
Example #4
  inspinj_job.add_condor_cmd("noop_job", "true")

# inspiral:
insp_jobs = {}

for ifo in ifo_list:
  insp_jobs[ifo] = inspiral.InspiralJob(cp, opts.dax)
  insp_jobs[ifo].set_sub_file(basename + '.inspiral_' + ifo + subsuffix)

# create inspiral checkpoint job
insp_ckpt_job = inspiral.InspiralCkptJob(cp, opts.dax)
if cp.has_option('pipeline','remote-site'):
  insp_ckpt_job.set_executable_installed(False)

# ligolw_add:
lladd_job = pipeline.LigolwAddJob('logs', cp, opts.dax)
lladd_job.set_sub_file(basename + '.ligolw_add' + subsuffix)

# thinca:
thinca_job = inspiral.ThincaJob(cp, opts.dax)
thinca_job.set_universe('vanilla')
thinca_job.set_sub_file(basename + '.thinca' + subsuffix)
thinca_job.add_condor_cmd("getenv", "True")

# sire:
sire_job = inspiral.SireJob(cp)
sire_job.set_sub_file(basename + '.sire' + subsuffix)
sire_summary_job = inspiral.SireJob(cp)
sire_summary_job.set_sub_file(basename + '.sire_summary' + subsuffix)

all_jobs = [inspinj_job, sire_job, sire_summary_job]
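
The objects above are job templates; a hedged sketch of turning lladd_job into a DAG node that runs after the per-IFO inspiral nodes (dag and insp_nodes are hypothetical and would be built elsewhere in the script):

lladd_node = pipeline.LigolwAddNode(lladd_job)
lladd_node.add_var_opt('input', 'H1L1-INSPIRAL.cache')  # hypothetical cache
lladd_node.add_var_opt('output', 'H1L1-INSPIRAL_COMBINED.xml')
for ifo in ifo_list:
  lladd_node.add_parent(insp_nodes[ifo])  # hypothetical per-IFO inspiral nodes
dag.add_node(lladd_node)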