def make_coinc_branch(dag, datafinds, seglistdict, time_slides, timing_params, psds_per_power, enable_clustering, tag, do_injections = False, verbose = False):
	"""
	Append one complete coincidence branch of the search to dag and
	return the set of its terminal nodes.

	The branch consists of:  optional lalapps_binj software-injection
	jobs, the single-instrument trigger-generation stage, optional
	pre- and post-ligolw_add bucluster clustering, one ligolw_add +
	burca coincidence stage per entry in time_slides, optional
	binjfind injection identification, and conversion of the results
	to SQLite files whose cache is written to
	"<dagfile>_<tag>_output.cache".

	dag:  the CondorDAG under construction;  the power.* fragment
	helpers append their nodes to it.
	datafinds:  datafind nodes;  when do_injections is set each one
	is made a child of every binj node so the injections run first.
	seglistdict:  analysis segments;  .extent_all() is used to span
	the binj jobs.
	time_slides:  mapping from time-slide cache entry to its offset
	vectors;  one coincidence sub-branch is built per entry.  Must
	contain exactly one entry when do_injections is set.
	do_injections:  if True, add binj and binjfind jobs, and have
	ligolw_add remove its inputs when done.
	"""
	# injection list


	if do_injections:
		# the binj output carries its own copy of the offset
		# vector, so only a single time-slide document is supported
		assert len(time_slides) == 1
		if verbose:
			print >>sys.stderr, "Building lalapps_binj jobs ..."
		# injections span the low-freq-cutoff .. low-freq-cutoff +
		# bandwidth band configured on the power job
		binjnodes = power.make_binj_fragment(dag, seglistdict.extent_all(), time_slides.keys()[0], tag, 0.0, float(power.powerjob.get_opts()["low-freq-cutoff"]), float(power.powerjob.get_opts()["low-freq-cutoff"]) + float(power.powerjob.get_opts()["bandwidth"]))
		# add binj nodes as parents of the datafinds to force the binj's to
		# be run first.  this ensures that once a datafind has run the
		# power jobs that follow it will immediately be able to run, which
		# helps depth-first dagman do smarter things.
		for node in datafinds:
			for binjnode in binjnodes:
				node.add_parent(binjnode)
	else:
		binjnodes = set()


	# single-instrument trigger generation


	trigger_nodes = power.make_single_instrument_stage(dag, datafinds, seglistdict, tag, timing_params, psds_per_power, binjnodes = binjnodes, verbose = verbose)
	if enable_clustering:
		if verbose:
			print >>sys.stderr, "building pre-lladd bucluster jobs ..."
		trigger_nodes = power.make_bucluster_fragment(dag, trigger_nodes, "PRELLADD_%s" % tag, verbose = verbose)


	# coincidence analysis


	coinc_nodes = set()
	# all injection files produced by the binj jobs, fed into every
	# ligolw_add below and preserved in its output
	binj_cache = set([cache_entry for node in binjnodes for cache_entry in node.get_output_cache()])
	# otherwise too many copies of the offset vector will be fed into
	# burca
	assert len(binj_cache) < 2
	for n, (time_slides_cache_entry, these_time_slides) in enumerate(time_slides.items()):
		if verbose:
			print >>sys.stderr, "%s %d/%d (%s):" % (tag, n + 1, len(time_slides), time_slides_cache_entry.path)
		tisi_cache = set([time_slides_cache_entry])
		if do_injections:
			# lalapps_binj has already copied the time slide
			# document into its own output
			extra_input_cache = set()
		else:
			# ligolw_add needs to copy the time slide document
			# into is output
			extra_input_cache = tisi_cache
		nodes = set()
		# one ligolw_add job per coincident group of trigger files
		for seg, parents, cache, clipseg in power.group_coinc_parents(trigger_nodes, these_time_slides, verbose = verbose):
			nodes |= power.make_lladd_fragment(dag, parents | binjnodes, "%s_%d" % (tag, n), segment = seg, input_cache = cache | binj_cache, extra_input_cache = extra_input_cache, remove_input = do_injections, preserve_cache = binj_cache | tisi_cache)
		if enable_clustering:
			if verbose:
				print >>sys.stderr, "building post-lladd bucluster jobs ..."
			nodes = power.make_bucluster_fragment(dag, nodes, "POSTLLADD_%s_%d" % (tag, n), verbose = verbose)
		if verbose:
			print >>sys.stderr, "building burca jobs ..."
		coinc_nodes |= power.make_burca_fragment(dag, nodes, "%s_%d" % (tag, n), verbose = verbose)
		if verbose:
			print >>sys.stderr, "done %s %d/%d" % (tag, n + 1, len(time_slides))


	# injection identification


	if do_injections:
		if verbose:
			print >>sys.stderr, "building binjfind jobs ..."
		coinc_nodes = power.make_binjfind_fragment(dag, coinc_nodes, tag, verbose = verbose)


	# conversion to SQLite database files


	if verbose:
		print >>sys.stderr, "building sqlite jobs ..."
	coinc_nodes = power.make_sqlite_fragment(dag, coinc_nodes, tag, verbose = verbose)


	# done


	# record the cache of the branch's final outputs next to the .dag file
	power.write_output_cache(coinc_nodes, "%s_%s_output.cache" % (os.path.splitext(dag.get_dag_file())[0], tag))
	return coinc_nodes
# Beispiel #2 -- 0
# (example-separator and score lines left over from the site this code
# was scraped from;  commented out so the file parses)
#
# =============================================================================
#
#                               DAG Construction
#
# =============================================================================
#

# create the analysis' working directories as configured
power.make_dag_directories(config_parser)

# the workflow itself, logging to the shared condor log
dag = pipeline.CondorDAG(condor_log)
dag.set_dag_file(options.dag_name)

# datafind jobs locating the frame data for the analysis segments
datafinds = power.make_datafind_stage(dag, options.data_seglists, verbose=True)

# single-instrument trigger generation followed by ligolw_add +
# bucluster clustering of the results
nodes = power.make_single_instrument_stage(dag,
                                           datafinds,
                                           options.data_seglists,
                                           options.user_tag,
                                           timing_params,
                                           psds_per_job,
                                           verbose=True)
nodes = power.make_lladded_bucluster_fragment(dag, nodes, options.data_seg,
                                              options.user_tag)

# publish the clustered triggers to the configured destination
make_publish_fragment(dag, nodes, options.data_seg, options.user_tag,
                      options.publish_dest)
# FIXME: still broken
# NOTE(review): darmpowerfrag is not defined anywhere in this file --
# confirm where it is meant to come from before enabling this path
if options.dmtmon_dest:
    make_burst2mon_fragment(dag, darmpowerfrag, options.instrument,
                            options.data_seg, options.user_tag)

# FIXME: still broken
nodes = power.make_single_instrument_injections_stage(dag,
# Beispiel #3 -- 0
# (example-separator and score lines left over from the site this code
# was scraped from;  commented out so the file parses.  NOTE(review):
# the statement immediately above is cut off mid-call by this marker)
def make_coinc_branch(dag,
                      datafinds,
                      seglistdict,
                      time_slides,
                      timing_params,
                      psds_per_power,
                      enable_clustering,
                      tag,
                      do_injections=False,
                      verbose=False):
    """
    Append one complete coincidence branch of the search to dag and
    return the set of its terminal nodes:  optional lalapps_binj
    injection jobs, single-instrument trigger generation, optional
    bucluster clustering before and after ligolw_add, one ligolw_add +
    burca stage per time-slide document, optional binjfind injection
    identification, and conversion to SQLite.  The cache of the final
    outputs is written to "<dagfile>_<tag>_output.cache".
    """
    #
    # optional software-injection jobs
    #

    binjnodes = set()
    if do_injections:
        # the binj output carries its own copy of the offset vector,
        # so only a single time-slide document is supported
        assert len(time_slides) == 1
        if verbose:
            print >> sys.stderr, "Building lalapps_binj jobs ..."
        flow = float(power.powerjob.get_opts()["low-freq-cutoff"])
        fhigh = flow + float(power.powerjob.get_opts()["bandwidth"])
        binjnodes = power.make_binj_fragment(dag, seglistdict.extent_all(),
                                             time_slides.keys()[0], tag, 0.0,
                                             flow, fhigh)
        # make every datafind a child of every binj node so the binj's
        # run first:  once a datafind finishes, the power jobs behind
        # it can start immediately, which helps depth-first dagman do
        # smarter things
        for datafind_node in datafinds:
            for binjnode in binjnodes:
                datafind_node.add_parent(binjnode)

    #
    # single-instrument trigger generation, with optional pre-lladd
    # clustering
    #

    trigger_nodes = power.make_single_instrument_stage(dag,
                                                       datafinds,
                                                       seglistdict,
                                                       tag,
                                                       timing_params,
                                                       psds_per_power,
                                                       binjnodes=binjnodes,
                                                       verbose=verbose)
    if enable_clustering:
        if verbose:
            print >> sys.stderr, "building pre-lladd bucluster jobs ..."
        trigger_nodes = power.make_bucluster_fragment(dag,
                                                      trigger_nodes,
                                                      "PRELLADD_%s" % tag,
                                                      verbose=verbose)

    #
    # one coincidence sub-branch per time-slide document
    #

    binj_cache = set(cache_entry for node in binjnodes
                     for cache_entry in node.get_output_cache())
    # otherwise too many copies of the offset vector will be fed into
    # burca
    assert len(binj_cache) < 2
    coinc_nodes = set()
    for branch, (slides_cache_entry,
                 offset_vectors) in enumerate(time_slides.items()):
        if verbose:
            print >> sys.stderr, "%s %d/%d (%s):" % (
                tag, branch + 1, len(time_slides), slides_cache_entry.path)
        tisi_cache = set([slides_cache_entry])
        # lalapps_binj has already copied the time slide document into
        # its own output;  otherwise ligolw_add must copy it into its
        # output
        extra_input_cache = set() if do_injections else tisi_cache
        lladd_nodes = set()
        for seg, parents, cache, clipseg in power.group_coinc_parents(
                trigger_nodes, offset_vectors, verbose=verbose):
            lladd_nodes |= power.make_lladd_fragment(
                dag,
                parents | binjnodes,
                "%s_%d" % (tag, branch),
                segment=seg,
                input_cache=cache | binj_cache,
                extra_input_cache=extra_input_cache,
                remove_input=do_injections,
                preserve_cache=binj_cache | tisi_cache)
        if enable_clustering:
            if verbose:
                print >> sys.stderr, "building post-lladd bucluster jobs ..."
            lladd_nodes = power.make_bucluster_fragment(
                dag,
                lladd_nodes,
                "POSTLLADD_%s_%d" % (tag, branch),
                verbose=verbose)
        if verbose:
            print >> sys.stderr, "building burca jobs ..."
        coinc_nodes |= power.make_burca_fragment(dag,
                                                 lladd_nodes,
                                                 "%s_%d" % (tag, branch),
                                                 verbose=verbose)
        if verbose:
            print >> sys.stderr, "done %s %d/%d" % (tag, branch + 1,
                                                    len(time_slides))

    #
    # injection identification
    #

    if do_injections:
        if verbose:
            print >> sys.stderr, "building binjfind jobs ..."
        coinc_nodes = power.make_binjfind_fragment(dag,
                                                   coinc_nodes,
                                                   tag,
                                                   verbose=verbose)

    #
    # conversion to SQLite database files
    #

    if verbose:
        print >> sys.stderr, "building sqlite jobs ..."
    coinc_nodes = power.make_sqlite_fragment(dag,
                                             coinc_nodes,
                                             tag,
                                             verbose=verbose)

    #
    # record the output cache and hand the terminal nodes back
    #

    power.write_output_cache(
        coinc_nodes,
        "%s_%s_output.cache" % (os.path.splitext(dag.get_dag_file())[0], tag))
    return coinc_nodes
#
# =============================================================================
#


# create the analysis' working directories as configured
power.make_dag_directories(config_parser)


# the workflow itself, logging to the shared condor log
dag = pipeline.CondorDAG(condor_log)
dag.set_dag_file(options.dag_name)


# datafind jobs locating the frame data for the analysis segments
datafinds = power.make_datafind_stage(dag, options.data_seglists, verbose = True)


# single-instrument trigger generation followed by ligolw_add +
# bucluster clustering of the results
nodes = power.make_single_instrument_stage(dag, datafinds, options.data_seglists, options.user_tag, timing_params, psds_per_job, verbose = True)
nodes = power.make_lladded_bucluster_fragment(dag, nodes, options.data_seg, options.user_tag)


# publish the clustered triggers to the configured destination
make_publish_fragment(dag, nodes, options.data_seg, options.user_tag, options.publish_dest)
# FIXME: still broken
# NOTE(review): darmpowerfrag is not defined anywhere in this file --
# confirm where it is meant to come from before enabling this path
if options.dmtmon_dest:
	make_burst2mon_fragment(dag, darmpowerfrag, options.instrument, options.data_seg, options.user_tag)


# FIXME: still broken
# NOTE(review): binjnodes is not defined at this scope in this file --
# presumably the binj jobs were meant to be built above;  verify before
# enabling this injection run
nodes = power.make_single_instrument_injections_stage(dag, datafinds, binjnodes, options.data_seglists, "INJECTIONS_%s" % options.user_tag, timing_params, psds_per_injection, verbose = True)
nodes = power.make_lladded_bucluster_fragment(dag, nodes, options.data_seg, "INJECTIONS_%s" % options.user_tag)
nodes = power.make_bucut_fragment(dag, nodes, "INJECTIONS_%s" % options.user_tag)
nodes = power.make_binjfind_fragment(dag, nodes, "INJECTIONS_%s" % options.user_tag)