Example #1
def make_burst2mon_fragment(dag, parent, instrument, seg, tag):
	# name the output after the instrument, tag, GPS start and duration
	cluster_output = "%s-POWERMON_%s-%s-%s.xml" % (instrument, tag, int(seg[0]), int(abs(seg)))
	# cluster the parent's triggers; the pre-script copies the parent's
	# output into place so the clustering job operates on the copy
	cluster = power.make_bucluster_fragment(dag, [], instrument, seg, "POWERMON_%s" % tag)
	cluster.add_parent(parent)
	cluster.set_pre_script("/bin/cp %s %s" % (parent.get_output_files()[0], cluster_output))
	cluster.add_file_arg(cluster_output)

	# convert the clustered triggers to the DMT monitor format and write
	# the result into the monitor destination directory
	node = Burst2MonNode(llb2mjob)
	node.set_name("ligolw_burst2mon")
	node.add_parent(cluster)
	node.set_input(cluster.get_output_files()[0])
	node.set_output(os.path.join(options.dmtmon_dest, cluster.get_output_files()[0]))
	node.add_macro("macrocomment", tag)
	dag.add_node(node)

	return node
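
For orientation, a minimal sketch of how this fragment might be wired into a pipeline setup script. The GPS segment bounds, the instrument and tag strings, and the pre-existing dag, parent, llb2mjob, and options objects are illustrative assumptions; the real pipeline builds them during its configuration stage.

# hypothetical wiring sketch -- the GPS bounds and the pre-existing
# dag/parent/llb2mjob/options objects are assumed, not defined here
from glue import segments

seg = segments.segment(874000000, 874000512)  # assumed analysis segment
# parent: an earlier trigger-generation CondorDAGNode already in the DAG
monitor_node = make_burst2mon_fragment(dag, parent, "H1", seg, "ONLINE")
# monitor_node depends on the clustering job, which depends on parent;
# its output is written under options.dmtmon_dest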
Example #2
def make_burst2mon_fragment(dag, parent, instrument, seg, tag):
    cluster_output = "%s-POWERMON_%s-%s-%s.xml" % (instrument, tag, int(
        seg[0]), int(abs(seg)))
    cluster = power.make_bucluster_fragment(dag, [], instrument, seg,
                                            "POWERMON_%s" % tag)
    cluster.add_parent(parent)
    cluster.set_pre_script("/bin/cp %s %s" %
                           (parent.get_output_files()[0], cluster_output))
    cluster.add_file_arg(cluster_output)

    node = Burst2MonNode(llb2mjob)
    node.set_name("ligolw_burst2mon")
    node.add_parent(cluster)
    node.set_input(cluster.get_output_files()[0])
    node.set_output(
        os.path.join(options.dmtmon_dest,
                     cluster.get_output_files()[0]))
    node.add_macro("macrocomment", tag)
    dag.add_node(node)

    return node
Example #3
def make_coinc_branch(dag, datafinds, seglistdict, time_slides, timing_params, psds_per_power, enable_clustering, tag, do_injections = False, verbose = False):
	# injection list


	if do_injections:
		assert len(time_slides) == 1
		if verbose:
			print >>sys.stderr, "Building lalapps_binj jobs ..."
		binjnodes = power.make_binj_fragment(dag, seglistdict.extent_all(), time_slides.keys()[0], tag, 0.0, float(power.powerjob.get_opts()["low-freq-cutoff"]), float(power.powerjob.get_opts()["low-freq-cutoff"]) + float(power.powerjob.get_opts()["bandwidth"]))
		# add binj nodes as parents of the datafinds to force the binj
		# jobs to run first.  this ensures that once a datafind has run,
		# the power jobs that follow it can start immediately, which
		# helps depth-first DAGMan schedule more sensibly.
		for node in datafinds:
			for binjnode in binjnodes:
				node.add_parent(binjnode)
	else:
		binjnodes = set()


	# single-instrument trigger generation


	trigger_nodes = power.make_single_instrument_stage(dag, datafinds, seglistdict, tag, timing_params, psds_per_power, binjnodes = binjnodes, verbose = verbose)
	if enable_clustering:
		if verbose:
			print >>sys.stderr, "building pre-lladd bucluster jobs ..."
		trigger_nodes = power.make_bucluster_fragment(dag, trigger_nodes, "PRELLADD_%s" % tag, verbose = verbose)


	# coincidence analysis


	coinc_nodes = set()
	binj_cache = set([cache_entry for node in binjnodes for cache_entry in node.get_output_cache()])
	# otherwise too many copies of the offset vector will be fed into
	# burca
	assert len(binj_cache) < 2
	for n, (time_slides_cache_entry, these_time_slides) in enumerate(time_slides.items()):
		if verbose:
			print >>sys.stderr, "%s %d/%d (%s):" % (tag, n + 1, len(time_slides), time_slides_cache_entry.path)
		tisi_cache = set([time_slides_cache_entry])
		if do_injections:
			# lalapps_binj has already copied the time slide
			# document into its own output
			extra_input_cache = set()
		else:
			# ligolw_add needs to copy the time slide document
			# into its output
			extra_input_cache = tisi_cache
		nodes = set()
		for seg, parents, cache, clipseg in power.group_coinc_parents(trigger_nodes, these_time_slides, verbose = verbose):
			nodes |= power.make_lladd_fragment(dag, parents | binjnodes, "%s_%d" % (tag, n), segment = seg, input_cache = cache | binj_cache, extra_input_cache = extra_input_cache, remove_input = do_injections, preserve_cache = binj_cache | tisi_cache)
		if enable_clustering:
			if verbose:
				print >>sys.stderr, "building post-lladd bucluster jobs ..."
			nodes = power.make_bucluster_fragment(dag, nodes, "POSTLLADD_%s_%d" % (tag, n), verbose = verbose)
		if verbose:
			print >>sys.stderr, "building burca jobs ..."
		coinc_nodes |= power.make_burca_fragment(dag, nodes, "%s_%d" % (tag, n), verbose = verbose)
		if verbose:
			print >>sys.stderr, "done %s %d/%d" % (tag, n + 1, len(time_slides))


	# injection identification


	if do_injections:
		if verbose:
			print >>sys.stderr, "building binjfind jobs ..."
		coinc_nodes = power.make_binjfind_fragment(dag, coinc_nodes, tag, verbose = verbose)


	# conversion to SQLite database files


	if verbose:
		print >>sys.stderr, "building sqlite jobs ..."
	coinc_nodes = power.make_sqlite_fragment(dag, coinc_nodes, tag, verbose = verbose)


	# done


	power.write_output_cache(coinc_nodes, "%s_%s_output.cache" % (os.path.splitext(dag.get_dag_file())[0], tag))
	return coinc_nodes
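
A sketch of how a driver script might call make_coinc_branch for an injection run and a zero-lag run. The segment lists, the two time-slide cache dictionaries, and the dag, datafinds, timing_params, and psds_per_power objects are assumptions standing in for the real configuration; note the injection branch permits exactly one time-slide document (the assert at the top of the function).

# illustrative driver sketch -- dag, datafinds, timing_params,
# psds_per_power and the two time-slide cache dicts are assumed to
# have been built earlier during pipeline configuration
from glue import segments

seglistdict = segments.segmentlistdict({
	"H1": segments.segmentlist([segments.segment(874000000, 874086400)]),
	"L1": segments.segmentlist([segments.segment(874000000, 874086400)]),
})

# injection branch: exactly one time-slide document is allowed
inj_nodes = make_coinc_branch(dag, datafinds, seglistdict,
	injection_time_slides, timing_params, psds_per_power,
	enable_clustering = True, tag = "INJ", do_injections = True,
	verbose = True)

# zero-lag / time-slide branch over the full set of offset vectors
coinc_nodes = make_coinc_branch(dag, datafinds, seglistdict,
	noninjection_time_slides, timing_params, psds_per_power,
	enable_clustering = True, tag = "FULL_DATA", verbose = True)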
Example #4
def make_coinc_branch(dag,
                      datafinds,
                      seglistdict,
                      time_slides,
                      timing_params,
                      psds_per_power,
                      enable_clustering,
                      tag,
                      do_injections=False,
                      verbose=False):
    # injection list

    if do_injections:
        assert len(time_slides) == 1
        if verbose:
            print >> sys.stderr, "Building lalapps_binj jobs ..."
        binjnodes = power.make_binj_fragment(
            dag, seglistdict.extent_all(),
            time_slides.keys()[0], tag, 0.0,
            float(power.powerjob.get_opts()["low-freq-cutoff"]),
            float(power.powerjob.get_opts()["low-freq-cutoff"]) +
            float(power.powerjob.get_opts()["bandwidth"]))
        # add binj nodes as parents of the datafinds to force the binj
        # jobs to run first.  this ensures that once a datafind has run,
        # the power jobs that follow it can start immediately, which
        # helps depth-first DAGMan schedule more sensibly.
        for node in datafinds:
            for binjnode in binjnodes:
                node.add_parent(binjnode)
    else:
        binjnodes = set()

    # single-instrument trigger generation

    trigger_nodes = power.make_single_instrument_stage(dag,
                                                       datafinds,
                                                       seglistdict,
                                                       tag,
                                                       timing_params,
                                                       psds_per_power,
                                                       binjnodes=binjnodes,
                                                       verbose=verbose)
    if enable_clustering:
        if verbose:
            print >> sys.stderr, "building pre-lladd bucluster jobs ..."
        trigger_nodes = power.make_bucluster_fragment(dag,
                                                      trigger_nodes,
                                                      "PRELLADD_%s" % tag,
                                                      verbose=verbose)

    # coincidence analysis

    coinc_nodes = set()
    binj_cache = set([
        cache_entry for node in binjnodes
        for cache_entry in node.get_output_cache()
    ])
    # otherwise too many copies of the offset vector will be fed into
    # burca
    assert len(binj_cache) < 2
    for n, (time_slides_cache_entry,
            these_time_slides) in enumerate(time_slides.items()):
        if verbose:
            print >> sys.stderr, "%s %d/%d (%s):" % (
                tag, n + 1, len(time_slides), time_slides_cache_entry.path)
        tisi_cache = set([time_slides_cache_entry])
        if do_injections:
            # lalapps_binj has already copied the time slide
            # document into its own output
            extra_input_cache = set()
        else:
            # ligolw_add needs to copy the time slide document
            # into its output
            extra_input_cache = tisi_cache
        nodes = set()
        for seg, parents, cache, clipseg in power.group_coinc_parents(
                trigger_nodes, these_time_slides, verbose=verbose):
            nodes |= power.make_lladd_fragment(
                dag,
                parents | binjnodes,
                "%s_%d" % (tag, n),
                segment=seg,
                input_cache=cache | binj_cache,
                extra_input_cache=extra_input_cache,
                remove_input=do_injections,
                preserve_cache=binj_cache | tisi_cache)
        if enable_clustering:
            if verbose:
                print >> sys.stderr, "building post-lladd bucluster jobs ..."
            nodes = power.make_bucluster_fragment(dag,
                                                  nodes,
                                                  "POSTLLADD_%s_%d" % (tag, n),
                                                  verbose=verbose)
        if verbose:
            print >> sys.stderr, "building burca jobs ..."
        coinc_nodes |= power.make_burca_fragment(dag,
                                                 nodes,
                                                 "%s_%d" % (tag, n),
                                                 verbose=verbose)
        if verbose:
            print >> sys.stderr, "done %s %d/%d" % (tag, n + 1,
                                                    len(time_slides))

    # injection identification

    if do_injections:
        if verbose:
            print >> sys.stderr, "building binjfind jobs ..."
        coinc_nodes = power.make_binjfind_fragment(dag,
                                                   coinc_nodes,
                                                   tag,
                                                   verbose=verbose)

    # conversion to SQLite database files

    if verbose:
        print >> sys.stderr, "building sqlite jobs ..."
    coinc_nodes = power.make_sqlite_fragment(dag,
                                             coinc_nodes,
                                             tag,
                                             verbose=verbose)

    # done

    power.write_output_cache(
        coinc_nodes,
        "%s_%s_output.cache" % (os.path.splitext(dag.get_dag_file())[0], tag))
    return coinc_nodes