Code Example #1
# Assumed imports for this excerpt; `options` and `config_parser` are
# produced earlier in the full script by its command-line/INI parsing.
import sys
import tempfile
from functools import reduce

from glue import pipeline
from lalburst import power  # power pipeline helpers (older scripts import this from lalapps)


power.make_dag_directories(config_parser)
dag = pipeline.CondorDAG(tempfile.mkstemp(".log", "power_likelihood_", options.condor_log_dir)[1])
dag.set_dag_file("power_likelihood")


#
# Generate likelihood data
#


# One set of parent nodes for the main input cache and one per round-robin
# cache; these become the parents of the likelihood-assignment jobs below.
input_cache_nodes = set()
round_robin_cache_nodes = [set() for cache in options.round_robin_cache]
for seg in options.distribution_segments:
	if options.verbose:
		print("generating distribution measurement jobs for %s ..." % str(seg), file=sys.stderr)
	# Queue a distribution-measurement (burca_tailor) job over the cache
	# entries whose segment lists intersect this segment.
	input_cache_nodes |= power.make_burca_tailor_fragment(dag, set([entry for entry in options.input_cache if entry.segmentlistdict.intersects_segment(seg)]), seg, "LIKELIHOOD_MAIN")
	# Same for each round-robin cache; |= updates each set in
	# round_robin_cache_nodes in place.
	for i, (nodes, cache) in enumerate(zip(round_robin_cache_nodes, options.round_robin_cache)):
		nodes |= power.make_burca_tailor_fragment(dag, set([entry for entry in cache if entry.segmentlistdict.intersects_segment(seg)]), seg, "LIKELIHOOD_RR%02d" % i)


#
# Compute likelihood ratios for coincs
#


if options.verbose:
	print("generating likelihood assignment jobs for main group ...", file=sys.stderr)
# Union every distribution-measurement node set into a single parent set,
# then queue the likelihood-assignment (burca2) jobs that depend on them.
parents = reduce(lambda a, b: a | b, round_robin_cache_nodes, input_cache_nodes)
nodes = power.make_burca2_fragment(dag, options.input_cache, parents, "LIKELIHOOD_MAIN")

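For readers unfamiliar with the fragment idiom above: each power.make_*_fragment() call appears to add Condor jobs to the DAG, wire them up behind their parent nodes, and return the set of nodes it created so later stages can depend on them. Below is a minimal sketch of that pattern using only the glue.pipeline API; make_example_fragment, example_job and the executable path are hypothetical names for illustration, not part of lalburst.

from glue import pipeline

# A pre-built job class this hypothetical fragment draws its nodes from.
example_job = pipeline.CondorDAGJob("vanilla", "/path/to/example_executable")

def make_example_fragment(dag, parents, tag):
	# Create one node, make it a child of every parent node handed in,
	# register it with the DAG, and return it as a one-element set.
	node = pipeline.CondorDAGNode(example_job)
	for parent in parents:
		node.add_parent(parent)
	dag.add_node(node)
	return set([node])

The real fragment functions presumably do more per-node setup (program options, output names), but the parent/child wiring and the returned node set are what the loop above relies on.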
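The excerpt stops once the DAG has been assembled in memory. A minimal sketch of how such a script would typically finish, using the standard glue.pipeline.CondorDAG write methods (these calls are not part of the excerpt above):

# Write one Condor submit file per job class, then the DAG file itself
# ("power_likelihood.dag", the name chosen with dag.set_dag_file() above).
dag.write_sub_files()
dag.write_dag()

The resulting workflow would then be submitted to HTCondor with condor_submit_dag power_likelihood.dag.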