Code example #1
# Excerpt from a larger pipeline module:  sys, cafe (the ligolw_cafe module),
# segments (a LIGO segments module such as ligo.segments) and the helper
# functions cache_span() and match_nodes_to_caches() are imported or defined
# elsewhere in that module.
def group_coinc_parents(parents, offset_vectors, extentlimit = None, verbose = False):
	if not offset_vectors:
		# no offset vectors means there is nothing to group
		return []

	if verbose:
		print("Grouping jobs for coincidence analysis:", file=sys.stderr)

	#
	# use ligolw_cafe to group each output file according to how they
	# need to be combined to perform the coincidence analysis
	#

	seglists, bins = cafe.ligolw_cafe([cache_entry for parent in parents for cache_entry in parent.get_output_cache()], offset_vectors, extentlimit = extentlimit, verbose = verbose)

	#
	# retrieve the file caches and segments.  note that ligolw_cafe
	# returns the bins sorted by segment, so we do too
	#

	caches = [set(bin.objects) for bin in bins]
	segs = [cache_span(bin.objects) for bin in bins]

	#
	# determine the clipping boundaries to use for each coincidence job
	# if an extentlimit has been imposed
	#

	clipsegs = [None] * len(bins)
	if extentlimit is not None:
		extents = [bin.extent for bin in bins]
		for i, extent in enumerate(extents):
			# leave an end unbounded unless the adjacent bin's
			# extent intersects this one, in which case clip at
			# this bin's own boundary
			lo = segments.NegInfinity if i == 0 or extents[i - 1].disjoint(extent) else extent[0]
			hi = segments.PosInfinity if i >= len(extents) - 1 or extents[i + 1].disjoint(extent) else extent[1]
			if lo is not segments.NegInfinity or hi is not segments.PosInfinity:
				clipsegs[i] = segments.segment(lo, hi)

	#
	# match parents to caches
	#

	if verbose:
		print("Matching jobs to caches ...", file=sys.stderr)
	parent_groups, unused = match_nodes_to_caches(parents, caches)
	if verbose and unused:
		# there were parents that didn't match any caches.  this
		# happens if ligolw_cafe decides their outputs aren't
		# needed
		print("Notice:  %d jobs (of %d) produce output that will not be used by a coincidence job" % (unused, len(parents)), file=sys.stderr)

	#
	# done
	#

	return list(zip(segs, parent_groups, caches, clipsegs))
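
The clipping step above bounds a bin only at ends where its extent overlaps a neighbouring bin's extent; a fully isolated bin keeps a clip segment of None.  Below is a minimal, self-contained sketch of that rule (not part of the source), assuming only a LIGO segments package such as ligo.segments and using three hypothetical extents in place of the bins' bin.extent values.

from ligo import segments

# three hypothetical bin extents:  the first two overlap, the third is isolated
extents = [
	segments.segment(0, 100),
	segments.segment(90, 200),
	segments.segment(300, 400),
]

clipsegs = [None] * len(extents)
for i, extent in enumerate(extents):
	# an end stays unbounded unless the adjacent extent intersects this one
	lo = segments.NegInfinity if i == 0 or extents[i - 1].disjoint(extent) else extent[0]
	hi = segments.PosInfinity if i >= len(extents) - 1 or extents[i + 1].disjoint(extent) else extent[1]
	if lo is not segments.NegInfinity or hi is not segments.PosInfinity:
		clipsegs[i] = segments.segment(lo, hi)

print(clipsegs)
# only the two overlapping extents acquire finite clip boundaries (at 100 and
# 90 respectively); the isolated third extent keeps None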
Code example #2
File: lalapps_cafe.py    Project: zhudongdong1/lalsuite
# =============================================================================
#

# parse_command_line() is defined earlier in this script; cafe (the
# ligolw_cafe module, lalburst.cafe in current lalsuite), lsctables and
# ligolw_utils come from the script's import block.
options, cachenames = parse_command_line()

cache = []
for filename in cachenames:
    cache.extend(cafe.load_cache(filename, options.verbose))


@lsctables.use_in
class LIGOLWContentHandler(lsctables.ligolw.LIGOLWContentHandler):
    pass


seglists, outputcaches = cafe.ligolw_cafe(
    cache,
    lsctables.TimeSlideTable.get_table(
        ligolw_utils.load_filename(
            options.time_slides,
            verbose=options.verbose,
            contenthandler=LIGOLWContentHandler)).as_dict().values(),
    options.verbose)
instruments = set(seglists.keys())

if options.single_instrument:
    cafe.write_single_instrument_caches(options.base, outputcaches,
                                        instruments, options.verbose)
else:
    cafe.write_caches(options.base, outputcaches, instruments, options.verbose)
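
For reference, the same grouping can be driven directly from Python without reading a time-slide document.  The following is only a sketch under stated assumptions:  cafe here is lalburst.cafe (where the ligolw_cafe module lives in current lalsuite), "input.cache" is a hypothetical LAL cache file, and the plain dicts stand in for the offsetvector objects a time_slide table would supply (each maps an instrument name to a time offset in seconds).

from lalburst import cafe

cache = cafe.load_cache("input.cache", True)    # second argument enables verbosity
offset_vectors = [
    {"H1": 0.0, "L1": 0.0},    # zero-lag
    {"H1": 0.0, "L1": 5.0},    # a single 5 s time slide
]
seglists, outputcaches = cafe.ligolw_cafe(cache, offset_vectors, verbose=True)
# write the grouped caches to files whose names start with the "cafe_" prefix
cafe.write_caches("cafe_", outputcaches, set(seglists.keys()), True)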
Code example #3
#
# =============================================================================
#
#                                     Main
#
# =============================================================================
#


options, cachenames = parse_command_line()


cache = []
for filename in cachenames:
	cache.extend(cafe.load_cache(filename, options.verbose))


@lsctables.use_in
class LIGOLWContentHandler(lsctables.ligolw.LIGOLWContentHandler):
	pass

seglists, outputcaches = cafe.ligolw_cafe(
	cache,
	lsctables.TimeSlideTable.get_table(
		ligolw_utils.load_filename(
			options.time_slides,
			verbose = options.verbose,
			contenthandler = LIGOLWContentHandler
		)
	).as_dict().values(),
	options.verbose
)
instruments = set(seglists.keys())


if options.single_instrument:
	cafe.write_single_instrument_caches(options.base, outputcaches, instruments, options.verbose)
else:
	cafe.write_caches(options.base, outputcaches, instruments, options.verbose)
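
Both scripts above rely on a parse_command_line() helper that is not shown.  Purely as an illustration (this is not the project's actual parser; option names and defaults are assumptions inferred from the attributes the code reads), an argparse-based equivalent could look like the following.

import argparse

def parse_command_line():
	parser = argparse.ArgumentParser(description = "Group LAL cache entries into coincidence-analysis caches.")
	parser.add_argument("--base", default = "cafe_", help = "prefix for the output cache files (assumed default)")
	parser.add_argument("--time-slides", required = True, help = "XML document containing the time_slide table")
	parser.add_argument("--single-instrument", action = "store_true", help = "write one cache per instrument")
	parser.add_argument("--verbose", action = "store_true", help = "print progress to stderr")
	parser.add_argument("cachenames", nargs = "+", help = "input LAL cache files")
	options = parser.parse_args()
	# return the parsed options and the list of cache file names, mirroring
	# how the scripts above unpack the result
	return options, options.cachenames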