#
# Parse .ini file, loading the single-instrument segment lists while at it.
#


# Parse the .ini file; parse_config_file() (defined elsewhere in this
# script) returns the single-instrument segment lists, the tiling phase,
# and the ConfigParser instance used below to configure the job types.
seglistdict, tiling_phase, config_parser = parse_config_file(options)


#
# Define .sub files
#


# Initialize the lalapps.power condor job type definitions from the
# parsed configuration.
power.init_job_types(config_parser)


#
# Using time slide information, construct segment lists describing times
# requiring trigger construction.
#


# NOTE(review): Python 2 print-statement syntax; this fragment predates
# any Python 3 port.
if options.verbose:
	print >>sys.stderr, "Computing segments for which lalapps_power jobs are required ..."

# background_time_slides:  presumably keyed by time-slide file — the loop
# that fills it is truncated in this chunk; confirm against the full file.
# background_seglistdict:  union of segments needing lalapps_power jobs.
background_time_slides = {}
background_seglistdict = segments.segmentlistdict()
if options.do_noninjections:
	# NOTE(review): loop body not visible here — source is truncated.
	for filename in options.background_time_slides:
# --- extraction artifact: boundary of "Example #2" (was bare text "Example #2" / "0") ---
# Command line
#

# Parse command-line options and positional file arguments
# (parse_command_line() is defined elsewhere in this script).
options, filenames = parse_command_line()

#
# Parse .ini file, loading the single-instrument segment lists while at it.
#

# Returns the per-instrument segment lists, the tiling phase, and the
# ConfigParser instance used below.
seglistdict, tiling_phase, config_parser = parse_config_file(options)

#
# Define .sub files
#

# Initialize the lalapps.power condor job type definitions.
power.init_job_types(config_parser)

#
# Using time slide information, construct segment lists describing times
# requiring trigger construction.
#

# NOTE(review): Python 2 print-statement syntax in this fragment.
if options.verbose:
    print >> sys.stderr, "Computing segments for which lalapps_power jobs are required ..."

# Accumulators for the loop below:  time-slide data per input file, and the
# union of segments for which lalapps_power jobs are required.
background_time_slides = {}
background_seglistdict = segments.segmentlistdict()
if options.do_noninjections:
    for filename in options.background_time_slides:
        # Wrap the local file in a LAL cache entry using a
        # file://localhost URL built from its absolute path.
        # NOTE(review): rest of the loop body is truncated in this chunk.
        cache_entry = CacheEntry(
            None, None, None, "file://localhost" + os.path.abspath(filename))
# --- extraction artifact: boundary of "Example #3" (was bare text "Example #3" / "0") ---
    def set_output(self, destination):
        """Record *destination* as the value of the "macrodestination"
        condor submit-file macro on this node."""
        macro_name = "macrodestination"
        self.add_macro(macro_name, destination)


#
# =============================================================================
#
#                              Define .sub Files
#
# =============================================================================
#

# Initialize only the condor job types this DAG actually uses.
power.init_job_types(config_parser,
                     types=[
                         "datafind", "binj", "power", "lladd", "binjfind",
                         "bucluster", "bucut"
                     ])

# Jobs for publishing results and copying files via gsiscp
# (PublishJob / GsiScpJob are defined elsewhere in this script).
publishjob = PublishJob(config_parser)
gsiscpjob = GsiScpJob(config_parser)

#
# =============================================================================
#
#                             Publish DAG Fragment
#
# =============================================================================
#

# Record every command-line option name/value pair in the log file,
# followed by a blank separator line.
# NOTE(review): Python 2 print-statement syntax in this fragment.
for name_value in options.__dict__.items():
	print >>log_fh, "%s %s" % name_value
print >>log_fh

#
# create the config parser object and read in the ini file
#

# Python 2 module name ("ConfigParser"); the file named by --config-file
# on the command line supplies all job parameters.
config_parser = ConfigParser.ConfigParser()
config_parser.read(options.config_file)

#
# initialize lalapps.power and lalapps.cosmicstring modules
#

# Restrict each module to the condor job types this DAG uses.
power.init_job_types(config_parser, job_types = ("datafind", "binj", "lladd", "binjfind", "burca", "sqlite"))
cosmicstring.init_job_types(config_parser, job_types = ("string", "meas_likelihood", "calc_likelihood", "runsqlite"))

#
# make directories to store the cache files, job logs, and trigger output
#

def make_dag_directories(top_level_directory, config_parser):
	"""
	Create the DAG work area under top_level_directory:  the directory
	itself, the standard cache/log sub-directories created by
	power.make_dag_directories(), and the trigger output directory
	named in config_parser.

	The process' working directory is restored on exit, even if
	directory creation fails part-way through (the original version
	left the process stranded in top_level_directory on error).
	"""
	cwd = os.getcwd()
	power.make_dir_if_not_exists(top_level_directory)
	os.chdir(top_level_directory)
	try:
		power.make_dag_directories(config_parser)
		# FIXME:  move this into make_dag_directories().  requires update
		# of excess power and gstlal dags
		power.make_dir_if_not_exists(power.get_triggers_dir(config_parser))
	finally:
		# always return to the caller's working directory
		os.chdir(cwd)
			pipeline.CondorDAGNode.add_file_arg(self, filename)
	
	def set_output(self, destination):
		"""Record *destination* as the value of the "macrodestination"
		condor submit-file macro on this node."""
		macro_name = "macrodestination"
		self.add_macro(macro_name, destination)


#
# =============================================================================
#
#                              Define .sub Files
#
# =============================================================================
#


# Initialize only the condor job types this DAG actually uses.
power.init_job_types(config_parser, types = ["datafind", "binj", "power", "lladd", "binjfind", "bucluster", "bucut"])


# Jobs for publishing results and copying files via gsiscp
# (PublishJob / GsiScpJob are defined elsewhere in this script).
publishjob = PublishJob(config_parser)
gsiscpjob = GsiScpJob(config_parser)


#
# =============================================================================
#
#                             Publish DAG Fragment
#
# =============================================================================
#

# --- extraction artifact: boundary of "Example #6" (was bare text "Example #6" / "0") ---
    print("%s %s" % name_value, file=log_fh)
# Blank separator line terminating the option log written just above
# (that loop is truncated in this chunk).
print(file=log_fh)

#
# create the config parser object and read in the ini file
#

# NOTE(review): this fragment uses Python 3 print() calls, yet the py2-era
# module name "ConfigParser" — presumably aliased at import time; confirm
# against the file's import block.
config_parser = ConfigParser.ConfigParser()
config_parser.read(options.config_file)

#
# initialize lalapps.power and lalapps.cosmicstring modules
#

# Restrict each module to the condor job types this DAG uses.
power.init_job_types(config_parser,
                     job_types=("datafind", "binj", "lladd", "binjfind",
                                "burca", "sqlite"))
cosmicstring.init_job_types(config_parser,
                            job_types=("string", "meas_likelihood",
                                       "calc_likelihood", "runsqlite"))

#
# make directories to store the cache files, job logs, and trigger output
#


def make_dag_directories(top_level_directory, config_parser):
    cwd = os.getcwd()
    power.make_dir_if_not_exists(top_level_directory)
    os.chdir(top_level_directory)
    power.make_dag_directories(config_parser)