Example #1
0
    def __init__(self, config_parser):
        """
        Set up the Condor job description for lalapps_string_meas_likelihood.

        config_parser = ConfigParser object from which the executable
        path, output directory, accounting group, program options, cache
        directory, and files-per-job count are read.
        """
        exe = power.get_executable(config_parser,
                                   "lalapps_string_meas_likelihood")
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        self.set_sub_file("lalapps_string_meas_likelihood.sub")
        stdout_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_string_meas_likelihood-$(cluster)-$(process).out")
        self.set_stdout_file(stdout_path)
        stderr_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_string_meas_likelihood-$(cluster)-$(process).err")
        self.set_stderr_file(stderr_path)
        self.add_condor_cmd("getenv", "True")
        self.add_condor_cmd("accounting_group",
                            power.get_accounting_group(config_parser))
        self.add_ini_opts(config_parser, "lalapps_string_meas_likelihood")

        self.cache_dir = power.get_cache_dir(config_parser)
        # Outputs are written to the current working directory.
        self.output_dir = "."
        self.files_per_meas_likelihood = get_files_per_meas_likelihood(
            config_parser)
        # A job must process at least one file to be meaningful.
        if self.files_per_meas_likelihood < 1:
            raise ValueError("files_per_meas_likelihood < 1")
Example #2
0
	def __init__(self, config_parser):
		"""
		Set up the Condor job description for lalapps_string_calc_likelihood.

		config_parser = ConfigParser object from which the executable
		path, output directory, accounting group, program options, cache
		directory, and files-per-job count are read.
		"""
		exe = power.get_executable(config_parser, "lalapps_string_calc_likelihood")
		pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
		self.set_sub_file("lalapps_string_calc_likelihood.sub")
		log_dir = power.get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(log_dir, "lalapps_string_calc_likelihood-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(log_dir, "lalapps_string_calc_likelihood-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", power.get_accounting_group(config_parser))
		self.add_ini_opts(config_parser, "lalapps_string_calc_likelihood")
		self.cache_dir = power.get_cache_dir(config_parser)
		self.files_per_calc_likelihood = get_files_per_calc_likelihood(config_parser)
		# A job must process at least one file to be meaningful.
		if self.files_per_calc_likelihood < 1:
			raise ValueError("files_per_calc_likelihood < 1")
Example #3
0
        def __init__(self, config_parser):
                """
                Set up the Condor job description for lalapps_run_sqlite.

                config_parser = ConfigParser object from which the
                executable path, program options, output directory,
                accounting group, and files-per-job count are read.

                Raises ValueError if the configured
                files_per_run_sqlite is less than 1.
                """
                # Fix: the original body mixed tab- and space-indented
                # lines at the same logical level, which is a TabError
                # under Python 3.  Indentation is normalized to spaces;
                # the statements themselves are unchanged.
                pipeline.CondorDAGJob.__init__(self, "vanilla", power.get_executable(config_parser, "lalapps_run_sqlite"))
                self.add_ini_opts(config_parser, "lalapps_run_sqlite")
                self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "lalapps_run_sqlite-$(cluster)-$(process).out"))
                self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "lalapps_run_sqlite-$(cluster)-$(process).err"))
                self.add_condor_cmd("getenv", "True")
                self.add_condor_cmd("accounting_group", power.get_accounting_group(config_parser))
                self.set_sub_file("lalapps_run_sqlite.sub")
                self.files_per_run_sqlite = get_files_per_run_sqlite(config_parser)
                # A job must process at least one file to be meaningful.
                if self.files_per_run_sqlite < 1:
                        raise ValueError("files_per_run_sqlite < 1")
Example #4
0
  def __init__(self, config_parser):
    """
    Set up the Condor job description for lalapps_StringSearch.

    config_parser = ConfigParser object from which options are read.
    """
    universe = power.get_universe(config_parser)
    exe = power.get_executable(config_parser, "lalapps_StringSearch")
    pipeline.CondorDAGJob.__init__(self, universe, exe)
    pipeline.AnalysisJob.__init__(self, config_parser)
    self.add_ini_opts(config_parser, "lalapps_StringSearch")
    log_dir = power.get_out_dir(config_parser)
    self.set_stdout_file(os.path.join(log_dir, "lalapps_StringSearch-$(cluster)-$(process).out"))
    self.set_stderr_file(os.path.join(log_dir, "lalapps_StringSearch-$(cluster)-$(process).err"))
    self.add_condor_cmd("getenv", "True")
    self.add_condor_cmd("accounting_group", power.get_accounting_group(config_parser))
    self.set_sub_file("lalapps_StringSearch.sub")
    #self.add_condor_cmd("Requirements", "Memory > 1100")

    # Triggers are written to the configured triggers directory.
    self.output_dir = power.get_triggers_dir(config_parser)
Example #5
0
    def __init__(self, config_parser):
        """
        Set up the Condor job description for lalapps_run_sqlite.

        config_parser = ConfigParser object from which the executable
        path, program options, output directory, accounting group, and
        files-per-job count are read.
        """
        exe = power.get_executable(config_parser, "lalapps_run_sqlite")
        pipeline.CondorDAGJob.__init__(self, "vanilla", exe)
        self.add_ini_opts(config_parser, "lalapps_run_sqlite")
        stdout_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_run_sqlite-$(cluster)-$(process).out")
        self.set_stdout_file(stdout_path)
        stderr_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_run_sqlite-$(cluster)-$(process).err")
        self.set_stderr_file(stderr_path)
        self.add_condor_cmd("getenv", "True")
        self.add_condor_cmd("accounting_group",
                            power.get_accounting_group(config_parser))
        self.set_sub_file("lalapps_run_sqlite.sub")
        self.files_per_run_sqlite = get_files_per_run_sqlite(config_parser)
        # A job must process at least one file to be meaningful.
        if self.files_per_run_sqlite < 1:
            raise ValueError("files_per_run_sqlite < 1")
Example #6
0
    def __init__(self, config_parser):
        """
        Set up the Condor job description for lalapps_StringSearch.

        config_parser = ConfigParser object from which options are read.
        """
        universe = power.get_universe(config_parser)
        exe = power.get_executable(config_parser, "lalapps_StringSearch")
        pipeline.CondorDAGJob.__init__(self, universe, exe)
        pipeline.AnalysisJob.__init__(self, config_parser)
        self.add_ini_opts(config_parser, "lalapps_StringSearch")
        stdout_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_StringSearch-$(cluster)-$(process).out")
        self.set_stdout_file(stdout_path)
        stderr_path = os.path.join(
            power.get_out_dir(config_parser),
            "lalapps_StringSearch-$(cluster)-$(process).err")
        self.set_stderr_file(stderr_path)
        self.add_condor_cmd("getenv", "True")
        self.add_condor_cmd("accounting_group",
                            power.get_accounting_group(config_parser))
        self.set_sub_file("lalapps_StringSearch.sub")
        #self.add_condor_cmd("Requirements", "Memory > 1100")

        # Triggers are written to the configured triggers directory.
        self.output_dir = power.get_triggers_dir(config_parser)