Example #1
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", power.get_executable(config_parser, "ligolw_burst2mon"))
		self.set_sub_file("ligolw_burst2mon.sub")
		self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "ligolw_burst2mon-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "ligolw_burst2mon-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_ini_opts(config_parser, "ligolw_burst2mon")
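All of the snippets on this page are __init__() methods of pipeline.CondorDAGJob subclasses; they assume that os, the glue.pipeline DAG helpers, and a power helper module (which reads executable names and directories from the job's ini file) are already imported at module level. A minimal sketch of that surrounding context for Example #1 follows; the import paths and the class name BurstToMonJob are assumptions, not taken from the source:

import os

from glue import pipeline   # Condor DAG helpers: CondorDAGJob, CondorDAGNode, CondorDAG, ...
from lalapps import power   # assumed helper module: get_executable(), get_out_dir(), ...


class BurstToMonJob(pipeline.CondorDAGJob):   # class name is an assumption
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", power.get_executable(config_parser, "ligolw_burst2mon"))
		self.set_sub_file("ligolw_burst2mon.sub")
		self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "ligolw_burst2mon-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "ligolw_burst2mon-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_ini_opts(config_parser, "ligolw_burst2mon")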
Example #2
    def __init__(self, config_parser):
        pipeline.CondorDAGJob.__init__(
            self, "vanilla",
            power.get_executable(config_parser,
                                 "lalapps_string_meas_likelihood"))
        self.set_sub_file("lalapps_string_meas_likelihood.sub")
        self.set_stdout_file(
            os.path.join(
                power.get_out_dir(config_parser),
                "lalapps_string_meas_likelihood-$(cluster)-$(process).out"))
        self.set_stderr_file(
            os.path.join(
                power.get_out_dir(config_parser),
                "lalapps_string_meas_likelihood-$(cluster)-$(process).err"))
        self.add_condor_cmd("getenv", "True")
        self.add_condor_cmd("accounting_group",
                            power.get_accounting_group(config_parser))
        self.add_ini_opts(config_parser, "lalapps_string_meas_likelihood")

        self.cache_dir = power.get_cache_dir(config_parser)
        self.output_dir = "."
        self.files_per_meas_likelihood = get_files_per_meas_likelihood(
            config_parser)
        if self.files_per_meas_likelihood < 1:
            raise ValueError("files_per_meas_likelihood < 1")
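The get_files_per_meas_likelihood() helper used above is not part of glue.pipeline and is not shown on this page. A plausible implementation, in which the ini section and option names are assumptions, simply reads an integer from the same config file:

# Hypothetical helper matching the attribute set above; the section and
# option names are assumptions.
def get_files_per_meas_likelihood(config_parser):
	return config_parser.getint("pipeline", "files_per_meas_likelihood")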
Example #3
 def __init__(self, config_parser):
     pipeline.CondorDAGJob.__init__(
         self, "vanilla", power.get_executable(config_parser, "publish"))
     self.set_sub_file("publish.sub")
     self.set_stdout_file(
         os.path.join(power.get_out_dir(config_parser),
                      "publish-$(cluster)-$(process).out"))
     self.set_stderr_file(
         os.path.join(power.get_out_dir(config_parser),
                      "publish-$(cluster)-$(process).err"))
Example #4
 def __init__(self, config_parser):
     pipeline.CondorDAGJob.__init__(
         self, "local", power.get_executable(config_parser, "gsiscp"))
     self.set_sub_file("gsiscp.sub")
     self.set_stdout_file(
         os.path.join(power.get_out_dir(config_parser),
                      "gsiscp-$(cluster)-$(process).out"))
     self.set_stderr_file(
         os.path.join(power.get_out_dir(config_parser),
                      "gsiscp-$(cluster)-$(process).err"))
     self.add_condor_cmd("getenv", "True")
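The gsiscp job runs in the "local" universe and takes positional command-line arguments (source and destination) rather than --option values, so nodes for it would typically pass arguments with add_var_arg(). A hedged sketch, continuing the context from Example #1; the class name GSISCPJob, the dag object, and the paths are illustrative assumptions:

scp_job = GSISCPJob(config_parser)   # class whose __init__ is shown above; name assumed

scp_node = pipeline.CondorDAGNode(scp_job)
scp_node.add_var_arg("triggers.xml.gz")                      # source file (illustrative)
scp_node.add_var_arg("remote.example.edu:/data/triggers/")   # destination (illustrative)
dag.add_node(scp_node)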
Example #5
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", power.get_executable(config_parser, "lalapps_string_calc_likelihood"))
		self.set_sub_file("lalapps_string_calc_likelihood.sub")
		self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "lalapps_string_calc_likelihood-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "lalapps_string_calc_likelihood-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_ini_opts(config_parser, "lalapps_string_calc_likelihood")
		self.cache_dir = power.get_cache_dir(config_parser)
		self.files_per_calc_likelihood = get_files_per_calc_likelihood(config_parser)
		if self.files_per_calc_likelihood < 1:
			raise ValueError("files_per_calc_likelihood < 1")
Example #6
 def __init__(self, config_parser):
     pipeline.CondorDAGJob.__init__(
         self, "vanilla",
         power.get_executable(config_parser, "ligolw_burst2mon"))
     self.set_sub_file("ligolw_burst2mon.sub")
     self.set_stdout_file(
         os.path.join(power.get_out_dir(config_parser),
                      "ligolw_burst2mon-$(cluster)-$(process).out"))
     self.set_stderr_file(
         os.path.join(power.get_out_dir(config_parser),
                      "ligolw_burst2mon-$(cluster)-$(process).err"))
     self.add_condor_cmd("getenv", "True")
     self.add_ini_opts(config_parser, "ligolw_burst2mon")
Example #7
	def __init__(self, config_parser):
		"""
		config_parser = ConfigParser object
		"""
		pipeline.CondorDAGJob.__init__(self, "vanilla", power.get_executable(config_parser, "lalapps_run_sqlite"))
		self.add_ini_opts(config_parser, "lalapps_run_sqlite")
		self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "lalapps_run_sqlite-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "lalapps_run_sqlite-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.set_sub_file("lalapps_run_sqlite.sub")
		self.files_per_run_sqlite = get_files_per_run_sqlite(config_parser)
		if self.files_per_run_sqlite < 1:
			raise ValueError("files_per_run_sqlite < 1")
Example #8
  def __init__(self, config_parser):
    """
    config_parser = ConfigParser object from which options are read.
    """
    pipeline.CondorDAGJob.__init__(self, power.get_universe(config_parser), power.get_executable(config_parser, "lalapps_StringSearch"))
    pipeline.AnalysisJob.__init__(self, config_parser)
    self.add_ini_opts(config_parser, "lalapps_StringSearch")
    self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "lalapps_StringSearch-$(cluster)-$(process).out"))
    self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "lalapps_StringSearch-$(cluster)-$(process).err"))
    self.add_condor_cmd("getenv", "True")
    self.set_sub_file("lalapps_StringSearch.sub")
    #self.add_condor_cmd("Requirements", "Memory > 1100")

    self.output_dir = power.get_triggers_dir(config_parser)
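A job class like the one above is only half of the workflow: a DAG-generation script instantiates each job once, attaches one CondorDAGNode per chunk of work, and writes the submit and DAG files. A hedged usage sketch assuming the standard glue.pipeline API; the ini/DAG file names, the StringSearchJob class name, and the option values are illustrative assumptions:

from configparser import ConfigParser   # "import ConfigParser" on Python 2

config_parser = ConfigParser()
config_parser.read("string_search.ini")   # must contain the sections read by power.* and add_ini_opts()

dag = pipeline.CondorDAG("string_search.log")
dag.set_dag_file("string_search")

search_job = StringSearchJob(config_parser)   # class whose __init__ is shown above; name assumed

search_node = pipeline.CondorDAGNode(search_job)
search_node.add_var_opt("gps-start-time", 874000000)   # illustrative per-node options
search_node.add_var_opt("gps-end-time", 874002048)
dag.add_node(search_node)

dag.write_sub_files()   # writes lalapps_StringSearch.sub etc.
dag.write_dag()         # writes string_search.dag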
Example #9
 def __init__(self, config_parser):
     """
             config_parser = ConfigParser object
             """
     pipeline.CondorDAGJob.__init__(
         self, "vanilla",
         power.get_executable(config_parser, "lalapps_run_sqlite"))
     self.add_ini_opts(config_parser, "lalapps_run_sqlite")
     self.set_stdout_file(
         os.path.join(power.get_out_dir(config_parser),
                      "lalapps_run_sqlite-$(cluster)-$(process).out"))
     self.set_stderr_file(
         os.path.join(power.get_out_dir(config_parser),
                      "lalapps_run_sqlite-$(cluster)-$(process).err"))
     self.add_condor_cmd("getenv", "True")
     self.set_sub_file("lalapps_run_sqlite.sub")
     self.files_per_run_sqlite = get_files_per_run_sqlite(config_parser)
     if self.files_per_run_sqlite < 1:
         raise ValueError("files_per_run_sqlite < 1")
Example #10
    def __init__(self, config_parser):
        """
    config_parser = ConfigParser object from which options are read.
    """
        pipeline.CondorDAGJob.__init__(
            self, power.get_universe(config_parser),
            power.get_executable(config_parser, "lalapps_StringSearch"))
        pipeline.AnalysisJob.__init__(self, config_parser)
        self.add_ini_opts(config_parser, "lalapps_StringSearch")
        self.set_stdout_file(
            os.path.join(power.get_out_dir(config_parser),
                         "lalapps_StringSearch-$(cluster)-$(process).out"))
        self.set_stderr_file(
            os.path.join(power.get_out_dir(config_parser),
                         "lalapps_StringSearch-$(cluster)-$(process).err"))
        self.add_condor_cmd("getenv", "True")
        self.set_sub_file("lalapps_StringSearch.sub")
        #self.add_condor_cmd("Requirements", "Memory > 1100")

        self.output_dir = power.get_triggers_dir(config_parser)