def make_dag_directories(top_level_directory, config_parser):
    """
    Create the DAG output directory tree.

    Creates top_level_directory if it does not exist, then builds the
    standard power DAG directories inside it, plus the triggers
    directory.  The caller's working directory is restored before
    returning, even if directory creation fails part-way through.

    top_level_directory = path under which the DAG directories are made.
    config_parser = ConfigParser object from which options are read.
    """
    cwd = os.getcwd()
    power.make_dir_if_not_exists(top_level_directory)
    os.chdir(top_level_directory)
    try:
        power.make_dag_directories(config_parser)
        # FIXME: move this into make_dag_directories(). requires update
        # of excess power and gstlal dags
        power.make_dir_if_not_exists(power.get_triggers_dir(config_parser))
    finally:
        # restore the caller's working directory even on error; the
        # original code would leave the process stranded in
        # top_level_directory if make_dir_if_not_exists() raised
        os.chdir(cwd)
def __init__(self, config_parser):
    """
    config_parser = ConfigParser object from which options are read.
    """
    # configure the underlying Condor job from the ini file
    universe = power.get_universe(config_parser)
    executable = power.get_executable(config_parser, "lalapps_StringSearch")
    pipeline.CondorDAGJob.__init__(self, universe, executable)
    pipeline.AnalysisJob.__init__(self, config_parser)
    self.add_ini_opts(config_parser, "lalapps_StringSearch")
    # per-job stdout/stderr files, distinguished by Condor cluster/process
    out_dir = power.get_out_dir(config_parser)
    self.set_stdout_file(os.path.join(out_dir, "lalapps_StringSearch-$(cluster)-$(process).out"))
    self.set_stderr_file(os.path.join(out_dir, "lalapps_StringSearch-$(cluster)-$(process).err"))
    self.add_condor_cmd("getenv", "True")
    self.set_sub_file("lalapps_StringSearch.sub")
    #self.add_condor_cmd("Requirements", "Memory > 1100")
    self.output_dir = power.get_triggers_dir(config_parser)
def __init__(self, config_parser):
    """
    config_parser = ConfigParser object from which options are read.
    """
    # base class set-up: Condor universe, executable, and ini-file options
    pipeline.CondorDAGJob.__init__(
        self,
        power.get_universe(config_parser),
        power.get_executable(config_parser, "lalapps_StringSearch")
    )
    pipeline.AnalysisJob.__init__(self, config_parser)
    self.add_ini_opts(config_parser, "lalapps_StringSearch")
    # stdout/stderr share a basename; Condor expands $(cluster)/$(process)
    log_base = os.path.join(
        power.get_out_dir(config_parser),
        "lalapps_StringSearch-$(cluster)-$(process)"
    )
    self.set_stdout_file(log_base + ".out")
    self.set_stderr_file(log_base + ".err")
    self.add_condor_cmd("getenv", "True")
    self.set_sub_file("lalapps_StringSearch.sub")
    #self.add_condor_cmd("Requirements", "Memory > 1100")
    self.output_dir = power.get_triggers_dir(config_parser)