def make_dag_directories(top_level_directory, config_parser):
	"""
	Create the DAG's directory tree.

	Creates top_level_directory if needed, then builds the standard
	power-pipeline subdirectories inside it (via
	power.make_dag_directories()) plus the triggers directory.  The
	process's working directory is restored afterwards, even if
	directory creation fails part-way through.

	@param top_level_directory: path under which the DAG's directory
		tree is created.
	@param config_parser: ConfigParser instance consulted by the
		power module for directory names.
	"""
	cwd = os.getcwd()
	power.make_dir_if_not_exists(top_level_directory)
	os.chdir(top_level_directory)
	try:
		power.make_dag_directories(config_parser)
		# FIXME: move this into make_dag_directories(). requires update
		# of excess power and gstlal dags
		power.make_dir_if_not_exists(power.get_triggers_dir(config_parser))
	finally:
		# restore the original working directory even on error so the
		# caller's process state is never left pointing elsewhere
		os.chdir(cwd)
# Apply each instrument's tiling phase offset to whichever of the two
# segment list dictionaries carry that instrument, then clip both down
# to the overall analysis segments.
for key, offset in tiling_phase.items():
	for sld in (background_seglistdict, injection_seglistdict):
		if key in sld:
			sld[key].shift(offset)
background_seglistdict &= seglistdict
injection_seglistdict &= seglistdict


#
# Start DAG
#


power.make_dag_directories(config_parser)
log_file = tempfile.mkstemp(".log", "power_", options.condor_log_dir)[1]
dag = pipeline.CondorDAG(log_file)
dag.set_dag_file(os.path.splitext(filenames[0])[0])


#
# Build datafind jobs.
#


datafinds = power.make_datafind_stage(dag, injection_seglistdict | background_seglistdict, verbose = options.verbose)


#
# Main analysis
#
# can't use segmentlistdict's offset mechanism to do this because we need
# the offsets to still be 0 for coincidence testing later.
for instrument, offset in tiling_phase.items():
	# shift the instrument's segments in whichever dictionaries hold it
	for target in (background_seglistdict, injection_seglistdict):
		try:
			target[instrument].shift(offset)
		except KeyError:
			pass
background_seglistdict &= seglistdict
injection_seglistdict &= seglistdict


#
# Start DAG
#


power.make_dag_directories(config_parser)
dag = pipeline.CondorDAG(
	tempfile.mkstemp(".log", "power_", options.condor_log_dir)[1])
dag.set_dag_file(os.path.splitext(filenames[0])[0])


#
# Build datafind jobs.
#


datafinds = power.make_datafind_stage(
	dag,
	injection_seglistdict | background_seglistdict,
	verbose=options.verbose)


#
# Main analysis