def finalise_DAG(dag, parents=None):
    """Write out *dag*'s submit/DAG/script files and wrap it in a DAGMan node.

    Parameters
    ----------
    dag : pipeline.CondorDAG
        The DAG to finalise; its sub files, dag file and shell script are
        written to disk.
    parents : iterable of CondorDAGNode, optional
        Nodes the returned DAGMan node should depend on.  Defaults to no
        parents.

    Returns
    -------
    pipeline.CondorDAGManNode
        A node that runs *dag* as a sub-DAG, with *parents* as its parents.
    """
    # Use a None sentinel instead of a mutable default argument ([] is
    # created once at definition time and shared across calls).
    if parents is None:
        parents = []
    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()
    # CondorDAGManJob wants the DAG file name and its directory separately.
    dagdir, dagname = os.path.split(dag.get_dag_file())
    dagman_job = pipeline.CondorDAGManJob(dagname, dagdir)
    dagman_node = pipeline.CondorDAGManNode(dagman_job)
    for node in parents:
        dagman_node.add_parent(node)
    return dagman_node
outerdag = pipeline.CondorDAG(outerdaglog) outerdag.set_dag_file(os.path.join(rundir, 'priortest')) # Run code with prior sampling trig_time = 1085855789 fake_event = pipe_utils.Event(trig_time=trig_time) tfpath = os.path.join(rundir, 'time.txt') tfile = open(tfpath, 'w') print('%i\n' % (trig_time), file=tfile) tfile.close() prior_cp.set('input', 'gps-time-file', tfpath) priordag = pipe_utils.LALInferencePipelineDAG(prior_cp) priordag.set_dag_file(os.path.join(priordir, 'lalinference_priorsample')) priordagjob = pipeline.CondorDAGManJob(priordag.get_dag_file(), dir=priordir) priordagnode = pipeline.CondorDAGManNode(priordagjob) # Find the output file pagenode = filter(lambda n: isinstance(n, pipe_utils.ResultsPageNode), priordag.get_nodes())[0] priorfile = pagenode.get_pos_file() # Convert prior samples to injections convertsub = os.path.join(rundir, 'samples2injections.sub') converterr = os.path.join( outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).err') convertout = os.path.join( outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).out') if opts.injections: injfile = os.path.abspath(opts.injections) else:
else: # The link doens't exist, so create it and update config try: os.link(os.path.abspath(injpath), myinjpath) except: from shutil import copyfile copyfile(injpath, myinjpath) cp.set('input', 'injection-file', myinjpath) for this_cp in setup_roq(cp): # Create the DAG from the configparser object dag = pipe_utils.LALInferencePipelineDAG(this_cp, dax=False) dagjob = pipeline.CondorDAGManJob( os.path.join(this_cp.get('paths', 'basedir'), dag.get_dag_file()), this_cp.get('paths', 'basedir')) dagnode = pipeline.CondorDAGManNode(dagjob) outerdag.add_node(dagnode) dag.write_sub_files() dag.write_dag() dag.write_script() outerdag.write_sub_files() outerdag.write_dag() outerdag.write_script() # End of program print('Successfully created DAG file.') if opts.condor_submit: import subprocess from subprocess import Popen
# MVSC Calculation if 'FULL_DATA' in tag and veto_cat in sim_caches: print "\tsetting up MVSC dag..." mvsc_dag_name = options.config_file.replace('.ini','')+'_mvsc_'+tag+'_n'+cp.get("mvsc_dag","number-of-trees")+'_l'+cp.get("mvsc_dag","leaf-size")+'_s'+cp.get("mvsc_dag","sampled-parameters")+'_c'+cp.get("mvsc_dag","criterion-for-optimization")+'.dag' mvsc_dag_generator_job = inspiral.MVSCDagGenerationJob(cp) for key,val in cp.items("mvsc_dag"): mvsc_dag_generator_job.add_opt(key,val) mvsc_dag_generator_job.add_opt("ini-file", options.config_file) mvsc_dag_generator_node = inspiral.MVSCDagGenerationNode(mvsc_dag_generator_job) mvsc_dag_generator_node.set_user_tag(tag) mvsc_dag_generator_node.set_database(result_db.path) mvsc_dag_generator_node.add_parent(comp_durs_node) dag.add_node(mvsc_dag_generator_node) mvsc_dag_job = pipeline.CondorDAGManJob(mvsc_dag_name, os.getcwd(), None) mvsc_dag_node = pipeline.CondorDAGManNode(mvsc_dag_job) mvsc_dag_node.add_parent(mvsc_dag_generator_node) dag.add_node(mvsc_dag_node) ############################################################################ # Compute the uncombined false alarm rates print "\tsetting up cfar nodes:" print "\t\tfor uncombined false alarm rates..." # set node options: output database is same as input ucfar_node = inspiral.CFarNode( ucfar_job ) ucfar_node.set_category('cfar') ucfar_node.set_tmp_space( tmp_space ) ucfar_node.set_input( result_db.path ) ucfar_node.set_output( result_db.path )