Example #1
def create_dag_node(self):
    """
    Return a CondorDAGNode that represents this entire DAG.
    """
    dir, fname = os.path.split(self.get_dag_path())
    job = pipeline.CondorDAGManJob(fname, dir)
    node = pipeline.CondorDAGNode(job)
    return node
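A minimal usage sketch (not part of the original example): assuming an object named analysis that provides the create_dag_node() method above, the returned node can be attached to a parent DAG with the same glue.pipeline calls used in the later examples.

# Hypothetical usage: 'analysis' and the outer DAG file names are assumed here
outer_dag = pipeline.CondorDAG('outer.log')
outer_dag.set_dag_file('outer')
node = analysis.create_dag_node()
outer_dag.add_node(node)
outer_dag.write_sub_files()
outer_dag.write_dag()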
Example #2
def finalise_DAG(dag, parents=[]):

    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()
    dagfile = os.path.split(dag.get_dag_file())
    DAGManJob = pipeline.CondorDAGManJob(dagfile[1], dagfile[0])
    DAGManNode = pipeline.CondorDAGManNode(DAGManJob)
    for node in parents:
        DAGManNode.add_parent(node)

    return DAGManNode
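A brief sketch of how finalise_DAG might be called (child_dag, setup_node and outer_dag are illustrative names, not from the source); the returned DAGMan node is added to an enclosing DAG just like the dagnode objects in the examples below.

# Hypothetical usage: finalise a fully built child DAG and hang it off an outer DAG
manager_node = finalise_DAG(child_dag, parents=[setup_node])
outer_dag.add_node(manager_node)
outer_dag.write_sub_files()
outer_dag.write_dag()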
Example #3
                        .format(myinjpath, injpath)))
                    sys.exit(1)
            else:
                # The link doesn't exist, so create it and update config
                try:
                    os.link(os.path.abspath(injpath), myinjpath)
                except OSError:
                    from shutil import copyfile
                    copyfile(injpath, myinjpath)
                cp.set('input', 'injection-file', myinjpath)

    for this_cp in setup_roq(cp):
        # Create the DAG from the configparser object
        dag = pipe_utils.LALInferencePipelineDAG(this_cp, dax=False)
        dagjob = pipeline.CondorDAGManJob(
            os.path.join(this_cp.get('paths', 'basedir'), dag.get_dag_file()),
            this_cp.get('paths', 'basedir'))
        dagnode = pipeline.CondorDAGManNode(dagjob)
        outerdag.add_node(dagnode)
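        # Write this sub-DAG's submit files, DAG file and shell script to disk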
        dag.write_sub_files()
        dag.write_dag()
        dag.write_script()

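# Write out the outer DAG that ties the per-configuration sub-DAGs together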
outerdag.write_sub_files()
outerdag.write_dag()
outerdag.write_script()

# End of program
print('Successfully created DAG file.')

if opts.condor_submit:
Example #4
          with open('sites.xml','w') as fout:
            for line in lines:
              fout.write(line)
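        # If requested, inject an accounting_group_user Condor profile into sites.xml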
        if cp.has_option('analysis','accounting_group_user'):
          lines=[]
          with open('sites.xml') as fin:
            for line in fin:
              if '<profile namespace="condor" key="getenv">True</profile>' in line:
                line = (line
                        + '    <profile namespace="condor" key="accounting_group_user">'
                        + cp.get('analysis', 'accounting_group_user')
                        + '</profile>\n')
              lines.append(line)
          with open('sites.xml','w') as fout:
            for line in lines:
              fout.write(line)

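      # Wrap this run's sub-DAG in a DAGMan job and register it as a single
      # node of the outer DAG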
      full_dag_path=os.path.join(cp.get('paths','basedir'),dag.get_dag_file())
      dagjob=pipeline.CondorDAGManJob(full_dag_path,dir=rundir_root)
      dagnode=pipeline.CondorDAGManNode(dagjob)
      outerdag.add_node(dagnode)

      dag.write_sub_files()
      dag.write_dag()
      dag.write_script()
      os.chdir(olddir)

if opts.dax:
  # Create a text file with the frames listed
  pfnfile = outerdag.create_frame_pfn_file()
  peg_frame_cache = inspiralutils.create_pegasus_cache_file(pfnfile)

outerdag.write_sub_files()
outerdag.write_dag()
Example #5
    daglogdir, 'lalinference_injection_test_' + str(uuid.uuid1()) + '.log')
outerdag = pipeline.CondorDAG(outerdaglog)
outerdag.set_dag_file(os.path.join(rundir, 'priortest'))

# Run code with prior sampling
trig_time = 1085855789
fake_event = pipe_utils.Event(trig_time=trig_time)
tfpath = os.path.join(rundir, 'time.txt')
tfile = open(tfpath, 'w')
print('%i\n' % (trig_time), file=tfile)
tfile.close()
prior_cp.set('input', 'gps-time-file', tfpath)

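# Build the prior-sampling sub-DAG and wrap it in a DAGMan job/node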
priordag = pipe_utils.LALInferencePipelineDAG(prior_cp)
priordag.set_dag_file(os.path.join(priordir, 'lalinference_priorsample'))
priordagjob = pipeline.CondorDAGManJob(priordag.get_dag_file(), dir=priordir)
priordagnode = pipeline.CondorDAGManNode(priordagjob)
# Find the output file
pagenode = [n for n in priordag.get_nodes()
            if isinstance(n, pipe_utils.ResultsPageNode)][0]
priorfile = pagenode.get_pos_file()

# Convert prior samples to injections
convertsub = os.path.join(rundir, 'samples2injections.sub')
converterr = os.path.join(
    outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).err')
convertout = os.path.join(
    outerlogdir, 'samples2injection-$(cluster)-$(process)-$(node).out')

if opts.injections:
    injfile = os.path.abspath(opts.injections)
Example #6
  ############################################################################
  # MVSC Calculation

  if 'FULL_DATA' in tag and veto_cat in sim_caches:
    print "\tsetting up MVSC dag..."
    mvsc_dag_name = options.config_file.replace('.ini','')+'_mvsc_'+tag+'_n'+cp.get("mvsc_dag","number-of-trees")+'_l'+cp.get("mvsc_dag","leaf-size")+'_s'+cp.get("mvsc_dag","sampled-parameters")+'_c'+cp.get("mvsc_dag","criterion-for-optimization")+'.dag'
    mvsc_dag_generator_job = inspiral.MVSCDagGenerationJob(cp)
    for key,val in cp.items("mvsc_dag"):
      mvsc_dag_generator_job.add_opt(key,val)
    mvsc_dag_generator_job.add_opt("ini-file", options.config_file)
    mvsc_dag_generator_node = inspiral.MVSCDagGenerationNode(mvsc_dag_generator_job)
    mvsc_dag_generator_node.set_user_tag(tag)
    mvsc_dag_generator_node.set_database(result_db.path)
    mvsc_dag_generator_node.add_parent(comp_durs_node)
    dag.add_node(mvsc_dag_generator_node)
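    # Run the generated MVSC DAG as a nested DAGMan job once the generator node completes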
    mvsc_dag_job = pipeline.CondorDAGManJob(mvsc_dag_name, os.getcwd(), None)
    mvsc_dag_node = pipeline.CondorDAGManNode(mvsc_dag_job)
    mvsc_dag_node.add_parent(mvsc_dag_generator_node)
    dag.add_node(mvsc_dag_node)

  ############################################################################
  # Compute the uncombined false alarm rates
  
  print "\tsetting up cfar nodes:"
  print "\t\tfor uncombined false alarm rates..."
  
  # set node options: output database is same as input
  ucfar_node = inspiral.CFarNode( ucfar_job )
  ucfar_node.set_category('cfar')
  ucfar_node.set_tmp_space( tmp_space )
  ucfar_node.set_input( result_db.path )