Code example #1
##############################################################################
# Create a static frame cache file for use by pegasus

ifo_frame_type_dict = {}
for ifo in ifos:
  ifo_frame_type_dict[ifo] = inspiralutils.get_data_options(cp,ifo)[1]

# determine which data server port to use
try:
  ldrserver = cp.get("datafind","server")
except Exception:
  # fall back to the internal LDR port if no server is configured
  ldrserver = "internal_ldr_port"

pfnfile = inspiralutils.create_frame_pfn_file(ifo_frame_type_dict,opts.gps_start_time,opts.gps_end_time,server=ldrserver)
peg_frame_cache = os.path.join(os.getcwd(),inspiralutils.create_pegasus_cache_file(pfnfile))


##############################################################################
# create a local execute directory for the pipeline
tmp_exec_dir = tempfile.mkdtemp( prefix="%s-%s-%s-%d." % (''.join(ifos),
  basename, opts.gps_start_time, 
  int(opts.gps_end_time) - int(opts.gps_start_time)) )
os.chmod(tmp_exec_dir, 0o755)

##############################################################################
# Determine the segments to analyze

# first, copy the segment files and make sure that the ini file reflects the
# new path, relative to the DAG directories.
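The try/except around cp.get("datafind","server") above reads an optional ini setting and falls back to a default when it is missing. Below is a minimal, self-contained sketch of that fallback pattern; the section contents and server address are made up for illustration and are not part of the pipeline.

import configparser

# hypothetical ini contents; the real pipeline reads these from the run's ini file
cp = configparser.ConfigParser()
cp.read_string("""
[datafind]
server = datafind.example.org:80
""")

# prefer an explicit has_option check over a bare except
if cp.has_option("datafind", "server"):
  ldrserver = cp.get("datafind", "server")
else:
  ldrserver = "internal_ldr_port"
print(ldrserver)  # -> datafind.example.org:80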
Code example #2
File: lalinference_pipe.py  Project: Solaro/lalsuite
#if opts.lvalert is not None:
#  cp.set('input','lvalert-file',os.path.abspath(opts.lvalert))

if opts.gid is not None:
  cp.set('input','gid',opts.gid)

if opts.pipedown_db is not None:
  cp.set('input','pipedown-db',os.path.abspath(opts.pipedown_db))


# Create the DAG from the configparser object
dag=pipe_utils.LALInferencePipelineDAG(cp,dax=opts.dax,site=opts.grid_site)
if opts.dax:
  # Create a text file with the frames listed
  pfnfile = dag.create_frame_pfn_file()
  peg_frame_cache = inspiralutils.create_pegasus_cache_file(pfnfile)
else:
  peg_frame_cache = '/dev/null'

# A directory to store the DAX temporary files
import uuid
execdir=os.path.join(local_work_dir,'lalinference_pegasus_'+str(uuid.uuid1()))
# remember the starting directory, then move to the analysis base directory
olddir=os.getcwd()
os.chdir(cp.get('paths','basedir'))
if opts.grid_site is not None:
  site='local,'+opts.grid_site
else:
  site=None
# Create the DAX scripts
if opts.dax:
  dag.prepare_dax(tmp_exec_dir=execdir,grid_site=site,peg_frame_cache=peg_frame_cache)
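The last block above builds a uniquely named Pegasus working directory with uuid.uuid1() and records the current directory before changing into the base directory. A standalone sketch of that pattern, with placeholder directory names, is:

import os
import tempfile
import uuid

local_work_dir = tempfile.mkdtemp()  # stand-in for the configured work directory
execdir = os.path.join(local_work_dir, 'lalinference_pegasus_' + str(uuid.uuid1()))
os.makedirs(execdir)

olddir = os.getcwd()
try:
  os.chdir(execdir)
  # ... write the DAX/DAG temporary files here ...
finally:
  os.chdir(olddir)  # always return to the original directory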
Code example #3
      #if opts.lvalert is not None:
      #  cp.set('input','lvalert-file',os.path.abspath(opts.lvalert))

      if opts.gid is not None:
        cp.set('input','gid',opts.gid)

      if opts.pipedown_db is not None:
        cp.set('input','pipedown-db',os.path.abspath(opts.pipedown_db))


      # Create the DAG from the configparser object
      dag=pipe_utils.LALInferencePipelineDAG(cp,dax=opts.dax,site=opts.grid_site)
      if opts.dax and not cp.has_option('lalinference','fake-cache'):
        # Create a text file with the frames listed
        pfnfile = dag.create_frame_pfn_file()
        peg_frame_cache = inspiralutils.create_pegasus_cache_file(pfnfile)
      else:
        peg_frame_cache = '/dev/null'

      # A directory to store the DAX temporary files
      execdir=os.path.join(local_work_dir,'lalinference_pegasus_'+str(uuid.uuid1()))
      olddir=os.getcwd()
      os.chdir(cp.get('paths','basedir'))
      if opts.grid_site is not None:
        site='local,'+opts.grid_site
      else:
        site=None
      # Create the DAX scripts
      if opts.dax:
        dag.prepare_dax(tmp_exec_dir=execdir,grid_site=site,peg_frame_cache=peg_frame_cache)
        # Ugly hack to replace pegasus.transfer.links=true in the pegasus.properties files created by pipeline.py
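The closing comment refers to post-editing the pegasus.properties files written by pipeline.py; the hack itself is truncated in this excerpt. A generic sketch of rewriting a single property in place is shown below; the file path and the replacement value are assumptions, not the pipeline's actual edit.

import fileinput

props_file = 'pegasus.properties'  # hypothetical path; pipeline.py generates the real files

# rewrite the pegasus.transfer.links line in place, leaving all other lines untouched
for line in fileinput.input(props_file, inplace=True):
  if line.strip().startswith('pegasus.transfer.links'):
    print('pegasus.transfer.links=false')  # assumed replacement value
  else:
    print(line, end='')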
Code example #4
ifo_frame_type_dict = {}
for ifo in ifos:
    ifo_frame_type_dict[ifo] = inspiralutils.get_data_options(cp, ifo)[1]

# determine which data server port to use
try:
    ldrserver = cp.get("datafind", "server")
except Exception:
    # fall back to the internal LDR port if no server is configured
    ldrserver = "internal_ldr_port"

pfnfile = inspiralutils.create_frame_pfn_file(ifo_frame_type_dict,
                                              opts.gps_start_time,
                                              opts.gps_end_time,
                                              server=ldrserver)
peg_frame_cache = os.path.join(
    os.getcwd(), inspiralutils.create_pegasus_cache_file(pfnfile))

##############################################################################
# create a local execute directory for the pipeline
tmp_exec_dir = tempfile.mkdtemp(
    prefix="%s-%s-%s-%d." %
    (''.join(ifos), basename, opts.gps_start_time,
     int(opts.gps_end_time) - int(opts.gps_start_time)))
os.chmod(tmp_exec_dir, 0o755)

##############################################################################
# Determine the segments to analyze

# first, copy the segment files and make sure that the ini file reflects the
# new path, relative to the DAG directories.
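The mkdtemp prefix above packs the IFO list, the run's base name, the GPS start time and the duration into the directory name, so concurrent runs get distinct, self-describing scratch directories. A standalone sketch with placeholder values (the IFO names, base name and GPS times below are illustrative only):

import os
import tempfile

ifos = ['H1', 'L1']           # placeholder interferometer list
basename = 'my_run'           # placeholder run name
gps_start_time = 1000000000   # placeholder GPS times
gps_end_time = 1000002048

prefix = "%s-%s-%s-%d." % (''.join(ifos), basename, gps_start_time,
                           gps_end_time - gps_start_time)
tmp_exec_dir = tempfile.mkdtemp(prefix=prefix)
os.chmod(tmp_exec_dir, 0o755)  # make the scratch directory world-readable
print(tmp_exec_dir)            # e.g. /tmp/H1L1-my_run-1000000000-2048.XXXXXXXX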