Example #1
import sys

# The opening of this excerpt is truncated; the condition below is
# reconstructed from the doExtTrig check further down.
if doExtTrig:
    exttrigInjections = [startExttrig, stopExttrig]

    # check the values given
    if startExttrig < 1:
        print("exttrig-inj-start must be larger than 0.", file=sys.stderr)
        sys.exit(1)
    if startExttrig > stopExttrig:
        print("exttrig-inj-stop must be larger than "
              "exttrig-inj-start.", file=sys.stderr)
        sys.exit(1)
else:
    exttrigInjections = [0, 0]

doSlides = cp.has_option('input', 'do-long-slides')

tmplt_job = inspiral.TmpltBankJob(cp, opts.dax)

# spin checker:
spincheck_job = inspiral.PTFSpinCheckerJob(cp, opts.dax)

# inspiral:
insp_jobs = inspiral.PTFInspiralJob(cp, opts.dax)

if doExtTrig:
    insp_jobs.add_opt('analyze-inj-segs-only', '')

for ifo1 in ifo_list:
    if do[ifo1]:
        # option names take the lower-case ifo, e.g. 'h1-data'
        spincheck_job.add_opt(ifo1.lower() + '-data', '')
        insp_jobs.add_opt(ifo1.lower() + '-data', '')
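
This excerpt assumes a ConfigParser-style cp, parsed command-line options opts, and per-ifo bookkeeping (ifo_list, do) from the surrounding script; the exttrig values likewise come from option parsing. A minimal stand-in for those objects might look like the following sketch (all names and values here are placeholders, not the pipeline's real configuration):

from configparser import ConfigParser

# Hypothetical stand-ins for the objects the excerpt expects.
cp = ConfigParser()
cp.add_section('input')
cp.set('input', 'do-long-slides', '')   # presence alone enables long slides

class Opts:
    dax = False                         # whether to emit a Pegasus DAX

opts = Opts()
ifo_list = ['H1', 'L1']                 # interferometers to analyse
do = {ifo: True for ifo in ifo_list}    # which ifos are switched on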
Example #2
# The enclosing loop header is truncated in this excerpt; it presumably
# iterates over the interferometer list, as in the other examples.
for ifo in ifo_list:
    if not cp.has_option('segments', ifo.lower() + '-analyze'):
        continue
        continue

    # decide if we need to segment the data
    available_segments = get_valid_segments(
        cp.get('segfind', 'segment-url'), cp.get('framefind', 'base-dir'), ifo,
        cp.get('segments',
               ifo.lower() + '-analyze'), gps_start_time, gps_end_time)

    if not available_segments:
        print("No available segments for %s, skipping" % ifo)
        continue

    # create the Condor jobs that will be used in the DAG
    df_job = pipeline.LSCDataFindJob('cache', 'logs', cp)
    tmplt_job = inspiral.TmpltBankJob(cp)

    # Based on S6A results, ttrigscan clustering has
    # been replaced with 30-ms window clustering
    # ts_job = TrigscanJob(cp)

    si_job_coarse = SiClusterJobCoarse(cp)
    si_job_fine = SiClusterJobFine(cp)
    cp_job = FilesystemJob('cp')

    # Add ifo-specific template config
    if cp.has_section(ifo.lower() + '-tmpltbank'):
        tmplt_job.add_ini_opts(cp, ifo.lower() + '-tmpltbank')

    # Create a job to split the template into parallelization pieces
    split_job = inspiral.SplitBankJob(cp)
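
The cp.get calls above expect [segfind], [framefind], and [segments] sections in the configuration. A sketch of matching ini contents, built in Python for brevity (the URL, path, and segment name are placeholders, not a real segment server or frame archive):

from configparser import ConfigParser

# Illustrative configuration; every value here is a placeholder.
cp = ConfigParser()
cp['segfind'] = {'segment-url': 'https://segments.example.org'}
cp['framefind'] = {'base-dir': '/archive/frames'}
cp['segments'] = {'h1-analyze': 'H1:EXAMPLE-ANALYZABLE:1'}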
Example #3
# The opening of this excerpt is truncated; frame_types and lsync_file are
# set up in the elided lines above, so minimal initialisations are supplied
# here to keep the excerpt self-contained.
frame_types = []
lsync_file = None
try:
  # (elided: lsync_file is read from the configuration here)
  try: frame_types.append(cp.get('input', 'virgo-type'))
  except Exception: pass
  try: frame_types.append(cp.get('input', 'geo-type'))
  except Exception: pass
  frame_types = [t for t in frame_types if t]
except Exception:
  lsync_file = None
df_job = pipeline.LSCDataFindJob(
  'cache','logs',cp,opts.dax,lsync_file,'|'.join(frame_types))
df_job.set_sub_file( basename + '.datafind' + subsuffix )

# tmpltbank:
tmplt_jobs = {}

for ifo in ifo_list:
  tmplt_jobs[ifo] = inspiral.TmpltBankJob(cp,opts.dax)
  tmplt_jobs[ifo].set_sub_file( basename + '.tmpltbank_' + ifo + subsuffix )

# inspinj:
inspinj_job = inspiral.InspInjJob(cp) 
inspinj_job.set_sub_file( basename + '.inspinj' + subsuffix )

if opts.noop_inspinj:
  inspinj_job.add_condor_cmd("noop_job", "true")

# inspiral:
insp_jobs = {}

for ifo in ifo_list:
  insp_jobs[ifo] = inspiral.InspiralJob(cp,opts.dax)
  insp_jobs[ifo].set_sub_file( basename + '.inspiral_' + ifo + subsuffix )
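
The '|'.join(frame_types) passed to LSCDataFindJob composes whatever frame types were found into a single pattern. A small illustration of the filter-and-join idiom used above (the type names are made up):

# Hypothetical frame types; the empty string stands for a missing option.
frame_types = ['H1_EXAMPLE_TYPE', '', 'V1_EXAMPLE_TYPE']
frame_types = [t for t in frame_types if t]   # drop empty entries
print('|'.join(frame_types))                  # H1_EXAMPLE_TYPE|V1_EXAMPLE_TYPE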