# Parse options and config files
options, filenames = parse_command_line()

default_cp = create_default_config_wod(options.config_file)
cp = default_cp.get_cp()

# Initialize dag
dag = stfu_pipe.followUpDAG(options.config_file, cp, options)

# If the "do_remoteScans" option is set, prepare a job that checks the grid
# proxy path at the start of the dag.
setup_proxy_job = stfu_pipe.setupProxyJob(options, cp)
setup_proxy_node = stfu_pipe.setupProxyNode(dag, setup_proxy_job, cp, options)

search = 'gps_only'

# CONDOR JOB CLASSES
q_ht_data_find_job = stfu_pipe.fuDataFindJob(cp, tag_base='qdatafind', dir=search)
q_rds_data_find_job = stfu_pipe.fuDataFindJob(cp, tag_base='Q_RDS', dir=search)
ht_qscan_job = stfu_pipe.qscanJob(options, cp, dir=search, tag_base='FG_HT')
remote_datafind_job = stfu_pipe.remoteDatafindJob(options, cp, tag_base='Q_RDS', dir=search)
remote_rds_qscan_job = stfu_pipe.remoteQscanJob(options, cp, dir=search, tag_base='FG_RDS')
remote_seis_qscan_job = stfu_pipe.remoteQscanJob(options, cp, dir=search, tag_base='FG_SEIS_RDS')
distrib_remote_rds_qscan_job = stfu_pipe.distribRemoteQscanJob(options, cp, dir=search, tag_base='FG_RDS')
distrib_remote_seis_qscan_job = stfu_pipe.distribRemoteQscanJob(options, cp, dir=search, tag_base='FG_SEIS_RDS')

# Build the list of GPS times to follow up, either from the command line or
# from an input file.
if options.gps_times:
    gpsevents = time_only_events(options.gps_times)
elif options.input_file:
    gpsevents = extractTimesFromFile(options.input_file)
else:
    print >> sys.stderr, "An argument is missing: you need to use one of the options --gps-times or --input-file"
    sys.exit(1)
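# Note: the helpers time_only_events() and extractTimesFromFile() are defined
# elsewhere in this script. For illustration only, and assuming the input file
# simply lists one GPS time per line (an assumption, not verified here), a
# minimal reader could look like:
#
#   def extract_times_sketch(path):
#       with open(path) as f:
#           return [float(line) for line in f if line.strip()]
#
# The real helpers may parse a different format (for example per-ifo times).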
for ifo_index, ifo in enumerate(ifos_list):
    if cp.has_option("followup-background-qscan-times", ifo + "range"):
        if not cp.get("followup-background-qscan-times", ifo + "range"):
            cp.set("followup-background-qscan-times", ifo + "range", ifo_range)

range_string = string.strip(cp.get("followup-background-qscan-times", ifo + "range")).replace(',', '_')

# Get current UTC time to be used in the ini file name
time_now = "_".join([str(i) for i in time.gmtime()[0:6]])

# Initialize dag
dag = stfu_pipe.followUpDAG(time_now + "-" + range_string + ".ini", cp, opts)

# CONDOR JOB CLASSES
htdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_HT', dir='')
rdsdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_RDS', dir='')
htQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_HT', dir='')
rdsQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_RDS', dir='')
seisQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_SEIS_RDS', dir='')
setupLogJob = setupLogFileJob(opts, cp)

start_node = setupLogFileNode(dag, setupLogJob, cp, range_string, 'start')

for ifo in ifos_list:
    # FIX ME: input argument segFile is not needed any more
    segFile = {}
    times, timeListFile = fu_utils.getQscanBackgroundTimes(cp, opts, ifo, segFile)

    # Prepare files for remote scans at Lyon CC...
    if opts.prepare_scan_ccin2p3 and ifo in cp.get("fu-remote-jobs", "remote-ifos").strip().split(","):
        if times:
if not cp.get("followup-background-qscan-times", ifo + "range"): cp.set("followup-background-qscan-times", ifo + "range", ifo_range) range_string = string.strip( cp.get("followup-background-qscan-times", ifo + "range")).replace(',', '_') #Get current UTC time to be used in the ini file name time_now = "_".join([str(i) for i in time.gmtime()[0:6]]) #Initialize dag dag = stfu_pipe.followUpDAG(time_now + "-" + range_string + ".ini", cp, opts) # CONDOR JOB CLASSES htdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_HT', dir='') rdsdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_RDS', dir='') htQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_HT', dir='') rdsQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_RDS', dir='') seisQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_SEIS_RDS', dir='') setupLogJob = setupLogFileJob(opts, cp) start_node = setupLogFileNode(dag, setupLogJob, cp, range_string, 'start') for ifo in ifos_list: # FIX ME: input argument segFile is not needed any more segFile = {} times, timeListFile = fu_utils.getQscanBackgroundTimes( cp, opts, ifo, segFile) #Prepare files for remote scans at Lyon CC... if opts.prepare_scan_ccin2p3 and ifo in cp.get(
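# For reference, a hypothetical configuration fragment covering the options
# read above (section and option names are taken from the code; the values are
# illustrative assumptions, not project defaults):
#
#   [followup-background-qscan-times]
#   H1range = 874000000,875000000
#   L1range = 874000000,875000000
#
#   [fu-remote-jobs]
#   remote-ifos = V1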