	if not filenames:
		filenames = []
	return options, (filenames or [])

###############################################################################
##### MAIN ####################################################################
###############################################################################

# Parse options and config files
options, filenames = parse_command_line()
default_cp = create_default_config_wod(options.config_file)
cp = default_cp.get_cp()

# Initialize dag
dag = stfu_pipe.followUpDAG(options.config_file, cp, options)

# If the "do_remoteScans" option is set, we need to prepare a job that checks
# the grid proxy path at the start of the dag.
setup_proxy_job = stfu_pipe.setupProxyJob(options, cp)
setup_proxy_node = stfu_pipe.setupProxyNode(dag, setup_proxy_job, cp, options)

search = 'gps_only'

q_ht_data_find_job = stfu_pipe.fuDataFindJob(cp, tag_base='qdatafind', dir=search)
q_rds_data_find_job = stfu_pipe.fuDataFindJob(cp, tag_base='Q_RDS', dir=search)
ht_qscan_job = stfu_pipe.qscanJob(options, cp, dir=search, tag_base='FG_HT')
remote_datafind_job = stfu_pipe.remoteDatafindJob(options, cp, tag_base='Q_RDS', dir=search)
remote_rds_qscan_job = stfu_pipe.remoteQscanJob(options, cp, dir=search, tag_base='FG_RDS')
remote_seis_qscan_job = stfu_pipe.remoteQscanJob(options, cp, dir=search, tag_base='FG_SEIS_RDS')
distrib_remote_rds_qscan_job = stfu_pipe.distribRemoteQscanJob(options, cp, dir=search, tag_base='FG_RDS')
distrib_remote_seis_qscan_job = stfu_pipe.distribRemoteQscanJob(options, cp, dir=search, tag_base='FG_SEIS_RDS')
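# Sketch (assumption, not taken from stfu_pipe): the remote-scan jobs above
# rely on a valid grid proxy, which the setupProxyNode job is meant to verify
# at the start of the dag.  An equivalent manual pre-flight check, using the
# standard Globus grid-proxy-info tool, might look like this:
#
#   import subprocess
#   # exit status 0 means a proxy valid for at least another hour exists
#   if subprocess.call(["grid-proxy-info", "-exists", "-valid", "1:00"]) != 0:
#       raise RuntimeError("no valid grid proxy found; run grid-proxy-init first")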
ifo_range = ",".join(stfu_pipe.get_day_boundaries(int(gpstime.GpsSecondsFromPyUTC(time.time())) - 86400)) # print "Start time : " + ifo_range.split(",")[0] + " End Time : " + ifo_range.split(",")[-1] range_string = "" #Check the time ranges for each ifo in the ini file and , if they are left empty fill them with yesterday's start-end times. for ifo_index,ifo in enumerate(ifos_list): if cp.has_option("followup-background-qscan-times",ifo+"range"): if not cp.get("followup-background-qscan-times",ifo+"range"): cp.set("followup-background-qscan-times",ifo+"range",ifo_range) range_string = string.strip(cp.get("followup-background-qscan-times",ifo+"range")).replace(',','_') #Get current UTC time to be used in the ini file name time_now = "_".join([str(i) for i in time.gmtime()[0:6]]) #Initialize dag dag = stfu_pipe.followUpDAG(time_now + "-" + range_string + ".ini",cp,opts) # CONDOR JOB CLASSES htdataJob = stfu_pipe.fuDataFindJob(cp,tag_base='Q_HT',dir='') rdsdataJob = stfu_pipe.fuDataFindJob(cp,tag_base='Q_RDS',dir='') htQscanBgJob = stfu_pipe.qscanJob(opts,cp,tag_base='BG_HT',dir='') rdsQscanBgJob = stfu_pipe.qscanJob(opts,cp,tag_base='BG_RDS',dir='') seisQscanBgJob = stfu_pipe.qscanJob(opts,cp,tag_base='BG_SEIS_RDS',dir='') setupLogJob = setupLogFileJob(opts,cp) start_node = setupLogFileNode(dag,setupLogJob,cp,range_string,'start') for ifo in ifos_list: # FIX ME: input argument segFile is not needed any more segFile = {}
range_string = "" #Check the time ranges for each ifo in the ini file and , if they are left empty fill them with yesterday's start-end times. for ifo_index, ifo in enumerate(ifos_list): if cp.has_option("followup-background-qscan-times", ifo + "range"): if not cp.get("followup-background-qscan-times", ifo + "range"): cp.set("followup-background-qscan-times", ifo + "range", ifo_range) range_string = string.strip( cp.get("followup-background-qscan-times", ifo + "range")).replace(',', '_') #Get current UTC time to be used in the ini file name time_now = "_".join([str(i) for i in time.gmtime()[0:6]]) #Initialize dag dag = stfu_pipe.followUpDAG(time_now + "-" + range_string + ".ini", cp, opts) # CONDOR JOB CLASSES htdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_HT', dir='') rdsdataJob = stfu_pipe.fuDataFindJob(cp, tag_base='Q_RDS', dir='') htQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_HT', dir='') rdsQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_RDS', dir='') seisQscanBgJob = stfu_pipe.qscanJob(opts, cp, tag_base='BG_SEIS_RDS', dir='') setupLogJob = setupLogFileJob(opts, cp) start_node = setupLogFileNode(dag, setupLogJob, cp, range_string, 'start') for ifo in ifos_list: # FIX ME: input argument segFile is not needed any more segFile = {}