def veto_injections(workflow, inj_file, veto_file, veto_name, out_dir,
                    tags=None):
    """Set up a job to remove injections that fall within vetoed time.

    Creates a strip_injections node, adds it to the workflow, and returns
    the XML file of the surviving injections.
    """
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)

    node = Executable(workflow.cp, 'strip_injections', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_opt('--segment-name', veto_name)
    node.add_input_opt('--veto-file', veto_file)
    node.add_input_opt('--injection-file', inj_file)
    node.add_opt('--ifos', ' '.join(workflow.ifos))
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]
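
# Illustrative usage sketch, not part of the module's pipeline: one plausible
# way to call veto_injections once earlier workflow steps have produced an
# injection file and a veto file. The segment name 'CUMULATIVE_CAT_2', the
# output directory and the tag 'FULL_INJ' are assumptions made for this
# example only.
def _example_veto_injections(workflow, inj_file, veto_file):
    """Sketch: strip injections that fall inside vetoed time."""
    return veto_injections(workflow, inj_file, veto_file,
                           'CUMULATIVE_CAT_2', 'injections_vetoed',
                           tags=['FULL_INJ'])
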
def create_segs_from_cats_job(cp, out_dir, ifo_string, tag=None):
    """
    This function creates the Executable that will be used to run
    ligolw_segments_from_cats as part of the workflow.

    Parameters
    -----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser
        The in-memory representation of the configuration (.ini) files
    out_dir : path
        Directory in which to put output files
    ifo_string : string
        String containing all active ifos, e.g. "H1L1V1"
    tag : string, optional (default=None)
        Use this to specify a tag. This can be used if this module is being
        called more than once to give call-specific configuration (by setting
        options in [workflow-segments-${TAG}] rather than
        [workflow-segments]). This is also used to tag the Files returned by
        the class to uniquify the Files and the actual filenames.
        FIXME: Filenames may not be unique with current codes!

    Returns
    --------
    job : Executable instance
        The Executable instance that will run segments_from_cats jobs
    """
    segServerUrl = cp.get_opt_tags("workflow-segments",
                                   "segments-database-url", [tag])
    vetoDefFile = cp.get_opt_tags("workflow-segments",
                                  "segments-veto-definer-file", [tag])
    if tag:
        currTags = [tag]
    else:
        currTags = []
    job = Executable(cp, 'segments_from_cats', universe='local',
                     ifos=ifo_string, out_dir=out_dir, tags=currTags)
    job.add_opt('--separate-categories')
    job.add_opt('--segment-url', segServerUrl)
    job.add_opt('--veto-file', vetoDefFile)
    # FIXME: Would like the proxy in the Workflow instance
    # FIXME: Explore using the x509 condor commands
    # Set up the proxy to be accessible in an NFS location.
    # If the user has logged in with gsissh then X509_USER_PROXY will be set.
    # However, certain users log in with an ssh key and then run
    # ligo-proxy-init; that route does not set X509_USER_PROXY, so fall back
    # to the default file location.
    if 'X509_USER_PROXY' in os.environ:
        proxy = os.getenv('X509_USER_PROXY')
    else:
        proxy = "/tmp/x509up_u%d" % os.getuid()
    proxyfile = os.path.join(out_dir, 'x509up.file')
    try:
        shutil.copyfile(proxy, proxyfile)
    except IOError:
        raise RuntimeError('Cannot find certificate in %s. '
                           'Make sure that ligo-proxy-init '
                           'has been run.' % proxy)

    job.add_profile('condor', 'environment',
                    'USER=$ENV(USER);X509_USER_PROXY=%s' % proxyfile)

    return job
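
# Illustrative usage sketch, assuming a parsed WorkflowConfigParser whose
# [workflow-segments] section supplies the segments-database-url and
# segments-veto-definer-file options read by create_segs_from_cats_job.
# The 'segments' output directory and 'SCIENCE' tag are hypothetical.
def _example_segs_from_cats_job(cp):
    """Sketch: build the segments_from_cats Executable for an H1L1V1 run."""
    return create_segs_from_cats_job(cp, 'segments', 'H1L1V1', tag='SCIENCE')
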
def rerank_coinc_followup(workflow, statmap_file, bank_file, out_dir,
                          tags=None, injection_file=None,
                          ranking_file=None):
    """Set up the optional candidate-reranking stage of the workflow.

    If the configuration has no [workflow-rerank] section, the input
    statmap file is returned unchanged. Otherwise, nodes are added to
    reduce the strain data, follow up the coincident triggers in
    parallel, and produce a reranked statmap file, which is returned.
    """
    if tags is None:
        tags = []

    make_analysis_dir(out_dir)

    if not workflow.cp.has_section("workflow-rerank"):
        logging.info("No reranking done in this workflow")
        return statmap_file
    else:
        logging.info("Setting up reranking of candidates")

    # Generate reduced data files (maybe this could also be used elsewhere?)
    stores = FileList([])
    for ifo in workflow.ifos:
        make_analysis_dir('strain_files')
        node = Executable(workflow.cp, 'strain_data_reduce', ifos=[ifo],
                          out_dir='strain_files', tags=tags).create_node()
        node.add_opt('--gps-start-time', workflow.analysis_time[0])
        node.add_opt('--gps-end-time', workflow.analysis_time[1])
        if injection_file:
            node.add_input_opt('--injection-file', injection_file)

        fil = node.new_output_file_opt(workflow.analysis_time, '.hdf',
                                       '--output-file')
        stores.append(fil)
        workflow += node

    # Generate trigger input file
    node = Executable(workflow.cp, 'rerank_trigger_input', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--statmap-file', statmap_file)
    node.add_input_opt('--bank-file', bank_file)
    trigfil = node.new_output_file_opt(workflow.analysis_time, '.hdf',
                                       '--output-file')
    workflow += node

    # Parallelize coinc trigger followup
    factor = int(workflow.cp.get_opt_tags("workflow-rerank",
                                          "parallelization-factor", tags))
    exe = Executable(workflow.cp, 'coinc_followup', ifos=workflow.ifos,
                     out_dir=out_dir, tags=tags)

    stat_files = FileList([])
    for i in range(factor):
        node = exe.create_node()
        node.new_output_file_opt(workflow.analysis_time, '.hdf',
                                 '--output-file', tags=[str(i)])
        node.add_multiifo_input_list_opt('--hdf-store', stores)
        node.add_input_opt('--input-file', trigfil)
        node.add_opt('--start-index', str(i))
        node.add_opt('--stride', factor)
        workflow += node
        stat_files += node.output_files

    exe = Executable(workflow.cp, 'rerank_coincs', ifos=workflow.ifos,
                     out_dir=out_dir, tags=tags)
    node = exe.create_node()
    node.add_input_list_opt('--stat-files', stat_files)
    node.add_input_opt('--statmap-file', statmap_file)
    node.add_input_opt('--followup-file', trigfil)
    if ranking_file:
        node.add_input_opt('--ranking-file', ranking_file)
    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_file
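
# Illustrative usage sketch: rerank_coinc_followup only adds nodes when the
# configuration contains a [workflow-rerank] section, which must define the
# parallelization-factor option read above, e.g.
#
#   [workflow-rerank]
#   parallelization-factor = 10
#
# The statmap and bank files are assumed outputs of earlier coincidence and
# template-bank stages; the 'rerank' output directory is hypothetical.
def _example_rerank(workflow, statmap_file, bank_file):
    """Sketch: rerank candidates, or pass through statmap_file if disabled."""
    return rerank_coinc_followup(workflow, statmap_file, bank_file, 'rerank')
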