Example #1
def create_posterior_files(workflow, samples_files, output_dir,
                           parameters=None, name="extract_posterior",
                           analysis_seg=None, tags=None):
    """Sets up job to create posterior files from some given samples files.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The workflow instance we are populating
    samples_files : str or list of str
        One or more files to extract the posterior samples from.
    output_dir: str
        The directory to store result plots and files.
    parameters : str or list of str, optional
        The parameters to extract posteriors for. If None, the ``parameters``
        option in the executable's section of the configuration file is
        used, if it is set.
    name: str, optional
        The name in the [executables] section of the configuration file
        to use, and the section to read for additional arguments to pass to
        the executable. Default is ``extract_posterior``.
    analysis_seg: ligo.segments.Segment, optional
        The segment this job encompasses. If None then use the total analysis
        time from the workflow.
    tags: list, optional
        Tags to add to the inference executables.

    Returns
    -------
    pycbc.workflow.FileList
        A list of output files.
    """
    if analysis_seg is None:
        analysis_seg = workflow.analysis_time
    if tags is None:
        tags = []
    # Catch if a parameters option was specified:
    # we need to do this because Executable will automatically add any
    # option in the section to the node. However, we need to add the
    # appropriate escapes to the parameters option so pegasus will render it
    # properly (see _params_for_pegasus for details).
    if workflow.cp.has_option(name, 'parameters'):
        # the config file only provides a default; don't clobber a
        # parameters argument passed in by the caller
        if parameters is None:
            parameters = workflow.cp.get(name, 'parameters')
        workflow.cp.remove_option(name, 'parameters')
    extract_posterior_exe = Executable(workflow.cp, name,
                                       ifos=workflow.ifos,
                                       out_dir=output_dir)
    node = extract_posterior_exe.create_node()
    # add back the parameters option if it was specified
    if parameters is not None:
        node.add_opt("--parameters", _params_for_pegasus(parameters))
        # and put the opt back in the config file in memory
        workflow.cp.set(name, 'parameters', parameters)
    if not isinstance(samples_files, list):
        samples_files = [samples_files]
    node.add_input_list_opt("--input-file", samples_files)
    node.new_output_file_opt(analysis_seg, ".hdf", "--output-file", tags=tags)
    # add node to workflow
    workflow += node
    return node.output_files
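
A minimal usage sketch for the function above. This is hypothetical, not a verbatim pycbc recipe: it assumes an already-configured pycbc.workflow.Workflow instance named `workflow`, uses a placeholder `samples.hdf` path, and the import location of resolve_url_to_file can vary between pycbc versions.

from pycbc.workflow.core import resolve_url_to_file

# `workflow` is assumed to be a configured pycbc.workflow.Workflow
samples = resolve_url_to_file('samples.hdf')  # wrap the path as a workflow File
posterior_files = create_posterior_files(
    workflow, samples, 'posteriors', parameters='mass1 mass2', tags=['FINAL'])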
Example #2
def cut_distant_injections(workflow, inj_file, out_dir, tags=None):
    "Set up a job for removing injections that are too distant to be seen"
    if tags is None:
        tags = []

    node = Executable(workflow.cp, 'inj_cut', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--input', inj_file)
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]
Example #3
def create_fits_file(workflow,
                     inference_file,
                     output_dir,
                     name="create_fits_file",
                     analysis_seg=None,
                     tags=None):
    """Sets up job to create fits files from some given samples files.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The workflow instance we are populating
    inference_file: pycbc.workflow.File
        The file with posterior samples.
    output_dir: str
        The directory to store result plots and files.
    name: str, optional
        The name in the [executables] section of the configuration file
        to use, and the section to read for additional arguments to pass to
        the executable. Default is ``create_fits_file``.
    analysis_seg: ligo.segments.Segment, optional
        The segment this job encompasses. If None then use the total analysis
        time from the workflow.
    tags: list, optional
        Tags to add to the inference executables.

    Returns
    -------
    pycbc.workflow.FileList
        A list of output files.
    """
    if analysis_seg is None:
        analysis_seg = workflow.analysis_time
    if tags is None:
        tags = []
    create_fits_exe = Executable(workflow.cp,
                                 name,
                                 ifos=workflow.ifos,
                                 out_dir=output_dir)
    node = create_fits_exe.create_node()
    node.add_input_opt("--input-file", inference_file)
    node.new_output_file_opt(analysis_seg, ".fits", "--output-file", tags=tags)
    # add node to workflow
    workflow += node
    return node.output_files
Example #4
def compute_inj_optimal_snr(workflow, inj_file, precalc_psd_files, out_dir,
                            tags=None):
    "Set up a job for computing optimal SNRs of a sim_inspiral file."
    if tags is None:
        tags = []

    node = Executable(workflow.cp, 'optimal_snr', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--input-file', inj_file)
    node.add_input_list_opt('--time-varying-psds', precalc_psd_files)
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]
Example #5
def make_skipped_html(workflow, skipped_data, out_dir, tags):
    """
    Make a html snippet from the list of skipped background coincidences
    """
    exe = Executable(workflow.cp,
                     'html_snippet',
                     ifos=workflow.ifos,
                     out_dir=out_dir,
                     tags=tags)

    node = exe.create_node()

    parsed_data = {}
    for ifo, time in skipped_data:
        if ifo not in parsed_data:
            parsed_data[ifo] = {}
        if time not in parsed_data[ifo]:
            parsed_data[ifo][time] = 1
        else:
            parsed_data[ifo][time] = parsed_data[ifo][time] + 1

    n_events = len(skipped_data)
    html_string = '"{} background events have been skipped '.format(n_events)
    html_string += 'because one of their single triggers already appears '
    html_string += 'in the events followed up above. '
    html_string += 'Specifically, the following single detector triggers '
    html_string += 'were found in these coincidences. '
    html_template = '{} event at time {} appeared {} times. '
    for ifo in parsed_data:
        for time in parsed_data[ifo]:
            n_occurrences = parsed_data[ifo][time]
            html_string += html_template.format(ifo, time, n_occurrences)

    html_string += '"'

    node.add_opt('--html-text', html_string)
    node.add_opt('--title', '"Events were skipped"')
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
    files = node.output_files
    return files
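
The per-ifo, per-time tally above can be expressed more compactly with collections.Counter; a self-contained sketch (the skipped_data pairs are made-up placeholders):

from collections import Counter

skipped_data = [('H1', 1126259462), ('L1', 1126259463), ('H1', 1126259462)]
counts = Counter(skipped_data)   # keys are (ifo, time) pairs
for (ifo, time), n in counts.items():
    print('{} event at time {} appeared {} times.'.format(ifo, time, n))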
Example #8
def inj_to_hdf(workflow, inj_file, out_dir, tags=None):
    """ Convert injection file to hdf format if not already one
    """
    _, ext = os.path.splitext(inj_file.name)
    if ext == '.hdf':
        return inj_file

    if tags is None:
        tags = []

    node = Executable(workflow.cp,
                      'inj2hdf',
                      ifos=workflow.ifos,
                      out_dir=out_dir,
                      tags=tags).create_node()
    node.add_input_opt('--injection-file', inj_file)
    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_file
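
The early return hinges on os.path.splitext, which yields the extension with its leading dot; a standalone illustration with placeholder filenames:

import os.path

for fname in ('injections.hdf', 'injections.xml'):
    _, ext = os.path.splitext(fname)
    print(fname, 'is already HDF' if ext == '.hdf' else 'needs conversion')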
Example #9
def setup_injection_minifollowups(workflow,
                                  injection_file,
                                  inj_xml_file,
                                  single_triggers,
                                  tmpltbank_file,
                                  insp_segs,
                                  insp_seg_name,
                                  dax_output,
                                  out_dir,
                                  tags=None):
    """ Create plots that followup the closest missed injections
    
    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    injection_file: pycbc.workflow.File
        The HDF file containing the injection results.
    inj_xml_file: pycbc.workflow.File
        The XML file containing the injection definitions.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: dict
        A dictionary, keyed by ifo name, of the data read by each inspiral job.
    insp_seg_name: str
        The name of the segmentlist to read from the inspiral segment file
    dax_output: path
        The directory to store the dax file of the sub-workflow
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables
    
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the 
        minifollowups plots.
    """
    logging.info('Entering injection minifollowups module')

    if not workflow.cp.has_section('workflow-injection_minifollowups'):
        logging.info(
            'There is no [workflow-injection_minifollowups] section in configuration file'
        )
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    config_path = os.path.abspath(dax_output + '/' + '_'.join(tags) +
                                  'injection_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')

    exe = Executable(workflow.cp,
                     'injection_minifollowup',
                     ifos=workflow.ifos,
                     out_dir=dax_output)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--injection-file', injection_file)
    node.add_input_opt('--injection-xml-file', inj_xml_file)
    node.add_multiifo_input_list_opt('--single-detector-triggers',
                                     single_triggers)
    node.add_multiifo_input_list_opt('--inspiral-segments', insp_segs.values())
    node.add_opt('--inspiral-segment-name', insp_seg_name)
    node.new_output_file_opt(workflow.analysis_time,
                             '.dax',
                             '--output-file',
                             tags=tags)
    node.new_output_file_opt(workflow.analysis_time,
                             '.dax.map',
                             '--output-map',
                             tags=tags)

    name = node.output_files[0].name
    map_loc = node.output_files[1].name

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments('--basename %s' %
                     os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving injection minifollowups module')
Example #10
def setup_single_det_minifollowups(workflow,
                                   single_trig_file,
                                   tmpltbank_file,
                                   insp_segs,
                                   insp_seg_name,
                                   dax_output,
                                   out_dir,
                                   veto_file=None,
                                   veto_segment_name=None,
                                   tags=None):
    """ Create plots that followup the Nth loudest clustered single detector
    triggers from a merged single detector trigger HDF file.
    
    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    single_trig_file: pycbc.workflow.File
        The File class holding the single detector triggers.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: dict
        A dictionary, keyed by ifo name, of the data read by each inspiral job.
    insp_seg_name: str
        The name of the segmentlist to read from the inspiral segment file
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables    
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the 
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-minifollowups'):
        msg = 'There is no [workflow-minifollowups] section in '
        msg += 'configuration file'
        logging.info(msg)
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    curr_ifo = single_trig_file.ifo
    config_path = os.path.abspath(dax_output + '/' + curr_ifo + \
                                   '_'.join(tags) + 'singles_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')

    exe = Executable(workflow.cp,
                     'singles_minifollowup',
                     ifos=curr_ifo,
                     out_dir=dax_output)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--single-detector-file', single_trig_file)
    node.add_input_opt('--inspiral-segments', insp_segs[curr_ifo])
    node.add_opt('--inspiral-segment-name', insp_seg_name)
    node.add_opt('--instrument', curr_ifo)
    if veto_file is not None:
        assert (veto_segment_name is not None)
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--veto-segment-name', veto_segment_name)
    node.new_output_file_opt(workflow.analysis_time,
                             '.dax',
                             '--output-file',
                             tags=tags)
    node.new_output_file_opt(workflow.analysis_time,
                             '.dax.map',
                             '--output-map',
                             tags=tags)

    name = node.output_files[0].name
    map_loc = node.output_files[1].name

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments('--basename %s' \
                     % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
Example #11
def setup_foreground_inference(workflow,
                               coinc_file,
                               single_triggers,
                               tmpltbank_file,
                               insp_segs,
                               insp_data_name,
                               insp_anal_name,
                               dax_output,
                               out_dir,
                               tags=None):
    """ Creates workflow node that will run the inference workflow.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The file associated with coincident triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read and analyzed by each inspiral
       job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output : str
        The name of the output DAX file.
    out_dir: path
        The directory to store inference result plots and files
    tags: {None, optional}
        Tags to add to the inference executables
    """

    logging.info("Entering inference module")

    # check if configuration file has inference section
    if not workflow.cp.has_section("workflow-inference"):
        logging.info(
            "There is no [workflow-inference] section in configuration file")
        logging.info("Leaving inference module")
        return

    # default tags is a list
    tags = [] if tags is None else tags

    # make the directory that will contain the dax file
    makedir(dax_output)

    # turn the config file into a File class
    config_path = os.path.abspath(dax_output + "/" + "_".join(tags) \
                                        + "foreground_inference.ini")
    workflow.cp.write(open(config_path, "w"))
    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, "local")

    # create an Executable for the inference workflow generator
    exe = Executable(workflow.cp,
                     "foreground_inference",
                     ifos=workflow.ifos,
                     out_dir=dax_output)

    # create the node that will run in the workflow
    node = exe.create_node()
    node.add_input_opt("--config-files", config_file)
    node.add_input_opt("--bank-file", tmpltbank_file)
    node.add_input_opt("--statmap-file", coinc_file)
    node.add_multiifo_input_list_opt("--single-detector-triggers",
                                     single_triggers)
    node.new_output_file_opt(workflow.analysis_time,
                             ".dax",
                             "--output-file",
                             tags=tags)
    node.new_output_file_opt(workflow.analysis_time,
                             ".dax.map",
                             "--output-map",
                             tags=tags)
    node.new_output_file_opt(workflow.analysis_time,
                             ".tc.txt",
                             "--transformation-catalog",
                             tags=tags)

    # get dax name and use it for the workflow name
    name = node.output_files[0].name
    node.add_opt("--workflow-name", name)

    # get output map name and use it for the output dir name
    map_file = node.output_files[1]
    node.add_opt("--output-dir", out_dir)

    # get the transformation catalog name
    tc_file = node.output_files[2]

    # add this node to the workflow
    workflow += node

    # create job for dax that will run a sub-workflow
    # and add it to the workflow
    fil = node.output_files[0]
    job = dax.DAX(fil)
    job.addArguments("--basename %s" %
                     os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_file, tc_file)
    workflow._adag.addJob(job)

    # make dax a child of the inference workflow generator node
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)

    logging.info("Leaving inference module")
Example #12
def veto_injections(workflow,
                    inj_file,
                    veto_file,
                    veto_name,
                    out_dir,
                    tags=None):
    """Set up a job to remove injections that occur within vetoed times."""
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)

    node = Executable(workflow.cp,
                      'strip_injections',
                      ifos=workflow.ifos,
                      out_dir=out_dir,
                      tags=tags).create_node()
    node.add_opt('--segment-name', veto_name)
    node.add_input_opt('--veto-file', veto_file)
    node.add_input_opt('--injection-file', inj_file)
    node.add_opt('--ifos', ' '.join(workflow.ifos))
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]
Example #13
def setup_foreground_inference(workflow, coinc_file, single_triggers,
                               tmpltbank_file, insp_segs, insp_data_name,
                               insp_anal_name, dax_output, out_dir, tags=None):
    """ Creates workflow node that will run the inference workflow.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The file associated with coincident triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read and analyzed by each inspiral
       job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output : str
        The name of the output DAX file.
    out_dir: path
        The directory to store inference result plots and files
    tags: {None, optional}
        Tags to add to the inference executables
    """

    logging.info("Entering inference module")

    # check if configuration file has inference section    
    if not workflow.cp.has_section("workflow-inference"):
        logging.info("There is no [workflow-inference] section in configuration file")
        logging.info("Leaving inference module")
        return

    # default tags is a list
    tags = [] if tags is None else tags

    # make the directory that will contain the dax file
    makedir(dax_output)
    
    # turn the config file into a File class
    config_path = os.path.abspath(dax_output + "/" + "_".join(tags) \
                                        + "foreground_inference.ini")
    workflow.cp.write(open(config_path, "w"))
    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, "local")

    # create an Executable for the inference workflow generator
    exe = Executable(workflow.cp, "foreground_inference", ifos=workflow.ifos,
                     out_dir=dax_output)

    # create the node that will run in the workflow
    node = exe.create_node()
    node.add_input_opt("--config-files", config_file)
    node.add_input_opt("--bank-file", tmpltbank_file)
    node.add_input_opt("--statmap-file", coinc_file)
    node.add_multiifo_input_list_opt("--single-detector-triggers",
                                     single_triggers)
    node.new_output_file_opt(workflow.analysis_time, ".dax", "--output-file",
                                     tags=tags)
    node.new_output_file_opt(workflow.analysis_time, ".dax.map",
                                     "--output-map", tags=tags)

    # get dax name and use it for the workflow name
    name = node.output_files[0].name
    node.add_opt("--workflow-name", name)

    # get output map name and use it for the output dir name
    map_loc = node.output_files[1].name
    node.add_opt("--output-dir", out_dir)

    # add this node to the workflow
    workflow += node

    # create job for dax that will run a sub-workflow
    # and add it to the workflow
    fil = node.output_files[0]
    job = dax.DAX(fil)
    job.addArguments("--basename %s" % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)

    # make dax a child of the inference workflow generator node
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)

    logging.info("Leaving inference module")
Example #14
def rerank_coinc_followup(workflow,
                          statmap_file,
                          bank_file,
                          out_dir,
                          tags=None,
                          injection_file=None,
                          ranking_file=None):
    """Set up the optional reranking of coincident candidates. If the
    configuration has no [workflow-rerank] section, the input statmap
    file is returned unchanged."""
    if tags is None:
        tags = []

    make_analysis_dir(out_dir)

    if not workflow.cp.has_section("workflow-rerank"):
        logging.info("No reranking done in this workflow")
        return statmap_file
    else:
        logging.info("Setting up reranking of candidates")

    # Generate reduced data files (maybe this could also be used elsewhere?)
    stores = FileList([])
    for ifo in workflow.ifos:
        make_analysis_dir('strain_files')
        node = Executable(workflow.cp,
                          'strain_data_reduce',
                          ifos=[ifo],
                          out_dir='strain_files',
                          tags=tags).create_node()
        node.add_opt('--gps-start-time', workflow.analysis_time[0])
        node.add_opt('--gps-end-time', workflow.analysis_time[1])
        if injection_file:
            node.add_input_opt('--injection-file', injection_file)

        fil = node.new_output_file_opt(workflow.analysis_time, '.hdf',
                                       '--output-file')
        stores.append(fil)
        workflow += node

    # Generate trigger input file
    node = Executable(workflow.cp,
                      'rerank_trigger_input',
                      ifos=workflow.ifos,
                      out_dir=out_dir,
                      tags=tags).create_node()
    node.add_input_opt('--statmap-file', statmap_file)
    node.add_input_opt('--bank-file', bank_file)
    trigfil = node.new_output_file_opt(workflow.analysis_time, '.hdf',
                                       '--output-file')
    workflow += node

    # Parallelize coinc trigger followup
    factor = int(
        workflow.cp.get_opt_tags("workflow-rerank", "parallelization-factor",
                                 tags))
    exe = Executable(workflow.cp,
                     'coinc_followup',
                     ifos=workflow.ifos,
                     out_dir=out_dir,
                     tags=tags)

    stat_files = FileList([])
    for i in range(factor):
        node = exe.create_node()
        node.new_output_file_opt(workflow.analysis_time,
                                 '.hdf',
                                 '--output-file',
                                 tags=[str(i)])
        node.add_multiifo_input_list_opt('--hdf-store', stores)
        node.add_input_opt('--input-file', trigfil)
        node.add_opt('--start-index', str(i))
        node.add_opt('--stride', factor)
        workflow += node
        stat_files += node.output_files

    exe = Executable(workflow.cp,
                     'rerank_coincs',
                     ifos=workflow.ifos,
                     out_dir=out_dir,
                     tags=tags)
    node = exe.create_node()
    node.add_input_list_opt('--stat-files', stat_files)
    node.add_input_opt('--statmap-file', statmap_file)
    node.add_input_opt('--followup-file', trigfil)

    if ranking_file:
        node.add_input_opt('--ranking-file', ranking_file)

    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_file
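
The --start-index/--stride pair above partitions the coincident triggers round-robin across `factor` parallel jobs; the same partitioning in plain Python:

factor = 3
items = list(range(10))          # stand-ins for the triggers to follow up
chunks = [items[i::factor] for i in range(factor)]
print(chunks)                    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]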
Example #15
def create_node(self):
    node = Executable.create_node(self)
    node.set_priority(1000)
    return node
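
In context this method would override Executable.create_node on a subclass, so every node from that executable is scheduled ahead of default-priority jobs; a minimal sketch (the class name HighPriorityExecutable is hypothetical):

from pycbc.workflow import Executable

class HighPriorityExecutable(Executable):
    def create_node(self):
        node = Executable.create_node(self)
        node.set_priority(1000)  # run before default-priority jobs
        return node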
Example #16
def setup_foreground_minifollowups(workflow,
                                   coinc_file,
                                   single_triggers,
                                   tmpltbank_file,
                                   insp_segs,
                                   insp_data_name,
                                   insp_anal_name,
                                   dax_output,
                                   out_dir,
                                   tags=None):
    """ Create plots that followup the Nth loudest coincident injection
    from a statmap produced HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The statmap file containing the coincident triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read and analyzed by each inspiral
       job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables

    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-minifollowups'):
        logging.info(
            'There is no [workflow-minifollowups] section in configuration file'
        )
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    config_path = os.path.abspath(dax_output + '/' + '_'.join(tags) +
                                  'foreground_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = resolve_url_to_file(config_path)

    exe = Executable(workflow.cp,
                     'foreground_minifollowup',
                     ifos=workflow.ifos,
                     out_dir=dax_output,
                     tags=tags)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--statmap-file', coinc_file)
    node.add_multiifo_input_list_opt('--single-detector-triggers',
                                     single_triggers)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    if tags:
        node.add_list_opt('--tags', tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--dax-file')
    node.new_output_file_opt(workflow.analysis_time, '.dax.map',
                             '--output-map')

    name = node.output_files[0].name
    map_file = node.output_files[1]

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    # run the generated dax as a sub-workflow, passing along the parent
    # workflow's staging site and cache file
    job = SubWorkflow(fil.name, is_planned=False)
    input_files = [tmpltbank_file, coinc_file, insp_segs] + single_triggers
    job.add_inputs(*input_files)
    job.set_subworkflow_properties(map_file,
                                   staging_site=workflow.staging_site,
                                   cache_file=workflow.cache_file)
    job.add_into_workflow(workflow, parents=[node])
    logging.info('Leaving minifollowups module')
Example #17
def setup_single_det_minifollowups(workflow,
                                   single_trig_file,
                                   tmpltbank_file,
                                   insp_segs,
                                   insp_data_name,
                                   insp_anal_name,
                                   dax_output,
                                   out_dir,
                                   veto_file=None,
                                   veto_segment_name=None,
                                   statfiles=None,
                                   tags=None):
    """ Create plots that followup the Nth loudest clustered single detector
    triggers from a merged single detector trigger HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    single_trig_file: pycbc.workflow.File
        The File class holding the single detector triggers.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    statfiles: FileList (optional, default=None)
        Supplementary files necessary for computing the single-detector
        statistic.
    tags: {None, optional}
        Tags to add to the minifollowups executables
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-sngl_minifollowups'):
        msg = 'There is no [workflow-sngl_minifollowups] section in '
        msg += 'configuration file'
        logging.info(msg)
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    curr_ifo = single_trig_file.ifo
    config_path = os.path.abspath(dax_output + '/' + curr_ifo + \
                                   '_'.join(tags) + 'singles_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = resolve_url_to_file(config_path)

    exe = Executable(workflow.cp,
                     'singles_minifollowup',
                     ifos=curr_ifo,
                     out_dir=dax_output,
                     tags=tags)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--single-detector-file', single_trig_file)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.add_opt('--instrument', curr_ifo)
    if veto_file is not None:
        assert (veto_segment_name is not None)
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--veto-segment-name', veto_segment_name)
    if statfiles:
        statfiles = statfiles.find_output_with_ifo(curr_ifo)
        node.add_input_list_opt('--statistic-files', statfiles)
    if tags:
        node.add_list_opt('--tags', tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--dax-file')
    node.new_output_file_opt(workflow.analysis_time, '.dax.map',
                             '--output-map')

    name = node.output_files[0].name
    map_file = node.output_files[1]

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = SubWorkflow(fil.name, is_planned=False)
    input_files = [tmpltbank_file, insp_segs, single_trig_file]
    if veto_file is not None:
        input_files.append(veto_file)
    if statfiles:
        input_files += statfiles
    job.add_inputs(*input_files)
    job.set_subworkflow_properties(map_file,
                                   staging_site=workflow.staging_site,
                                   cache_file=workflow.cache_file)
    job.add_into_workflow(workflow, parents=[node])
    logging.info('Leaving minifollowups module')
Example #18
def setup_injection_minifollowups(
    workflow,
    injection_file,
    inj_xml_file,
    single_triggers,
    tmpltbank_file,
    insp_segs,
    insp_data_name,
    insp_anal_name,
    dax_output,
    out_dir,
    tags=None,
):
    """ Create plots that followup the closest missed injections
    
    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    injection_file: pycbc.workflow.File
        The HDF file containing the injection results.
    inj_xml_file: pycbc.workflow.File
        The XML file containing the injection definitions.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables
    
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the 
        minifollowups plots.
    """
    logging.info("Entering injection minifollowups module")

    if not workflow.cp.has_section("workflow-injection_minifollowups"):
        logging.info("There is no [workflow-injection_minifollowups] section in configuration file")
        logging.info("Leaving minifollowups")
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    config_path = os.path.abspath(dax_output + "/" + "_".join(tags) + "injection_minifollowup.ini")
    workflow.cp.write(open(config_path, "w"))

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, "local")

    exe = Executable(workflow.cp, "injection_minifollowup", ifos=workflow.ifos, out_dir=dax_output)

    node = exe.create_node()
    node.add_input_opt("--config-files", config_file)
    node.add_input_opt("--bank-file", tmpltbank_file)
    node.add_input_opt("--injection-file", injection_file)
    node.add_input_opt("--injection-xml-file", inj_xml_file)
    node.add_multiifo_input_list_opt("--single-detector-triggers", single_triggers)
    node.add_input_opt("--inspiral-segments", insp_segs)
    node.add_opt("--inspiral-data-read-name", insp_data_name)
    node.add_opt("--inspiral-data-analyzed-name", insp_anal_name)
    node.new_output_file_opt(workflow.analysis_time, ".dax", "--output-file", tags=tags)
    node.new_output_file_opt(workflow.analysis_time, ".dax.map", "--output-map", tags=tags)

    name = node.output_files[0].name
    map_loc = node.output_files[1].name

    node.add_opt("--workflow-name", name)
    node.add_opt("--output-dir", out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments("--basename %s" % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info("Leaving injection minifollowups module")
Example #20
def setup_single_det_minifollowups(workflow, single_trig_file, tmpltbank_file,
                                   insp_segs, insp_data_name, insp_anal_name,
                                   dax_output, out_dir, veto_file=None,
                                   veto_segment_name=None, statfiles=None,
                                   tags=None):
    """ Create plots that followup the Nth loudest clustered single detector
    triggers from a merged single detector trigger HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    single_trig_file: pycbc.workflow.File
        The File class holding the single detector triggers.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    statfiles: FileList (optional, default=None)
        Supplementary files necessary for computing the single-detector
        statistic.
    tags: {None, optional}
        Tags to add to the minifollowups executables
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-sngl_minifollowups'):
        msg = 'There is no [workflow-sngl_minifollowups] section in '
        msg += 'configuration file'
        logging.info(msg)
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    curr_ifo = single_trig_file.ifo
    config_path = os.path.abspath(dax_output + '/' + curr_ifo + \
                                   '_'.join(tags) + 'singles_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(urljoin('file:', pathname2url(config_path)), site='local')

    exe = Executable(workflow.cp, 'singles_minifollowup',
                     ifos=curr_ifo, out_dir=dax_output, tags=tags)

    wikifile = curr_ifo + '_'.join(tags) + 'loudest_table.txt'

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--single-detector-file', single_trig_file)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.add_opt('--instrument', curr_ifo)
    node.add_opt('--wiki-file', wikifile)
    if veto_file is not None:
        assert(veto_segment_name is not None)
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--veto-segment-name', veto_segment_name)
    if statfiles:
        statfiles = statfiles.find_output_with_ifo(curr_ifo)
        node.add_input_list_opt('--statistic-files', statfiles)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file')
    node.new_output_file_opt(workflow.analysis_time, '.dax.map',
                             '--output-map')
    node.new_output_file_opt(workflow.analysis_time, '.tc.txt',
                             '--transformation-catalog')

    name = node.output_files[0].name
    map_file = node.output_files[1]
    tc_file = node.output_files[2]

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    # determine if a staging site has been specified
    try:
        staging_site = workflow.cp.get('workflow-sngl_minifollowups',
                                       'staging-site')
    except Exception:
        staging_site = None

    job = dax.DAX(fil)
    job.addArguments('--basename %s' \
                     % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_file, tc_file,
                                staging_site=staging_site)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
Example #21
def create_segs_from_cats_job(cp, out_dir, ifo_string, tag=None):
    """
    This function creates the Executable that will be used to run
    ligolw_segments_from_cats as part of the workflow

    Parameters
    -----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser
        The in-memory representation of the configuration (.ini) files
    out_dir : path
        Directory in which to put output files
    ifo_string : string
        String containing all active ifos, e.g. "H1L1V1"
    tag : string, optional (default=None)
        Use this to specify a tag. This can be used if this module is being
        called more than once to give call specific configuration (by setting
        options in [workflow-segments-${TAG}] rather than [workflow-segments]). This
        is also used to tag the Files returned by the class to uniqueify
        the Files and uniqueify the actual filename.
        FIXME: Filenames may not be unique with current codes!

    Returns
    --------
    job : Executable instance
        The Executable instance that will run segments_from_cats jobs
    """
    segServerUrl = cp.get_opt_tags("workflow-segments", 
                                   "segments-database-url", [tag])
    vetoDefFile = cp.get_opt_tags("workflow-segments", 
                                  "segments-veto-definer-file", [tag])

    if tag:
        currTags = [tag]
    else:
        currTags = []
    job = Executable(cp, 'segments_from_cats', universe='local',
                               ifos=ifo_string, out_dir=out_dir, tags=currTags)
    job.add_opt('--separate-categories')
    job.add_opt('--segment-url', segServerUrl)
    
    job.add_opt('--veto-file', vetoDefFile)
    # FIXME: Would like the proxy in the Workflow instance
    # FIXME: Explore using the x509 condor commands
    # Set up proxy to be accessible in a NFS location
    # If the user has logged in with gsissh then X509_USER_PROXY will be set
    # However, certain users log in with an ssh key and then ligo-proxy-init
    # This route does not set X509_USER_PROXY, so use the default file location
    if 'X509_USER_PROXY' in os.environ:
        proxy = os.getenv('X509_USER_PROXY')
    else:
        proxy = "/tmp/x509up_u%d" % os.getuid()
    proxyfile = os.path.join(out_dir, 'x509up.file')
    try:
        shutil.copyfile(proxy, proxyfile)
    except IOError:
        raise RuntimeError('Cannot find certificate in %s. '
                           'Make sure that ligo-proxy-init '
                           'has been run.' % proxy)

    job.add_profile('condor', 'environment',
                    'USER=$ENV(USER);X509_USER_PROXY=%s' % proxyfile)

    return job
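
The proxy lookup above can also be collapsed into a single dict-style lookup with a default (a Unix-only sketch, since it relies on os.getuid):

import os

# fall back to the conventional per-user proxy path when the
# X509_USER_PROXY environment variable is unset
proxy = os.environ.get('X509_USER_PROXY', '/tmp/x509up_u%d' % os.getuid())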
Example #22
def veto_injections(workflow, inj_file, veto_file, veto_name, out_dir, tags=None):
    """Set up a job to remove injections that occur within vetoed times."""
    tags = [] if tags is None else tags
    make_analysis_dir(out_dir)
    
    node = Executable(workflow.cp, 'strip_injections', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_opt('--segment-name', veto_name)
    node.add_input_opt('--veto-file', veto_file)
    node.add_input_opt('--injection-file', inj_file)
    node.add_opt('--ifos', ' '.join(workflow.ifos))
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]  
Example #23
def setup_single_det_minifollowups(workflow, single_trig_file, tmpltbank_file,
                                   insp_segs, insp_data_name, insp_anal_name,
                                   dax_output, out_dir, veto_file=None,
                                   veto_segment_name=None, tags=None):
    """ Create plots that follow up the Nth loudest clustered single detector
    triggers from a merged single detector trigger HDF file.
    
    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    single_trig_file: pycbc.workflow.File
        The File class holding the single detector triggers.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read by each inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables    
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the 
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-sngl_minifollowups'):
        msg = 'There is no [workflow-sngl_minifollowups] section in '
        msg += 'configuration file'
        logging.info(msg)
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    curr_ifo = single_trig_file.ifo
    config_path = os.path.abspath(dax_output + '/' + curr_ifo + \
                                   '_'.join(tags) + 'singles_minifollowup.ini')
    workflow.cp.write(open(config_path, 'w'))

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')

    exe = Executable(workflow.cp, 'singles_minifollowup',
                     ifos=curr_ifo, out_dir=dax_output, tags=tags)

    wikifile = curr_ifo + '_'.join(tags) + 'loudest_table.txt'

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--single-detector-file', single_trig_file)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.add_opt('--instrument', curr_ifo)
    node.add_opt('--wiki-file', wikifile)
    if veto_file is not None:
        assert(veto_segment_name is not None)
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--veto-segment-name', veto_segment_name)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file', tags=tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map', tags=tags)

    name = node.output_files[0].name
    map_file = node.output_files[1]

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    job = dax.DAX(fil)
    job.addArguments('--basename %s' \
                     % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_file)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
Example #24
def setup_plotthinca(workflow,
                     input_files,
                     cache_filename,
                     coinc_cachepattern,
                     slide_cachepattern,
                     output_dir,
                     tags=None,
                     **kwargs):
    """
    This function sets up the nodes that will generate summary from a list of
    thinca files.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    coinc_cachepattern : str
        The pattern that will be used to find zero-lag coincidence filenames in the cache.
    slide_cachepattern : str
        The pattern that will be used to find time slide filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """

    tags = [] if tags is None else tags
    plot_files = FileList([])

    # create executable
    plotthinca_job = Executable(workflow.cp, 'plotthinca', 'vanilla',
                                workflow.ifos, output_dir, tags)

    # get all ifo combinations of at least 2 coincident ifos
    ifo_combos = []
    for n in range(len(plotthinca_job.ifo_list) - 1):
        for ifo_list in itertools.combinations(plotthinca_job.ifo_list, n + 2):
            ifo_combos.append(ifo_list)

    for tag in tags:
        for ifo_list in ifo_combos:
            ifo_string = ''.join(ifo_list)

            # create node
            node = Node(plotthinca_job)
            node.add_opt('--gps-start-time', workflow.analysis_time[0])
            node.add_opt('--gps-end-time', workflow.analysis_time[1])
            node.add_opt('--cache-file', cache_filename)
            node.add_opt('--ifo-times', ifo_string)
            node.add_opt('--ifo-tag', 'SECOND_' + ifo_string)
            for ifo in ifo_list:
                node.add_opt('--%s-triggers' % ifo.lower(), '')
            node.add_opt('--user-tag', tag.upper() + '_SUMMARY_PLOTS')
            node.add_opt('--output-path', output_dir)
            node.add_opt('--coinc-pattern', coinc_cachepattern)
            node.add_opt('--slide-pattern', slide_cachepattern)
            node.add_opt('--enable-output')

            # add node to workflow
            workflow.add_node(node)

            # make all input_files parents
            #for f in input_files:
            #    dep = dax.Dependency(parent=f.node._dax_node, child=node._dax_node)
            #    workflow._adag.addDependency(dep)

    return plot_files
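
The double loop above enumerates every combination of two or more ifos; a standalone illustration of the same enumeration:

import itertools

ifos = ['H1', 'L1', 'V1']
combos = [c for r in range(2, len(ifos) + 1)
          for c in itertools.combinations(ifos, r)]
print(combos)  # [('H1', 'L1'), ('H1', 'V1'), ('L1', 'V1'), ('H1', 'L1', 'V1')]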
Example #25
def setup_foreground_minifollowups(workflow, coinc_file, single_triggers,
                                   tmpltbank_file, insp_segs, insp_data_name,
                                   insp_anal_name, dax_output, out_dir,
                                   tags=None):
    """ Create plots that follow up the Nth loudest coincident event
    from a statmap produced HDF file.
    
    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The statmap file containing the coincident triggers.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
       The segment file containing the data read and analyzed by each inspiral
       job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: {None, optional}
        Tags to add to the minifollowups executables
    
    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the 
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')
    
    if not workflow.cp.has_section('workflow-minifollowups'):
        logging.info('There is no [workflow-minifollowups] section in the configuration file')
        logging.info('Leaving minifollowups')
        return
    
    tags = [] if tags is None else tags
    makedir(dax_output)
    
    # turn the config file into a File class
    config_path = os.path.abspath(os.path.join(
        dax_output, '_'.join(tags + ['foreground_minifollowup.ini'])))
    with open(config_path, 'w') as config_fh:
        workflow.cp.write(config_fh)
    
    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(config_path, 'local')
    
    exe = Executable(workflow.cp, 'foreground_minifollowup',
                     ifos=workflow.ifos, out_dir=dax_output)
    
    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--statmap-file', coinc_file)
    node.add_multiifo_input_list_opt('--single-detector-triggers', single_triggers)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file', tags=tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map', tags=tags)

    name = node.output_files[0].name
    map_loc = node.output_files[1].name

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)
    
    workflow += node
    
    # execute this in a sub-workflow
    fil = node.output_files[0]
    
    job = dax.DAX(fil)
    job.addArguments('--basename %s' % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_loc)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
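
A hypothetical invocation of this function is sketched below. Every argument is a placeholder standing in for an object produced by an earlier workflow stage; none of these names come from the original example.

# Usage sketch with placeholder inputs: statmap_file, the single-detector
# trigger files, bank_file and insp_seg_file would be pycbc.workflow.File
# objects created earlier in the workflow, and the segmentlist names are
# illustrative only.
setup_foreground_minifollowups(
    workflow, statmap_file, [h1_trigs, l1_trigs], bank_file,
    insp_seg_file, 'DATA_READ', 'DATA_ANALYZED',
    dax_output='daxes', out_dir='minifollowups', tags=['FULL_DATA'])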
Example #26
0
def setup_hardware_injection_page(workflow,
                                  input_files,
                                  cache_filename,
                                  inspiral_cachepattern,
                                  output_dir,
                                  tags=None,
                                  **kwargs):
    """
    This function sets up the nodes that will create the hardware injection page.

    Parameters
    ----------
    workflow : ahope.Workflow
        The ahope workflow instance that the hardware injection page job
        will be added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    inspiral_cachepattern : str
        The pattern that will be used to find inspiral filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used to search the cache.

    Returns
    -------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """

    logging.info("Entering hardware injection page setup.")

    if tags is None:
        tags = []

    out_files = FileList([])

    # check if hardware injection section exists
    # if not then do not do add hardware injection job to the workflow
    if not workflow.cp.has_section('workflow-hardware-injections'):
        msg = "There is no workflow-hardware-injections section. "
        msg += "The hardware injection page will not be added to the workflow."
        logging.info(msg)
        logging.info("Leaving hardware injection page setup.")
        return out_files

    # make the output dir
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # create executable
    hwinjpage_job = Executable(workflow.cp, 'hardware_injection_page',
                               'vanilla', workflow.ifos, output_dir, tags)

    # retrieve hardware injection file
    hwinjDefUrl = workflow.cp.get_opt_tags('workflow-hardware-injections',
                                           'hwinj-definer-url', tags)
    hwinjDefBaseName = os.path.basename(hwinjDefUrl)
    hwinjDefNewPath = os.path.join(output_dir, hwinjDefBaseName)
    from urllib.request import urlretrieve  # urllib.urlretrieve on Python 2
    urlretrieve(hwinjDefUrl, hwinjDefNewPath)

    # update hwinj definer file location
    workflow.cp.set("workflow-hardware-injections", "hwinj-definer-file",
                    hwinjDefNewPath)

    # query for the hardware injection segments
    get_hardware_injection_segment_files(workflow, output_dir, hwinjDefNewPath)

    # create node
    node = Node(hwinjpage_job)
    node.add_opt('--gps-start-time', workflow.analysis_time[0])
    node.add_opt('--gps-end-time', workflow.analysis_time[1])
    node.add_opt('--source-xml', hwinjDefNewPath)
    node.add_opt('--segment-dir', output_dir)
    node.add_opt('--cache-file', cache_filename)
    node.add_opt('--cache-pattern', inspiral_cachepattern)
    node.add_opt('--analyze-injections', '')
    for ifo in workflow.ifos:
        node.add_opt('--%s-injections' % ifo.lower(), '')
    outfile = File(node.executable.ifo_string,
                   'HWINJ_SUMMARY',
                   workflow.analysis_time,
                   extension='html',
                   directory=output_dir)
    node.add_opt('--outfile', outfile.storage_path)

    # add node to workflow
    workflow.add_node(node)

    # make all input_files parents
    #for f in input_files:
    #    dep = dax.Dependency(parent=f.node._dax_node, child=node._dax_node)
    #    workflow._adag.addDependency(dep)

    out_files += node.output_files

    logging.info("Leaving hardware injection page setup.")

    return out_files
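
The section-guard-then-read pattern used above can be sketched with a plain ConfigParser. The section and option names mirror the ones this function reads; the .ini path is a placeholder, and note that pycbc's get_opt_tags additionally consults tag-specific sections, which a plain get does not.

from configparser import ConfigParser

cp = ConfigParser()
cp.read('workflow.ini')  # placeholder path
if cp.has_section('workflow-hardware-injections'):
    hwinj_url = cp.get('workflow-hardware-injections', 'hwinj-definer-url')
else:
    hwinj_url = None  # skip building the hardware injection page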
Example #27
0
def setup_foreground_minifollowups(workflow, coinc_file, single_triggers,
                       tmpltbank_file, insp_segs, insp_data_name,
                       insp_anal_name, dax_output, out_dir, tags=None):
    """ Create plots that followup the Nth loudest coincident injection
    from a statmap produced HDF file.

    Parameters
    ----------
    workflow: pycbc.workflow.Workflow
        The core workflow instance we are populating
    coinc_file: pycbc.workflow.File
        The file containing the statmap coincident triggers to follow up.
    single_triggers: list of pycbc.workflow.File
        A list containing the file objects associated with the merged
        single detector trigger files for each ifo.
    tmpltbank_file: pycbc.workflow.File
        The file object pointing to the HDF format template bank
    insp_segs: SegFile
        The segment file containing the data read and analyzed by each
        inspiral job.
    insp_data_name: str
        The name of the segmentlist storing data read.
    insp_anal_name: str
        The name of the segmentlist storing data analyzed.
    dax_output: path
        The directory to store the dax file and the configuration file
        passed to the sub-workflow.
    out_dir: path
        The directory to store minifollowups result plots and files
    tags: list, optional
        Tags to add to the minifollowups executables

    Returns
    -------
    layout: list
        A list of tuples which specify the displayed file layout for the
        minifollowups plots.
    """
    logging.info('Entering minifollowups module')

    if not workflow.cp.has_section('workflow-minifollowups'):
        logging.info('There is no [workflow-minifollowups] section in the configuration file')
        logging.info('Leaving minifollowups')
        return

    tags = [] if tags is None else tags
    makedir(dax_output)

    # turn the config file into a File class
    config_path = os.path.abspath(os.path.join(
        dax_output, '_'.join(tags + ['foreground_minifollowup.ini'])))
    with open(config_path, 'w') as config_fh:
        workflow.cp.write(config_fh)

    config_file = wdax.File(os.path.basename(config_path))
    config_file.PFN(urljoin('file:', pathname2url(config_path)), site='local')

    exe = Executable(workflow.cp, 'foreground_minifollowup',
                     ifos=workflow.ifos, out_dir=dax_output, tags=tags)

    node = exe.create_node()
    node.add_input_opt('--config-files', config_file)
    node.add_input_opt('--bank-file', tmpltbank_file)
    node.add_input_opt('--statmap-file', coinc_file)
    node.add_multiifo_input_list_opt('--single-detector-triggers',
                                     single_triggers)
    node.add_input_opt('--inspiral-segments', insp_segs)
    node.add_opt('--inspiral-data-read-name', insp_data_name)
    node.add_opt('--inspiral-data-analyzed-name', insp_anal_name)
    if tags:
        node.add_list_opt('--tags', tags)
    node.new_output_file_opt(workflow.analysis_time, '.dax', '--output-file')
    node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map')
    node.new_output_file_opt(workflow.analysis_time, '.tc.txt',
                             '--transformation-catalog')

    name = node.output_files[0].name
    map_file = node.output_files[1]
    tc_file = node.output_files[2]

    node.add_opt('--workflow-name', name)
    node.add_opt('--output-dir', out_dir)

    workflow += node

    # execute this in a sub-workflow
    fil = node.output_files[0]

    # determine if a staging site has been specified
    try:
        staging_site = workflow.cp.get('workflow-foreground_minifollowups',
                                       'staging-site')
    except Exception:
        staging_site = None

    job = dax.DAX(fil)
    job.addArguments('--basename %s' % os.path.splitext(os.path.basename(name))[0])
    Workflow.set_job_properties(job, map_file, tc_file, staging_site=staging_site)
    workflow._adag.addJob(job)
    dep = dax.Dependency(parent=node._dax_node, child=job)
    workflow._adag.addDependency(dep)
    logging.info('Leaving minifollowups module')
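
Reading an optional option, as in the staging-site lookup above, does not need a blanket exception handler. Here is a minimal sketch using configparser directly; the section and option names match the lookup above, and the .ini path is a placeholder.

from configparser import ConfigParser, NoOptionError, NoSectionError

cp = ConfigParser()
cp.read('workflow.ini')  # placeholder path
try:
    staging_site = cp.get('workflow-foreground_minifollowups',
                          'staging-site')
except (NoSectionError, NoOptionError):
    staging_site = None  # no staging site configured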
Example #28
0
def setup_plotnumtemplates(workflow,
                           input_files,
                           cache_filename,
                           tmpltbank_cachepattern,
                           output_dir,
                           tags=None,
                           **kwargs):
    """
    This function sets up the nodes that will generate a plot of the number
    of templates against time.

    Parameters
    ----------
    workflow : ahope.Workflow
        The ahope workflow instance that the plotnumtemplates job will be
        added to.
    input_files : ahope.FileList
        A FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    tmpltbank_cachepattern : str
        The pattern that will be used to find template_bank filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    -------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """

    if tags is None:
        tags = []

    plot_files = FileList([])

    # create executable
    plotnumtemplates_job = Executable(workflow.cp, 'plotnumtemplates',
                                      'vanilla', workflow.ifos, output_dir,
                                      tags)

    for tag in tags:
        # create node
        node = Node(plotnumtemplates_job)
        node.add_opt('--gps-start-time', workflow.analysis_time[0])
        node.add_opt('--gps-end-time', workflow.analysis_time[1])
        node.add_opt('--cache-file', cache_filename)
        node.add_opt('--ifo-times', node.executable.ifo_string)
        node.add_opt('--user-tag', tag.upper() + '_SUMMARY_PLOTS')
        node.add_opt('--output-path', output_dir)
        node.add_opt('--bank-pattern', tmpltbank_cachepattern)
        node.add_opt('--enable-output')

        # add node to workflow
        workflow.add_node(node)

        # make all input_files parents
        #for f in input_files:
        #    dep = dax.Dependency(parent=f.node._dax_node, child=node._dax_node)
        #    workflow._adag.addDependency(dep)

    return plot_files
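
As a usage sketch, the function might be driven as below; the cache filename and bank pattern are placeholders, not values taken from the original example. Note that, as written, the function returns an empty FileList, since the node outputs are never appended to plot_files.

# Usage sketch with placeholder cache name and pattern.
plot_files = setup_plotnumtemplates(
    workflow,
    input_files=FileList([]),
    cache_filename='ihope.cache',
    tmpltbank_cachepattern='TMPLTBANK*',
    output_dir='plots',
    tags=['full_data'])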