Example #1
def setup_matchedfltr_dax_generated(workflow,
                                    science_segs,
                                    datafind_outs,
                                    tmplt_banks,
                                    output_dir,
                                    injection_file=None,
                                    tags=None,
                                    link_to_tmpltbank=False,
                                    compatibility_mode=False):
    '''
    Set up matched-filter jobs that are generated as part of the workflow.
    This module can support any matched-filter code that is similar in
    principle to lalapps_inspiral, but for new codes some additions are
    needed to define Executable and Job sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist instances
        The list of times that are being analysed in this workflow. 
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.
    link_to_tmpltbank : boolean, optional (default=False)
        If given, the job valid_times will be altered so that there will be
        one inspiral file for every template bank and they will cover the
        same time span. Note that this option must also be given during
        template bank generation to be meaningful.
    compatibility_mode : boolean, optional (default=False)
        Passed directly to sngl_ifo_job_setup to enable its
        backwards-compatibility mode.
        
    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = science_segs.keys()
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))
    # Select the appropriate class
    exe_class = select_matchedfilter_class(match_fltr_exe)

    if link_to_tmpltbank:
        # Use this to ensure that inspiral and tmpltbank jobs overlap. This
        # means that there will be 1 inspiral job for every 1 tmpltbank and
        # the data read in by both will overlap as much as possible. (If you
        # ask the template bank jobs to use 2000s of data for PSD estimation
        # and the matched-filter jobs to use 4000s, you will end up with
        # twice as many matched-filter jobs that still use 4000s to estimate a
        # PSD but then only generate triggers in the 2000s of data that the
        # template bank jobs ran on.)
        tmpltbank_exe = os.path.basename(cp.get('executables', 'tmpltbank'))
        link_exe_instance = select_tmpltbank_class(tmpltbank_exe)
    else:
        link_exe_instance = None

    # Set up class for holding the banks
    inspiral_outs = FileList([])

    # Matched-filtering is currently done independently for each ifo.
    # Supporting multi-detector matched-filtering, or anything similar,
    # would probably require a new module.
    for ifo in ifos:
        logging.info("Setting up matched-filtering for %s." % (ifo))
        job_instance = exe_class(workflow.cp,
                                 'inspiral',
                                 ifo=ifo,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 tags=tags)
        if link_exe_instance:
            link_job_instance = link_exe_instance(cp,
                                                  'tmpltbank',
                                                  ifo=ifo,
                                                  out_dir=output_dir,
                                                  tags=tags)
        else:
            link_job_instance = None

        sngl_ifo_job_setup(workflow,
                           ifo,
                           inspiral_outs,
                           job_instance,
                           science_segs[ifo],
                           datafind_outs,
                           parents=tmplt_banks,
                           allow_overlap=False,
                           link_job_instance=link_job_instance,
                           compatibility_mode=compatibility_mode)
    return inspiral_outs
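
A usage sketch for the function above. All input names here (workflow,
science_segs, datafind_outs, tmplt_banks) are placeholders for objects built
by earlier workflow stages, so treat this as illustrative rather than a
definitive recipe:

# Illustrative call only; the inputs are assumed to come from earlier
# stages (segment query, datafind, template bank generation).
inspiral_outs = setup_matchedfltr_dax_generated(
    workflow, science_segs, datafind_outs, tmplt_banks,
    output_dir='matchedfilter',
    injection_file=None,       # or a pycbc.workflow.core.File of injections
    tags=['FULL_DATA'],
    link_to_tmpltbank=True)    # pair each inspiral job with one bank job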
Example #2
def setup_matchedfltr_dax_generated(workflow, science_segs, datafind_outs,
                                    tmplt_banks, output_dir,
                                    injection_file=None,
                                    tags=None, link_to_tmpltbank=False,
                                    compatibility_mode=False):
    '''
    Set up matched-filter jobs that are generated as part of the workflow.
    This module can support any matched-filter code that is similar in
    principle to lalapps_inspiral, but for new codes some additions are
    needed to define Executable and Job sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.
    link_to_tmpltbank : boolean, optional (default=False)
        If given, the job valid_times will be altered so that there will be
        one inspiral file for every template bank and they will cover the
        same time span. Note that this option must also be given during
        template bank generation to be meaningful.
    compatibility_mode : boolean, optional (default=False)
        Passed directly to sngl_ifo_job_setup to enable its
        backwards-compatibility mode.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = science_segs.keys()
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))
    # Select the appropriate class
    exe_class = select_matchedfilter_class(match_fltr_exe)

    if link_to_tmpltbank:
        # Use this to ensure that inspiral and tmpltbank jobs overlap. This
        # means that there will be 1 inspiral job for every 1 tmpltbank and
        # the data read in by both will overlap as much as possible. (If you
        # ask the template bank jobs to use 2000s of data for PSD estimation
        # and the matched-filter jobs to use 4000s, you will end up with
        # twice as many matched-filter jobs that still use 4000s to estimate a
        # PSD but then only generate triggers in the 2000s of data that the
        # template bank jobs ran on.)
        tmpltbank_exe = os.path.basename(cp.get('executables', 'tmpltbank'))
        link_exe_instance = select_tmpltbank_class(tmpltbank_exe)
    else:
        link_exe_instance = None

    # Set up class for holding the banks
    inspiral_outs = FileList([])

    # Matched-filtering is currently done independently for each ifo.
    # Supporting multi-detector matched-filtering, or anything similar,
    # would probably require a new module.
    for ifo in ifos:
        logging.info("Setting up matched-filtering for %s." % ifo)
        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifo,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 tags=tags)
        if link_exe_instance:
            link_job_instance = link_exe_instance(cp, 'tmpltbank', ifo=ifo,
                                                  out_dir=output_dir,
                                                  tags=tags)
        else:
            link_job_instance = None

        sngl_ifo_job_setup(workflow, ifo, inspiral_outs, job_instance,
                           science_segs[ifo], datafind_outs,
                           parents=tmplt_banks, allow_overlap=False,
                           link_job_instance=link_job_instance,
                           compatibility_mode=compatibility_mode)
    return inspiral_outs
Example #3
def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
                                          tmplt_banks, output_dir,
                                          injection_file=None, gate_files=None,
                                          tags=None, link_to_tmpltbank=False,
                                          compatibility_mode=False):
    '''
    Set up matched-filter jobs that are generated as part of the workflow in
    which a single job reads in and generates triggers over multiple ifos.
    This module can support any matched-filter code that is similar in
    principle to pycbc_multi_inspiral or lalapps_coh_PTF_inspiral, but for
    new codes some additions are needed to define Executable and Job
    sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    gate_files : pycbc.workflow.core.FileList, optional (default=None)
        If given, the gating files that will be sent to these jobs on the
        command line.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.
    link_to_tmpltbank : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.
    compatibility_mode : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = list(science_segs.keys())
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))

    # Select the appropriate class
    exe_class = select_matchedfilter_class(match_fltr_exe)

    # List for holding the output
    inspiral_outs = FileList([])

    logging.info("Setting up matched-filtering for %s." % ' '.join(ifos))

    if match_fltr_exe == 'lalapps_coh_PTF_inspiral':
        cp.set('inspiral', 'right-ascension', cp.get('workflow', 'ra'))
        cp.set('inspiral', 'declination', cp.get('workflow', 'dec'))
        cp.set('inspiral', 'sky-error', cp.get('workflow', 'sky-error'))
        cp.set('inspiral', 'trigger-time', cp.get('workflow', 'trigger-time'))
        cp.set('inspiral', 'block-duration',
               str(abs(science_segs[ifos[0]][0]) -
                   2 * int(cp.get('inspiral', 'pad-data'))))

        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file, 
                                 gate_files=gate_files, tags=tags)
        multi_ifo_coherent_job_setup(workflow, inspiral_outs, job_instance,
                                     science_segs, datafind_outs, output_dir,
                                     parents=tmplt_banks)
    else:
        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 gate_files=gate_files, tags=tags)
        multi_ifo_job_setup(workflow, inspiral_outs, job_instance,
                            science_segs, datafind_outs, output_dir,
                            parents=tmplt_banks)
    return inspiral_outs
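
The branch above dispatches on the executable's basename via
select_matchedfilter_class. A minimal, self-contained sketch of what such a
basename-to-class dispatch can look like; the Executable sub-classes below
are hypothetical placeholders, not pycbc's actual classes:

# Sketch of basename-to-class dispatch in the spirit of
# select_matchedfilter_class. The classes are hypothetical stand-ins.
class _InspiralExecutable:
    pass

class _CohPTFExecutable:
    pass

_EXE_TO_CLASS = {
    'lalapps_inspiral': _InspiralExecutable,
    'lalapps_coh_PTF_inspiral': _CohPTFExecutable,
}

def select_matchedfilter_class_sketch(exe_name):
    """Return the job class registered for an executable basename."""
    try:
        return _EXE_TO_CLASS[exe_name]
    except KeyError:
        raise ValueError("No matched-filter class known for executable "
                         "'%s'" % exe_name)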
Example #4
def setup_tmpltbank_dax_generated(workflow,
                                  science_segs,
                                  datafind_outs,
                                  output_dir,
                                  tags=None,
                                  link_to_matchedfltr=True,
                                  compatibility_mode=False,
                                  psd_files=None):
    '''
    Set up template bank jobs that are generated as part of the CBC workflow.
    This function will add numerous jobs to the CBC workflow using
    configuration options from the .ini file. The following executables are
    currently supported:

    * lalapps_tmpltbank
    * pycbc_geom_nonspin_bank

    Parameters
    ----------
    workflow: pycbc.workflow.core.Workflow
        An instanced class that manages the constructed workflow.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist objects
        science_segs[ifo] holds the science segments to be analysed for each
        ifo.
    datafind_outs : pycbc.workflow.core.FileList
        The file list containing the datafind files.
    output_dir : path string
        The directory where data products will be placed.
    tags : list of strings, optional (default=None)
        If given, these tags are used to uniquely name and identify output
        files that would be produced in multiple calls to this function.
    link_to_matchedfltr : boolean, optional (default=True)
        If this option is given, the job valid_times will be altered so that
        there will be one inspiral file for every template bank and they will
        cover the same time span. Note that this option must also be given
        during matched-filter generation to be meaningful.
    compatibility_mode : boolean, optional (default=False)
        Passed directly to sngl_ifo_job_setup to enable its
        backwards-compatibility mode.
    psd_files : pycbc.workflow.core.FileList, optional (default=None)
        The file list containing predefined PSDs, if provided.

    Returns
    -------
    tmplt_banks : pycbc.workflow.core.FileList
        The FileList holding the details of all the template bank jobs.
    '''
    if tags is None:
        tags = []
    cp = workflow.cp
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    ifos = science_segs.keys()
    tmplt_bank_exe = os.path.basename(cp.get('executables', 'tmpltbank'))
    # Select the appropriate class
    exe_class = select_tmpltbank_class(tmplt_bank_exe)

    if link_to_matchedfltr:
        # Use this to ensure that inspiral and tmpltbank jobs overlap. This
        # means that there will be 1 inspiral job for every 1 tmpltbank and
        # the data read in by both will overlap as much as possible. (If you
        # ask the template bank jobs to use 2000s of data for PSD estimation
        # and the matched-filter jobs to use 4000s, you will end up with
        # twice as many matched-filter jobs that still use 4000s to estimate a
        # PSD but then only generate triggers in the 2000s of data that the
        # template bank jobs ran on.)
        matchedfltr_exe = os.path.basename(cp.get('executables', 'inspiral'))
        link_exe_instance = select_matchedfilter_class(matchedfltr_exe)
    else:
        link_exe_instance = None

    # Set up class for holding the banks
    tmplt_banks = FileList([])

    # Template banks are currently generated independently for each ifo,
    # but might not always be! Begin with the independent case; extending
    # this is left as a FIXME.
    for ifo in ifos:
        job_instance = exe_class(workflow.cp,
                                 'tmpltbank',
                                 ifo=ifo,
                                 out_dir=output_dir,
                                 tags=tags)
        # Check for the write_psd flag
        if cp.has_option_tags("workflow-tmpltbank", "tmpltbank-write-psd-file",
                              tags):
            job_instance.write_psd = True
        else:
            job_instance.write_psd = False

        if link_exe_instance:
            link_job_instance = link_exe_instance(cp,
                                                  'inspiral',
                                                  ifo=ifo,
                                                  out_dir=output_dir,
                                                  tags=tags)
        else:
            link_job_instance = None
        sngl_ifo_job_setup(workflow,
                           ifo,
                           tmplt_banks,
                           job_instance,
                           science_segs[ifo],
                           datafind_outs,
                           link_job_instance=link_job_instance,
                           allow_overlap=True,
                           compatibility_mode=compatibility_mode)
    return tmplt_banks
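
Taken together with the matched-filter setup above, this function's output
feeds the next stage directly. A hedged sketch of the chaining, where
workflow, science_segs and datafind_outs again stand in for objects built by
earlier stages:

# Illustrative chaining: template bank outputs become matched-filter inputs.
tmplt_banks = setup_tmpltbank_dax_generated(workflow, science_segs,
                                            datafind_outs, 'bank',
                                            tags=['FULL_DATA'])
inspiral_outs = setup_matchedfltr_dax_generated(workflow, science_segs,
                                                datafind_outs, tmplt_banks,
                                                'inspiral',
                                                tags=['FULL_DATA'])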
Example #5
def setup_matchedfltr_dax_generated_multi(workflow,
                                          science_segs,
                                          datafind_outs,
                                          tmplt_banks,
                                          output_dir,
                                          injection_file=None,
                                          gate_files=None,
                                          tags=None,
                                          link_to_tmpltbank=False,
                                          compatibility_mode=False):
    '''
    Set up matched-filter jobs that are generated as part of the workflow in
    which a single job reads in and generates triggers over multiple ifos.
    This module can support any matched-filter code that is similar in
    principle to pycbc_multi_inspiral or lalapps_coh_PTF_inspiral, but for
    new codes some additions are needed to define Executable and Job
    sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    gate_files : pycbc.workflow.core.FileList, optional (default=None)
        If given, the gating files that will be sent to these jobs on the
        command line.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.
    link_to_tmpltbank : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.
    compatibility_mode : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = list(science_segs.keys())
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))

    # Select the appropriate class
    exe_class = select_matchedfilter_class(match_fltr_exe)

    # List for holding the output
    inspiral_outs = FileList([])

    logging.info("Setting up matched-filtering for %s." % ' '.join(ifos))

    if match_fltr_exe == 'lalapps_coh_PTF_inspiral':
        cp.set('inspiral', 'right-ascension', cp.get('workflow', 'ra'))
        cp.set('inspiral', 'declination', cp.get('workflow', 'dec'))
        cp.set('inspiral', 'sky-error', cp.get('workflow', 'sky-error'))
        cp.set('inspiral', 'trigger-time', cp.get('workflow', 'trigger-time'))
        cp.set('inspiral', 'block-duration',
               str(abs(science_segs[ifos[0]][0]) -
                   2 * int(cp.get('inspiral', 'pad-data'))))

        job_instance = exe_class(workflow.cp,
                                 'inspiral',
                                 ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 gate_files=gate_files,
                                 tags=tags)
        multi_ifo_coherent_job_setup(workflow,
                                     inspiral_outs,
                                     job_instance,
                                     science_segs,
                                     datafind_outs,
                                     output_dir,
                                     parents=tmplt_banks)
    else:
        job_instance = exe_class(workflow.cp,
                                 'inspiral',
                                 ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 gate_files=gate_files,
                                 tags=tags)
        multi_ifo_job_setup(workflow,
                            inspiral_outs,
                            job_instance,
                            science_segs,
                            datafind_outs,
                            output_dir,
                            parents=tmplt_banks)
    return inspiral_outs
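
The tags=[] default seen in earlier revisions of this signature (changed to
tags=None above) is the classic Python mutable-default pitfall: the default
list is created once, at definition time, and shared across calls. A minimal
self-contained demonstration of the failure mode and the idiomatic fix:

# The pitfall: mutating a mutable default leaks state between calls.
def setup_buggy(tags=[]):
    tags.append('PROCESSED')
    return tags

print(setup_buggy())  # ['PROCESSED']
print(setup_buggy())  # ['PROCESSED', 'PROCESSED']  <- leaked state

# The fix used throughout these functions: default to None and build a
# fresh list inside the body.
def setup_fixed(tags=None):
    if tags is None:
        tags = []
    tags.append('PROCESSED')
    return tags

print(setup_fixed())  # ['PROCESSED']
print(setup_fixed())  # ['PROCESSED']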
Example #6
def setup_tmpltbank_dax_generated(workflow, science_segs, datafind_outs,
                                  output_dir, tags=None,
                                  link_to_matchedfltr=True,
                                  compatibility_mode=False,
                                  psd_files=None):
    '''
    Set up template bank jobs that are generated as part of the CBC workflow.
    This function will add numerous jobs to the CBC workflow using
    configuration options from the .ini file. The following executables are
    currently supported:

    * lalapps_tmpltbank
    * pycbc_geom_nonspin_bank

    Parameters
    ----------
    workflow: pycbc.workflow.core.Workflow
        An instanced class that manages the constructed workflow.
    science_segs : ifo-keyed dictionary of glue.segments.segmentlist objects
        science_segs[ifo] holds the science segments to be analysed for each
        ifo.
    datafind_outs : pycbc.workflow.core.FileList
        The file list containing the datafind files.
    output_dir : path string
        The directory where data products will be placed. 
    tags : list of strings, optional (default=None)
        If given, these tags are used to uniquely name and identify output
        files that would be produced in multiple calls to this function.
    link_to_matchedfltr : boolean, optional (default=True)
        If this option is given, the job valid_times will be altered so that
        there will be one inspiral file for every template bank and they will
        cover the same time span. Note that this option must also be given
        during matched-filter generation to be meaningful.
    compatibility_mode : boolean, optional (default=False)
        Passed directly to sngl_ifo_job_setup to enable its
        backwards-compatibility mode.
    psd_files : pycbc.workflow.core.FileList, optional (default=None)
        The file list containing predefined PSDs, if provided.

    Returns
    -------
    tmplt_banks : pycbc.workflow.core.FileList
        The FileList holding the details of all the template bank jobs.
    '''
    if tags is None:
        tags = []
    cp = workflow.cp
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here .... 

    ifos = science_segs.keys()
    tmplt_bank_exe = os.path.basename(cp.get('executables', 'tmpltbank'))
    # Select the appropriate class
    exe_class = select_tmpltbank_class(tmplt_bank_exe)

    if link_to_matchedfltr:
        # Use this to ensure that inspiral and tmpltbank jobs overlap. This
        # means that there will be 1 inspiral job for every 1 tmpltbank and
        # the data read in by both will overlap as much as possible. (If you
        # ask the template bank jobs to use 2000s of data for PSD estimation
        # and the matched-filter jobs to use 4000s, you will end up with
        # twice as many matched-filter jobs that still use 4000s to estimate a
        # PSD but then only generate triggers in the 2000s of data that the
        # template bank jobs ran on.)
        matchedfltr_exe = os.path.basename(cp.get('executables', 'inspiral'))
        link_exe_instance = select_matchedfilter_class(matchedfltr_exe)
    else:
        link_exe_instance = None

    # Set up class for holding the banks
    tmplt_banks = FileList([])

    # Template banks are currently generated independently for each ifo,
    # but might not always be! Begin with the independent case; extending
    # this is left as a FIXME.
    for ifo in ifos:
        job_instance = exe_class(workflow.cp, 'tmpltbank', ifo=ifo,
                                 out_dir=output_dir, tags=tags)
        # Check for the write_psd flag
        if cp.has_option_tags("workflow-tmpltbank",
                              "tmpltbank-write-psd-file", tags):
            job_instance.write_psd = True
        else:
            job_instance.write_psd = False

        if link_exe_instance:
            link_job_instance = link_exe_instance(cp, 'inspiral', ifo=ifo,
                                                  out_dir=output_dir,
                                                  tags=tags)
        else:
            link_job_instance = None
        sngl_ifo_job_setup(workflow, ifo, tmplt_banks, job_instance, 
                           science_segs[ifo], datafind_outs,
                           link_job_instance=link_job_instance, 
                           allow_overlap=True,
                           compatibility_mode=compatibility_mode)
    return tmplt_banks
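
The write_psd toggle above uses a tag-aware option lookup. A rough sketch of
the idea with the standard-library ConfigParser; pycbc's real has_option_tags
is more featureful, so this only illustrates the tag-qualified section names,
and the section/option names below are assumptions:

# Sketch: check an option under the base section and under tag-qualified
# variants such as 'workflow-tmpltbank-full_data'. Illustrative only.
from configparser import ConfigParser

def has_option_tags_sketch(cp, section, option, tags):
    candidates = [section] + ['%s-%s' % (section, tag.lower())
                              for tag in tags]
    return any(cp.has_section(sec) and cp.has_option(sec, option)
               for sec in candidates)

cp = ConfigParser()
cp.read_string("""
[workflow-tmpltbank-full_data]
tmpltbank-write-psd-file =
""")
print(has_option_tags_sketch(cp, 'workflow-tmpltbank',
                             'tmpltbank-write-psd-file', ['FULL_DATA']))
# -> True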
Example #7
def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
                                          tmplt_banks, output_dir,
                                          injection_file=None,
                                          tags=None):
    '''
    Set up matched-filter jobs that are generated as part of the workflow in
    which a single job reads in and generates triggers over multiple ifos.
    This module can support any matched-filter code that is similar in
    principle to pycbc_multi_inspiral or lalapps_coh_PTF_inspiral, but for
    new codes some additions are needed to define Executable and Job
    sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of ligo.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = sorted(science_segs.keys())
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))

    # List for holding the output
    inspiral_outs = FileList([])

    logging.info("Setting up matched-filtering for %s." % ' '.join(ifos))

    if match_fltr_exe == 'pycbc_multi_inspiral':
        exe_class = select_matchedfilter_class(match_fltr_exe)
        cp.set('inspiral', 'longitude',
               str(radians(float(cp.get('workflow', 'ra')))))
        cp.set('inspiral', 'latitude',
               str(radians(float(cp.get('workflow', 'dec')))))
        # At the moment we aren't using sky grids, but this code may be
        # needed when we do.
        # from pycbc.workflow.grb_utils import get_sky_grid_scale
        # if cp.has_option("jitter_skyloc", "apply-fermi-error"):
        #     cp.set('inspiral', 'sky-error',
        #            str(get_sky_grid_scale(float(cp.get('workflow',
        #                                                'sky-error')))))
        # else:
        #     cp.set('inspiral', 'sky-error',
        #            str(get_sky_grid_scale(float(cp.get('workflow',
        #                                                'sky-error')),
        #                                   sigma_sys=0.0)))
        # cp.set('inspiral', 'trigger-time',\
        #        cp.get('workflow', 'trigger-time'))
        # cp.set('inspiral', 'block-duration',
        #        str(abs(science_segs[ifos[0]][0]) - \
        #                2 * int(cp.get('inspiral', 'pad-data'))))

        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 tags=tags)
        if cp.has_option("workflow", "do-long-slides") and "slide" in tags[-1]:
            slide_num = int(tags[-1].replace("slide", ""))
            logging.info("Setting up matched-filtering for slide {}"
                         .format(slide_num))
            slide_shift = int(cp.get("inspiral", "segment-length"))
            time_slide_dict = {ifo: (slide_num + 1) * ix * slide_shift
                               for ix, ifo in enumerate(ifos)}
            multi_ifo_coherent_job_setup(workflow, inspiral_outs, job_instance,
                                         science_segs, datafind_outs,
                                         output_dir, parents=tmplt_banks,
                                         slide_dict=time_slide_dict)
        else:
            multi_ifo_coherent_job_setup(workflow, inspiral_outs, job_instance,
                                         science_segs, datafind_outs,
                                         output_dir, parents=tmplt_banks)
    else:
        raise ValueError("Executable %s is not currently supported."
                         % match_fltr_exe)
    return inspiral_outs
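
The long-slides branch builds time_slide_dict from the slide number encoded
in the final tag and the configured segment length. A small worked example
with assumed values, just to make the arithmetic concrete:

# Worked example of the time_slide_dict arithmetic: each ifo is shifted by
# (slide_num + 1) * ifo_index * slide_shift seconds. Values are assumed.
ifos = ['H1', 'L1', 'V1']   # assumed detector list
slide_num = 2               # as parsed from a tag like 'slide2'
slide_shift = 256           # assumed [inspiral] segment-length in seconds

time_slide_dict = {ifo: (slide_num + 1) * ix * slide_shift
                   for ix, ifo in enumerate(ifos)}
print(time_slide_dict)      # {'H1': 0, 'L1': 768, 'V1': 1536}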
Example #8
def setup_matchedfltr_dax_generated(workflow, science_segs, datafind_outs,
                                    tmplt_banks, output_dir,
                                    injection_file=None,
                                    tags=None):
    '''
    Set up matched-filter jobs that are generated as part of the workflow.
    This module can support any matched-filter code that is similar in
    principle to lalapps_inspiral, but for new codes some additions are
    needed to define Executable and Job sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of ligo.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = science_segs.keys()
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))
    # Select the appropriate class
    exe_class = select_matchedfilter_class(match_fltr_exe)

    # Set up class for holding the banks
    inspiral_outs = FileList([])

    # Matched-filtering is currently done independently for each ifo.
    # Supporting multi-detector matched-filtering, or anything similar,
    # would probably require a new module.
    for ifo in ifos:
        logging.info("Setting up matched-filtering for %s." % ifo)
        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifo,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 tags=tags)

        sngl_ifo_job_setup(workflow, ifo, inspiral_outs, job_instance,
                           science_segs[ifo], datafind_outs,
                           parents=tmplt_banks, allow_overlap=False)
    return inspiral_outs
Example #9
def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
                                          tmplt_banks, output_dir,
                                          injection_file=None,
                                          tags=None, link_to_tmpltbank=False,
                                          compatibility_mode=False):
    '''
    Set up matched-filter jobs that are generated as part of the workflow in
    which a single job reads in and generates triggers over multiple ifos.
    This module can support any matched-filter code that is similar in
    principle to pycbc_multi_inspiral or lalapps_coh_PTF_inspiral, but for
    new codes some additions are needed to define Executable and Job
    sub-classes (see jobutils.py).

    Parameters
    ----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance that the coincidence jobs will be added to.
    science_segs : ifo-keyed dictionary of ligo.segments.segmentlist instances
        The list of times that are being analysed in this workflow.
    datafind_outs : pycbc.workflow.core.FileList
        A FileList of the datafind files that are needed to obtain the
        data used in the analysis.
    tmplt_banks : pycbc.workflow.core.FileList
        A FileList of the template bank files that will serve as input
        to this stage.
    output_dir : path
        The directory in which output will be stored.
    injection_file : pycbc.workflow.core.File, optional (default=None)
        If given, this file contains the simulations to be injected and will
        be sent to these jobs on the command line. If not given, no file
        will be sent.
    tags : list of strings, optional (default=None)
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['BNSINJECTIONS'] or
        ['NOINJECTIONANALYSIS']. This will be used in output names.
    link_to_tmpltbank : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.
    compatibility_mode : boolean, optional (default=False)
        Accepted for API compatibility with the single-ifo setup function;
        not currently used here.

    Returns
    -------
    inspiral_outs : pycbc.workflow.core.FileList
        A list of output files written by this stage. This *will not* contain
        any intermediate products produced within this stage of the workflow.
        If you require access to any intermediate products produced at this
        stage you can call the various sub-functions directly.
    '''
    if tags is None:
        tags = []
    # Need to get the exe to figure out what sections are analysed, what is
    # discarded etc. This should *not* be hardcoded, so using a new executable
    # will require a bit of effort here ....

    cp = workflow.cp
    ifos = sorted(science_segs.keys())
    match_fltr_exe = os.path.basename(cp.get('executables', 'inspiral'))

    # List for holding the output
    inspiral_outs = FileList([])

    logging.info("Setting up matched-filtering for %s." % ' '.join(ifos))

    if match_fltr_exe == 'pycbc_multi_inspiral':
        exe_class = select_matchedfilter_class(match_fltr_exe)
        cp.set('inspiral', 'longitude',
               str(radians(float(cp.get('workflow', 'ra')))))
        cp.set('inspiral', 'latitude',
               str(radians(float(cp.get('workflow', 'dec')))))
        # At the moment we aren't using sky grids, but this code may be
        # needed when we do.
        # from pycbc.workflow.grb_utils import get_sky_grid_scale
        # if cp.has_option("jitter_skyloc", "apply-fermi-error"):
        #     cp.set('inspiral', 'sky-error',
        #            str(get_sky_grid_scale(float(cp.get('workflow',
        #                                                'sky-error')))))
        # else:
        #     cp.set('inspiral', 'sky-error',
        #            str(get_sky_grid_scale(float(cp.get('workflow',
        #                                                'sky-error')),
        #                                   sigma_sys=0.0)))
        # cp.set('inspiral', 'trigger-time',\
        #        cp.get('workflow', 'trigger-time'))
        # cp.set('inspiral', 'block-duration',
        #        str(abs(science_segs[ifos[0]][0]) - \
        #                2 * int(cp.get('inspiral', 'pad-data'))))

        job_instance = exe_class(workflow.cp, 'inspiral', ifo=ifos,
                                 out_dir=output_dir,
                                 injection_file=injection_file,
                                 tags=tags)
        if cp.has_option("workflow", "do-long-slides") and "slide" in tags[-1]:
            slide_num = int(tags[-1].replace("slide", ""))
            logging.info("Setting up matched-filtering for slide {}"
                         .format(slide_num))
            slide_shift = int(cp.get("inspiral", "segment-length"))
            time_slide_dict = {ifo: (slide_num + 1) * ix * slide_shift
                               for ix, ifo in enumerate(ifos)}
            multi_ifo_coherent_job_setup(workflow, inspiral_outs, job_instance,
                                         science_segs, datafind_outs,
                                         output_dir, parents=tmplt_banks,
                                         slide_dict=time_slide_dict)
        else:
            multi_ifo_coherent_job_setup(workflow, inspiral_outs, job_instance,
                                         science_segs, datafind_outs,
                                         output_dir, parents=tmplt_banks)
    else:
        raise ValueError("Executable %s is not currently supported."
                         % match_fltr_exe)
    return inspiral_outs