def get_singles_times( connection, verbose = False ):
    """
    Return a dictionary mapping each ifo to the duration (float seconds) of
    its filtered segments after veto subtraction.  Veto categories are
    cycled in dictionary order, so each ifo's entry reflects the last
    category visited.
    """
    # single-ifo filtered segments for the full-data inspiral jobs
    analyzed_segs = compute_dur.get_single_ifo_segments(
        connection, program_name = "inspiral", usertag = "FULL_DATA")

    # veto segments keyed by veto-definer category
    vetoes_by_cat = compute_dur.get_veto_segments(
        dbtables.get_xml(connection), verbose)

    # subtract each category's vetoes; later categories overwrite earlier ones
    return dict(
        (ifo, float(abs(seglist)))
        for cat_vetoes in vetoes_by_cat.values()
        for ifo, seglist in (analyzed_segs - cat_vetoes).items()
    )
# Example #2
def get_singles_times(connection, verbose=False):
    """
    Compute per-ifo analyzable livetime (in seconds) after applying vetoes.

    For every veto category the single-ifo filtered segments are reduced by
    that category's veto segments; the resulting durations are written into
    one shared dictionary, so the values left standing come from the final
    category iterated.
    """
    # filtered segments per ifo (FULL_DATA inspiral jobs)
    filtered = compute_dur.get_single_ifo_segments(connection,
                                                   program_name="inspiral",
                                                   usertag="FULL_DATA")

    # all veto segments, keyed by veto-definer name
    xmldoc = dbtables.get_xml(connection)
    all_vetoes = compute_dur.get_veto_segments(xmldoc, verbose)

    durations = {}
    for _, cat_vetoes in all_vetoes.items():
        surviving = filtered - cat_vetoes
        durations.update(
            (ifo, float(abs(seglist))) for ifo, seglist in surviving.items())

    return durations
# Example #3
def printmissed(connection, simulation_table, recovery_table, map_label, livetime_program,
    param_name = None, param_ranges = None, exclude_coincs = None, include_only_coincs = None, sim_tag = 'ALLINJ',
    limit = None, daily_ihope_pages_location = 'https://ldas-jobs.ligo.caltech.edu/~cbc/ihope_daily', verbose = False):
    """
    Build and return a table of "close missed" injections: simulations with
    no entry in the sim_rec_map (i.e., not recovered), ranked by decisive
    distance within each veto category and exclusive on-instruments time.

    @connection: connection to a SQLite database with lsctables
    @simulation_table: name of the table containing the injections
    @recovery_table: name of the recovery table used to build sim_rec_map
    @map_label: label of the simulation/recovery map to use
    @livetime_program: program whose filtered segments define analyzed time
    @param_name, @param_ranges: optional parameter cuts passed to create_filter
    @exclude_coincs, @include_only_coincs: restrict which on-instrument times
        are reported (only entries whose coinc type includes 'ALL' are used)
    @sim_tag: injection-set tag to restrict to; default 'ALLINJ'
    @limit: if given, only injections with rank <= limit are reported
    @daily_ihope_pages_location: base URL for the daily-ihope hyperlinks
    @verbose: if True, print progress messages to stderr

    Returns the populated CloseMissedTable.  The temporary sim_rec_map
    table is dropped before returning.
    """
    
    from pylal import ligolw_sqlutils as sqlutils
    from pylal import ligolw_cbc_compute_durations as compute_dur
    from glue import segments
    from glue.ligolw import dbtables

    # Get simulation/recovery tables
    simulation_table = sqlutils.validate_option(simulation_table)
    recovery_table = sqlutils.validate_option(recovery_table)
    
    # create the get_sim_tag function
    sim_map = sqlutils.sim_tag_proc_id_mapper( connection )
    connection.create_function( 'get_sim_tag', 1, sim_map.get_sim_tag )

    #
    #   Create and prepare the CloseMissedTable to store summary information
    #
    
    # Get simulation table column names from database
    simulation_table_columns = sqlutils.get_column_names_from_table( connection, simulation_table )
    column_names = simulation_table_columns + \
        ['rank', 'decisive_distance', 'gps_time', 'gps_time_ns', 'injection_time_utc__Px_click_for_daily_ihope_xP_', 'elogs', 'instruments_on', 'veto_def_name', 'mini_followup','omega_scan', 'sim_tag']
    
    
    # define needed tables
    # NOTE: the table class is built dynamically so its columns mirror
    # whatever columns the simulation table actually has in this database.
    class CloseMissedTable(table.Table):
        tableName = "close_missed_injections:table"
        validcolumns = {}
        for col_name in column_names:
            if 'rank' in col_name:
                validcolumns[col_name] = "int_4u"
            elif 'instruments_on' == col_name:
                validcolumns[col_name] = lsctables.ExperimentTable.validcolumns['instruments']
            elif 'veto_def_name' == col_name:
                validcolumns[col_name] = lsctables.ExperimentSummaryTable.validcolumns['veto_def_name']
            elif 'decisive_distance' == col_name:
                # reuse the type of an effective-distance column from the sim table
                validcolumns[col_name] = sqlutils.get_col_type(simulation_table, 'eff_dist_h')
            elif 'gps_time' == col_name or 'gps_time_ns' == col_name:
                validcolumns[col_name] = "int_4s"
            elif 'sim_tag' == col_name:
                validcolumns[col_name] = "lstring"
            else:
                validcolumns[col_name] = sqlutils.get_col_type(simulation_table, col_name, default = 'lstring')
    
    class CloseMissed(object):
        __slots__ = CloseMissedTable.validcolumns.keys()
    
        def get_pyvalue(self):
            return generic_get_pyvalue(self)
    
    # connect the rows to the tables
    CloseMissedTable.RowType = CloseMissed
    
    # create the table
    cmtable = lsctables.New(CloseMissedTable)

    # set up sim_rec_map table
    sqlutils.create_sim_rec_map_table(connection, simulation_table, recovery_table, map_label, None)
    
    #
    #   Set table filters
    #
    
    # we force the include/exclude filters to None; will check for excluded/included ifo time
    # when cycling through the ifo times
    # NOTE: 'filter' shadows the builtin of the same name within this function.
    filter = """
        WHERE
            simulation_id NOT IN (
                SELECT
                    sim_id
                FROM
                    sim_rec_map )"""
    af = create_filter( connection, simulation_table, param_name = param_name, param_ranges = param_ranges,
            exclude_coincs = None, include_only_coincs = None, sim_tag = sim_tag, verbose = verbose)
    # the generated filter references experiment_summary, which isn't joined
    # here; rewrite its sim_proc_id column to the sim table's process_id
    af = re.sub(r'experiment_summary[.]sim_proc_id', 'process_id', af)
    if af != '':
        filter = '\n'.join([ filter, """
            AND""", af])
    # get desired instrument times
    if include_only_coincs is not None:
        include_times = [on_instruments for on_instruments, type in
            sqlutils.parse_coinc_options( include_only_coincs, verbose = verbose ).get_coinc_types().items()
            if 'ALL' in type]
    if exclude_coincs is not None:
        exclude_times = [on_instruments for on_instruments, type in
            sqlutils.parse_coinc_options( exclude_coincs, verbose = verbose ).get_coinc_types().items()
            if 'ALL' in type]

    # get the usertags of inspiral jobs in this database
    sqlquery = """
        SELECT value
        FROM process_params
        WHERE param == "-userTag"
        GROUP BY value
    """
    usertags = set(usertag[0] for usertag in connection.cursor().execute(sqlquery) )
    
    # Get the single-ifo science segments after CAT-1 vetoes
    try:
        if "FULL_DATA" in usertags:
            tag = "FULL_DATA"
        else:
            # no FULL_DATA tag present; fall back to an arbitrary tag
            tag = list(usertags)[0]
    except IndexError:
        # This is hacky anyway, so let's just take a guess
        tag = "FULL_DATA"
    ifo_segments = compute_dur.get_single_ifo_segments(connection, program_name = livetime_program, usertag = tag)
    if ifo_segments == {}:
        raise ValueError, "Cannot find any analysis segments using %s as a livetime program; cannot get missed injections." % livetime_program
    
    if verbose:
        print >> sys.stderr, "Getting all veto category names from the experiment_summary table..."

    xmldoc = dbtables.get_xml(connection)
    # get veto_segments
    veto_segments = compute_dur.get_veto_segments(xmldoc, verbose)

    # make a dictionary of zerolag "shifts" needed for the get_coinc_segments function
    zerolag_dict = {}
    for ifo in ifo_segments:
        zerolag_dict[ifo] = 0.0

    # Cycle over available veto categories
    for veto_def_name, veto_seg_dict in veto_segments.items():
        post_vetoes_ifosegs = ifo_segments - veto_seg_dict
    
        # make a dictionary of coincident segments by exclusive on-ifos
        coinc_segs = compute_dur.get_coinc_segments(post_vetoes_ifosegs, zerolag_dict)
    
        #
        #   Get all the on_instrument times and cycle over them
        #
        sqlquery = """
            SELECT DISTINCT experiment.instruments
            FROM experiment
                JOIN experiment_summary ON (
                    experiment.experiment_id == experiment_summary.experiment_id )
            WHERE experiment_summary.veto_def_name == :1
        """
        for on_instruments in connection.cursor().execute(sqlquery, (veto_def_name,)).fetchall():
            on_instruments = lsctables.instrument_set_from_ifos(on_instruments[0])
    
            # check if this on_instruments is desired; if not, skip
            if include_only_coincs is not None and frozenset(on_instruments) not in include_times:
                continue
            if exclude_coincs is not None and frozenset(on_instruments) in exclude_times:
                continue
 
            on_times = coinc_segs[','.join(sorted(on_instruments))]
            
            # SQL-visible predicate: is this (s, ns) end time inside the
            # exclusive on-time for the current instrument set?
            def is_in_on_time(gps_time, gps_time_ns):
                return gps_time + (1e-9 * gps_time_ns) in on_times
            
            connection.create_function('is_in_on_time', 2, is_in_on_time)
            
            # add the check for on time to the filter
            in_this_filter = filter
            # figure out if simulation_table has end_times or start_times
            # (pre-ternary 'cond and a or b' idiom; safe here since both
            # alternatives are non-empty strings)
            end_or_start = any('end_time' in c for c in simulation_table_columns) and '_end_time' or '_start_time'
            for instrument in on_instruments:
                # e.g. 'H1' -> 'h_end_time' / 'h_end_time_ns'
                inst_time = instrument.lower()[0] + end_or_start
                inst_time_ns = inst_time + '_ns' 
                in_this_filter = ''.join([ in_this_filter,
                    '\n\tAND is_in_on_time(', inst_time, ',', inst_time_ns, ')' ])
     
            #
            #   Set up decisive distance argument
            #
            
            # decisive distance = second-smallest effective distance among
            # the instruments that are on
            def get_decisive_distance( *args ):
               return sorted(args)[1]
            
            connection.create_function('get_decisive_distance', len(on_instruments), get_decisive_distance)
            decisive_distance = ''.join(['get_decisive_distance(', ','.join(['eff_dist_'+inst.lower()[0] for inst in on_instruments]), ')' ])
            
            #
            #   Initialize ranking. Statistics for ranking are based on decisive distance
            #
            if verbose:
                print >> sys.stderr, "Getting statistics for ranking..."
            ranker = sqlutils.rank_stats(simulation_table, decisive_distance, 'ASC')
            # add requirement that stats not be found in the sim_rec_table to in_this_filter
            ranker.populate_stats_list(connection, limit = limit, filter = in_this_filter)
            connection.create_function( 'rank', 1, ranker.get_rank )
            
            #
            #   Get the Data
            #
            sqlquery = ''.join(["""
                SELECT
                    *,
                    get_sim_tag(process_id),
                    """, decisive_distance, """,
                    rank(""", decisive_distance, """)
                FROM
                    """, simulation_table, """
                """, in_this_filter, """
                    %s""" % (limit is not None and ''.join(['AND rank(', decisive_distance, ') <= ', str(limit)]) or ''), """
                ORDER BY
                    rank(""", decisive_distance, """) ASC
                    """])
            
            if verbose:
                print >> sys.stderr, "Getting injections..."
                print >> sys.stderr, "SQLite query used is:"
                print >> sys.stderr, sqlquery
            
            for values in connection.cursor().execute( sqlquery ).fetchall():
                cmrow = CloseMissed()
                # first len(simulation_table_columns) values are the sim
                # table's own columns (SELECT *); copy them onto the row
                [ setattr(cmrow, column, values[ii]) for ii, column in enumerate(simulation_table_columns) ]
                cmrow.decisive_distance = values[-2]
                cmrow.rank = values[-1]
                cmrow.instruments_on = lsctables.ifos_from_instrument_set(on_instruments)
                cmrow.veto_def_name = veto_def_name
                cmrow.sim_tag = values[-3]
                cmrow.mini_followup = None
                cmrow.omega_scan = None
                # gps time taken from the alphabetically-first on instrument
                cmrow.gps_time = getattr(cmrow, sorted(on_instruments)[0][0].lower() + end_or_start)
                cmrow.gps_time_ns = getattr(cmrow, sorted(on_instruments)[0][0].lower() + end_or_start + '_ns')
                # set  elog page
                elog_pages = [(ifo, get_elog_page(ifo, cmrow.gps_time)) for ifo in on_instruments]
                cmrow.elogs = ','.join([ create_hyperlink(elog[1], elog[0]) for elog in sorted(elog_pages) ])
                # set daily_ihope page
                injection_time_utc = format_end_time_in_utc( cmrow.gps_time ) 
                daily_ihope_address = get_daily_ihope_page(cmrow.gps_time, pages_location = daily_ihope_pages_location)
                cmrow.injection_time_utc__Px_click_for_daily_ihope_xP_ = create_hyperlink( daily_ihope_address, injection_time_utc ) 
            
                # add the row
                cmtable.append(cmrow)

    # drop the sim_rec_map table
    connection.cursor().execute("DROP TABLE sim_rec_map")
   
    return cmtable
def successful_injections(connection,
                          tag,
                          on_ifos,
                          veto_cat,
                          dist_type="distance",
                          weight_dist=False,
                          verbose=False):
    """
    Return the list of simulations that landed inside coincident livetime.

    @connection: connection to a SQLite database with lsctables
    @tag: usertag of the injection set; 'ALL_INJ' selects every injection
        (and falls back to FULL_DATA segments for the livetime)
    @on_ifos: key into the coincident-segments dictionary (comma-joined,
        sorted instrument set) defining the time to test against
    @veto_cat: veto-definer category whose segments are subtracted
    @dist_type: "distance" (chirp distance) or "decisive_distance"
    @weight_dist: passed to the decisive-distance SQL function
    @verbose: passed to compute_dur.get_veto_segments

    Raises ValueError if dist_type is not one of the supported values.
    """

    xmldoc = dbtables.get_xml(connection)
    connection.create_function('end_time_with_ns', 2, end_time_with_ns)

    # Get the veto segments as dictionaries, keyed by veto category
    veto_segments = compute_dur.get_veto_segments(xmldoc, verbose)

    # ------------------------ Get List of Injections ------------------------ #
    sql_params_dict = {}
    sqlquery = """
        SELECT DISTINCT
            simulation_id,
            end_time_with_ns(geocent_end_time, geocent_end_time_ns),"""
    # add the desired distance measure to the SQL query
    if dist_type == "distance":
        connection.create_function('distance_func', 2, chirp_dist)
        sqlquery += """
            distance_func(distance, sim_inspiral.mchirp)
        FROM sim_inspiral """
    elif dist_type == "decisive_distance":
        connection.create_function('decisive_dist_func', 6, decisive_dist)
        sql_params_dict['ifos'] = on_ifos
        sql_params_dict['weight_dist'] = weight_dist
        sqlquery += """
            decisive_dist_func(
                eff_dist_h, eff_dist_l, eff_dist_v,
                sim_inspiral.mchirp, :weight_dist, :ifos)
        FROM sim_inspiral """
    else:
        # fail early with a clear message instead of an SQL syntax error
        # (the query would otherwise be missing its FROM clause)
        raise ValueError("unrecognized dist_type: %r" % (dist_type,))

    if tag != 'ALL_INJ':
        # if a specific injection set is wanted
        # BUGFIX: removed a stray ')' after ':usertag' that made the
        # statement invalid SQL
        sqlquery += """
        JOIN process_params ON (
            process_params.process_id == sim_inspiral.process_id)
        WHERE process_params.value = :usertag """
        sql_params_dict["usertag"] = tag
    else:
        # for all injections
        tag = 'FULL_DATA'

    # Get segments that define which time was filtered
    ifo_segments = compute_dur.get_single_ifo_segments(connection,
                                                       program_name="inspiral",
                                                       usertag=tag)

    zero_lag_dict = dict([(ifo, 0.0) for ifo in ifo_segments])

    successful_inj = []
    # determine coincident segments for that veto category
    coinc_segs = compute_dur.get_coinc_segments(
        ifo_segments - veto_segments[veto_cat], zero_lag_dict)

    # keep only injections whose geocentric end time falls in coincident time
    for injection in connection.execute(sqlquery, sql_params_dict):
        inj_segment = segments.segment(injection[1], injection[1])
        if coinc_segs[on_ifos].intersects_segment(inj_segment):
            successful_inj.append(injection)

    return successful_inj
def sngl_snr_hist(
    connection,
    ifo,
    mchirp,
    eta,
    min_snr,
    snr_stat = None,
    sngls_width = None,
    usertag = "FULL_DATA",
    datatype = None,
    sngls_bins = None):
    """
    Creates a histogram of sngl_inspiral triggers and returns a list of counts
    and the associated snr bins.

    @connection: connection to a SQLite database with lsctables
    @ifo: the instrument one desires triggers from
    @mchirp: the chirp mass from the desired template
    @eta: the symmetric mass ratio from the desired template
    @min_snr: a lower threshold on the value of the snr_stat
    @snr_stat: name of the snr statistic passed to set_getsnr_function
    @sngls_width: the bin width for the histogram (required if sngls_bins
        is not given)
    @usertag: the usertag for the triggers. The default is "FULL_DATA".
    @datatype: the datatype (all_data, slide, ...) if single-ifo triggers from
        coincident events is desired. The default is to collect all triggers.
    @sngls_bins: a list of bin edges for the snr-histogram
    """

    # create function for the desired snr statistic
    set_getsnr_function(connection, snr_stat)

    connection.create_function('end_time_w_ns', 2, end_time_w_ns)

    # set SQL statement parameters
    sql_params_dict = {
        "mchirp": mchirp, "eta": eta,
        "min_snr": min_snr, "ifo": ifo,
        "usertag": usertag}

    # SQLite query to get a list of (snr, gps-time) tuples for inspiral triggers
    sqlquery = """
    SELECT DISTINCT
        get_snr(snr, chisq, chisq_dof) as snr_stat,
        end_time_w_ns(end_time, end_time_ns)
    FROM sngl_inspiral
        JOIN process_params ON (
            process_params.process_id == sngl_inspiral.process_id) """
    # if a datatype is given, get only the inspiral triggers from coincs of that type
    if datatype:
        sqlquery += """
        JOIN coinc_event_map, experiment_map, experiment_summary ON (
            coinc_event_map.event_id == sngl_inspiral.event_id
            AND experiment_map.coinc_event_id == coinc_event_map.coinc_event_id
            AND experiment_summary.experiment_summ_id == experiment_map.experiment_summ_id) """
    sqlquery += """
    WHERE
        sngl_inspiral.ifo == :ifo 
        AND snr_stat >= :min_snr
        AND sngl_inspiral.mchirp == :mchirp
        AND sngl_inspiral.eta == :eta
        AND process_params.value == :usertag """
    if datatype:
        sqlquery += """
        AND experiment_summary.datatype == :type
        """
        sql_params_dict["type"] = datatype

    # get dq-veto segments for the first veto category
    # BUGFIX: dict.keys()[0] fails under Python 3 (keys() is a view);
    # next(iter(...)) works under both Python 2 and 3
    xmldoc = dbtables.get_xml(connection)
    veto_segments = compute_dur.get_veto_segments(xmldoc, False)
    veto_segments = veto_segments[next(iter(veto_segments))]

    # apply vetoes to the list of trigger times; accumulate in a plain list
    # (np.append copies the whole array each call, O(n^2) overall)
    snr_list = []
    for snr, trig_time in connection.execute( sqlquery, sql_params_dict ):
        trig_segment = segments.segment(trig_time, trig_time)
        if not veto_segments[ifo].intersects_segment( trig_segment ):
            snr_list.append(snr)
    snr_array = np.array(snr_list)

    if sngls_bins is None:
        # np.max raises on an empty array; fall back to a single-bin range
        # starting at min_snr when no triggers survive the vetoes
        max_snr = np.max(snr_array) if snr_array.size else min_snr
        sngls_bins = np.arange(min_snr, max_snr + sngls_width, sngls_width)

    # make the binned snr histogram
    sngls_hist, _ = np.histogram(snr_array, bins=sngls_bins)

    return sngls_hist, sngls_bins
def successful_injections(
    connection,
    tag,
    on_ifos,
    veto_cat,
    dist_type = "distance",
    weight_dist = False,
    verbose = False):

    """
    Return the simulations that actually made it into some level of
    coincident time.

    @connection: connection to a SQLite database with lsctables
    @tag: usertag of the injection set; 'ALL_INJ' selects every injection
        (and falls back to FULL_DATA segments for the livetime)
    @on_ifos: key into the coincident-segments dictionary (comma-joined,
        sorted instrument set) defining the time to test against
    @veto_cat: veto-definer category whose segments are subtracted
    @dist_type: "distance" (chirp distance) or "decisive_distance"
    @weight_dist: passed to the decisive-distance SQL function
    @verbose: passed to compute_dur.get_veto_segments

    Raises ValueError if dist_type is not one of the supported values.
    """

    xmldoc = dbtables.get_xml(connection)
    connection.create_function('end_time_with_ns', 2, end_time_with_ns)

    # Get the veto segments as dictionaries, keyed by veto category
    veto_segments = compute_dur.get_veto_segments(xmldoc, verbose)

    # ------------------------ Get List of Injections ------------------------ #
    sql_params_dict = {}
    sqlquery = """
        SELECT DISTINCT
            simulation_id,
            end_time_with_ns(geocent_end_time, geocent_end_time_ns),"""
    # add the desired distance measure to the SQL query
    if dist_type == "distance":
        connection.create_function('distance_func', 2, chirp_dist)
        sqlquery += """
            distance_func(distance, sim_inspiral.mchirp)
        FROM sim_inspiral """
    elif dist_type == "decisive_distance":
        connection.create_function('decisive_dist_func', 6, decisive_dist)
        sql_params_dict['ifos'] = on_ifos
        sql_params_dict['weight_dist'] = weight_dist
        sqlquery += """
            decisive_dist_func(
                eff_dist_h, eff_dist_l, eff_dist_v,
                sim_inspiral.mchirp, :weight_dist, :ifos)
        FROM sim_inspiral """
    else:
        # fail early with a clear message instead of an SQL syntax error
        # (the query would otherwise be missing its FROM clause)
        raise ValueError("unrecognized dist_type: %r" % (dist_type,))

    if tag != 'ALL_INJ':
        # if a specific injection set is wanted
        # BUGFIX: removed a stray ')' after ':usertag' that made the
        # statement invalid SQL
        sqlquery += """
        JOIN process_params ON (
            process_params.process_id == sim_inspiral.process_id)
        WHERE process_params.value = :usertag """
        sql_params_dict["usertag"] = tag
    else:
        # for all injections
        tag = 'FULL_DATA'

    # Get segments that define which time was filtered
    ifo_segments = compute_dur.get_single_ifo_segments(
        connection,
        program_name = "inspiral",
        usertag = tag)

    zero_lag_dict = dict([(ifo, 0.0) for ifo in ifo_segments])

    successful_inj = []
    # determine coincident segments for that veto category 
    coinc_segs = compute_dur.get_coinc_segments(
        ifo_segments - veto_segments[veto_cat],
        zero_lag_dict)

    # keep only injections whose geocentric end time falls in coincident time
    for injection in connection.execute(sqlquery, sql_params_dict):
        inj_segment = segments.segment(injection[1], injection[1])
        if coinc_segs[on_ifos].intersects_segment( inj_segment ):
            successful_inj.append( injection )

    return successful_inj
# Example #7
def sngl_snr_hist(connection,
                  ifo,
                  mchirp,
                  eta,
                  min_snr,
                  snr_stat=None,
                  sngls_width=None,
                  usertag="FULL_DATA",
                  datatype=None,
                  sngls_bins=None):
    """
    Creates a histogram of sngl_inspiral triggers and returns a list of counts
    and the associated snr bins.

    @connection: connection to a SQLite database with lsctables
    @ifo: the instrument one desires triggers from
    @mchirp: the chirp mass from the desired template
    @eta: the symmetric mass ratio from the desired template
    @min_snr: a lower threshold on the value of the snr_stat
    @snr_stat: name of the snr statistic passed to set_getsnr_function
    @sngls_width: the bin width for the histogram (required if sngls_bins
        is not given)
    @usertag: the usertag for the triggers. The default is "FULL_DATA".
    @datatype: the datatype (all_data, slide, ...) if single-ifo triggers from
        coincident events is desired. The default is to collect all triggers.
    @sngls_bins: a list of bin edges for the snr-histogram
    """

    # create function for the desired snr statistic
    set_getsnr_function(connection, snr_stat)

    connection.create_function('end_time_w_ns', 2, end_time_w_ns)

    # set SQL statement parameters
    sql_params_dict = {
        "mchirp": mchirp,
        "eta": eta,
        "min_snr": min_snr,
        "ifo": ifo,
        "usertag": usertag
    }

    # SQLite query to get a list of (snr, gps-time) tuples for inspiral triggers
    sqlquery = """
    SELECT DISTINCT
        get_snr(snr, chisq, chisq_dof) as snr_stat,
        end_time_w_ns(end_time, end_time_ns)
    FROM sngl_inspiral
        JOIN process_params ON (
            process_params.process_id == sngl_inspiral.process_id) """
    # if a datatype is given, get only the inspiral triggers from coincs of that type
    if datatype:
        sqlquery += """
        JOIN coinc_event_map, experiment_map, experiment_summary ON (
            coinc_event_map.event_id == sngl_inspiral.event_id
            AND experiment_map.coinc_event_id == coinc_event_map.coinc_event_id
            AND experiment_summary.experiment_summ_id == experiment_map.experiment_summ_id) """
    sqlquery += """
    WHERE
        sngl_inspiral.ifo == :ifo 
        AND snr_stat >= :min_snr
        AND sngl_inspiral.mchirp == :mchirp
        AND sngl_inspiral.eta == :eta
        AND process_params.value == :usertag """
    if datatype:
        sqlquery += """
        AND experiment_summary.datatype == :type
        """
        sql_params_dict["type"] = datatype

    # get dq-veto segments for the first veto category
    # BUGFIX: dict.keys()[0] fails under Python 3 (keys() is a view);
    # next(iter(...)) works under both Python 2 and 3
    xmldoc = dbtables.get_xml(connection)
    veto_segments = compute_dur.get_veto_segments(xmldoc, False)
    veto_segments = veto_segments[next(iter(veto_segments))]

    # apply vetoes to the list of trigger times; accumulate in a plain list
    # (np.append copies the whole array each call, O(n^2) overall)
    snr_list = []
    for snr, trig_time in connection.execute(sqlquery, sql_params_dict):
        trig_segment = segments.segment(trig_time, trig_time)
        if not veto_segments[ifo].intersects_segment(trig_segment):
            snr_list.append(snr)
    snr_array = np.array(snr_list)

    if sngls_bins is None:
        # np.max raises on an empty array; fall back to a single-bin range
        # starting at min_snr when no triggers survive the vetoes
        max_snr = np.max(snr_array) if snr_array.size else min_snr
        sngls_bins = np.arange(min_snr, max_snr + sngls_width, sngls_width)

    # make the binned snr histogram
    sngls_hist, _ = np.histogram(snr_array, bins=sngls_bins)

    return sngls_hist, sngls_bins