Code example #1
0
def new_table(tab, *args, **kwargs):
    """Build and return a new `~glue.ligolw.Table`.

    Thin convenience layer over `~glue.ligolw.lsctables.New` that also
    accepts a table *name* instead of a table class.

    Parameters
    ----------
    tab : `type`, `str`
        a `~glue.ligolw.Table` subclass, or the name of the table to create
    *args, **kwargs
        all other arguments are forwarded untouched to
        `~glue.ligolw.lsctables.New`

    Returns
    -------
    table : `~glue.ligolw.Table`
        a newly-created table with the relevant attributes and structure
    """
    # resolve a string name to the matching table class first
    if isinstance(tab, str):
        stripped = table.StripTableName(tab)
        tab = lsctables.TableByName[stripped]
    return lsctables.New(tab, *args, **kwargs)
Code example #2
0
File: hveto.py  Project: pvasired/gwsumm
def read_hveto_triggers(f, columns=HVETO_COLUMNS, filt=None, nproc=1):
    """Read a `SnglBurstTable` of triggers from an Hveto txt file.

    Parameters
    ----------
    f : `str`, `CacheEntry`, `Cache`, or iterable
        source of trigger file(s): a single path, a comma-separated list
        of paths, a ``.cache``/``.lcf`` cache file, a `Cache`, a
        `CacheEntry`, or any iterable of paths
    columns : `list` of `str`, optional
        columns to include in the output table
    filt : `callable`, optional
        predicate applied to each `SnglBurst` row; rows for which it
        returns falsy are dropped
    nproc : `int`, optional
        number of parallel processes to use, default ``1`` (serial)

    Returns
    -------
    table : `SnglBurstTable`
        a table of the triggers read from the given file(s)
    """
    # allow multiprocessing: delegate the whole read to the cache reader
    if nproc != 1:
        from gwpy.table.io.cache import read_cache
        return read_cache(f,
                          lsctables.SnglBurstTable.tableName,
                          columns=columns,
                          nproc=nproc,
                          format='hveto')

    # normalise the input into a flat list of file paths
    if isinstance(f, CacheEntry):
        files = [f.path]
    elif isinstance(f, (str, unicode)) and f.endswith(('.cache', '.lcf')):
        files = open_cache(f).pfnlist()
    elif isinstance(f, (str, unicode)):
        files = f.split(',')
    elif isinstance(f, Cache):
        files = f.pfnlist()
    else:
        files = list(f)

    # generate output table
    out = lsctables.New(lsctables.SnglBurstTable, columns=columns)
    append = out.append

    # iterate over files
    # NOTE: use distinct loop names -- the original code reused `f` for
    # both the file path and the per-row frequency, clobbering the path
    # (and the input argument) mid-iteration
    for path in files:
        # ndmin=2 so a file holding a single trigger still yields a 2-D
        # array of (time, frequency, snr) rows instead of a flat vector
        trigs = loadtxt(path, dtype=float, ndmin=2)
        for time, freq, snr in trigs:
            b = lsctables.SnglBurst()
            b.set_peak(LIGOTimeGPS(float(time)))
            b.peak_frequency = freq
            b.snr = snr
            if filt is None or filt(b):
                append(b)
    return out
Code example #3
0
def get_triggers(channel, etg, segments, config=GWSummConfigParser(),
                 cache=None, columns=None, format=None, query=True,
                 multiprocess=False, ligolwtable=None, filter=None,
                 timecolumn=None, return_=True):
    """Read a table of transient event triggers for a given channel.

    Triggers are cached in global memory under a ``(channel, etg)`` key so
    repeated calls only read segments not already in memory.

    Parameters
    ----------
    channel : `str`
        name of the data channel whose triggers are wanted
    etg : `str`
        name of the event-trigger generator, e.g. ``'omicron'``
    segments : `DataQualityFlag`, `SegmentList`
        time intervals over which to read triggers
    config : `GWSummConfigParser`, optional
        configuration providing per-ETG read options (columns, format)
    cache : `Cache`, optional
        pre-located cache of trigger files; if `None`, files are
        discovered via `trigfind`
    columns : `list` of `str`, optional
        columns to read; defaults to the ETG's section in ``config``
    format : `str`, optional
        override for the table-read ``format`` keyword
    query : `bool`, optional
        if `False`, do not read any new data, only use global memory
    multiprocess : `bool` or `int`, optional
        number of parallel processes (`True` means all free cores)
    ligolwtable : optional
        accepted for API compatibility; not used in this body --
        TODO confirm against callers
    filter : `str` or `list`, optional
        filter definition(s) applied to the returned copy
    timecolumn : `str`, optional
        override for the table-read ``timecolumn`` keyword
    return_ : `bool`, optional
        if `False`, read into global memory and return `None`

    Returns
    -------
    table or `None`
        the (possibly filtered) triggers within ``segments``, or `None`
        when ``return_=False``
    """
    key = '%s,%s' % (str(channel), etg.lower())
    # convert input segments to a segmentlist (for convenience)
    if isinstance(segments, DataQualityFlag):
        segments = segments.active
    segments = SegmentList(segments)

    # get processes
    if multiprocess is True:
        nproc = count_free_cores()
    elif multiprocess is False:
        nproc = 1
    else:
        nproc = multiprocess

    # find LIGO_LW table for this ETG (None if the ETG has no mapping)
    try:
        TableClass = get_etg_table(etg)
    except KeyError:
        TableClass = None

    # work out columns from config, stripping quotes/whitespace
    if columns is None:
        try:
            columns = config.get(etg.lower(), 'columns').split(',')
        except (NoSectionError, NoOptionError):
            columns = None
        else:
            columns = [c.strip(' \'\"') for c in columns]

    # read segments from global memory; only fetch what is missing
    try:
        havesegs = globalv.TRIGGERS[key].meta['segments']
    except KeyError:
        new = segments
    else:
        new = segments - havesegs

    # read new triggers
    if query and abs(new) != 0:
        ntrigs = 0
        vprint("    Grabbing %s triggers for %s" % (etg, str(channel)))

        # store read kwargs; split off 'trigfind-' prefixed options into
        # their own dict.  Iterate over a snapshot (list) of the keys
        # because pop() mutates the dict during the comprehension, which
        # raises RuntimeError on Python 3.
        kwargs = get_etg_read_kwargs(config, etg, exclude=['columns'])
        trigfindkwargs = dict((k[9:], kwargs.pop(k)) for k in list(kwargs) if
                              k.startswith('trigfind-'))

        # explicit arguments take precedence over config values
        if format is not None:
            kwargs['format'] = format
        if timecolumn is not None:
            kwargs['timecolumn'] = timecolumn
        if 'format' not in kwargs:
            try:
                kwargs['format'] = get_etg_format(etg)
            except KeyError:
                kwargs['format'] = etg.lower()
        if kwargs['format'].startswith('ascii.'):  # customise column selection
            kwargs['include_names'] = columns
        else:
            kwargs['columns'] = columns
        if etg.lower().replace('-', '_') in ['pycbc_live']:
            kwargs['ifo'] = get_channel(channel).ifo

        # if single file, read everything in one go
        if cache is not None and len(cache) == 1:
            trigs = read_cache(cache, new, etg, nproc=nproc, **kwargs)
            if trigs is not None:
                add_triggers(trigs, key)
                ntrigs += len(trigs)
        # otherwise, loop over segments
        else:
            for segment in new:
                # find trigger files
                if cache is None and etg.lower() in ['kw', 'kleinewelle']:
                    # KW files are multi-channel; filter rows to this one
                    kwargs['filt'] = lambda t: t.channel == str(channel)
                if cache is None and not etg.lower() == 'hacr':
                    try:
                        segcache = trigfind.find_trigger_urls(
                            str(channel), etg, segment[0], segment[1],
                            **trigfindkwargs)
                    except ValueError as e:
                        warnings.warn("Caught %s: %s"
                                      % (type(e).__name__, str(e)))
                        continue
                elif cache is not None:
                    segcache = cache
                # read table (HACR comes from a database, not files)
                if etg.lower() == 'hacr':
                    from gwpy.table.io.hacr import get_hacr_triggers
                    trigs = get_hacr_triggers(channel, segment[0], segment[1],
                                              columns=columns)
                    trigs.meta['segments'] = SegmentList([segment])
                else:
                    trigs = read_cache(segcache, SegmentList([segment]), etg,
                                       nproc=nproc, **kwargs)
                if trigs is not None:
                    add_triggers(trigs, key)
                    ntrigs += len(trigs)
                vprint(".")
        vprint(" | %d events read\n" % ntrigs)

    # if asked to read triggers, but didn't actually read any,
    # create an empty table so that subsequent calls don't raise KeyErrors
    if query and key not in globalv.TRIGGERS:
        if columns is None and TableClass is not None:
            tab = EventTable(lsctables.New(TableClass), get_as_columns=True)
        else:
            tab = EventTable(names=columns)
        tab.meta['segments'] = SegmentList()
        add_triggers(tab, key)

    # return the triggers restricted to the requested segments
    if return_:
        trigs = keep_in_segments(globalv.TRIGGERS[key], segments, etg)
        if filter:
            if isinstance(filter, string_types):
                filter = filter.split(' ')
            # if a filter string is provided, return a filtered copy of
            # the trigger set stored in global memory
            return filter_triggers(trigs, *filter)
        return trigs
    else:
        return
Code example #4
0
File: triggers.py  Project: andrew-lundgren/hveto
def get_triggers(channel, etg, segments, cache=None, snr=None, frange=None,
                 columns=None, raw=False, **kwargs):
    """Get triggers for the given channel.

    Parameters
    ----------
    channel : `str`
        name of the data channel whose triggers are wanted
    etg : `str`
        name of the event-trigger generator; must be a key of ``TABLE``
    segments : `SegmentList`
        time intervals over which to read triggers
    cache : `Cache`, optional
        pre-located cache of trigger files; if `None` files are found
        via `find_trigger_files`
    snr : `float`, optional
        minimum SNR threshold; rows below it are discarded
    frange : 2-tuple of `float`, optional
        ``(low, high)`` frequency range for burst tables
    columns : `list` of `str`, optional
        columns to read; defaults to the ``COLUMNS`` entry for the table
    raw : `bool`, optional
        if `True`, return the filtered `numpy.recarray` without the
        hveto-specific channel/time/frequency reformatting
    **kwargs
        extra arguments passed to `find_trigger_files`

    Returns
    -------
    recarray : `numpy.recarray`
        the triggers, filtered and (unless ``raw``) reformatted

    Raises
    ------
    KeyError
        if ``etg`` does not map to a LIGO_LW table class
    """
    # get table class from etg
    try:
        Table = TABLE[etg.lower()]
    except KeyError as e:
        e.args = ('Unknown ETG %r, cannot map to LIGO_LW Table class' % etg,)
        raise
    tablename = strip_table_name(Table.tableName)
    # get default columns for this table
    if columns is None:
        for key in COLUMNS:
            if issubclass(Table, key):
                columns = COLUMNS[key][:]
                break
    # 'channel' is re-attached manually below, so don't read it from file.
    # NOTE: list.pop() takes an integer index, so the original
    # columns.pop('channel') raised TypeError; remove() deletes by value.
    # Also guard against columns still being None when no COLUMNS entry
    # matched the table class.
    if columns and 'channel' in columns:
        columns.remove('channel')

    # find triggers
    if cache is None:
        cache = find_trigger_files(channel, etg, segments, **kwargs)

    # read cache, keeping only rows whose event time lies in a segment
    trigs = lsctables.New(Table, columns=columns)
    cache = cache.unique()
    cache.sort(key=lambda x: x.segment[0])
    for segment in segments:
        if len(cache.sieve(segment=segment)):
            if tablename.endswith('_inspiral'):
                filt = lambda t: float(t.get_end()) in segment
            else:
                filt = lambda t: float(t.get_peak()) in segment
            trigs.extend(Table.read(cache.sieve(segment=segment), filt=filt))

    # format table as numpy.recarray
    recarray = trigs.to_recarray(columns=columns)

    # filter on SNR and (for bursts) frequency range
    if snr is not None:
        recarray = recarray[recarray['snr'] >= snr]
    if tablename.endswith('_burst') and frange is not None:
        recarray = recarray[
            (recarray['peak_frequency'] >= frange[0]) &
            (recarray['peak_frequency'] < frange[1])]

    # return basic table if 'raw'
    if raw:
        return recarray

    # otherwise spend the rest of this function converting columns to
    # something useful for the hveto core analysis
    addfields = {}
    dropfields = []

    # append channel to all events
    columns.append('channel')
    addfields['channel'] = numpy.repeat(channel, recarray.shape[0])

    # rename frequency column for burst tables
    if tablename.endswith('_burst'):
        recarray = recfunctions.rename_fields(
            recarray, {'peak_frequency': 'frequency'})
        idx = columns.index('peak_frequency')
        columns.pop(idx)
        columns.insert(idx, 'frequency')

    # map time to its own float column (seconds + nanoseconds)
    if tablename.endswith('_inspiral'):
        tcols = ['end_time', 'end_time_ns']
    elif tablename.endswith('_burst'):
        tcols = ['peak_time', 'peak_time_ns']
    else:
        tcols = None
    if tcols:
        times = recarray[tcols[0]] + recarray[tcols[1]] * 1e-9
        addfields['time'] = times
        dropfields.extend(tcols)
        # assumes the two time columns occupy columns[0:2] -- TODO confirm
        # against the COLUMNS definitions
        columns = ['time'] + columns[2:]

    # add and remove fields as required
    if addfields:
        names, data = zip(*addfields.items())
        recarray = recfunctions.rec_append_fields(recarray, names, data)
        recarray = recfunctions.rec_drop_fields(recarray, dropfields)

    return recarray[columns]
Code example #5
0
File: triggers.py  Project: pvasired/gwsumm
def get_triggers(channel, etg, segments, config=ConfigParser(), cache=None,
                 query=True, multiprocess=False, tablename=None,
                 columns=None, contenthandler=None, return_=True):
    """Read a table of transient event triggers for a given channel.

    Triggers are accumulated in global memory (``globalv.TRIGGERS``)
    keyed by ``(channel, etg)``, so repeated calls only read time
    segments that have not been read before.

    Parameters
    ----------
    channel : `str`
        name of the data channel whose triggers are wanted
    etg : `str`
        name of the event-trigger generator
    segments : `DataQualityFlag`, `SegmentList`
        time intervals over which to read triggers
    config : `ConfigParser`, optional
        configuration providing a per-ETG ``columns`` option
    cache : `Cache`, optional
        pre-located cache of trigger files; if `None` files are
        discovered per-ETG (trigfind, find_kw, find_dmt_omega)
    query : `bool`, optional
        if `False`, do not read any new data, only use global memory
    multiprocess : `bool` or `int`, optional
        declared for API compatibility; not referenced in this body
    tablename : `str`, optional
        LIGO_LW table name to force for this ETG
    columns : `list` of `str`, optional
        columns to read; defaults to the ETG section of ``config``,
        or all valid columns of the table class
    contenthandler : optional
        LIGO_LW XML content handler; a partial handler for the table
        class is built when `None`
    return_ : `bool`, optional
        if `False`, read into global memory and return `None`

    Returns
    -------
    table or `None`
        a new table of the triggers within ``segments``, or `None`
        when ``return_=False``
    """
    key = '%s,%s' % (str(channel), etg.lower())
    # normalise input to a plain SegmentList
    if isinstance(segments, DataQualityFlag):
        segments = segments.active
    segments = SegmentList(segments)

    # get LIGO_LW table class for this etg: explicit name wins, then the
    # class of whatever is already cached, then the registered default
    if tablename:
        TableClass = lsctables.TableByName[tablename]
        register_etg_table(etg, TableClass, force=True)
    elif key in globalv.TRIGGERS:
        TableClass = type(globalv.TRIGGERS[key])
    else:
        TableClass = get_etg_table(etg)

    # work out columns (config first, then all valid columns;
    # cWB ASCII has no fixed LIGO_LW column set, so leave as None)
    if columns is None:
        try:
            columns = config.get(etg, 'columns').split(',')
        except (NoSectionError, NoOptionError):
            if etg.lower() in ['cwb', 'cwb-ascii']:
                columns = None
            else:
                columns = TableClass.validcolumns.keys()
    if columns is not None:
        # these columns are always needed by downstream filtering
        for col in ['process_id', 'search', 'channel']:
            if col not in columns:
                columns.append(col)

    # read segments from global memory; create an empty cached table
    # (with an empty segment list) on first use for this key
    try:
        havesegs = globalv.TRIGGERS[key].segments
    except KeyError:
        new = segments
        globalv.TRIGGERS.setdefault(
            key, lsctables.New(TableClass, columns=columns))
        globalv.TRIGGERS[key].segments = type(segments)()
    else:
        # only read segments not already in memory
        new = segments - havesegs

    # read new triggers
    query &= (abs(new) != 0)
    if query:
        # store read kwargs
        kwargs = {'columns': columns}

        # set content handler for LIGO_LW XML parsing
        if contenthandler is None:
            contenthandler = get_partial_contenthandler(TableClass)
        lsctables.use_in(contenthandler)

        for segment in new:
            # default row filter: keep events whose time is in this
            # segment (the lambda binds `segment` from this iteration and
            # is consumed by the read below — presumably not stored
            # beyond it; verify if read is ever made lazy)
            kwargs['filt'] = (
                lambda t: float(get_row_value(t, 'time')) in segment)
            # find trigger files, per ETG
            if cache is None and etg.lower() == 'hacr':
                raise NotImplementedError("HACR parsing has not been "
                                          "implemented.")
            if cache is None and re.match('dmt(.*)omega', etg.lower()):
                segcache = find_dmt_omega(channel, segment[0], segment[1])
                kwargs['format'] = 'ligolw'
            elif cache is None and etg.lower() in ['kw', 'kleinewelle']:
                segcache = find_kw(channel, segment[0], segment[1])
                kwargs['format'] = 'ligolw'
                # KW files are multi-channel: also filter rows on channel
                kwargs['filt'] = lambda t: (
                    float(get_row_value(t, 'time')) in segment and
                    t.channel == str(channel))
            elif cache is None:
                segcache = trigfind.find_trigger_urls(str(channel), etg,
                                                      segment[0],
                                                      segment[1])
                kwargs['format'] = 'ligolw'
            elif isinstance(cache, Cache):
                segcache = cache.sieve(segment=segment)
            else:
                segcache = cache
            if isinstance(segcache, Cache):
                # drop cache entries whose files are missing on disk
                segcache = segcache.checkfilesexist()[0]
            if 'format' not in kwargs and 'ahope' not in etg.lower():
                kwargs['format'] = etg.lower()
            if (issubclass(TableClass, lsctables.SnglBurstTable) and
                    etg.lower().startswith('cwb')):
                kwargs['ifo'] = get_channel(channel).ifo
            # read triggers and store in global memory
            if len(segcache) == 0:
                continue
            if kwargs.get('format', None) == 'ligolw':
                kwargs['contenthandler'] = contenthandler
            table = TableClass.read(segcache, **kwargs)
            globalv.TRIGGERS[key].extend(table)
            # record which segments were actually covered by the files
            try:
                csegs = cache_segments(segcache)
            except AttributeError:
                csegs = SegmentList()
            try:
                globalv.TRIGGERS[key].segments.extend(csegs)
            except AttributeError:
                globalv.TRIGGERS[key].segments = csegs
            finally:
                globalv.TRIGGERS[key].segments.coalesce()
            vprint('\r')

    # build the return table from global memory
    if return_:
        times = get_table_column(globalv.TRIGGERS[key], 'time').astype(float)

        # return correct triggers: a fresh table holding only the rows
        # whose time falls within the requested segments
        out = lsctables.New(TableClass, columns=columns)
        out.channel = str(channel)
        out.etg = str(etg)
        out.extend(t for (i, t) in enumerate(globalv.TRIGGERS[key]) if
                   times[i] in segments)
        out.segments = segments & globalv.TRIGGERS[key].segments
        return out
    else:
        return