def primary_vetoed(starttime=None, hveto_path=None, snr=6.0, significance=5.0):
    """Build a catalogue of primary triggers vetoed by an hveto analysis

    Reads the per-round ``*VETOED*.txt`` trigger files and the
    ``summary-stats.txt`` table from an hveto output directory, annotates
    each vetoed trigger with its winning auxiliary channel, that channel's
    significance, and the round number, then filters on SNR and
    significance thresholds.

    Parameters
    ----------
    starttime : `str` or `float`, optional
        start GPS time of the analysis; used to locate the hveto
        output directory via `const.get_hvetopath`

    hveto_path : `str`, optional
        path to the hveto files directory, only used when ``starttime``
        is not given

    snr : `float`, optional
        signal-to-noise ratio threshold on triggers, default: 6.0

    significance : `float`, optional
        hveto significance threshold on auxiliary channels, default: 5.0

    Returns
    -------
    catalogue : `~gwpy.table.EventTable`
        tabular catalogue of vetoed primary triggers; empty if the
        analysis output could not be found
    """
    path = const.get_hvetopath(starttime) if starttime else hveto_path
    # start from an empty table so a missing analysis still returns a table
    catalogue = EventTable(names=[
        'time', 'snr', 'peak_frequency', 'channel', 'winner', 'significance'
    ])
    try:
        trigfiles = glob.glob(os.path.join(path, 'triggers', '*VETOED*.txt'))
        summary = EventTable.read(os.path.join(path, 'summary-stats.txt'),
                                  format='ascii')
        nrounds = len(summary)
        # NOTE(review): glob order is filesystem-dependent; this assumes the
        # trigger files line up one-per-round with the summary rows — confirm
        catalogue = EventTable.read(trigfiles[:nrounds], format='ascii')
        counts = summary['nveto']
        # repeat each round's winner/significance once per vetoed trigger
        sigdata = []
        windata = []
        rounddata = []
        for i in range(nrounds):
            nrep = counts[i]
            sigdata.extend([round(summary['significance'][i], 4)] * nrep)
            windata.extend([summary['winner'][i]] * nrep)
            rounddata.extend([i + 1] * nrep)
        catalogue.add_column(Column(data=windata, name='winner'))
        catalogue.add_column(Column(data=sigdata, name='significance'))
        catalogue.add_column(Column(data=rounddata, name='round'))
        # apply the SNR and significance thresholds
        catalogue = catalogue.filter('snr>{0}'.format(snr),
                                     'significance>{0}'.format(significance))
    except (FileNotFoundError, ValueError):
        warnings.warn("Could not find Hveto analysis for this day")
    return catalogue
def get_triggers(channel, etg, segments, cache=None, snr=None, frange=None,
                 raw=False, trigfind_kwargs=None, **read_kwargs):
    """Get triggers for the given channel

    Parameters
    ----------
    channel : `str`
        name of the data channel to read triggers for

    etg : `str`
        name of the event-trigger generator; must be a key of
        ``DEFAULT_FORMAT`` (after sanitising)

    segments : `~gwpy.segments.SegmentList`
        time segments to read triggers over

    cache : `list`, optional
        file cache to read from; discovered via `find_trigger_files`
        when not given

    snr : `float`, optional
        if given, keep only rows with SNR-like column ``>= snr``

    frange : `tuple` of `float`, optional
        if given, keep only rows with frequency-like column in
        ``[frange[0], frange[1])``

    raw : `bool`, optional
        if `True`, return the filtered table without renaming the time
        column, adding the channel column, or sorting

    trigfind_kwargs : `dict`, optional
        extra keyword arguments for trigger-file discovery

    **read_kwargs
        extra keyword arguments for `EventTable.read`

    Raises
    ------
    ValueError
        if ``etg`` is not a supported event-trigger generator
    """
    # fix: avoid a shared mutable default argument ({}) by normalising here
    trigfind_kwargs = dict(trigfind_kwargs or {})
    etg = _sanitize_name(etg)
    # format arguments
    try:
        readfmt = read_kwargs.pop("format", DEFAULT_FORMAT[etg])
    except KeyError:
        raise ValueError("unsupported ETG {!r}".format(etg))
    trigfind_kwargs, read_kwargs = _format_params(channel, etg, readfmt,
                                                  trigfind_kwargs, read_kwargs)

    # find triggers
    if cache is None:
        cache = find_trigger_files(channel, etg, segments, **trigfind_kwargs)

    # read files, one segment at a time
    tables = []
    for segment in segments:
        segaslist = SegmentList([segment])
        segcache = io_cache.sieve(cache, segment=segment)
        # try and work out if cache overextends segment (so we need to crop)
        cachesegs = io_cache.cache_segments(segcache)
        outofbounds = abs(cachesegs - segaslist)
        if segcache:
            if len(segcache) == 1:  # just pass the single filename
                segcache = segcache[0]
            new = EventTable.read(segcache, **read_kwargs)
            # keep only the metadata keys we care about
            new.meta = {k: new.meta[k] for k in TABLE_META if new.meta.get(k)}
            if outofbounds:
                # crop rows (keyed on the first column) to the segment
                new = new[in_segmentlist(new[new.dtype.names[0]], segaslist)]
            tables.append(new)
    if len(tables):
        table = vstack_tables(tables)
    else:
        table = EventTable(
            names=read_kwargs.get('columns', ['time', 'frequency', 'snr']))

    # parse time, frequency-like and snr-like column names
    columns = table.dtype.names
    tcolumn = columns[0]
    fcolumn = columns[1]
    scolumn = columns[2]

    # filter on SNR and frequency range
    keep = numpy.ones(len(table), dtype=bool)
    if snr is not None:
        keep &= table[scolumn] >= snr
    if frange is not None:
        keep &= table[fcolumn] >= frange[0]
        keep &= table[fcolumn] < frange[1]
    table = table[keep]

    # return basic table if 'raw'
    if raw:
        return table

    # rename time column so that all tables match in at least that
    if tcolumn != "time":
        table.rename_column(tcolumn, 'time')

    # add channel column to identify all triggers
    table.add_column(
        table.Column(data=numpy.repeat(channel, len(table)), name='channel'))

    table.sort('time')
    return table
def get_triggers(channel, etg, segments, cache=None, snr=None, frange=None,
                 raw=False, trigfind_kwargs=None, **read_kwargs):
    """Get triggers for the given channel

    Parameters
    ----------
    channel : `str`
        name of the data channel to read triggers for

    etg : `str`
        name of the event-trigger generator; must be a key of
        ``DEFAULT_FORMAT`` (after sanitising)

    segments : `~gwpy.segments.SegmentList`
        time segments to read triggers over

    cache : `list`, optional
        file cache to read from; discovered via `find_trigger_files`
        when not given

    snr : `float`, optional
        if given, keep only rows with SNR-like column ``>= snr``

    frange : `tuple` of `float`, optional
        if given, keep only rows with frequency-like column in
        ``[frange[0], frange[1])``

    raw : `bool`, optional
        if `True`, return the filtered table without renaming the time
        column, adding the channel column, or sorting

    trigfind_kwargs : `dict`, optional
        extra keyword arguments for trigger-file discovery

    **read_kwargs
        extra keyword arguments for `EventTable.read`

    Raises
    ------
    ValueError
        if ``etg`` is not a supported event-trigger generator
    """
    # fix: avoid a shared mutable default argument ({}) by normalising here
    trigfind_kwargs = dict(trigfind_kwargs or {})
    etg = _sanitize_name(etg)
    # format arguments
    try:
        readfmt = read_kwargs.pop("format", DEFAULT_FORMAT[etg])
    except KeyError:
        raise ValueError("unsupported ETG {!r}".format(etg))
    trigfind_kwargs, read_kwargs = _format_params(
        channel, etg, readfmt, trigfind_kwargs, read_kwargs
    )

    # find triggers
    if cache is None:
        cache = find_trigger_files(channel, etg, segments, **trigfind_kwargs)

    # read files, one segment at a time
    tables = []
    for segment in segments:
        segaslist = SegmentList([segment])
        segcache = io_cache.sieve(cache, segment=segment)
        # try and work out if cache overextends segment (so we need to crop)
        cachesegs = io_cache.cache_segments(segcache)
        outofbounds = abs(cachesegs - segaslist)
        if segcache:
            if len(segcache) == 1:  # just pass the single filename
                segcache = segcache[0]
            new = EventTable.read(segcache, **read_kwargs)
            # keep only the metadata keys we care about
            new.meta = {k: new.meta[k] for k in TABLE_META if new.meta.get(k)}
            if outofbounds:
                # crop rows (keyed on the first column) to the segment
                new = new[new[new.dtype.names[0]].in_segmentlist(segaslist)]
            tables.append(new)
    if len(tables):
        table = vstack_tables(tables)
    else:
        table = EventTable(names=read_kwargs.get(
            'columns', ['time', 'frequency', 'snr']))

    # parse time, frequency-like and snr-like column names
    columns = table.dtype.names
    tcolumn = columns[0]
    fcolumn = columns[1]
    scolumn = columns[2]

    # filter on SNR and frequency range
    keep = numpy.ones(len(table), dtype=bool)
    if snr is not None:
        keep &= table[scolumn] >= snr
    if frange is not None:
        keep &= table[fcolumn] >= frange[0]
        keep &= table[fcolumn] < frange[1]
    table = table[keep]

    # return basic table if 'raw'
    if raw:
        return table

    # rename time column so that all tables match in at least that
    if tcolumn != "time":
        table.rename_column(tcolumn, 'time')

    # add channel column to identify all triggers
    table.add_column(table.Column(data=numpy.repeat(channel, len(table)),
                                  name='channel'))

    table.sort('time')
    return table