def test_get_table_row_methods(self):
    testfile = SnglBurstTableTestCase.TEST_XML_FILE
    table = GWRecArray.read(testfile, format='sngl_burst')
    # test simple column
    snr = get_table_column(table, 'snr')
    nptest.assert_array_equal(snr, table['snr'])
    # test 'time' special-case
    time = get_table_column(table, 'time')
    nptest.assert_array_equal(
        time, table['peak_time'] + table['peak_time_ns'] * 1e-9)
    # test row
    row = table[0]
    self.assertEqual(get_row_value(row, 'snr'), row['snr'])
    self.assertEqual(get_row_value(row, 'time'),
                     row['peak_time'] + row['peak_time_ns'] * 1e-9)
def vetoed(table, flag):
    """Apply a veto to an event table and return the vetoed events.
    """
    if isinstance(flag, DataQualityFlag):
        flag = flag.active
    # table.copy() returns an empty table with the same columns, so only
    # rows whose time falls inside the veto segments are appended
    out = table.copy()
    for row in table:
        if float(get_row_value(row, 'time')) in flag:
            out.append(row)
    return out
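
# Usage sketch (an assumption, not part of the original module): select the
# events that fall inside the active segments of a data-quality flag.  The
# flag name and GPS span below are illustrative placeholders only.
def _example_vetoed(table):
    from gwpy.segments import DataQualityFlag
    # query a data-quality flag over an arbitrary GPS span
    flag = DataQualityFlag.query('L1:DMT-ETMY_ESD_DAC_OVERFLOW:1',
                                 1126259446, 1126259478)
    # rows whose 'time' lies inside flag.active are returned
    return vetoed(table, flag)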
def add_loudest(self, table, rank, x, y, *columns, **kwargs):
    """Display the loudest event according to some rank.

    The loudest event is displayed as a gold star at its position
    given by the values in columns ``x`` and ``y``, and those values
    are displayed in a text box.

    Parameters
    ----------
    table : `EventTable`
        event table in which to find the loudest event

    rank : `str`
        name of column to use for ranking

    x : `str`
        name of column to display on the X-axis

    y : `str`
        name of column to display on the Y-axis

    *columns : `str`, optional
        names of any other columns to include in the text box

    **kwargs
        any other arguments applicable to
        :meth:`~matplotlib.axes.Axes.text`

    Returns
    -------
    out : `tuple`
        (`collection`, `text`) tuple of items added to the `Axes`
    """
    ylim = self.get_ylim()
    try:
        idx = table[rank].argmax()
    except TypeError:
        if hasattr(table, 'tableName'):  # glue.ligolw.table.Table
            from gwpy.table.utils import (get_table_column, get_row_value)
            warnings.warn('EventTableAxes.add_loudest will stop '
                          'supporting glue.ligolw.table.Table objects '
                          'before the 1.0 release of GWpy, please move '
                          'to using the gwpy.table.EventTable')
            use_ligolw = True
            idx = get_table_column(table, rank).argmax()
        else:
            raise
    else:
        use_ligolw = False
    row = table[idx]
    disp = "Loudest event:"
    columns = [x, y, rank] + list(columns)
    scat = []
    for i, column in enumerate(columns):
        if not column or column in columns[:i]:
            continue
        if i:
            disp += ','
        try:
            val = row[column]
        except TypeError:
            if use_ligolw:  # DEPRECATED - remove before 1.0 release
                val = get_row_value(row, column)
            else:
                raise
        if i < 2:
            scat.append([float(val)])
        column = get_column_string(column)
        if pyplot.rcParams['text.usetex'] and column.endswith('Time'):
            disp += (r" %s$= %s$" % (column, LIGOTimeGPS(float(val))))
        elif pyplot.rcParams['text.usetex']:
            disp += (r" %s$=$ %s" % (column, float_to_latex(val, '%.3g')))
        else:
            disp += " %s = %.2g" % (column, val)
    disp = disp.rstrip(',')
    pos = kwargs.pop('position', [0.5, 1.00])
    kwargs.setdefault('transform', self.axes.transAxes)
    kwargs.setdefault('verticalalignment', 'bottom')
    kwargs.setdefault('horizontalalignment', 'center')
    args = pos + [disp]
    self.scatter(*scat, marker='*', zorder=1000, facecolor='gold',
                 edgecolor='black', s=200)
    self.text(*args, **kwargs)
    if self.get_title():
        pos = self.title.get_position()
        self.title.set_position((pos[0], pos[1] + 0.05))
    self.set_ylim(*ylim)
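
# Usage sketch (an assumption, not part of the original module): star the
# highest-SNR event on an existing event-table plot.  The column names
# ('time', 'central_freq', 'snr') are typical SnglBurst columns and the
# ``plot``/``table`` arguments are placeholders supplied by the caller.
def _example_add_loudest(plot, table):
    ax = plot.gca()
    # draw the gold star at (time, central_freq) of the loudest-by-SNR row
    # and print its time, frequency, and SNR in a text box above the axes
    ax.add_loudest(table, 'snr', 'time', 'central_freq')
    return plot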
def get_triggers(channel, etg, segments, config=ConfigParser(), cache=None,
                 query=True, multiprocess=False, tablename=None,
                 columns=None, contenthandler=None, return_=True):
    """Read a table of transient event triggers for a given channel.
    """
    key = '%s,%s' % (str(channel), etg.lower())
    if isinstance(segments, DataQualityFlag):
        segments = segments.active
    segments = SegmentList(segments)

    # get LIGO_LW table for this etg
    if tablename:
        TableClass = lsctables.TableByName[tablename]
        register_etg_table(etg, TableClass, force=True)
    elif key in globalv.TRIGGERS:
        TableClass = type(globalv.TRIGGERS[key])
    else:
        TableClass = get_etg_table(etg)

    # work out columns
    if columns is None:
        try:
            columns = config.get(etg, 'columns').split(',')
        except (NoSectionError, NoOptionError):
            if etg.lower() in ['cwb', 'cwb-ascii']:
                columns = None
            else:
                columns = TableClass.validcolumns.keys()
    if columns is not None:
        for col in ['process_id', 'search', 'channel']:
            if col not in columns:
                columns.append(col)

    # read segments from global memory
    try:
        havesegs = globalv.TRIGGERS[key].segments
    except KeyError:
        new = segments
        globalv.TRIGGERS.setdefault(
            key, lsctables.New(TableClass, columns=columns))
        globalv.TRIGGERS[key].segments = type(segments)()
    else:
        new = segments - havesegs

    # read new triggers
    query &= (abs(new) != 0)
    if query:
        # store read kwargs
        kwargs = {'columns': columns}

        # set content handler
        if contenthandler is None:
            contenthandler = get_partial_contenthandler(TableClass)
        lsctables.use_in(contenthandler)

        for segment in new:
            kwargs['filt'] = (
                lambda t: float(get_row_value(t, 'time')) in segment)
            # find trigger files
            if cache is None and etg.lower() == 'hacr':
                raise NotImplementedError("HACR parsing has not been "
                                          "implemented.")
            if cache is None and re.match('dmt(.*)omega', etg.lower()):
                segcache = find_dmt_omega(channel, segment[0], segment[1])
                kwargs['format'] = 'ligolw'
            elif cache is None and etg.lower() in ['kw', 'kleinewelle']:
                segcache = find_kw(channel, segment[0], segment[1])
                kwargs['format'] = 'ligolw'
                kwargs['filt'] = lambda t: (
                    float(get_row_value(t, 'time')) in segment and
                    t.channel == str(channel))
            elif cache is None:
                segcache = trigfind.find_trigger_urls(str(channel), etg,
                                                      segment[0], segment[1])
                kwargs['format'] = 'ligolw'
            elif isinstance(cache, Cache):
                segcache = cache.sieve(segment=segment)
            else:
                segcache = cache
            if isinstance(segcache, Cache):
                segcache = segcache.checkfilesexist()[0]
            if 'format' not in kwargs and 'ahope' not in etg.lower():
                kwargs['format'] = etg.lower()
            if (issubclass(TableClass, lsctables.SnglBurstTable) and
                    etg.lower().startswith('cwb')):
                kwargs['ifo'] = get_channel(channel).ifo
            # read triggers and store
            if len(segcache) == 0:
                continue
            if kwargs.get('format', None) == 'ligolw':
                kwargs['contenthandler'] = contenthandler
            table = TableClass.read(segcache, **kwargs)
            globalv.TRIGGERS[key].extend(table)
            try:
                csegs = cache_segments(segcache)
            except AttributeError:
                csegs = SegmentList()
            try:
                globalv.TRIGGERS[key].segments.extend(csegs)
            except AttributeError:
                globalv.TRIGGERS[key].segments = csegs
            finally:
                globalv.TRIGGERS[key].segments.coalesce()
            vprint('\r')

    # work out time function
    if return_:
        times = get_table_column(globalv.TRIGGERS[key], 'time').astype(float)

        # return correct triggers
        out = lsctables.New(TableClass, columns=columns)
        out.channel = str(channel)
        out.etg = str(etg)
        out.extend(t for (i, t) in enumerate(globalv.TRIGGERS[key])
                   if times[i] in segments)
        out.segments = segments & globalv.TRIGGERS[key].segments
        return out
    else:
        return
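
# Usage sketch (an assumption, not part of the original module): fetch Omicron
# triggers for one channel over a single segment.  The channel name and GPS
# times are illustrative placeholders.
def _example_get_triggers():
    from gwpy.segments import Segment, SegmentList
    segs = SegmentList([Segment(1126259446, 1126259478)])
    # the first call reads the trigger files and caches rows in
    # globalv.TRIGGERS; the return value contains only rows whose 'time'
    # lies inside ``segs``
    return get_triggers('L1:GDS-CALIB_STRAIN', 'omicron', segs)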
def write_state_html(self, state):
    """Write the '#main' HTML content for this `DailyAhopeTab`.
    """
    daydir = os.path.split(self.segmentfile)[0]
    # did it run
    if not os.path.isdir(daydir):
        page = html.markup.page()
        page.div(class_='alert alert-warning')
        page.p("No analysis was performed for this period, "
               "please try again later.")
        page.p("If you believe these data should have been found, please "
               "contact %s."
               % html.markup.oneliner.a('the CBC DQ group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    elif (not os.path.isfile(self.segmentfile) or
          len(self.states[0].active) != 0 and
          not os.path.isfile(self.inspiralcachefile)):
        page = html.markup.page()
        page.div(class_='alert alert-danger')
        page.p("This analysis seems to have failed.")
        page.p("If you believe these data should have been found, please "
               "contact %s."
               % html.markup.oneliner.a('the CBC DQ group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    elif len(self.states[0].active) == 0:
        page = html.markup.page()
        page.div(class_='alert alert-info')
        page.p("This analysis found no segments over which to run.")
        page.p("If you believe this to be an error, please contact %s."
               % html.markup.oneliner.a('the CBC DQ group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        page = self.scaffold_plots(state=state)

        # link full results
        page.hr(class_='row-divider')
        page.div(class_='btn-group')
        page.a('Click here for the full Daily Ahope results',
               href=self.ihopepage, rel='external', target='_blank',
               class_='btn btn-default btn-info btn-xl')
        page.div.close()
        page.hr(class_='row-divider')

        if self.loudest:
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 query=False)
            rank = get_table_column(
                table, self.loudest['rank']).argsort()[::-1]
            # keep the N loudest events, requiring at least
            # self.loudest['dt'] seconds between consecutive entries
            loudest = []
            i = 0
            while len(loudest) < self.loudest['N'] and i < rank.size:
                t = table[rank[i]]
                if i == 0 or all([
                        abs(float(t.get_end()) - float(t2.get_end())) >=
                        self.loudest['dt'] for t2 in loudest]):
                    loudest.append(t)
                i += 1
            page.h1('Loudest events')
            page.p('The following table displays the %d loudest events as '
                   'recorded by Daily Ahope (with at least %s-second '
                   'separation).' % (self.loudest['N'], self.loudest['dt']))
            headers = self.loudest['labels']
            if 'time' in headers[0]:
                headers.insert(1, 'UTC time')
                date = True
            else:
                date = False
            data = []
            for row in loudest:
                data.append([])
                for column in self.loudest['columns']:
                    data[-1].append(
                        '%.3f' % float(get_row_value(row, column)))
                if date:
                    data[-1].insert(
                        1, from_gps(row.get_end()).strftime(
                               '%B %d %Y, %H:%M:%S.%f')[:-3])
            page.add(str(html.table(headers, data, table='data')))

        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            layout = get_mode() == Mode.week and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))

        # link full results
        page.hr(class_='row-divider')
        page.div(class_='btn-group')
        page.a('Click here for the full Daily Ahope results',
               href=self.ihopepage, rel='external', target='_blank',
               class_='btn btn-default btn-info btn-xl')
        page.div.close()
        page.hr(class_='row-divider')

    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
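
# Standalone sketch (an assumption, not part of the original module) of the
# loudest-event selection used in write_state_html above: walk the rows in
# descending rank order and keep the first ``n`` whose times differ by at
# least ``dt`` seconds from every event already kept.
def _pick_loudest(times, ranks, n, dt):
    """Return indices of the ``n`` loudest events, separated by >= ``dt`` s.

    ``times`` and ``ranks`` are parallel sequences of event times and
    ranking statistics (e.g. new SNR).
    """
    order = sorted(range(len(ranks)), key=lambda i: ranks[i], reverse=True)
    keep = []
    for i in order:
        if len(keep) >= n:
            break
        # accept this event only if it is far enough from all kept events
        if all(abs(times[i] - times[j]) >= dt for j in keep):
            keep.append(i)
    return keep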