def write_arguments(content, start, end, flag=None, section='Parameters',
                    info='This analysis used the following parameters:',
                    id_='parameters'):
    """Render an informative section with run parameters in HTML

    Parameters
    ----------
    content : `list` of `tuple`
        a collection of ``(name, value)`` parameter pairs to list;
        NOTE(review): mutated in place — the start/end/flag rows are
        prepended to the caller's list

    start, end : `int`
        GPS start and end times of the analysis

    flag : `str`, optional
        name of the state flag to display, if given

    section : `str`
        name of the section, will appear as a header with an <h2> tag

    info : `str`
        introductory sentence printed above the parameter list

    id_ : `str`
        HTML ``id`` attribute for the section header

    Returns
    -------
    page : `str`
        the rendered HTML for this section
    """
    # prepend the standard timing rows (and the optional state flag)
    prefix_rows = [
        ('Start time', '{} ({})'.format(start, from_gps(start))),
        ('End time', '{} ({})'.format(end, from_gps(end))),
    ]
    if flag is not None:
        prefix_rows.append(('State flag', flag))
    for position, row in enumerate(prefix_rows):
        content.insert(position, row)
    # render each parameter, then the command line used
    page = markup.page()
    page.h2(section, id_=id_)
    page.p(info)
    for item in content:
        page.add(write_param(*item))
    page.add(write_param('Command-line', ''))
    page.add(get_command_line())
    return page()
def from_ini(cls, config, section):
    """Create a new `SummaryState` from a section in a `ConfigParser`.

    Parameters
    ----------
    config : :class:`~gwsumm.config.GWConfigParser`
        customised configuration parser containing given section

    section : `str`
        name of section to parse

    Returns
    -------
    `SummaryState`
        a new state, with attributes set from the options in the
        configuration
    """
    config = GWSummConfigParser.from_configparser(config)
    # get span times
    start = config.getint(section, 'gps-start-time')
    # never extend beyond 'now' for a running analysis
    end = min(globalv.NOW, config.getint(section, 'gps-end-time'))
    # get parameters
    params = dict(config.nditems(section))
    # parse name
    name = params.pop('name', section)
    # NOTE(review): a 'state-'/'state ' prefixed name is replaced by the
    # section name with its first 6 characters dropped — presumably both
    # carry the same 'state-' prefix; confirm against callers
    if re.match(r'state[-\s]', name):
        name = section[6:]
    # get hours: a comma-separated list of 'H0-H1' ranges, optionally
    # ending in a time-zone token ('utc', 'local', or a zone name)
    hours = params.pop('hours', None)
    if hours is not None:
        # [::2] keeps the ranges and drops the captured ',' separators
        segs = re.split(r'(,|, )', hours)[::2]
        hours = []
        offset = 0
        for seg in segs:
            try:
                # parse hour segment
                hours.append(list(map(float, seg.split('-', 1))))
            except ValueError:
                # parse time-zone (only valid as the last entry)
                if seg == segs[-1]:
                    if seg.lower() == 'utc':
                        offset = 0
                    elif seg.lower() == 'local':
                        # 'local' needs an IFO to resolve the time-zone
                        try:
                            ifo = config.get(DEFAULTSECT, 'ifo')
                        except NoOptionError:
                            raise ValueError("The relevant IFO must be "
                                             "given either from the --ifo "
                                             "command-line option, or the "
                                             "[DEFAULT] section of any "
                                             "INI file")
                        offset = get_timezone_offset(ifo, from_gps(start))
                    else:
                        offset = get_timezone_offset(seg, from_gps(start))
                else:
                    raise
        # apply time-zone: shift each hour range back to UTC
        for i, (h0, h1) in enumerate(hours):
            hours[i] = (h0 - offset / 3600., h1 - offset / 3600.)
    # generate state
    return cls(name, known=[(start, end)], hours=hours, **params)
def print_segments(flag, table=False, caption=None):
    """Print the contents of a `SegmentList` in HTML
    """
    if isinstance(flag, DataQualityFlag):
        flag = flag.active
    # use integer GPS columns when the total livetime is a whole second
    dtype = int if float(abs(flag)).is_integer() else float
    if not table:
        # plain-text segwizard dump inside a <pre> element
        buf = StringIO()
        flag.write(buf, format='segwizard', coltype=dtype)
        return markup.oneliner.pre(buf.getvalue())
    headers = [
        'GPS start',
        'GPS end',
        'UTC start',
        'UTC end',
        'Duration [s]',
    ]
    rows = []
    for segment in flag:
        gps0, gps1 = segment[0], segment[1]
        rows.append([
            dtype(gps0),
            dtype(gps1),
            from_gps(gps0).strftime('%B %d %Y %H:%M:%S.%f')[:-3],
            from_gps(gps1).strftime('%B %d %Y %H:%M:%S.%f')[:-3],
            dtype(abs(segment)),
        ])
    return gwhtml.table(headers, rows, id='state-information',
                        caption=caption)
def build_html_calendar(self):
    """Build the datepicker calendar for this tab.

    The mode is taken from this tab's ``mode`` attribute, and the
    datetime from this tab's :attr:`~StateTab.span`.

    Returns
    -------
    calendar : `str` or `~MarkupPy.markup.page`
        the formatted HTML calendar, or a plain ``<div>`` brand showing
        the GPS span when this tab's mode has no datepicker format

    Raises
    ------
    RuntimeError
        if this tab's path doesn't match the format required by the
        archive datepicker
    """
    date = from_gps(self.start)
    # double-check path matches what is required from custom datepicker
    try:
        requiredpath = mode.get_base(date, mode=self.mode)
    except ValueError:
        # no datepicker for this mode: fall back to a GPS-span brand
        return html.markup.oneliner.div('%d-%d' % (self.start, self.end),
                                        class_='navbar-brand')
    if requiredpath not in self.path:
        raise RuntimeError("Tab path %r inconsistent with required "
                           "format including %r for archive calendar"
                           % (self.path, requiredpath))
    # format calendar
    return html.calendar(date, mode=self.mode % 3)
def get_hvetopath(gpstime):
    """Return the path of hveto output files

    Given a gpstime, the path of the folder containing hveto trigger
    files is returned

    Parameters
    ----------
    gpstime : `str` or `float`
        start time of the day for this analysis

    Returns
    -------
    path : `str`
        path to the hveto output file on the local filesystem

    Examples
    --------
    >>> from hveto.const import get_hvetopath
    >>> get_hvetopath(1257811218)
    '/home/detchar/public_html/hveto/day/20191115/latest'
    """
    # this base directory exists for the normal Detchar workflow
    # at the LHO and LLO computing clusters
    base = '/home/detchar/public_html/hveto/day/'
    day = from_gps(gpstime).strftime('%Y%m%d')
    return os.path.join(base, day, 'latest')
def find_daily_archives(start, end, ifo, tag, basedir=os.curdir):
    """Find the daily archives spanning the given GPS [start, end) interval
    """
    found = []
    day = from_gps(to_gps(start))
    last = from_gps(to_gps(end))
    # step one calendar day at a time, keeping archives that exist on disk
    while day < last:
        base = mode.get_base(day, mode=mode.Mode.day)
        gps0 = to_gps(day)
        day += datetime.timedelta(days=1)
        gps1 = to_gps(day)
        candidate = os.path.join(
            basedir, base, 'archive',
            '%s-%s-%d-%d.h5' % (ifo, tag, gps0, gps1 - gps0))
        if os.path.isfile(candidate):
            found.append(candidate)
    return found
def find_daily_archives(start, end, ifo, tag, basedir=os.curdir):
    """Find the daily archives spanning the given GPS [start, end) interval
    """
    archives = []
    current = from_gps(to_gps(start))
    stop = from_gps(to_gps(end))
    one_day = datetime.timedelta(days=1)
    while current < stop:
        daybase = mode.get_base(current, mode=mode.Mode.day)
        span_start = to_gps(current)
        current = current + one_day
        span_end = to_gps(current)
        # archive filename encodes IFO, tag, GPS start and duration
        filename = '%s-%s-%d-%d.hdf' % (
            ifo, tag, span_start, span_end - span_start)
        path = os.path.join(basedir, daybase, 'archive', filename)
        if os.path.isfile(path):
            archives.append(path)
    return archives
def plot(data, **kwargs):
    """Plot a time series with optional state-segment bars, and save it.

    Parameters
    ----------
    data : `~gwpy.timeseries.TimeSeries`
        the series to plot; its ``name`` labels the trace and names
        the output file

    **kwargs
        understood keys: ``ylim``, ``color``, ``title``, ``hlines``,
        ``version`` and ``sv`` (a 5-tuple of state vectors); anything
        else is ignored

    Notes
    -----
    Reads the module-level ``start`` and ``end`` GPS times.
    The figure is written to ``./img/<yyyy>/<mm>/<version>_<name>.png``
    unless ``version`` contains ``'tmp'``, in which case a
    ``tmp_<version>_<name>.png`` is written in the current directory.
    """
    ylim = kwargs.pop('ylim', (0.0, 0.3))
    color = kwargs.pop('color', 'k')
    # NOTE(review): the default of [] cannot be unpacked into 5 names,
    # so 'sv' is effectively a required keyword — confirm with callers
    is_ok, is_locked, is_good, is_good_p, is_good_s = kwargs.pop('sv', [])
    title = kwargs.pop('title', None)
    hlines = kwargs.pop('hlines', None)
    version = kwargs.pop('version', None)
    # '<start date>-<end date>-' from the module-level GPS span
    prefix = '{0}-{1}-'.format(
        str(from_gps(start)).split(' ')[0],
        str(from_gps(end)).split(' ')[0])
    fname = version + '_' + data.name.lower()
    plot = data.plot(ylim=ylim, epoch=start, figsize=(25, 5), color=color,
                     label=data.name)
    ax = plot.gca()
    ax.set_title(title)
    ax.legend(loc='upper right')
    if hlines:
        for hline in hlines:
            ax.hlines(hline, start, end, linestyle='--', color=color)
    if is_ok:
        # one segment bar per state vector
        obs = is_ok.to_dqflag(round=True)
        absp = is_locked.to_dqflag(round=True)
        ppol = is_good_p.to_dqflag(round=True)
        spol = is_good_s.to_dqflag(round=True)
        plot.add_segments_bar(obs, label='Observing')
        plot.add_segments_bar(absp, label='absorp')
        plot.add_segments_bar(ppol, label='p-pol')
        plot.add_segments_bar(spol, label='s-pol')
    if 'tmp' not in version:
        # permanent output, organised by year/month
        yyyy = prefix.split('-')[0]
        mm = prefix.split('-')[1]
        print('./img/{0}/{1}/{2}.png'.format(yyyy, mm, fname))
        plot.savefig('./img/{0}/{1}/{2}.png'.format(yyyy, mm, fname))
    else:
        plot.savefig('tmp_{0}.png'.format(fname))
    plot.close()
def get_brand(ifo, name, gps, about=None):
    """Return a brand for navigation bar formatting

    Parameters
    ----------
    ifo : `str`
        interferometer prefix for color-coding, e.g. `'L1'`

    name : `str`
        name of the analysis, e.g. `'Scattering'`

    gps : `float`
        GPS second of the analysis

    about : `str`, optional
        relative path to the `about` page for this analysis, default: None

    Returns
    -------
    brand : `~MarkupPy.markup.page`
        the navbar brand `page` object

    class_ : `str`
        object class of the navbar
    """
    # navbar brand
    brand = markup.oneliner.div(
        ' '.join([ifo, name]),
        class_='navbar-brand border border-white rounded',
    )
    # IFO links
    page = markup.page()
    page.ul(class_='nav navbar-nav')
    page.li(class_='nav-item dropdown')
    page.a('Links', class_='nav-link dropdown-toggle', href='#',
           role='button', **{'data-toggle': 'dropdown'})
    page.div(class_='dropdown-menu dropdown-menu-right shadow')
    if about is not None:
        page.h6('Internal', class_='dropdown-header')
        page.a('About this page', href=about, class_='dropdown-item')
        page.div('', class_='dropdown-divider')
    page.h6('External', class_='dropdown-header')
    # loop variable renamed from 'name' so it doesn't shadow the
    # 'name' parameter used for the brand above
    for linkname, url in OBSERVATORY_MAP[ifo]['links'].items():
        if 'Summary' in linkname:
            # point summary-page links at this day's page
            day = from_gps(gps).strftime(r"%Y%m%d")
            url = '/'.join([url, day])
        page.a(linkname, href=url, class_='dropdown-item', target='_blank')
    page.div.close()  # dropdown-menu
    page.li.close()  # nav-link dropdown-toggle
    page.ul.close()  # nav navbar-nav
    class_ = ('navbar fixed-top navbar-expand-md navbar-{} '
              'shadow-sm').format(ifo.lower())
    return ((brand, page()), class_)
def Get_Rates_3(chunks, segs, verbose = False):
    """Returns the glitch rates for a given set of time chunks defined by
    a list of start times, with an end time at the last entry.

    Arguments:
    chunks -- Sorted list of times representing the beginnings of the
    time periods for which rate is to be calculated, with 'end' tacked
    on.
    segs -- Ordered and non-overlapping SegmentList such that every
    element in 'chunks' (except the last one) is in an entry in 'segs'.
    verbose -- Set to 'True' if you want to see the ends of each chunk
    in 'chunks' printed as it is processed.

    Returns:
    normcounts -- A list of glitch rates (Hz) associated with each time
    period represented in 'chunks'."""
    traced = False  # NOTE(review): never used; likely leftover debug flag
    normcounts = []
    # j walks forward through segs in lockstep with chunks; both are
    # sorted, so it never needs to rewind
    j = 0
    for i in range(len(chunks)-1):
        # advance to the segment containing this chunk start
        while not chunks[i] in segs[j]:
            j = j+1
        segend = segs[j][1]
        # truncate the chunk at the segment boundary if necessary
        if chunks[i+1]>segend:
            chunkend = segend
        else:
            chunkend = chunks[i+1]
        if verbose:
            print(from_gps(chunks[i]), from_gps(chunkend))
        # Omicron triggers for the L1 strain channel over this chunk
        files = find_trigger_files('L1:GDS-CALIB_STRAIN', 'Omicron',
                                   chunks[i], chunkend)
        if len(files)>0:
            events = EventTable.read(files, format='ligolw',
                                     tablename='sngl_burst',
                                     columns=['peak','peak_time_ns',
                                              'peak_frequency', 'snr'])
            # keep only events whose peak time falls inside the chunk
            # (trigger files may extend beyond it)
            events = events[(events['peak']>=chunks[i])
                            & (events['peak']<chunkend)]
            counts = len(events['peak'])
            length = chunkend - chunks[i]
            # rate in Hz = number of triggers / chunk duration
            normcount = counts/(length)
            normcounts.append(normcount)
        else:
            normcounts.append(0)
    return normcounts
def get_brand(ifo, name, gps, about=None):
    """Return a brand for navigation bar formatting

    Parameters
    ----------
    ifo : `str`
        interferometer prefix for color-coding, e.g. `'L1'`

    name : `str`
        name of the analysis, e.g. `'Scattering'`

    gps : `float`
        GPS second of the analysis

    about : `str`, optional
        relative path to the `about` page for this analysis, default: None

    Returns
    -------
    brand : `~MarkupPy.markup.page`
        the navbar brand `page` object

    class_ : `str`
        object class of the navbar
    """
    page = markup.page()
    page.div(ifo, class_='navbar-brand')
    page.div(name, class_='navbar-brand')
    # dropdown of IFO links
    page.div(class_='btn-group pull-right ifo-links')
    page.a(class_='navbar-brand dropdown-toggle', href='#',
           **{'data-toggle': 'dropdown'})
    page.add('Links')
    page.b('', class_='caret')
    page.a.close()
    page.ul(class_='dropdown-menu')
    if about is not None:
        page.li('Internal', class_='dropdown-header')
        page.li()
        page.a('About this page', href=about)
        page.li.close()
        page.li('', class_='divider')
    page.li('External', class_='dropdown-header')
    # loop variable renamed from 'name' so it doesn't shadow the
    # 'name' parameter used for the brand above
    for linkname, url in OBSERVATORY_MAP[ifo]['links'].items():
        if 'Summary' in linkname:
            # point summary-page links at this day's page
            day = from_gps(gps).strftime(r"%Y%m%d")
            url = '/'.join([url, day])
        page.li()
        page.a(linkname, href=url, target='_blank')
        page.li.close()
    page.ul.close()
    page.div.close()  # btn-group pull-right
    class_ = 'navbar navbar-fixed-top navbar-{}'.format(ifo.lower())
    return (page(), class_)
def correct_time():
    """Fetch the current GPS time from the remote DAQ via SSH.

    Runs ``caget`` on the remote host to read the ``C4:DAQ-DC0_GPS``
    channel, prints the result as ``<datetime> (<gps>)``, and returns
    the GPS time.

    Returns
    -------
    fb4gps : `~gwpy.time.LIGOTimeGPS`
        the GPS time reported by the DAQ
    """
    host = '[email protected]'
    command = 'caget -t -f10 C4:DAQ-DC0_GPS'
    # universal_newlines=True decodes stdout to str — on Python 3 the raw
    # stream yields bytes and bytes.strip('\n') raises TypeError;
    # communicate() also waits on the child, avoiding a zombie process
    ssh = subprocess.Popen(['ssh', host, command], shell=False,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           universal_newlines=True)
    out, _ = ssh.communicate()
    fb4gps = to_gps(out.splitlines()[0].strip())
    print('{} ({})'.format(from_gps(fb4gps), fb4gps))
    return fb4gps
def Datetimegen(t):
    """Generate a date and a time in my own format.

    Arguments:
    t -- A GPS time (unit removed).

    Returns:
    A date of form DDMMYY and a time of form HH:MM:SS.ss."""
    # 'YYYY-MM-DD HH:MM:SS.ss' — first 22 chars of the datetime string
    stamp = str(from_gps(round(t, 2)))[0:22]
    day, month, year2 = stamp[8:10], stamp[5:7], stamp[2:4]
    # everything after the date is the time
    return day + month + year2, stamp[11:]
def get_bins(self): """Work out the correct histogram binning for this `DutyDataPlot` """ # if not given anything, work it out from the mode if self.bins is None: m = mode.MODE_NAME[mode.get_mode()] duration = float(abs(self.span)) # for year mode, use a month if m in ['YEAR'] or duration >= 86400 * 300: dt = relativedelta(months=1) # for more than 8 weeks, use weeks elif duration >= 86400 * 7 * 8: dt = relativedelta(weeks=1) # for week and month mode, use daily elif m in ['WEEK', 'MONTH'] or duration >= 86400 * 7: dt = relativedelta(days=1) # for day mode, make hourly duty factor elif m in ['DAY']: dt = relativedelta(hours=1) # otherwise provide 10 bins else: dt = relativedelta(seconds=float(abs(self.span))/10.) # if given a float, assume this is the bin size elif isinstance(self.bins, (float, int)): dt = relativedelta(seconds=self.bins) # if we don't have a list, we must have worked out dt if not isinstance(self.bins, (list, tuple, numpy.ndarray)): self.bins = [] s = from_gps(self.start) e = from_gps(self.end) while s < e: t = int(to_gps(s + dt) - to_gps(s)) self.bins.append(t) s += dt self.bins = numpy.asarray(self.bins) return self.bins
def print_segments(flag, table=False, caption=None):
    """Print the contents of a `SegmentList` in HTML
    """
    if isinstance(flag, DataQualityFlag):
        flag = flag.active
    # integer columns when the total livetime is a whole number of seconds
    # (truthy 'int' makes the and/or idiom equivalent to a conditional)
    coltype = float(abs(flag)).is_integer() and int or float
    if table:
        def utc(gps):
            # millisecond-precision UTC timestamp
            return from_gps(gps).strftime('%B %d %Y %H:%M:%S.%f')[:-3]
        data = [
            [coltype(seg[0]), coltype(seg[1]), utc(seg[0]), utc(seg[1]),
             coltype(abs(seg))]
            for seg in flag
        ]
        return gwhtml.table(
            ['GPS start', 'GPS end', 'UTC start', 'UTC end',
             'Duration [s]'],
            data, id='state-information', caption=caption)
    segwizard = StringIO()
    flag.write(segwizard, format='segwizard', coltype=coltype)
    return markup.oneliner.pre(segwizard.getvalue())
def html_calendar(self):
    """Build the datepicker calendar for this tab.

    Notes
    -----
    The datetime for the calendar is taken from this tab's
    `~GpsTab.span`
    """
    date = from_gps(self.start)
    # check that this tab's path matches the custom datepicker format
    try:
        requiredpath = get_base(date, mode=self.mode)
    except ValueError:
        # no datepicker for this mode: just report the GPS span
        return ['%d-%d' % (self.start, self.end)]
    else:
        if requiredpath not in self.path:
            raise RuntimeError("Tab path %r inconsistent with required "
                               "format including %r for archive calendar"
                               % (self.path, requiredpath))
    # format calendar
    return html.calendar(date, mode=self.mode)
def html_calendar(self):
    """Build the datepicker calendar for this tab.

    Notes
    -----
    The datetime for the calendar is taken from this tab's
    `~GpsTab.span`
    """
    date = from_gps(self.start)
    # fallback brand used when this tab's mode has no datepicker format
    fallback = markup.oneliner.div('%d-%d' % (self.start, self.end),
                                   class_='navbar-brand')
    try:
        requiredpath = get_base(date, mode=self.mode)
    except ValueError:
        return fallback
    # the tab path must match the format required by the datepicker
    if requiredpath not in self.path:
        raise RuntimeError("Tab path %r inconsistent with required "
                           "format including %r for archive calendar"
                           % (self.path, requiredpath))
    return html.calendar(date, mode=self.mode)
def edit_calendar(calendar_file, results_url, current_gps):
    """ Add a results url link to calendar file.

    Replaces the placeholder entry for the hour containing
    ``current_gps`` in ``calendar_file`` with a link to ``results_url``,
    editing the file in place via a temporary copy.

    Parameters
    ----------
    calendar_file : `str`
        path to the HTML calendar file to edit

    results_url : `str`
        URL of the results page to link

    current_gps : `float`
        GPS time whose hour entry should be linked
    """
    ymdh = from_gps(current_gps).strftime('%Y%m%d%H')
    hour = ymdh[-2:]
    # strip the '.html' suffix exactly: str.rstrip('.html') would strip
    # any trailing run of the characters {., h, t, m, l} and mangle
    # names such as 'month.html'
    if calendar_file.endswith('.html'):
        stem = calendar_file[:-len('.html')]
    else:
        stem = calendar_file
    calendar_temp = '%s_%s.html' % (stem, current_gps)
    # String to be replaced
    stringold = '<!-- %s --> <li class="sgrayl l1"><p>%s:00</p></li>'\
        % (ymdh, hour)
    stringnew = '<li class="greenish l1"><p><a href="%s">%s:00</a></p></li>'\
        % (results_url, hour)
    shutil.copy2(calendar_file, calendar_temp)
    # fileinput with inplace=1 redirects stdout into the temp file
    for j, line in enumerate(fileinput.input(calendar_temp, inplace=1)):
        sys.stdout.write(line.replace(stringold, stringnew))
    shutil.copy2(calendar_temp, calendar_file)
    os.remove(calendar_temp)
def test_from_gps():
    """Test :func:`gwpy.time.from_gps`
    """
    # integer input returns a datetime
    result = time.from_gps(1167264018)
    assert isinstance(result, datetime)
    assert result == datetime(2017, 1, 1)
    # string input
    assert time.from_gps('1167264018') == datetime(2017, 1, 1)
    # float input keeps sub-second precision
    assert time.from_gps(1126259462.391) == datetime(
        2015, 9, 14, 9, 50, 45, 391000)
    # scientific-notation string
    assert time.from_gps('1.13e9') == datetime(2015, 10, 27, 16, 53, 3)
    # unparseable input raises
    with pytest.raises((RuntimeError, ValueError)):
        time.from_gps('test')
def write_state_html(self, state):
    """Write the '#main' HTML content for this `GraceDbTab`.

    Builds a table of the recovered GraceDb events for the given state,
    a note on the query used, and a labelling-colour reference, then
    writes the page to this state's frame file.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state for which to write content

    Returns
    -------
    frame : `str`
        the path of the frame file written
    """
    page = html.markup.page()
    # build table of events
    page.div(class_='scaffold well')
    page.table(class_='table table-condensed table-hover table-striped')
    # thead
    page.thead()
    page.tr()
    for head in self.headers:
        page.th(head)
    page.tr.close()
    page.thead.close()
    # tbody
    page.tbody()
    for event in self.events[str(state)]:
        # colour the row based on the event's labels, if any
        # ('l' renamed to 'event_labels' for readability, E741)
        context = None
        try:
            event_labels = event['labels'].split(', ')
        except (AttributeError, KeyError):
            pass
        else:
            for label in ['ADVNO', 'H1NO', 'L1NO', 'DQV', 'INJ',
                          'EM_READY']:
                if label in event_labels:
                    context = LABELS[label]
                    break
        if context is not None:
            page.tr(class_=context)
        for col in self.columns:
            if col == 'date':
                page.td(from_gps(event['gpstime']).strftime(
                    '%B %d %Y, %H:%M:%S.%f')[:-3])
                continue
            try:
                v = event[col]
            except KeyError:
                # fall back to the GRB extra attributes, else '-'
                try:
                    v = event['extra_attributes']['GRB'][col]
                    assert v is not None
                except (KeyError, AssertionError):
                    page.td('-')
                    continue
            if col == 'graceid':
                # link the event ID back to GraceDb
                page.td()
                href = '%s/events/view/%s' % (self.url, v)
                page.a(v, href=href, target='_blank', rel='external')
                page.td.close()
            elif col != 'gpstime' and isinstance(v, float):
                page.td('%.3g' % v)
            else:
                page.td(str(v))
        page.tr.close()
    page.tbody.close()
    page.table.close()
    if len(self.events[str(state)]) == 0:
        page.p("No events were recovered for this state.")
    page.div.close()  # scaffold well
    # query doc
    page.p("The above table was generated from a query to %s with the "
           "form <code>%s</code>." % (self.url, self.query))
    # reference the labelling
    page.h4("Labelling reference")
    page.p("Events in the above table may have a context based on "
           "its labels as follows:")
    contexts = set(LABELS.values())
    for c in contexts:
        labels = [k for k, v in LABELS.items() if v == c]
        labstr = ', '.join(['<b>%s</b>' % lab for lab in labels])
        page.p(labstr, class_='bg-%s' % c, style='width: auto;')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def write_state_html(self, state):
    """Write the '#main' HTML content for this `EventTriggerTab`.

    Renders either an error alert (if trigger retrieval failed for this
    state) or the scaffolded plots, a link to the full results, tables
    of the loudest events, and any sub-plots, then writes the page to
    this state's frame file and returns its path.
    """
    if self.error.get(state, None):
        level, message = self.error[state]
        # no segments, print warning
        page = html.markup.page()
        page.div(class_='alert alert-%s' % level)
        page.p(message)
        page.p("If you believe this to be an error, please contact %s."
               % html.markup.oneliner.a('the DetChar group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        page = self.scaffold_plots(state=state)
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
        if self.loudest:
            page.h1('Loudest events')
            page.p('The following table(s) displays the %d loudest events '
                   'as recorded by %s (with at least %s-second '
                   'separation).' % (self.loudest['N'], self.etg,
                                     self.loudest['dt']))
            # get triggers
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 query=False)
            # set table headers
            headers = list(self.loudest['labels'])
            # add a UTC column when the first column is a time
            if 'time' in headers[0]:
                headers.insert(1, 'UTC time')
                date = True
            else:
                date = False
            # loop over rank columns
            for rank in self.loudest['rank']:
                try:
                    rankstr = self.loudest['labels'][
                        self.loudest['columns'].index(rank)]
                except ValueError:
                    rankstr = repr(rank)
                page.h3('Loudest events by %s' % rankstr)
                # indices of triggers sorted loudest-first by this column
                rank = get_table_column(table, rank).argsort()[::-1]
                loudest = []
                i = 0
                # keep the top N triggers subject to the minimum
                # time separation 'dt' between selected events
                while len(loudest) < self.loudest['N'] and i < rank.size:
                    t = table[rank[i]]
                    if i == 0 or all([
                            abs(float(get_row_value(t, 'time')) -
                                float(get_row_value(t2, 'time'))) >=
                            self.loudest['dt'] for t2 in loudest]):
                        loudest.append(t)
                    i += 1
                data = []
                for row in loudest:
                    data.append([])
                    for column in self.loudest['columns']:
                        data[-1].append(
                            '%.3f' % float(get_row_value(row, column)))
                    if date:
                        # UTC timestamp derived from the first column
                        data[-1].insert(1, from_gps(get_row_value(
                            row, self.loudest['columns'][0])).strftime(
                                '%B %d %Y, %H:%M:%S.%f')[:-3])
                page.add(str(html.data_table(headers, data, table='data')))
        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            # 7 plots per row in week mode, otherwise 4
            layout = get_mode() == MODE_ENUM['WEEK'] and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def specplot(self, filename, ts=None, est_amb=True, show_darm_threshold=True,
             upper_lim=True, freq_min=None, freq_max=None, spec_min=None,
             spec_max=None, fig_w=12, fig_h=6):
    """
    Export an estimated ambient plot from the data.

    Parameters
    ----------
    filename : str
    ts : time.time object
        Timestamp object.
    est_amb : bool
        If True, show estimated ambient super-imposed on DARM spectrum.
    show_darm_threshold: bool
        If True, draw a dashed spectrum representing one order of
        magnitude below DARM.
    upper_lim : bool
        If True, include upper limits in the plot. Otherwise just plot
        measured (real) values.
    freq_min : float
        Minimum frequency (x-axis).
    freq_max : float
        Maximum frequency (x_axis).
    spec_min : float
        Minimum value of coupling factor axis (y-axis).
    spec_max : float
        Maximum value of coupling factor axis (y-axis).
    fig_w : float or int
        Figure width.
    fig_h : float or int
        Figure height.
    """
    if ts is None:
        ts = time.time()
    # NOTE(review): bare except — intended to catch None/non-numeric
    # inputs and fall back to the data's frequency range, but it will
    # also swallow unrelated errors; consider 'except TypeError'
    try:
        float(freq_min)
    except:
        freq_min = self.freqs[0]
    try:
        float(freq_max)
    except:
        freq_max = self.freqs[-1]
    # locate the array indices bounding [freq_min, freq_max]
    loc_freq_min, loc_freq_max = 0, -1
    while self.freqs[loc_freq_min] < freq_min:
        loc_freq_min += 1
    while self.freqs[loc_freq_max] > freq_max:
        loc_freq_max -= 1
    # combined DARM background+injection values in the plotted band
    darm_y = list(self.darm_bg[loc_freq_min:loc_freq_max]) + list(
        self.darm_inj[loc_freq_min:loc_freq_max])
    amp_max_spec = max(darm_y)
    amp_min_spec = min(darm_y)
    # Y-AXIS LIMITS FOR SENSOR SPECTROGRAM
    spec_sens_min = np.min(self.sens_bg[loc_freq_min:loc_freq_max]) / 2
    spec_sens_max = np.max(self.sens_inj[loc_freq_min:loc_freq_max]) * 4
    # Y-AXIS LIMITS FOR DARM/EST AMB SPECTROGRAM
    # only finite, positive ambients are usable on a log axis
    amb_values = self.ambients[np.isfinite(self.ambients)
                               & (self.ambients > 0)]
    amb_min = np.min(amb_values) if np.any(
        amb_values) else self.darm_bg.min() / 10.
    try:
        float(spec_min)
    except TypeError:
        if show_darm_threshold:
            spec_min = min([amb_min, min(self.darm_bg)]) / 4
        else:
            spec_min = amp_min_spec / 4
    try:
        float(spec_max)
    except TypeError:
        spec_max = amp_max_spec * 2
    # CREATE FIGURE FOR SPECTRUM PLOTS
    try:
        float(fig_w)
    except TypeError:
        fig_w = 14
    try:
        float(fig_h)
    except TypeError:
        fig_h = 6
    fig = plt.figure(figsize=(fig_w, fig_h))
    # PLOT SENSOR SPECTRA
    ax1 = fig.add_subplot(211)
    plt.plot(self.freqs, self.sens_inj, color='r', label='Injection',
             zorder=5)
    plt.plot(self.freqs, self.sens_bg, color='k', label='Background',
             zorder=5)
    # CREATE LEGEND AND LABELS
    plt.legend()
    ylabel = self.qty + '[' + str(self.unit).replace('(', '').replace(
        ')', '') + '/Hz$^{1/2}$]'
    plt.ylabel(ylabel, size=18)
    plt.title(self.name.replace('_', ' ') + ' - Spectrum', size=20)
    # CUSTOMIZE AXES
    plt.xlim([freq_min, freq_max])
    plt.ylim([spec_sens_min, spec_sens_max])
    # log x-axis only when the band spans more than half a decade
    # NOTE(review): nonposx/nonposy were removed in matplotlib >= 3.5
    # ('nonpositive' replaces them) — confirm the pinned version
    if ((freq_max / freq_min) > 5):
        ax1.set_xscale('log', nonposx='clip')
    ax1.set_yscale('log', nonposy='clip')
    ax1.autoscale(False)
    plt.grid(b=True, which='major', color='0.0', linestyle=':',
             linewidth=1, zorder=0)
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='0.6', linestyle=':', zorder=0)
    for tick in ax1.xaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    for tick in ax1.yaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    # nudge the top axes up slightly to separate the two panels
    p1 = ax1.get_position()
    p2 = [p1.x0, p1.y0 + 0.02, p1.width, p1.height]
    ax1.set_position(p2)
    # PLOT DARM LINES
    ax2 = fig.add_subplot(212)
    plt.plot(self.freqs, self.darm_inj, '-', color='r',
             label='DARM during injection', zorder=3)
    plt.plot(self.freqs, self.darm_bg, '-', color='0.1',
             label='DARM background', zorder=4)
    if show_darm_threshold == True:
        plt.plot(self.freqs, self.darm_bg / 10., '--', color='k',
                 label='DARM background / 10', zorder=2)
    # PLOT ESTIMATED AMBIENT
    if est_amb:
        # split ambients into measured ('Real') values and upper limits
        real_amb = [[], []]
        upper_amb = [[], []]
        for y in range(len(self.freqs)):
            if self.flags[y] == 'Upper Limit':
                upper_amb[1].append(self.ambients[y])
                upper_amb[0].append(self.freqs[y])
            elif self.flags[y] == 'Real':
                real_amb[1].append(self.ambients[y])
                real_amb[0].append(self.freqs[y])
        plt.plot(real_amb[0], real_amb[1], 'o', color='lime',
                 markersize=6, markeredgewidth=.5, label='Est. Amb.',
                 zorder=6)
        if upper_lim:
            plt.plot(upper_amb[0], upper_amb[1], '^', markersize=5,
                     markerfacecolor='none', markeredgecolor='0.3',
                     markeredgewidth=.8, label='Upper Limit Est. Amb.',
                     zorder=5)
    # CREATE LEGEND AND LABELS
    legend = plt.legend(prop={'size': 12}, loc=1)
    legend.get_frame().set_alpha(0.5)
    plt.ylabel('DARM ASD [m/Hz$^{1/2}$]', size=18)
    plt.xlabel('Frequency [Hz]', size=18)
    if est_amb:
        plt.title(self.name.replace('_DQ', '').replace('_', ' ') +
                  ' - Estimated Ambient', size=20)
    else:
        plt.title('DARM - Spectrum', size=20)
    # FIGURE TEXT
    # Supertitle (timestamp)
    plt.suptitle(
        datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'),
        fontsize=16, y=1.01)
    # Miscellaneous captions
    str_quiet_time = 'Background Time: ' + str(from_gps(
        self.t_bg)) + '\n({})'.format(self.t_bg) + ' ' * 11
    str_inj_time = 'Injection Time: ' + str(from_gps(
        self.t_inj)) + '\n' + ' ' * 25 + '({})'.format(self.t_inj)
    plt.figtext(.9, .01, str_quiet_time, ha='right', fontsize=12,
                color='b')
    plt.figtext(.1, .01, str_inj_time, fontsize=12, color='b')
    plt.figtext(.5, 0.0, 'Band Width: {:1.3f} Hz'.format(self.df),
                ha='center', va='top', fontsize=14, color='b')
    plt.figtext(.1, .97, 'Injection name:\n{} '.format(
        self.injection_name), fontsize=12, color='b')
    if est_amb:
        plt.figtext(.9, .99, 'Measured coupling factors: {}'.format(
            len(real_amb[1])), ha='right', fontsize=14, color='b')
        if upper_lim:
            plt.figtext(.9, .965,
                        'Upper limit coupling factors: {}'.format(
                            len(upper_amb[1])), ha='right', fontsize=14,
                        color='b')
    # CUSTOMIZE AXES
    plt.xlim([freq_min, freq_max])
    plt.ylim([spec_min, spec_max])
    if ((freq_max / freq_min) > 5):
        ax2.set_xscale('log', nonposx='clip')
    ax2.set_yscale('log', nonposy='clip')
    ax2.autoscale(False)
    plt.grid(b=True, which='major', color='0.0', linestyle=':',
             linewidth=1, zorder=0)
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='0.6', linestyle=':', zorder=0)
    for tick in ax2.xaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    for tick in ax2.yaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    # EXPORT PLOT
    plt.savefig(filename, bbox_inches='tight')
    plt.close()
    return fig
def build_spec(data, tmin=None, tmax=None, fmin=None, fmax=None, vmin=None,
               vmax=None, mode='fourier', omega0=6, dt=1, dj=0.05,
               fct='morlet', stride=None, nfft=None, overlap=None,
               scale='log', station=None, dy=None, xmin=None, xmax=None,
               funit='Hz', tunit='secs', cmap='inferno', fname=None):
    """
    Plot multiplot figure with time series, PSD and spectrogram.

    Parameters
    ----------
    data : TimeSeries
        Magnetic field data
    tmin, tmax : datetime
        First and last timestamps
    fmin, fmax : float
        Minimum and maximum frequencies
    vmin, vmax : float
        Minimum and maximum color values
    mode : str
        Spectrogram mode, wavelet or Fourier. Default is Fourier
    omega0 : int
        Wavelet function parameter
    dt : float
        Time step
    dj : float
        Scale resolution (smaller values of dj give finer resolution)
    fct : str
        Wavelet function (morlet,paul,dog)
    stride : float
        Length of segment
    nfft : float
        Length of the FFT used, if a zero padded FFT is desired.
    overlap : float
        Length of overlapping segment
    cmap : str
        Colormap
    scale : str
        Plotted frequency scale. Default is "log".
    station : str
        Name of the station.
    dy : float
        Half the difference between the maximum and minimum magnetic
        field values to be plotted. This can be used if multiple figures
        are made from different stations such that the plotted range of
        the time series is of the same size for every station.
    xmin : float
        Minimum value in the power spectral density plot
    xmax : float
        Maximum value in the power spectral density plot
    funit : str
        Frequency unit, Hz or mHz. Default is Hz.
    tunit : str
        Time unit, secs, mins or hrs. Default is mins.
    fname : str
        Output file name.

    Notes
    -----
    The `matplotlib.pyplot.imshow` module is used to plot the wavelet
    spectrogram. It assumes that the position of the cell in the input
    array directly represents the position of the pixel in the raw
    image, with the first row at the top. The spectrogram rows
    (frequency bands) must therefore be stored in descending order so
    the lowest frequency ends up at the bottom of the image.
    """
    if mode == 'wavelet' and scale == 'linear':
        # py3 fix: was a Python 2 print statement
        print('Warning: Wavelet mode chosen. Scale will be changed to log.')
        scale = 'log'
    # Initialise figure
    fig = plt.figure(figsize=(24, 14), frameon=False)
    plt.subplots_adjust(left=0.07, right=0.95, bottom=0.1, top=0.95,
                        hspace=0, wspace=0)
    if station != None:
        fig.suptitle(station)
    # axes: ax1 time series (top), ax2 spectrogram (centre),
    # ax3 PSD (left), ax4 colorbar (right)
    ax1 = fig.add_axes([0.20, 0.75, 0.683, 0.20])
    ax2 = fig.add_axes([0.20, 0.10, 0.683, 0.64], sharex=ax1)
    ax3 = fig.add_axes([0.07, 0.10, 0.123, 0.64])
    ax4 = fig.add_axes([0.89, 0.10, 0.030, 0.64])
    # Prepare timing range
    tmin = data.times[0].value if tmin == None else tmin
    tmax = data.times[-1].value if tmax == None else tmax
    mask = (data.times.value >= tmin) & (data.times.value <= tmax)
    scale_factor = 3600. if tunit == 'hrs' else 60. if tunit == 'mins' else 1
    times = (data[mask].times.value - tmin) / scale_factor
    # Plot time series
    if dy == None:
        ax1.plot(times, data[mask].value, alpha=0.5)
    else:
        # mean-subtracted, fixed vertical range for cross-station figures
        ax1.plot(times, data[mask].value - numpy.mean(data[mask].value),
                 alpha=0.5)
        ax1.set_ylim(-dy / 2, dy / 2)
    ax1.set_ylabel('Magnetic Fields [uT]', fontsize=11)
    ax1.tick_params(bottom='off', labelbottom='off')
    ax1.set_xlim(0, (tmax - tmin) / scale_factor)
    ax1.grid(b=True, which='major', alpha=0.7, ls='--')
    if mode == 'wavelet':
        # Calculate wavelet parameters
        scales = mlpy.wavelet.autoscales(N=len(data[mask].value), dt=dt,
                                         dj=dj, wf=fct, p=omega0)
        spec = mlpy.wavelet.cwt(data[mask].value, dt=dt, scales=scales,
                                wf=fct, p=omega0)
        freq = (omega0 + numpy.sqrt(2.0 + omega0**2)) / \
            (4 * numpy.pi * scales[1:])
        freq = freq * 1000. if funit == 'mHz' else freq
        spec = numpy.abs(spec)**2
        # reverse rows so the lowest frequency is at the bottom (imshow)
        spec = spec[::-1]
        # Define minimum and maximum frequencies
        fmin_log, fmax_log = min(freq), max(freq)
        fmin_linear, fmax_linear = min(freq), max(freq)
        if fmin != None:
            # map the log-scale limit onto the linear axis used by imshow
            log_ratio = (numpy.log10(fmin) - numpy.log10(min(freq))) / (
                numpy.log10(max(freq)) - numpy.log10(min(freq)))
            fmin_linear = min(freq) + log_ratio * (max(freq) - min(freq))
            fmin_log = fmin
        if fmax != None:
            log_ratio = (numpy.log10(fmax) - numpy.log10(min(freq))) / (
                numpy.log10(max(freq)) - numpy.log10(min(freq)))
            fmax_linear = min(freq) + log_ratio * (max(freq) - min(freq))
            fmax_log = fmax
        # Get minimum and maximum amplitude in selected frequency range
        idx = numpy.where(numpy.logical_and(fmin_log < freq[::-1],
                                            freq[::-1] < fmax_log))[0]
        vmin = vmin if vmin != None else numpy.sort(
            numpy.unique(spec[idx]))[1]
        vmax = spec[idx].max() if vmax == None else vmax
        # Plot spectrogram
        img = ax2.imshow(spec,
                         extent=[times[0], times[-1], freq[-1], freq[0]],
                         aspect='auto', interpolation='nearest',
                         cmap=cmap,
                         norm=matplotlib.colors.LogNorm(vmin, vmax))
        ax2.set_xlabel('Time [%s] from %s UTC' % (tunit, from_gps(tmin)),
                       fontsize=15)
        ax2.set_xlim(0, (tmax - tmin) / scale_factor)
        ax2.set_yscale('linear')
        ax2.set_ylim(fmin_linear, fmax_linear)
        ax2.grid(False)
        # Set up axis range for spectrogram (log labels on a twin axis)
        twin_ax = ax2.twinx()
        twin_ax.set_yscale('log')
        twin_ax.set_xlim(0, (tmax - tmin) / scale_factor)
        twin_ax.set_ylim(fmin_log, fmax_log)
        twin_ax.spines['top'].set_visible(False)
        twin_ax.spines['right'].set_visible(False)
        twin_ax.spines['bottom'].set_visible(False)
        ax2.tick_params(which='both', labelleft=False, left=False)
        twin_ax.tick_params(which='both', labelleft=False, left=False,
                            labelright=False, right=False)
        twin_ax.grid(False)
    if mode == 'fourier':
        freq, times, spec = signal.spectrogram(data[mask],
                                               fs=data.sample_rate.value,
                                               nfft=nfft, nperseg=stride,
                                               noverlap=overlap)
        # Convert time array into minute unit
        times = (numpy.linspace(data[mask].times.value[0],
                                data[mask].times.value[-1],
                                len(times)) - tmin) / scale_factor
        # Define minimum and maximum frequencies
        freq = freq * 1000. if funit == 'mHz' else freq
        fmin = freq[1] if fmin == None else fmin
        fmax = max(freq) if fmax == None else fmax
        fmin_log, fmax_log = fmin, fmax
        # Get minimum and maximum amplitude in selected frequency range
        idx = numpy.where(numpy.logical_and(fmin <= freq,
                                            freq <= fmax))[0]
        vmin = vmin if vmin != None else numpy.sort(
            numpy.unique(spec[idx]))[1]
        vmax = spec[idx].max() if vmax == None else vmax
        # Plot spectrogram
        img = ax2.pcolormesh(times, freq, spec, cmap=cmap,
                             norm=matplotlib.colors.LogNorm(vmin, vmax))
        ax2.set_xlabel('Time [%s] from %s UTC' % (tunit, from_gps(tmin)),
                       fontsize=15)
        ax2.set_xlim(0, (tmax - tmin) / scale_factor)
        ax2.set_ylim(fmin, fmax)
        ax2.set_yscale(scale)
        ax2.set_ylabel('Frequency [%s]' % funit, fontsize=15, labelpad=40)
        ax2.tick_params(which='both', labelleft=False, left=False)
        ax2.grid(False)
    # Calculate Power Spectral Density
    N = len(data[mask].value)
    delta_t = 1 / data.sample_rate.value
    delta_f = 1. / (N * delta_t)
    # py3 fix: use floor division — N / 2 is a float under Python 3 and
    # cannot be used as a slice index
    f = delta_f * numpy.arange(N // 2)
    f = f * 1000. if funit == 'mHz' else f
    PSD = abs(delta_t * fftpack.fft(data[mask].value)[:N // 2])**2
    psd = numpy.vstack((f, PSD)).T
    # Plot Power Spectral Density
    imin = abs(psd[:, 0] - fmin_log).argmin()
    imax = abs(psd[:, 0] - fmax_log).argmin()
    min_power = min(psd[imin:imax, 1]) if xmin == None else xmin
    max_power = max(psd[imin:imax, 1]) if xmax == None else xmax
    ax3.loglog(psd[:, 1], psd[:, 0], alpha=0.5)
    ax3.invert_xaxis()
    ax3.set_xlim(min_power, max_power)
    ax3.set_ylim(fmin_log, fmax_log)
    ax3.set_yscale(scale)
    ax3.set_ylabel('Frequency [%s]' % funit, fontsize=15)
    ax3.set_xlabel('PSD', fontsize=15)
    ax3.grid(b=True, which='major', alpha=0.7, ls='--')
    # Add color bar and save figure
    cb = fig.colorbar(img, cax=ax4)
    cb.set_ticks(LogLocator())
    cb.set_clim(vmin, vmax)
    ax4.set_ylabel('Power $|\mathrm{W}_v|^2$ $[\mu T^2/\mathrm{Hz}]$',
                   fontsize=15)
    plt.show() if fname == None else plt.savefig(fname)
    plt.close(fig)
def write_state_html(self, state, pre=None):
    """Write the '#main' HTML content for this `EventTriggerTab`.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    pre : `str`, optional
        pre-formatted introductory text to display above the plots;
        if omitted, ``self.foreword`` is used instead

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    page = markup.page()
    if self.error.get(state, None):
        # an upstream error was recorded for this state, so render an
        # alert box instead of any results
        level, message = self.error[state]
        # no segments, print warning
        page.div(class_='alert alert-%s' % level)
        page.p(message)
        page.p("If you believe this to be an error, please contact %s."
               % markup.oneliner.a('the DetChar group',
                                   class_='alert-link',
                                   href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        if pre is not None:
            page.add(str(pre))
        elif self.foreword:
            page.add(str(self.foreword))
        page.add(str(self.scaffold_plots(state=state)))
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
        if self.loudest:
            # get triggers
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 query=False)
            if self.filterstr is not None:
                table = table.filter(self.filterstr)
            tcol = get_time_column(table, self.etg)
            # set table headers (copies, so the config lists aren't
            # mutated by the insert below)
            headers = list(self.loudest['labels'])
            columns = list(self.loudest['columns'])
            if tcol in columns:
                # a time column is displayed, so add a human-readable
                # UTC column alongside it
                headers.insert(1, 'UTC time')
                date = True
            else:
                date = False
            # loop over rank columns
            for rank in self.loudest['rank']:
                try:
                    rankstr = self.loudest['labels'][columns.index(rank)]
                except ValueError:
                    rankstr = repr(rank)
                page.h2('Loudest events by %s' % rankstr)
                # sort by rank, loudest first
                rank = table[rank].argsort()[::-1]
                loudest = []
                i = 0
                dt = self.loudest['dt']
                # keep the N loudest events with at least dt separation
                # in time from each other
                while len(loudest) < self.loudest['N'] and i < rank.size:
                    e = table[rank[i]]
                    t = e[tcol]
                    if i == 0 or all(
                            abs((t - e2[tcol])) >= dt for e2 in loudest):
                        loudest.append(e)
                    i += 1
                # format table rows
                data = []
                for row in loudest:
                    data.append([])
                    for column in columns:
                        data[-1].append(
                            '%.3f' % float(row[column]))
                    if date:
                        # millisecond-precision UTC timestamp
                        data[-1].insert(
                            1, from_gps(row[tcol]).strftime(
                                '%B %d %Y %H:%M:%S.%f')[:-3])
                page.add(str(html.table(
                    headers, data, id='%s-loudest-table' % self.etg,
                    caption=("%d loudest <samp>%s</samp> (%s) events "
                             "by %s with minimum %ss separation" % (
                                 self.loudest['N'], self.channel, self.etg,
                                 rankstr, self.loudest['dt'])))))
        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            # weekly-mode pages use a 7-column layout, otherwise 4
            layout = get_mode() == Mode.week and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
    # write state information
    page.add(str(self.write_state_information(state)))
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def datetime(self): return from_gps(self.gpstime)
else: extent = (0, tmax, 0, f_in / 2) spec_im = spec_ax.imshow(np.full((2, 2), np.nan), aspect='auto', extent=extent, origin='lower') spec_im.get_cmap().set_bad(color='grey') # https://stackoverflow.com/a/46649061 time_cut, brms_dots = time_cut_ax.plot([], [], 'k', [], [], 'r') freq_cut, = freq_cut_ax.plot([], [], 'k') max_dots, = spec_ax.plot([], [], '.r') hline = freq_cut_ax.axhline(np.nan, c='r', lw=1) vline = time_cut_ax.axvline(np.nan, c='r', lw=1) g0 = data.t0.value spec_ax.set_xlabel('Time (s) after gps = {:d} ({:%Y-%m-%d %H:%M:%S} UTC)'.format(int(g0), from_gps(g0))) spec_ax.set_ylabel('Frequency (Hz)') veto = Veto(spec_ax) points = [ None, Points(spec_ax, color='r'), Points(spec_ax, color='lime'), Points(spec_ax, color='b'), None, None, None, None, None, veto,
maxNumCoinc = len(trigXData)*len(trigHData) # Get current working director folder = os.popen('pwd').readlines()[0].split('\n')[0] print 'folder: ', folder channelXName = trigxfile.split('_', 3)[3].split('.')[0] channelHName = trighfile.split('_', 3)[3].split('.')[0] segIdx = np.intersect1d(np.where(trigXData[:, 2] > startTime)[0], np.where(trigXData[:,2] < endTime)[0]) trigXData = trigXData[segIdx] segIdh = np.intersect1d(np.where(trigHData[:, 2] > startTime)[0], np.where(trigHData[:,2] < endTime)[0]) trigHData = trigHData[segIdh] [coincTrigH, coincTrigX] = bcv.mcoinc(maxNumCoinc, trigHData[:, 2], trigXData[:, 2], COINC_TIME_WINDOW, segLength, uniqueArgument) plt.figure() plt.suptitle('Triggers between %s and %s' %(gtime.from_gps(startTime), gtime.from_gps(endTime))) plt.subplot(2,1,1) ax = plt.gca() ax.errorbar(trigXData[:, 2], trigXData[:, 3], xerr=[trigXData[:,2] - trigXData[:, 0], trigXData[:, 1] - trigXData[:, 2]], fmt='o', label='Triggers') plt.plot(trigXData[coincTrigX,2], trigXData[coincTrigX,3], '*', label='coincident triggers') plt.xlabel('Central Times') plt.ylabel('Central Freqs') ax.set_title('Triggers of %s'%(channelXName)) plt.subplot(2,1,2) ax = plt.gca() ax.errorbar(trigHData[:, 2], trigHData[:, 3], xerr=[trigHData[:,2] - trigHData[:, 0], trigHData[:, 1] - trigHData[:, 2]], fmt='o', label='Triggers') plt.plot(trigHData[coincTrigH,2], trigHData[coincTrigH,3], '*', label='coincident triggers') plt.xlabel('Central Times') plt.ylabel('Central Freqs') ax.set_title('Triggers of %s'%(channelHName))
def write_state_html(self, state):
    """Write the '#main' HTML content for this `DailyAhopeTab`.

    Renders one of three alert pages if the analysis is missing, failed,
    or found no analysable segments; otherwise scaffolds the plots and
    (optionally) a table of the loudest events.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    daydir = os.path.split(self.segmentfile)[0]
    # did it run
    if not os.path.isdir(daydir):
        # no day directory: the analysis was never performed
        page = html.markup.page()
        page.div(class_='alert alert-warning')
        page.p("No analysis was performed for this period, "
               "please try again later.")
        page.p("If you believe these data should have been found, please "
               "contact %s." % html.markup.oneliner.a(
                   'the CBC DQ group', class_='alert-link',
                   href='mailto:[email protected]'))
        page.div.close()
    elif (not os.path.isfile(self.segmentfile) or
            len(self.states[0].active) != 0 and
            not os.path.isfile(self.inspiralcachefile)):
        # expected output files are missing: the analysis failed
        page = html.markup.page()
        page.div(class_='alert alert-danger')
        page.p("This analysis seems to have failed.")
        page.p("If you believe these data should have been found, please "
               "contact %s." % html.markup.oneliner.a(
                   'the CBC DQ group', class_='alert-link',
                   href='mailto:[email protected]'))
        page.div.close()
    elif len(self.states[0].active) == 0:
        # analysis ran, but there was nothing to analyse
        page = html.markup.page()
        page.div(class_='alert alert-info')
        page.p("This analysis found no segments over which to run.")
        page.p("If you believe this to be an error, please contact %s."
               % html.markup.oneliner.a('the CBC DQ group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        page = self.scaffold_plots(state=state)
        # link full results
        page.hr(class_='row-divider')
        page.div(class_='btn-group')
        page.a('Click here for the full Daily Ahope results',
               href=self.ihopepage, rel='external', target='_blank',
               class_='btn btn-default btn-info btn-xl')
        page.div.close()
        page.hr(class_='row-divider')
        if self.loudest:
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 query=False)
            rank = get_table_column(
                table, self.loudest['rank']).argsort()[::-1]
            loudest = []
            i = 0
            # select the N loudest events with at least dt separation
            # in end time from each other
            while len(loudest) < self.loudest['N'] and i < rank.size:
                t = table[rank[i]]
                if i == 0 or all([abs(float(t.get_end()) -
                                      float(t2.get_end())) >=
                                  self.loudest['dt'] for t2 in loudest]):
                    loudest.append(t)
                i += 1
            page.h1('Loudest events')
            page.p('The following table displays the %d loudest events as '
                   'recorded by Daily Ahope (with at least %s-second '
                   'separation).' % (self.loudest['N'], self.loudest['dt']))
            # copy, so repeated calls don't keep inserting 'UTC time'
            # into the configured label list
            headers = list(self.loudest['labels'])
            if 'time' in headers[0]:
                headers.insert(1, 'UTC time')
                date = True
            else:
                # fix: this used to read ``data = False``, which left
                # ``date`` undefined and raised NameError at the
                # ``if date:`` check below
                date = False
            data = []
            for row in loudest:
                data.append([])
                for column in self.loudest['columns']:
                    data[-1].append('%.3f'
                                    % float(get_row_value(row, column)))
                if date:
                    # millisecond-precision UTC timestamp
                    data[-1].insert(1, from_gps(row.get_end()).strftime(
                        '%B %d %Y, %H:%M:%S.%f')[:-3])
            page.add(str(html.data_table(headers, data, table='data')))
        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            # weekly-mode pages use a 7-column layout, otherwise 4
            layout = get_mode() == MODE_ENUM['WEEK'] and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))
        # link full results
        page.hr(class_='row-divider')
        page.div(class_='btn-group')
        page.a('Click here for the full Daily Ahope results',
               href=self.ihopepage, rel='external', target='_blank',
               class_='btn btn-default btn-info btn-xl')
        page.div.close()
        page.hr(class_='row-divider')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def test_from_gps(self): date = time.from_gps(GPS) self.assertEqual(date, DATE)
def write_state_html(self, state, pre=None):
    """Write the '#main' HTML content for this `EventTriggerTab`.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    pre : `str`, optional
        pre-formatted introductory text to display above the plots;
        if omitted, ``self.foreword`` is used instead

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    page = html.markup.page()
    if self.error.get(state, None):
        # an upstream error was recorded for this state, so render an
        # alert box instead of any results
        level, message = self.error[state]
        # no segments, print warning
        page.div(class_='alert alert-%s' % level)
        page.p(message)
        page.p("If you believe this to be an error, please contact %s."
               % html.markup.oneliner.a('the DetChar group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        if pre is not None:
            page.add(str(pre))
        elif self.foreword:
            page.add(str(self.foreword))
        page.add(str(self.scaffold_plots(state=state)))
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
        if self.loudest:
            # get triggers
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 filter=self.filterstr, query=False)
            # set table headers (copies, so the config lists aren't
            # mutated by the insert below)
            headers = list(self.loudest['labels'])
            columns = list(self.loudest['columns'])
            # only add a human-readable UTC column when the first
            # column looks like a GPS time
            if columns[0].endswith('time') or headers[0].endswith('time'):
                headers.insert(1, 'UTC time')
                date = True
                tcol = columns[0]
            else:
                date = False
                tcol = 'time'
            # loop over rank columns
            for rank in self.loudest['rank']:
                try:
                    rankstr = self.loudest['labels'][columns.index(rank)]
                except ValueError:
                    rankstr = repr(rank)
                page.h2('Loudest events by %s' % rankstr)
                # sort by rank, loudest first
                rank = table[rank].argsort()[::-1]
                loudest = []
                i = 0
                dt = self.loudest['dt']
                # keep the N loudest events with at least dt separation
                # in time from each other
                while len(loudest) < self.loudest['N'] and i < rank.size:
                    e = table[rank[i]]
                    t = e[tcol]
                    if i == 0 or all(
                            abs((t - e2[tcol])) >= dt for e2 in loudest):
                        loudest.append(e)
                    i += 1
                # format table rows
                data = []
                for row in loudest:
                    data.append([])
                    for column in columns:
                        data[-1].append(
                            '%.3f' % float(row[column]))
                    if date:
                        # millisecond-precision UTC timestamp
                        data[-1].insert(
                            1, from_gps(row[tcol]).strftime(
                                '%B %d %Y, %H:%M:%S.%f')[:-3])
                page.add(str(html.table(
                    headers, data,
                    caption=("%d loudest <samp>%s</samp> (%s) events by %s "
                             "with minimum %ss separation" % (
                                 self.loudest['N'], self.channel, self.etg,
                                 rankstr, self.loudest['dt'])))))
        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            # weekly-mode pages use a 7-column layout, otherwise 4
            layout = get_mode() == Mode.week and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
    # write state information
    page.add(str(self.write_state_information(state)))
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def write_state_html(self, state):
    """Write the '#main' HTML content for this `GraceDbTab`.

    Builds a table of the GraceDB events/superevents recovered for
    *state*, colouring each row by its labels, followed by a
    description of the query and a labelling legend.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    page = markup.page()
    # build table of events
    page.div(class_='scaffold well')
    page.table(class_='table table-condensed table-hover table-striped',
               id_='gracedb')
    # thead
    page.thead()
    page.tr()
    for head in self.headers:
        page.th(head)
    page.tr.close()
    page.thead.close()
    # tbody
    page.tbody()
    for event in sorted(self.events[str(state)],
                        key=lambda e: e['gpstime']):
        context = None
        try:
            labs = set(event['labels'].split(', '))
        except (AttributeError, KeyError):
            pass
        else:
            # choose a bootstrap context colour from the labels:
            # 'success' only when every label is a success label,
            # any other context on any overlap
            for ctx, labels in LABELS.items():
                if (
                        ctx == "success" and labs.union(labels) == labs
                        or labs.intersection(labels)
                ):
                    context = ctx
                    break
        if context:
            page.tr(class_=context)
        else:
            page.tr()
        for col in self.columns:
            if col == 'date':
                # superevents store their reference time as 't_0'
                gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                # millisecond-precision UTC timestamp
                page.td(from_gps(event[gpskey]).strftime(
                    '%B %d %Y %H:%M:%S.%f',
                )[:-3])
                continue
            try:
                v = event[col]
            except KeyError:
                # fall back to the GRB attributes, or '-' if absent
                try:
                    v = event['extra_attributes']['GRB'][col]
                    assert v is not None
                except (KeyError, AssertionError):
                    page.td('-')
                    continue
            if col in ("graceid", "superevent_id", "preferred_event"):
                # link IDs to the relevant GraceDB page
                page.td()
                tag = "superevents" if col == "superevent_id" else "events"
                href = '{}/{}/view/{}'.format(self.url, tag, v)
                title = "GraceDB {} page for {}".format(tag[:-1], v)
                page.a(v, title=title, href=href, target='_blank',
                       rel='external', class_="btn btn-info btn-xs")
                page.td.close()
            elif col not in ("gpstime", "t_0") and isinstance(v, float):
                page.td('%.3g' % v)
            elif col == "labels":
                page.td(", ".join(['<samp>%s</samp>' % l
                                   for l in sorted(labs)]))
            else:
                page.td(str(v))
        page.tr.close()
    page.tbody.close()
    page.table.close()
    if len(self.events[str(state)]) == 0:
        page.p("No events were recovered for this state.")
    else:
        page.button(
            'Export to CSV', class_='btn btn-default btn-table',
            onclick="exportTableToCSV('{name}.csv', '{name}')".format(
                name='gracedb'))
    page.div.close()  # scaffold well
    # query doc
    qurl = "{}/search/?query={}&query_type={}&results_format=S".format(
        self.url,
        self.query.replace(" ", "+"),
        getattr(self, "_query_type", "E"),
    )
    qlink = markup.oneliner.a(
        "here",
        href=qurl,
        target="_blank",
    )
    page.p("The above table was generated from a query to {} with the "
           "form <code>{}</code>. To view the results of the same query "
           "via the GraceDB web interface, click {}.".format(
               self.url, self.query, qlink),
    )
    # reference the labelling
    page.h4("Labelling reference")
    page.p("Events in the above table may have a context based on "
           "its labels as follows:")
    for c, labels in LABELS.items():
        labstr = ', '.join(['<samp>%s</samp>' % l for l in sorted(labels)])
        page.p(labstr, class_='bg-%s' % c, style='width: auto;')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def parameter_table(content=[], start=None, end=None, flag=None, id_='parameters', tableclass='table table-sm table-hover'): """Render an informative section with run parameters in HTML Parameters ---------- content: `list` of `tuple` of `str` collection of parameters to list start : `float` GPS start time of the analysis end : `float` GPS end time of the analysis flag : `str`, optional name of a data-quality state flag required for this analysis section : `str`, optional text to label the section header (``<h2>``), default: ``Parameters`` id_ : `str`, optional unique HTML identifier for this section, default: ``parameters`` tableclass : `str`, optional the ``class`` for the summary ``<table>``, defaults to a responsive Bootstrap table Returns ------- page : `~MarkupPy.markup.page` fully rendered table of parameters """ # front-load time and flag info common = [ ('Start time (UTC)', '{0} ({1})'.format(from_gps(start), start)), ('End time (UTC)', '{0} ({1})'.format(from_gps(end), end)), ] if flag is not None: common.append(('State flag', markup.oneliner.code(flag))) content = common + content + [ ('System prefix', markup.oneliner.code(sys.prefix)) ] # initialize page page = markup.page() page.table(class_=tableclass) # table body page.tbody() for row in content: col1, col2 = row page.tr() page.th(col1, scope='row') page.td(col2) page.tr.close() page.tbody.close() # close table and write command-line page.table.close() return page()
highPassCutoff, triggerListHSeg, triggerListXSeg, couplingModel, transFnXtoH, segStartTime, segEndTime, timeShift, outDirList, logFid, debugLevel) else: logFid.write('LOG: No triggers in this segment numTrigsHseg = %d numTrigsXseg = %d\n'%(numTrigsHseg, numTrigsXseg)) del triggerListHSeg, triggerListXSeg for iDir in xrange(len(outDirList)): textSummaryFID = open(outDirList[iDir] + '/summary.txt', 'w') # print summary of results textSummaryFID.write('# Summary file of veto analysis') textSummaryFID.write('Created by : %s\n' %(os.getenv('USER'))) textSummaryFID.write('Created on: %s\n'%(strftime("%Y-%m-%d"))) textSummaryFID.write('analysisStartTime UTC : %s\n' %(gtime.from_gps(analysisStartTime))) textSummaryFID.write('segEndTime UTC : %s\n' %(gtime.from_gps(segEndTime))) textSummaryFID.write('configurationFile : %s\n' %(configurationFile)) textSummaryFID.write('frameCacheFile : %s\n'%(frameCacheFile)) textSummaryFID.write('couplingModel :%s\n' %(couplingModel)) textSummaryFID.write('highPassCutoff : %3.2f\n' %(highPassCutoff)) textSummaryFID.write('outDir : %s\n' %(outDirList[iDir])) textSummaryFID.write('logFile : %s\n' %(logFile)) textSummaryFID.write('debugLevel : %d\n' %(debugLevel)) textSummaryFID.write('numTrigsH :%d\n' %(numTrigsH)) textSummaryFID.write('numTrigsX: %d\n' %(numTrigsX)) textSummaryFID.close() # Exit
def from_ini(cls, config, section, **kwargs):
    """Define a new `DailyAhopeTab` from a `ConfigParser`.

    Parameters
    ----------
    config : `ConfigParser`
        configuration containing the given ``section``

    section : `str`
        name of the section to parse

    **kwargs
        passed through to the parent ``from_ini``

    Returns
    -------
    `DailyAhopeTab`
        a new tab configured from ``section``

    Raises
    ------
    ValueError
        if the section declares a 'states' option, or if the chosen
        state name is already registered
    """
    # parse states
    ifo = config.get(DEFAULTSECT, 'ifo')
    start = config.getint(DEFAULTSECT, 'gps-start-time')
    end = config.getint(DEFAULTSECT, 'gps-end-time')
    # this tab owns exactly one state, so a user-supplied 'states'
    # option is an error
    if config.has_option(section, 'states'):
        raise ValueError("DailyAhopeTab does not support configuration of "
                         "multiple states, please use the 'state' option "
                         "to name the Hveto state")
    try:
        state = re_quote.sub('', config.get(section, 'state'))
    except NoOptionError:
        state = 'Daily Ahope'
    if state in globalv.STATES:
        raise ValueError("State name for DailyAhopeTab must be unique, "
                         "please do not select '%s'" % state)
    # register the state and point it at the ahope segment definition
    globalv.STATES[state] = SummaryState(state, known=(start, end))
    globalv.STATES[state].definition = '%s:ahope' % ifo
    config.set(section, 'states', state)
    # parse generic configuration
    new = super(DailyAhopeTab, cls).from_ini(config, section, **kwargs)
    new.channel = re_quote.sub('', config.get(section, 'channel'))
    for p in new.plots + new.subplots:
        p.etg = new.name.lower()
    # work out day directory and url
    utc = from_gps(new.span[0])
    basedir = os.path.normpath(config.get(section, 'base-directory'))
    daydir = os.path.join(basedir, utc.strftime('%Y%m'),
                          utc.strftime('%Y%m%d'))
    # map the public_html path onto a '/~user/...' URL
    home_, postbase = daydir.split('/public_html/', 1)
    user = os.path.split(home_)[1]
    new.ihopepage = '/~%s/%s/' % (user, postbase)
    # get cache options
    cachefile = config.get(section, 'inspiral-cache')
    new.inspiralcachefile = os.path.join(daydir, cachefile)
    cachefile = config.get(section, 'tmpltbank-cache')
    new.tmpltbankcachefile = os.path.join(daydir, cachefile)
    segfile = config.get(section, 'segment-file')
    new.segmentfile = os.path.join(daydir, segfile)
    # get loudest options
    if config.has_option(section, 'loudest'):
        # set defaults
        new.loudest = {
            'N': config.getint(section, 'loudest'),
            'columns': ['end', 'new_snr', 'snr', 'mchirp', 'mass1',
                        'mass2', 'reduced_chisq'],
            'rank': 'new_snr',
            'dt': 8,
        }
        # override from config
        # NOTE(review): `map(...)` returns a lazy iterator on
        # Python 3; downstream code appears to expect a list —
        # confirm the target Python version
        if config.has_option(section, 'loudest-columns'):
            new.loudest['columns'] = map(
                lambda s: re_quote.sub('', s),
                config.get(section, 'loudest-columns').split(','))
        if config.has_option(section, 'loudest-labels'):
            new.loudest['labels'] = map(
                lambda s: re_quote.sub('', s),
                config.get(section, 'loudest-labels').split(','))
        else:
            # default labels: title-cased column names
            new.loudest['labels'] = [' '.join(map(str.title, s.split('_')))
                                     for s in new.loudest['columns']]
        if config.has_option(section, 'loudest-rank'):
            new.loudest['rank'] = re_quote.sub(
                '', config.get(section, 'loudest-rank'))
        if config.has_option(section, 'loudest-dt'):
            new.loudest['dt'] = config.getfloat(section, 'loudest-dt')
    else:
        new.loudest = None
    return new
def write_state_html(self, state):
    """Write the '#main' HTML content for this `GraceDbTab`.

    Builds a table of the GraceDB events recovered for *state*,
    colouring each row according to its labels, followed by a
    description of the query and a labelling legend.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    page = html.markup.page()
    # build table of events
    page.div(class_='scaffold well')
    page.table(class_='table table-condensed table-hover table-striped')
    # thead
    page.thead()
    page.tr()
    for head in self.headers:
        page.th(head)
    page.tr.close()
    page.thead.close()
    # tbody
    page.tbody()
    for event in sorted(self.events[str(state)],
                        key=lambda e: e['gpstime']):
        # map the first matching label onto a bootstrap context colour
        context = None
        try:
            labels = event['labels'].split(', ')
        except (AttributeError, KeyError):
            pass
        else:
            for label in [
                    'ADVNO', 'H1NO', 'L1NO', 'DQV', 'INJ', 'EM_READY'
            ]:
                if label in labels:
                    context = LABELS[label]
                    break
        if context is not None:
            page.tr(class_=context)
        else:
            # fix: rows without a matching label previously never
            # opened a <tr>, producing malformed table markup
            page.tr()
        for col in self.columns:
            if col == 'date':
                # millisecond-precision UTC timestamp
                page.td(
                    from_gps(event['gpstime']).strftime(
                        '%B %d %Y, %H:%M:%S.%f')[:-3])
                continue
            try:
                v = event[col]
            except KeyError:
                # fall back to the GRB attributes, or '-' if absent
                try:
                    v = event['extra_attributes']['GRB'][col]
                    assert v is not None
                except (KeyError, AssertionError):
                    page.td('-')
                    continue
            if col == 'graceid':
                # link the event ID to its GraceDB page
                page.td()
                href = '%s/events/view/%s' % (self.url, v)
                page.a(v, href=href, target='_blank', rel='external')
                page.td.close()
            elif col != 'gpstime' and isinstance(v, float):
                page.td('%.3g' % v)
            else:
                page.td(str(v))
        page.tr.close()
    page.tbody.close()
    page.table.close()
    if len(self.events[str(state)]) == 0:
        page.p("No events were recovered for this state.")
    page.div.close()  # scaffold well
    # query doc
    page.p("The above table was generated from a query to %s with the "
           "form <code>%s</code>." % (self.url, self.query))
    # reference the labelling
    page.h4("Labelling reference")
    page.p("Events in the above table may have a context based on "
           "its labels as follows:")
    contexts = set(LABELS.values())
    for c in contexts:
        labels = [k for k, v in LABELS.items() if v == c]
        labstr = ', '.join(['<b>%s</b>' % l for l in labels])
        page.p(labstr, class_='bg-%s' % c, style='width: auto;')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def from_ini(cls, config, section): """Create a new `SummaryState` from a section in a `ConfigParser`. Parameters ---------- config : :class:`~gwsumm.config.GWConfigParser` customised configuration parser containing given section section : `str` name of section to parse Returns ------- `SummaryState` a new state, with attributes set from the options in the configuration """ from .all import ALLSTATE, generate_all_state config = GWSummConfigParser.from_configparser(config) # get span times start = config.getint(section, 'gps-start-time') end = min(globalv.NOW, config.getint(section, 'gps-end-time')) # get parameters params = dict(config.nditems(section)) # parse name name = params.pop('name', section) if re.match('state[-\s]', name): name = section[6:] # get hours hours = params.pop('hours', None) if hours is not None: segs = re.split('(,|, )', hours)[::2] hours = [] offset = 0 for seg in segs: try: # parse hour segment hours.append(map(float, seg.split('-', 1))) except ValueError: # parse time-zone if seg == segs[-1]: if seg.lower() == 'utc': offset = 0 elif seg.lower() == 'local': try: ifo = config.get(DEFAULTSECT, 'ifo') except NoOptionError: raise ValueError("The relevant IFO must be " "given either from the --ifo " "command-line option, or the " "[DEFAULT] section of any " "INI file") offset = get_timezone_offset(ifo, from_gps(start)) else: offset = get_timezone_offset(seg, from_gps(start)) else: raise # apply time-zone for i, (h0, h1) in enumerate(hours): hours[i] = (h0 - offset / 3600., h1 - offset / 3600.) # generate state if name == ALLSTATE: return generate_all_state(start, end, register=False, **params) else: return cls(name, known=[(start, end)], hours=hours, **params)
ylim=(_mean - 2 * _std, _mean + 2 * _std), **plotkwargs) plot(absp, color='g', ylim=(0, 1), hlines=[0.2], **plotkwargs) plot(ppol, color='b', **plotkwargs) plot(spol, color='r', **plotkwargs) if pk2pk: plot(c_p, color='b', hlines=[0.01], ylim=(0, 0.05), **plotkwargs) plot(c_s, color='r', hlines=[0.01], ylim=(0, 0.05), **plotkwargs) else: plot(c_p, color='b', hlines=[0.02], ylim=(0, 0.2), **plotkwargs) plot(c_s, color='r', hlines=[0.02], ylim=(0, 0.2), **plotkwargs) if False: ppol = ppol.value[:300] spol = spol.value[:300] pmax, pmin = ppol.max(), ppol.min() smax, smin = spol.max(), spol.min() plt.clf() fig, ax = plt.subplots(1, 1, figsize=(7, 7)) ax.plot(ppol, spol, 'ko', markersize=2) #ax.set_ylim(0,0.3) #ax.set_xlim(0,0.3) ax.set_ylabel('spol [V]') ax.set_xlabel('ppol [V]') fname = str(from_gps(start)) + '_UTC' fname = fname.replace(' ', '_') plt.title(fname) #plt.savefig('tmp_lisa_{0}.png'.format(fname)) plt.savefig('tmp_lisa.png') plt.close()
def write_state_html(self, state):
    """Write the '#main' HTML content for this `GraceDbTab`.

    Builds a table of the GraceDB events/superevents recovered for
    *state*, colouring each row by its labels, followed by a
    description of the query and a labelling legend.

    Parameters
    ----------
    state : `~gwsumm.state.SummaryState`
        the state to generate HTML for

    Returns
    -------
    path : `str`
        the path of the HTML frame file written for this state
    """
    page = markup.page()
    # build table of events
    page.div(class_='scaffold well')
    page.table(class_='table table-condensed table-hover table-striped',
               id_='gracedb')
    # thead
    page.thead()
    page.tr()
    for head in self.headers:
        page.th(head)
    page.tr.close()
    page.thead.close()
    # tbody
    page.tbody()
    for event in sorted(self.events[str(state)],
                        key=lambda e: e['gpstime']):
        context = None
        try:
            labs = set(event['labels'].split(', '))
        except (AttributeError, KeyError):
            pass
        else:
            # choose a bootstrap context colour from the labels:
            # 'success' only when every label is a success label,
            # any other context on any overlap
            for ctx, labels in LABELS.items():
                if (ctx == "success" and labs.union(labels) == labs or
                        labs.intersection(labels)):
                    context = ctx
                    break
        if context:
            page.tr(class_=context)
        else:
            page.tr()
        for col in self.columns:
            if col == 'date':
                # superevents store their reference time as 't_0'
                gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                # millisecond-precision UTC timestamp
                page.td(
                    from_gps(event[gpskey]).strftime(
                        '%B %d %Y %H:%M:%S.%f',
                    )[:-3])
                continue
            try:
                v = event[col]
            except KeyError:
                # fall back to the GRB attributes, or '-' if absent
                try:
                    v = event['extra_attributes']['GRB'][col]
                    assert v is not None
                except (KeyError, AssertionError):
                    page.td('-')
                    continue
            if col in ("graceid", "superevent_id", "preferred_event"):
                # link IDs to the relevant GraceDB page
                page.td()
                tag = "superevents" if col == "superevent_id" else "events"
                href = '{}/{}/view/{}'.format(self.url, tag, v)
                title = "GraceDB {} page for {}".format(tag[:-1], v)
                page.a(v, title=title, href=href, target='_blank',
                       rel='external', class_="btn btn-info btn-xs")
                page.td.close()
            elif col not in ("gpstime", "t_0") and isinstance(v, float):
                page.td('%.3g' % v)
            elif col == "labels":
                page.td(", ".join(
                    ['<samp>%s</samp>' % l for l in sorted(labs)]))
            else:
                page.td(str(v))
        page.tr.close()
    page.tbody.close()
    page.table.close()
    if len(self.events[str(state)]) == 0:
        page.p("No events were recovered for this state.")
    else:
        page.button(
            'Export to CSV', class_='btn btn-default btn-table',
            onclick="exportTableToCSV('{name}.csv', '{name}')".format(
                name='gracedb'))
    page.div.close()  # scaffold well
    # query doc
    qurl = "{}/search/?query={}&query_type={}&results_format=S".format(
        self.url,
        self.query.replace(" ", "+"),
        getattr(self, "_query_type", "E"),
    )
    qlink = markup.oneliner.a(
        "here",
        href=qurl,
        target="_blank",
    )
    page.p(
        "The above table was generated from a query to {} with the "
        "form <code>{}</code>. To view the results of the same query "
        "via the GraceDB web interface, click {}.".format(
            self.url, self.query, qlink),
    )
    # reference the labelling
    page.h4("Labelling reference")
    page.p("Events in the above table may have a context based on "
           "its labels as follows:")
    for c, labels in LABELS.items():
        labstr = ', '.join(
            ['<samp>%s</samp>' % l for l in sorted(labels)])
        page.p(labstr, class_='bg-%s' % c, style='width: auto;')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
c2v = 20.0 / 2**15 gain = 10**(30.0 / 20) blrms100_300 = TimeSeries.read( './data2/blrms/Z_100_300mHz_1211817600_1245372032.gwf', 'K1:PEM-EX1_SEIS_Z_SENSINF_IN1_DQ') blrms100_300 = blrms100_300 / gain * c2v / 1000 * 1e6 #blrms200_300 = TimeSeries.read('./data2/blrms/Z_200_300mHz_1211817600_1245372032.gwf','K1:PEM-EX1_SEIS_Z_SENSINF_IN1_DQ') #blrms200_300 = blrms200_300/gain*c2v/1000*1e6 blrms100_200 = TimeSeries.read( './data2/blrms/Z_100_200mHz_1211817600_1245372032.gwf', 'K1:PEM-EX1_SEIS_Z_SENSINF_IN1_DQ') blrms100_200 = blrms100_200 / gain * c2v / 1000 * 1e6 t0 = blrms100_300.t0 print from_gps(t0.value) tlen = 3600 * 24 * 30 * 12 * u.s #t0 = t0 + tlen tend = t0 + tlen # blrms100_300 = blrms100_300.crop(t0.value, (t0 + tlen).value) #blrms200_300 = blrms200_300.crop(t0.value,(t0+tlen).value) blrms100_200 = blrms100_200.crop(t0.value, (t0 + tlen).value) # toyama = toyama.crop(t0.value, (t0 + tlen).value) takayama = takayama.crop(t0.value, (t0 + tlen).value) inotani = inotani.crop(t0.value, (t0 + tlen).value) kamioka = kamioka.crop(t0.value, (t0 + tlen).value) shiomisaki = shiomisaki.crop(t0.value, (t0 + tlen).value) aomori = aomori.crop(t0.value, (t0 + tlen).value) tanegashima = tanegashima.crop(t0.value, (t0 + tlen).value)
else: logFid.write( 'LOG: No triggers in this segment numTrigsHseg = %d numTrigsXseg = %d\n' % (numTrigsHseg, numTrigsXseg)) del triggerListHSeg, triggerListXSeg for iDir in xrange(len(outDirList)): textSummaryFID = open(outDirList[iDir] + '/summary.txt', 'w') # print summary of results textSummaryFID.write('# Summary file of veto analysis') textSummaryFID.write('Created by : %s\n' % (os.getenv('USER'))) textSummaryFID.write('Created on: %s\n' % (strftime("%Y-%m-%d"))) textSummaryFID.write('analysisStartTime UTC : %s\n' % (gtime.from_gps(analysisStartTime))) textSummaryFID.write('segEndTime UTC : %s\n' % (gtime.from_gps(segEndTime))) textSummaryFID.write('configurationFile : %s\n' % (configurationFile)) textSummaryFID.write('frameCacheFile : %s\n' % (frameCacheFile)) textSummaryFID.write('couplingModel :%s\n' % (couplingModel)) textSummaryFID.write('highPassCutoff : %3.2f\n' % (highPassCutoff)) textSummaryFID.write('outDir : %s\n' % (outDirList[iDir])) textSummaryFID.write('logFile : %s\n' % (logFile)) textSummaryFID.write('debugLevel : %d\n' % (debugLevel)) textSummaryFID.write('numTrigsH :%d\n' % (numTrigsH)) textSummaryFID.write('numTrigsX: %d\n' % (numTrigsX)) textSummaryFID.close() # Exit
from gwpy.timeseries import TimeSeries from gwpy.time import from_gps from numpy import sqrt, array, arange, nonzero, argmax, log10 from pylab import savefig, specgram import sys, string import matplotlib.pyplot as plt from matplotlib.image import NonUniformImage import matplotlib.mlab as mlab from os import makedirs, path # read input argumentsi (update this to use argparse) if len(sys.argv) == 6: ifo = sys.argv[1] # H1 or L1 start_time = int(sys.argv[2]) # GPS start time startutc = str(from_gps(start_time)) # get UTC time dur = int(sys.argv[3]) # duration in seconds thresh = int(sys.argv[4]) # threshold frequency [Hz] (add as input argument later) plotspec = int(sys.argv[5]) # 0 for no spectrogram, 1 for spectrogram end_time = int(start_time) + int(dur) else: print "Usage: python scatMon.py ifo start_time dur thresh plospec" sys.exit(2) ### Set the channel that witnesses fringes (to plot specgram for) witness_base = "GDS-CALIB_STRAIN" #witness_base = "LSC-MICH_IN1_DQ" #witness_base = '%s:ASC-Y_TR_A_NSUM_OUT_DQ' % ifo #witness_base = 'LSC-SRCL_IN1_DQ' #witness_base = "LSC-REFL_A_RF9_Q_ERR_DQ" #witness_base = "ASC-AS_A_RF45_Q_PIT_OUT_DQ" #witness_base = "ASC-AS_B_RF36_Q_PIT_OUT_DQ"
def write_state_html(self, state):
    """Write the '#main' HTML content for this `GraceDbTab`.

    Builds a table of GraceDB events/superevents for the given state,
    colour-coding rows by their labels, then appends a description of
    the query used and a labelling legend, and writes the result to the
    HTML frame file for that state.

    Parameters
    ----------
    state : `SummaryState`-like
        the state whose events (``self.events[str(state)]``) are rendered

    Returns
    -------
    path : `str`
        the path of the frame file written (``self.frames[idx]``)
    """
    page = markup.page()
    # build table of events
    page.table(class_='table table-sm table-hover table-striped mt-2',
               id_='gracedb')
    # thead
    page.thead()
    page.tr()
    for head in self.headers:
        page.th(head)
    page.tr.close()
    page.thead.close()
    # tbody: one row per event, sorted chronologically by GPS time
    page.tbody()
    for event in sorted(self.events[str(state)],
                        key=lambda e: e['gpstime']):
        # pick a bootstrap context (row colour) from the event's labels
        context = None
        try:
            labs = set(event['labels'].split(', '))
        except (AttributeError, KeyError):
            # no labels (KeyError) or labels not a string (AttributeError)
            pass
        else:
            for ctx, labels in LABELS.items():
                # 'success' requires ALL of the event's labels to be in
                # the success set; other contexts match on ANY overlap
                if (
                        ctx == 'success' and labs.union(labels) == labs or
                        labs.intersection(labels)
                ):
                    context = ctx
                    break
        if context:
            page.tr(class_='table-%s' % context)
        else:
            page.tr()
        for col in self.columns:
            if col == 'date':
                # superevents store the reference time as 't_0'
                gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                # trim microseconds to milliseconds with [:-3]
                page.td(from_gps(event[gpskey]).strftime(
                    '%B %d %Y %H:%M:%S.%f',
                )[:-3])
                continue
            elif col.lower() == 'dqr' and 'superevent_id' in event:
                # link the data-quality report only if it exists upstream
                page.td()
                sid = event['superevent_id']
                href = ('{0}/apiweb/superevents/{1}/files/'
                        'dqr.html'.format(self.url, sid))
                try:
                    self.connection.get(href)
                except self.exception:
                    page.p('—')
                else:
                    title = 'Data-quality report for {}'.format(sid)
                    page.a('DQR', title=title, href=href, target='_blank',
                           rel='external', class_='btn btn-info btn-sm')
                page.td.close()
                continue
            elif col.lower() == 'dqr':
                # non-superevents never have a DQR
                page.td()
                page.p('—')
                page.td.close()
                continue
            try:
                v = event[col]
            except KeyError:
                # fall back to GRB extra attributes for missing columns
                try:
                    v = event['extra_attributes']['GRB'][col]
                    assert v is not None
                except (KeyError, AssertionError):
                    page.td('-')
                    continue
            if col in ('graceid', 'superevent_id', 'preferred_event'):
                # render IDs as buttons linking back to GraceDB
                page.td()
                tag = 'superevents' if col == 'superevent_id' else 'events'
                href = '{}/{}/view/{}'.format(self.url, tag, v)
                title = 'GraceDB {} page for {}'.format(tag[:-1], v)
                page.a(v, title=title, href=href, target='_blank',
                       rel='external', class_='btn btn-info btn-sm')
                page.td.close()
            elif col not in ('gpstime', 't_0') and isinstance(v, float):
                # 3 significant figures for float columns (not times)
                page.td('%.3g' % v)
            elif col == 'labels':
                # NOTE(review): `labs` is only bound if the try above
                # succeeded; if event['labels'] exists but isn't a string
                # this raises NameError — confirm upstream guarantees
                page.td(', '.join(
                    ['<samp>%s</samp>' % l for l in sorted(labs)]))
            else:
                page.td(str(v))
        page.tr.close()
    page.tbody.close()
    page.table.close()
    if len(self.events[str(state)]) == 0:
        page.p('No events were recovered for this state.')
    else:
        # CSV export handled client-side via data-* attributes
        page.button(
            'Export to CSV',
            class_='btn btn-outline-secondary btn-table mt-2',
            **{'data-table-id': 'gracedb', 'data-filename': 'gracedb.csv'})
    # query doc: reproduce the GraceDB query so users can re-run it
    qurl = '{}/search/?query={}&query_type={}&results_format=S'.format(
        self.url,
        self.query.replace(' ', '+'),
        getattr(self, '_query_type', 'E'),
    )
    qlink = markup.oneliner.a(
        'here',
        href=qurl,
        target='_blank',
    )
    page.p('The above table was generated from a query to {} with the '
           'form <code>{}</code>. To view the results of the same query '
           'via the GraceDB web interface, click {}.'.format(
               self.url, self.query, qlink), class_='mt-2')
    # reference the labelling: legend mapping labels to row colours
    page.h4('Labelling reference')
    page.p('Events in the above table may have a context based on '
           'its labels as follows:')
    for c, labels in LABELS.items():
        # 'warning' background is light enough for dark text
        c = (c if c == 'warning' else '%s text-white' % c)
        labstr = ', '.join(['<samp>%s</samp>' % l for l in sorted(labels)])
        page.p(labstr, class_='bg-%s pl-2' % c, style='width: auto;')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def write_state_html(self, state):
    """Write the '#main' HTML content for this `EventTriggerTab`.

    Renders either an alert box (if trigger generation recorded an error
    for this state) or the scaffolded plots, optionally followed by
    tables of the loudest events and a sub-plot gallery, then writes the
    page to the frame file for this state.

    Parameters
    ----------
    state : `SummaryState`-like
        the state to render

    Returns
    -------
    path : `str`
        the path of the frame file written (``self.frames[idx]``)
    """
    if self.error.get(state, None):
        level, message = self.error[state]
        # no segments, print warning
        page = html.markup.page()
        page.div(class_='alert alert-%s' % level)
        page.p(message)
        page.p("If you believe this to be an error, please contact %s."
               % html.markup.oneliner.a('the DetChar group',
                                        class_='alert-link',
                                        href='mailto:[email protected]'))
        page.div.close()
    else:
        # otherwise, carry on...
        page = self.scaffold_plots(state=state)
        # link full results
        if self.url:
            page.hr(class_='row-divider')
            page.div(class_='btn-group')
            page.a('Click here for the full %s results' % self.name,
                   href=self.url, rel='external', target='_blank',
                   class_='btn btn-default btn-info btn-xl')
            page.div.close()
            page.hr(class_='row-divider')
        if self.loudest:
            page.h1('Loudest events')
            page.p('The following table(s) displays the %d loudest events '
                   'as recorded by %s (with at least %s-second '
                   'separation).' % (self.loudest['N'], self.etg,
                                     self.loudest['dt']))
            # get triggers (cached; query=False avoids re-fetching)
            table = get_triggers(self.channel, self.plots[0].etg, state,
                                 query=False)
            # set table headers; if the first column is a time, add a
            # human-readable UTC column right after it
            headers = list(self.loudest['labels'])
            if 'time' in headers[0]:
                headers.insert(1, 'UTC time')
                date = True
            else:
                date = False
            # loop over rank columns: one table per ranking statistic
            for rank in self.loudest['rank']:
                try:
                    rankstr = self.loudest['labels'][
                        self.loudest['columns'].index(rank)]
                except ValueError:
                    rankstr = repr(rank)
                page.h3('Loudest events by %s' % rankstr)
                # indices of rows sorted loudest-first by this column
                rank = get_table_column(table, rank).argsort()[::-1]
                loudest = []
                i = 0
                # greedily keep the loudest rows that are at least
                # self.loudest['dt'] seconds from every kept row
                while len(loudest) < self.loudest['N'] and i < rank.size:
                    t = table[rank[i]]
                    if i == 0 or all([
                            abs(float(get_row_value(t, 'time')) -
                                float(get_row_value(t2, 'time'))) >=
                            self.loudest['dt'] for t2 in loudest]):
                        loudest.append(t)
                    i += 1
                # format the selected rows as strings for the HTML table
                data = []
                for row in loudest:
                    data.append([])
                    for column in self.loudest['columns']:
                        data[-1].append(
                            '%.3f' % float(get_row_value(row, column)))
                    if date:
                        # first column holds the GPS time; [:-3] trims
                        # microseconds down to milliseconds
                        data[-1].insert(
                            1, from_gps(get_row_value(
                                row, self.loudest['columns'][0])).strftime(
                                    '%B %d %Y, %H:%M:%S.%f')[:-3])
                page.add(str(html.data_table(headers, data, table='data')))
        if self.subplots:
            page.hr(class_='row-divider')
            page.h1('Sub-plots')
            # 7-across layout in WEEK mode, otherwise 4-across
            layout = get_mode() == MODE_ENUM['WEEK'] and [7] or [4]
            plist = [p for p in self.subplots if p.state in [state, None]]
            page.add(str(self.scaffold_plots(plots=plist, state=state,
                                             layout=layout)))
            # link full results (repeated at the bottom of a long page)
            if self.url:
                page.hr(class_='row-divider')
                page.div(class_='btn-group')
                page.a('Click here for the full %s results' % self.name,
                       href=self.url, rel='external', target='_blank',
                       class_='btn btn-default btn-info btn-xl')
                page.div.close()
                page.hr(class_='row-divider')
    # write to file
    idx = self.states.index(state)
    with open(self.frames[idx], 'w') as fobj:
        fobj.write(str(page))
    return self.frames[idx]
def plot(self, filename, in_counts=False, ts=None, upper_lim=True,
         freq_min=None, freq_max=None,
         factor_min=None, factor_max=None, fig_w=15, fig_h=6):
    """
    Export a coupling function plot from the data

    Measured ('Real') factors are drawn as green circles and upper
    limits as open triangles, on log-log axes, annotated with the
    background/injection times and band width.

    Parameters
    ----------
    filename : str
        Target path for the saved figure.
    in_counts : bool
        If True, convert coupling function to counts, and treat data as such.
    ts : time.time object
        Timestamp object.
    upper_lim : bool
        If True, include upper limits in the plot. Otherwise just plot
        measured (real) values.
    freq_min : float
        Minimum frequency (x-axis).
    freq_max : float
        Maximum frequency (x_axis).
    factor_min : float
        Minimum value of coupling factor axis (y-axis).
    factor_max : float
        Maximum value of coupling factor axis (y-axis).
    fig_w : float or int
        Figure width.
    fig_h : float or int
        Figure height.

    Returns
    -------
    fig : matplotlib.figure.Figure or None
        The figure that was saved, or None if there was nothing to plot.
    """
    # Select the data series and axis labelling for the requested units
    if in_counts:
        factors = self.values_in_counts
        unit = 'Counts'
        type_c = ''
    else:
        factors = self.values
        unit = str(self.unit)
        type_c = self.coupling
    # Create time stamp if not provided
    if ts is None:
        ts = time.time()
    # X-AXIS LIMIT (default to the full measured frequency range)
    if freq_min is None:
        freq_min = self.freqs[0]
    if freq_max is None:
        freq_max = self.freqs[-1]
    # Y-AXIS LIMITS: ignore NaN/inf and near-zero placeholder values
    factor_values = factors[np.isfinite(factors) & (factors > 1e-30)]
    if len(factor_values) == 0:
        # Nothing to plot; warn and bail out (returns None)
        print('Warning: No coupling factors found for channel ' + self.name)
        return
    # Pad the y-range by a factor of 3 either side by default
    if factor_min is None:
        factor_min = min(factor_values) / 3
    if factor_max is None:
        factor_max = max(factor_values) * 3
    # ORGANIZE DATA FOR COUPLING FUNCTION PLOT
    # real/upper are [x-list, y-list] pairs split by the per-bin flag
    real = [[], []]
    upper = [[], []]
    for i in range(len(self.freqs)):
        if (self.flags[i] == 'Upper Limit'):
            upper[1].append(factors[i])
            upper[0].append(self.freqs[i])
        elif (self.flags[i] == 'Real'):
            real[1].append(factors[i])
            real[0].append(self.freqs[i])
    # PLOT SIZE OPTIONS BASED ON CHANNEL TYPE
    # magnetometer channels get larger, bolder markers
    ms = (8. if 'MAG' in self.name else 4.)
    edgew_circle = (.7 if 'MAG' in self.name else .5)
    edgew_triangle = (1 if 'MAG' in self.name else .7)
    ms_triangle = ms * (.8 if 'MAG' in self.name else .6)
    # CREATE FIGURE FOR COUPLING FUNCTION PLOT
    fig = plt.figure()
    ax = fig.add_subplot(111)
    fig.set_figheight(fig_h)
    fig.set_figwidth(fig_w)
    # Nudge the axes up slightly to make room for the bottom figtexts
    p1 = ax.get_position()
    p2 = [p1.x0, p1.y0 + 0.02, p1.width, p1.height - 0.02]
    ax.set_position(p2)
    # PLOT COUPLING FUNCTION DATA
    plt.plot(real[0], real[1], 'o', color='lime', markersize=ms,
             markeredgewidth=edgew_circle, label='Measured', zorder=6)
    if upper_lim:
        plt.plot(upper[0], upper[1], '^', markersize=ms_triangle,
                 markerfacecolor='none', markeredgecolor='0.0',
                 markeredgewidth=edgew_triangle, label='Upper Limits',
                 zorder=2)
    # CREATE LEGEND, LABELS, AND TITLES
    legend = plt.legend(prop={'size': 18}, loc=1)
    legend.get_frame().set_alpha(0.5)
    plt.ylabel('{0} Coupling [m/{1}]'.format(type_c, unit), size=18)
    plt.xlabel(r'Frequency [Hz]', size=18)
    plt.title(self.name.replace('_DQ', '').replace('_', ' ') +
              ' - Coupling Function', size=20)
    # Suptitle carries the (local) wall-clock timestamp of the export
    plt.suptitle(
        datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'),
        fontsize=14, y=1.01)
    # Annotate background/injection GPS times (and their UTC conversions)
    str_quiet_time = 'Background Time: {}\n({})'.format(
        from_gps(self.t_bg), self.t_bg)
    str_inj_time = 'Injection Time: {}\n({})'.format(
        from_gps(self.t_inj), self.t_inj)
    plt.figtext(.95, 0, str_quiet_time, ha='right', fontsize=12, color='b')
    plt.figtext(.05, 0, str_inj_time, fontsize=12, color='b')
    plt.figtext(.5, 0, 'Band Width: {:1.3f} Hz'.format(self.df),
                ha='center', va='top', fontsize=12, color='b')
    plt.figtext(.05, .96, 'Injection name:\n{} '.format(self.injection_name),
                fontsize=12, color='b')
    plt.figtext(.95, .99, 'Measured coupling factors: {}'.format(len(real[1])),
                ha='right', fontsize=12, color='b')
    if upper_lim:
        plt.figtext(.95, .96, 'Upper limit coupling factors: {}'.format(len(
            upper[1])), ha='right', fontsize=12, color='b')
    # CUSTOMIZE AXES
    plt.xlim([freq_min, freq_max])
    plt.ylim([factor_min, factor_max])
    # NOTE(review): nonposx/nonposy were removed in matplotlib 3.3
    # (replaced by `nonpositive`) — this pins an older matplotlib
    ax.set_xscale('log', nonposx='clip')
    ax.set_yscale('log', nonposy='clip')
    ax.autoscale(False)
    # NOTE(review): grid(b=...) and tick.label are deprecated in newer
    # matplotlib (use visible=/tick.label1) — confirm target version
    plt.grid(b=True, which='major', color='0.0', linestyle=':', linewidth=1,
             zorder=0)
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='0.6', linestyle=':', zorder=0)
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(18)
    # EXPORT PLOT
    plt.savefig(filename, bbox_inches='tight')
    plt.close()
    return fig