def test_recarray_read(self):
    tablename = lsctables.strip_table_name(self.TABLE_CLASS.tableName)
    # simple read
    table = GWRecArray.read(self.TEST_XML_FILE, format=tablename)
    self.assertEqual(len(table), 2052)
    # read with multiprocessing
    table = GWRecArray.read(self.TEST_XML_FILE, format=tablename, nproc=2)
    # read with get_as_columns
    table = GWRecArray.read(self.TEST_XML_FILE, format=tablename,
                            get_as_columns=True)
def test_get_table_row_methods(self):
    testfile = SnglBurstTableTestCase.TEST_XML_FILE
    table = GWRecArray.read(testfile, format='sngl_burst')
    # test simple column
    snr = get_table_column(table, 'snr')
    nptest.assert_array_equal(snr, table['snr'])
    # test 'time' special-case
    time = get_table_column(table, 'time')
    nptest.assert_array_equal(
        time, table['peak_time'] + table['peak_time_ns'] * 1e-9)
    # test row
    row = table[0]
    self.assertEqual(get_row_value(row, 'snr'), row['snr'])
    self.assertEqual(get_row_value(row, 'time'),
                     row['peak_time'] + row['peak_time_ns'] * 1e-9)
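
# A minimal sketch (not part of the test suite) of the access pattern the
# tests above exercise: get_table_column combines 'peak_time' and
# 'peak_time_ns' into a derived float 'time' column, and get_row_value does
# the same for a single row.  The file name below is illustrative only.
def _example_table_access():
    events = GWRecArray.read('H1-SNGL_BURST-0-1.xml.gz', format='sngl_burst')
    times = get_table_column(events, 'time')  # peak_time + 1e-9 * peak_time_ns
    loudest = events[get_table_column(events, 'snr').argmax()]
    return times, get_row_value(loudest, 'time')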
def get_triggers(channel, etg, segments, config=GWSummConfigParser(),
                 cache=None, columns=None, query=True, multiprocess=False,
                 ligolwtable=None, return_=True):
    """Read a table of transient event triggers for a given channel.
    """
    key = '%s,%s' % (str(channel), etg.lower())
    # convert input segments to a segmentlist (for convenience)
    if isinstance(segments, DataQualityFlag):
        segments = segments.active
    segments = SegmentList(segments)
    # get processes
    if multiprocess is True:
        nproc = count_free_cores()
    elif multiprocess is False:
        nproc = 1
    else:
        nproc = multiprocess
    # find LIGO_LW table for this ETG
    try:
        TableClass = get_etg_table(etg)
    except KeyError:
        TableClass = None
    # work out columns
    if columns is None:
        try:
            columns = config.get(etg, 'columns').split(',')
        except (NoSectionError, NoOptionError):
            columns = None
    # read segments from global memory
    try:
        havesegs = globalv.TRIGGERS[key].segments
    except KeyError:
        new = segments
    else:
        new = segments - havesegs
    # read new triggers
    if query and abs(new) != 0:
        ntrigs = 0
        vprint("    Grabbing %s triggers for %s" % (etg, str(channel)))
        # store read kwargs
        kwargs = get_etg_read_kwargs(config, etg, exclude=['columns'])
        kwargs['columns'] = columns
        if etg.lower().replace('-', '_') in ['cwb', 'pycbc_live']:
            kwargs['ifo'] = get_channel(channel).ifo
        if 'format' not in kwargs and 'ahope' not in etg.lower():
            kwargs['format'] = etg.lower()
        # set up ligolw options if needed
        if TableClass is not None:
            contenthandler = get_partial_contenthandler(TableClass)
            lsctables.use_in(contenthandler)
        # loop over segments
        for segment in new:
            # find trigger files
            if cache is None and etg.lower() == 'hacr':
                raise NotImplementedError("HACR parsing has not been "
                                          "implemented.")
            if cache is None and etg.lower() in ['kw', 'kleinewelle']:
                kwargs['filt'] = lambda t: t.channel == str(channel)
            if cache is None:
                try:
                    segcache = trigfind.find_trigger_urls(str(channel), etg,
                                                          segment[0],
                                                          segment[1])
                except ValueError as e:
                    warnings.warn("Caught %s: %s"
                                  % (type(e).__name__, str(e)))
                    continue
                else:
                    for regex in TRIGFIND_FORMAT:
                        if regex.match(etg):
                            kwargs['format'] = TRIGFIND_FORMAT[regex]
                            if TRIGFIND_FORMAT[regex] in lsctables.TableByName:
                                kwargs['get_as_columns'] = True
                            break
                if etg.lower() == 'omega':
                    kwargs.setdefault('format', 'omega')
                else:
                    kwargs.setdefault('format', 'ligolw')
            else:
                segcache = cache
            if isinstance(segcache, Cache):
                segcache = segcache.sieve(segment=segment)
                segcache = segcache.checkfilesexist()[0]
                segcache.sort(key=lambda x: x.segment[0])
                if etg == 'pycbc_live':
                    # remove empty HDF5 files
                    segcache = type(segcache)(
                        filter_pycbc_live_files(segcache, ifo=kwargs['ifo']))
            # if no files, skip
            if len(segcache) == 0:
                continue
            # read triggers
            if kwargs.get('format', None) == 'ligolw':
                kwargs['contenthandler'] = contenthandler
            try:
                # try directly reading a numpy.recarray
                table = GWRecArray.read(segcache, nproc=nproc, **kwargs)
            except Exception as e:
                # back up to LIGO_LW
                if TableClass is not None and 'No reader' in str(e):
                    try:
                        table = TableClass.read(segcache, **kwargs)
                    except Exception:
                        raise e
                    else:
                        table = table.to_recarray(get_as_columns=True)
                else:
                    raise
            # append new events to existing table
            try:
                csegs = cache_segments(segcache)
            except AttributeError:
                csegs = SegmentList()
            table.segments = csegs
            t2 = keep_in_segments(table, SegmentList([segment]), etg)
            add_triggers(t2, key, csegs)
            ntrigs += len(t2)
            vprint(".")
        vprint(" | %d events read\n" % ntrigs)
    # if asked to read triggers, but didn't actually read any,
    # create an empty table so that subsequent calls don't raise KeyErrors
    if query and key not in globalv.TRIGGERS:
        if TableClass is not None:
            tab = lsctables.New(TableClass, columns=columns).to_recarray(
                get_as_columns=True)
        else:
            tab = GWRecArray((0,), dtype=[(c, float) for c in columns])
        tab.segments = SegmentList()
        add_triggers(tab, key, tab.segments)
    # return the requested triggers, restricted to the requested segments
    if return_:
        return keep_in_segments(globalv.TRIGGERS[key], segments, etg)
    else:
        return
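
# A hedged usage sketch for get_triggers: the channel name, ETG, GPS times,
# and column list below are assumptions chosen for illustration, not values
# taken from this module; only the call signature comes from the function
# above.
def _example_get_triggers():
    from gwpy.segments import Segment
    # one 32-second segment, read with 2 parallel processes
    segs = SegmentList([Segment(1126259446, 1126259478)])
    return get_triggers('L1:GDS-CALIB_STRAIN', 'omicron', segs,
                        columns=['time', 'snr', 'frequency'],
                        multiprocess=2)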