def test_read_write_gwf(self):
    """Test round-trip read/write of this table through a GWF file.

    Skips the test if the optional GWF I/O backend is not importable.
    """
    table = self.create(100, ['time', 'blah', 'frequency'])
    columns = table.dtype.names
    tempdir = tempfile.mkdtemp()
    try:
        # build the target path directly; tempfile.mktemp() is
        # deprecated and race-prone, and the private tempdir already
        # guarantees a collision-free name
        fp = os.path.join(tempdir, 'table.gwf')

        # check write
        table.write(fp, 'test_read_write_gwf')

        # check read gives back same table
        t2 = self.TABLE.read(fp, 'test_read_write_gwf', columns=columns)
        utils.assert_table_equal(table, t2, meta=False, almost_equal=True)

        # check selections works
        t3 = self.TABLE.read(fp, 'test_read_write_gwf',
                             columns=columns, selection='frequency>500')
        utils.assert_table_equal(
            filter_table(t2, 'frequency>500'), t3)
    except ImportError as e:
        # GWF I/O needs an optional dependency; skip rather than fail
        pytest.skip(str(e))
    finally:
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
def test_fetch_hacr(self):
    """Test ``fetch('hacr', ...)`` against a mocked HACR database."""
    source = self.create(100, names=HACR_COLUMNS)
    # patch pymysql.connect at the point it gets looked up; when
    # pymysql is absent, patch the name bound in gwpy.table.io.hacr
    try:
        from pymysql import connect
    except ImportError:
        mockee = 'gwpy.table.io.hacr.connect'
    else:
        mockee = 'pymysql.connect'
    with mock.patch(mockee) as mock_connect:
        mock_connect.return_value = mock_hacr_connection(
            source, 123, 456)

        # a plain query should hand back the whole table
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456)
        utils.assert_table_equal(source, fetched)

        # restricting columns should hand back only those columns
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                   columns=['gps_start', 'snr'])
        utils.assert_table_equal(source['gps_start', 'snr'], fetched)

        # combining columns with a row selection should filter too
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                   columns=['gps_start', 'snr'],
                                   selection='freq_central>500')
        expected = filter_table(
            source, 'freq_central>500')['gps_start', 'snr']
        utils.assert_table_equal(expected, fetched)
def test_read_write_gwf(self):
    """Test round-trip read/write of this table through a GWF file.

    Skips the test if the optional GWF I/O backend is not importable.
    """
    table = self.create(100, ['time', 'blah', 'frequency'])
    columns = table.dtype.names
    tempdir = tempfile.mkdtemp()
    try:
        # build the target path directly; tempfile.mktemp() is
        # deprecated and race-prone, and the private tempdir already
        # guarantees a collision-free name
        fp = os.path.join(tempdir, 'table.gwf')

        # check write
        table.write(fp, 'test_read_write_gwf')

        # check read gives back same table
        t2 = self.TABLE.read(fp, 'test_read_write_gwf', columns=columns)
        utils.assert_table_equal(table, t2, meta=False, almost_equal=True)

        # check selections works
        t3 = self.TABLE.read(fp, 'test_read_write_gwf',
                             columns=columns, selection='frequency>500')
        utils.assert_table_equal(filter_table(t2, 'frequency>500'), t3)
    except ImportError as e:
        # GWF I/O needs an optional dependency; skip rather than fail
        pytest.skip(str(e))
    finally:
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
def test_fetch_hacr(self):
    """Test ``fetch('hacr', ...)`` against a mocked HACR database."""
    source = self.create(100, names=HACR_COLUMNS)
    # patch pymysql.connect at the point it gets looked up; when
    # pymysql is absent, patch the name bound in gwpy.table.io.hacr
    try:
        from pymysql import connect
    except ImportError:
        mockee = 'gwpy.table.io.hacr.connect'
    else:
        mockee = 'pymysql.connect'
    with mock.patch(mockee) as mock_connect:
        mock_connect.return_value = mock_hacr_connection(
            source, 123, 456)

        # a plain query should hand back the whole table
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456)
        utils.assert_table_equal(source, fetched)

        # restricting columns should hand back only those columns
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                   columns=['gps_start', 'snr'])
        utils.assert_table_equal(source['gps_start', 'snr'], fetched)

        # combining columns with a row selection should filter too
        fetched = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                   columns=['gps_start', 'snr'],
                                   selection='freq_central>500')
        expected = filter_table(
            source, 'freq_central>500')['gps_start', 'snr']
        utils.assert_table_equal(expected, fetched)
def test_read_write_root(self, table):
    """Test round-trip read/write of ``table`` through a ROOT file."""
    tempdir = tempfile.mkdtemp()
    try:
        # build the target path directly; tempfile.mktemp() is
        # deprecated and race-prone, and the private tempdir already
        # guarantees a collision-free name
        fp = os.path.join(tempdir, 'table.root')

        # check write
        table.write(fp)

        def _read(*args, **kwargs):
            # helper: read the file back as the same table type
            return type(table).read(fp, *args, **kwargs)

        # check read gives back same table
        utils.assert_table_equal(table, _read())

        # check that reading table from file with multiple trees without
        # specifying fails
        table.write(fp, treename='test')
        with pytest.raises(ValueError) as exc:
            _read()
        assert str(exc.value).startswith('Multiple trees found')

        # test selections work
        t2 = _read(treename='test', selection='frequency > 500')
        utils.assert_table_equal(
            t2, filter_table(table, 'frequency > 500'))
    finally:
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
def fetchall():
    # Mocked cursor ``fetchall``: the answer depends on which table the
    # most recently parsed SQL statement (``cursor._query``, closed over
    # from the enclosing scope) was querying.
    if cursor._query.get_real_name() == 'job':
        # job-table query: a single row of (job_id, start, stop),
        # using the ``start``/``stop`` values from the enclosing scope
        return [(1, start, stop)]
    if cursor._query.get_real_name() == 'mhacr':
        # trigger-table query: recover the requested column names from
        # the first sublist of the parsed statement (the SELECT
        # identifier list)
        columns = list(map(
            str,
            list(cursor._query.get_sublists())[0].get_identifiers()))
        # recover the WHERE conditions from the third sublist;
        # NOTE(review): ``selections[3:]`` presumably skips the leading
        # channel/time conditions that the mock handles implicitly —
        # confirm against the real HACR query builder
        selections = list(map(
            str,
            list(cursor._query.get_sublists())[2].get_sublists()))
        return filter_table(table, selections[3:])[columns]
def test_read_pycbc_live(self):
    """Test reading a PyCBC-live HDF5 file into this table type."""
    import h5py
    table = self.create(
        100, names=['a', 'b', 'c', 'chisq', 'd', 'e', 'f',
                    'mass1', 'mass2', 'snr'])
    table.meta['ifo'] = 'X1'
    fp = os.path.join(tempfile.mkdtemp(), 'X1-Live-0-0.hdf')
    try:
        # write table in pycbc_live format (by hand)
        with h5py.File(fp, 'w') as h5f:
            group = h5f.create_group('X1')
            for col in table.columns:
                group.create_dataset(data=table[col], name=col)

        # check that we can read
        t2 = self.TABLE.read(fp)
        utils.assert_table_equal(table, t2)
        # check keyword arguments result in same table
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live')
        utils.assert_table_equal(table, t2)
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
        utils.assert_table_equal(table, t2)

        # add another IFO, then assert that reading the table without
        # specifying the IFO fails; open with an explicit 'a' mode:
        # h5py >= 3 defaults to read-only, which would break create_group
        with h5py.File(fp, 'a') as h5f:
            h5f.create_group('Z1')
        with pytest.raises(ValueError) as exc:
            self.TABLE.read(fp)
        assert str(exc.value).startswith(
            'PyCBC live HDF5 file contains dataset groups')

        # but check that we can still read the original
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
        utils.assert_table_equal(table, t2)

        # assert processed columns works
        t2 = self.TABLE.read(fp, ifo='X1', columns=['mchirp', 'new_snr'])
        mchirp = (table['mass1'] * table['mass2']) ** (3/5.) / (
            table['mass1'] + table['mass2']) ** (1/5.)
        utils.assert_array_equal(t2['mchirp'], mchirp)

        # test with selection
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1',
                             selection='snr>.5')
        utils.assert_table_equal(filter_table(table, 'snr>.5'), t2)
    finally:
        if os.path.isdir(os.path.dirname(fp)):
            shutil.rmtree(os.path.dirname(fp))
def test_read_write_root(self, table):
    """Test round-trip read/write of ``table`` through a ROOT file,
    including compound selections with segment filters.
    """
    tempdir = tempfile.mkdtemp()
    try:
        # build the target path directly; tempfile.mktemp() is
        # deprecated and race-prone, and the private tempdir already
        # guarantees a collision-free name
        fp = os.path.join(tempdir, 'table.root')

        # check write
        table.write(fp)

        def _read(*args, **kwargs):
            # helper: read the file back as the same table type
            return type(table).read(fp, *args, **kwargs)

        # check read gives back same table
        utils.assert_table_equal(table, _read())

        # check that reading table from file with multiple trees without
        # specifying fails
        table.write(fp, treename='test')
        with pytest.raises(ValueError) as exc:
            _read()
        assert str(exc.value).startswith('Multiple trees found')

        # test selections work
        segs = SegmentList([Segment(100, 200), Segment(400, 500)])
        t2 = _read(treename='test',
                   selection=['200 < frequency < 500',
                              ('time', filters.in_segmentlist, segs)])
        utils.assert_table_equal(
            t2,
            filter_table(table, 'frequency > 200', 'frequency < 500',
                         ('time', filters.in_segmentlist, segs)),
        )
    finally:
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
def test_read_pycbc_live(self):
    """Test reading a PyCBC-live HDF5 file, including the ``loudest``
    index, PSD metadata, and the ``extended_metadata`` keyword.
    """
    import h5py
    table = self.create(100, names=[
        'a', 'b', 'c', 'chisq', 'd', 'e', 'f', 'mass1', 'mass2', 'snr',
    ])
    loudest = (table['snr'] > 500).nonzero()[0]
    psd = FrequencySeries(random.randn(1000), df=1)
    fp = os.path.join(tempfile.mkdtemp(), 'X1-Live-0-0.hdf')
    try:
        # write table in pycbc_live format (by hand)
        with h5py.File(fp, 'w') as h5f:
            group = h5f.create_group('X1')
            for col in table.columns:
                group.create_dataset(data=table[col], name=col)
            group.create_dataset('loudest', data=loudest)
            group.create_dataset('psd', data=psd.value)
            group['psd'].attrs['delta_f'] = psd.df.to('Hz').value

        # check that we can read
        t2 = self.TABLE.read(fp)
        utils.assert_table_equal(table, t2)
        # and check metadata was recorded correctly
        assert t2.meta['ifo'] == 'X1'

        # check keyword arguments result in same table
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live')
        utils.assert_table_equal(table, t2)
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')

        # assert loudest works
        t2 = self.TABLE.read(fp, loudest=True)
        utils.assert_table_equal(table.filter('snr > 500'), t2)

        # check extended_metadata=True works (default)
        t2 = self.TABLE.read(fp, extended_metadata=True)
        utils.assert_table_equal(table, t2)
        utils.assert_array_equal(t2.meta['loudest'], loudest)
        utils.assert_quantity_sub_equal(
            t2.meta['psd'], psd,
            exclude=['name', 'channel', 'unit', 'epoch'])

        # check extended_metadata=False works
        t2 = self.TABLE.read(fp, extended_metadata=False)
        assert t2.meta == {'ifo': 'X1'}

        # double-check that loudest and extended_metadata=False work
        t2 = self.TABLE.read(fp, loudest=True, extended_metadata=False)
        utils.assert_table_equal(table.filter('snr > 500'), t2)
        assert t2.meta == {'ifo': 'X1'}

        # add another IFO, then assert that reading the table without
        # specifying the IFO fails; open with an explicit 'a' mode:
        # h5py >= 3 defaults to read-only, which would break create_group
        with h5py.File(fp, 'a') as h5f:
            h5f.create_group('Z1')
        with pytest.raises(ValueError) as exc:
            self.TABLE.read(fp)
        assert str(exc.value).startswith(
            'PyCBC live HDF5 file contains dataset groups')

        # but check that we can still read the original
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
        utils.assert_table_equal(table, t2)

        # assert processed columns works
        t2 = self.TABLE.read(fp, ifo='X1', columns=['mchirp', 'new_snr'])
        mchirp = (table['mass1'] * table['mass2']) ** (3 / 5.) / (
            table['mass1'] + table['mass2']) ** (1 / 5.)
        utils.assert_array_equal(t2['mchirp'], mchirp)

        # test with selection
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1',
                             selection='snr>.5')
        utils.assert_table_equal(filter_table(table, 'snr>.5'), t2)
    finally:
        if os.path.isdir(os.path.dirname(fp)):
            shutil.rmtree(os.path.dirname(fp))
def test_read_pycbc_live(self):
    """Test reading a PyCBC-live HDF5 file, including the ``loudest``
    index, PSD metadata, and the ``extended_metadata`` keyword.
    """
    import h5py
    table = self.create(
        100, names=['a', 'b', 'c', 'chisq', 'd', 'e', 'f',
                    'mass1', 'mass2', 'snr'])
    loudest = (table['snr'] > 500).nonzero()[0]
    psd = FrequencySeries(random.randn(1000), df=1)
    fp = os.path.join(tempfile.mkdtemp(), 'X1-Live-0-0.hdf')
    try:
        # write table in pycbc_live format (by hand)
        with h5py.File(fp, 'w') as h5f:
            group = h5f.create_group('X1')
            for col in table.columns:
                group.create_dataset(data=table[col], name=col)
            group.create_dataset('loudest', data=loudest)
            group.create_dataset('psd', data=psd.value)
            group['psd'].attrs['delta_f'] = psd.df.to('Hz').value

        # check that we can read
        t2 = self.TABLE.read(fp)
        utils.assert_table_equal(table, t2)
        # and check metadata was recorded correctly
        assert t2.meta['ifo'] == 'X1'

        # check keyword arguments result in same table
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live')
        utils.assert_table_equal(table, t2)
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')

        # assert loudest works
        t2 = self.TABLE.read(fp, loudest=True)
        utils.assert_table_equal(table.filter('snr > 500'), t2)

        # check extended_metadata=True works (default)
        t2 = self.TABLE.read(fp, extended_metadata=True)
        utils.assert_table_equal(table, t2)
        utils.assert_array_equal(t2.meta['loudest'], loudest)
        utils.assert_quantity_sub_equal(
            t2.meta['psd'], psd,
            exclude=['name', 'channel', 'unit', 'epoch'])

        # check extended_metadata=False works
        t2 = self.TABLE.read(fp, extended_metadata=False)
        assert t2.meta == {'ifo': 'X1'}

        # double-check that loudest and extended_metadata=False work
        t2 = self.TABLE.read(fp, loudest=True, extended_metadata=False)
        utils.assert_table_equal(table.filter('snr > 500'), t2)
        assert t2.meta == {'ifo': 'X1'}

        # add another IFO, then assert that reading the table without
        # specifying the IFO fails; open with an explicit 'a' mode:
        # h5py >= 3 defaults to read-only, which would break create_group
        with h5py.File(fp, 'a') as h5f:
            h5f.create_group('Z1')
        with pytest.raises(ValueError) as exc:
            self.TABLE.read(fp)
        assert str(exc.value).startswith(
            'PyCBC live HDF5 file contains dataset groups')

        # but check that we can still read the original
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
        utils.assert_table_equal(table, t2)

        # assert processed columns works
        t2 = self.TABLE.read(fp, ifo='X1', columns=['mchirp', 'new_snr'])
        mchirp = (table['mass1'] * table['mass2']) ** (3/5.) / (
            table['mass1'] + table['mass2']) ** (1/5.)
        utils.assert_array_equal(t2['mchirp'], mchirp)

        # test with selection
        t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1',
                             selection='snr>.5')
        utils.assert_table_equal(filter_table(table, 'snr>.5'), t2)
    finally:
        if os.path.isdir(os.path.dirname(fp)):
            shutil.rmtree(os.path.dirname(fp))