Example #1
    def test_read_write_gwf(self):
        table = self.create(100, ['time', 'blah', 'frequency'])
        columns = table.dtype.names
        tempdir = tempfile.mkdtemp()
        try:
            fp = tempfile.mktemp(suffix='.gwf', dir=tempdir)

            # check write
            table.write(fp, 'test_read_write_gwf')

            # check read gives back same table
            t2 = self.TABLE.read(fp, 'test_read_write_gwf', columns=columns)
            utils.assert_table_equal(table, t2, meta=False, almost_equal=True)

            # check selection works
            t3 = self.TABLE.read(fp, 'test_read_write_gwf',
                                 columns=columns, selection='frequency>500')
            utils.assert_table_equal(
                filter_table(t2, 'frequency>500'), t3)

        except ImportError as e:
            pytest.skip(str(e))
        finally:
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
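The explicit mkdtemp / mktemp / shutil.rmtree bookkeeping in this example can be replaced by a context-managed temporary directory on Python 3. A minimal sketch of the same GWF round-trip, assuming the same `table` and `TABLE` objects as above (the 'test_read_write_gwf' channel name is reused from the example):

import os
import tempfile

# Minimal sketch (Python 3 only): the same GWF write/read round-trip using a
# context-managed temporary directory, so no manual shutil.rmtree is needed.
# `table` and `TABLE` are assumed to be the same objects as in the example.
with tempfile.TemporaryDirectory() as tmpdir:
    path = os.path.join(tmpdir, 'test.gwf')
    table.write(path, 'test_read_write_gwf')
    t2 = TABLE.read(path, 'test_read_write_gwf', columns=table.dtype.names)
    # the directory and its contents are removed automatically on exit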
Example #2
    def test_filter(self, table):
        # check simple filter
        lowf = table.filter('frequency < 100')
        assert isinstance(lowf, type(table))
        assert len(lowf) == 11
        assert isclose(lowf['frequency'].max(), 96.5309156606)

        # check filtering everything returns an empty table
        assert len(table.filter('snr>5', 'snr<=5')) == 0

        # check compounding works
        loud = table.filter('snr > 100')
        lowfloud = table.filter('frequency < 100', 'snr > 100')
        brute = type(table)(rows=[row for row in lowf if row in loud],
                            names=table.dtype.names)
        utils.assert_table_equal(brute, lowfloud)

        # check double-ended filter
        midf = table.filter('100 < frequency < 1000')
        utils.assert_table_equal(
            midf,
            table.filter('frequency > 100').filter('frequency < 1000'))

        # check unicode parsing (PY2) returns the same table
        loud2 = table.filter(u'snr > 100')
        utils.assert_table_equal(loud, loud2)
Example #3
    def test_read_write_root(self, table):
        tempdir = tempfile.mkdtemp()
        try:
            fp = tempfile.mktemp(suffix='.root', dir=tempdir)

            # check write
            table.write(fp)

            def _read(*args, **kwargs):
                return type(table).read(fp, *args, **kwargs)

            # check read gives back same table
            utils.assert_table_equal(table, _read())

            # check that reading a table from a file containing multiple
            # trees fails without specifying the tree name
            table.write(fp, treename='test')
            with pytest.raises(ValueError) as exc:
                _read()
            assert str(exc.value).startswith('Multiple trees found')

            # test selections work
            t2 = _read(treename='test', selection='frequency > 500')
            utils.assert_table_equal(
                t2, filter_table(table, 'frequency > 500'))

        finally:
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
Example #4
    def test_search(self):
        try:
            t2 = self.TABLE.search(uniqueID="8FHTgA8MEu", howmany=1)
        except (URLError, SSLError) as e:
            pytest.skip(str(e))

        import json
        with open(TEST_JSON_RESPONSE_FILE) as f:
            table = GravitySpyTable(json.load(f))

        utils.assert_table_equal(table, t2)
Example #5
    def test_read_pycbc_live(self):
        import h5py
        table = self.create(
            100, names=['a', 'b', 'c', 'chisq', 'd', 'e', 'f',
                        'mass1', 'mass2', 'snr'])
        table.meta['ifo'] = 'X1'
        fp = os.path.join(tempfile.mkdtemp(), 'X1-Live-0-0.hdf')
        try:
            # write table in pycbc_live format (by hand)
            with h5py.File(fp, 'w') as h5f:
                group = h5f.create_group('X1')
                for col in table.columns:
                    group.create_dataset(data=table[col], name=col)

            # check that we can read
            t2 = self.TABLE.read(fp)
            utils.assert_table_equal(table, t2)

            # check keyword arguments result in same table
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live')
            utils.assert_table_equal(table, t2)
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
            utils.assert_table_equal(table, t2)

            # add another IFO, then assert that reading the table without
            # specifying the IFO fails
            with h5py.File(fp, 'a') as h5f:  # append mode, to add a group
                h5f.create_group('Z1')
            with pytest.raises(ValueError) as exc:
                self.TABLE.read(fp)
            assert str(exc.value).startswith(
                'PyCBC live HDF5 file contains dataset groups')

            # but check that we can still read the original
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
            utils.assert_table_equal(table, t2)

            # assert processed columns work
            t2 = self.TABLE.read(fp, ifo='X1', columns=['mchirp', 'new_snr'])
            mchirp = (table['mass1'] * table['mass2']) ** (3/5.) / (
                table['mass1'] + table['mass2']) ** (1/5.)
            utils.assert_array_equal(t2['mchirp'], mchirp)

            # test with selection
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live',
                                 ifo='X1', selection='snr>.5')
            utils.assert_table_equal(filter_table(table, 'snr>.5'), t2)
        finally:
            if os.path.isdir(os.path.dirname(fp)):
                shutil.rmtree(os.path.dirname(fp))
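For reference, the processed-columns check above recomputes the chirp mass from the mass1 and mass2 columns. A standalone sketch of that formula (chirp_mass is a hypothetical helper here, not part of the gwpy API):

import numpy

def chirp_mass(mass1, mass2):
    # chirp mass M_c = (m1 * m2)**(3/5) / (m1 + m2)**(1/5),
    # the same expression used in the processed-columns check above
    mass1 = numpy.asarray(mass1, dtype=float)
    mass2 = numpy.asarray(mass2, dtype=float)
    return (mass1 * mass2) ** (3. / 5.) / (mass1 + mass2) ** (1. / 5.)

# e.g. chirp_mass(table['mass1'], table['mass2']) reproduces the 'mchirp'
# column requested via columns=['mchirp', 'new_snr']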
Example #6
    def test_read_write_ascii(self, table, fmtname):
        fmt = 'ascii.%s' % fmtname.lower()
        with tempfile.NamedTemporaryFile(suffix='.txt', mode='w+') as f:
            print(f.name)
            # check write/read returns the same table
            table.write(f, format=fmt)
            f.seek(0)
            utils.assert_table_equal(table, self.TABLE.read(f, format=fmt),
                                     almost_equal=True)

        with tempfile.NamedTemporaryFile(suffix='.txt') as f:
            # check that reading a blank file fails with a column-name error
            with pytest.raises(InconsistentTableError) as exc:
                self.TABLE.read(f, format=fmt)
            assert str(exc.value) == ('No column names found in %s header'
                                      % fmtname)
Example #7
    def test_read_write_ligolw_property_columns(self):
        table = self.create(100, ['peak', 'snr', 'central_freq'],
                            ['f8', 'f4', 'f4'])
        with tempfile.NamedTemporaryFile(suffix='.xml') as f:
            # write table
            table.write(f, format='ligolw', tablename='sngl_burst')

            # read raw ligolw and check gpsproperty was unpacked properly
            llw = io_ligolw.read_table(f, tablename='sngl_burst')
            for col in ('peak_time', 'peak_time_ns'):
                assert col in llw.columnnames
            with io_ligolw.patch_ligotimegps():
                utils.assert_array_equal(llw.get_peak(), table['peak'])

            # read table and assert gpsproperty was repacked properly
            t2 = self.TABLE.read(f, columns=table.colnames,
                                 use_numpy_dtypes=True)
            utils.assert_table_equal(t2, table, almost_equal=True)
Example #8
    def test_fetch_hacr(self):
        table = self.create(100, names=HACR_COLUMNS)
        try:
            from pymysql import connect
        except ImportError:
            mockee = 'gwpy.table.io.hacr.connect'
        else:
            mockee = 'pymysql.connect'
        with mock.patch(mockee) as mock_connect:
            mock_connect.return_value = mock_hacr_connection(
                table, 123, 456)

            # test simple query returns the full table
            t2 = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456)
            utils.assert_table_equal(table, t2)

            # test column selection works
            t2 = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                  columns=['gps_start', 'snr'])
            utils.assert_table_equal(table['gps_start', 'snr'], t2)

            # test selection works alongside column selection
            t2 = self.TABLE.fetch('hacr', 'X1:TEST-CHANNEL', 123, 456,
                                  columns=['gps_start', 'snr'],
                                  selection='freq_central>500')
            utils.assert_table_equal(
                filter_table(table, 'freq_central>500')['gps_start', 'snr'],
                t2)
Example #9
    def test_read_write_root(self, table):
        tempdir = tempfile.mkdtemp()
        try:
            fp = tempfile.mktemp(suffix='.root', dir=tempdir)

            # check write
            table.write(fp)

            def _read(*args, **kwargs):
                return type(table).read(fp, *args, **kwargs)

            # check read gives back same table
            utils.assert_table_equal(table, _read())

            # check that reading a table from a file containing multiple
            # trees fails without specifying the tree name
            table.write(fp, treename='test')
            with pytest.raises(ValueError) as exc:
                _read()
            assert str(exc.value).startswith('Multiple trees found')

            # test selections work
            segs = SegmentList([Segment(100, 200), Segment(400, 500)])
            t2 = _read(treename='test',
                       selection=['200 < frequency < 500',
                                  ('time', filters.in_segmentlist, segs)])
            utils.assert_table_equal(
                t2, filter_table(table,
                                 'frequency > 200',
                                 'frequency < 500',
                                 ('time', filters.in_segmentlist, segs)),
            )

        finally:
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
Example #10
    def test_filter_in_segmentlist(self, table):
        print(table)
        # check filtering on segments works
        segs = SegmentList([Segment(100, 200), Segment(400, 500)])
        inseg = table.filter(('time', filters.in_segmentlist, segs))
        brute = type(table)(rows=[row for row in table if row['time'] in segs],
                            names=table.colnames)
        utils.assert_table_equal(inseg, brute)

        # check empty segmentlist is handled well
        utils.assert_table_equal(
            table.filter(('time', filters.in_segmentlist, SegmentList())),
            type(table)(names=table.colnames))

        # check inverse works
        notsegs = SegmentList([Segment(0, 1000)]) - segs
        utils.assert_table_equal(
            inseg, table.filter(('time', filters.not_in_segmentlist, notsegs)))
        utils.assert_table_equal(
            table,
            table.filter(('time', filters.not_in_segmentlist, SegmentList())))
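The brute-force comparison above relies on testing each GPS time for membership in a SegmentList, and the inverse check uses a complement segment list. A short usage sketch of those operations (the segment values are the ones used in the test):

from gwpy.segments import Segment, SegmentList

# the same segment list as in the test above
segs = SegmentList([Segment(100, 200), Segment(400, 500)])

print(150 in segs)   # True: 150 lies inside Segment(100, 200)
print(300 in segs)   # False: 300 falls between the two segments

# complement within Segment(0, 1000), as used for the not_in_segmentlist check
notsegs = SegmentList([Segment(0, 1000)]) - segs
# notsegs now spans 0-100, 200-400 and 500-1000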
Example #11
    def test_read_pycbc_live(self):
        import h5py
        table = self.create(100,
                            names=[
                                'a', 'b', 'c', 'chisq', 'd', 'e', 'f', 'mass1',
                                'mass2', 'snr'
                            ])
        loudest = (table['snr'] > 500).nonzero()[0]
        psd = FrequencySeries(random.randn(1000), df=1)
        fp = os.path.join(tempfile.mkdtemp(), 'X1-Live-0-0.hdf')
        try:
            # write table in pycbc_live format (by hand)
            with h5py.File(fp, 'w') as h5f:
                group = h5f.create_group('X1')
                for col in table.columns:
                    group.create_dataset(data=table[col], name=col)
                group.create_dataset('loudest', data=loudest)
                group.create_dataset('psd', data=psd.value)
                group['psd'].attrs['delta_f'] = psd.df.to('Hz').value

            # check that we can read
            t2 = self.TABLE.read(fp)
            utils.assert_table_equal(table, t2)
            # and check metadata was recorded correctly
            assert t2.meta['ifo'] == 'X1'

            # check keyword arguments result in same table
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live')
            utils.assert_table_equal(table, t2)
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
            utils.assert_table_equal(table, t2)

            # assert loudest works
            t2 = self.TABLE.read(fp, loudest=True)
            utils.assert_table_equal(table.filter('snr > 500'), t2)

            # check extended_metadata=True works (default)
            t2 = self.TABLE.read(fp, extended_metadata=True)
            utils.assert_table_equal(table, t2)
            utils.assert_array_equal(t2.meta['loudest'], loudest)
            utils.assert_quantity_sub_equal(
                t2.meta['psd'],
                psd,
                exclude=['name', 'channel', 'unit', 'epoch'])

            # check extended_metadata=False works
            t2 = self.TABLE.read(fp, extended_metadata=False)
            assert t2.meta == {'ifo': 'X1'}

            # double-check that loudest and extended_metadata=False work
            t2 = self.TABLE.read(fp, loudest=True, extended_metadata=False)
            utils.assert_table_equal(table.filter('snr > 500'), t2)
            assert t2.meta == {'ifo': 'X1'}

            # add another IFO, then assert that reading the table without
            # specifying the IFO fails
            with h5py.File(fp, 'a') as h5f:  # append mode, to add a group
                h5f.create_group('Z1')
            with pytest.raises(ValueError) as exc:
                self.TABLE.read(fp)
            assert str(exc.value).startswith(
                'PyCBC live HDF5 file contains dataset groups')

            # but check that we can still read the original
            t2 = self.TABLE.read(fp, format='hdf5.pycbc_live', ifo='X1')
            utils.assert_table_equal(table, t2)

            # assert processed columns work
            t2 = self.TABLE.read(fp, ifo='X1', columns=['mchirp', 'new_snr'])
            mchirp = (table['mass1'] * table['mass2'])**(3 / 5.) / (
                table['mass1'] + table['mass2'])**(1 / 5.)
            utils.assert_array_equal(t2['mchirp'], mchirp)

            # test with selection
            t2 = self.TABLE.read(fp,
                                 format='hdf5.pycbc_live',
                                 ifo='X1',
                                 selection='snr>.5')
            utils.assert_table_equal(filter_table(table, 'snr>.5'), t2)
        finally:
            if os.path.isdir(os.path.dirname(fp)):
                shutil.rmtree(os.path.dirname(fp))
Example #12
    def test_read_write_ligolw(self, ext):
        table = self.create(
            100, ['peak_time', 'peak_time_ns', 'snr', 'central_freq'],
            ['i4', 'i4', 'f4', 'f4'])
        with tempfile.NamedTemporaryFile(suffix='.{}'.format(ext),
                                         delete=False) as f:
            def _read(*args, **kwargs):
                kwargs.setdefault('format', 'ligolw')
                kwargs.setdefault('tablename', 'sngl_burst')
                return self.TABLE.read(f, *args, **kwargs)

            def _write(*args, **kwargs):
                kwargs.setdefault('format', 'ligolw')
                kwargs.setdefault('tablename', 'sngl_burst')
                return table.write(f.name, *args, **kwargs)

            # check simple write (using open file descriptor, not file path)
            table.write(f, format='ligolw', tablename='sngl_burst')

            # check simple read
            t2 = _read()
            utils.assert_table_equal(table, t2, almost_equal=True)
            assert t2.meta.get('tablename', None) == 'sngl_burst'

            # check auto-discovery of 'time' columns works
            from glue.ligolw.lsctables import LIGOTimeGPS
            with pytest.warns(DeprecationWarning):
                t3 = _read(columns=['time'])
            assert 'time' in t3.columns
            assert isinstance(t3[0]['time'], LIGOTimeGPS)
            utils.assert_array_equal(
                t3['time'], table['peak_time'] + table['peak_time_ns'] * 1e-9)

            # check numpy type casting works
            with pytest.warns(DeprecationWarning):
                t3 = _read(columns=['time'], use_numpy_dtypes=True)
            assert t3['time'].dtype == dtype('float64')
            utils.assert_array_equal(
                t3['time'], table['peak_time'] + table['peak_time_ns'] * 1e-9)

            # check reading multiple tables works
            try:
                t3 = self.TABLE.read([f.name, f.name], format='ligolw',
                                     tablename='sngl_burst')
            except NameError as e:
                if not PY2:  # ligolw not patched for python3 just yet
                    pytest.xfail(str(e))
                raise
            utils.assert_table_equal(vstack((t2, t2)), t3)

            # check writing to existing file raises IOError
            with pytest.raises(IOError) as exc:
                _write()
            assert str(exc.value) == 'File exists: %s' % f.name

            # check overwrite=True, append=False rewrites table
            try:
                _write(overwrite=True)
            except TypeError as e:
                # ligolw is not python3-compatible, so skip if it fails
                if not PY2 and (
                        str(e) == 'write() argument must be str, not bytes'):
                    pytest.xfail(str(e))
                raise
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # check append=True duplicates table
            _write(append=True)
            t3 = _read()
            utils.assert_table_equal(vstack((t2, t2)), t3)

            # check overwrite=True, append=True rewrites table
            _write(append=True, overwrite=True)
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # write another table and check we can still get back the first
            insp = self.create(10, ['end_time', 'snr', 'chisq_dof'])
            insp.write(f.name, format='ligolw', tablename='sngl_inspiral',
                       append=True)
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # write another table with append=False and check the first table
            # is gone
            insp.write(f.name, format='ligolw', tablename='sngl_inspiral',
                       append=False, overwrite=True)
            with pytest.raises(ValueError) as exc:
                _read()
            assert str(exc.value) == ('document must contain exactly '
                                      'one sngl_burst table')

            # -- deprecations
            # check deprecations print warnings where expected

            with pytest.warns(DeprecationWarning):
                table.write(f.name, format='ligolw.sngl_burst', overwrite=True)
            with pytest.warns(DeprecationWarning):
                _read(format='ligolw.sngl_burst')
            with pytest.warns(DeprecationWarning):
                _read(get_as_columns=True)
            with pytest.warns(DeprecationWarning):
                _read(on_attributeerror='anything')
Example #13
    def test_read_write_ligolw(self, ext):
        table = self.create(
            100, ['peak_time', 'peak_time_ns', 'snr', 'central_freq'],
            ['i4', 'i4', 'f4', 'f4'])
        with tempfile.NamedTemporaryFile(suffix=ext) as f:
            table.write(f, format='ligolw.sngl_burst')

            def _read(*args, **kwargs):
                kwargs.setdefault('format', 'ligolw.sngl_burst')
                return self.TABLE.read(f, *args, **kwargs)

            # check simple read
            t2 = _read()
            utils.assert_table_equal(table, t2, almost_equal=True)

            # check read with get_as_columns
            t3 = _read(get_as_columns=True, on_attributeerror='ignore')
            assert 'peak' in t3.columns
            utils.assert_array_equal(
                t3['peak'], table['peak_time'] + table['peak_time_ns'] * 1e-9)

            # check reading multiple tables works
            try:
                t3 = self.TABLE.read([f.name, f.name],
                                     format='ligolw.sngl_burst')
            except NameError as e:
                if not PY2:  # ligolw not patched for python3 just yet
                    pytest.xfail(str(e))
                raise
            utils.assert_table_equal(vstack((t2, t2)), t3)

            # check writing to existing file raises IOError
            with pytest.raises(IOError) as exc:
                table.write(f.name, format='ligolw.sngl_burst')
            assert str(exc.value) == 'File exists: %s' % f.name

            # check overwrite=True, append=False rewrites table
            try:
                table.write(f.name, format='ligolw.sngl_burst', overwrite=True)
            except TypeError as e:
                # ligolw is not python3-compatible, so skip if it fails
                if not PY2 and (
                        str(e) == 'write() argument must be str, not bytes'):
                    pytest.xfail(str(e))
                raise
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # check append=True duplicates table
            table.write(f.name, format='ligolw.sngl_burst', append=True)
            t3 = _read()
            utils.assert_table_equal(vstack((t2, t2)), t3)

            # check overwrite=True, append=True rewrites table
            table.write(f.name, format='ligolw.sngl_burst',
                        append=True, overwrite=True)
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # write another table and check we can still get back the first
            insp = self.create(10, ['end_time', 'snr', 'chisq_dof'])
            insp.write(f.name, format='ligolw.sngl_inspiral', append=True)
            t3 = _read()
            utils.assert_table_equal(t2, t3)

            # write another table with append=False and check the first table
            # is gone
            insp.write(f.name, format='ligolw.sngl_inspiral', append=False,
                       overwrite=True)
            with pytest.raises(ValueError) as exc:
                _read()
            assert str(exc.value) == ('document must contain exactly '
                                      'one sngl_burst table')