import logging
import sqlite3

import numpy as np

# Assumed import path; `asdf_file_list` and `tempdir` are module-level test inputs
# expected to be defined elsewhere in the test module.
from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet


def test_get_waveform(buffer_mb):
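    # Log test diagnostics to a file in the temporary directory, named after the
    # read-buffer size being exercised.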
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = logging.FileHandler('%s/a_%d.txt' % (tempdir, int(buffer_mb)),
                                  mode='w')
    handler.setFormatter(formatter)
    logger = logging.getLogger('test')
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)

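    # Open the federated dataset; single_item_read_limit_in_mb caps the amount of
    # data fetched in a single waveform read and is the quantity varied by this test.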
    fds = FederatedASDFDataSet(asdf_file_list,
                               logger=logger,
                               single_item_read_limit_in_mb=buffer_mb)

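    # Gather all station rows over an effectively unbounded time window.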
    rows = np.array(
        fds.get_stations('1900-01-01T00:00:00', '2100-01-01T00:00:00'))

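    # For every (net, sta, loc, cha) combination, the count reported by the index
    # should match the number of traces actually returned by get_waveforms().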
    for n, s, l, c in rows[:, 0:4]:
        wc = fds.get_waveform_count(n, s, l, c, '1900-01-01T00:00:00',
                                    '2100-01-01T00:00:00')
        stream = fds.get_waveforms(n,
                                   s,
                                   l,
                                   c,
                                   '1900-01-01T00:00:00',
                                   '2100-01-01T00:00:00',
                                   trace_count_threshold=1e4)

        assert wc == len(stream)
        logger.info('%s.%s: %d traces fetched' % (n, s, len(stream)))


def test_db_integrity():
    fds = FederatedASDFDataSet(asdf_file_list)

    # get number of waveforms from the db directly
    conn = sqlite3.connect(fds.fds.db_fn)
    query = 'select count(*) from wdb;'
    db_waveform_count = conn.execute(query).fetchall()[0][0]

    # fetch waveform counts for each unique combination of net, sta, loc, cha
    waveform_count = 0
    rows = fds.get_stations('1900-01-01T00:00:00', '2100-01-01T00:00:00')
    for row in rows:
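        # Unpack network, station, location and channel; the trailing two fields
        # of each row are not needed here.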
        n, s, l, c, _, _ = row

        waveform_count += fds.get_waveform_count(n, s, l, c, '1900-01-01T00:00:00',
                                                 '2100-01-01T00:00:00')
    # end for

    assert waveform_count == db_waveform_count
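

# A minimal sketch of how `buffer_mb` and the shared test inputs above might be
# supplied, assuming pytest is the test runner. The fixture values, the list-file
# path and the temporary-directory handling are illustrative assumptions, not part
# of the original test module.
import tempfile

import pytest

tempdir = tempfile.mkdtemp()
asdf_file_list = 'asdf_files.txt'  # hypothetical path to a text file listing ASDF volumes


@pytest.fixture(params=[1, 8, 2048])
def buffer_mb(request):
    return request.param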