Example #1
0
def _get_waveclient_from_path(path):
    """Resolve *path* into a waveform client.

    A directory is wrapped in a WaveBank; any other path is read as a
    single waveform file via obspy.
    """
    path = Path(path)
    if not path.is_dir():
        return get_waveform_client(obspy.read(str(path)))
    return get_waveform_client(obsplus.WaveBank(path))
Example #2
0
def archive_to_sds(
    bank: Union[Path, str, "obsplus.WaveBank"],
    sds_path: Union[Path, str],
    starttime: Optional[UTCDateTime] = None,
    endtime: Optional[UTCDateTime] = None,
    overlap: float = 30,
    type_code: str = "D",
    stream_processor: Optional[callable] = None,
):
    """
    Create a seiscomp data structure archive from a waveform source.

    Parameters
    ----------
    bank
        A wavebank or path to such.
    sds_path
        The path for the new sds archive to be created.
    starttime
        If not None, the starttime to convert data from bank.
    endtime
        If not None, the endtime to convert data from bank.
    overlap
        The overlap (in seconds) to use for each file.
    type_code
        The single-character string indicating the SDS data type.
    stream_processor
        A callable that will take a single stream as input and return a
        single stream. May return an empty stream to skip a chunk.

    Notes
    -----
    see: https://www.seiscomp3.org/doc/applications/slarchive/SDS.html
    """
    sds_path = Path(sds_path)
    # ensure we have a wavebank with an up-to-date index
    bank = obsplus.WaveBank(bank)
    bank.update_index()
    # Determine the time range to convert. Compare against None explicitly
    # so falsy-but-valid times (e.g. the epoch, timestamp 0) are honored.
    index = bank.read_index()
    ts1 = index.starttime.min() if starttime is None else starttime
    t1 = _nearest_day(ts1)
    t2 = to_utc(index.endtime.max() if endtime is None else endtime)
    nslcs = get_seed_id_series(index).unique()
    # iterate over each unique seed id and convert its data
    for nslc in nslcs:
        nslc_dict = dict(zip(NSLC, nslc.split(".")))
        # yield waveforms in day-long chunks with the requested overlap
        ykwargs = dict(starttime=t1, endtime=t2, overlap=overlap, duration=86400)
        ykwargs.update(nslc_dict)
        for st in bank.yield_waveforms(**ykwargs):
            if stream_processor:  # apply stream processor if needed.
                st = stream_processor(st)
            if st:
                path = _get_sds_filename(st, sds_path, type_code, **nslc_dict)
                st.write(str(path), "mseed")
Example #3
0
def transcended_bank(tmpdir_factory):
    """Yield a transcended (proxied) wavebank server, closing it afterwards."""
    base = Path(tmpdir_factory.mktemp("bob"))
    dataset = obsplus.load_dataset("bingham_test").copy_to(base)
    wbank = obsplus.WaveBank(dataset.waveform_path)
    proxy = transcend(wbank, "wavebank")
    yield proxy
    proxy.close()
Example #4
0
    def client_chain(self, kem_fetcher, ta_archive):
        """Build a ChainClient combining fetcher clients with a wavebank."""
        waveform_clients = [
            kem_fetcher.waveform_client,
            obsplus.WaveBank(ta_archive),
        ]
        return ChainClient(
            event_clients=[kem_fetcher.event_client.get_events()],
            station_clients=kem_fetcher.station_client.get_stations(),
            waveform_clients=waveform_clients,
        )
Example #5
0
 def test_init_with_banks(self, bingham_dataset):
     """Ensure the fetcher can be init'ed with all bank inputs."""
     wbank = obsplus.WaveBank(bingham_dataset.waveform_path)
     ebank = obsplus.EventBank(bingham_dataset.event_path)
     wbank.update_index()
     ebank.update_index()
     sbank = bingham_dataset.station_client
     fetcher = Fetcher(waveforms=wbank, events=ebank, stations=sbank)
     # both derived dataframes should be populated
     for frame in [fetcher.event_df, fetcher.station_df]:
         assert isinstance(frame, pd.DataFrame)
         assert not frame.empty
Example #6
0
 def test_init_with_banks(self, bingham_dataset):
     """Ensure the fetcher can be init'ed with all bank inputs."""
     wbank = obsplus.WaveBank(bingham_dataset.waveform_path).update_index()
     ebank = obsplus.EventBank(bingham_dataset.event_path).update_index()
     sbank = bingham_dataset.station_client
     # suppress warnings raised because inventory endtimes are out of range
     with suppress_warnings():
         fetcher = Fetcher(waveforms=wbank, events=ebank, stations=sbank)
     # both derived dataframes should be populated
     for frame in [fetcher.event_df, fetcher.station_df]:
         assert isinstance(frame, pd.DataFrame)
         assert not frame.empty
Example #7
0
 def het_bank(self, class_tmp_dir):
     """Create a WaveBank over a directory holding multiple file types."""
     bank_dir = join(class_tmp_dir, "temp1")
     ardir = ArchiveDirectory(
         bank_dir, self.start, self.end, self.sampling_rate, seed_ids=self.seed_ids
     )
     ardir.create_directory()
     # write non-mseed data into the directory (note: files keep a .mseed
     # suffix regardless of their actual format)
     for fmt, seed_id in self.format_key.items():
         stream = ardir.create_stream(self.start, self.end, seed_ids=[seed_id])
         stream.write(join(bank_dir, fmt) + ".mseed", fmt)
     return obsplus.WaveBank(ardir.path)
Example #8
0
 def bank49(self, tmpdir):
     """Set up a WaveBank to test issue #49."""
     base = Path(tmpdir)
     # make two copies of the default stream, shifting the first so a
     # small gap separates the two
     st1 = obspy.read()
     st2 = obspy.read()
     for tr in st1:
         tr.stats.starttime = tr.stats.endtime + 10
     # write both streams to disk, then build and index a bank over them
     st1.write(str(base / "st1.mseed"), "mseed")
     st2.write(str(base / "st2.mseed"), "mseed")
     bank = obsplus.WaveBank(base)
     bank.update_index()
     return bank
Example #9
0
 def fetcher_one_event(self, bingham_dataset, tmp_path):
     """Make a fetcher with only one event."""
     fetcher = bingham_dataset.get_fetcher()
     inv = bingham_dataset.station_client.get_stations()
     # grab the first (event id, stream) pair yielded by the fetcher
     for eid, st in fetcher.yield_event_waveforms(time_before=1, time_after=1):
         break
     catalog = fetcher.event_client.get_events(eventid=eid)
     # stash the stream in a fresh bank and build a fetcher around it
     bank_path = tmp_path / "waveforms"
     bank_path.mkdir(exist_ok=True, parents=True)
     bank = obsplus.WaveBank(bank_path)
     bank.put_waveforms(st, update_index=True)
     bank.read_index()  # need to cache index
     return Fetcher(events=catalog, stations=inv, waveforms=bank)
Example #10
0
 def sds_wavebank(self, converted_archive):
     """Create a new WaveBank on the converted archive."""
     bank = obsplus.WaveBank(converted_archive)
     bank.update_index()
     return bank
Example #11
0
 def test_bank_can_init_bank(self, default_wbank):
     """WaveBank should accept another WaveBank as its input argument."""
     rebuilt = obsplus.WaveBank(default_wbank)
     assert isinstance(rebuilt, obsplus.WaveBank)
Example #12
0
 def download_waveforms(self):
     """Copy the default Stream into a directory."""
     bank = obsplus.WaveBank(self.waveform_path)
     bank.put_waveforms(obspy.read())
Example #13
0
 def download_waveforms(self):
     """Download waveform data and store it in self.waveform_path."""
     # NOTE(review): `bulk` comes from an enclosing/module scope not shown
     # here — confirm it is defined where this method runs.
     stream = self.client.get_waveforms_bulk(bulk)
     obsplus.WaveBank(self.waveform_path).put_waveforms(stream)
Example #14
0
def kem_archive(kemmerer_dataset):
    """Return the directory holding the kemmerer waveform archive.

    The first call downloads the data (this takes a few minutes); later
    calls reuse the cached copy.
    """
    bank = obsplus.WaveBank(kemmerer_dataset.waveform_client)
    return Path(bank.index_path).parent
Example #15
0
def ta_archive(ta_dataset):
    """Ensure the TA archive (built by the setup_test_archive script) is
    downloaded, then return the directory containing it."""
    bank = obsplus.WaveBank(ta_dataset.waveform_client)
    return Path(bank.index_path).parent
Example #16
0
def crandall_bank(crandall_dataset):
    """Return a WaveBank wrapping the crandall dataset's waveform client."""
    return obsplus.WaveBank(crandall_dataset.waveform_client)