Example #1
def bingham_dataset():
    """ load the bingham dataset """
    return obsplus.load_dataset("bingham")
Example #2
 def test_str_and_repr(self):
     """ensure str is returned from str and repr"""
     ds = obsplus.load_dataset("bingham_test")
     assert isinstance(str(ds), str)  # these are dumb COV tests
     assert isinstance(ds.__repr__(), str)
Example #3
def kem_fetcher():
    """ init a waveform fetcher passing a path to a directory as the arg """
    return obsplus.load_dataset("kemmerer").get_fetcher()
def dataset():
    """
    Load the new dataset via obsplus plugin.
    """
    return obsplus.load_dataset("slc_2020_small")
Example #5
def crandall_ds():
    return obsplus.load_dataset("crandall")
Example #6
def event_directory():
    """ Return the directory of the bingham_test catalog. """
    ds = obsplus.load_dataset("bingham_test")
    return ds.event_path
Example #7
def catalog():
    """ load the Crandal events """
    return obsplus.load_dataset("crandall").event_client
Example #8
 def inv(self):
     ds = obsplus.load_dataset("crandall")
     return ds.station_client.get_stations()
Example #9
def dataset():
    """
    Load the new dataset via obsplus plugin.
    """
    return obsplus.load_dataset("coal_node")
Example #10
 def inv(self):
     """Return the crandall inventory."""
     ds = obsplus.load_dataset("crandall_test")
     return ds.station_client.get_stations()
Example #11
def dataset():
    """
    Load the new dataset via obsplus plugin.
    """
    return obsplus.load_dataset("{{ cookiecutter.dataset_name }}")
Example #12
# absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this
cwd = dirname(abspath(__file__))
project_root = dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

import obsplus

# load the datasets used by docs here so notebooks don't have to
obsplus.load_dataset("crandall_test")
obsplus.load_dataset("ta_test")

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
Example #13
def kem_fetcher():
    """ return a wavefetcher of the kemmerer dataset, download if needed """
    return obsplus.load_dataset("kemmerer").get_fetcher()
Example #14
def crandall_dataset():
    """ load the crandall canyon dataset. """
    return obsplus.load_dataset("crandall")
 def test_kem_catalog(self):
     """ test converting the kemmerer catalog to an inv dataframe. """
     ds = obsplus.load_dataset("kemmerer")
     df = stations_to_df(ds.event_client.get_events())
     assert isinstance(df, pd.DataFrame)
     assert not df.empty
Example #16
def bing_fetcher():
    """init a waveform fetcher passing a path to a directory as the arg"""
    return obsplus.load_dataset("bingham_test").get_fetcher()
Example #17
def dataset():
    """Ensure the dataset is downloaded and return."""
    return obsplus.load_dataset(DATASET_NAME)
Example #18
 def data_fetcher(self, request):
     """Return a datafetcher from all datasets."""
     with suppress_warnings(UserWarning):
         return obsplus.load_dataset(request.param).get_fetcher()
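A Fetcher, as returned by get_fetcher above, ties a dataset's event, station, and waveform clients together so waveforms can be pulled relative to the cataloged events. A hedged sketch of one way it might be used; yield_event_waveforms and its time_before/time_after parameters are assumed from typical obsplus usage and are not shown in the examples on this page:

import obsplus

# build a fetcher for the bingham_test dataset (data are downloaded on first use)
fetcher = obsplus.load_dataset("bingham_test").get_fetcher()

# assumed interface: iterate streams trimmed around each event in the catalog
for event_id, stream in fetcher.yield_event_waveforms(time_before=1, time_after=10):
    print(event_id, len(stream))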
Example #19
"""
Script to iterate all ObsPlus datasets and delete corresponding indices.
"""

from pathlib import Path

import obsplus

SOURCE_PATHS = [f"{x}_path" for x in ["event", "station", "waveform"]]

if __name__ == "__main__":
    # first load the dataset entry points
    obsplus.DataSet._load_dataset_entry_point()
    for name in obsplus.DataSet._entry_points:
        try:
            ds = obsplus.load_dataset(name)
        except ValueError:
            continue
        for source_path in SOURCE_PATHS:
            path = Path(getattr(ds, source_path))
            for index in path.rglob(".index.*"):
                index.unlink()
Example #20
 def old_wavebank(self):
     """ get the wavebank of the archive before converting to sds """
     ds = obsplus.load_dataset(self.dataset_name)
     bank = ds.waveform_client
     assert isinstance(bank, obsplus.WaveBank)
     return bank
Example #21
# absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this
cwd = dirname(abspath(__file__))
project_root = dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

import obsplus

# load the datasets used by docs here so notebooks don't have to
obsplus.load_dataset("crandall")
obsplus.load_dataset("TA")

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
Example #22
def simple_df():
    """ Return a simple dataframe. """
    cat = obsplus.load_dataset("bingham_test").event_client.get_events()
    df = obsplus.events_to_df(cat)
    return df
Example #23
def node_dataset():
    """ Return a dataset of the node data. """
    return obsplus.load_dataset('coal_node')
Example #24
 def pick_df(self):
     """Return the pick dataframe of Bingham."""
     ds = obsplus.load_dataset("bingham_test")
     cat = ds.event_client.get_events()
     return obsplus.picks_to_df(cat)
Example #25
 def test_copy_unknown_dataset(self):
     """ensure copying a dataset that doesn't exit raises."""
     with pytest.raises(ValueError):
         obsplus.load_dataset("probably_not_a_real_dataset")
Example #26
def node_dataset() -> obsplus.DataSet:
    """Return a dataset of the node data."""
    return obsplus.load_dataset("coal_node")

# first extractor, get basic info from the magnitude object
@ml_to_df.extractor
def _get_basic(obj: ev.Magnitude):
    # check mag type, if not ML return None to not add a row for this object
    if obj.magnitude_type != "ML":
        return None
    out = dict(
        magnitude=obj.mag,
        resource_id=str(obj.resource_id),
        azimuthal_gap=obj.azimuthal_gap,
        origin_id=obj.origin_id,
    )
    return out


# add another extractor to get the number of stations.
# the column is obtained from the function name.
@ml_to_df.extractor
def _get_station_count(obj):
    if obj.magnitude_type != "ML":
        return None
    return getattr(obj, "station_count") or -10  # we need a default value for ints


# get events and list of magnitudes
cat = obsplus.load_dataset("bingham").event_client.get_events()
magnitudes = [mag for event in cat for mag in event.magnitudes]
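For context, ml_to_df is not defined in the snippet above; it would typically be an obsplus DataFrameExtractor created beforehand, and calling it on the magnitude list is what actually builds the dataframe. A self-contained sketch under that assumption (the _magnitude extractor below is a simplified stand-in for the extractors shown above):

import obspy.core.event as ev
import obsplus
from obsplus import DataFrameExtractor

# assumed: the object the snippet decorates is a DataFrameExtractor for Magnitudes
ml_to_df = DataFrameExtractor(ev.Magnitude)


@ml_to_df.extractor
def _magnitude(obj: ev.Magnitude):
    """Keep only local (ML) magnitudes; return a dict of column -> value."""
    if obj.magnitude_type != "ML":
        return None
    return {"magnitude": obj.mag, "resource_id": str(obj.resource_id)}


# calling the extractor on a list of objects returns one dataframe row per kept object
cat = obsplus.load_dataset("bingham").event_client.get_events()
magnitudes = [mag for event in cat for mag in event.magnitudes]
df = ml_to_df(magnitudes)
print(df.head())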
Example #28
def crandall_ds() -> obsplus.DataSet:
    """Load the crandall canyon dataset"""
    return obsplus.load_dataset("crandall_test")
Example #29
 def data_fetcher(self, request):
     return obsplus.load_dataset(request.param).get_fetcher()
Example #30
def kemmerer_dataset():
    """ Load the kemmerer test case """
    return obsplus.load_dataset("kemmerer")