def bingham_dataset():
    """Load and return the bingham dataset."""
    dataset = obsplus.load_dataset("bingham")
    return dataset
def test_str_and_repr(self):
    """Ensure str() and repr() both yield strings (simple coverage checks)."""
    dataset = obsplus.load_dataset("bingham_test")
    # these are dumb COV tests
    for text in (str(dataset), dataset.__repr__()):
        assert isinstance(text, str)
def kem_fetcher():
    """Init a waveform fetcher passing a path to a directory as the arg."""
    dataset = obsplus.load_dataset("kemmerer")
    return dataset.get_fetcher()
def dataset():
    """Load the new dataset via obsplus plugin."""
    loaded = obsplus.load_dataset("slc_2020_small")
    return loaded
def crandall_ds():
    """Return the crandall dataset loaded through obsplus."""
    dataset = obsplus.load_dataset("crandall")
    return dataset
def event_directory():
    """Return the directory of the bingham_test catalog."""
    return obsplus.load_dataset("bingham_test").event_path
def catalog():
    """Load the Crandall events as an event client."""
    dataset = obsplus.load_dataset("crandall")
    return dataset.event_client
def inv(self):
    """Return the station inventory from the crandall dataset."""
    return obsplus.load_dataset("crandall").station_client.get_stations()
def dataset():
    """Load the new dataset via obsplus plugin."""
    loaded = obsplus.load_dataset("coal_node")
    return loaded
def inv(self):
    """Return the crandall inventory."""
    station_client = obsplus.load_dataset("crandall_test").station_client
    return station_client.get_stations()
def dataset():
    """Load the new dataset via obsplus plugin."""
    loaded = obsplus.load_dataset("{{ cookiecutter.dataset_name }}")
    return loaded
# absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = dirname(abspath(__file__)) project_root = dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import obsplus # load the datasets used by docs here so notebooks don't have to obsplus.load_dataset("crandall_test") obsplus.load_dataset("ta_test") # -- General configuration --------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.mathjax",
def kem_fetcher():
    """Return a wavefetcher of the kemmerer dataset, downloading if needed."""
    return obsplus.load_dataset("kemmerer").get_fetcher()
def crandall_dataset():
    """Load the crandall canyon dataset."""
    dataset = obsplus.load_dataset("crandall")
    return dataset
def test_kem_catalog(self):
    """Test converting the kemmerer catalog to a station dataframe."""
    events = obsplus.load_dataset("kemmerer").event_client.get_events()
    df = stations_to_df(events)
    assert isinstance(df, pd.DataFrame)
    assert not df.empty
def bing_fetcher():
    """Init a waveform fetcher passing a path to a directory as the arg."""
    dataset = obsplus.load_dataset("bingham_test")
    return dataset.get_fetcher()
def dataset():
    """Ensure the dataset is downloaded and return it."""
    loaded = obsplus.load_dataset(DATASET_NAME)
    return loaded
def data_fetcher(self, request):
    """Return a datafetcher from all datasets."""
    # silence user warnings raised while the dataset loads
    with suppress_warnings(UserWarning):
        dataset = obsplus.load_dataset(request.param)
        return dataset.get_fetcher()
""" Script to iterate all ObsPlus datasets and delete corresponding indices. """ from pathlib import Path import obsplus SOURCE_PATHS = [f"{x}_path" for x in ["event", "station", "waveform"]] if __name__ == "__main__": # first load the dataset entry points obsplus.DataSet._load_dataset_entry_point() for name in obsplus.DataSet._entry_points: try: ds = obsplus.load_dataset(name) except ValueError: continue for source_path in SOURCE_PATHS: path = Path(getattr(ds, source_path)) for index in path.rglob(".index.*"): index.unlink()
def old_wavebank(self):
    """Get the wavebank of the archive before converting to sds."""
    wavebank = obsplus.load_dataset(self.dataset_name).waveform_client
    assert isinstance(wavebank, obsplus.WaveBank)
    return wavebank
# absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = dirname(abspath(__file__)) project_root = dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import obsplus # load the datasets used by docs here so notebooks don't have to obsplus.load_dataset("crandall") obsplus.load_dataset("TA") # -- General configuration --------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.mathjax",
def simple_df():
    """Return a simple dataframe built from the bingham_test events."""
    events = obsplus.load_dataset("bingham_test").event_client.get_events()
    return obsplus.events_to_df(events)
def node_dataset() -> obsplus.DataSet:
    """Return a dataset of the node data.

    Returns
    -------
    The ``coal_node`` dataset loaded through the obsplus plugin system.
    """
    # double quotes for consistency with the rest of the codebase's
    # load_dataset calls (and the annotated twin of this fixture)
    return obsplus.load_dataset("coal_node")
def pick_df(self):
    """Return the pick dataframe of Bingham."""
    events = obsplus.load_dataset("bingham_test").event_client.get_events()
    return obsplus.picks_to_df(events)
def test_copy_unknown_dataset(self):
    """Ensure loading a dataset that doesn't exist raises."""
    bogus_name = "probably_not_a_real_dataset"
    with pytest.raises(ValueError):
        obsplus.load_dataset(bogus_name)
def node_dataset() -> obsplus.DataSet:
    """Return a dataset of the node data."""
    dataset = obsplus.load_dataset("coal_node")
    return dataset
# first extractor, get basic info from the magnitude object
@ml_to_df.extractor
def _get_basic(obj: ev.Magnitude):
    """Extract core fields from an ML magnitude, or None for other types."""
    # check mag type, if not ML return None to not add a row for this object
    if obj.magnitude_type != "ML":
        return None
    out = dict(
        magnitude=obj.mag,
        resource_id=str(obj.resource_id),
        azimuthal_gap=obj.azimuthal_gap,
        origin_id=obj.origin_id,
    )
    return out


# add another extractor to get the number of stations.
# the column is obtained from the function name.
@ml_to_df.extractor
def _get_station_count(obj):
    """Extract the station count for ML magnitudes; -10 marks missing."""
    if obj.magnitude_type != "ML":
        return None
    # plain attribute access instead of getattr with a constant literal
    # (flake8-bugbear B009); we need a default value for ints
    return obj.station_count or -10


# get events and list of magnitudes
cat = load_dataset("bingham").event_client.get_events()
magnitudes = [mag for event in cat for mag in event.magnitudes]
def crandall_ds() -> obsplus.DataSet:
    """Load the crandall canyon dataset."""
    dataset = obsplus.load_dataset("crandall_test")
    return dataset
def data_fetcher(self, request):
    """Return a fetcher for the dataset named by the fixture param."""
    dataset = obsplus.load_dataset(request.param)
    return dataset.get_fetcher()
def kemmerer_dataset():
    """Load the kemmerer test case."""
    dataset = obsplus.load_dataset("kemmerer")
    return dataset