def catalog_no_magnitude(self):
    """Return a dataframe from an event with no magnitudes (NaN fill expected)."""
    origin_time = obspy.UTCDateTime("2099-04-01T00-01-00")
    origin = ev.Origin(time=origin_time, latitude=47.1, longitude=-100.22)
    catalog = ev.Catalog(events=[ev.Event(origins=[origin])])
    return events_to_df(catalog)
def rewritten_file_event_df(self, event_df_subset):
    """
    Write the event dataframe to a temp CSV, read it back, and clean up.

    Yields the round-tripped dataframe produced by ``events_to_df``.
    """
    # Create the file closed (delete=False) so it can be re-opened by
    # name: re-opening an open NamedTemporaryFile by path fails on
    # Windows, and the default delete-on-close would remove it before
    # the final cleanup check anyway.
    tf = tempfile.NamedTemporaryFile(suffix=".csv", delete=False)
    tf.close()
    try:
        event_df_subset.to_csv(tf.name)
        yield events_to_df(tf.name)
    finally:
        # guaranteed cleanup even if the consuming test raises
        if os.path.exists(tf.name):  # clean up temp file if needed
            os.remove(tf.name)
def _download_crandall(self):
    """
    Download waveform/station info for dataset.

    Reads the event catalog shipped with the dataset, then for each event
    requests waveforms/station metadata in a time window around the event
    via an obspy MassDownloader, finally verifying the local WaveBank index
    is non-empty.  Performs network I/O and writes to disk.
    """
    bank = WaveBank(self.waveform_path)
    # circular search region centered on the dataset coordinates;
    # max_dist is presumably kilometers — converted to degrees here
    domain = CircularDomain(
        self.latitude,
        self.longitude,
        minradius=0,
        maxradius=kilometers2degrees(self.max_dist),
    )
    cat = obspy.read_events(str(self.source_path / "events.xml"))
    df = events_to_df(cat)
    for _, row in df.iterrows():
        # window around each event origin time
        starttime = row.time - self.time_before
        endtime = row.time + self.time_after
        restrictions = Restrictions(
            starttime=UTC(starttime),
            endtime=UTC(endtime),
            minimum_length=0.90,
            minimum_interstation_distance_in_m=100,
            channel_priorities=["HH[ZNE]", "BH[ZNE]"],
            location_priorities=["", "00", "01", "--"],
        )
        kwargs = dict(
            domain=domain,
            restrictions=restrictions,
            mseed_storage=str(self.waveform_path),
            stationxml_storage=str(self.station_path),
        )
        # a fresh MassDownloader per event; downloads waveforms + stationxml
        MassDownloader(providers=[self._download_client]).download(**kwargs)
    # ensure data have actually been downloaded
    bank.update_index()
    # NOTE(review): starttime/endtime here are leftovers from the LAST loop
    # iteration, so only the final event's window is verified — confirm this
    # is intentional.
    assert not bank.read_index(starttime=starttime, endtime=endtime).empty
def test_origin_quality_wins(self, events_with_origin_quality):
    """
    Phase counts from the OriginQuality should take priority over a
    direct count of picks/arrivals.
    """
    first = events_to_df(events_with_origin_quality).iloc[0]
    assert first["associated_phase_count"] == 42
    assert first["used_phase_count"] == 10
def get_event_summary(cat: obspy.Catalog, **kwargs) -> pd.DataFrame:
    """
    Return a dataframe from a events object after applying filters.

    See obspy.core.fdsn.Client.get_events for supported arguments.
    """
    event_df = obsplus.events_to_df(cat)
    keep = event_df.event_id.isin(_get_ids(event_df, kwargs))
    return event_df[keep]
def events_from_dataframe(self):
    """Build a one-row event dataframe and run it through events_to_df."""
    record = {
        "time": obspy.UTCDateTime(),
        "latitude": 41,
        "longitude": -111.1,
        "depth": 10.0,
        "magnitude": 4.5,
    }
    # Series -> transpose keeps the original (object-dtype) column behavior
    one_row = pd.DataFrame(pd.Series(record)).T
    return events_to_df(one_row)
def _get_distance_tuple(obj):
    """Return a set of (latitude, longitude, elevation, id) tuples for entities."""
    wanted = ["latitude", "longitude", "elevation", "id"]
    # first try treating obj as event-like; fall back to station-like
    try:
        frame = obsplus.events_to_df(obj)
        frame["elevation"] = -frame["depth"]
        frame["id"] = frame["event_id"]
    except (TypeError, ValueError, AttributeError):
        frame = obsplus.stations_to_df(obj)
        frame["id"] = frame["seed_id"]
    return set(frame[wanted].itertuples(index=False, name=None))
def split_inventory(self, inv_df, cat):
    """
    Split the inventory and duplicate so first half encompasses half of
    the events and second gets the second half.
    """
    edf = obsplus.events_to_df(cat).sort_values("time")
    # Use positional indexing: after sort_values the index labels are
    # permuted, so .loc[len(edf) // 2] would select an arbitrary event by
    # label rather than the temporal midpoint.
    ser = edf.iloc[len(edf) // 2]
    inv1, inv2 = inv_df.copy(), inv_df.copy()
    inv1["end_date"] = ser["time"]
    inv2["start_date"] = ser["time"]
    new = pd.concat([inv1, inv2], ignore_index=True, axis=0).reset_index()
    return df_to_inventory(new)
def test_catalog(self):
    """Tests for (modified) default catalog."""
    import obspy
    import obsplus

    catalog = obspy.read_events()
    # force out-of-range longitudes; conversion should normalize them
    catalog[0].origins[0].longitude = 799
    catalog[1].origins[0].longitude = -181
    frame = obsplus.events_to_df(catalog)
    assert (frame["longitude"].abs() <= 180).all()
def get_event_summary(cat: Union[obspy.Catalog, pd.DataFrame], **kwargs) -> pd.DataFrame:
    """
    Return a dataframe from a events object after applying filters.

    Parameters
    ----------
    {get_event_parameters}
    """
    event_df = obsplus.events_to_df(cat)
    ids = _get_ids(event_df, kwargs)
    return event_df[event_df.event_id.isin(ids)]
def test_with_tuple_no_id(self, spatial_calc):
    """Test getting relations with tuple and catalog."""
    cat = obspy.read_events()
    first = obsplus.events_to_df(cat).iloc[0]
    # first test with no id
    coords = (first["latitude"], first["longitude"], -first["depth"])
    with suppress_warnings():
        result = spatial_calc(cat, coords)
    # the default index should be sequential
    assert set(result.index.get_level_values("id2")) == {0}
    # expected len is 3
    assert len(result) == 3
def get_events(cat: obspy.Catalog, **kwargs) -> obspy.Catalog:
    """
    Return a subset of a events filtered on input parameters.

    See obspy.core.fdsn.Client.get_events for supported arguments.
    """
    # no filters requested; return the catalog unchanged
    if not kwargs:
        return cat
    # reject any parameter get_events does not understand
    bad_params = set(kwargs) - SUPPORTED_PARAMS
    if bad_params:
        raise TypeError(f"{bad_params} are not supported get_events parameters")
    event_ids = _get_ids(obsplus.events_to_df(cat), kwargs)
    kept = [eve for eve in cat if str(eve.resource_id) in event_ids]
    return obspy.Catalog(events=kept)
def set_events(self, events: fetcher_event_type):
    """
    Set event state in fetcher.

    Parameters
    ----------
    events
        Data representing events, from which a client or dataframe can be
        obtained.
    """
    # derive a client, falling back to any previously-set one
    try:
        client = get_event_client(events)
    except TypeError:
        client = getattr(self, "event_client", None)
    self.event_client = client
    # derive a dataframe, or None if the input does not support it
    try:
        frame = events_to_df(events)
    except TypeError:
        frame = None
    self.event_df = frame
    # invalidate the cached picks dataframe
    self._picks_df = None
def get_events(cat: obspy.Catalog, **kwargs) -> obspy.Catalog:
    """
    Return a subset of a events filtered on input parameters.

    Parameters
    ----------
    {get_event_parameters}
    """
    # No filters requested; return the catalog unchanged.
    if not kwargs:
        return cat
    # Reject any parameter get_events does not understand.
    bad_params = set(kwargs) - SUPPORTED_PARAMS
    if bad_params:
        raise TypeError(f"{bad_params} are not supported get_events parameters")
    # Ensure all times are numpy datetimes before filtering.
    kwargs = _dict_times_to_npdatetimes(kwargs)
    event_ids = _get_ids(obsplus.events_to_df(cat), kwargs)
    kept = [eve for eve in cat if str(eve.resource_id) in event_ids]
    return obspy.Catalog(events=kept)
def _df_from_events(self, obj):
    """Get the needed dataframe from some objects with event data."""
    frame = obsplus.events_to_df(obj).set_index("event_id")
    # elevation is negated depth
    frame["elevation"] = -frame["depth"]
    return frame
def df(self, test_catalog):
    """Convert the test catalog to a dataframe with a fresh sequential index."""
    return events_to_df(test_catalog.copy()).reset_index(drop=True)
def test_event_directory_to_df(self, event_directory):
    """Test for getting dataframe from event directory."""
    result = events_to_df(event_directory)
    assert isinstance(result, pd.DataFrame)
    assert len(result)
def test_event_bank_to_df(self, default_ebank):
    """Ensure event banks can be used to get dataframes."""
    assert isinstance(obsplus.events_to_df(default_ebank), pd.DataFrame)
def read_catalog(self, catalog_directory):
    """Return the dataframe produced by converting the catalog directory."""
    return events_to_df(catalog_directory)
def test_rejected_phases_still_counted(self, events_rejected_picks):
    """Ensure rejected picks are still counted in arrival numbering."""
    counts = events_to_df(events_rejected_picks).p_phase_count
    assert not (counts == 0).any()
def bing_first_time(bingham_dataset):
    """Get a new time based on the first event in bingham_test event + 1"""
    events = bingham_dataset.event_client.get_events()
    first_row = obsplus.events_to_df(events).iloc[0]
    return to_utc(first_row["time"])
def test_df_are_same(self, read_catalog):
    """A freshly-converted default catalog should match the fixture dataframe."""
    fresh = events_to_df(obspy.read_events())
    assert (fresh.columns == read_catalog.columns).all()
    assert len(fresh) == len(read_catalog)
    assert set(fresh.time) == set(read_catalog.time)
def event_df(self, request):
    """Collect all the supported inputs and parametrize."""
    fixture_value = request.getfixturevalue(request.param)
    return events_to_df(fixture_value)
def catalog_df(self, catalog):
    """Return a dataframe of events converted from the catalog."""
    return events_to_df(catalog)
def df(self, test_catalog):
    """Convert a copy of the test catalog to a dataframe and return it."""
    return events_to_df(test_catalog.copy())
def events_from_catalog(self):
    """Read events from a catalog object."""
    return events_to_df(obspy.read_events())
def event_df_subset(self, kem_archive):
    """Read in the partial list of events from the archive's CSV subset."""
    # pure pathlib join instead of mixing os.path.join with Path objects
    path = Path(kem_archive).parent / "catalog_subset.csv"
    return events_to_df(str(path))
def catalog_empty(self):
    """Get a dataframe from a catalog containing one blank event."""
    catalog = ev.Catalog(events=[ev.Event()])
    return events_to_df(catalog)
def simple_df():
    """Return a simple event dataframe from the bingham_test dataset."""
    catalog = obsplus.load_dataset("bingham_test").event_client.get_events()
    return obsplus.events_to_df(catalog)
def cat_df(self, request, kem_archive):
    """Collect all the supported inputs and parametrize."""
    source = self.base_path / "kemmerer" / request.param
    return events_to_df(source)