def _create_origins():
    ori = ev.Origin(
        resource_id=ev.ResourceIdentifier('smi:local/First'),
        time=UTCDateTime('2016-05-04T12:00:00'),
        time_errors={'uncertainty': .01},
        longitude=-111.12525,
        longitude_errors={'uncertainty': .020},
        latitude=47.48589325,
        latitude_errors={'uncertainty': .021},
        depth=2.123,
        depth_errors={'uncertainty': 1.22},
        depth_type='from location',
        time_fixed=False,
        epicenter_fixed=False,
        reference_system_id=ev.ResourceIdentifier(),
        method_id=ev.ResourceIdentifier(),
        earth_model_id=ev.ResourceIdentifier(),
        arrivals=[_get_arrival()],
        composite_times=[_get_composite_times()],
        quality=_get_origin_quality(),
        origin_type='hypocenter',
        origin_uncertainty=_get_origin_uncertainty(),
        region='US',
        evaluation_mode='manual',
        evaluation_status='final',
    )
    state['origin_id'] = ori.resource_id
    return ori
def _create_pick():
    # set up the creation info attached to the pick
    creation = ev.CreationInfo(
        agency_id='SwanCo',
        author='Indago',
        creation_time=UTCDateTime(),
        version='10.10',
        author_uri=ev.ResourceIdentifier('smi:local/me.com'),
    )
    pick = ev.Pick(
        time=state['time'],
        comments=[ev.Comment(x) for x in 'BOB'],
        evaluation_mode='manual',
        evaluation_status='final',
        creation_info=creation,
        phase_hint='P',
        polarity='positive',
        onset='emergent',
        backazimuth_errors={'uncertainty': 10},
        slowness_method_id=ev.ResourceIdentifier('smi:local/slow'),
        backazimuth=122.1,
        horizontal_slowness=12,
        method_id=ev.ResourceIdentifier(),
        horizontal_slowness_errors={'uncertainty': 12},
        filter_id=ev.ResourceIdentifier(),
        waveform_id=ev.WaveformStreamID('UU', 'FOO', '--', 'HHZ'),
    )
    state['pick_id'] = pick.resource_id
    return pick
def mag_generator(mag_types):
    """Create magnitudes for testing."""
    params = {
        "origin_id": ev.ResourceIdentifier(),
        "method_id": ev.ResourceIdentifier("mag_calculator"),
        "station_count": 2,
        "azimuthal_gap": 30,
        "evaluation_mode": "manual",
        "evaluation_status": "reviewed",
    }
    mags = []
    counter = 1
    for mt in mag_types:
        m = ev.Magnitude(
            mag=counter,
            magnitude_type=mt,
            mag_errors=ev.QuantityError(uncertainty=counter * 0.1, confidence_level=95),
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        mags.append(m)
        counter += 1
    return mags
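# Usage sketch (illustrative, not part of the original suite). Assumes the same
# imports as the snippets above (ev aliasing obspy.core.event and UTCDateTime
# from obspy); the magnitude types are arbitrary examples.
def _example_mag_generator_usage():
    mags = mag_generator(["ML", "Md", "Mw"])
    # one Magnitude per requested type, with increasing dummy values
    assert [m.magnitude_type for m in mags] == ["ML", "Md", "Mw"]
    assert [m.mag for m in mags] == [1, 2, 3]
    return mags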
def test_duplicate_station_different_network(self, cat1):
    """
    Ensure picks can have duplicated station codes if they have different
    network codes. See issue #173.
    """
    # Add a copy of the first pick with a new resource id and a new network code
    new_pick1 = copy.deepcopy(cat1[0].picks[0])
    new_pick1.waveform_id.network_code = "NW"
    new_pick1.resource_id = ev.ResourceIdentifier()
    cat1[0].picks.append(new_pick1)
    # Do the same for location codes
    new_pick2 = copy.deepcopy(cat1[0].picks[0])
    new_pick2.waveform_id.location_code = "04"
    new_pick2.resource_id = ev.ResourceIdentifier()
    cat1[0].picks.append(new_pick2)
    # test passes if this doesn't raise
    validate_catalog(cat1)
def make_arrivals(picks):
    """Create arrivals for testing."""
    counter = 1
    params = {"phase": "P"}
    arrivals = []
    picks = picks or []
    for pick in picks:
        a = ev.Arrival(
            pick_id=pick.resource_id,
            time_correction=counter * 0.05,
            azimuth=counter * 5,
            distance=counter * 0.1,
            takeoff_angle=counter * 2,
            time_residual=counter * 0.15,
            horizontal_slowness_residual=counter * 0.2,
            backazimuth_residual=counter * 0.25,
            time_weight=counter * 0.3,
            horizontal_slowness_weight=counter * 0.4,
            backazimuth_weight=counter * 0.5,
            earth_model_id=ev.ResourceIdentifier(),
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        arrivals.append(a)
        counter += 1
    return arrivals
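# Usage sketch (illustrative, not part of the original suite): make_arrivals links
# each Arrival back to the Pick that produced it via pick_id. The picks below are
# minimal, made-up examples; ev and UTCDateTime are assumed to be the ObsPy
# aliases used in the snippets above.
def _example_make_arrivals_usage():
    picks = [
        ev.Pick(time=UTCDateTime(), phase_hint="P",
                waveform_id=ev.WaveformStreamID(seed_string="UU.TMU.01.HHZ")),
        ev.Pick(time=UTCDateTime(), phase_hint="P",
                waveform_id=ev.WaveformStreamID(seed_string="UU.CWU.01.HHZ")),
    ]
    arrivals = make_arrivals(picks)
    assert [a.pick_id for a in arrivals] == [p.resource_id for p in picks]
    return arrivals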
def make_amplitudes(scnls=None, picks=None):
    """Create amplitudes for testing."""
    counter = 1
    amps = []
    scnls = scnls or []
    params = {
        "type": "A",
        "unit": "dimensionless",
        "method_id": "mag_calculator",
        "filter_id": ev.ResourceIdentifier("Wood-Anderson"),
        "magnitude_hint": "M",
        "category": "point",
        "evaluation_mode": "manual",
        "evaluation_status": "confirmed",
    }
    for scnl in scnls:
        a = ev.Amplitude(
            generic_amplitude=counter,
            generic_amplitude_errors=ev.QuantityError(
                uncertainty=counter * 0.1, confidence_level=95
            ),
            period=counter * 2,
            snr=counter * 5,
            time_window=ev.TimeWindow(0, 0.1, UTCDateTime()),
            waveform_id=ev.WaveformStreamID(seed_string=scnl),
            scaling_time=UTCDateTime(),
            scaling_time_errors=ev.QuantityError(
                uncertainty=counter * 0.001, confidence_level=95
            ),
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        amps.append(a)
        counter += 1
    picks = picks or []
    for pick in picks:
        a = ev.Amplitude(
            generic_amplitude=counter,
            generic_amplitude_errors=ev.QuantityError(
                uncertainty=counter * 0.1, confidence_level=95
            ),
            period=counter * 2,
            snr=counter * 5,
            time_window=ev.TimeWindow(0, 0.1, UTCDateTime()),
            pick_id=pick.resource_id,
            scaling_time=UTCDateTime(),
            scaling_time_errors=ev.QuantityError(
                uncertainty=counter * 0.001, confidence_level=95
            ),
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        amps.append(a)
        counter += 1
    return amps
def test_put_event_no_reference_time(self, ebank):
    """Test that putting an event with no reference time raises."""
    # get an event with no reference time and no id
    event = obspy.read_events()[0]
    event.origins.clear()
    event.preferred_origin_id = None
    event.resource_id = ev.ResourceIdentifier()
    with pytest.raises(ValueError):
        ebank.put_events(event)
def _get_arrival():
    return ev.Arrival(
        resource_id=ev.ResourceIdentifier('smi:local/Ar1'),
        pick_id=state['pick_id'],
        phase='P',
        time_correction=.2,
        azimuth=12,
        distance=10,
        takeoff_angle=15,
        takeoff_angle_errors={'uncertainty': 10.2},
        time_residual=.02,
        horizontal_slowness_residual=12.2,
        backazimuth_residual=12.2,
        time_weight=.23,
        horizontal_slowness_weight=12,
        backazimuth_weight=12,
        earth_model_id=ev.ResourceIdentifier(),
        comments=[ev.Comment(x) for x in 'Nothing'],
    )
def _create_magnitudes():
    return ev.Magnitude(
        resource_id=ev.ResourceIdentifier(),
        mag=5.5,
        mag_errors={'uncertainty': .01},
        magnitude_type='Mw',
        origin_id=state['origin_id'],
        station_count=1,
        station_magnitude_contributions=[_get_station_mag_contrib()],
    )
def _construct_object(
    ser: pd.Series, df_dict: Dict[str, pd.DataFrame], cls, recursive=True
):
    """
    Construct the object represented by a series.

    Parameters
    ----------
    ser
        A series whose index holds the object's attribute columns and whose
        name is the object's resource id.
    df_dict
        A mapping of table name to the dataframe containing nested objects.
    cls
        The class to instantiate.
    recursive
        If True, also construct nested objects found in ``df_dict``.

    Returns
    -------
    An instance of ``cls`` populated from the series.
    """
    # use pandas string methods to classify the type of each index member
    istr = ser.index.str
    flattened_attrs = istr.startswith("__")
    nested_atts = istr.startswith("_") & (~istr.endswith("_")) & (~flattened_attrs)
    special = istr.startswith("_") & istr.endswith("_")
    basic = (~flattened_attrs) & (~nested_atts) & (~special)
    # ensure each index falls into exactly one category
    attr_sum = flattened_attrs.astype(int) + nested_atts + special + basic
    assert np.all(np.equal(attr_sum, 1))
    # put basic types into dict
    basics = ser[basic][ser[basic].astype(bool)]  # collect non-null basic types
    out = {
        x: TO_CLASS_FUNCS[x](ser[x]) if x in TO_CLASS_FUNCS else v
        for x, v in basics.items()
    }
    # add resource id
    obj_id = ser.name
    out["resource_id"] = ev.ResourceIdentifier(obj_id)
    # inflate flattened objects
    flat = ser[flattened_attrs]
    out.update(_inflate_flattened(flat[flat.astype(bool)]))
    # add nested objects
    if recursive:
        for attr_name, table_name in ser[nested_atts].items():
            klass = ATTR_TO_CLASS[attr_name[1:]]
            # get dataframe, filter on parent id
            dff = df_dict.get(table_name, None)
            if dff is None or dff.empty:
                continue  # none of this type are defined in tables
            df = dff[dff["_parent_id_"] == obj_id]
            if df.empty:
                continue
            # recurse, creating nested objects
            func = partial(_construct_object, df_dict=df_dict, cls=klass)
            out[attr_name[1:]] = df.apply(func, axis=1).values.tolist()
    return cls(**out)
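# Minimal sketch (illustrative, with a made-up series) of the index-naming
# convention _construct_object relies on: "__name" marks a flattened attribute,
# "_name" a nested object stored in another table, "_name_" a special column
# such as the parent id, and anything else a basic attribute.
def _example_index_classification():
    import pandas as pd

    ser = pd.Series({"time": 1.0, "__latitude": 45.0, "_picks": "Pick",
                     "_parent_id_": "smi:local/abc"})
    istr = ser.index.str
    flattened = istr.startswith("__")
    nested = istr.startswith("_") & (~istr.endswith("_")) & (~flattened)
    special = istr.startswith("_") & istr.endswith("_")
    basic = (~flattened) & (~nested) & (~special)
    # every column lands in exactly one category
    assert ((flattened.astype(int) + nested + special + basic) == 1).all()
    return {"flattened": list(ser.index[flattened]), "nested": list(ser.index[nested]),
            "special": list(ser.index[special]), "basic": list(ser.index[basic])}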
def simple_catalog_to_merge(self, bingham_catalog):
    """
    Create a simple catalog to merge into bingham_cat using only its first
    two events.
    """
    cat = obspy.Catalog(events=bingham_catalog[:2]).copy()
    # drop first pick
    cat[0].picks = cat[0].picks[1:]
    # truncate pick times to whole seconds and reset pick resource ids
    for pick, _, _ in yield_obj_parent_attr(cat, ev.Pick):
        pick.time -= pick.time.timestamp % 1
        pick.resource_id = ev.ResourceIdentifier(referred_object=pick)
    return cat
def picks_from_picksdict(picks, creation_info=None):
    picks_list = list()
    for station, onsets in picks.items():
        for label, phase in onsets.items():
            if not isinstance(phase, dict) and not isinstance(phase, AttribDict):
                continue
            onset = phase['mpp']
            try:
                ccode = phase['channel']
                ncode = phase['network']
            except KeyError:
                continue
            pick = ope.Pick()
            if creation_info:
                pick.creation_info = creation_info
            pick.time = onset
            error = phase['spe']
            pick.time_errors.uncertainty = error
            try:
                epp = phase['epp']
                lpp = phase['lpp']
                pick.time_errors.lower_uncertainty = onset - epp
                pick.time_errors.upper_uncertainty = lpp - onset
            except (KeyError, TypeError) as e:
                warnings.warn(str(e), RuntimeWarning)
            try:
                picker = phase['picker']
            except KeyError as e:
                warnings.warn(str(e), RuntimeWarning)
                picker = 'Unknown'
            pick.phase_hint = label
            pick.method_id = ope.ResourceIdentifier(id=picker)
            pick.waveform_id = ope.WaveformStreamID(station_code=station,
                                                    channel_code=ccode,
                                                    network_code=ncode)
            try:
                polarity = phase['fm']
                if polarity in ('U', '+'):
                    pick.polarity = 'positive'
                elif polarity in ('D', '-'):
                    pick.polarity = 'negative'
                else:
                    pick.polarity = 'undecidable'
            except KeyError as e:
                if 'fm' in str(e):
                    # no polarity information found for this phase
                    pass
                else:
                    raise e
            picks_list.append(pick)
    return picks_list
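# Sketch of the pick dictionary layout picks_from_picksdict appears to expect,
# reconstructed from the lookups above (treat the key meanings as assumptions):
# the outer key is the station, the inner key the phase label, and each phase
# dict carries at least 'mpp' (pick time), 'spe' (symmetric pick error),
# 'channel' and 'network'; 'epp'/'lpp' (earliest/latest possible pick), 'picker'
# and 'fm' (first motion) are optional. UTCDateTime is assumed to come from obspy.
def _example_picks_dict():
    t = UTCDateTime("2016-05-04T12:00:01")
    return {
        "STA1": {
            "P": {
                "mpp": t, "epp": t - 0.1, "lpp": t + 0.1, "spe": 0.1,
                "channel": "HHZ", "network": "UU",
                "picker": "manual", "fm": "U",
            }
        }
    }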
def _get_moment_tensor():
    return ev.MomentTensor(
        scalar_moment=12213,
        tensor=_get_tensor(),
        variance=12.23,
        variance_reduction=98,
        double_couple=.22,
        clvd=.55,
        iso=.33,
        source_time_function=_get_source_time_function(),
        data_used=[_get_data_used()],
        method_id=ev.ResourceIdentifier(),
        inversion_type='general',
    )
def create_resourceID(timetohash, restype, authority_id=None, hrstr=None):
    '''
    create_resourceID - function to create an ObsPy ResourceIdentifier

    :param timetohash: time used to generate the hash part of the id
    :type timetohash: :class: `~obspy.core.utcdatetime.UTCDateTime` object
    :param restype: type of the resource, e.g. 'orig', 'earthquake' ...
    :type restype: str
    :param authority_id: name of the institution carrying out the processing
    :type authority_id: str, optional
    :param hrstr: human-readable string used instead of the hash
    :type hrstr: str, optional
    :return: An ObsPy :class: `~obspy.core.event.ResourceIdentifier` object
    '''
    assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy " \
                                                "UTCDateTime object"
    hid = getHash(timetohash)
    if hrstr is None:
        resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
    else:
        resID = ope.ResourceIdentifier(restype + '/' + hrstr)
    if authority_id is not None:
        resID.convert_id_to_quakeml_uri(authority_id=authority_id)
    return resID
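# Usage sketch (illustrative): build a pick resource id from an origin time.
# Assumes the getHash helper referenced above is importable and UTCDateTime
# comes from obspy; the authority and human-readable string are arbitrary
# example values.
def _example_create_resourceID():
    t = UTCDateTime("2016-05-04T12:00:00")
    rid = create_resourceID(t, 'pick', authority_id='local', hrstr='STA1/001')
    # after conversion the id is a QuakeML URI, roughly 'smi:local/pick/STA1/001'
    return rid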
def test_preferred_no_origins(self):
    """
    When the preferred id is set but origins is empty, None should be
    returned.
    """
    event = obspy.read_events()[0]
    # clear origins and ensure resource_id is not holding a reference
    event.origins.clear()
    rid = str(ev.ResourceIdentifier())
    event.preferred_origin_id = rid
    # it should now return None
    with pytest.warns(UserWarning):
        assert get_preferred(event, "origin") is None
    # but if init_empty it should return an empty origin
    with pytest.warns(UserWarning):
        ori = get_preferred(event, "origin", init_empty=True)
        assert isinstance(ori, ev.Origin)
def test_object_with_slots(self):
    """Ensure it still works with slots objects."""

    class Slot:
        __slots__ = ("hey", "bob")

        def __init__(self, hey, bob):
            self.hey = hey
            self.bob = bob

    slot = Slot(hey=ev.ResourceIdentifier("bob"), bob="ugh")
    rids = [x[0] for x in yield_obj_parent_attr(slot, ev.ResourceIdentifier)]
    assert len(rids) == 1
    assert str(rids[0]) == "bob"
def sm_generator(scnls=None, amplitudes=None):
    """Function to create station magnitudes for testing."""
    counter = 1
    sms = []
    scnls = scnls or []
    params = {
        "origin_id": ev.ResourceIdentifier(),
        "station_magnitude_type": "M",
        "method_id": "mag_calculator",
    }
    for scnl in scnls:
        sm = ev.StationMagnitude(
            mag=counter,
            mag_errors=ev.QuantityError(uncertainty=counter * 0.1, confidence_level=95),
            waveform_id=ev.WaveformStreamID(seed_string=scnl),
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        sms.append(sm)
        counter += 1
    amplitudes = amplitudes or []
    for amp in amplitudes:
        sm = ev.StationMagnitude(
            mag=counter,
            mag_errors=ev.QuantityError(uncertainty=counter * 0.1, confidence_level=95),
            amplitude_id=amp.resource_id,
            creation_info=ev.CreationInfo(
                agency_id="dummy_agency", author="dummy", creation_time=UTCDateTime()
            ),
            **params,
        )
        sms.append(sm)
        counter += 1
    return sms
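# Usage sketch (illustrative, not part of the original suite): station magnitudes
# can be generated from SEED id strings, from amplitudes (such as those produced
# by make_amplitudes above), or both.
def _example_sm_generator_usage():
    scnls = ["UU.TMU.01.HHZ", "UU.CWU.01.HHZ"]
    amps = make_amplitudes(scnls=scnls)
    sms = sm_generator(scnls=scnls, amplitudes=amps)
    # one StationMagnitude per SEED id plus one per amplitude
    assert len(sms) == len(scnls) + len(amps)
    return sms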
def create_pick(origintime, picknum, picktime, eventnum, cinfo, phase, station,
                wfseedstr, authority_id):
    '''
    create_pick - function to create an ObsPy Pick

    :param origintime: origin time of the event, used to build the resource id
    :type origintime: :class: `~obspy.core.utcdatetime.UTCDateTime` object
    :param picknum: number of the created pick
    :type picknum: int
    :param picktime: time of the pick
    :type picktime: :class: `~obspy.core.utcdatetime.UTCDateTime` object
    :param eventnum: human-readable event identifier
    :type eventnum: str
    :param cinfo: An ObsPy :class: `~obspy.core.event.CreationInfo` object
        holding information on the creation of the returned object
    :type cinfo: :class: `~obspy.core.event.CreationInfo` object
    :param phase: name of the arrival's seismic phase
    :type phase: str
    :param station: name of the station at which the seismic phase has been
        picked
    :type station: str
    :param wfseedstr: A SEED formatted string of the form
        network.station.location.channel in order to set a referenced waveform
    :type wfseedstr: str, SEED formatted
    :param authority_id: name of the institution carrying out the processing
    :type authority_id: str
    :return: An ObsPy :class: `~obspy.core.event.Pick` object
    '''
    pickID = eventnum + '_' + station.strip() + '/{0:03d}'.format(picknum)
    pickresID = create_resourceID(origintime, 'pick', authority_id, pickID)
    pick = ope.Pick()
    pick.resource_id = pickresID
    pick.time = picktime
    pick.creation_info = cinfo
    pick.phase_hint = phase
    # reference the picked waveform via a WaveformStreamID built from the SEED string
    pick.waveform_id = ope.WaveformStreamID(seed_string=wfseedstr)
    return pick
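# Usage sketch (illustrative): create_pick builds on create_resourceID above; the
# station, SEED string and authority below are arbitrary example values, and
# ope/UTCDateTime are assumed to be the ObsPy aliases used in these snippets.
def _example_create_pick():
    t0 = UTCDateTime("2016-05-04T12:00:00")
    cinfo = ope.CreationInfo(agency_id='example', author='example')
    return create_pick(origintime=t0, picknum=1, picktime=t0 + 2.5,
                       eventnum='e0001', cinfo=cinfo, phase='P', station='STA1',
                       wfseedstr='UU.STA1.01.HHZ', authority_id='local')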
def new_catalog(self):
    """Change the resource ids of events in the default catalog, then return it."""
    cat = obspy.read_events()
    for event in cat:
        event.resource_id = ev.ResourceIdentifier()
    return cat
def rid(some_id):
    if some_id not in RID_CACHE:
        RID_CACHE[some_id] = oe.ResourceIdentifier(some_id)
    return RID_CACHE[some_id]
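# Minimal sketch of the intended caching behaviour (assumes RID_CACHE is a
# module-level dict and oe aliases obspy.core.event, as implied above): repeated
# lookups of the same id string return the same ResourceIdentifier instance.
def _example_rid_cache():
    r1 = rid("smi:local/some-id")
    r2 = rid("smi:local/some-id")
    assert r1 is r2
    return r1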
def test_defined_resource_id(self):
    """Ensure the defined resource_id sticks."""
    rid = str(ev.ResourceIdentifier())
    out = esc.ResourceIdentifier(id=rid)
    assert out.id == rid