def test_same_resource_id_different_referred_object(self):
    """
    Tests the handling of the case that different ResourceIdentifier
    instances are created that have the same resource id but different
    objects. The referred objects should still return the same objects
    used in the ResourceIdentifier construction or set_referred_object
    call. However, if an object is set to a resource_id that is not
    equal to the last object set it should issue a warning.
    """
    warnings.simplefilter('default')
    obj_first = UTCDateTime(1000)
    obj_equal = UTCDateTime(1000)  # equal to obj_first, but a distinct object
    obj_other = UTCDateTime(1001)  # not equal to the two above
    self.assertFalse(obj_first is obj_equal)
    rid_str = 'obspy.org/tests/test_resource'
    res_a = ResourceIdentifier(id=rid_str, referred_object=obj_first)
    # Re-using the id with an *equal* object must stay silent.
    with warnings.catch_warnings(record=True) as w:
        res_b = ResourceIdentifier(id=rid_str, referred_object=obj_equal)
        self.assertEqual(len(w), 0)
    # Re-using the id with a *non-equal* object must warn exactly once.
    with warnings.catch_warnings(record=True) as w:
        res_c = ResourceIdentifier(id=rid_str, referred_object=obj_other)
        self.assertEqual(len(w), 1)
        expected_text = 'which is not equal to the last object bound'
        self.assertIn(expected_text, str(w[0]))
    # Despite the shared id, each identifier keeps its own object.
    self.assertIs(obj_first, res_a.get_referred_object())
    self.assertIs(obj_equal, res_b.get_referred_object())
    self.assertIs(obj_other, res_c.get_referred_object())
def test_automatic_dereferring_if_resource_id_goes_out_of_scope(self):
    """
    Tests that objects that have no more referrer are no longer stored
    in the reference dictionary.
    """
    obj = UTCDateTime(2010, 1, 1)  # test object
    weak_dict = \
        ResourceIdentifier._ResourceIdentifier__resource_id_weak_dict
    rid = 'a'  # test resource id
    # One reference: creating it registers the id ...
    ref_a = ResourceIdentifier(rid, referred_object=obj)
    self.assertEqual(ref_a.get_referred_object(), obj)
    self.assertTrue(rid in weak_dict)
    # ... and deleting it unregisters the id again.
    del ref_a
    self.assertFalse(rid in weak_dict)
    # Two equal references: deleting only one keeps the entry alive.
    ref_a = ResourceIdentifier(rid, referred_object=obj)
    ref_b = ResourceIdentifier(rid, referred_object=obj)
    self.assertEqual(ref_a.get_referred_object(), obj)
    del ref_a
    self.assertEqual(ref_b.get_referred_object(), obj)
    self.assertTrue(rid in weak_dict)
    # Deleting the last reference removes the entry.
    del ref_b
    self.assertFalse(rid in weak_dict)
def test_getting_gc_with_shared_resource_id(self):
    """
    Test that calling get_referred_object on a resource id whose object
    has been garbage collected, but that has another object that shares
    the same resource_id, returns the other object with the same
    resource id and issues a warning.
    """
    uri = 'testuri'
    obj1 = UTCDateTime(1000)
    obj2 = UTCDateTime(1000)
    rid1 = ResourceIdentifier(uri, referred_object=obj1)
    rid2 = ResourceIdentifier(uri, referred_object=obj2)
    # NOTE: get_referred_object() is called inline on purpose — keeping
    # the result in a local would hold a strong reference and prevent
    # the garbage collection this test relies on.
    self.assertFalse(
        rid1.get_referred_object() is rid2.get_referred_object())
    self.assertNotEqual(rid1._object_id, rid2._object_id)
    del obj1
    warnings.simplefilter('default')
    with warnings.catch_warnings(record=True) as w:
        rid1.get_referred_object()
        self.assertEqual(len(w), 1)
        self.assertIn('The object with identity', str(w[0]))
    # Both ids now resolve to the surviving object ...
    self.assertIs(rid1.get_referred_object(), rid2.get_referred_object())
    # ... and rid1 has been re-bound to obj2's identity.
    self.assertEqual(rid1._object_id, rid2._object_id)
def test_stationmagnitude(self):
    """
    Tests StationMagnitude object.

    Reads a QuakeML file containing a single StationMagnitude, checks
    every populated field (everything absent from the file must be
    None) and verifies that serializing back to XML round-trips.
    """
    filename = os.path.join(self.path, 'quakeml_1.2_stationmagnitude.xml')
    catalog = readQuakeML(filename)
    # Use assertEqual throughout: assertEquals is a deprecated alias.
    self.assertEqual(len(catalog), 1)
    self.assertEqual(len(catalog[0].station_magnitudes), 1)
    mag = catalog[0].station_magnitudes[0]
    # Assert the actual StationMagnitude object. Everything that is not
    # set in the QuakeML file should be set to None.
    self.assertEqual(
        mag.resource_id,
        ResourceIdentifier("smi:ch.ethz.sed/magnitude/station/881342"))
    self.assertEqual(mag.origin_id,
                     ResourceIdentifier('smi:some/example/id'))
    self.assertEqual(mag.mag, 6.5)
    self.assertEqual(mag.mag_errors.uncertainty, 0.2)
    self.assertEqual(mag.station_magnitude_type, 'MS')
    self.assertEqual(
        mag.amplitude_id,
        ResourceIdentifier("smi:ch.ethz.sed/amplitude/824315"))
    self.assertEqual(mag.method_id, ResourceIdentifier(
        "smi:ch.ethz.sed/magnitude/generic/surface_wave_magnitude"))
    self.assertEqual(
        mag.waveform_id,
        WaveformStreamID(network_code='BW', station_code='FUR',
                         resource_uri="smi:ch.ethz.sed/waveform/201754"))
    self.assertEqual(mag.creation_info, None)
    # exporting back to XML should result in the same document
    # (context manager fixes the previously leaked file handle)
    with open(filename, "rt") as fp:
        original = fp.read()
    processed = Pickler().dumps(catalog)
    self._compareStrings(original, processed)
def test_same_resource_id_different_referred_object(self):
    """
    Tests the handling of the case that different ResourceIdentifier
    instances are created that have the same resource id but different
    objects. This should not happen and thus a warning should be
    emitted.
    """
    object_a = UTCDateTime(1000)
    object_b = UTCDateTime(1001)
    # Idiom fix: assert identity directly instead of comparing an `is`
    # expression against a boolean literal. Also avoid shadowing the
    # builtin `id` with a local name (the keyword argument stays `id`).
    self.assertIsNot(object_a, object_b)
    rid_str = 'obspy.org/tests/test_resource'
    res_a = ResourceIdentifier(id=rid_str, referred_object=object_a)
    # Now create a new resource with the same id but a different object.
    # This will raise a warning.
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('error', UserWarning)
        self.assertRaises(UserWarning, ResourceIdentifier, id=rid_str,
                          referred_object=object_b)
        # Now ignore the warning and actually create the new
        # ResourceIdentifier.
        warnings.simplefilter('ignore', UserWarning)
        res_b = ResourceIdentifier(id=rid_str, referred_object=object_b)
    # Object b was the last to be added, thus all resource identifiers
    # will now point to it.
    self.assertIs(object_b, res_a.get_referred_object())
    self.assertIs(object_b, res_b.get_referred_object())
def _set_resource_id(self, value):
    """Coerce ``value`` to a ResourceIdentifier and attach it.

    Dicts are expanded as keyword arguments; any other value that is
    not already exactly a ResourceIdentifier is wrapped in one. The id
    is then silently bound to this object and stored.
    """
    if isinstance(value, dict):
        rid = ResourceIdentifier(**value)
    elif type(value) != ResourceIdentifier:
        # Exact type comparison (not isinstance): even subclass
        # instances get re-wrapped in a plain ResourceIdentifier.
        rid = ResourceIdentifier(value)
    else:
        rid = value
    rid.set_referred_object(self, warn=False)
    self.__dict__['resource_id'] = rid
def test_arrival(self):
    """
    Tests Arrival object.
    """
    filename = os.path.join(self.path, 'quakeml_1.2_arrival.xml')
    catalog = _read_quakeml(filename)
    self.assertEqual(len(catalog), 1)
    self.assertEqual(len(catalog[0].origins[0].arrivals), 2)
    arrival = catalog[0].origins[0].arrivals[0]
    # Everything not set in the QuakeML file should be None.
    self.assertEqual(
        arrival.pick_id,
        ResourceIdentifier('smi:ch.ethz.sed/pick/117634'))
    self.assertEqual(arrival.phase, 'Pn')
    self.assertEqual(arrival.azimuth, 12.0)
    self.assertEqual(arrival.distance, 0.5)
    self.assertEqual(arrival.takeoff_angle, 11.0)
    self.assertEqual(arrival.takeoff_angle_errors.uncertainty, 0.2)
    self.assertEqual(arrival.time_residual, 1.6)
    self.assertEqual(arrival.horizontal_slowness_residual, 1.7)
    self.assertEqual(arrival.backazimuth_residual, 1.8)
    self.assertEqual(arrival.time_weight, 0.48)
    self.assertEqual(arrival.horizontal_slowness_weight, 0.49)
    self.assertEqual(arrival.backazimuth_weight, 0.5)
    self.assertEqual(
        arrival.earth_model_id,
        ResourceIdentifier('smi:ch.ethz.sed/earthmodel/U21'))
    self.assertEqual(len(arrival.comments), 1)
    self.assertEqual(arrival.creation_info.author, "Erika Mustermann")
    # Round-tripping back to XML must reproduce the original document.
    with open(filename, "rt") as fp:
        original = fp.read()
    processed = Pickler().dumps(catalog)
    compare_xml_strings(original, processed)
def test_read_quakeml(self):
    """
    Check the resource ids of the events read from the IRIS and NERIES
    example files.
    """
    # IRIS
    filename = os.path.join(self.path, 'iris_events.xml')
    catalog = _read_quakeml(filename)
    self.assertEqual(len(catalog), 2)
    iris_ids = [
        'smi:www.iris.edu/ws/event/query?eventId=3279407',
        'smi:www.iris.edu/ws/event/query?eventId=2318174',
    ]
    for event, expected in zip(catalog, iris_ids):
        self.assertEqual(event.resource_id, ResourceIdentifier(expected))
    # NERIES
    catalog = self.neries_catalog
    self.assertEqual(len(catalog), 3)
    neries_ids = [
        'quakeml:eu.emsc/event/20120404_0000041',
        'quakeml:eu.emsc/event/20120404_0000038',
        'quakeml:eu.emsc/event/20120404_0000039',
    ]
    for event, expected in zip(catalog, neries_ids):
        self.assertEqual(event.resource_id, ResourceIdentifier(expected))
def get_events(individual=True):
    """Fetch ANF catalog events over FDSN.

    With ``individual=True`` events (including arrivals) are downloaded
    in yearly batches and each event is written to its own QuakeML
    file. Otherwise the whole catalog is fetched at once, plotted, and
    written to a single QuakeML file.
    """
    endtime = UTC('2015-09-01')
    kw = {'starttime': UTC('2005-04-01'), 'endtime': endtime,
          'minmagnitude': 1.5, 'maxmagnitude': 3.5,
          'minlatitude': 25, 'maxlatitude': 49.5,
          'minlongitude': -124, 'maxlongitude': -67,
          'maxdepth': 40, 'includearrivals': individual,
          'catalog': 'ANF'}
    client = FSDNClient()
    if individual:
        # Yearly windows from 2006 through the final (partial) year.
        for yr in range(6, 17):
            kw['endtime'] = UTC('20%02d-01-01' % yr) if yr < 16 else endtime
            events = client.get_events(**kw)
            for ev in events:
                id_ = _extract_eventid(ev)
                ev.resource_id = ResourceIdentifier(id_)
                ev.write(EVENT_PICKS_FNAME + id_ + '.xml', 'QUAKEML')
            print('fetched events of year %d' % kw['starttime'].year)
            kw['starttime'] = kw['endtime']
    else:
        events = client.get_events(**kw)
        events.plot(projection='local', outfile=EVENT_FNAME + '.png')
        for ev in events:
            id_ = _extract_eventid(ev)
            ev.resource_id = ResourceIdentifier(id_)
        events.write(EVENT_FNAME + '.xml', 'QUAKEML')
    return events
def _parse_record_hy(self, line):
    """
    Parses the 'hypocenter' record HY
    """
    # Fixed-width field extraction
    date = line[2:10]
    time = line[11:20]
    # unused: location_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._float(line[44:48])
    station_number = self._int(line[48:51])
    # unused: version_flag = line[51]
    fe_region_number = line[52:55]
    fe_region_name = self._decode_fe_region_number(fe_region_number)
    source_code = line[55:60].strip()
    # Build the event and its descriptions
    event = Event()
    # FIXME: a smarter way to define evid?
    evid = date + time
    event.resource_id = ResourceIdentifier(
        id='/'.join((res_id_prefix, 'event', evid)))
    for desc_type, text in (('region name', fe_region_name),
                            ('Flinn-Engdahl region', fe_region_number)):
        event.event_descriptions.append(
            EventDescription(type=desc_type, text=text))
    # Build the hypocenter origin
    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        id='/'.join((res_id_prefix, 'origin', evid)))
    origin.creation_info = CreationInfo()
    origin.creation_info.agency_id = source_code if source_code \
        else 'USGS-NEIC'
    origin.earth_model_id = ResourceIdentifier(
        id='/'.join((res_id_prefix, 'earthmodel/ak135')))
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinate_sign(lat_type)
    origin.longitude = longitude * self._coordinate_sign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    origin.quality = OriginQuality()
    origin.quality.associated_station_count = station_number
    origin.quality.standard_error = standard_dev
    # associated_phase_count can be incremented in records 'P ' and 'S '
    origin.quality.associated_phase_count = 0
    # depth_phase_count can be incremented in record 'S '
    origin.quality.depth_phase_count = 0
    origin.origin_type = 'hypocenter'
    origin.region = fe_region_name
    event.origins.append(origin)
    return event
def test_initialize_with_resource_identifier(self):
    """
    Test initializing an ResourceIdentifier with an ResourceIdentifier.
    """
    original = ResourceIdentifier()
    # Construction from the string form and from the instance itself
    # must both produce an equal identifier.
    from_string = ResourceIdentifier(str(original))
    from_instance = ResourceIdentifier(original)
    self.assertEqual(original, from_string)
    self.assertEqual(original, from_instance)
def test_event(self):
    """
    Tests Event object.
    """
    filename = os.path.join(self.path, 'quakeml_1.2_event.xml')
    catalog = _read_quakeml(filename)
    self.assertEqual(len(catalog), 1)
    event = catalog[0]
    self.assertEqual(
        event.resource_id,
        ResourceIdentifier('smi:ch.ethz.sed/event/historical/1165'))
    # enums
    self.assertEqual(event.event_type, 'earthquake')
    self.assertEqual(event.event_type_certainty, 'suspected')
    # comments
    comments = event.comments
    self.assertEqual(len(comments), 2)
    self.assertEqual(comments[0].text, 'Relocated after re-evaluation')
    self.assertEqual(comments[0].resource_id, None)
    self.assertEqual(comments[0].creation_info.agency_id, 'EMSC')
    self.assertEqual(comments[1].text, 'Another comment')
    self.assertEqual(
        comments[1].resource_id,
        ResourceIdentifier(id="smi:some/comment/id/number_3"))
    self.assertEqual(comments[1].creation_info, None)
    # event descriptions
    descriptions = event.event_descriptions
    self.assertEqual(len(descriptions), 3)
    expected = [
        ('1906 San Francisco Earthquake', 'earthquake name'),
        ('NEAR EAST COAST OF HONSHU, JAPAN', 'Flinn-Engdahl region'),
        ('free-form string', None),
    ]
    for description, (text, desc_type) in zip(descriptions, expected):
        self.assertEqual(description.text, text)
        self.assertEqual(description.type, desc_type)
    # creation info
    info = event.creation_info
    self.assertEqual(info.author, "Erika Mustermann")
    self.assertEqual(info.agency_id, "EMSC")
    self.assertEqual(
        info.author_uri,
        ResourceIdentifier("smi:smi-registry/organization/EMSC"))
    self.assertEqual(
        info.agency_uri,
        ResourceIdentifier("smi:smi-registry/organization/EMSC"))
    self.assertEqual(
        info.creation_time, UTCDateTime("2012-04-04T16:40:50+00:00"))
    self.assertEqual(info.version, "1.0.1")
    # exporting back to XML should result in the same document
    with open(filename, "rt") as fp:
        original = fp.read()
    processed = Pickler().dumps(catalog)
    compare_xml_strings(original, processed)
def _parseRecordAE(self, line, event):
    """
    Parses the 'additional hypocenter error and magnitude record' AE

    Uncertainties are stored on the latest origin of ``event``; up to
    two magnitudes are appended to ``event.magnitudes``.
    """
    orig_time_stderr = self._floatUnused(line[2:7])
    latitude_stderr = self._floatUnused(line[8:14])
    longitude_stderr = self._floatUnused(line[15:21])
    depth_stderr = self._floatUnused(line[22:27])
    gap = self._floatUnused(line[28:33])
    mag1 = self._float(line[33:36])
    mag1_type = line[36:38]
    mag2 = self._float(line[43:46])
    mag2_type = line[46:48]
    evid = event.resource_id.id.split('/')[-1]
    # this record is to be associated to the latest origin
    origin = event.origins[-1]
    self._storeUncertainty(origin.time_errors, orig_time_stderr)
    self._storeUncertainty(origin.latitude_errors,
                           self._latErrToDeg(latitude_stderr))
    self._storeUncertainty(
        origin.longitude_errors,
        self._lonErrToDeg(longitude_stderr, origin.latitude))
    self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
    origin.quality.azimuthal_gap = gap
    # BUG FIX: mag1_id was only assigned inside the `mag1 > 0` branch,
    # so the mag2 branch raised NameError whenever only the second
    # magnitude was present. The explicit `is not None` guards also
    # prevent a TypeError on Python 3 when a magnitude field is blank
    # (self._float then returns None, and `None > 0` raises).
    mag1_id = None
    if mag1 is not None and mag1 > 0:
        mag = Magnitude()
        mag1_id = mag1_type.lower()
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag1
        mag.magnitude_type = mag1_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
    if mag2 is not None and mag2 > 0:
        mag = Magnitude()
        mag2_id = mag2_type.lower()
        # disambiguate the id if both magnitudes share the same type
        if mag2_id == mag1_id:
            mag2_id += '2'
        res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
        mag.resource_id = ResourceIdentifier(id=res_id)
        mag.creation_info = CreationInfo(
            agency_id=origin.creation_info.agency_id)
        mag.mag = mag2
        mag.magnitude_type = mag2_type
        mag.origin_id = origin.resource_id
        event.magnitudes.append(mag)
def test_resource_ids_refer_to_newest_object(self):
    """
    Tests that resource ids which are assigned multiple times but point
    to identical objects always point to the newest object. This
    prevents some odd behaviour.
    """
    first = UTCDateTime(2010, 1, 1)
    second = UTCDateTime(2010, 1, 1)  # equal to, but distinct from, first
    rid = ResourceIdentifier("a", referred_object=first)  # @UnusedVariable
    rid = ResourceIdentifier("a", referred_object=second)
    # Even after the first object is gone the id resolves to the
    # most recently bound one.
    del first
    self.assertEqual(rid.get_referred_object(), second)
def _set_tw(self, data_df, index, pick_info, append=False):
    """write the time window to the dataframe"""
    # Convert and validate both window bounds
    try:
        start = UTCDateTime(pick_info[0]).timestamp
    except TypeError:
        raise TypeError("starttime must be an obspy UTCDateTime")
    try:
        end = UTCDateTime(pick_info[1]).timestamp
    except TypeError:
        raise TypeError("endtime must be an obspy UTCDateTime")
    if end <= start:
        raise ValueError("time window starttime must be earlier than endtime")
    # Store the window
    data_df.loc[index, "starttime"] = start
    data_df.loc[index, "endtime"] = end
    if append:
        # Populate the minimum information for it to be a valid pick
        data_df.loc[index, list(NSLC_DTYPES.keys())] = list(
            index[-1].split("."))
        data_df.loc[index, ["time", "pick_id"]] = [
            start, ResourceIdentifier().id]
def __toMagnitude(parser, magnitude_el, origin):
    """
    Parses a given magnitude etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type magnitude_el: etree.element
    :param magnitude_el: magnitude element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.Magnitude` object.
    """
    global CURRENT_TYPE
    mag = Magnitude()
    mag.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "magnitude"]))
    mag.origin_id = origin.resource_id
    mag.mag, mag.mag_errors = __toFloatQuantity(parser, magnitude_el, "mag")
    # obspyck used to write variance (instead of std) in magnitude error
    # fields; convert back to a standard deviation.
    if CURRENT_TYPE == "obspyck" and mag.mag_errors.uncertainty is not None:
        mag.mag_errors.uncertainty = math.sqrt(mag.mag_errors.uncertainty)
        mag.mag_errors.confidence_level = 68.3
    mag.magnitude_type = parser.xpath2obj("type", magnitude_el)
    mag.station_count = parser.xpath2obj("stationCount", magnitude_el, int)
    mag.method_id = "%s/magnitude_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', magnitude_el))
    # Drop the method id entirely if no program name was given.
    if str(mag.method_id).lower().endswith("none"):
        mag.method_id = None
    return mag
def test_event(self):
    """
    Tests Event object.
    """
    event = self.catalog[0]
    self.assertEqual(
        event.resource_id,
        ResourceIdentifier(
            id='quakeml:us.anss.org/event/20120101052755.98'))
    # enums
    self.assertEqual(event.event_type, None)
    self.assertEqual(event.event_type_certainty, None)
    # comments
    comments = event.comments
    self.assertEqual(len(comments), 1)
    self.assertEqual(comments[0].text, 'MW 6.8 (WCMT), 6.8 (UCMT), 6.8 (GCMT). \
Felt (V) at Chiba; (IV) at Fussa, Kawasaki, Saitama, Tokyo, \
Yokohama and Yokosuka; (III) at Ebina, Zama and Zushi; (II) \
at Misawa and Narita, Honshu. Recorded (4 JMA) in Chiba, Fukushima, \
Gumma, Ibaraki, Kanagawa, Miyagi, Saitama, Tochigi and Tokyo.')
    # event descriptions
    descriptions = event.event_descriptions
    self.assertEqual(len(descriptions), 2)
    self.assertEqual(descriptions[0].text, 'SOUTHEAST OF HONSHU, JAPAN')
    self.assertEqual(descriptions[0].type, 'region name')
    self.assertEqual(descriptions[1].text, '211')
    self.assertEqual(descriptions[1].type, 'Flinn-Engdahl region')
    # creation info
    self.assertEqual(event.creation_info, None)
def _parseRecordAH(self, line, event):
    """
    Parses the 'additional hypocenter' record AH
    """
    # Fixed-width field extraction
    date = line[2:10]
    time = line[11:20]
    # unused: hypocenter_quality = line[20]
    latitude = self._float(line[21:27])
    lat_type = line[27]
    longitude = self._float(line[29:36])
    lon_type = line[36]
    # unused: preliminary_flag = line[37]
    depth = self._float(line[38:43])
    # unused: depth_quality = line[43]
    standard_dev = self._floatUnused(line[44:48])
    station_number = self._intUnused(line[48:51])
    phase_number = self._intUnused(line[51:55])
    source_code = line[56:60].strip()
    evid = event.resource_id.id.split('/')[-1]
    # Build the additional origin and attach it to the event
    origin = Origin()
    origin.resource_id = ResourceIdentifier(
        id='/'.join((res_id_prefix, 'origin', evid, source_code.lower())))
    origin.creation_info = CreationInfo(agency_id=source_code)
    origin.time = UTCDateTime(date + time)
    origin.latitude = latitude * self._coordinateSign(lat_type)
    origin.longitude = longitude * self._coordinateSign(lon_type)
    origin.depth = depth * 1000
    origin.depth_type = 'from location'
    quality = OriginQuality()
    quality.standard_error = standard_dev
    quality.used_station_count = station_number
    quality.used_phase_count = phase_number
    origin.quality = quality
    origin.type = 'hypocenter'
    event.origins.append(origin)
def _get_res_id(self, ident, parent=None, parent_res_id=None):
    """
    Create a :class:`~obspy.core.event.resourceid.ResourceIdentifier`
    object.

    :type ident: str
    :param ident: Id of the
        :class:`~obspy.core.event.resourceid.ResourceIdentifier`.
    :type parent: :class:`~obspy.core.event.origin.Origin`,
        :class:`~obspy.core.event.event.Event` or any other object
        with a resource_id attribute.
    :param parent: The resource_id attribute of the parent will be
        used as a prefix for the new
        :class:`~obspy.core.event.resourceid.ResourceIdentifier`.
    :type parent_res_id:
        :class:`~obspy.core.event.resourceid.ResourceIdentifier`
    :param parent_res_id: ResourceIdentifier of the parent.
    :rtype: :class:`~obspy.core.event.resourceid.ResourceIdentifier`
    :return: ResourceIdentifier object.
    """
    # Put the parent id as prefix
    # Example: smi:local/origin/351412/arrival/6389611
    #          |        prefix        | ident  |
    if parent:
        prefix = parent.resource_id.id
    elif parent_res_id:
        prefix = parent_res_id.id
    else:
        prefix = self.res_id_prefix
    return ResourceIdentifier("%s/%s" % (prefix, ident))
def test_resources_in_global_dict_get_garbage_collected(self):
    """
    Tests that the ResourceIdentifiers in the class level resource dict
    get deleted if they have no other reference and the object they
    refer to goes out of scope.
    """
    obj_a = UTCDateTime()
    obj_b = UTCDateTime()
    res1 = ResourceIdentifier(referred_object=obj_a)
    res2 = ResourceIdentifier(referred_object=obj_b)
    # Now two keys should be in the global dict.
    weak_dict = \
        ResourceIdentifier._ResourceIdentifier__resource_id_weak_dict
    self.assertEqual(len(list(weak_dict.keys())), 2)
    # Once the referred objects are gone the ids resolve to nothing.
    del obj_a, obj_b
    self.assertIs(res1.get_referred_object(), None)
    self.assertIs(res2.get_referred_object(), None)
def reloc2cat(reloc_file, cat):
    """Append HypoDD relocation origins from ``reloc_file`` to ``cat``.

    Every row of the reloc file carries an integer event index followed
    by the relocated hypocenter and origin time. A new Origin (with
    method_id 'HypoDD') is appended to the corresponding event and set
    as its preferred origin.

    :param reloc_file: path to the hypoDD .reloc text file
    :param cat: obspy Catalog, indexed by the event ids in the file
    :return: the modified catalog
    """
    from obspy import UTCDateTime
    from obspy.core.event import Origin, ResourceIdentifier
    # BUG FIX: open in text mode. The previous 'rb' mode yielded bytes
    # rows, so `row_lst[15].split('.')` raised a TypeError on Python 3
    # and the `== '60'` comparison could never match.
    with open(reloc_file, 'r') as f:
        for row in f:
            row_lst = row.split()
            ev_id = int(row_lst[0])
            # Catch stupid 60 entries for seconds
            if row_lst[15].split('.')[0] == '60':
                row_lst[14] = int(row_lst[14]) + 1
                row_lst[15] = '00.000'
            cat[ev_id].origins.append(
                Origin(
                    latitude=float(row_lst[1]),
                    longitude=float(row_lst[2]),
                    depth=float(row_lst[3]) * 1000,
                    time=UTCDateTime(
                        year=int(row_lst[10]), month=int(row_lst[11]),
                        day=int(row_lst[12]), hour=int(row_lst[13]),
                        minute=int(row_lst[14]),
                        second=int(row_lst[15].split('.')[0]),
                        # NOTE(review): the fractional-second digits are
                        # passed through as microseconds — confirm the
                        # reloc file really encodes microseconds here.
                        microsecond=int(row_lst[15].split('.')[1])),
                    method_id=ResourceIdentifier(id='HypoDD')))
            cat[ev_id].preferred_origin_id = str(
                cat[ev_id].origins[-1].resource_id)
    return cat
def reassign_selfs(cat, det_cat, temp_dir):
    """Rename self-detection events to a 'smi:local/<template>_self' id.

    Events of ``cat`` whose resource_id appears in the self-detection
    set (and is not already flagged 'self') get their id rewritten.
    """
    selfs = find_self_dets(det_cat, temp_dir)
    for ev in cat:
        temp_str, det_time_str = \
            str(ev.resource_id).split('/')[-1].split('_')
        if det_time_str != 'self' and ev.resource_id in selfs:
            ev.resource_id = ResourceIdentifier(
                'smi:local/%s_self' % temp_str)
    return cat
def make_pk_dict(name_map, pk_file):
    """Build a dict of picks keyed by event ResourceIdentifier.

    :param name_map: csv file mapping new event names to old names
    :param pk_file: csv file of picks; the first line is a header
    :return: dict of ResourceIdentifier -> list of dicts with keys
        'time', 'error' (seconds) and 'sta'
    """
    # Mapping of event new_name --> old name from stefan txt file
    names = {}
    with open(name_map, 'r') as f:
        for raw in f:
            # BUG FIX: the split result was previously discarded, so
            # the code indexed single characters of the raw line.
            fields = raw.split(',')
            if fields[0] not in names:
                names[fields[0]] = fields[1]
            else:
                # BUG FIX: this branch referenced an undefined name
                # `line`; it now reports the duplicate correctly.
                print(fields[0] + ' used multiple times?')
                print(fields[0], names[fields[0]])
                print(fields)
                continue
    # Now make a dictionary of all the picks keyed to event rid
    picks = {}
    with open(pk_file, 'r') as f2:
        # BUG FIX: file objects are not sliceable (`f2[1:]`); read all
        # lines first, then drop the header.
        pk_str = [line for line in f2][1:]
    for raw2 in pk_str:
        fields2 = raw2.split(',')
        rid = ResourceIdentifier('smi:local/{}'.format(
            names[fields2[1].split('/')[-1]]))
        entry = {
            'time': fields2[-6],
            # consistent float division (was `/ 1000.` vs `/ 1000`)
            'error': int(fields2[-3]) / 1000.,
            # NOTE(review): the original read the station from field 0
            # on first insert but field 1 on append; field 0 is used
            # consistently here — confirm against the file format.
            'sta': fields2[0].split('/')[0],
        }
        picks.setdefault(rid, []).append(entry)
    return picks
def read_pick(line):
    """
    Convert REST pick string to ObsPy Pick object

    :param line: string containing pick information
    :type line: str

    :returns:
        :class:`obspy.core.event.Pick` and
        :class:`obspy.core.event.origin.Arrival`
    """
    # Fixed-width format — the line cannot simply be split on whitespace.
    boundaries = [0, 6, 10, 15, 18, 22, 28, 29, 41, 49, -1]
    fields = [line[start:stop].strip()
              for start, stop in zip(boundaries[:-1], boundaries[1:])]
    pick_time = UTCDateTime(
        year=int(fields[1]), julday=int(fields[2]),
        hour=int(fields[3]), minute=int(fields[4])) + float(fields[5])
    pick = Pick(
        time=pick_time,
        phase_hint=fields[7],
        evaluation_mode="automatic",
        method_id=ResourceIdentifier("smi:local/REST"),
        waveform_id=WaveformStreamID(station_code=fields[0]),
        time_errors=QuantityError(uncertainty=float(fields[8])))
    arrival = Arrival(pick_id=pick.resource_id,
                      time_residual=float(fields[9]))
    return pick, arrival
def test_multiple_origins(self):
    """
    Parameters of multiple origins should not interfere with each
    other.
    """
    origin = Origin()
    origin.resource_id = 'smi:ch.ethz.sed/origin/37465'
    origin.time = UTCDateTime(0)
    origin.latitude = 12
    origin.latitude_errors.confidence_level = 95
    origin.longitude = 42
    origin.depth_type = 'from location'
    self.assertEqual(
        origin.resource_id,
        ResourceIdentifier(id='smi:ch.ethz.sed/origin/37465'))
    self.assertEqual(origin.latitude, 12)
    self.assertEqual(origin.latitude_errors.confidence_level, 95)
    self.assertEqual(origin.latitude_errors.uncertainty, None)
    self.assertEqual(origin.longitude, 42)
    # A second origin must not inherit anything from the first one.
    other = Origin(force_resource_id=False)
    other.latitude = 13.4
    self.assertEqual(other.depth_type, None)
    self.assertEqual(other.resource_id, None)
    self.assertEqual(other.latitude, 13.4)
    self.assertEqual(other.latitude_errors.confidence_level, None)
    self.assertEqual(other.longitude, None)
def crandall_s_before_p(self, crandall_event) -> Tuple[Event, int]:
    """
    Set the S-pick time to be before the P-pick time for a couple of
    stations.

    Returns the modified event together with the number of reviewed
    picks that were left untouched.
    """
    eve = crandall_event.copy()  # never mutate the fixture event
    # Collapse phase hints to plain P or S
    for pick in eve.picks:
        if pick.phase_hint in {"P", "Pb"}:
            pick.phase_hint = "P"
        elif pick.phase_hint in {"S", "Sb"}:
            pick.phase_hint = "S"
    # Select reviewed stations carrying exactly one P- and one S-pick
    picks = eve.picks_to_df()
    reviewed = picks.loc[(picks["evaluation_status"] == "reviewed")]
    hinted = reviewed.loc[~(picks["phase_hint"] == "?")]
    paired = hinted.groupby("station").filter(lambda x: len(x) == 2)
    s_pick_ids = paired.loc[paired["phase_hint"] == "S"].resource_id
    # Shift each selected S-pick well before its P-pick
    for _, res_id in s_pick_ids.items():
        ResourceIdentifier(res_id).get_referred_object().time -= 120
    number_picks = len(reviewed) - len(s_pick_ids) * 2
    return eve, number_picks
def _set_pick(self, data_df, index, pick_info, append=False):
    """write the pick information to the dataframe"""
    if isinstance(pick_info, Pick):
        # Copy the relevant attributes out of the Pick object
        for col in PICK_DTYPES:
            if col == "time":
                data_df.loc[index, "time"] = pick_info.time.timestamp
            elif col == "pick_id":
                data_df.loc[index, "pick_id"] = pick_info.resource_id.id
            else:
                data_df.loc[index, col] = pick_info.__dict__[col]
        data_df.loc[index, "phase_hint"] = pick_info.__dict__["phase_hint"]
        data_df.loc[index, list(NSLC_DTYPES.keys())] = list(
            get_seed_id(pick_info).split("."))
    else:
        # Interpret pick_info as a plain pick time
        try:
            stamp = UTCDateTime(pick_info).timestamp
        except TypeError:
            raise TypeError("Pick time must be an obspy UTCDateTime")
        data_df.loc[index, "time"] = stamp
        # Fill the nslc columns from the seed_id held in the index
        data_df.loc[index, list(NSLC_DTYPES.keys())] = list(
            index[-1].split("."))
        if append:
            # Since there is no resource_id for the pick, create a new one
            data_df.loc[index, "pick_id"] = ResourceIdentifier().id
def test_resource_id_valid_quakemluri(self):
    """
    Test that a resource identifier per default (i.e. no arguments to
    __init__()) gets set up with a QUAKEML conform ID.
    """
    default_rid = ResourceIdentifier()
    self.assertEqual(default_rid.id, default_rid.get_quakeml_uri())
def test_latest_in_scope_object_returned(self):
    """
    Test that the most recently defined object with the same
    resource_id, that is still in scope, is returned from the
    get_referred_object method.

    NOTE: statement order matters throughout — the `del` statements
    drive garbage collection of the catalogs, which is exactly what is
    under test here.
    """
    cat1 = read_events()
    # The resource_id attached to the first event is self-pointing
    self.assertIs(cat1[0], cat1[0].resource_id.get_referred_object())
    # make a copy and re-read catalog
    cat2 = cat1.copy()
    cat3 = read_events()
    # the resource_id on the new catalogs point to their attached objects
    self.assertIs(cat1[0], cat1[0].resource_id.get_referred_object())
    self.assertIs(cat2[0], cat2[0].resource_id.get_referred_object())
    self.assertIs(cat3[0], cat3[0].resource_id.get_referred_object())
    # now delete cat1 and make sure cat2 and cat3 still work
    del cat1
    self.assertIs(cat2[0], cat2[0].resource_id.get_referred_object())
    self.assertIs(cat3[0], cat3[0].resource_id.get_referred_object())
    # create a resource_id with the same id as the last defined object
    # with the same resource id (that is still in scope) is returned
    new_id = cat2[0].resource_id.id
    rid = ResourceIdentifier(new_id)
    self.assertIs(rid.get_referred_object(), cat3[0])
    del cat3
    # raises UserWarning (falls back to the next in-scope object)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", UserWarning)
        self.assertIs(rid.get_referred_object(), cat2[0])
    # with every catalog gone the id resolves to nothing
    del cat2
    self.assertIs(rid.get_referred_object(), None)
def test_resources_in_global_dict_get_garbage_colleted(self):
    """
    Tests that the ResourceIdentifiers in the class level resource dict
    get deleted if they have no other reference and the object they
    refer to goes out of scope.

    (The historical misspelling in the method name is kept so existing
    test selections by name keep working.)
    """
    obj_a = UTCDateTime()
    obj_b = UTCDateTime()
    # The identifiers themselves are deliberately not kept alive.
    ResourceIdentifier(referred_object=obj_a)
    ResourceIdentifier(referred_object=obj_b)
    weak_dict = \
        ResourceIdentifier._ResourceIdentifier__resource_id_weak_dict
    # Now two keys should be in the global dict ...
    self.assertEqual(len(weak_dict.keys()), 2)
    # ... and deleting the referred objects should empty it again.
    del obj_a, obj_b
    self.assertEqual(len(weak_dict.keys()), 0)