def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'.")]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
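# Usage sketch (not part of the reader): _read_ndk() is wired into ObsPy's
# plugin system, so NDK files such as GCMT catalog downloads can be read
# through the generic entry point. A minimal sketch, assuming a local file
# named "jan76_dec20.ndk":
from obspy import read_events

cat = read_events("jan76_dec20.ndk", format="NDK")
print(cat)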
# Assumed setup (this snippet references cat, e and o without creating
# them; the imports and constructors below are a plausible completion):
from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Magnitude
from obspy.geodetics import FlinnEngdahl

cat = Catalog()
e = Event(event_type="earthquake")
o = Origin()

o.time = UTCDateTime(2014, 2, 23, 18, 0, 0)
o.latitude = 47.6
o.longitude = 12.0
o.depth = 10000
o.depth_type = "operator assigned"
o.evaluation_mode = "manual"
o.evaluation_status = "preliminary"
o.region = FlinnEngdahl().get_region(o.longitude, o.latitude)

m = Magnitude()
m.mag = 7.2
m.magnitude_type = "Mw"

m2 = Magnitude()
m2.mag = 7.4
m2.magnitude_type = "Ms"

# also included could be: custom picks, amplitude measurements,
# station magnitudes, focal mechanisms, moment tensors, ...

# make associations, put everything together
cat.append(e)
e.origins = [o]
e.magnitudes = [m, m2]
m.origin_id = o.resource_id
m2.origin_id = o.resource_id

print(cat)
cat.write("/tmp/my_custom_events.xml", format="QUAKEML")
# !cat /tmp/my_custom_events.xml
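# Round trip (a sketch reusing the path written above): the QuakeML file
# can be read straight back into a Catalog for checking.
from obspy import read_events

cat2 = read_events("/tmp/my_custom_events.xml")
print(cat2)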
def _load_events(self):
    self._load_events_helper()
    cache = {}
    notFound = defaultdict(int)
    oEvents = []
    missingStations = defaultdict(int)
    for e in self.eventList:
        if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
            cullList = []
            for a in e.preferred_origin.arrival_list:
                if (len(a.net)):
                    continue

                seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                newCode = None
                if (seedid not in cache):
                    sc = a.sta
                    lonlat = self.isc_coords_dict[sc]
                    if (len(lonlat) == 0):
                        cullList.append(a)
                        continue
                    # end if

                    r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                               lonlat[1],
                                                               maxdist=1e3)
                    #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                    if (not r):
                        notFound[sc] += 1
                    else:
                        for cr in r[0]:
                            c = cr.split('.')[0]
                            newCode = c
                        # end for
                    # end if

                    if (newCode):
                        cache[seedid] = newCode
                    # end if
                else:
                    newCode = cache[seedid]
                # end if

                if (newCode):
                    #print a.net, newCode
                    a.net = newCode

                    sc = self.fdsn_inventory.t[a.net][a.sta]
                    if (type(sc) == defaultdict):
                        cullList.append(a)
                        continue
                    # end if

                    da = gps2dist_azimuth(e.preferred_origin.lat,
                                          e.preferred_origin.lon,
                                          sc[1], sc[0])
                    dist = kilometers2degrees(da[0] / 1e3)

                    if (np.fabs(a.distance - dist) > 0.5):
                        cullList.append(a)
                    # end if
                # end if
            # end for
            for c in cullList:
                e.preferred_origin.arrival_list.remove(c)
        # end if

        # Create obspy event object
        ci = OCreationInfo(author='GA', creation_time=UTCDateTime(),
                           agency_id='GA-iteration-1')
        oid = self.get_id()
        origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                         time=UTCDateTime(e.preferred_origin.utctime),
                         longitude=e.preferred_origin.lon,
                         latitude=e.preferred_origin.lat,
                         depth=e.preferred_origin.depthkm * 1e3,
                         method_id=OResourceIdentifier(id='unknown'),
                         earth_model_id=OResourceIdentifier(id='iasp91'),
                         evaluation_mode='automatic',
                         creation_info=ci)
        magnitude = OMagnitude(
            resource_id=OResourceIdentifier(id=self.get_id()),
            mag=e.preferred_magnitude.magnitude_value,
            magnitude_type=e.preferred_magnitude.magnitude_type,
            origin_id=OResourceIdentifier(id=oid),
            creation_info=ci)
        event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                       creation_info=ci, event_type='earthquake')
        event.origins = [origin]
        event.magnitudes = [magnitude]
        event.preferred_magnitude_id = magnitude.resource_id
        event.preferred_origin_id = origin.resource_id

        # Insert old picks
        for a in e.preferred_origin.arrival_list:
            if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                missingStations[a.net + '.' + a.sta] += 1
                continue
            # end if
            oldPick = OPick(
                resource_id=OResourceIdentifier(id=self.get_id()),
                time=UTCDateTime(a.utctime),
                waveform_id=OWaveformStreamID(network_code=a.net,
                                              station_code=a.sta,
                                              channel_code=a.cha),
                method_id=OResourceIdentifier('unknown'),
                phase_hint=a.phase,
                evaluation_mode='automatic',
                creation_info=ci)
            oldArr = OArrival(
                resource_id=OResourceIdentifier(
                    id=oldPick.resource_id.id + "#"),
                pick_id=oldPick.resource_id,
                phase=oldPick.phase_hint,
                distance=a.distance,
                earth_model_id=OResourceIdentifier(
                    'quakeml:ga.gov.au/earthmodel/iasp91'),
                creation_info=ci)

            event.picks.append(oldPick)
            event.preferred_origin().arrivals.append(oldArr)
        # end for

        # Insert our picks
        opList = self.our_picks.picks[e.public_id]
        if (len(opList)):
            for op in opList:
                if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                        defaultdict):
                    missingStations[op[1] + '.' + op[2]] += 1
                    continue
                # end if
                newPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(op[0]),
                    waveform_id=OWaveformStreamID(network_code=op[1],
                                                  station_code=op[2],
                                                  channel_code=op[3]),
                    method_id=OResourceIdentifier('phasepapy/aicd'),
                    backazimuth=op[-1],
                    phase_hint=op[4],
                    evaluation_mode='automatic',
                    comments=op[6],
                    creation_info=ci)
                newArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=newPick.resource_id.id + "#"),
                    pick_id=newPick.resource_id,
                    phase=newPick.phase_hint,
                    azimuth=op[-2],
                    distance=op[-3],
                    time_residual=op[5],
                    time_weight=1.,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)

                event.picks.append(newPick)
                event.preferred_origin().arrivals.append(newArr)
            # end for
        # end if

        quality = OOriginQuality(
            associated_phase_count=len(e.preferred_origin.arrival_list) +
            len(self.our_picks.picks[e.public_id]),
            used_phase_count=len(e.preferred_origin.arrival_list) +
            len(self.our_picks.picks[e.public_id]))
        event.preferred_origin().quality = quality
        oEvents.append(event)
    # end for // loop over e

    #print notFound
    print(self.rank, missingStations)

    cat = OCatalog(events=oEvents)
    ofn = self.output_path + '/%d.xml' % (self.rank)
    cat.write(ofn, format='SC3ML')
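# A standalone sketch of the distance sanity check used in _load_events()
# above, with made-up coordinates: an arrival is culled when the catalog
# epicentral distance disagrees by more than 0.5 degrees with the distance
# recomputed from the remapped station coordinates.
import numpy as np
from obspy.geodetics import gps2dist_azimuth, kilometers2degrees

def distance_mismatch(ev_lat, ev_lon, sta_lat, sta_lon, catalog_dist_deg):
    # gps2dist_azimuth returns (distance in m, azimuth, back-azimuth).
    dist_m, _, _ = gps2dist_azimuth(ev_lat, ev_lon, sta_lat, sta_lon)
    dist_deg = kilometers2degrees(dist_m / 1e3)
    return np.fabs(catalog_dist_deg - dist_deg) > 0.5

print(distance_mismatch(-12.4, 130.8, -20.1, 135.0, 10.0))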
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str
    """
    a = line.split()
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name, 'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name, 'magnitude', 'JMA')

    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name, 'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name, 'magnitude', 'MT')

    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name, "focal_mechanism")

    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz,
                    m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down "
                      "(Jost and Herrmann 1989)")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name, 'moment_tensor')
    foc_mec.moment_tensor = mt

    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
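# Usage sketch: in ObsPy this function backs the F-net moment tensor
# plugin, so a whole catalog file can be read via read_events(). The
# filename here is an assumption; the "FNETMT" format key follows the
# standard ObsPy plugin registration.
from obspy import read_events

cat = read_events("FNET_MT_catalog.txt", format="FNETMT")
print(cat)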
def build(self):
    """
    Build an obspy moment tensor focal mech event

    This makes the tensor output into an Event containing:
    1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
    2) a Magnitude of the Mw from the Tensor

    Which is what we want for outputting QuakeML using the (slightly
    modified) obspy code.

    Input
    -----
    filehandle => open file OR str from filehandle.read()

    Output
    ------
    event => instance of Event() class as described above
    """
    p = self.parser
    event = Event(event_type='earthquake')
    origin = Origin()
    focal_mech = FocalMechanism()
    nodal_planes = NodalPlanes()
    moment_tensor = MomentTensor()
    principal_ax = PrincipalAxes()
    magnitude = Magnitude()
    data_used = DataUsed()
    creation_info = CreationInfo(agency_id='NN')
    ev_mode = 'automatic'
    ev_stat = 'preliminary'
    evid = None
    orid = None
    # Parse the entire file line by line.
    for n, l in enumerate(p.line):
        if 'REVIEWED BY NSL STAFF' in l:
            ev_mode = 'manual'
            ev_stat = 'reviewed'
        if 'Event ID' in l:
            evid = p._id(n)
        if 'Origin ID' in l:
            orid = p._id(n)
        if 'Ichinose' in l:
            moment_tensor.category = 'regional'
        if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
            ev = p._event_info(n)
        if 'Depth' in l:
            derived_depth = p._depth(n)
        if 'Mw' in l:
            magnitude.mag = p._mw(n)
            magnitude.magnitude_type = 'Mw'
        if 'Mo' in l and 'dyne' in l:
            moment_tensor.scalar_moment = p._mo(n)
        if 'Percent Double Couple' in l:
            moment_tensor.double_couple = p._percent(n)
        if 'Percent CLVD' in l:
            moment_tensor.clvd = p._percent(n)
        if 'Epsilon' in l:
            moment_tensor.variance = p._epsilon(n)
        if 'Percent Variance Reduction' in l:
            moment_tensor.variance_reduction = p._percent(n)
        if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
            np = p._double_couple(n)
            nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
            nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
            nodal_planes.preferred_plane = 1
        if 'Spherical Coordinates' in l:
            mt = p._mt_sphere(n)
            moment_tensor.tensor = Tensor(
                m_rr=mt['Mrr'],
                m_tt=mt['Mtt'],
                m_pp=mt['Mff'],
                m_rt=mt['Mrt'],
                m_rp=mt['Mrf'],
                m_tp=mt['Mtf'],
            )
        if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
            ax = p._vectors(n)
            principal_ax.t_axis = Axis(ax['T']['trend'],
                                       ax['T']['plunge'],
                                       ax['T']['ev'])
            principal_ax.p_axis = Axis(ax['P']['trend'],
                                       ax['P']['plunge'],
                                       ax['P']['ev'])
            principal_ax.n_axis = Axis(ax['N']['trend'],
                                       ax['N']['plunge'],
                                       ax['N']['ev'])
        if 'Number of Stations' in l:
            data_used.station_count = p._number_of_stations(n)
        if 'Maximum' in l and 'Gap' in l:
            focal_mech.azimuthal_gap = p._gap(n)
        if re.match(r'^Date', l):
            creation_info.creation_time = p._creation_time(n)
    # Creation Time
    creation_info.version = orid
    # Fill in magnitude values
    magnitude.evaluation_mode = ev_mode
    magnitude.evaluation_status = ev_stat
    magnitude.creation_info = creation_info.copy()
    magnitude.resource_id = self._rid(magnitude)
    # Stub origin
    origin.time = ev.get('time')
    origin.latitude = ev.get('lat')
    origin.longitude = ev.get('lon')
    origin.depth = derived_depth * 1000.
    origin.depth_type = "from moment tensor inversion"
    origin.creation_info = creation_info.copy()
    # Unique from true origin ID
    _oid = self._rid(origin)
    origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
    del _oid
    # Make an id for the MT that references this origin
    ogid = str(origin.resource_id)
    doid = ResourceIdentifier(ogid, referred_object=origin)
    # Make an id for the moment tensor mag which references this mag
    mrid = str(magnitude.resource_id)
    mmid = ResourceIdentifier(mrid, referred_object=magnitude)
    # MT todo: could check/use URL for RID if parsing the php file
    moment_tensor.evaluation_mode = ev_mode
    moment_tensor.evaluation_status = ev_stat
    moment_tensor.data_used = data_used
    moment_tensor.moment_magnitude_id = mmid
    moment_tensor.derived_origin_id = doid
    moment_tensor.creation_info = creation_info.copy()
    moment_tensor.resource_id = self._rid(moment_tensor)
    # Fill in focal_mech values
    focal_mech.nodal_planes = nodal_planes
    focal_mech.moment_tensor = moment_tensor
    focal_mech.principal_axes = principal_ax
    focal_mech.creation_info = creation_info.copy()
    focal_mech.resource_id = self._rid(focal_mech)
    # add mech and new magnitude to event
    event.focal_mechanisms = [focal_mech]
    event.magnitudes = [magnitude]
    event.origins = [origin]
    event.creation_info = creation_info.copy()
    # If an MT was done, that's the preferred mag/mech
    event.preferred_magnitude_id = str(magnitude.resource_id)
    event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    if evid:
        event.creation_info.version = evid
    event.resource_id = self._rid(event)
    self.event = event
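# build() ties the MomentTensor to its derived origin and moment magnitude
# through ResourceIdentifier objects created with referred_object. A
# minimal sketch of that ObsPy mechanism (values are illustrative):
from obspy.core.event import Origin, ResourceIdentifier

org = Origin(latitude=39.5, longitude=-119.8)
rid = ResourceIdentifier("smi:local/origin/example", referred_object=org)
# Wherever the id travels, the original object can be dereferenced again:
assert rid.get_referred_object() is org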
def _load_events(self):
    self._load_events_helper()
    cache = {}
    notFound = defaultdict(int)
    oEvents = []
    missingStations = defaultdict(int)
    lines = []
    for e in tqdm(self.eventList, desc='Rank %d' % (self.rank)):
        if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
            cullList = []
            for a in e.preferred_origin.arrival_list:
                if (len(a.net)):
                    continue

                seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                newCode = None
                if (seedid not in cache):
                    sc = a.sta
                    lonlat = self.isc_coords_dict[sc]
                    if (len(lonlat) == 0):
                        cullList.append(a)
                        continue
                    # end if

                    r = self.fdsn_inventory.getClosestStation(
                        lonlat[0], lonlat[1], maxdist=1e3)  # 1km
                    #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                    if (not r):
                        notFound[sc] += 1
                    else:
                        c = r[0].split('.')[0]
                        newCode = c
                    # end if

                    if (newCode):
                        cache[seedid] = newCode
                    # end if
                else:
                    newCode = cache[seedid]
                # end if

                if (newCode):
                    #print a.net, newCode
                    a.net = newCode

                    sc = self.fdsn_inventory.t[a.net][a.sta]
                    if (type(sc) == defaultdict):
                        cullList.append(a)
                        continue
                    # end if

                    da = gps2dist_azimuth(e.preferred_origin.lat,
                                          e.preferred_origin.lon,
                                          sc[1], sc[0])
                    dist = kilometers2degrees(da[0] / 1e3)

                    if (np.fabs(a.distance - dist) > 0.5):
                        #print ([e.preferred_origin.lon, e.preferred_origin.lat, sc[0], sc[1]])
                        cullList.append(a)
                    # end if
                # end if
            # end for
            for c in cullList:
                e.preferred_origin.arrival_list.remove(c)
        # end if

        # Create obspy event object
        ci = OCreationInfo(author='GA', creation_time=UTCDateTime(),
                           agency_id='GA-iteration-1')
        oid = self.get_id()
        origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                         time=UTCDateTime(e.preferred_origin.utctime),
                         longitude=e.preferred_origin.lon,
                         latitude=e.preferred_origin.lat,
                         depth=e.preferred_origin.depthkm * 1e3,
                         method_id=OResourceIdentifier(id='unknown'),
                         earth_model_id=OResourceIdentifier(id='iasp91'),
                         evaluation_mode='automatic',
                         creation_info=ci)
        magnitude = OMagnitude(
            resource_id=OResourceIdentifier(id=self.get_id()),
            mag=e.preferred_magnitude.magnitude_value,
            magnitude_type=e.preferred_magnitude.magnitude_type,
            origin_id=OResourceIdentifier(id=oid),
            creation_info=ci)
        event = OEvent(
            resource_id=OResourceIdentifier(id=str(e.public_id)),
            creation_info=ci, event_type='earthquake')
        event.origins = [origin]
        event.magnitudes = [magnitude]
        event.preferred_magnitude_id = magnitude.resource_id
        event.preferred_origin_id = origin.resource_id

        # Insert old picks
        if (not self.discard_old_picks):
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) ==
                        defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    method_id=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)
                oldArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=oldPick.resource_id.id + "#"),
                    pick_id=oldPick.resource_id,
                    phase=oldPick.phase_hint,
                    distance=a.distance,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)

                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)

                # populate list for text output
                line = [str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}',
                        e.preferred_origin.lat, '{:f}',
                        e.preferred_origin.depthkm, '{:f}',
                        a.net, '{:<5s}',
                        a.sta, '{:<5s}',
                        a.cha, '{:<5s}',
                        a.utctime.timestamp, '{:f}',
                        a.phase, '{:<5s}',
                        self.fdsn_inventory.t[a.net][a.sta][0], '{:f}',
                        self.fdsn_inventory.t[a.net][a.sta][1], '{:f}',
                        -999, '{:f}',
                        -999, '{:f}',
                        a.distance, '{:f}',
                        -999, '{:f}',
                        -999, '{:f}',
                        -999, '{:f}',
                        -999, '{:f}',
                        -999, '{:f}',
                        -999, '{:d}',
                        -999, '{:d}']
                lines.append(line)
            # end for
        # end if

        # Insert our picks
        opList = self.our_picks.picks[e.public_id]
        if (len(opList)):
            for op in opList:
                if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                        defaultdict):
                    missingStations[op[1] + '.' + op[2]] += 1
                    continue
                # end if
                newPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(op[0]),
                    waveform_id=OWaveformStreamID(network_code=op[1],
                                                  station_code=op[2],
                                                  channel_code=op[3]),
                    method_id=OResourceIdentifier('phasepapy/aicd'),
                    backazimuth=op[-1],
                    phase_hint=op[4],
                    evaluation_mode='automatic',
                    comments=[OComment(
                        text='phasepapy_snr = ' + str(op[6][0]) +
                             ', quality_measure_cwt = ' + str(op[6][1]) +
                             ', dom_freq = ' + str(op[6][2]) +
                             ', quality_measure_slope = ' + str(op[6][3]) +
                             ', band_index = ' + str(op[6][4]) +
                             ', nsigma = ' + str(op[6][5]),
                        force_resource_id=False)],
                    creation_info=ci)
                newArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=newPick.resource_id.id + "#"),
                    pick_id=newPick.resource_id,
                    phase=newPick.phase_hint,
                    azimuth=op[-2],
                    distance=op[-3],
                    time_residual=op[5],
                    time_weight=1.,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)

                event.picks.append(newPick)
                event.preferred_origin().arrivals.append(newArr)

                # populate list for text output
                line = [str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}',
                        e.preferred_origin.lat, '{:f}',
                        e.preferred_origin.depthkm, '{:f}',
                        op[1], '{:<5s}',
                        op[2], '{:<5s}',
                        op[3], '{:<5s}',
                        UTCDateTime(op[0]).timestamp, '{:f}',
                        op[4], '{:<5s}',
                        op[10], '{:f}',
                        op[9], '{:f}',
                        op[12], '{:f}',
                        op[13], '{:f}',
                        op[11], '{:f}',
                        op[5], '{:f}',
                        op[6][0], '{:f}',
                        op[6][1], '{:f}',
                        op[6][2], '{:f}',
                        op[6][3], '{:f}',
                        int(op[6][4]), '{:d}',
                        int(op[6][5]), '{:d}']
                lines.append(line)
            # end for
        # end if

        # Old picks only count towards the phase totals when they are kept.
        quality = OOriginQuality(
            associated_phase_count=len(e.preferred_origin.arrival_list) *
            int(not self.discard_old_picks) +
            len(self.our_picks.picks[e.public_id]),
            used_phase_count=len(e.preferred_origin.arrival_list) *
            int(not self.discard_old_picks) +
            len(self.our_picks.picks[e.public_id]))
        event.preferred_origin().quality = quality

        if (len(self.our_picks.picks[e.public_id]) == 0 and
                self.discard_old_picks):
            continue
        # end if

        oEvents.append(event)
    # end for // loop over e

    if (len(missingStations)):
        for k, v in missingStations.items():
            self.logger.warning('Missing station %s: %d picks' % (k, v))
        # end for
    # end if

    # write xml output
    if (len(oEvents)):
        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
    # end if

    # write text output
    procfile = open('%s/proc.%d.txt' % (self.output_path, self.rank), 'w+')
    for line in lines:
        lineout = ' '.join(line[1::2]).format(*line[::2])
        procfile.write(lineout + '\n')
    # end for
    procfile.close()

    # combine text output
    header = ('#eventID originTimestamp mag originLon originLat '
              'originDepthKm net sta cha pickTimestamp phase stationLon '
              'stationLat az baz distance ttResidual snr qualityMeasureCWT '
              'domFreq qualityMeasureSlope bandIndex nSigma\n')
    self.comm.barrier()
    if (self.rank == 0):
        of = open('%s/ensemble.txt' % (self.output_path), 'w+')
        of.write(header)

        for i in range(self.nproc):
            fn = '%s/proc.%d.txt' % (self.output_path, i)

            lines = open(fn, 'r').readlines()
            for line in lines:
                of.write(line)
            # end for

            if (os.path.exists(fn)):
                os.remove(fn)
        # end for
        of.close()
    # end if
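# The text writer above interleaves values and format specs in one flat
# list: even slots hold values, odd slots hold the matching specs. A
# standalone sketch of the trick with made-up values:
line = ['ev123', '{:<25s}', 1392141600.0, '{:f}', 7.2, '{:f}']
print(' '.join(line[1::2]).format(*line[::2]))
# ev123                     1392141600.000000 7.200000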
def setEventData(eventParser, arrivals, count):
    global originCount
    global eventCount
    global pickCount
    creation_info = CreationInfo(
        author='niket_engdahl_parser',
        creation_time=UTCDateTime(),
        agency_uri=ResourceIdentifier(id='smi:engdahl.ga.gov.au/ga-engdahl'),
        agency_id='ga-engdahl')

    # magnitudeSurface = Magnitude(
    #     resource_id=ResourceIdentifier(
    #         id='smi:engdahl.ga.gov.au/origin/' + str(originCount) +
    #            '#netMag.Ms'),
    #     mag=eventParser.ms,
    #     magnitude_type='Ms',
    #     origin_id=ResourceIdentifier(
    #         id='smi:engdahl.ga.gov.au/origin/' + str(originCount)),
    #     azimuthal_gap=eventParser.openaz2,
    #     creation_info=creation_info)
    origin = Origin(
        resource_id=ResourceIdentifier(
            id='smi:engdahl.ga.gov.au/origin/' + str(originCount)),
        time=UTCDateTime(int(str(2000 + int(eventParser.iyr))),
                         int(eventParser.mon),
                         int(eventParser.iday),
                         int(eventParser.ihr),
                         int(eventParser.min),
                         int(eventParser.sec.split('.')[0]),
                         int(eventParser.sec.split('.')[1] + '0')),
        longitude=eventParser.glon,
        latitude=eventParser.glat,
        # engdahl files report kms, obspy expects m
        depth=float(eventParser.depth) * 1000,
        depth_errors=eventParser.sedep,
        method_id=ResourceIdentifier(id='EHB'),
        earth_model_id=ResourceIdentifier(id='ak135'),
        quality=OriginQuality(associated_phase_count=len(arrivals),
                              used_phase_count=len(arrivals),
                              standard_error=eventParser.se,
                              azimuthal_gap=eventParser.openaz2),
        evaluation_mode='automatic',
        creation_info=creation_info)

    magnitude = Magnitude(
        resource_id=ResourceIdentifier(
            id='smi:engdahl.ga.gov.au/origin/' + str(originCount) +
               '#netMag.Mb'),
        mag=eventParser.mb,
        magnitude_type='Mb',
        origin_id=ResourceIdentifier(
            id='smi:engdahl.ga.gov.au/origin/' + str(originCount)),
        azimuthal_gap=eventParser.openaz1,
        creation_info=creation_info)

    originCount += 1

    pickList = []
    arrivalList = []
    pPhaseArrival = None
    for arrParser in arrivals:
        pickOnset = None
        pol = None

        if arrParser.year and arrParser.month and arrParser.day and \
                arrParser.station:
            pPhaseArrival = arrParser
        else:
            arrParser.year = pPhaseArrival.year
            arrParser.day = pPhaseArrival.day
            arrParser.month = pPhaseArrival.month
            arrParser.station = pPhaseArrival.station
            arrParser.delta = pPhaseArrival.delta
            arrParser.dtdd = pPhaseArrival.dtdd
            arrParser.backaz = pPhaseArrival.backaz
            arrParser.focalDip = pPhaseArrival.focalDip
            arrParser.angleAzimuth = pPhaseArrival.angleAzimuth

        if arrParser.phase1 == 'LR' or arrParser.phase2 == 'LR' or \
                arrParser.hour == '24':
            continue

        if arrParser.phase1.startswith('i'):
            pickOnset = PickOnset.impulsive
            if arrParser.fm == '+':
                pol = PickPolarity.positive
            elif arrParser.fm == '-':
                pol = PickPolarity.negative
        elif arrParser.phase1.startswith('e'):
            pickOnset = PickOnset.emergent

        pick = Pick(
            resource_id=ResourceIdentifier(
                id='smi:engdahl.ga.gov.au/pick/' + str(pickCount)),
            time=UTCDateTime(int(str(2000 + int(arrParser.year))),
                             int(arrParser.month),
                             int(arrParser.day),
                             int(arrParser.hour),
                             int(arrParser.minute),
                             int(arrParser.second.split('.')[0]),
                             int(arrParser.second.split('.')[1] + '0')),
            waveform_id=WaveformStreamID(network_code='',
                                         station_code=arrParser.station,
                                         channel_code='BHZ'),
            method_id=ResourceIdentifier('STA/LTA'),
            backazimuth=arrParser.backaz if arrParser.backaz else None,
            onset=pickOnset,
            phase_hint=arrParser.phase,
            polarity=pol,
            evaluation_mode='automatic',
            # TO-DO: populate all the remaining fields here as key value
            creation_info=creation_info)
        if not arrParser.backaz:
            print("arrParser.backaz is empty. "
                  "printing the arrParser for debugging")
        pickCount += 1
        pickList.append(pick)

        arrival = Arrival(
            pick_id=ResourceIdentifier(
                id='smi:engdahl.ga.gov.au/pick/' + str(pickCount - 1)),
            phase=arrParser.phase if arrParser.phase else None,
            azimuth=arrParser.backaz if arrParser.backaz else None,
            distance=arrParser.delta if arrParser.delta else None,
            # if the * has some significance, it should be accounted for.
            # ignoring for now.
            time_residual=arrParser.residual.rstrip('*'),
            time_weight=arrParser.wgt if arrParser.wgt else None,
            backazimuth_weight=arrParser.wgt if arrParser.wgt else None)
        arrivalList.append(arrival)
        if not arrParser.wgt:
            print("arrParser.wgt is empty. "
                  "printing the arrParser for debugging")
            # pprint.pprint(arrParser)

    origin.arrivals = arrivalList

    event = Event(
        resource_id=ResourceIdentifier(
            id='smi:engdahl.ga.gov.au/event/' + str(eventCount)),
        creation_info=creation_info,
        event_type='earthquake')

    eventCount += 1

    event.picks = pickList
    event.origins = [origin, ]
    event.magnitudes = [magnitude, ]
    event.preferred_origin_id = origin.resource_id
    event.preferred_magnitude_id = magnitude.resource_id
    return event
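# setEventData() reconstructs each Arrival's pick_id from a global counter;
# linking through the Pick object itself avoids that bookkeeping. A minimal
# sketch with illustrative values:
from obspy import UTCDateTime
from obspy.core.event import Arrival, Pick, WaveformStreamID

pick = Pick(time=UTCDateTime(2004, 1, 1, 12, 0, 0),
            waveform_id=WaveformStreamID(station_code='STKA',
                                         channel_code='BHZ'),
            phase_hint='P')
arrival = Arrival(pick_id=pick.resource_id, phase=pick.phase_hint)
assert arrival.pick_id == pick.resource_id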
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc.
        directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths
        and uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event
        locations and uncertainties, picks, and amplitudes and magnitudes
        if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"],
                            "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"],
                            "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {"value": event_info["DEC_COA_NORM"],
                                "namespace": ns}

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns}
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns}
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns}

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {"value": float(pickline["SNR"]),
                                  "namespace": ns}
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns}
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns}
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns}
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns}
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns}

                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
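# A sketch of calling the reader, assuming a QuakeMigrate locate run under
# "./locate" and a hypothetical event UID; the reader returns None when no
# picks file exists for the event.
from pathlib import Path

locate_dir = Path("./locate")
event_file = locate_dir / "events" / "20140223180000.event"
event = _read_single_event(event_file, locate_dir, units="km",
                           local_mag_ph="S")
if event is not None:
    print(event.preferred_origin())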