def _parse_record_l(self, line, event):
    """
    Parse the '90 percent error ellipse' record L into an
    OriginUncertainty/ConfidenceEllipsoid on the event's first origin.
    """
    origin = event.origins[0]
    maj_azimuth = self._float(line[2:8])
    if maj_azimuth is None:
        # Empty record: nothing to parse.
        return
    maj_plunge = self._float(line[8:13])
    maj_length = self._float(line[13:21])
    int_azimuth = self._float(line[21:27])
    int_plunge = self._float(line[27:32])
    # Although the format names this field "intermediate_axis_length",
    # it is effectively a *semi*-intermediate axis length, since in most
    # cases (intermediate_axis_length / 2) < semi_minor_axis_length.
    int_length = self._float(line[32:40])
    min_azimuth = self._float(line[40:46])
    min_plunge = self._float(line[46:51])
    min_length = self._float(line[51:59])
    if min_azimuth == min_plunge == min_length == 0:
        # All-zero minor axis: substitute the intermediate axis and flag
        # the depth as fixed by the operator.
        min_azimuth, min_plunge, min_length = \
            int_azimuth, int_plunge, int_length
        origin.depth_type = 'operator assigned'
    # FIXME: The following code needs to be double-checked!
    maj_unit_vect = \
        self._spherical_to_cartesian((1, maj_azimuth, maj_plunge))
    min_unit_vect = \
        self._spherical_to_cartesian((1, min_azimuth, min_plunge))
    rotation = self._angle_between(maj_unit_vect, min_unit_vect)
    uncertainty = OriginUncertainty()
    uncertainty.preferred_description = 'confidence ellipsoid'
    uncertainty.confidence_level = 90
    ellipsoid = ConfidenceEllipsoid()
    # Lengths are converted from kilometres to metres for QuakeML.
    ellipsoid.semi_major_axis_length = maj_length * 1000
    ellipsoid.semi_minor_axis_length = min_length * 1000
    ellipsoid.semi_intermediate_axis_length = int_length * 1000
    ellipsoid.major_axis_plunge = maj_plunge
    ellipsoid.major_axis_azimuth = maj_azimuth
    # We need to add 90 to match the NEIC QuakeML convention,
    # but the reason is unclear.
    ellipsoid.major_axis_rotation = rotation + 90
    uncertainty.confidence_ellipsoid = ellipsoid
    origin.origin_uncertainty = uncertainty
def full_test_event():
    """
    Generate a basic, fully-populated test event.
    """
    test_event = Event()
    base_time = UTCDateTime("2012-03-26")
    test_event.origins.append(
        Origin(time=base_time + 1.2, latitude=45.0, longitude=25.0,
               depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    # Three magnitudes, all tied to the first origin.
    for mag_value, mag_type in [(0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')]:
        test_event.magnitudes.append(
            Magnitude(mag=mag_value, magnitude_type=mag_type,
                      creation_info=CreationInfo('TES'),
                      origin_id=test_event.origins[0].resource_id))
    # Waveform ids shared by the test picks
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with the amplitude below
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=base_time + 1.68,
             evaluation_mode="manual"))
    # A second pick is needed for the coda amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=base_time + 1.68,
             evaluation_mode="manual"))
    # Two unassociated picks
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=base_time + 1.72,
             evaluation_mode="manual"))
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=base_time + 1.62,
             evaluation_mode="automatic"))
    # Generic local-magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Coda-magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    # Two arrivals carrying full residual/distance information
    for pick in (test_event.picks[2], test_event.picks[3]):
        test_event.origins[0].arrivals.append(
            Arrival(time_weight=2, phase=pick.phase_hint,
                    pick_id=pick.resource_id, backazimuth_residual=5,
                    time_residual=0.2, distance=15, azimuth=25))
    # Error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Fault-plane solution info (line F) - note: have to check the program
    # used to determine which fields are filled.
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180, dip=20, rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
            method_id=ResourceIdentifier(
                "smi:nc.anss.org/focalMechanism/FPFIT"),
            creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
            station_distribution_ratio=0.8))
    # High-precision origin, to be tested as preferred origin; the moment
    # tensor below references it.
    test_event.origins.append(
        Origin(time=base_time + 1.2, latitude=45.1, longitude=25.2,
               depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1, magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100, tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
def computeOriginErrors(org):
    """
    Build confidence-ellipsoid and per-axis errors from the covariance
    matrix of a NonLinLoc origin.

    :param org: NLL's QML Origin; its first comment carries the NLL
        "STATISTICS" line.
    :return: Dictionary containing the computed errors.
    """
    # WARNING: QuakeML uses metres for origin depth, origin uncertainty and
    # confidence ellipsoid; SC3ML uses kilometres. This function returns
    # SC3ML-style kilometre values.
    out = {}

    confidence = 0.90  # Confidence level
    # Confidence coefficients for 1, 2 and 3 degrees of freedom.
    k1d = np.sqrt(chi2.ppf(confidence, 1))
    k2d = np.sqrt(chi2.ppf(confidence, 2))
    k3d = np.sqrt(chi2.ppf(confidence, 3))

    # The covariance matrix is given in the NLL "STATISTICS" line of the
    # *.grid0.loc.hyp file, carried into the Origin's comments by ObsPy.
    stats = org['comments'][0].text.split('STATISTICS')[-1].split()
    cvxx, cvxy, cvxz, cvyy, cvyz, cvzz = \
        [float(tok) for tok in stats[1::2]][3:9]

    # Code adapted from IGN's ConfidenceEllipsoid computation in the
    # "locsat.cpp" program.
    cov3d = np.array([[cvxx, cvxy, cvxz],
                      [cvxy, cvyy, cvyz],
                      [cvxz, cvyz, cvzz]])

    # 1D confidence intervals at the chosen confidence level.
    # NLL effectively uses kp1 = 1 (1-sigma, 68.3%) while LocSAT uses
    # kp1 = 2.71 (90%, one dimension): LocSAT sqrt(cvzz)/2.71 == NLL
    # sqrt(cvzz).
    errx = k1d * np.sqrt(cvxx)
    out['longitude_errors'] = QuantityError(
        uncertainty=errx, confidence_level=confidence * 100.0)
    erry = k1d * np.sqrt(cvyy)
    out['latitude_errors'] = QuantityError(
        uncertainty=erry, confidence_level=confidence * 100.0)
    errz = k1d * np.sqrt(cvzz)
    out['depth_errors'] = QuantityError(
        uncertainty=errz, confidence_level=confidence * 100.0)

    # 2D confidence intervals in the XY (horizontal) plane.
    eigval2d, eigvec2d = np.linalg.eig(np.array(cov3d[:2, :2]))
    # eig() does not order its results; sort by eigenvalue.
    order = eigval2d.argsort()
    eigval2d = eigval2d[order]
    eigvec2d = eigvec2d[:, order]
    sminax = k2d * np.sqrt(eigval2d[0])  # SC3ML in kilometres
    smajax = k2d * np.sqrt(eigval2d[1])  # SC3ML in kilometres
    # Refer the major-axis direction to North.
    strike = 90.0 - np.rad2deg(np.arctan(eigvec2d[1, 1] / eigvec2d[0, 1]))
    horizontalUncertainty = \
        np.sqrt((errx ** 2) + (erry ** 2))  # SC3ML in kilometres

    # 3D confidence intervals.
    eigval3d, eigvec3d = np.linalg.eig(cov3d)
    order = eigval3d.argsort()
    eigval3d = eigval3d[order]
    eigvec3d = eigvec3d[:, order]
    s3dminax = k3d * np.sqrt(eigval3d[0])  # SC3ML in kilometres
    s3dintax = k3d * np.sqrt(eigval3d[1])  # SC3ML in kilometres
    s3dmaxax = k3d * np.sqrt(eigval3d[2])  # SC3ML in kilometres

    # Orientation of the major axis, normalized to the expected angle range.
    majaxplunge = normalizeAngle(np.rad2deg(np.arctan(
        eigvec3d[2, 2] /
        np.sqrt((eigvec3d[2, 0] ** 2) + (eigvec3d[2, 1] ** 2)))))
    majaxazimuth = normalizeAngle(np.rad2deg(np.arctan(
        eigvec3d[2, 1] / eigvec3d[2, 0])))
    majaxrotation = normalizeAngle(np.rad2deg(np.arctan(
        eigvec3d[0, 2] /
        np.sqrt((eigvec3d[0, 0] ** 2) + (eigvec3d[0, 1] ** 2)))))

    ellipsoid = ConfidenceEllipsoid(
        semi_major_axis_length=s3dmaxax,
        semi_minor_axis_length=s3dminax,
        semi_intermediate_axis_length=s3dintax,
        major_axis_plunge=majaxplunge,
        major_axis_azimuth=majaxazimuth,
        major_axis_rotation=majaxrotation)
    out['origin_uncertainty'] = OriginUncertainty(
        horizontal_uncertainty=horizontalUncertainty,
        min_horizontal_uncertainty=sminax,
        max_horizontal_uncertainty=smajax,
        azimuth_max_horizontal_uncertainty=strike,
        confidence_ellipsoid=ellipsoid,
        preferred_description='confidence ellipsoid',
        confidence_level=confidence * 100.0)
    return out
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc.
        directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths
        and uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event
        locations and uncertainties, picks, and amplitudes and magnitudes
        if available. Returns None when no picks file exists for the event.
    """
    # Parse information from event file (first row of the CSV holds the
    # event summary).
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection
    # units) to metres, as required by QuakeML.
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra ("ns" is the extra-attribute namespace defined
    # at module level — not visible in this chunk).
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"],
                            "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"],
                            "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {
        "value": event_info["DEC_COA_NORM"],
        "namespace": ns
    }

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    # Optional additional waveform outputs (instrument-corrected and
    # Wood-Anderson), present only if the corresponding directory exists.
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    # Confidence ellipsoid from the covariance errors; note COV_ErrY maps
    # to the semi-major axis and rotation is fixed to zero.
    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    # (NOTE(review): the same OriginUncertainty instance is shared by both
    # origins).
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        # No picks file: the event cannot be represented; signal the caller.
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        # Each pick row yields up to two Pick objects: the modelled arrival
        # time, and the automatic pick (if one was made; "-1" means none).
        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                # "autopick" with no pick made — skip.
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file --- (only present when local magnitudes
    # were calculated)
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        # i counts the station magnitudes contributing to the network ML.
        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                # Optional columns — older amplitude files may lack them.
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                # Only the configured phase (P or S) contributes a station
                # magnitude.
                if phase[0] == local_mag_ph and \
                        not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }
                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        # Network local magnitude summarising the station magnitudes.
        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event